-rwxr-xr-x  .hgignore  3
-rw-r--r--  CHANGES  138
-rw-r--r--  doc/build/Makefile  129
-rw-r--r--  doc/build/builder/util.py  6
-rw-r--r--  doc/build/conf.py  133
-rw-r--r--  doc/build/core/engines.rst  2
-rw-r--r--  doc/build/core/expression_api.rst  2
-rw-r--r--  doc/build/core/schema.rst  6
-rw-r--r--  doc/build/core/tutorial.rst  10
-rw-r--r--  doc/build/dialects/index.rst  32
-rw-r--r--  doc/build/dialects/informix.rst  5
-rw-r--r--  doc/build/index.rst  2
-rw-r--r--  doc/build/orm/extensions/declarative.rst  2
-rw-r--r--  doc/build/orm/mapper_config.rst  4
-rw-r--r--  doc/build/orm/relationships.rst  2
-rw-r--r--  doc/build/orm/session.rst  221
-rw-r--r--  doc/build/orm/tutorial.rst  17
-rw-r--r--  doc/build/templates/site_base.mako  1
-rw-r--r--  examples/elementtree/__init__.py  17
-rw-r--r--  examples/elementtree/adjacency_list.py  15
-rw-r--r--  examples/elementtree/optimized_al.py  38
-rw-r--r--  examples/elementtree/pickle.py  15
-rw-r--r--  lib/sqlalchemy/dialects/informix/base.py  293
-rw-r--r--  lib/sqlalchemy/dialects/informix/informixdb.py  37
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/pysqlite.py  7
-rw-r--r--  lib/sqlalchemy/engine/base.py  8
-rw-r--r--  lib/sqlalchemy/exc.py  6
-rwxr-xr-x  lib/sqlalchemy/ext/declarative.py  243
-rw-r--r--  lib/sqlalchemy/orm/__init__.py  14
-rw-r--r--  lib/sqlalchemy/orm/attributes.py  2
-rw-r--r--  lib/sqlalchemy/orm/dependency.py  6
-rw-r--r--  lib/sqlalchemy/orm/interfaces.py  12
-rw-r--r--  lib/sqlalchemy/orm/mapper.py  83
-rw-r--r--  lib/sqlalchemy/orm/properties.py  8
-rw-r--r--  lib/sqlalchemy/orm/query.py  79
-rw-r--r--  lib/sqlalchemy/orm/scoping.py  11
-rw-r--r--  lib/sqlalchemy/orm/session.py  49
-rw-r--r--  lib/sqlalchemy/orm/unitofwork.py  7
-rw-r--r--  lib/sqlalchemy/orm/util.py  6
-rw-r--r--  lib/sqlalchemy/schema.py  42
-rw-r--r--  lib/sqlalchemy/sql/compiler.py  4
-rw-r--r--  lib/sqlalchemy/sql/expression.py  58
-rw-r--r--  lib/sqlalchemy/sql/util.py  19
-rw-r--r--  lib/sqlalchemy/test/requires.py  11
-rw-r--r--  lib/sqlalchemy/topological.py  24
-rw-r--r--  lib/sqlalchemy/types.py  2
-rw-r--r--  lib/sqlalchemy/util.py  8
-rw-r--r--  setup.py  136
-rw-r--r--  test/aaa_profiling/test_zoomark_orm.py  4
-rw-r--r--  test/base/test_dependency.py  23
-rw-r--r--  test/dialect/test_informix.py  2
-rw-r--r--  test/engine/test_bind.py  2
-rw-r--r--  test/engine/test_execute.py  11
-rw-r--r--  test/engine/test_metadata.py  63
-rw-r--r--  test/engine/test_pool.py  2
-rw-r--r--  test/engine/test_reconnect.py  14
-rw-r--r--  test/engine/test_reflection.py  8
-rw-r--r--  test/engine/test_transaction.py  18
-rw-r--r--  test/ext/test_declarative.py  286
-rw-r--r--  test/orm/inheritance/test_magazine.py  3
-rw-r--r--  test/orm/inheritance/test_single.py  6
-rw-r--r--  test/orm/test_cascade.py  63
-rw-r--r--  test/orm/test_load_on_fks.py  9
-rw-r--r--  test/orm/test_mapper.py  29
-rw-r--r--  test/orm/test_naturalpks.py  9
-rw-r--r--  test/orm/test_query.py  199
-rw-r--r--  test/orm/test_scoping.py  17
-rw-r--r--  test/orm/test_session.py  33
-rw-r--r--  test/sql/test_compiler.py  423
-rw-r--r--  test/sql/test_query.py  20
-rw-r--r--  test/sql/test_types.py  41
71 files changed, 2546 insertions, 714 deletions
diff --git a/.hgignore b/.hgignore
index 0a1571405..ef7855fae 100755
--- a/.hgignore
+++ b/.hgignore
@@ -3,4 +3,5 @@ syntax:regexp
^doc/build/output
.pyc$
.orig$
-.egg-info \ No newline at end of file
+.egg-info
+test.cfg
diff --git a/CHANGES b/CHANGES
index 657695c7e..ab5e240bf 100644
--- a/CHANGES
+++ b/CHANGES
@@ -11,6 +11,47 @@ CHANGES
backrefs involved, where the initiating parent
was a subclass (with its own mapper) of the
previous parent.
+
+ - Fixed a regression in 0.6.4 which occurred if you
+ passed an empty list to "include_properties" on
+ mapper() [ticket:1918]
+
+ - Patched a case where query.join() would adapt the
+ right side to the right side of the left's join
+ inappropriately [ticket:1925]
+
+ - Query.select_from() has been beefed up to help
+ ensure that a subsequent call to query.join()
+ will use the select_from() entity, assuming it's
+ a mapped entity and not a plain selectable,
+ as the default "left" side, not the first entity
+ in the Query object's list of entities.
+
+ - The exception raised by Session when it is used
+ subsequent to a subtransaction rollback (which is what
+ happens when a flush fails in autocommit=False mode) has
+ now been reworded (this is the "inactive due to a
+ rollback in a subtransaction" message). In particular,
+ if the rollback was due to an exception during flush(),
+ the message states this is the case, and reiterates the
+ string form of the original exception that occurred
+ during flush. If the session is closed due to explicit
+ usage of subtransactions (not very common), the message
+ just states this is the case.
+
+ - The exception raised by Mapper when repeated requests to
+ its initialization are made after initialization already
+ failed no longer assumes the "hasattr" case, since
+      there are other scenarios in which this message gets
+ emitted, and the message also does not compound onto
+ itself multiple times - you get the same message for
+ each attempt at usage. The misnomer "compiles" is being
+ traded out for "initialize".
+
+ - Fixed bug in query.update() where 'evaluate' or 'fetch'
+ expiration would fail if the column expression key was
+      a class attribute with a different key name than the
+ actual column name. [ticket:1935]
- Added an assertion during flush which ensures
that no NULL-holding identity keys were generated
@@ -37,12 +78,21 @@ CHANGES
object is loaded, so backrefs aren't available until
after a flush. The flag is only intended for very
specific use cases.
-
- - Slight improvement to the behavior of "passive_updates=False"
- when placed only on the many-to-one side of a
- relationship; documentation has been clarified
- that passive_updates=False should really be on the
- one-to-many side.
+
+ - Another new flag on relationship(), cascade_backrefs,
+ disables the "save-update" cascade when the event was
+ initiated on the "reverse" side of a bidirectional
+ relationship. This is a cleaner behavior so that
+ many-to-ones can be set on a transient object without
+ it getting sucked into the child object's session,
+ while still allowing the forward collection to
+ cascade. We *might* default this to False in 0.7.
+
+ - Slight improvement to the behavior of
+ "passive_updates=False" when placed only on the
+ many-to-one side of a relationship; documentation has
+ been clarified that passive_updates=False should really
+ be on the one-to-many side.
- Placing passive_deletes=True on a many-to-one emits
a warning, since you probably intended to put it on
@@ -58,7 +108,81 @@ CHANGES
the "where type in (x, y, z)" is placed on the outside
of the query only, instead of repeatedly. May make
some more adjustments to this.
-
+
+ - scoped_session emits a warning when configure() is
+ called if a Session is already present (checks only the
+ current thread) [ticket:1924]
+
+ - reworked the internals of mapper.cascade_iterator() to
+ cut down method calls by about 9% in some circumstances.
+ [ticket:1932]
+
+- sql
+ - Table.tometadata() now copies Index objects associated
+ with the Table as well.
+
+ - Table.tometadata() issues a warning if the given Table
+ is already present in the target MetaData - the existing
+ Table object is returned.
+
+ - An informative error message is raised if a Column
+ which has not yet been assigned a name, i.e. as in
+ declarative, is used in a context where it is
+ exported to the columns collection of an enclosing
+ select() construct, or if any construct involving
+ that column is compiled before its name is
+ assigned.
+
+ - as_scalar(), label() can be called on a selectable
+ which contains a Column that is not yet named.
+ [ticket:1862]
+
+ - Fixed recursion overflow which could occur when operating
+ with two expressions both of type "NullType", but
+ not the singleton NULLTYPE instance. [ticket:1907]
+
+- declarative
+ - @classproperty (soon/now @declared_attr) takes effect for
+ __mapper_args__, __table_args__, __tablename__ on
+ a base class that is not a mixin, as well as mixins.
+ [ticket:1922]
+
+ - @classproperty 's official name/location for usage
+ with declarative is sqlalchemy.ext.declarative.declared_attr.
+ Same thing, but moving there since it is more of a
+      "marker" that's specific to declarative,
+ not just an attribute technique. [ticket:1915]
+
+ - Fixed bug whereby columns on a mixin wouldn't propagate
+ correctly to a single-table, or joined-table,
+ inheritance scheme where the attribute name is
+ different than that of the column. [ticket:1930],
+ [ticket:1931].
+
+- engine
+
+ - Fixed a regression in 0.6.4 whereby the change that
+ allowed cursor errors to be raised consistently broke
+ the result.lastrowid accessor. Test coverage has
+ been added for result.lastrowid. Note that lastrowid
+ is only supported by Pysqlite and some MySQL drivers,
+ so isn't super-useful in the general case.
+
+ - the logging message emitted by the engine when
+ a connection is first used is now "BEGIN (implicit)"
+ to emphasize that DBAPI has no explicit begin().
+
+- informix
+ - *Major* cleanup / modernization of the Informix
+ dialect for 0.6, courtesy Florian Apolloner.
+ [ticket:1906]
+
+- misc
+ - CircularDependencyError now has .cycles and .edges
+ members, which are the set of elements involved in
+ one or more cycles, and the set of edges as 2-tuples.
+ [ticket:1890]
+
0.6.4
=====
- orm
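The ``Table.tometadata()`` behavior described in the "sql" entries of the CHANGES
diff above can be sketched with a short example. This is an illustration only;
the table and index names are invented, not part of the commit::

    from sqlalchemy import MetaData, Table, Column, Integer, String, Index

    md1 = MetaData()
    users = Table('users', md1,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    Index('ix_users_name', users.c.name)

    md2 = MetaData()
    users2 = users.tometadata(md2)

    # per the changelog entry, Index objects are copied with the Table
    assert [ix.name for ix in users2.indexes] == ['ix_users_name']

    # repeating the copy warns and returns the already-present Table
    assert users.tometadata(md2) is users2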
diff --git a/doc/build/Makefile b/doc/build/Makefile
index f7ac2ca57..31dce05e6 100644
--- a/doc/build/Makefile
+++ b/doc/build/Makefile
@@ -5,28 +5,42 @@
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
+BUILDDIR = output
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d output/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-.PHONY: help clean html latex site-mako
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest dist-html site-mako
help:
@echo "Please use \`make <target>' where <target> is one of"
- @echo " html to make standalone HTML files"
- @echo " dist-html same as html, but places files in /doc"
- @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " html to make standalone HTML files"
+ @echo " dist-html same as html, but places files in /doc"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
- -rm -rf output/*
+ -rm -rf $(BUILDDIR)/*
html:
- mkdir -p output/html output/doctrees
- $(SPHINXBUILD) -b html -A mako_layout=html $(ALLSPHINXOPTS) output/html
+ $(SPHINXBUILD) -b html -A mako_layout=html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
- @echo "Build finished. The HTML pages are in output/html."
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dist-html:
$(SPHINXBUILD) -b html -A mako_layout=html $(ALLSPHINXOPTS) ..
@@ -34,18 +48,95 @@ dist-html:
@echo "Build finished. The HTML pages are in ../."
site-mako:
- mkdir -p output/site output/doctrees
- $(SPHINXBUILD) -b html -A mako_layout=site $(ALLSPHINXOPTS) output/site
+ $(SPHINXBUILD) -b html -A mako_layout=site $(ALLSPHINXOPTS) $(BUILDDIR)/site
@echo
- @echo "Build finished. The Mako pages are in output/site."
+ @echo "Build finished. The Mako pages are in $(BUILDDIR)/site."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/SQLAlchemy.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/SQLAlchemy.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/SQLAlchemy"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/SQLAlchemy"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
- mkdir -p output/latex output/doctrees
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) output/latex
- cp texinputs/* output/latex/
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ cp texinputs/* $(BUILDDIR)/latex/
@echo
- @echo "Build finished; the LaTeX files are in output/latex."
- @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
- "run these through (pdf)latex."
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ make -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
doctest:
- $(SPHINXBUILD) -b doctest -d output/doctrees . .
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) .
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/doc/build/builder/util.py b/doc/build/builder/util.py
index 0ae4de5cd..dc2e27245 100644
--- a/doc/build/builder/util.py
+++ b/doc/build/builder/util.py
@@ -3,6 +3,10 @@ import re
def striptags(text):
return re.compile(r'<[^>]*>').sub('', text)
+def go(m):
+ # .html with no anchor if present, otherwise "#" for top of page
+ return m.group(1) or '#'
+
def strip_toplevel_anchors(text):
- return re.compile(r'\.html#[-\w]+-toplevel').sub('.html', text)
+ return re.compile(r'(\.html)?#[-\w]+-toplevel').sub(go, text)
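A quick sanity check of the rewritten ``strip_toplevel_anchors()`` above,
restated outside the diff: links that carry a filename keep their ``.html``,
while bare in-page anchors collapse to ``#``::

    import re

    def go(m):
        # .html with no anchor if present, otherwise "#" for top of page
        return m.group(1) or '#'

    def strip_toplevel_anchors(text):
        return re.compile(r'(\.html)?#[-\w]+-toplevel').sub(go, text)

    assert strip_toplevel_anchors('session.html#session-toplevel') == 'session.html'
    assert strip_toplevel_anchors('#session-toplevel') == '#'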
diff --git a/doc/build/conf.py b/doc/build/conf.py
index e44651685..81120fed0 100644
--- a/doc/build/conf.py
+++ b/doc/build/conf.py
@@ -1,36 +1,38 @@
# -*- coding: utf-8 -*-
#
-# Foo Bar documentation build configuration file, created by
+# SQLAlchemy documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 26 19:50:10 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
-# The contents of this file are pickled, so don't put values in the namespace
-# that aren't pickleable (module imports are okay, they're removed automatically).
+# Note that not all possible configuration values are present in this
+# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
-# If your extensions are in another directory, add it here. If the directory
-# is relative to the documentation root, use os.path.abspath to make it
-# absolute, like shown here.
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../lib'))
sys.path.insert(0, os.path.abspath('../../examples'))
sys.path.insert(0, os.path.abspath('.'))
import sqlalchemy
-# General configuration
-# ---------------------
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
#extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
# 'sphinx.ext.doctest', 'builder.builders']
-extensions = ['sphinx.ext.autodoc',
+extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest', 'builder.builders']
# Add any paths that contain templates here, relative to this directory.
@@ -44,7 +46,7 @@ source_suffix = '.rst'
template_bridge = "builder.builders.MakoBridge"
# The encoding of source files.
-#source_encoding = 'utf-8'
+#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
@@ -72,12 +74,9 @@ release = sqlalchemy.__version__
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
-# List of documents that shouldn't be included in the build.
-unused_docs = []
-
-# List of directories, relative to source directory, that shouldn't be searched
-# for source files.
-exclude_trees = ['build']
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
@@ -96,9 +95,23 @@ exclude_trees = ['build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
-# Options for HTML output
-# -----------------------
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
@@ -142,7 +155,7 @@ html_last_updated_fmt = '%m/%d/%Y %H:%M:%S'
#html_additional_pages = {}
# If false, no module index is generated.
-html_use_modindex = False
+html_domain_indices = False
# If false, no index is generated.
#html_use_index = True
@@ -153,21 +166,29 @@ html_use_modindex = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
-# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = ''
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
# Output file base name for HTML help builder.
-htmlhelp_basename = 'FooBardoc'
+htmlhelp_basename = 'SQLAlchemydoc'
#autoclass_content = 'both'
-# Options for LaTeX output
-# ------------------------
+# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
@@ -176,7 +197,7 @@ htmlhelp_basename = 'FooBardoc'
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, document class [howto/manual]).
+# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'sqlalchemy_%s.tex' % release.replace('.', '_'), ur'SQLAlchemy Documentation',
ur'Mike Bayer', 'manual'),
@@ -190,6 +211,12 @@ latex_documents = [
# not chapters.
#latex_use_parts = False
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
# Additional stuff for the LaTeX preamble.
# sets TOC depth to 2.
latex_preamble = '\setcounter{tocdepth}{3}'
@@ -198,4 +225,58 @@ latex_preamble = '\setcounter{tocdepth}{3}'
#latex_appendices = []
# If false, no module index is generated.
-#latex_use_modindex = True
+#latex_domain_indices = True
+
+#latex_elements = {
+# 'papersize': 'letterpaper',
+# 'pointsize': '10pt',
+#}
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'sqlalchemy', u'SQLAlchemy Documentation',
+ [u'SQLAlchemy authors'], 1)
+]
+
+
+# -- Options for Epub output ---------------------------------------------------
+
+# Bibliographic Dublin Core info.
+epub_title = u'SQLAlchemy'
+epub_author = u'SQLAlchemy authors'
+epub_publisher = u'SQLAlchemy authors'
+epub_copyright = u'2010, SQLAlchemy authors'
+
+# The language of the text. It defaults to the language option
+# or en if the language is not set.
+#epub_language = ''
+
+# The scheme of the identifier. Typical schemes are ISBN or URL.
+#epub_scheme = ''
+
+# The unique identifier of the text. This can be a ISBN number
+# or the project homepage.
+#epub_identifier = ''
+
+# A unique identification for the text.
+#epub_uid = ''
+
+# HTML files that should be inserted before the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_pre_files = []
+
+# HTML files that should be inserted after the pages created by sphinx.
+# The format is a list of tuples containing the path and title.
+#epub_post_files = []
+
+# A list of files that should not be packed into the epub file.
+#epub_exclude_files = []
+
+# The depth of the table of contents in toc.ncx.
+#epub_tocdepth = 3
+
+# Allow duplicate toc entries.
+#epub_tocdup = True
diff --git a/doc/build/core/engines.rst b/doc/build/core/engines.rst
index de81a6d73..576bace36 100644
--- a/doc/build/core/engines.rst
+++ b/doc/build/core/engines.rst
@@ -63,7 +63,7 @@ ibm-db_ thirdparty thirdparty thirdparty
**Firebird**
kinterbasdb_ ``firebird+kinterbasdb``\* yes development no yes yes
**Informix**
-informixdb_ ``informix+informixdb``\* development development no unknown unknown
+informixdb_ ``informix+informixdb``\* yes development no unknown unknown
**MaxDB**
sapdb_ ``maxdb+sapdb``\* development development no yes unknown
**Microsoft Access**
diff --git a/doc/build/core/expression_api.rst b/doc/build/core/expression_api.rst
index c01f84c2c..c39701a59 100644
--- a/doc/build/core/expression_api.rst
+++ b/doc/build/core/expression_api.rst
@@ -1,3 +1,5 @@
+.. _expression_api_toplevel:
+
SQL Statements and Expressions
==============================
diff --git a/doc/build/core/schema.rst b/doc/build/core/schema.rst
index 13b70af8b..11ebecc67 100644
--- a/doc/build/core/schema.rst
+++ b/doc/build/core/schema.rst
@@ -1025,7 +1025,7 @@ constraints generally should only refer to the column to which they are
placed, while table level constraints can refer to any columns in the table.
Note that some databases do not actively support check constraints such as
-MySQL and SQLite.
+MySQL.
.. sourcecode:: python+sql
@@ -1061,7 +1061,7 @@ Other Constraint Classes
.. autoclass:: ColumnCollectionConstraint
:show-inheritance:
-
+
.. autoclass:: PrimaryKeyConstraint
:show-inheritance:
@@ -1299,7 +1299,7 @@ other DDL elements except it accepts a string which is the text to be emitted:
A more comprehensive method of creating libraries of DDL constructs is to use
custom compilation - see :ref:`sqlalchemy.ext.compiler_toplevel` for
- details.
+details.
.. _schema_api_ddl:
diff --git a/doc/build/core/tutorial.rst b/doc/build/core/tutorial.rst
index 23190a143..bf3920198 100644
--- a/doc/build/core/tutorial.rst
+++ b/doc/build/core/tutorial.rst
@@ -1187,12 +1187,14 @@ Finally, a delete. Easy enough:
Further Reference
==================
-API docs: :mod:`sqlalchemy.sql.expression`
+Expression Language Reference: :ref:`expression_api_toplevel`
-Table Metadata Reference: :ref:`metadata_toplevel`
+Database Metadata Reference: :ref:`metadata_toplevel`
-Engine/Connection/Execution Reference: :ref:`engines_toplevel`
+Engine Reference: :ref:`engines_toplevel`
-SQL Types: :ref:`types`
+Connection Reference: :ref:`connections_toplevel`
+
+Types Reference: :ref:`types_toplevel`
diff --git a/doc/build/dialects/index.rst b/doc/build/dialects/index.rst
index a8cfc3324..8ad9330bc 100644
--- a/doc/build/dialects/index.rst
+++ b/doc/build/dialects/index.rst
@@ -3,22 +3,23 @@
Dialects
========
-The *dialect* is the system SQLAlchemy uses to communicate with various types of DBAPIs and databases.
-A compatibility chart of supported backends can be found at :ref:`supported_dbapis`.
+The **dialect** is the system SQLAlchemy uses to communicate with various types of DBAPIs and databases.
+A compatibility chart of supported backends can be found at :ref:`supported_dbapis`. The sections that
+follow contain reference documentation and notes specific to the usage of each backend, as well as notes
+for the various DBAPIs.
-This section contains all notes and documentation specific to the usage of various backends.
-
-Supported Databases
--------------------
-
-These backends are fully operational with
-current versions of SQLAlchemy.
+Note that not all backends are fully ported and tested with
+current versions of SQLAlchemy. The compatibility chart
+should be consulted to check for current support level.
.. toctree::
:maxdepth: 1
:glob:
firebird
+ informix
+ maxdb
+ access
mssql
mysql
oracle
@@ -26,17 +27,4 @@ current versions of SQLAlchemy.
sqlite
sybase
-Unsupported Databases
----------------------
-
-These backends are untested and may not be completely
-ported to current versions of SQLAlchemy.
-
-.. toctree::
- :maxdepth: 1
- :glob:
-
- access
- informix
- maxdb
diff --git a/doc/build/dialects/informix.rst b/doc/build/dialects/informix.rst
index 7cf271d0b..617b8cd9d 100644
--- a/doc/build/dialects/informix.rst
+++ b/doc/build/dialects/informix.rst
@@ -2,3 +2,8 @@ Informix
========
.. automodule:: sqlalchemy.dialects.informix.base
+
+informixdb Notes
+--------------------
+
+.. automodule:: sqlalchemy.dialects.informix.informixdb \ No newline at end of file
diff --git a/doc/build/index.rst b/doc/build/index.rst
index 2c66bd47f..2d75a5c08 100644
--- a/doc/build/index.rst
+++ b/doc/build/index.rst
@@ -8,7 +8,7 @@ Table of Contents
orm/index
core/index
dialects/index
-
+
Indices and tables
------------------
diff --git a/doc/build/orm/extensions/declarative.rst b/doc/build/orm/extensions/declarative.rst
index 010371314..4bdb68fa7 100644
--- a/doc/build/orm/extensions/declarative.rst
+++ b/doc/build/orm/extensions/declarative.rst
@@ -8,6 +8,8 @@ API Reference
.. autofunction:: declarative_base
+.. autoclass:: declared_attr
+
.. autofunction:: _declarative_constructor
.. autofunction:: has_inherited_table
diff --git a/doc/build/orm/mapper_config.rst b/doc/build/orm/mapper_config.rst
index 574646ca7..15377436f 100644
--- a/doc/build/orm/mapper_config.rst
+++ b/doc/build/orm/mapper_config.rst
@@ -1,7 +1,7 @@
-.. _mapper_config_toplevel:
-
.. module:: sqlalchemy.orm
+.. _mapper_config_toplevel:
+
Mapper Configuration
====================
diff --git a/doc/build/orm/relationships.rst b/doc/build/orm/relationships.rst
index 342847328..f9337a5f6 100644
--- a/doc/build/orm/relationships.rst
+++ b/doc/build/orm/relationships.rst
@@ -1,5 +1,7 @@
.. module:: sqlalchemy.orm
+.. _relationship_config_toplevel:
+
Relationship Configuration
==========================
diff --git a/doc/build/orm/session.rst b/doc/build/orm/session.rst
index 7448392fe..16ca7aff0 100644
--- a/doc/build/orm/session.rst
+++ b/doc/build/orm/session.rst
@@ -346,6 +346,8 @@ The :func:`~sqlalchemy.orm.session.Session.add` operation **cascades** along
the ``save-update`` cascade. For more details see the section
:ref:`unitofwork_cascades`.
+.. _unitofwork_merging:
+
Merging
-------
@@ -358,17 +360,17 @@ follows::
When given an instance, it follows these steps:
- * It examines the primary key of the instance. If it's present, it attempts
- to load an instance with that primary key (or pulls from the local
- identity map).
- * If there's no primary key on the given instance, or the given primary key
- does not exist in the database, a new instance is created.
- * The state of the given instance is then copied onto the located/newly
- created instance.
- * The operation is cascaded to associated child items along the ``merge``
- cascade. Note that all changes present on the given instance, including
- changes to collections, are merged.
- * The new instance is returned.
+* It examines the primary key of the instance. If it's present, it attempts
+ to load an instance with that primary key (or pulls from the local
+ identity map).
+* If there's no primary key on the given instance, or the given primary key
+ does not exist in the database, a new instance is created.
+* The state of the given instance is then copied onto the located/newly
+ created instance.
+* The operation is cascaded to associated child items along the ``merge``
+ cascade. Note that all changes present on the given instance, including
+ changes to collections, are merged.
+* The new instance is returned.
With :func:`~sqlalchemy.orm.session.Session.merge`, the given instance is not
placed within the session, and can be associated with a different session or
@@ -377,19 +379,22 @@ taking the state of any kind of object structure without regard for its
origins or current session associations and placing that state within a
session. Here's two examples:
- * An application which reads an object structure from a file and wishes to
- save it to the database might parse the file, build up the structure, and
- then use :func:`~sqlalchemy.orm.session.Session.merge` to save it to the
- database, ensuring that the data within the file is used to formulate the
- primary key of each element of the structure. Later, when the file has
- changed, the same process can be re-run, producing a slightly different
- object structure, which can then be ``merged`` in again, and the
- :class:`~sqlalchemy.orm.session.Session` will automatically update the
- database to reflect those changes.
- * A web application stores mapped entities within an HTTP session object.
- When each request starts up, the serialized data can be merged into the
- session, so that the original entity may be safely shared among requests
- and threads.
+* An application which reads an object structure from a file and wishes to
+ save it to the database might parse the file, build up the
+ structure, and then use
+ :func:`~sqlalchemy.orm.session.Session.merge` to save it
+ to the database, ensuring that the data within the file is
+ used to formulate the primary key of each element of the
+ structure. Later, when the file has changed, the same
+ process can be re-run, producing a slightly different
+ object structure, which can then be ``merged`` in again,
+ and the :class:`~sqlalchemy.orm.session.Session` will
+ automatically update the database to reflect those
+ changes.
+* A web application stores mapped entities within an HTTP session object.
+ When each request starts up, the serialized data can be
+ merged into the session, so that the original entity may
+ be safely shared among requests and threads.
:func:`~sqlalchemy.orm.session.Session.merge` is frequently used by
applications which implement their own second level caches. This refers to an
@@ -406,6 +411,133 @@ all of its children may not contain any pending changes, and it's also of
course possible that newer information in the database will not be present on
the merged object, since no load is issued.
+Merge Tips
+~~~~~~~~~~
+
+:meth:`~.Session.merge` is an extremely useful method for many purposes. However,
+it deals with the intricate border between objects that are transient/detached and
+those that are persistent, as well as the automated transference of state.
+The wide variety of scenarios that can present themselves here often require a
+more careful approach to the state of objects. Common problems with merge usually involve
+some unexpected state regarding the object being passed to :meth:`~.Session.merge`.
+
+Let's use the canonical example of the User and Address objects::
+
+ class User(Base):
+ __tablename__ = 'user'
+
+ id = Column(Integer, primary_key=True)
+ name = Column(String(50), nullable=False)
+ addresses = relationship("Address", backref="user")
+
+ class Address(Base):
+ __tablename__ = 'address'
+
+ id = Column(Integer, primary_key=True)
+ email_address = Column(String(50), nullable=False)
+ user_id = Column(Integer, ForeignKey('user.id'), nullable=False)
+
+Assume a ``User`` object with one ``Address``, already persistent::
+
+ >>> u1 = User(name='ed', addresses=[Address(email_address='ed@ed.com')])
+ >>> session.add(u1)
+ >>> session.commit()
+
+We now create ``a1``, an object outside the session, which we'd like
+to merge on top of the existing ``Address``::
+
+ >>> existing_a1 = u1.addresses[0]
+ >>> a1 = Address(id=existing_a1.id)
+
+A surprise would occur if we said this::
+
+ >>> a1.user = u1
+ >>> a1 = session.merge(a1)
+ >>> session.commit()
+ sqlalchemy.orm.exc.FlushError: New instance <Address at 0x1298f50>
+ with identity key (<class '__main__.Address'>, (1,)) conflicts with
+ persistent instance <Address at 0x12a25d0>
+
+Why is that? We weren't careful with our cascades. The assignment
+of ``a1.user`` to a persistent object cascaded to the backref of ``User.addresses``
+and made our ``a1`` object pending, as though we had added it. Now we have
+*two* ``Address`` objects in the session::
+
+ >>> a1 = Address()
+ >>> a1.user = u1
+ >>> a1 in session
+ True
+ >>> existing_a1 in session
+ True
+ >>> a1 is existing_a1
+ False
+
+Above, our ``a1`` is already pending in the session. The
+subsequent :meth:`~.Session.merge` operation essentially
+does nothing. Cascade can be configured via the ``cascade``
+option on :func:`.relationship`, although in this case it
+would mean removing the ``save-update`` cascade from the
+``User.addresses`` relationship - and usually, that behavior
+is extremely convenient. The solution here would usually be to not assign
+``a1.user`` to an object already persistent in the target
+session.
+
+Note that a new :func:`.relationship` option introduced in 0.6.5,
+``cascade_backrefs=False``, will also prevent the ``Address`` from
+being added to the session via the ``a1.user = u1`` assignment.
+
+Further detail on cascade operation is at :ref:`unitofwork_cascades`.
+
+Another example of unexpected state::
+
+ >>> a1 = Address(id=existing_a1.id, user_id=u1.id)
+    >>> a1.user is None
+    True
+ >>> a1 = session.merge(a1)
+ >>> session.commit()
+ sqlalchemy.exc.IntegrityError: (IntegrityError) address.user_id
+ may not be NULL
+
+Here, we accessed ``a1.user``, which returned its default value
+of ``None``; as a result of this access, ``None`` has been placed in the ``__dict__`` of
+our object ``a1``. Normally, this operation creates no change event,
+so the ``user_id`` attribute takes precedence during a
+flush. But when we merge the ``Address`` object into the session, the operation
+is equivalent to::
+
+ >>> existing_a1.id = existing_a1.id
+ >>> existing_a1.user_id = u1.id
+ >>> existing_a1.user = None
+
+Where above, both ``user_id`` and ``user`` are assigned to, and change events
+are emitted for both. The ``user`` association
+takes precedence, and None is applied to ``user_id``, causing a failure.
+
+Most :meth:`~.Session.merge` issues can be examined by first checking:
+is the object prematurely in the session?
+
+.. sourcecode:: python+sql
+
+    >>> a1 = Address(id=existing_a1.id, user_id=u1.id)
+ >>> assert a1 not in session
+ >>> a1 = session.merge(a1)
+
+Or is there state on the object that we don't want? Examining ``__dict__``
+is a quick way to check::
+
+    >>> a1 = Address(id=existing_a1.id, user_id=u1.id)
+ >>> a1.user
+ >>> a1.__dict__
+ {'_sa_instance_state': <sqlalchemy.orm.state.InstanceState object at 0x1298d10>,
+ 'user_id': 1,
+ 'id': 1,
+ 'user': None}
+ >>> # we don't want user=None merged, remove it
+ >>> del a1.user
+ >>> a1 = session.merge(a1)
+ >>> # success
+ >>> session.commit()
+
Deleting
--------
@@ -729,7 +861,7 @@ relationship between an ``Order`` and an ``Item`` object.
The ``customer`` relationship specifies only the "save-update" cascade value,
indicating most operations will not be cascaded from a parent ``Order``
instance to a child ``User`` instance except for the
-:func:`~sqlalchemy.orm.session.Session.add` operation. "save-update" cascade
+:func:`~sqlalchemy.orm.session.Session.add` operation. ``save-update`` cascade
indicates that an :func:`~sqlalchemy.orm.session.Session.add` on the parent
will cascade to all child items, and also that items added to a parent which
is already present in a session will also be added to that same session.
@@ -752,6 +884,45 @@ objects to allow attachment to only one parent at a time.
The default value for ``cascade`` on :func:`~sqlalchemy.orm.relationship` is
``save-update, merge``.
+``save-update`` cascade also takes place on backrefs by default. This means
+that, given a mapping such as this::
+
+ mapper(Order, order_table, properties={
+        'items' : relationship(Item, backref='order')
+ })
+
+If an ``Order`` is already in the session, and is assigned to the ``order``
+attribute of an ``Item``, the backref appends the ``Item`` to the ``items``
+collection of that ``Order``, resulting in the ``save-update`` cascade taking
+place::
+
+ >>> o1 = Order()
+ >>> session.add(o1)
+ >>> o1 in session
+ True
+
+ >>> i1 = Item()
+ >>> i1.order = o1
+    >>> i1 in o1.items
+ True
+ >>> i1 in session
+ True
+
+This behavior can be disabled as of 0.6.5 using the ``cascade_backrefs`` flag::
+
+ mapper(Order, order_table, properties={
+        'items' : relationship(Item, backref='order',
+ cascade_backrefs=False)
+ })
+
+So above, the assignment of ``i1.order = o1`` will append ``i1`` to the ``items``
+collection of ``o1``, but will not add ``i1`` to the session. You can of
+course :func:`~.Session.add` ``i1`` to the session at a later point. This option
+may be helpful for situations where an object needs to be kept out of a
+session until its construction is completed, but still needs to be given
+associations to objects which are already persistent in the target session.
+
+
.. _unitofwork_transaction:
Managing Transactions
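The ``cascade_backrefs`` documentation added to session.rst above can also be
restated in declarative form, reusing the User/Address pair from the Merge Tips
section. This is a hedged sketch; the flag and its behavior are per the docs
above, while the rest of the setup is assumed for illustration::

    from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship, Session

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))
        addresses = relationship("Address", backref="user",
                                 cascade_backrefs=False)

    class Address(Base):
        __tablename__ = 'address'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)

    u1 = User(name='ed')
    session.add(u1)

    a1 = Address()
    a1.user = u1                 # backref still appends a1 to u1.addresses
    assert a1 in u1.addresses
    assert a1 not in session     # but save-update does not cascade here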
diff --git a/doc/build/orm/tutorial.rst b/doc/build/orm/tutorial.rst
index 6f38a35c9..7de7ac344 100644
--- a/doc/build/orm/tutorial.rst
+++ b/doc/build/orm/tutorial.rst
@@ -254,7 +254,7 @@ For example, below we create a new :class:`~sqlalchemy.orm.query.Query` object w
.. sourcecode:: python+sql
{sql}>>> our_user = session.query(User).filter_by(name='ed').first() # doctest:+ELLIPSIS,+NORMALIZE_WHITESPACE
- BEGIN
+ BEGIN (implicit)
INSERT INTO users (name, fullname, password) VALUES (?, ?, ?)
('ed', 'Ed Jones', 'edspassword')
SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password
@@ -325,7 +325,7 @@ If we look at Ed's ``id`` attribute, which earlier was ``None``, it now has a va
.. sourcecode:: python+sql
{sql}>>> ed_user.id # doctest: +NORMALIZE_WHITESPACE
- BEGIN
+ BEGIN (implicit)
SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password
FROM users
WHERE users.id = ?
@@ -373,7 +373,7 @@ Rolling back, we can see that ``ed_user``'s name is back to ``ed``, and ``fake_u
{stop}
{sql}>>> ed_user.name #doctest: +NORMALIZE_WHITESPACE
- BEGIN
+ BEGIN (implicit)
SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password
FROM users
WHERE users.id = ?
@@ -824,7 +824,7 @@ Querying for Jack, we get just Jack back. No SQL is yet issued for Jack's addre
.. sourcecode:: python+sql
{sql}>>> jack = session.query(User).filter_by(name='jack').one() #doctest: +NORMALIZE_WHITESPACE
- BEGIN
+ BEGIN (implicit)
SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password
FROM users
WHERE users.name = ?
@@ -1196,6 +1196,7 @@ Below, we use ``mapper()`` to reconfigure an ORM mapping for ``User`` and ``Addr
.. sourcecode:: python+sql
+ >>> users_table = User.__table__
>>> mapper(User, users_table, properties={ # doctest: +ELLIPSIS
... 'addresses':relationship(Address, backref='user', cascade="all, delete, delete-orphan")
... })
@@ -1211,7 +1212,7 @@ Now when we load Jack (below using ``get()``, which loads by primary key), remov
# load Jack by primary key
{sql}>>> jack = session.query(User).get(5) #doctest: +NORMALIZE_WHITESPACE
- BEGIN
+ BEGIN (implicit)
SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password
FROM users
WHERE users.id = ?
@@ -1436,6 +1437,8 @@ Further Reference
Query Reference: :ref:`query_api_toplevel`
-Further information on mapping setups are in :ref:`datamapping_toplevel`.
+Mapper Reference: :ref:`mapper_config_toplevel`
+
+Relationship Reference: :ref:`relationship_config_toplevel`
-Further information on working with Sessions: :ref:`session_toplevel`.
+Session Reference: :ref:`session_toplevel`
diff --git a/doc/build/templates/site_base.mako b/doc/build/templates/site_base.mako
index 36bd0d973..301c6a6dc 100644
--- a/doc/build/templates/site_base.mako
+++ b/doc/build/templates/site_base.mako
@@ -7,7 +7,6 @@
</%text>
<div style="text-align:right">
-<b>Quick Select:</b> <a href="/docs/06/">0.6</a> | <a href="/docs/05/">0.5</a> | <a href="/docs/04/">0.4</a><br/>
<b>PDF Download:</b> <a href="${pathto('sqlalchemy_' + release.replace('.', '_') + '.pdf', 1)}">download</a>
</div>
diff --git a/examples/elementtree/__init__.py b/examples/elementtree/__init__.py
index 70554f5c9..33805c0cb 100644
--- a/examples/elementtree/__init__.py
+++ b/examples/elementtree/__init__.py
@@ -1,6 +1,11 @@
"""
-Illustrates three strategies for persisting and querying XML documents as represented by
-ElementTree in a relational database. The techniques do not apply any mappings to the ElementTree objects directly, so are compatible with the native cElementTree as well as lxml, and can be adapted to suit any kind of DOM representation system. Querying along xpath-like strings is illustrated as well.
+Illustrates three strategies for persisting and querying XML
+documents as represented by ElementTree in a relational
+database. The techniques do not apply any mappings to the
+ElementTree objects directly, so are compatible with the
+native cElementTree as well as lxml, and can be adapted to
+suit any kind of DOM representation system. Querying along
+xpath-like strings is illustrated as well.
In order of complexity:
@@ -10,10 +15,10 @@ In order of complexity:
represented in a separate table. The nodes are associated in a hierarchy using an adjacency list
structure. A query function is introduced which can search for nodes along any path with a given
structure of attributes, basically a (very narrow) subset of xpath.
-* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but adds a
- :class:`~sqlalchemy.orm.interfaces.MapperExtension` which optimizes how the hierarchical structure
- is loaded, such that the full set of DOM nodes are loaded within a single table result set, and
- are organized hierarchically as they are received during a load.
+* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but associates each
+ DOM row with its owning document row, so that a full document of DOM nodes can be
+ loaded using O(1) queries - the construction of the "hierarchy" is performed after
+ the load in a non-recursive fashion and is much more efficient.
E.g.::
diff --git a/examples/elementtree/adjacency_list.py b/examples/elementtree/adjacency_list.py
index 78d71f3fe..3b9e4c523 100644
--- a/examples/elementtree/adjacency_list.py
+++ b/examples/elementtree/adjacency_list.py
@@ -8,15 +8,15 @@ styles of persistence are identical, as is the structure of the main Document cl
################################# PART I - Imports/Configuration ####################################
from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
- Unicode, and_)
-from sqlalchemy.orm import mapper, relationship, create_session, lazyload
+ Unicode, and_, create_engine)
+from sqlalchemy.orm import mapper, relationship, Session, lazyload
import sys, os, StringIO, re
from xml.etree import ElementTree
+e = create_engine('sqlite://')
meta = MetaData()
-meta.bind = 'sqlite://'
################################# PART II - Table Metadata #########################################
@@ -44,7 +44,7 @@ attributes = Table('attributes', meta,
Column('name', Unicode(100), nullable=False, primary_key=True),
Column('value', Unicode(255)))
-meta.create_all()
+meta.create_all(e)
#################################### PART III - Model #############################################
@@ -142,7 +142,7 @@ Document.element = ElementTreeMarshal()
line = "\n--------------------------------------------------------"
# save to DB
-session = create_session()
+session = Session(e)
# get ElementTree documents
for file in ('test.xml', 'test2.xml', 'test3.xml'):
@@ -151,12 +151,9 @@ for file in ('test.xml', 'test2.xml', 'test3.xml'):
session.add(Document(file, doc))
print "\nSaving three documents...", line
-session.flush()
+session.commit()
print "Done."
-# clear session (to illustrate a full load), restore
-session.expunge_all()
-
print "\nFull text of document 'text.xml':", line
document = session.query(Document).filter_by(filename="test.xml").first()
diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py
index 98c4e1129..d6110a132 100644
--- a/examples/elementtree/optimized_al.py
+++ b/examples/elementtree/optimized_al.py
@@ -5,19 +5,19 @@ which joins on only three tables.
"""
-################################# PART I - Imports/Configuration ###########################################
+##################### PART I - Imports/Configuration #########################
from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
- Unicode, and_)
-from sqlalchemy.orm import mapper, relationship, create_session, lazyload
+ Unicode, and_, create_engine)
+from sqlalchemy.orm import mapper, relationship, Session, lazyload
import sys, os, StringIO, re
from xml.etree import ElementTree
+e = create_engine('sqlite://', echo=True)
meta = MetaData()
-meta.bind = 'sqlite://'
-################################# PART II - Table Metadata ###########################################
+####################### PART II - Table Metadata #############################
# stores a top level record of an XML document.
documents = Table('documents', meta,
@@ -43,9 +43,9 @@ attributes = Table('attributes', meta,
Column('name', Unicode(100), nullable=False, primary_key=True),
Column('value', Unicode(255)))
-meta.create_all()
+meta.create_all(e)
-#################################### PART III - Model #############################################
+########################### PART III - Model #################################
# our document class. contains a string name,
# and the ElementTree root element.
@@ -59,7 +59,7 @@ class Document(object):
self.element.write(buf)
return buf.getvalue()
-#################################### PART IV - Persistence Mapping ###################################
+########################## PART IV - Persistence Mapping #####################
# Node class. a non-public class which will represent
# the DB-persisted Element/SubElement object. We cannot create mappers for
@@ -145,12 +145,12 @@ class ElementTreeMarshal(object):
# override Document's "element" attribute with the marshaller.
Document.element = ElementTreeMarshal()
-########################################### PART V - Basic Persistence Example ############################
+###################### PART V - Basic Persistence Example ####################
line = "\n--------------------------------------------------------"
# save to DB
-session = create_session()
+session = Session(e)
# get ElementTree documents
for file in ('test.xml', 'test2.xml', 'test3.xml'):
@@ -159,25 +159,25 @@ for file in ('test.xml', 'test2.xml', 'test3.xml'):
session.add(Document(file, doc))
print "\nSaving three documents...", line
-session.flush()
+session.commit()
print "Done."
-# clear session (to illustrate a full load), restore
-session.expunge_all()
-
print "\nFull text of document 'text.xml':", line
document = session.query(Document).filter_by(filename="test.xml").first()
print document
-############################################ PART VI - Searching for Paths #######################################
+######################## PART VI - Searching for Paths #######################
# manually search for a document which contains "/somefile/header/field1:hi"
print "\nManual search for /somefile/header/field1=='hi':", line
-d = session.query(Document).join('_nodes', aliased=True).filter(and_(_Node.parent_id==None, _Node.tag==u'somefile')).\
- join('children', aliased=True, from_joinpoint=True).filter(_Node.tag==u'header').\
- join('children', aliased=True, from_joinpoint=True).filter(and_(_Node.tag==u'field1', _Node.text==u'hi')).\
- one()
+d = session.query(Document).join('_nodes', aliased=True).\
+ filter(and_(_Node.parent_id==None, _Node.tag==u'somefile')).\
+ join('children', aliased=True, from_joinpoint=True).\
+ filter(_Node.tag==u'header').\
+ join('children', aliased=True, from_joinpoint=True).\
+ filter(and_(_Node.tag==u'field1', _Node.text==u'hi')).\
+ one()
print d
# generalize the above approach into an extremely impoverished xpath function:
diff --git a/examples/elementtree/pickle.py b/examples/elementtree/pickle.py
index 4eaaa2f8d..5e53e6798 100644
--- a/examples/elementtree/pickle.py
+++ b/examples/elementtree/pickle.py
@@ -8,14 +8,14 @@ styles of persistence are identical, as is the structure of the main Document cl
from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, String,
PickleType)
-from sqlalchemy.orm import mapper, create_session
+from sqlalchemy.orm import mapper, Session
import sys, os
from xml.etree import ElementTree
-engine = create_engine('sqlite://')
-meta = MetaData(engine)
+e = create_engine('sqlite://')
+meta = MetaData()
# setup a comparator for the PickleType since it's a mutable
# element.
@@ -30,7 +30,7 @@ documents = Table('documents', meta,
Column('element', PickleType(comparator=are_elements_equal))
)
-meta.create_all()
+meta.create_all(e)
# our document class. contains a string name,
# and the ElementTree root element.
@@ -49,12 +49,11 @@ filename = os.path.join(os.path.dirname(__file__), "test.xml")
doc = ElementTree.parse(filename)
# save to DB
-session = create_session()
+session = Session(e)
session.add(Document("test.xml", doc))
-session.flush()
+session.commit()
-# clear session (to illustrate a full load), restore
-session.expunge_all()
+# restore
document = session.query(Document).filter_by(filename="test.xml").first()
# print
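The ``are_elements_equal`` comparator referenced in the pickle.py diff above
lies outside this hunk; a plausible (hypothetical) implementation compares two
ElementTree documents by their serialized text, so that the mutable
``PickleType`` value can be checked for changes::

    import StringIO

    def are_elements_equal(x, y):
        # identical object, or both None: trivially equal
        if x is y:
            return True
        if x is None or y is None:
            return False
        # serialize both trees and compare the raw XML text
        b1, b2 = StringIO.StringIO(), StringIO.StringIO()
        x.write(b1)
        y.write(b2)
        return b1.getvalue() == b2.getvalue()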
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
index 242b8a328..9aa23173b 100644
--- a/lib/sqlalchemy/dialects/informix/base.py
+++ b/lib/sqlalchemy/dialects/informix/base.py
@@ -7,7 +7,7 @@
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the Informix database.
-This dialect is *not* tested on SQLAlchemy 0.6.
+This dialect is mostly functional as of SQLAlchemy 0.6.5.
"""
@@ -16,7 +16,7 @@ This dialect is *not* tested on SQLAlchemy 0.6.
import datetime
from sqlalchemy import sql, schema, exc, pool, util
-from sqlalchemy.sql import compiler
+from sqlalchemy.sql import compiler, text
from sqlalchemy.engine import default, reflection
from sqlalchemy import types as sqltypes
@@ -47,9 +47,9 @@ class InfoTime(sqltypes.Time):
return value
return process
-
colspecs = {
sqltypes.DateTime : InfoDateTime,
+ sqltypes.TIMESTAMP: InfoDateTime,
sqltypes.Time: InfoTime,
}
@@ -85,6 +85,9 @@ class InfoTypeCompiler(compiler.GenericTypeCompiler):
def visit_TIME(self, type_):
return "DATETIME HOUR TO SECOND"
+ def visit_TIMESTAMP(self, type_):
+ return "DATETIME YEAR TO SECOND"
+
def visit_large_binary(self, type_):
return "BYTE"
@@ -92,17 +95,16 @@ class InfoTypeCompiler(compiler.GenericTypeCompiler):
return "SMALLINT"
class InfoSQLCompiler(compiler.SQLCompiler):
-
def default_from(self):
return " from systables where tabname = 'systables' "
def get_select_precolumns(self, select):
- s = select._distinct and "DISTINCT " or ""
- # only has limit
+ s = ""
+ if select._offset:
+ s += "SKIP %s " % select._offset
if select._limit:
- s += " FIRST %s " % select._limit
- else:
- s += ""
+ s += "FIRST %s " % select._limit
+ s += select._distinct and "DISTINCT " or ""
return s
def visit_select(self, select, asfrom=False, parens=True, **kw):
@@ -114,8 +116,6 @@ class InfoSQLCompiler(compiler.SQLCompiler):
return text
def limit_clause(self, select):
- if select._offset is not None and select._offset > 0:
- raise NotImplementedError("Informix does not support OFFSET")
return ""
def visit_function(self, func, **kw):
@@ -128,14 +128,32 @@ class InfoSQLCompiler(compiler.SQLCompiler):
else:
return compiler.SQLCompiler.visit_function(self, func, **kw)
+ def visit_mod(self, binary, **kw):
+ return "MOD(%s, %s)" % (self.process(binary.left), self.process(binary.right))
+
class InfoDDLCompiler(compiler.DDLCompiler):
- def get_column_specification(self, column, first_pk=False):
+
+ def visit_add_constraint(self, create):
+ preparer = self.preparer
+ return "ALTER TABLE %s ADD CONSTRAINT %s" % (
+ self.preparer.format_table(create.element.table),
+ self.process(create.element)
+ )
+
+ def get_column_specification(self, column, **kw):
colspec = self.preparer.format_column(column)
- if column.primary_key and \
- len(column.foreign_keys)==0 and \
- column.autoincrement and \
- isinstance(column.type, sqltypes.Integer) and first_pk:
+ first = None
+ if column.primary_key and column.autoincrement:
+ try:
+ first = [c for c in column.table.primary_key.columns
+ if (c.autoincrement and
+ isinstance(c.type, sqltypes.Integer) and
+ not c.foreign_keys)].pop(0)
+ except IndexError:
+ pass
+
+ if column is first:
colspec += " SERIAL"
else:
colspec += " " + self.dialect.type_compiler.process(column.type)
@@ -148,18 +166,53 @@ class InfoDDLCompiler(compiler.DDLCompiler):
return colspec
+ def get_column_default_string(self, column):
+ if (isinstance(column.server_default, schema.DefaultClause) and
+ isinstance(column.server_default.arg, basestring)):
+ if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
+ return self.sql_compiler.process(text(column.server_default.arg))
+
+ return super(InfoDDLCompiler, self).get_column_default_string(column)
+
+ ### Informix wants the constraint name at the end, hence this is copied from sql/compiler.py
+ def visit_primary_key_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
+ text = "PRIMARY KEY "
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
+ for c in constraint)
+ text += self.define_constraint_deferrability(constraint)
+
+ if constraint.name is not None:
+ text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
+ return text
+
+ def visit_foreign_key_constraint(self, constraint):
+ preparer = self.dialect.identifier_preparer
+ remote_table = list(constraint._elements.values())[0].column.table
+ text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
+ ', '.join(preparer.quote(f.parent.name, f.parent.quote)
+ for f in constraint._elements.values()),
+ preparer.format_table(remote_table),
+ ', '.join(preparer.quote(f.column.name, f.column.quote)
+ for f in constraint._elements.values())
+ )
+ text += self.define_constraint_cascades(constraint)
+ text += self.define_constraint_deferrability(constraint)
+
+ if constraint.name is not None:
+ text += " CONSTRAINT %s " % \
+ preparer.format_constraint(constraint)
+ return text
+
+ def visit_unique_constraint(self, constraint):
+ text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
+ text += self.define_constraint_deferrability(constraint)
+
+ if constraint.name is not None:
+ text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
+ return text
-class InfoIdentifierPreparer(compiler.IdentifierPreparer):
- def __init__(self, dialect):
- super(InfoIdentifierPreparer, self).\
- __init__(dialect, initial_quote="'")
-
- def format_constraint(self, constraint):
- # informix doesnt support names for constraints
- return ''
-
- def _requires_quotes(self, value):
- return False
class InformixDialect(default.DefaultDialect):
name = 'informix'
@@ -169,9 +222,13 @@ class InformixDialect(default.DefaultDialect):
type_compiler = InfoTypeCompiler
statement_compiler = InfoSQLCompiler
ddl_compiler = InfoDDLCompiler
- preparer = InfoIdentifierPreparer
colspecs = colspecs
ischema_names = ischema_names
+ default_paramstyle = 'qmark'
+
+ def __init__(self, has_transactions=True, *args, **kwargs):
+ self.has_transactions = has_transactions
+ default.DefaultDialect.__init__(self, *args, **kwargs)
def initialize(self, connection):
super(InformixDialect, self).initialize(connection)
@@ -182,43 +239,78 @@ class InformixDialect(default.DefaultDialect):
else:
self.max_identifier_length = 128
- def do_begin(self, connect):
- cu = connect.cursor()
+ def do_begin(self, connection):
+ cu = connection.cursor()
cu.execute('SET LOCK MODE TO WAIT')
- #cu.execute('SET ISOLATION TO REPEATABLE READ')
+ if self.has_transactions:
+ cu.execute('SET ISOLATION TO REPEATABLE READ')
+
+ def do_commit(self, connection):
+ if self.has_transactions:
+ connection.commit()
+
+ def do_rollback(self, connection):
+ if self.has_transactions:
+ connection.rollback()
+
+ def _get_table_names(self, connection, schema, type, **kw):
+ schema = schema or self.default_schema_name
+ s = "select tabname, owner from systables where owner=? and tabtype=?"
+ return [row[0] for row in connection.execute(s, schema, type)]
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
- s = "select tabname from systables"
+ return self._get_table_names(connection, schema, 'T', **kw)
+
+ @reflection.cache
+ def get_view_names(self, connection, schema=None, **kw):
+ return self._get_table_names(connection, schema, 'V', **kw)
+
+ @reflection.cache
+ def get_schema_names(self, connection, **kw):
+ s = "select owner from systables"
return [row[0] for row in connection.execute(s)]
def has_table(self, connection, table_name, schema=None):
+ schema = schema or self.default_schema_name
cursor = connection.execute(
- """select tabname from systables where tabname=?""",
- table_name.lower())
+ """select tabname from systables where tabname=? and owner=?""",
+ table_name, schema)
return cursor.first() is not None
@reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
+ schema = schema or self.default_schema_name
c = connection.execute(
"""select colname, coltype, collength, t3.default, t1.colno from
syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
- where t1.tabid = t2.tabid and t2.tabname=?
+ where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
and t3.tabid = t2.tabid and t3.colno = t1.colno
- order by t1.colno""", table.name.lower())
+ order by t1.colno""", table_name, schema)
+
+ primary_cols = self.get_primary_keys(connection, table_name, schema, **kw)
+
columns = []
+ rows = c.fetchall()
for name, colattr, collength, default, colno in rows:
name = name.lower()
- if include_columns and name not in include_columns:
- continue
+
+ autoincrement = False
+ primary_key = False
+
+ if name in primary_cols:
+ primary_key = True
# in 7.31, coltype = 0x000
# ^^-- column type
# ^-- 1 not null, 0 null
- nullable, coltype = divmod(colattr, 256)
+ not_nullable, coltype = divmod(colattr, 256)
if coltype not in (0, 13) and default:
default = default.split()[-1]
+ if coltype == 6: # Serial, mark as autoincrement
+ autoincrement = True
+
if coltype == 0 or coltype == 13: # char, varchar
coltype = ischema_names[coltype](collength)
if default:
@@ -236,32 +328,34 @@ class InformixDialect(default.DefaultDialect):
(coltype, name))
coltype = sqltypes.NULLTYPE
- # TODO: nullability ??
- nullable = True
-
- column_info = dict(name=name, type=coltype, nullable=nullable,
- default=default)
+ column_info = dict(name=name, type=coltype, nullable=not not_nullable,
+ default=default, autoincrement=autoincrement,
+ primary_key=primary_key)
columns.append(column_info)
return columns
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- # FK
+ schema_sel = schema or self.default_schema_name
c = connection.execute(
- """select t1.constrname as cons_name , t1.constrtype as cons_type ,
- t4.colname as local_column , t7.tabname as remote_table ,
- t6.colname as remote_column
+ """select t1.constrname as cons_name,
+ t4.colname as local_column, t7.tabname as remote_table,
+ t6.colname as remote_column, t7.owner as remote_owner
from sysconstraints as t1 , systables as t2 ,
sysindexes as t3 , syscolumns as t4 ,
sysreferences as t5 , syscolumns as t6 , systables as t7 ,
sysconstraints as t8 , sysindexes as t9
- where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'R'
+ where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
and t3.tabid = t2.tabid and t3.idxname = t1.idxname
- and t4.tabid = t2.tabid and t4.colno = t3.part1
+ and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
+ t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
+ t3.part11, t3.part12, t3.part13, t3.part14, t3.part15, t3.part16)
and t5.constrid = t1.constrid and t8.constrid = t5.primary
- and t6.tabid = t5.ptabid and t6.colno = t9.part1 and t9.idxname =
+ and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
+ t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
+ t9.part11, t9.part12, t9.part13, t9.part14, t9.part15, t9.part16) and t9.idxname =
t8.idxname
- and t7.tabid = t5.ptabid""", table.name.lower())
+ and t7.tabid = t5.ptabid""", table_name, schema_sel)
def fkey_rec():
@@ -275,8 +369,9 @@ class InformixDialect(default.DefaultDialect):
fkeys = util.defaultdict(fkey_rec)
- for cons_name, cons_type, local_column, \
- remote_table, remote_column in rows:
+ rows = c.fetchall()
+ for cons_name, local_column, \
+ remote_table, remote_column, remote_owner in rows:
rec = fkeys[cons_name]
rec['name'] = cons_name
@@ -285,25 +380,91 @@ class InformixDialect(default.DefaultDialect):
if not rec['referred_table']:
rec['referred_table'] = remote_table
+ if schema is not None:
+ rec['referred_schema'] = remote_owner
- local_cols.append(local_column)
- remote_cols.append(remote_column)
+ if local_column not in local_cols:
+ local_cols.append(local_column)
+ if remote_column not in remote_cols:
+ remote_cols.append(remote_column)
return fkeys.values()
@reflection.cache
def get_primary_keys(self, connection, table_name, schema=None, **kw):
+ schema = schema or self.default_schema_name
+
+ # Select the column positions from sysindexes for sysconstraints
+ data = connection.execute(
+ """select t2.*
+ from systables as t1, sysindexes as t2, sysconstraints as t3
+ where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
+ and t2.idxname=t3.idxname and t3.constrtype='P'""",
+ table_name, schema
+ ).fetchall()
+
+ colpositions = set()
+
+ for row in data:
+ colpos = set([getattr(row, 'part%d' % x) for x in range(1,17)])
+ colpositions |= colpos
+
+ if not len(colpositions):
+ return []
+
+ # Select the column names using the columnpositions
+ # TODO: Maybe cache some of this column info (e.g. select all colnames for one table)
+ place_holder = ','.join('?'*len(colpositions))
c = connection.execute(
- """select t4.colname as local_column
- from sysconstraints as t1 , systables as t2 ,
- sysindexes as t3 , syscolumns as t4
- where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'P'
- and t3.tabid = t2.tabid and t3.idxname = t1.idxname
- and t4.tabid = t2.tabid and t4.colno = t3.part1""",
- table.name.lower())
- return [r[0] for r in c.fetchall()]
+ """select t1.colname
+ from syscolumns as t1, systables as t2
+ where t2.tabname=? and t1.tabid = t2.tabid and
+ t1.colno in (%s)""" % place_holder,
+ table_name, *colpositions
+ ).fetchall()
+
+ return reduce(lambda x,y: list(x)+list(y), c, [])
@reflection.cache
def get_indexes(self, connection, table_name, schema, **kw):
- # TODO
- return []
+ # TODO: schema...
+ c = connection.execute(
+ """select t1.*
+ from sysindexes as t1 , systables as t2
+ where t1.tabid = t2.tabid and t2.tabname=?""",
+ table_name)
+
+ indexes = []
+ for row in c.fetchall():
+ colnames = [getattr(row, 'part%d' % x) for x in range(1,17)]
+ colnames = [x for x in colnames if x]
+ place_holder = ','.join('?'*len(colnames))
+ c = connection.execute(
+ """select t1.colname
+ from syscolumns as t1, systables as t2
+ where t2.tabname=? and t1.tabid = t2.tabid and
+ t1.colno in (%s)""" % place_holder,
+ table_name, *colnames
+ ).fetchall()
+ c = reduce(lambda x,y: list(x)+list(y), c, [])
+ indexes.append({
+ 'name': row.idxname,
+ 'unique': row.idxtype.lower() == 'u',
+ 'column_names': c
+ })
+ return indexes
+
+ @reflection.cache
+ def get_view_definition(self, connection, view_name, schema=None, **kw):
+ schema = schema or self.default_schema_name
+ c = connection.execute(
+ """select t1.viewtext
+ from sysviews as t1 , systables as t2
+ where t1.tabid=t2.tabid and t2.tabname=?
+ and t2.owner=? order by seqno""",
+ view_name, schema).fetchall()
+
+ return ''.join([row[0] for row in c])
+
+ def _get_default_schema_name(self, connection):
+ return connection.execute('select CURRENT_ROLE from systables').scalar()
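The reworked ``get_select_precolumns()`` hunk above folds OFFSET and LIMIT
into Informix's SKIP/FIRST prefix syntax. A minimal sketch of the expected
rendering, assuming a hypothetical table ``t``::

    from sqlalchemy import MetaData, Table, Column, Integer, select

    tbl = Table('t', MetaData(), Column('id', Integer, primary_key=True))
    stmt = select([tbl], distinct=True).limit(10).offset(20)
    # on the informix dialect this should compile approximately to:
    # SELECT SKIP 20 FIRST 10 DISTINCT t.id FROM t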
diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py
index 8edcc953b..f11c57bb6 100644
--- a/lib/sqlalchemy/dialects/informix/informixdb.py
+++ b/lib/sqlalchemy/dialects/informix/informixdb.py
@@ -1,16 +1,38 @@
+"""
+Support for the informixdb DBAPI.
+
+informixdb is available at:
+
+ http://informixdb.sourceforge.net/
+
+Connecting
+^^^^^^^^^^
+
+Sample informix connection::
+
+ engine = create_engine('informix+informixdb://user:password@host/dbname')
+
+"""
+
+import re
+
from sqlalchemy.dialects.informix.base import InformixDialect
from sqlalchemy.engine import default
+VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
+
class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
def post_exec(self):
if self.isinsert:
- self._lastrowid = [self.cursor.sqlerrd[1]]
+ self._lastrowid = self.cursor.sqlerrd[1]
+
+ def get_lastrowid(self):
+ return self._lastrowid
class InformixDialect_informixdb(InformixDialect):
driver = 'informixdb'
- default_paramstyle = 'qmark'
- execution_context_cls = InformixExecutionContext_informixdb
+ execution_ctx_cls = InformixExecutionContext_informixdb
@classmethod
def dbapi(cls):
@@ -31,13 +53,8 @@ class InformixDialect_informixdb(InformixDialect):
def _get_server_version_info(self, connection):
# http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
- version = []
- for n in connection.connection.dbms_version.split('.'):
- try:
- version.append(int(n))
- except ValueError:
- version.append(n)
- return tuple(version)
+ v = VERSION_RE.split(connection.connection.dbms_version)
+ return (int(v[1]), int(v[2]), v[3])
def is_disconnect(self, e):
if isinstance(e, self.dbapi.OperationalError):
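The new ``_get_server_version_info()`` relies on ``VERSION_RE.split()``
returning the captured groups around the match. An illustration with a
made-up ``dbms_version`` string::

    import re

    VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')

    v = VERSION_RE.split('9.40.UC1')
    # v == ['', '9', '40', '.UC1', '']
    assert (int(v[1]), int(v[2]), v[3]) == (9, 40, '.UC1')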
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index 575cb37f2..b2295f49b 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -68,12 +68,13 @@ pysqlite's driver does not. Additionally, SQLAlchemy does not at
this time automatically render the "cast" syntax required for the
freestanding functions "current_timestamp" and "current_date" to return
datetime/date types natively. Unfortunately, pysqlite
-does not provide the standard DBAPI types in `cursor.description`,
+does not provide the standard DBAPI types in ``cursor.description``,
leaving SQLAlchemy with no way to detect these types on the fly
without expensive per-row type checks.
-Usage of PARSE_DECLTYPES can be forced if one configures
-"native_datetime=True" on create_engine()::
+Keeping in mind that pysqlite's parsing option is not recommended,
+nor should it be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
+can be forced if one configures "native_datetime=True" on create_engine()::
engine = create_engine('sqlite://',
connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 73cd53137..ba4ef6037 100644
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -1070,7 +1070,7 @@ class Connection(Connectable):
def _begin_impl(self):
if self._echo:
- self.engine.logger.info("BEGIN")
+ self.engine.logger.info("BEGIN (implicit)")
try:
self.engine.dialect.do_begin(self.connection)
except Exception, e:
@@ -2300,7 +2300,7 @@ class ResultProxy(object):
self.context = context
self.dialect = context.dialect
self.closed = False
- self.cursor = context.cursor
+ self.cursor = self._saved_cursor = context.cursor
self.connection = context.root_connection
self._echo = self.connection._echo and \
context.engine._should_log_debug()
@@ -2355,12 +2355,12 @@ class ResultProxy(object):
regardless of database backend.
"""
- return self.cursor.lastrowid
+ return self._saved_cursor.lastrowid
def _cursor_description(self):
"""May be overridden by subclasses."""
- return self.cursor.description
+ return self._saved_cursor.description
def _autoclose(self):
"""called by the Connection to autoclose cursors that have no pending
diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py
index 003969f56..42ba226ee 100644
--- a/lib/sqlalchemy/exc.py
+++ b/lib/sqlalchemy/exc.py
@@ -26,7 +26,11 @@ class ArgumentError(SQLAlchemyError):
class CircularDependencyError(SQLAlchemyError):
"""Raised by topological sorts when a circular dependency is detected"""
-
+ def __init__(self, message, cycles, edges):
+ message += ": cycles: %r all edges: %r" % (cycles, edges)
+ SQLAlchemyError.__init__(self, message)
+ self.cycles = cycles
+ self.edges = edges
class CompileError(SQLAlchemyError):
"""Raised when an error occurs during SQL compilation"""
diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py
index 6d4bdda43..fabd9aaf9 100755
--- a/lib/sqlalchemy/ext/declarative.py
+++ b/lib/sqlalchemy/ext/declarative.py
@@ -358,10 +358,10 @@ and simply pass it to declarative classes::
Base.metadata.reflect(some_engine)
class User(Base):
- __table__ = metadata['user']
+ __table__ = metadata.tables['user']
class Address(Base):
- __table__ = metadata['address']
+ __table__ = metadata.tables['address']
Some configuration schemes may find it more appropriate to use ``__table__``,
such as those which already take advantage of the data-driven nature of
@@ -589,13 +589,13 @@ keys, as a :class:`ForeignKey` itself contains references to columns
which can't be properly recreated at this level. For columns that
have foreign keys, as well as for the variety of mapper-level constructs
that require destination-explicit context, the
-:func:`~sqlalchemy.util.classproperty` decorator is provided so that
+:func:`~.declared_attr` decorator is provided so that
patterns common to many classes can be defined as callables::
- from sqlalchemy.util import classproperty
+ from sqlalchemy.ext.declarative import declared_attr
class ReferenceAddressMixin(object):
- @classproperty
+ @declared_attr
def address_id(cls):
return Column(Integer, ForeignKey('address.id'))
@@ -608,14 +608,14 @@ point at which the ``User`` class is constructed, and the declarative
extension can use the resulting :class:`Column` object as returned by
the method without the need to copy it.
-Columns generated by :func:`~sqlalchemy.util.classproperty` can also be
+Columns generated by :func:`~.declared_attr` can also be
referenced by ``__mapper_args__`` to a limited degree, currently
by ``polymorphic_on`` and ``version_id_col``, by specifying the
classdecorator itself into the dictionary - the declarative extension
will resolve them at class construction time::
class MyMixin:
- @classproperty
+ @declared_attr
def type_(cls):
return Column(String(50))
@@ -625,26 +625,23 @@ will resolve them at class construction time::
__tablename__='test'
id = Column(Integer, primary_key=True)
-.. note:: The usage of :func:`~sqlalchemy.util.classproperty` with mixin
- columns is a new feature as of SQLAlchemy 0.6.2.
-
Mixing in Relationships
~~~~~~~~~~~~~~~~~~~~~~~
Relationships created by :func:`~sqlalchemy.orm.relationship` are provided
with declarative mixin classes exclusively using the
-:func:`~sqlalchemy.util.classproperty` approach, eliminating any ambiguity
+:func:`.declared_attr` approach, eliminating any ambiguity
which could arise when copying a relationship and its possibly column-bound
contents. Below is an example which combines a foreign key column and a
relationship so that two classes ``Foo`` and ``Bar`` can both be configured to
reference a common target class via many-to-one::
class RefTargetMixin(object):
- @classproperty
+ @declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))
- @classproperty
+ @declared_attr
def target(cls):
return relationship("Target")
@@ -667,20 +664,16 @@ To reference the mixin class in these expressions, use the given ``cls``
to get its name::
class RefTargetMixin(object):
- @classproperty
+ @declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))
- @classproperty
+ @declared_attr
def target(cls):
return relationship("Target",
primaryjoin="Target.id==%s.target_id" % cls.__name__
)
-.. note:: The usage of :func:`~sqlalchemy.util.classproperty` with mixin
- relationships is a new feature as of SQLAlchemy 0.6.2.
-
-
Mixing in deferred(), column_property(), etc.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -688,21 +681,18 @@ Like :func:`~sqlalchemy.orm.relationship`, all
:class:`~sqlalchemy.orm.interfaces.MapperProperty` subclasses such as
:func:`~sqlalchemy.orm.deferred`, :func:`~sqlalchemy.orm.column_property`,
etc. ultimately involve references to columns, and therefore, when
-used with declarative mixins, have the :func:`~sqlalchemy.util.classproperty`
+used with declarative mixins, have the :func:`.declared_attr`
requirement so that no reliance on copying is needed::
class SomethingMixin(object):
- @classproperty
+ @declared_attr
def dprop(cls):
return deferred(Column(Integer))
class Something(Base, SomethingMixin):
__tablename__ = "something"
-.. note:: The usage of :func:`~sqlalchemy.util.classproperty` with mixin
- mapper properties is a new feature as of SQLAlchemy 0.6.2.
-
Controlling table inheritance with mixins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -721,10 +711,10 @@ where you wanted to use that mixin in a single table inheritance
hierarchy, you can explicitly specify ``__tablename__`` as ``None`` to
indicate that the class should not have a table mapped::
- from sqlalchemy.util import classproperty
+ from sqlalchemy.ext.declarative import declared_attr
class Tablename:
- @classproperty
+ @declared_attr
def __tablename__(cls):
return cls.__name__.lower()
@@ -748,11 +738,11 @@ has a mapped table.
As an example, here's a mixin that will only allow single table
inheritance::
- from sqlalchemy.util import classproperty
+ from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.declarative import has_inherited_table
class Tablename:
- @classproperty
+ @declared_attr
def __tablename__(cls):
if has_inherited_table(cls):
return None
@@ -772,11 +762,11 @@ table inheritance, you would need a slightly different mixin and use
it on any joined table child classes in addition to their parent
classes::
- from sqlalchemy.util import classproperty
+ from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.ext.declarative import has_inherited_table
class Tablename:
- @classproperty
+ @declared_attr
def __tablename__(cls):
if (has_inherited_table(cls) and
Tablename not in cls.__bases__):
@@ -806,11 +796,11 @@ In the case of ``__table_args__`` or ``__mapper_args__``
specified with declarative mixins, you may want to combine
some parameters from several mixins with those you wish to
define on the class iteself. The
-:func:`~sqlalchemy.util.classproperty` decorator can be used
+:func:`.declared_attr` decorator can be used
here to create user-defined collation routines that pull
from multiple collections::
- from sqlalchemy.util import classproperty
+ from sqlalchemy.ext.declarative import declared_attr
class MySQLSettings:
__table_args__ = {'mysql_engine':'InnoDB'}
@@ -821,7 +811,7 @@ from multiple collections::
class MyModel(Base,MySQLSettings,MyOtherMixin):
__tablename__='my_model'
- @classproperty
+ @declared_attr
def __table_args__(self):
args = dict()
args.update(MySQLSettings.__table_args__)
@@ -865,7 +855,7 @@ from sqlalchemy.orm.interfaces import MapperProperty
from sqlalchemy.orm.properties import RelationshipProperty, ColumnProperty
from sqlalchemy.orm.util import _is_mapped_class
from sqlalchemy import util, exceptions
-from sqlalchemy.sql import util as sql_util
+from sqlalchemy.sql import util as sql_util, expression
__all__ = 'declarative_base', 'synonym_for', \
@@ -907,56 +897,70 @@ def _as_declarative(cls, classname, dict_):
tablename = None
parent_columns = ()
+ declarative_props = (declared_attr, util.classproperty)
+
for base in cls.__mro__:
- if _is_mapped_class(base):
+ class_mapped = _is_mapped_class(base)
+ if class_mapped:
parent_columns = base.__table__.c.keys()
- else:
- for name,obj in vars(base).items():
- if name == '__mapper_args__':
- if not mapper_args:
- mapper_args = cls.__mapper_args__
- elif name == '__tablename__':
- if not tablename:
- tablename = cls.__tablename__
- elif name == '__table_args__':
- if not table_args:
- table_args = cls.__table_args__
- if base is not cls:
- inherited_table_args = True
- elif base is not cls:
- # we're a mixin.
-
- if isinstance(obj, Column):
- if obj.foreign_keys:
- raise exceptions.InvalidRequestError(
- "Columns with foreign keys to other columns "
- "must be declared as @classproperty callables "
- "on declarative mixin classes. ")
- if name not in dict_ and not (
- '__table__' in dict_ and
- name in dict_['__table__'].c
- ):
- potential_columns[name] = \
- column_copies[obj] = \
- obj.copy()
- column_copies[obj]._creation_order = \
- obj._creation_order
- elif isinstance(obj, MapperProperty):
+
+ for name,obj in vars(base).items():
+ if name == '__mapper_args__':
+ if not mapper_args and (
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
+ mapper_args = cls.__mapper_args__
+ elif name == '__tablename__':
+ if not tablename and (
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
+ tablename = cls.__tablename__
+ elif name == '__table_args__':
+ if not table_args and (
+ not class_mapped or
+ isinstance(obj, declarative_props)
+ ):
+ table_args = cls.__table_args__
+ if base is not cls:
+ inherited_table_args = True
+ elif class_mapped:
+ continue
+ elif base is not cls:
+ # we're a mixin.
+
+ if isinstance(obj, Column):
+ if obj.foreign_keys:
raise exceptions.InvalidRequestError(
- "Mapper properties (i.e. deferred,"
- "column_property(), relationship(), etc.) must "
- "be declared as @classproperty callables "
- "on declarative mixin classes.")
- elif isinstance(obj, util.classproperty):
- dict_[name] = ret = \
- column_copies[obj] = getattr(cls, name)
- if isinstance(ret, (Column, MapperProperty)) and \
- ret.doc is None:
- ret.doc = obj.__doc__
+ "Columns with foreign keys to other columns "
+ "must be declared as @classproperty callables "
+ "on declarative mixin classes. ")
+ if name not in dict_ and not (
+ '__table__' in dict_ and
+ (obj.name or name) in dict_['__table__'].c
+ ):
+ potential_columns[name] = \
+ column_copies[obj] = \
+ obj.copy()
+ column_copies[obj]._creation_order = \
+ obj._creation_order
+ elif isinstance(obj, MapperProperty):
+ raise exceptions.InvalidRequestError(
+ "Mapper properties (i.e. deferred,"
+ "column_property(), relationship(), etc.) must "
+ "be declared as @classproperty callables "
+ "on declarative mixin classes.")
+ elif isinstance(obj, declarative_props):
+ dict_[name] = ret = \
+ column_copies[obj] = getattr(cls, name)
+ if isinstance(ret, (Column, MapperProperty)) and \
+ ret.doc is None:
+ ret.doc = obj.__doc__
# apply inherited columns as we should
for k, v in potential_columns.items():
- if tablename or k not in parent_columns:
+ if tablename or (v.name or k) not in parent_columns:
dict_[k] = v
if inherited_table_args and not tablename:
@@ -972,7 +976,7 @@ def _as_declarative(cls, classname, dict_):
for k in dict_:
value = dict_[k]
- if isinstance(value, util.classproperty):
+ if isinstance(value, declarative_props):
value = getattr(cls, k)
if (isinstance(value, tuple) and len(value) == 1 and
@@ -1083,7 +1087,7 @@ def _as_declarative(cls, classname, dict_):
"Can't place __table_args__ on an inherited class "
"with no table."
)
-
+
# add any columns declared here to the inherited table.
for c in cols:
if c.primary_key:
@@ -1112,7 +1116,25 @@ def _as_declarative(cls, classname, dict_):
set([c.key for c in inherited_table.c
if c not in inherited_mapper._columntoproperty])
exclude_properties.difference_update([c.key for c in cols])
-
+
+ # look through columns in the current mapper that
+ # are keyed to a propname different than the colname
+ # (if names were the same, we'd have popped it out above,
+ # in which case the mapper makes this combination).
+ # See if the superclass has a similar column property.
+ # If so, join them together.
+ for k, col in our_stuff.items():
+ if not isinstance(col, expression.ColumnElement):
+ continue
+ if k in inherited_mapper._props:
+ p = inherited_mapper._props[k]
+ if isinstance(p, ColumnProperty):
+ # note here we place the superclass column
+ # first. this corresponds to the
+ # append() in mapper._configure_property().
+ # change this ordering when we do [ticket:1892]
+ our_stuff[k] = p.columns + [col]
+
cls.__mapper__ = mapper_cls(cls,
table,
properties=our_stuff,
@@ -1192,7 +1214,7 @@ def _deferred_relationship(cls, prop):
return x
except NameError, n:
raise exceptions.InvalidRequestError(
- "When compiling mapper %s, expression %r failed to "
+ "When initializing mapper %s, expression %r failed to "
"locate a name (%r). If this is a class name, consider "
"adding this relationship() to the %r class after "
"both dependent classes have been defined." %
@@ -1261,6 +1283,63 @@ def comparable_using(comparator_factory):
return comparable_property(comparator_factory, fn)
return decorate
+class declared_attr(property):
+ """Mark a class-level method as representing the definition of
+ a mapped property or special declarative member name.
+
+ .. note:: @declared_attr is available as
+ sqlalchemy.util.classproperty for SQLAlchemy versions
+ 0.6.2, 0.6.3, 0.6.4.
+
+ @declared_attr turns the attribute into a scalar-like
+ property that can be invoked from the uninstantiated class.
+ Declarative treats attributes specifically marked with
+ @declared_attr as returning a construct that is specific
+ to mapping or declarative table configuration. The name
+ of the attribute is the same as the one the non-dynamic
+ version of the attribute would have.
+
+ @declared_attr is more often than not applicable to mixins,
+ to define relationships that are to be applied to different
+ implementors of the class::
+
+ class ProvidesUser(object):
+ "A mixin that adds a 'user' relationship to classes."
+
+ @declared_attr
+ def user(cls):
+ return relationship("User")
+
+ It also can be applied to mapped classes, such as to provide
+ a "polymorphic" scheme for inheritance::
+
+ class Employee(Base):
+ id = Column(Integer, primary_key=True)
+ type = Column(String(50), nullable=False)
+
+ @declared_attr
+ def __tablename__(cls):
+ return cls.__name__.lower()
+
+ @declared_attr
+ def __mapper_args__(cls):
+ if cls.__name__ == 'Employee':
+ return {
+ "polymorphic_on":cls.type,
+ "polymorphic_identity":"Employee"
+ }
+ else:
+ return {"polymorphic_identity":cls.__name__}
+
+ """
+
+ def __init__(self, fget, *arg, **kw):
+ super(declared_attr, self).__init__(fget, *arg, **kw)
+ self.__doc__ = fget.__doc__
+
+ def __get__(desc, self, cls):
+ return desc.fget(cls)
+
def _declarative_constructor(self, **kwargs):
"""A simple constructor that allows initialization from kwargs.
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py
index 39c68f0aa..8b32d1a27 100644
--- a/lib/sqlalchemy/orm/__init__.py
+++ b/lib/sqlalchemy/orm/__init__.py
@@ -255,7 +255,19 @@ def relationship(argument, secondary=None, **kwargs):
* ``all`` - shorthand for "save-update,merge, refresh-expire,
expunge, delete"
-
+
+ :param cascade_backrefs=True:
+ a boolean value indicating if the ``save-update`` cascade should
+ operate along a backref event. When set to ``False`` on a
+ one-to-many relationship that has a many-to-one backref, assigning
+ a persistent object to the many-to-one attribute on a transient object
+ will not add the transient to the session. Similarly, when
+ set to ``False`` on a many-to-one relationship that has a one-to-many
+ backref, appending a persistent object to the one-to-many collection
+ on a transient object will not add the transient to the session.
+
+ ``cascade_backrefs`` is new in 0.6.5.
+
:param collection_class:
a class or callable that returns a new list-holding object. will
be used in place of a plain list for storing elements.
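To make the new ``cascade_backrefs`` flag concrete, a minimal sketch
(hypothetical ``User``/``Address`` models on the usual declarative setup)::

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        # Address is assumed mapped with a user_id foreign key
        addresses = relationship("Address", backref="user",
                                 cascade_backrefs=False)

    u1 = session.query(User).first()   # persistent
    a1 = Address()                     # transient
    a1.user = u1                       # backref event fires
    assert a1 not in session           # not cascaded into the Session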
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py
index d21e24d86..46b5a5dca 100644
--- a/lib/sqlalchemy/orm/attributes.py
+++ b/lib/sqlalchemy/orm/attributes.py
@@ -418,7 +418,7 @@ class AttributeImpl(object):
# Return a new, empty value
return self.initialize(state, dict_)
-
+
def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
self.set(state, dict_, value, initiator, passive=passive)
diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py
index 662cfc67b..4458a8547 100644
--- a/lib/sqlalchemy/orm/dependency.py
+++ b/lib/sqlalchemy/orm/dependency.py
@@ -806,8 +806,10 @@ class DetectKeySwitch(DependencyProcessor):
if not issubclass(state.class_, self.parent.class_):
continue
dict_ = state.dict
- related = state.get_impl(self.key).get(state, dict_, passive=self.passive_updates)
- if related is not attributes.PASSIVE_NO_RESULT and related is not None:
+ related = state.get_impl(self.key).get(state, dict_,
+ passive=self.passive_updates)
+ if related is not attributes.PASSIVE_NO_RESULT and \
+ related is not None:
related_state = attributes.instance_state(dict_[self.key])
if related_state in switchers:
uowcommit.register_object(state,
diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py
index 979fb48e3..f4933b8ca 100644
--- a/lib/sqlalchemy/orm/interfaces.py
+++ b/lib/sqlalchemy/orm/interfaces.py
@@ -60,6 +60,13 @@ class MapperProperty(object):
attribute access, loading behavior, and dependency calculations.
"""
+ cascade = ()
+ """The set of 'cascade' attribute names.
+
+ This collection is checked before the 'cascade_iterator' method is called.
+
+ """
+
def setup(self, context, entity, path, adapter, **kwargs):
"""Called by Query for the purposes of constructing a SQL statement.
@@ -111,6 +118,11 @@ class MapperProperty(object):
halt_on=None):
"""Iterate through instances related to the given instance for
a particular 'cascade', starting with this MapperProperty.
+
+ Return an iterator of 3-tuples in the form (instance, mapper, state).
+
+ Note that the 'cascade' collection on this MapperProperty is
+ checked first for the given type before cascade_iterator is called.
See PropertyLoader for the related instance implementation.
"""
diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py
index d1fb47e23..b3aa7fb29 100644
--- a/lib/sqlalchemy/orm/mapper.py
+++ b/lib/sqlalchemy/orm/mapper.py
@@ -21,15 +21,17 @@ from itertools import chain, groupby
deque = __import__('collections').deque
from sqlalchemy import sql, util, log, exc as sa_exc, event
-from sqlalchemy.sql import expression, visitors, operators, \
- util as sqlutil
-from sqlalchemy.orm import instrumentation, sync, exc as orm_exc, \
- unitofwork, attributes
-from sqlalchemy.orm.interfaces import MapperProperty, EXT_CONTINUE, \
- PropComparator
-from sqlalchemy.orm.util import ExtensionCarrier, _INSTRUMENTOR, \
- _class_to_mapper, _state_mapper, class_mapper, instance_str, \
- state_str
+from sqlalchemy.sql import expression, visitors, operators, util as sqlutil
+from sqlalchemy.orm import instrumentation, attributes, sync, \
+ exc as orm_exc, unitofwork
+from sqlalchemy.orm.interfaces import (
+ MapperProperty, EXT_CONTINUE, PropComparator
+ )
+from sqlalchemy.orm.util import (
+ ExtensionCarrier, _INSTRUMENTOR, _class_to_mapper,
+ _state_mapper, class_mapper, instance_str, state_str,
+ )
+import sys
__all__ = (
'Mapper',
@@ -193,7 +195,7 @@ class Mapper(object):
else:
self.polymorphic_map = _polymorphic_map
- if include_properties:
+ if include_properties is not None:
self.include_properties = util.to_set(include_properties)
else:
self.include_properties = None
@@ -637,7 +639,9 @@ class Mapper(object):
"or more attributes for these same-named columns "
"explicitly."
% (prop.columns[-1], column, key))
-
+
+ # this hypothetically changes to
+ # prop.columns.insert(0, column) when we do [ticket:1892]
prop.columns.append(column)
self._log("appending to existing ColumnProperty %s" % (key))
@@ -786,12 +790,13 @@ class Mapper(object):
# the order of mapper compilation
for mapper in list(_mapper_registry):
if getattr(mapper, '_compile_failed', False):
- raise sa_exc.InvalidRequestError(
- "One or more mappers failed to compile. "
- "Exception was probably "
- "suppressed within a hasattr() call. "
- "Message was: %s" %
- mapper._compile_failed)
+ e = sa_exc.InvalidRequestError(
+ "One or more mappers failed to initialize - "
+ "can't proceed with initialization of other "
+ "mappers. Original exception was: %s"
+ % mapper._compile_failed)
+ e._compile_failed = mapper._compile_failed
+ raise e
if not mapper.compiled:
mapper._post_configure_properties()
@@ -800,9 +805,9 @@ class Mapper(object):
finally:
_already_compiling = False
except:
- import sys
exc = sys.exc_info()[1]
- self._compile_failed = exc
+ if not hasattr(exc, '_compile_failed'):
+ self._compile_failed = exc
raise
finally:
self._expire_memoizations()
@@ -1398,25 +1403,30 @@ class Mapper(object):
"""
visited_instances = util.IdentitySet()
- visitables = [(self._props.itervalues(), 'property', state)]
+ prp, mpp = object(), object()
+
+ visitables = [(deque(self._props.values()), prp, state)]
while visitables:
iterator, item_type, parent_state = visitables[-1]
- try:
- if item_type == 'property':
- prop = iterator.next()
- visitables.append(
- (prop.cascade_iterator(type_, parent_state,
- visited_instances, halt_on), 'mapper', None)
- )
- elif item_type == 'mapper':
- instance, instance_mapper, corresponding_state = \
- iterator.next()
- yield (instance, instance_mapper)
- visitables.append((instance_mapper._props.itervalues(),
- 'property', corresponding_state))
- except StopIteration:
+ if not iterator:
visitables.pop()
+ continue
+
+ if item_type is prp:
+ prop = iterator.popleft()
+ if type_ not in prop.cascade:
+ continue
+ queue = deque(prop.cascade_iterator(type_, parent_state,
+ visited_instances, halt_on))
+ if queue:
+ visitables.append((queue, mpp, None))
+ elif item_type is mpp:
+ instance, instance_mapper, corresponding_state = \
+ iterator.popleft()
+ yield (instance, instance_mapper)
+ visitables.append((deque(instance_mapper._props.values()),
+ prp, corresponding_state))
@_memoized_compiled_property
def _compiled_cache(self):
@@ -2372,6 +2382,11 @@ def validates(*names):
can then raise validation exceptions to halt the process from continuing,
or can modify or replace the value before proceeding. The function
should otherwise return the given value.
+
+ Note that a validator for a collection **cannot** issue a load of that
+ collection within the validation routine - this usage raises
+ an assertion to avoid recursion overflows. This is a reentrant
+ condition which is not supported.
"""
def wrap(fn):
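Given the note added to ``validates()`` above, a typical validator - one
that does not load the collection it is validating - looks like this
(hypothetical model, usual declarative imports assumed)::

    from sqlalchemy.orm import validates

    class EmailAddress(Base):
        __tablename__ = 'email_address'
        id = Column(Integer, primary_key=True)
        email = Column(String(50))

        @validates('email')
        def validate_email(self, key, address):
            # raise to halt the operation, or return a
            # modified/replacement value
            assert '@' in address
            return address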
diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py
index 80443a7f3..4efd2acc9 100644
--- a/lib/sqlalchemy/orm/properties.py
+++ b/lib/sqlalchemy/orm/properties.py
@@ -444,8 +444,10 @@ class RelationshipProperty(StrategizedProperty):
comparator_factory=None,
single_parent=False, innerjoin=False,
doc=None,
+ cascade_backrefs=True,
load_on_pending=False,
- strategy_class=None, _local_remote_pairs=None, query_class=None):
+ strategy_class=None, _local_remote_pairs=None,
+ query_class=None):
self.uselist = uselist
self.argument = argument
@@ -460,6 +462,7 @@ class RelationshipProperty(StrategizedProperty):
self._user_defined_foreign_keys = foreign_keys
self.collection_class = collection_class
self.passive_deletes = passive_deletes
+ self.cascade_backrefs = cascade_backrefs
self.passive_updates = passive_updates
self.remote_side = remote_side
self.enable_typechecks = enable_typechecks
@@ -865,7 +868,8 @@ class RelationshipProperty(StrategizedProperty):
# cascade using the mapper local to this
# object, so that its individual properties are located
instance_mapper = instance_state.manager.mapper
- yield (c, instance_mapper, instance_state)
+ yield c, instance_mapper, instance_state
+
def _add_reverse_property(self, key):
other = self.mapper.get_property(key, _compile_mappers=False)
diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py
index b22a10b55..605f391aa 100644
--- a/lib/sqlalchemy/orm/query.py
+++ b/lib/sqlalchemy/orm/query.py
@@ -32,7 +32,7 @@ from sqlalchemy.orm import (
from sqlalchemy.orm.util import (
AliasedClass, ORMAdapter, _entity_descriptor, _entity_info,
_is_aliased_class, _is_mapped_class, _orm_columns, _orm_selectable,
- join as orm_join,with_parent
+ join as orm_join,with_parent, _attr_as_key
)
@@ -90,6 +90,7 @@ class Query(object):
_only_load_props = None
_refresh_state = None
_from_obj = ()
+ _select_from_entity = None
_filter_aliases = None
_from_obj_alias = None
_joinpath = _joinpoint = util.frozendict()
@@ -266,7 +267,8 @@ class Query(object):
return self._entities[0]
def _mapper_zero(self):
- return self._entity_zero().entity_zero
+ return self._select_from_entity or \
+ self._entity_zero().entity_zero
def _extension_zero(self):
ent = self._entity_zero()
@@ -283,8 +285,9 @@ class Query(object):
def _joinpoint_zero(self):
return self._joinpoint.get(
- '_joinpoint_entity',
- self._entity_zero().entity_zero)
+ '_joinpoint_entity',
+ self._mapper_zero()
+ )
def _mapper_zero_or_none(self):
if not getattr(self._entities[0], 'primary_entity', False):
@@ -495,7 +498,12 @@ class Query(object):
@property
def whereclause(self):
- """The WHERE criterion for this Query."""
+ """A readonly attribute which returns the current WHERE criterion for this Query.
+
+ This returned value is a SQL expression construct, or ``None`` if no
+ criterion has been established.
+
+ """
return self._criterion
@_generative()
@@ -807,7 +815,7 @@ class Query(object):
opt.process_query(self)
@_generative()
- def with_hint(self, selectable, text, dialect_name=None):
+ def with_hint(self, selectable, text, dialect_name='*'):
"""Add an indexing hint for the given entity or selectable to
this :class:`Query`.
@@ -1169,7 +1177,7 @@ class Query(object):
arg1, arg2 = arg1
else:
arg2 = None
-
+
# determine onclause/right_entity. there
# is a little bit of legacy behavior still at work here
# which means they might be in either order. may possibly
@@ -1250,7 +1258,7 @@ class Query(object):
(left, right))
left_mapper, left_selectable, left_is_aliased = _entity_info(left)
- right_mapper, right_selectable, is_aliased_class = _entity_info(right)
+ right_mapper, right_selectable, right_is_aliased = _entity_info(right)
if right_mapper and prop and \
not right_mapper.common_parent(prop.mapper):
@@ -1279,7 +1287,7 @@ class Query(object):
need_adapter = True
aliased_entity = right_mapper and \
- not is_aliased_class and \
+ not right_is_aliased and \
(
right_mapper.with_polymorphic or
isinstance(
@@ -1342,8 +1350,16 @@ class Query(object):
)
)
- join_to_left = not is_aliased_class and not left_is_aliased
-
+ # this is an overly broad assumption here, but there's a
+ # very wide variety of situations where we rely upon orm.join's
+ # adaption to glue clauses together, with joined-table inheritance's
+ # wide array of variables taking up most of the space.
+ # Setting the flag here is still a guess, so it is a bug
+ # that we don't have definitive criterion to determine when
+ # adaption should be enabled (or perhaps that we're even doing the
+ # whole thing the way we are here).
+ join_to_left = not right_is_aliased and not left_is_aliased
+
if self._from_obj and left_selectable is not None:
replace_clause_index, clause = sql_util.find_join_source(
self._from_obj,
@@ -1351,10 +1367,16 @@ class Query(object):
if clause is not None:
# the entire query's FROM clause is an alias of itself (i.e.
# from_self(), similar). if the left clause is that one,
- # ensure it aliases to the left side.
+ # ensure it adapts to the left side.
if self._from_obj_alias and clause is self._from_obj[0]:
join_to_left = True
-
+
+ # An exception case where adaption to the left edge is not
+ # desirable. See above note on join_to_left.
+ if join_to_left and isinstance(clause, expression.Join) and \
+ sql_util.clause_is_present(left_selectable, clause):
+ join_to_left = False
+
clause = orm_join(clause,
right,
onclause, isouter=outerjoin,
@@ -1403,20 +1425,23 @@ class Query(object):
@_generative(_no_clauseelement_condition)
def select_from(self, *from_obj):
- """Set the `from_obj` parameter of the query and return the newly
- resulting ``Query``. This replaces the table which this Query selects
- from with the given table.
+ """Set the FROM clause of this :class:`.Query` explicitly.
- ``select_from()`` also accepts class arguments. Though usually not
- necessary, can ensure that the full selectable of the given mapper is
- applied, e.g. for joined-table mappers.
-
- """
+ Sending a mapped class or entity here effectively replaces the
+ "left edge" of any calls to :meth:`.Query.join`, when no
+ joinpoint is otherwise established - usually, the default "join
+ point" is the leftmost entity in the :class:`.Query` object's
+ list of entities to be selected.
+ Mapped entities or plain :class:`.Table` or other selectables
+ can be sent here which will form the default FROM clause.
+
+ """
obj = []
for fo in from_obj:
if _is_mapped_class(fo):
mapper, selectable, is_aliased_class = _entity_info(fo)
+ self._select_from_entity = fo
obj.append(selectable)
elif not isinstance(fo, expression.FromClause):
raise sa_exc.ArgumentError(
@@ -1425,7 +1450,7 @@ class Query(object):
obj.append(fo)
self._set_select_from(*obj)
-
+
def __getitem__(self, item):
if isinstance(item, slice):
start, stop, step = util.decode_slice(item)
@@ -2008,8 +2033,7 @@ class Query(object):
Also, the ``before_delete()`` and ``after_delete()``
:class:`~sqlalchemy.orm.interfaces.MapperExtension` methods are not
called from this method. For a delete hook here, use the
- ``after_bulk_delete()``
- :class:`~sqlalchemy.orm.interfaces.MapperExtension` method.
+ :meth:`.SessionExtension.after_bulk_delete()` event hook.
"""
#TODO: lots of duplication and ifs - probably needs to be
@@ -2134,8 +2158,7 @@ class Query(object):
Also, the ``before_update()`` and ``after_update()``
:class:`~sqlalchemy.orm.interfaces.MapperExtension` methods are not
called from this method. For an update hook here, use the
- ``after_bulk_update()``
- :class:`~sqlalchemy.orm.interfaces.SessionExtension` method.
+ :meth:`.SessionExtension.after_bulk_update()` event hook.
"""
@@ -2181,7 +2204,7 @@ class Query(object):
value_evaluators = {}
for key,value in values.iteritems():
- key = expression._column_as_key(key)
+ key = _attr_as_key(key)
value_evaluators[key] = evaluator_compiler.process(
expression._literal_as_binds(value))
except evaluator.UnevaluatableError:
@@ -2236,7 +2259,7 @@ class Query(object):
if identity_key in session.identity_map:
session.expire(
session.identity_map[identity_key],
- [expression._column_as_key(k) for k in values]
+ [_attr_as_key(k) for k in values]
)
for ext in session.extensions:
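The rewritten ``select_from()`` docstring above describes replacing the
"left edge" of subsequent joins. A short sketch of that usage, with
hypothetical ``User``/``Address`` entities::

    # select User rows, but make Address the default join point
    q = session.query(User).\
            select_from(Address).\
            join(Address.user).\
            filter(Address.email == 'ed@ed.com')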
diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py
index c1a5fd577..140328e24 100644
--- a/lib/sqlalchemy/orm/scoping.py
+++ b/lib/sqlalchemy/orm/scoping.py
@@ -6,7 +6,8 @@
import sqlalchemy.exceptions as sa_exc
from sqlalchemy.util import ScopedRegistry, ThreadLocalRegistry, \
- to_list, get_cls_kwargs, deprecated
+ to_list, get_cls_kwargs, deprecated,\
+ warn
from sqlalchemy.orm import (
EXT_CONTINUE, MapperExtension, class_mapper, object_session
)
@@ -45,7 +46,8 @@ class ScopedSession(object):
scope = kwargs.pop('scope', False)
if scope is not None:
if self.registry.has():
- raise sa_exc.InvalidRequestError("Scoped session is already present; no new arguments may be specified.")
+ raise sa_exc.InvalidRequestError("Scoped session is already present; "
+ "no new arguments may be specified.")
else:
sess = self.session_factory(**kwargs)
self.registry.set(sess)
@@ -85,6 +87,11 @@ class ScopedSession(object):
def configure(self, **kwargs):
"""reconfigure the sessionmaker used by this ScopedSession."""
+
+ if self.registry.has():
+ warn('At least one scoped session is already present. '
+ 'configure() cannot affect sessions that have '
+ 'already been created.')
self.session_factory.configure(**kwargs)
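The warning added to ``configure()`` implies configuration should happen
before any session exists in the current scope; roughly (``engine``
assumed)::

    from sqlalchemy.orm import scoped_session, sessionmaker

    Session = scoped_session(sessionmaker())
    Session.configure(bind=engine)   # OK, no session created yet

    s = Session()
    # a configure() call here would emit the new warning, as the
    # already-created session is unaffected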
diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py
index 54b41fcc6..95d29812e 100644
--- a/lib/sqlalchemy/orm/session.py
+++ b/lib/sqlalchemy/orm/session.py
@@ -22,6 +22,7 @@ from sqlalchemy.orm.util import (
from sqlalchemy.orm.mapper import Mapper, _none_set
from sqlalchemy.orm.unitofwork import UOWTransaction
from sqlalchemy.orm import identity
+import sys
__all__ = ['Session', 'SessionTransaction', 'SessionExtension']
@@ -105,13 +106,13 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
The full resolution is described in the ``get_bind()`` method of
``Session``. Usage looks like::
- sess = Session(binds={
+ Session = sessionmaker(binds={
SomeMappedClass: create_engine('postgresql://engine1'),
somemapper: create_engine('postgresql://engine2'),
some_table: create_engine('postgresql://engine3'),
})
- Also see the ``bind_mapper()`` and ``bind_table()`` methods.
+ Also see the :meth:`.Session.bind_mapper` and :meth:`.Session.bind_table` methods.
:param \class_: Specify an alternate class other than
``sqlalchemy.orm.session.Session`` which should be used by the returned
@@ -142,8 +143,9 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
as returned by the ``query()`` method. Defaults to
:class:`~sqlalchemy.orm.query.Query`.
- :param twophase: When ``True``, all transactions will be started using
- :mod:`~sqlalchemy.engine_TwoPhaseTransaction`. During a ``commit()``,
+ :param twophase: When ``True``, all transactions will be started as
+ a "two phase" transaction, i.e. using the "two phase" semantics
+ of the database in use along with an XID. During a ``commit()``,
after ``flush()`` has been issued for all attached databases, the
``prepare()`` method on each database's ``TwoPhaseTransaction`` will
be called. This allows each database to roll back the entire
@@ -206,7 +208,9 @@ class SessionTransaction(object):
single: thread safety; SessionTransaction
"""
-
+
+ _rollback_exception = None
+
def __init__(self, session, parent=None, nested=False):
self.session = session
self._connections = {}
@@ -229,9 +233,21 @@ class SessionTransaction(object):
def _assert_is_active(self):
self._assert_is_open()
if not self._active:
- raise sa_exc.InvalidRequestError(
- "The transaction is inactive due to a rollback in a "
- "subtransaction. Issue rollback() to cancel the transaction.")
+ if self._rollback_exception:
+ raise sa_exc.InvalidRequestError(
+ "This Session's transaction has been rolled back "
+ "due to a previous exception during flush."
+ " To begin a new transaction with this Session, "
+ "first issue Session.rollback()."
+ " Original exception was: %s"
+ % self._rollback_exception
+ )
+ else:
+ raise sa_exc.InvalidRequestError(
+ "This Session's transaction has been rolled back "
+ "by a nested rollback() call. To begin a new "
+ "transaction, issue Session.rollback() first."
+ )
def _assert_is_open(self, error_msg="The transaction is closed"):
if self.session is None:
@@ -288,14 +304,16 @@ class SessionTransaction(object):
assert not self.session._deleted
for s in self.session.identity_map.all_states():
- _expire_state(s, s.dict, None, instance_dict=self.session.identity_map)
+ _expire_state(s, s.dict, None,
+ instance_dict=self.session.identity_map)
def _remove_snapshot(self):
assert self._is_transaction_boundary
if not self.nested and self.session.expire_on_commit:
for s in self.session.identity_map.all_states():
- _expire_state(s, s.dict, None, instance_dict=self.session.identity_map)
+ _expire_state(s, s.dict, None,
+ instance_dict=self.session.identity_map)
def _connection_for_bind(self, bind):
self._assert_is_active()
@@ -379,7 +397,7 @@ class SessionTransaction(object):
self.close()
return self._parent
- def rollback(self):
+ def rollback(self, _capture_exception=False):
self._assert_is_open()
stx = self.session.transaction
@@ -397,6 +415,8 @@ class SessionTransaction(object):
transaction._deactivate()
self.close()
+ if self._parent and _capture_exception:
+ self._parent._rollback_exception = sys.exc_info()[1]
return self._parent
def _rollback_impl(self):
@@ -415,7 +435,8 @@ class SessionTransaction(object):
def close(self):
self.session.transaction = self._parent
if self._parent is None:
- for connection, transaction, autoclose in set(self._connections.values()):
+ for connection, transaction, autoclose in \
+ set(self._connections.values()):
if autoclose:
connection.close()
else:
@@ -1133,6 +1154,8 @@ class Session(object):
This operation cascades to associated instances if the association is
mapped with ``cascade="merge"``.
+ See :ref:`unitofwork_merging` for a detailed discussion of merging.
+
"""
if 'dont_load' in kw:
load = not kw['dont_load']
@@ -1451,7 +1474,7 @@ class Session(object):
ext.after_flush(self, flush_context)
transaction.commit()
except:
- transaction.rollback()
+ transaction.rollback(_capture_exception=True)
raise
flush_context.finalize_flush_changes()
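With ``_capture_exception=True`` above, a failed flush deactivates the
enclosing transaction and records the original error, which the new
``_assert_is_active()`` message then quotes. The recovery pattern is
roughly::

    try:
        session.flush()
    except:
        # further Session operations now raise InvalidRequestError
        # citing the original exception, until:
        session.rollback()
        raise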
diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py
index 830ac3c0c..a9808e6ba 100644
--- a/lib/sqlalchemy/orm/unitofwork.py
+++ b/lib/sqlalchemy/orm/unitofwork.py
@@ -33,10 +33,13 @@ class UOWEventHandler(interfaces.AttributeExtension):
def append(self, state, item, initiator):
# process "save_update" cascade rules for when
# an instance is appended to the list of another instance
+
sess = _state_session(state)
if sess:
prop = _state_mapper(state).get_property(self.key)
- if prop.cascade.save_update and item not in sess:
+ if prop.cascade.save_update and \
+ (prop.cascade_backrefs or self.key == initiator.key) and \
+ item not in sess:
sess.add(item)
return item
@@ -55,11 +58,13 @@ class UOWEventHandler(interfaces.AttributeExtension):
# is attached to another instance
if oldvalue is newvalue:
return newvalue
+
sess = _state_session(state)
if sess:
prop = _state_mapper(state).get_property(self.key)
if newvalue is not None and \
prop.cascade.save_update and \
+ (prop.cascade_backrefs or self.key == initiator.key) and \
newvalue not in sess:
sess.add(newvalue)
if prop.cascade.delete_orphan and \
diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py
index d68ff4473..f79a8449f 100644
--- a/lib/sqlalchemy/orm/util.py
+++ b/lib/sqlalchemy/orm/util.py
@@ -582,6 +582,12 @@ def _orm_selectable(entity):
mapper, selectable, is_aliased_class = _entity_info(entity)
return selectable
+def _attr_as_key(attr):
+ if hasattr(attr, 'key'):
+ return attr.key
+ else:
+ return expression._column_as_key(attr)
+
def _is_aliased_class(entity):
return isinstance(entity, AliasedClass)
diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py
index bad04b6c0..8070fd9ca 100644
--- a/lib/sqlalchemy/schema.py
+++ b/lib/sqlalchemy/schema.py
@@ -448,18 +448,34 @@ class Table(SchemaItem, expression.TableClause):
"""
- try:
- if schema is RETAIN_SCHEMA:
- schema = self.schema
- key = _get_table_key(self.name, schema)
+ if schema is RETAIN_SCHEMA:
+ schema = self.schema
+ key = _get_table_key(self.name, schema)
+ if key in metadata.tables:
+ util.warn("Table '%s' already exists within the given "
+ "MetaData - not copying." % self.description)
return metadata.tables[key]
- except KeyError:
- args = []
- for c in self.columns:
- args.append(c.copy(schema=schema))
- for c in self.constraints:
- args.append(c.copy(schema=schema))
- return Table(self.name, metadata, schema=schema, *args)
+
+ args = []
+ for c in self.columns:
+ args.append(c.copy(schema=schema))
+ for c in self.constraints:
+ args.append(c.copy(schema=schema))
+ table = Table(
+ self.name, metadata, schema=schema,
+ *args, **self.kwargs
+ )
+ for index in self.indexes:
+ # skip indexes that would be generated
+ # by the 'index' flag on Column
+ if len(index.columns) == 1 and \
+ list(index.columns)[0].index:
+ continue
+ Index(index.name,
+ unique=index.unique,
+ *[table.c[col] for col in index.columns.keys()],
+ **index.kwargs)
+ return table
class Column(SchemaItem, expression.ColumnClause):
"""Represents a column in a database table."""
@@ -887,6 +903,10 @@ class Column(SchemaItem, expression.ColumnClause):
"""
fk = [ForeignKey(f.column) for f in self.foreign_keys]
+ if name is None and self.name is None:
+ raise exc.InvalidRequestError("Cannot initialize a sub-selectable"
+ " with this Column object until it's 'name' has "
+ "been assigned.")
c = self._constructor(
name or self.name,
self.type,
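The ``tometadata()`` rework above copies table kwargs and standalone
indexes, and warns instead of silently returning an existing copy. Minimal
usage sketch with a hypothetical ``users_table``::

    meta2 = MetaData()
    users_copy = users_table.tometadata(meta2)

    # a second copy into the same MetaData now emits
    # "Table 'users' already exists ... - not copying."
    users_table.tometadata(meta2)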
diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py
index fcff5e355..e47db7e28 100644
--- a/lib/sqlalchemy/sql/compiler.py
+++ b/lib/sqlalchemy/sql/compiler.py
@@ -332,6 +332,10 @@ class SQLCompiler(engine.Compiled):
def visit_column(self, column, result_map=None, **kwargs):
name = column.name
+ if name is None:
+ raise exc.CompileError("Cannot compile Column object until "
+ "it's 'name' is assigned.")
+
if not column.is_literal and isinstance(name, sql._generated_label):
name = self._truncated_identifier("colident", name)
diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py
index 1b1cfee8a..219e3bf14 100644
--- a/lib/sqlalchemy/sql/expression.py
+++ b/lib/sqlalchemy/sql/expression.py
@@ -1852,17 +1852,19 @@ class ColumnElement(ClauseElement, _CompareMixin):
descending selectable.
"""
-
- if name:
- co = ColumnClause(name, selectable, type_=getattr(self,
- 'type', None))
+ if name is None:
+ name = self.anon_label
+ # TODO: may want to change this to anon_label,
+ # or some value that is more useful than the
+ # compiled form of the expression
+ key = str(self)
else:
- name = str(self)
- co = ColumnClause(self.anon_label, selectable,
- type_=getattr(self, 'type', None))
-
+ key = name
+
+ co = ColumnClause(name, selectable, type_=getattr(self,
+ 'type', None))
co.proxies = [self]
- selectable.columns[name] = co
+ selectable.columns[key] = co
return co
def compare(self, other, use_proxies=False, equivalents=None, **kw):
@@ -2199,7 +2201,7 @@ class FromClause(Selectable):
def _reset_exported(self):
"""delete memoized collections when a FromClause is cloned."""
- for attr in '_columns', '_primary_key_foreign_keys', \
+ for attr in '_columns', '_primary_key', '_foreign_keys', \
'locate_all_froms':
self.__dict__.pop(attr, None)
@@ -3683,8 +3685,7 @@ class _ScalarSelect(_Grouping):
def __init__(self, element):
self.element = element
- cols = list(element.c)
- self.type = cols[0].type
+ self.type = element._scalar_type()
@property
def columns(self):
@@ -3735,7 +3736,10 @@ class CompoundSelect(_SelectBaseMixin, FromClause):
self.selects.append(s.self_group(self))
_SelectBaseMixin.__init__(self, **kwargs)
-
+
+ def _scalar_type(self):
+ return self.selects[0]._scalar_type()
+
def self_group(self, against=None):
return _FromGrouping(self)
@@ -3908,6 +3912,11 @@ class Select(_SelectBaseMixin, FromClause):
return froms
+ def _scalar_type(self):
+ elem = self._raw_columns[0]
+ cols = list(elem._select_iterable)
+ return cols[0].type
+
@property
def froms(self):
"""Return the displayed list of FromClause elements."""
@@ -3915,16 +3924,21 @@ class Select(_SelectBaseMixin, FromClause):
return self._get_display_froms()
@_generative
- def with_hint(self, selectable, text, dialect_name=None):
+ def with_hint(self, selectable, text, dialect_name='*'):
"""Add an indexing hint for the given selectable to this
:class:`Select`.
- The text of the hint is written specific to a specific backend, and
- typically uses Python string substitution syntax to render the name
- of the table or alias, such as for Oracle::
+ The text of the hint is rendered in the appropriate
+ location for the database backend in use, relative
+ to the given :class:`.Table` or :class:`.Alias` passed as the
+ *selectable* argument. The dialect implementation
+ typically uses Python string substitution syntax
+ with the token ``%(name)s`` to render the name of
+ the table or alias. E.g. when using Oracle, the
+ following::
- select([mytable]).with_hint(mytable, "+ index(%(name)s
- ix_mytable)")
+ select([mytable]).\\
+ with_hint(mytable, "+ index(%(name)s ix_mytable)")
Would render SQL as::
@@ -3934,13 +3948,11 @@ class Select(_SelectBaseMixin, FromClause):
hint to a particular backend. Such as, to add hints for both Oracle
and Sybase simultaneously::
- select([mytable]).\
- with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\
+ select([mytable]).\\
+ with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
"""
- if not dialect_name:
- dialect_name = '*'
self._hints = self._hints.union({(selectable, dialect_name):text})
@property
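With dialect_name now defaulting to '*', a hint given without a dialect name applies on any backend; a minimal sketch compiling against the Oracle dialect (assuming a 0.6-series install):

    from sqlalchemy import MetaData, Table, Column, Integer, select
    from sqlalchemy.dialects import oracle

    m = MetaData()
    mytable = Table('mytable', m, Column('myid', Integer))

    stmt = select([mytable]).with_hint(
        mytable, "+ index(%(name)s ix_mytable)")

    # renders SELECT /*+ index(mytable ix_mytable) */ ... on Oracle
    print stmt.compile(dialect=oracle.dialect())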
diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py
index bd4f70247..638549e12 100644
--- a/lib/sqlalchemy/sql/util.py
+++ b/lib/sqlalchemy/sql/util.py
@@ -92,6 +92,25 @@ def find_columns(clause):
visitors.traverse(clause, {}, {'column':cols.add})
return cols
+def clause_is_present(clause, search):
+ """Given a target clause and a second to search within, return True
+ if the target is plainly present in the search without any
+ subqueries or aliases involved.
+
+ Basically descends through Joins.
+
+ """
+
+ stack = [search]
+ while stack:
+ elem = stack.pop()
+ if clause is elem:
+ return True
+ elif isinstance(elem, expression.Join):
+ stack.extend((elem.left, elem.right))
+ return False
+
+
def bind_values(clause):
"""Return an ordered list of "bound" values in the given clause.
diff --git a/lib/sqlalchemy/test/requires.py b/lib/sqlalchemy/test/requires.py
index 501f0e24d..14c548f12 100644
--- a/lib/sqlalchemy/test/requires.py
+++ b/lib/sqlalchemy/test/requires.py
@@ -52,6 +52,7 @@ def boolean_col_expressions(fn):
no_support('mssql', 'not supported by database'),
no_support('sybase', 'not supported by database'),
no_support('maxdb', 'FIXME: verify not supported by database'),
+ no_support('informix', 'not supported by database'),
)
def identity(fn):
@@ -120,6 +121,7 @@ def savepoints(fn):
no_support('sqlite', 'not supported by database'),
no_support('sybase', 'FIXME: guessing, needs confirmation'),
exclude('mysql', '<', (5, 0, 3), 'not supported by database'),
+ exclude('informix', '<', (11, 55, 'xC3'), 'not supported by database'),
)
def denormalized_names(fn):
@@ -148,6 +150,7 @@ def sequences(fn):
no_support('mysql', 'no SEQUENCE support'),
no_support('sqlite', 'no SEQUENCE support'),
no_support('sybase', 'no SEQUENCE support'),
+ no_support('informix', 'no SEQUENCE support'),
)
def update_nowait(fn):
@@ -176,6 +179,7 @@ def intersect(fn):
fails_on('firebird', 'no support for INTERSECT'),
fails_on('mysql', 'no support for INTERSECT'),
fails_on('sybase', 'no support for INTERSECT'),
+ fails_on('informix', 'no support for INTERSECT'),
)
def except_(fn):
@@ -185,6 +189,7 @@ def except_(fn):
fails_on('firebird', 'no support for EXCEPT'),
fails_on('mysql', 'no support for EXCEPT'),
fails_on('sybase', 'no support for EXCEPT'),
+ fails_on('informix', 'no support for EXCEPT'),
)
def offset(fn):
@@ -247,6 +252,12 @@ def sane_rowcount(fn):
skip_if(lambda: not testing.db.dialect.supports_sane_rowcount)
)
+def dbapi_lastrowid(fn):
+ return _chain_decorators_on(
+ fn,
+ fails_on_everything_except('mysql+mysqldb', 'mysql+oursql', 'sqlite+pysqlite')
+ )
+
def sane_multi_rowcount(fn):
return _chain_decorators_on(
fn,
diff --git a/lib/sqlalchemy/topological.py b/lib/sqlalchemy/topological.py
index 6c3e90d98..0f4f32461 100644
--- a/lib/sqlalchemy/topological.py
+++ b/lib/sqlalchemy/topological.py
@@ -9,6 +9,7 @@
from sqlalchemy.exc import CircularDependencyError
from sqlalchemy import util
+
__all__ = ['sort', 'sort_as_subsets', 'find_cycles']
def sort_as_subsets(tuples, allitems):
@@ -27,8 +28,10 @@ def sort_as_subsets(tuples, allitems):
if not output:
raise CircularDependencyError(
- "Circular dependency detected: cycles: %r all edges: %s" %
- (find_cycles(tuples, allitems), _dump_edges(edges, True)))
+ "Circular dependency detected",
+ find_cycles(tuples, allitems),
+ _gen_edges(edges)
+ )
todo.difference_update(output)
yield output
@@ -72,14 +75,9 @@ def find_cycles(tuples, allitems):
node = stack.pop()
return output
-def _dump_edges(edges, reverse):
- l = []
- for left in edges:
- for right in edges[left]:
- if reverse:
- l.append((right, left))
- else:
- l.append((left, right))
- return repr(l)
-
-
+def _gen_edges(edges):
+ return set([
+ (right, left)
+ for left in edges
+ for right in edges[left]
+ ])
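The reworked exception carries the cycle data as attributes, which the updated test/base/test_dependency.py tests below rely on; a condensed sketch (Python 2 syntax, matching the tests):

    from sqlalchemy import topological, exc

    # two nodes that depend on each other form a cycle
    tuples = [('n1', 'n2'), ('n2', 'n1')]
    try:
        list(topological.sort(tuples, ['n1', 'n2']))
        assert False
    except exc.CircularDependencyError, err:
        # cycles and edges are now attributes rather than being baked
        # into the message string
        assert err.cycles == set(['n1', 'n2'])
        assert ('n1', 'n2') in err.edges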
diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py
index af7ef22e6..46e5901a3 100644
--- a/lib/sqlalchemy/types.py
+++ b/lib/sqlalchemy/types.py
@@ -639,7 +639,7 @@ class NullType(TypeEngine):
__visit_name__ = 'null'
def _adapt_expression(self, op, othertype):
- if othertype is NULLTYPE or not operators.is_commutative(op):
+ if isinstance(othertype, NullType) or not operators.is_commutative(op):
return op, self
else:
return othertype._adapt_expression(op, self)
diff --git a/lib/sqlalchemy/util.py b/lib/sqlalchemy/util.py
index 3f1b89cfc..351b50883 100644
--- a/lib/sqlalchemy/util.py
+++ b/lib/sqlalchemy/util.py
@@ -1804,8 +1804,12 @@ class classproperty(property):
"""A decorator that behaves like @property except that operates
on classes rather than instances.
- This is helpful when you need to compute __table_args__ and/or
- __mapper_args__ when using declarative."""
+ The decorator currently receives special handling when used with
+ the declarative module, but note that the
+ :class:`~.sqlalchemy.ext.declarative.declared_attr`
+ decorator should be used for this purpose with declarative.
+
+ """
def __init__(self, fget, *arg, **kw):
super(classproperty, self).__init__(fget, *arg, **kw)
diff --git a/setup.py b/setup.py
index 3b64bc7f7..76cba0584 100644
--- a/setup.py
+++ b/setup.py
@@ -56,7 +56,8 @@ elif BUILD_CEXTENSIONS:
def find_packages(dir_):
packages = []
- for _dir, subdirectories, files in os.walk(os.path.join(dir_, 'sqlalchemy')):
+ for _dir, subdirectories, files in os.walk(os.path.join(dir_,
+ 'sqlalchemy')):
if '__init__.py' in files:
lib, fragment = _dir.split(os.sep, 1)
packages.append(fragment.replace(os.sep, '.'))
@@ -65,8 +66,10 @@ def find_packages(dir_):
if sys.version_info < (2, 4):
raise Exception("SQLAlchemy requires Python 2.4 or higher.")
-v = open(os.path.join(os.path.dirname(__file__), 'lib', 'sqlalchemy', '__init__.py'))
-VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
+v = open(os.path.join(os.path.dirname(__file__), 'lib', 'sqlalchemy',
+ '__init__.py'))
+VERSION = re.compile(r".*__version__ = '(.*?)'",
+ re.S).match(v.read()).group(1)
v.close()
setup(name = "SQLAlchemy",
@@ -94,28 +97,123 @@ setup(name = "SQLAlchemy",
long_description = """\
SQLAlchemy is:
- * The Python SQL toolkit and Object Relational Mapper that gives application developers the full power and flexibility of SQL. SQLAlchemy provides a full suite of well known enterprise-level persistence patterns, designed for efficient and high-performing database access, adapted into a simple and Pythonic domain language.
- * extremely easy to use for all the basic tasks, such as: accessing pooled connections, constructing SQL from Python expressions, finding object instances, and commiting object modifications back to the database.
- * powerful enough for complicated tasks, such as: eager load a graph of objects and their dependencies via joins; map recursive adjacency structures automatically; map objects to not just tables but to any arbitrary join or select statement; combine multiple tables together to load whole sets of otherwise unrelated objects from a single result set; commit entire graphs of object changes in one step.
- * built to conform to what DBAs demand, including the ability to swap out generated SQL with hand-optimized statements, full usage of bind parameters for all literal values, fully transactionalized and consistent updates using Unit of Work.
- * modular. Different parts of SQLAlchemy can be used independently of the rest, including the connection pool, SQL construction, and ORM. SQLAlchemy is constructed in an open style that allows plenty of customization, with an architecture that supports custom datatypes, custom SQL extensions, and ORM plugins which can augment or extend mapping functionality.
+ * The Python SQL toolkit and Object Relational Mapper
+ that gives application developers the full power and
+ flexibility of SQL. SQLAlchemy provides a full suite
+ of well known enterprise-level persistence patterns,
+ designed for efficient and high-performing database
+ access, adapted into a simple and Pythonic domain
+ language.
+ * extremely easy to use for all the basic tasks, such
+ as: accessing pooled connections, constructing SQL
+ from Python expressions, finding object instances, and
+ committing object modifications back to the database.
+ * powerful enough for complicated tasks, such as: eager
+ load a graph of objects and their dependencies via
+ joins; map recursive adjacency structures
+ automatically; map objects to not just tables but to
+ any arbitrary join or select statement; combine
+ multiple tables together to load whole sets of
+ otherwise unrelated objects from a single result set;
+ commit entire graphs of object changes in one step.
+ * built to conform to what DBAs demand, including the
+ ability to swap out generated SQL with hand-optimized
+ statements, full usage of bind parameters for all
+ literal values, fully transactionalized and consistent
+ updates using Unit of Work.
+ * modular. Different parts of SQLAlchemy can be used
+ independently of the rest, including the connection
+ pool, SQL construction, and ORM. SQLAlchemy is
+ constructed in an open style that allows plenty of
+ customization, with an architecture that supports
+ custom datatypes, custom SQL extensions, and ORM
+ plugins which can augment or extend mapping
+ functionality.
SQLAlchemy's Philosophy:
- * SQL databases behave less and less like object collections the more size and performance start to matter; object collections behave less and less like tables and rows the more abstraction starts to matter. SQLAlchemy aims to accomodate both of these principles.
- * Your classes aren't tables, and your objects aren't rows. Databases aren't just collections of tables; they're relational algebra engines. You don't have to select from just tables, you can select from joins, subqueries, and unions. Database and domain concepts should be visibly decoupled from the beginning, allowing both sides to develop to their full potential.
- * For example, table metadata (objects that describe tables) are declared distinctly from the classes theyre designed to store. That way database relationship concepts don't interfere with your object design concepts, and vice-versa; the transition from table-mapping to selectable-mapping is seamless; a class can be mapped against the database in more than one way. SQLAlchemy provides a powerful mapping layer that can work as automatically or as manually as you choose, determining relationships based on foreign keys or letting you define the join conditions explicitly, to bridge the gap between database and domain.
+ * SQL databases behave less and less like object
+ collections the more size and performance start to
+ matter; object collections behave less and less like
+ tables and rows the more abstraction starts to matter.
+ SQLAlchemy aims to accommodate both of these
+ principles.
+ * Your classes aren't tables, and your objects aren't
+ rows. Databases aren't just collections of tables;
+ they're relational algebra engines. You don't have to
+ select from just tables, you can select from joins,
+ subqueries, and unions. Database and domain concepts
+ should be visibly decoupled from the beginning,
+ allowing both sides to develop to their full
+ potential.
+ * For example, table metadata (objects that describe
+ tables) are declared distinctly from the classes
+ they're designed to store. That way database
+ relationship concepts don't interfere with your object
+ design concepts, and vice-versa; the transition from
+ table-mapping to selectable-mapping is seamless; a
+ class can be mapped against the database in more than
+ one way. SQLAlchemy provides a powerful mapping layer
+ that can work as automatically or as manually as you
+ choose, determining relationships based on foreign
+ keys or letting you define the join conditions
+ explicitly, to bridge the gap between database and
+ domain.
SQLAlchemy's Advantages:
- * The Unit Of Work system organizes pending CRUD operations into queues and commits them all in one batch. It then performs a topological "dependency sort" of all items to be committed and deleted and groups redundant statements together. This produces the maxiumum efficiency and transaction safety, and minimizes chances of deadlocks. Modeled after Fowler's "Unit of Work" pattern as well as Java Hibernate.
- * Function-based query construction allows boolean expressions, operators, functions, table aliases, selectable subqueries, create/update/insert/delete queries, correlated updates, correlated EXISTS clauses, UNION clauses, inner and outer joins, bind parameters, free mixing of literal text within expressions, as little or as much as desired. Query-compilation is vendor-specific; the same query object can be compiled into any number of resulting SQL strings depending on its compilation algorithm.
- * Database mapping and class design are totally separate. Persisted objects have no subclassing requirement (other than 'object') and are POPO's : plain old Python objects. They retain serializability (pickling) for usage in various caching systems and session objects. SQLAlchemy "decorates" classes with non-intrusive property accessors to automatically log object creates and modifications with the UnitOfWork engine, to lazyload related data, as well as to track attribute change histories.
- * Custom list classes can be used with eagerly or lazily loaded child object lists, allowing rich relationships to be created on the fly as SQLAlchemy appends child objects to an object attribute.
- * Composite (multiple-column) primary keys are supported, as are "association" objects that represent the middle of a "many-to-many" relationship.
- * Self-referential tables and mappers are supported. Adjacency list structures can be created, saved, and deleted with proper cascading, with no extra programming.
- * Data mapping can be used in a row-based manner. Any bizarre hyper-optimized query that you or your DBA can cook up, you can run in SQLAlchemy, and as long as it returns the expected columns within a rowset, you can get your objects from it. For a rowset that contains more than one kind of object per row, multiple mappers can be chained together to return multiple object instance lists from a single database round trip.
- * The type system allows pre- and post- processing of data, both at the bind parameter and the result set level. User-defined types can be freely mixed with built-in types. Generic types as well as SQL-specific types are available.
+ * The Unit Of Work system organizes pending CRUD
+ operations into queues and commits them all in one
+ batch. It then performs a topological "dependency
+ sort" of all items to be committed and deleted and
+ groups redundant statements together. This produces
+ the maximum efficiency and transaction safety, and
+ minimizes chances of deadlocks. Modeled after Fowler's
+ "Unit of Work" pattern as well as Java Hibernate.
+ * Function-based query construction allows boolean
+ expressions, operators, functions, table aliases,
+ selectable subqueries, create/update/insert/delete
+ queries, correlated updates, correlated EXISTS
+ clauses, UNION clauses, inner and outer joins, bind
+ parameters, free mixing of literal text within
+ expressions, as little or as much as desired.
+ Query-compilation is vendor-specific; the same query
+ object can be compiled into any number of resulting
+ SQL strings depending on its compilation algorithm.
+ * Database mapping and class design are totally
+ separate. Persisted objects have no subclassing
+ requirement (other than 'object') and are POPOs:
+ plain old Python objects. They retain serializability
+ (pickling) for usage in various caching systems and
+ session objects. SQLAlchemy "decorates" classes with
+ non-intrusive property accessors to automatically log
+ object creates and modifications with the UnitOfWork
+ engine, to lazyload related data, as well as to track
+ attribute change histories.
+ * Custom list classes can be used with eagerly or lazily
+ loaded child object lists, allowing rich relationships
+ to be created on the fly as SQLAlchemy appends child
+ objects to an object attribute.
+ * Composite (multiple-column) primary keys are
+ supported, as are "association" objects that represent
+ the middle of a "many-to-many" relationship.
+ * Self-referential tables and mappers are supported.
+ Adjacency list structures can be created, saved, and
+ deleted with proper cascading, with no extra
+ programming.
+ * Data mapping can be used in a row-based manner. Any
+ bizarre hyper-optimized query that you or your DBA can
+ cook up, you can run in SQLAlchemy, and as long as it
+ returns the expected columns within a rowset, you can
+ get your objects from it. For a rowset that contains
+ more than one kind of object per row, multiple mappers
+ can be chained together to return multiple object
+ instance lists from a single database round trip.
+ * The type system allows pre- and post-processing of
+ data, both at the bind parameter and the result set
+ level. User-defined types can be freely mixed with
+ built-in types. Generic types as well as SQL-specific
+ types are available.
""",
classifiers = [
diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py
index 3e30efa24..623ec67ba 100644
--- a/test/aaa_profiling/test_zoomark_orm.py
+++ b/test/aaa_profiling/test_zoomark_orm.py
@@ -335,11 +335,11 @@ class ZooMarkTest(TestBase):
def test_profile_1_create_tables(self):
self.test_baseline_1_create_tables()
- @profiling.function_call_count(9225)
+ @profiling.function_call_count(8469)
def test_profile_1a_populate(self):
self.test_baseline_1a_populate()
- @profiling.function_call_count(640)
+ @profiling.function_call_count(591)
def test_profile_2_insert(self):
self.test_baseline_2_insert()
diff --git a/test/base/test_dependency.py b/test/base/test_dependency.py
index aa4410576..9fddfc47f 100644
--- a/test/base/test_dependency.py
+++ b/test/base/test_dependency.py
@@ -84,10 +84,16 @@ class DependencySortTest(TestBase):
(node4, node1),
]
allitems = self._nodes_from_tuples(tuples)
- assert_raises(exc.CircularDependencyError, list,
- topological.sort(tuples, allitems))
- # TODO: test find_cycles
+ try:
+ list(topological.sort(tuples, allitems))
+ assert False
+ except exc.CircularDependencyError, err:
+ eq_(err.cycles, set(['node1', 'node3', 'node2', 'node5',
+ 'node4']))
+ eq_(err.edges, set([('node3', 'node1'), ('node4', 'node1'),
+ ('node2', 'node3'), ('node1', 'node2'),
+ ('node4','node5'), ('node5', 'node4')]))
def test_raise_on_cycle_two(self):
@@ -101,10 +107,15 @@ class DependencySortTest(TestBase):
tuples = [(node1, node2), (node3, node1), (node2, node4),
(node3, node2), (node2, node3)]
allitems = self._nodes_from_tuples(tuples)
- assert_raises(exc.CircularDependencyError, list,
- topological.sort(tuples, allitems))
- # TODO: test find_cycles
+ try:
+ list(topological.sort(tuples, allitems))
+ assert False
+ except exc.CircularDependencyError, err:
+ eq_(err.cycles, set(['node1', 'node3', 'node2']))
+ eq_(err.edges, set([('node3', 'node1'), ('node2', 'node3'),
+ ('node3', 'node2'), ('node1', 'node2'),
+ ('node2','node4')]))
def test_raise_on_cycle_three(self):
question, issue, providerservice, answer, provider = \
diff --git a/test/dialect/test_informix.py b/test/dialect/test_informix.py
index 78dc54eda..ceec587d9 100644
--- a/test/dialect/test_informix.py
+++ b/test/dialect/test_informix.py
@@ -5,7 +5,6 @@ from sqlalchemy.test import *
class CompileTest(TestBase, AssertsCompiledSQL):
- __only_on__ = 'informix'
__dialect__ = informix.InformixDialect()
def test_statements(self):
@@ -23,3 +22,4 @@ class CompileTest(TestBase, AssertsCompiledSQL):
't1.col1 = t2.col3')
self.assert_compile(t1.update().values({t1.c.col1: t1.c.col1
+ 1}), 'UPDATE t1 SET col1=(t1.col1 + ?)')
+
diff --git a/test/engine/test_bind.py b/test/engine/test_bind.py
index 547afc64c..dfcc5e172 100644
--- a/test/engine/test_bind.py
+++ b/test/engine/test_bind.py
@@ -147,7 +147,7 @@ class BindTest(testing.TestBase):
table.insert().execute(foo=7)
trans.rollback()
metadata.bind = None
- assert conn.execute('select count(1) from test_table'
+ assert conn.execute('select count(*) from test_table'
).scalar() == 0
finally:
metadata.drop_all(bind=conn)
diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py
index d85279981..86764f170 100644
--- a/test/engine/test_execute.py
+++ b/test/engine/test_execute.py
@@ -34,9 +34,10 @@ class ExecuteTest(TestBase):
def teardown_class(cls):
metadata.drop_all()
- @testing.fails_on_everything_except('firebird', 'maxdb',
- 'sqlite', '+pyodbc',
- '+mxodbc', '+zxjdbc', 'mysql+oursql')
+ @testing.fails_on_everything_except('firebird', 'maxdb',
+ 'sqlite', '+pyodbc',
+ '+mxodbc', '+zxjdbc', 'mysql+oursql',
+ 'informix+informixdb')
def test_raw_qmark(self):
for conn in testing.db, testing.db.connect():
conn.execute('insert into users (user_id, user_name) '
@@ -103,7 +104,7 @@ class ExecuteTest(TestBase):
'horse'), (4, 'sally')]
conn.execute('delete from users')
- @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle')
+ @testing.fails_on_everything_except('sqlite', 'oracle+cx_oracle', 'informix+informixdb')
def test_raw_named(self):
for conn in testing.db, testing.db.connect():
conn.execute('insert into users (user_id, user_name) '
@@ -183,7 +184,7 @@ class CompiledCacheTest(TestBase):
cached_conn.execute(ins, {'user_name':'u2'})
cached_conn.execute(ins, {'user_name':'u3'})
assert len(cache) == 1
- eq_(conn.execute("select count(1) from users").scalar(), 3)
+ eq_(conn.execute("select count(*) from users").scalar(), 3)
class LogTest(TestBase):
def _test_logger(self, eng, eng_name, pool_name):
diff --git a/test/engine/test_metadata.py b/test/engine/test_metadata.py
index 7ea753621..528d56244 100644
--- a/test/engine/test_metadata.py
+++ b/test/engine/test_metadata.py
@@ -1,8 +1,11 @@
-from sqlalchemy.test.testing import assert_raises, assert_raises_message
+from sqlalchemy.test.testing import assert_raises
+from sqlalchemy.test.testing import assert_raises_message
+from sqlalchemy.test.testing import emits_warning
+
import pickle
from sqlalchemy import Integer, String, UniqueConstraint, \
CheckConstraint, ForeignKey, MetaData, Sequence, \
- ForeignKeyConstraint, ColumnDefault
+ ForeignKeyConstraint, ColumnDefault, Index
from sqlalchemy.test.schema import Table, Column
from sqlalchemy import schema, exc
import sqlalchemy as tsa
@@ -246,6 +249,62 @@ class MetaDataTest(TestBase, ComparesTables):
eq_(str(table_c.join(table2_c).onclause),
'someschema.mytable.myid = someschema.othertable.myid')
+ def test_tometadata_kwargs(self):
+ meta = MetaData()
+
+ table = Table('mytable', meta,
+ Column('myid', Integer, primary_key=True),
+ mysql_engine='InnoDB',
+ )
+
+ meta2 = MetaData()
+ table_c = table.tometadata(meta2)
+
+ eq_(table.kwargs, table_c.kwargs)
+
+ def test_tometadata_indexes(self):
+ meta = MetaData()
+
+ table = Table('mytable', meta,
+ Column('id', Integer, primary_key=True),
+ Column('data1', Integer, index=True),
+ Column('data2', Integer),
+ )
+ Index('multi', table.c.data1, table.c.data2)
+
+ meta2 = MetaData()
+ table_c = table.tometadata(meta2)
+
+ def _get_key(i):
+ return [i.name,i.unique] + \
+ sorted(i.kwargs.items()) + \
+ i.columns.keys()
+
+ eq_(
+ sorted([_get_key(i) for i in table.indexes]),
+ sorted([_get_key(i) for i in table_c.indexes])
+ )
+
+ @emits_warning("Table '.+' already exists within the given MetaData")
+ def test_tometadata_already_there(self):
+
+ meta1 = MetaData()
+ table1 = Table('mytable', meta1,
+ Column('myid', Integer, primary_key=True),
+ )
+ meta2 = MetaData()
+ table2 = Table('mytable', meta2,
+ Column('yourid', Integer, primary_key=True),
+ )
+
+ meta3 = MetaData()
+
+ table_c = table1.tometadata(meta2)
+ table_d = table2.tometadata(meta2)
+
+ # d'oh!
+ assert table_c is table_d
+
def test_tometadata_default_schema(self):
meta = MetaData()
diff --git a/test/engine/test_pool.py b/test/engine/test_pool.py
index d567d6663..29591f11e 100644
--- a/test/engine/test_pool.py
+++ b/test/engine/test_pool.py
@@ -667,7 +667,7 @@ class QueuePoolTest(PoolTestBase):
c1 = None
c1 = p.connect()
assert c1.connection.id != c_id
-
+
def test_recreate(self):
dbapi = MockDBAPI()
p = pool.QueuePool(creator=lambda : dbapi.connect('foo.db'),
diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py
index 8ef851358..e26413957 100644
--- a/test/engine/test_reconnect.py
+++ b/test/engine/test_reconnect.py
@@ -221,6 +221,8 @@ class RealReconnectTest(TestBase):
def teardown(self):
engine.dispose()
+ @testing.fails_on('+informixdb',
+ "Wrong error thrown, fix in informixdb?")
def test_reconnect(self):
conn = engine.connect()
@@ -256,7 +258,7 @@ class RealReconnectTest(TestBase):
assert not conn.invalidated
conn.close()
-
+
def test_invalidate_twice(self):
conn = engine.connect()
conn.invalidate()
@@ -288,7 +290,9 @@ class RealReconnectTest(TestBase):
# pool was recreated
assert engine.pool is not p1
-
+
+ @testing.fails_on('+informixdb',
+ "Wrong error thrown, fix in informixdb?")
def test_null_pool(self):
engine = \
engines.reconnecting_engine(options=dict(poolclass=pool.NullPool))
@@ -307,6 +311,8 @@ class RealReconnectTest(TestBase):
eq_(conn.execute(select([1])).scalar(), 1)
assert not conn.invalidated
+ @testing.fails_on('+informixdb',
+ "Wrong error thrown, fix in informixdb?")
def test_close(self):
conn = engine.connect()
eq_(conn.execute(select([1])).scalar(), 1)
@@ -325,6 +331,8 @@ class RealReconnectTest(TestBase):
conn = engine.connect()
eq_(conn.execute(select([1])).scalar(), 1)
+ @testing.fails_on('+informixdb',
+ "Wrong error thrown, fix in informixdb?")
def test_with_transaction(self):
conn = engine.connect()
trans = conn.begin()
@@ -401,6 +409,8 @@ class InvalidateDuringResultTest(TestBase):
@testing.fails_on('+pg8000',
"Buffers the result set and doesn't check for "
"connection close")
+ @testing.fails_on('+informixdb',
+ "Wrong error thrown, fix in informixdb?")
def test_invalidate_on_results(self):
conn = engine.connect()
result = conn.execute('select * from sometable')
diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py
index a82f1ec52..6a8e7cf8a 100644
--- a/test/engine/test_reflection.py
+++ b/test/engine/test_reflection.py
@@ -281,7 +281,7 @@ class ReflectionTest(TestBase, ComparesTables):
meta.create_all()
try:
meta2 = MetaData(testing.db)
- a2 = Table('addresses', meta2,
+ a2 = Table('addresses', meta2,
Column('user_id',sa.Integer, sa.ForeignKey('users.id')),
autoload=True)
u2 = Table('users', meta2, autoload=True)
@@ -597,7 +597,7 @@ class ReflectionTest(TestBase, ComparesTables):
Column('data', sa.String(50)),
sa.ForeignKeyConstraint(['foo', 'bar', 'lala'],
['multi.multi_id', 'multi.multi_rev', 'multi.multi_hoho'
- ]),
+ ]),
test_needs_fk=True,
)
meta.create_all()
@@ -620,6 +620,7 @@ class ReflectionTest(TestBase, ComparesTables):
@testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
+ @testing.fails_on('+informixdb', 'FIXME: should be supported via the DELIMITED env var but that breaks everything else for now')
def test_reserved(self):
# check a table that uses an SQL reserved name doesn't cause an
@@ -742,7 +743,6 @@ class ReflectionTest(TestBase, ComparesTables):
m2 = MetaData(testing.db)
t2 = Table('party', m2, autoload=True)
- print len(t2.indexes), t2.indexes
assert len(t2.indexes) == 3
# Make sure indexes are in the order we expect them in
tmp = [(idx.name, idx) for idx in t2.indexes]
@@ -1056,7 +1056,7 @@ def createTables(meta, schema=None):
Column('test3', sa.Text),
Column('test4', sa.Numeric(10, 2), nullable = False),
Column('test5', sa.Date),
- Column('test5-1', sa.TIMESTAMP),
+ Column('test5_1', sa.TIMESTAMP),
parent_user_id,
Column('test6', sa.Date, nullable=False),
Column('test7', sa.Text),
diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py
index e7e2fe1b8..f09c67164 100644
--- a/test/engine/test_transaction.py
+++ b/test/engine/test_transaction.py
@@ -135,7 +135,7 @@ class TransactionTest(TestBase):
conn2 = connection.execution_options(dummy=True)
conn2.execute(users.insert(), user_id=2, user_name='user2')
transaction.rollback()
- eq_(connection.scalar("select count(1) from query_users"), 0)
+ eq_(connection.scalar("select count(*) from query_users"), 0)
finally:
connection.close()
@@ -150,7 +150,7 @@ class TransactionTest(TestBase):
connection.execute(users.insert(), user_id=5, user_name='user5')
trans2.commit()
transaction.rollback()
- self.assert_(connection.scalar('select count(1) from '
+ self.assert_(connection.scalar('select count(*) from '
'query_users') == 0)
result = connection.execute('select * from query_users')
assert len(result.fetchall()) == 0
@@ -170,7 +170,7 @@ class TransactionTest(TestBase):
assert connection.in_transaction()
transaction.commit()
assert not connection.in_transaction()
- self.assert_(connection.scalar('select count(1) from '
+ self.assert_(connection.scalar('select count(*) from '
'query_users') == 5)
result = connection.execute('select * from query_users')
assert len(result.fetchall()) == 5
@@ -190,7 +190,7 @@ class TransactionTest(TestBase):
assert connection.in_transaction()
transaction.close()
assert not connection.in_transaction()
- self.assert_(connection.scalar('select count(1) from '
+ self.assert_(connection.scalar('select count(*) from '
'query_users') == 0)
result = connection.execute('select * from query_users')
assert len(result.fetchall()) == 0
@@ -604,7 +604,7 @@ class TLTransactionTest(TestBase):
def test_commits(self):
connection = tlengine.connect()
- assert connection.execute('select count(1) from query_users'
+ assert connection.execute('select count(*) from query_users'
).scalar() == 0
connection.close()
connection = tlengine.contextual_connect()
@@ -697,7 +697,7 @@ class TLTransactionTest(TestBase):
tlengine.rollback()
try:
self.assert_(external_connection.scalar(
- 'select count(1) from query_users'
+ 'select count(*) from query_users'
) == 0)
finally:
external_connection.close()
@@ -719,7 +719,7 @@ class TLTransactionTest(TestBase):
tlengine.commit()
try:
self.assert_(external_connection.scalar(
- 'select count(1) from query_users'
+ 'select count(*) from query_users'
) == 5)
finally:
external_connection.close()
@@ -751,7 +751,7 @@ class TLTransactionTest(TestBase):
conn.close()
try:
self.assert_(external_connection.scalar(
- 'select count(1) from query_users'
+ 'select count(*) from query_users'
) == 0)
finally:
external_connection.close()
@@ -778,7 +778,7 @@ class TLTransactionTest(TestBase):
connection.close()
try:
self.assert_(external_connection.scalar(
- 'select count(1) from query_users'
+ 'select count(*) from query_users'
) == 0)
finally:
external_connection.close()
diff --git a/test/ext/test_declarative.py b/test/ext/test_declarative.py
index 26e1563fe..0202aa69f 100644
--- a/test/ext/test_declarative.py
+++ b/test/ext/test_declarative.py
@@ -14,6 +14,7 @@ from sqlalchemy.orm import relationship, create_session, class_mapper, \
from sqlalchemy.test.testing import eq_
from sqlalchemy.util import classproperty
from test.orm._base import ComparableEntity, MappedTest
+from sqlalchemy.ext.declarative import declared_attr
class DeclarativeTestBase(testing.TestBase, testing.AssertsExecutionResults):
def setup(self):
@@ -369,11 +370,14 @@ class DeclarativeTest(DeclarativeTestBase):
hasattr(User.addresses, 'property')
- # the exeption is preserved
-
- assert_raises_message(sa.exc.InvalidRequestError,
- r"suppressed within a hasattr\(\)",
- compile_mappers)
+ # the exception is preserved. Remains the
+ # same through repeated calls.
+ for i in range(3):
+ assert_raises_message(sa.exc.InvalidRequestError,
+ "^One or more mappers failed to initialize - "
+ "can't proceed with initialization of other "
+ "mappers. Original exception was: When initializing.*",
+ compile_mappers)
def test_custom_base(self):
class MyBase(object):
@@ -690,7 +694,7 @@ class DeclarativeTest(DeclarativeTestBase):
eq_(sess.query(User).all(), [User(name='u1', address_count=2,
addresses=[Address(email='one'), Address(email='two')])])
- def test_useless_classproperty(self):
+ def test_useless_declared_attr(self):
class Address(Base, ComparableEntity):
__tablename__ = 'addresses'
@@ -707,7 +711,7 @@ class DeclarativeTest(DeclarativeTestBase):
name = Column('name', String(50))
addresses = relationship('Address', backref='user')
- @classproperty
+ @declared_attr
def address_count(cls):
# this doesn't really gain us anything. but if
# one is used, lets have it function as expected...
@@ -2194,7 +2198,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_inherited(self):
class MyMixin:
- @classproperty
+ @declared_attr
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
@@ -2203,11 +2207,23 @@ class DeclarativeMixinTest(DeclarativeTestBase):
pass
eq_(MyModel.__table__.name, 'mymodel')
+
+ def test_classproperty_still_works(self):
+ class MyMixin(object):
+ @classproperty
+ def __tablename__(cls):
+ return cls.__name__.lower()
+ id = Column(Integer, primary_key=True)
+
+ class MyModel(Base, MyMixin):
+ __tablename__ = 'overridden'
+ eq_(MyModel.__table__.name, 'overridden')
+
def test_table_name_not_inherited(self):
class MyMixin:
- @classproperty
+ @declared_attr
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
@@ -2220,12 +2236,12 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_inheritance_order(self):
class MyMixin1:
- @classproperty
+ @declared_attr
def __tablename__(cls):
return cls.__name__.lower() + '1'
class MyMixin2:
- @classproperty
+ @declared_attr
def __tablename__(cls):
return cls.__name__.lower() + '2'
@@ -2237,7 +2253,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_name_dependent_on_subclass(self):
class MyHistoryMixin:
- @classproperty
+ @declared_attr
def __tablename__(cls):
return cls.parent_name + '_changelog'
@@ -2261,7 +2277,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
def test_table_args_inherited_descriptor(self):
class MyMixin:
- @classproperty
+ @declared_attr
def __table_args__(cls):
return {'info': cls.__name__}
@@ -2288,6 +2304,100 @@ class DeclarativeMixinTest(DeclarativeTestBase):
assert Specific.__table__ is General.__table__
eq_(General.__table__.kwargs, {'mysql_engine': 'InnoDB'})
+ def test_columns_single_table_inheritance(self):
+ """Test a column on a mixin with an alternate attribute name,
+ mapped to a superclass and single-table inheritance subclass.
+ The superclass table gets the column, the subclass shares
+ the MapperProperty.
+
+ """
+
+ class MyMixin(object):
+ foo = Column('foo', Integer)
+ bar = Column('bar_newname', Integer)
+
+ class General(Base, MyMixin):
+ __tablename__ = 'test'
+ id = Column(Integer, primary_key=True)
+ type_ = Column(String(50))
+ __mapper_args__ = {'polymorphic_on': type_}
+
+ class Specific(General):
+ __mapper_args__ = {'polymorphic_identity': 'specific'}
+
+ assert General.bar.prop.columns[0] is General.__table__.c.bar_newname
+ assert len(General.bar.prop.columns) == 1
+ assert Specific.bar.prop is General.bar.prop
+
+ def test_columns_joined_table_inheritance(self):
+ """Test a column on a mixin with an alternate attribute name,
+ mapped to a superclass and joined-table inheritance subclass.
+ Both tables get the column, in the case of the subclass the two
+ columns are joined under one MapperProperty.
+
+ """
+
+ class MyMixin(object):
+ foo = Column('foo', Integer)
+ bar = Column('bar_newname', Integer)
+
+ class General(Base, MyMixin):
+ __tablename__ = 'test'
+ id = Column(Integer, primary_key=True)
+ type_ = Column(String(50))
+ __mapper_args__ = {'polymorphic_on': type_}
+
+ class Specific(General):
+ __tablename__ = 'sub'
+ id = Column(Integer, ForeignKey('test.id'), primary_key=True)
+ __mapper_args__ = {'polymorphic_identity': 'specific'}
+
+ assert General.bar.prop.columns[0] is General.__table__.c.bar_newname
+ assert len(General.bar.prop.columns) == 1
+ assert Specific.bar.prop is not General.bar.prop
+ assert len(Specific.bar.prop.columns) == 2
+ assert Specific.bar.prop.columns[0] is General.__table__.c.bar_newname
+ assert Specific.bar.prop.columns[1] is Specific.__table__.c.bar_newname
+
+ def test_column_join_checks_superclass_type(self):
+ """Test that the logic which joins subclass props to those
+ of the superclass checks that the superclass property is a column.
+
+ """
+ class General(Base):
+ __tablename__ = 'test'
+ id = Column(Integer, primary_key=True)
+ general_id = Column(Integer, ForeignKey('test.id'))
+ type_ = relationship("General")
+
+ class Specific(General):
+ __tablename__ = 'sub'
+ id = Column(Integer, ForeignKey('test.id'), primary_key=True)
+ type_ = Column('foob', String(50))
+
+ assert isinstance(General.type_.property, sa.orm.RelationshipProperty)
+ assert Specific.type_.property.columns[0] is Specific.__table__.c.foob
+
+ def test_column_join_checks_subclass_type(self):
+ """Test that the logic which joins subclass props to those
+ of the superclass checks that the subclass property is a column.
+
+ """
+ def go():
+ class General(Base):
+ __tablename__ = 'test'
+ id = Column(Integer, primary_key=True)
+ type_ = Column('foob', Integer)
+
+ class Specific(General):
+ __tablename__ = 'sub'
+ id = Column(Integer, ForeignKey('test.id'), primary_key=True)
+ specific_id = Column(Integer, ForeignKey('sub.id'))
+ type_ = relationship("Specific")
+ assert_raises_message(
+ sa.exc.ArgumentError, "column 'foob' conflicts with property", go
+ )
+
def test_table_args_overridden(self):
class MyMixin:
@@ -2300,10 +2410,10 @@ class DeclarativeMixinTest(DeclarativeTestBase):
eq_(MyModel.__table__.kwargs, {'mysql_engine': 'InnoDB'})
- def test_mapper_args_classproperty(self):
+ def test_mapper_args_declared_attr(self):
class ComputedMapperArgs:
- @classproperty
+ @declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Person':
return {'polymorphic_on': cls.discriminator}
@@ -2323,13 +2433,13 @@ class DeclarativeMixinTest(DeclarativeTestBase):
is Person.__table__.c.type
eq_(class_mapper(Engineer).polymorphic_identity, 'Engineer')
- def test_mapper_args_classproperty_two(self):
+ def test_mapper_args_declared_attr_two(self):
- # same as test_mapper_args_classproperty, but we repeat
+ # same as test_mapper_args_declared_attr, but we repeat
# ComputedMapperArgs on both classes for no apparent reason.
class ComputedMapperArgs:
- @classproperty
+ @declared_attr
def __mapper_args__(cls):
if cls.__name__ == 'Person':
return {'polymorphic_on': cls.discriminator}
@@ -2364,7 +2474,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__tablename__ = 'test'
- @classproperty
+ @declared_attr
def __table_args__(self):
info = {}
args = dict(info=info)
@@ -2392,7 +2502,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class MyMixin:
- @classproperty
+ @declared_attr
def __mapper_args__(cls):
# tenuous, but illustrates the problem!
@@ -2454,19 +2564,93 @@ class DeclarativeMixinTest(DeclarativeTestBase):
__tablename__ = 'test'
- @classproperty
- def __mapper_args__(self):
+ @declared_attr
+ def __mapper_args__(cls):
args = {}
args.update(MyMixin1.__mapper_args__)
args.update(MyMixin2.__mapper_args__)
+ if cls.__name__ != 'MyModel':
+ args.pop('polymorphic_on')
+ args['polymorphic_identity'] = cls.__name__
+
return args
id = Column(Integer, primary_key=True)
-
- col = MyModel.__mapper__.polymorphic_on
- eq_(col.name, 'type_')
- assert col.table is not None
+
+ class MySubModel(MyModel):
+ pass
+
+ eq_(
+ MyModel.__mapper__.polymorphic_on.name,
+ 'type_'
+ )
+ assert MyModel.__mapper__.polymorphic_on.table is not None
eq_(MyModel.__mapper__.always_refresh, True)
+ eq_(MySubModel.__mapper__.always_refresh, True)
+ eq_(MySubModel.__mapper__.polymorphic_identity, 'MySubModel')
+
+ def test_mapper_args_property(self):
+ class MyModel(Base):
+
+ @declared_attr
+ def __tablename__(cls):
+ return cls.__name__.lower()
+
+ @declared_attr
+ def __table_args__(cls):
+ return {'mysql_engine':'InnoDB'}
+
+ @declared_attr
+ def __mapper_args__(cls):
+ args = {}
+ args['polymorphic_identity'] = cls.__name__
+ return args
+ id = Column(Integer, primary_key=True)
+
+ class MySubModel(MyModel):
+ id = Column(Integer, ForeignKey('mymodel.id'), primary_key=True)
+ class MySubModel2(MyModel):
+ __tablename__ = 'sometable'
+ id = Column(Integer, ForeignKey('mymodel.id'), primary_key=True)
+
+ eq_(MyModel.__mapper__.polymorphic_identity, 'MyModel')
+ eq_(MySubModel.__mapper__.polymorphic_identity, 'MySubModel')
+ eq_(MyModel.__table__.kwargs['mysql_engine'], 'InnoDB')
+ eq_(MySubModel.__table__.kwargs['mysql_engine'], 'InnoDB')
+ eq_(MySubModel2.__table__.kwargs['mysql_engine'], 'InnoDB')
+ eq_(MyModel.__table__.name, 'mymodel')
+ eq_(MySubModel.__table__.name, 'mysubmodel')
+
+ def test_mapper_args_custom_base(self):
+ """test the @declared_attr approach from a custom base."""
+
+ class Base(object):
+ @declared_attr
+ def __tablename__(cls):
+ return cls.__name__.lower()
+
+ @declared_attr
+ def __table_args__(cls):
+ return {'mysql_engine':'InnoDB'}
+
+ @declared_attr
+ def id(self):
+ return Column(Integer, primary_key=True)
+
+ Base = decl.declarative_base(cls=Base)
+
+ class MyClass(Base):
+ pass
+
+ class MyOtherClass(Base):
+ pass
+
+ eq_(MyClass.__table__.kwargs['mysql_engine'], 'InnoDB')
+ eq_(MyClass.__table__.name, 'myclass')
+ eq_(MyOtherClass.__table__.name, 'myotherclass')
+ assert MyClass.__table__.c.id.table is MyClass.__table__
+ assert MyOtherClass.__table__.c.id.table is MyOtherClass.__table__
+
def test_single_table_no_propagation(self):
class IdColumn:
@@ -2494,7 +2678,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class CommonMixin:
- @classproperty
+ @declared_attr
def __tablename__(cls):
return cls.__name__.lower()
__table_args__ = {'mysql_engine': 'InnoDB'}
@@ -2524,7 +2708,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class CommonMixin:
- @classproperty
+ @declared_attr
def __tablename__(cls):
return cls.__name__.lower()
__table_args__ = {'mysql_engine': 'InnoDB'}
@@ -2561,7 +2745,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class NoJoinedTableNameMixin:
- @classproperty
+ @declared_attr
def __tablename__(cls):
if decl.has_inherited_table(cls):
return None
@@ -2589,7 +2773,7 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class TableNameMixin:
- @classproperty
+ @declared_attr
def __tablename__(cls):
if decl.has_inherited_table(cls) and TableNameMixin \
not in cls.__bases__:
@@ -2656,7 +2840,6 @@ class DeclarativeMixinTest(DeclarativeTestBase):
class ColumnMixin:
tada = Column(Integer)
-
def go():
class Model(Base, ColumnMixin):
@@ -2670,6 +2853,29 @@ class DeclarativeMixinTest(DeclarativeTestBase):
"Can't add additional column 'tada' when "
"specifying __table__", go)
+ def test_table_in_model_and_different_named_alt_key_column_in_mixin(self):
+
+ # here, the __table__ has a column 'tada'. We disallow
+ # the add of the 'foobar' column, even though it's
+ # keyed to 'tada'.
+
+ class ColumnMixin:
+ tada = Column('foobar', Integer)
+
+ def go():
+
+ class Model(Base, ColumnMixin):
+
+ __table__ = Table('foo', Base.metadata,
+ Column('data',Integer),
+ Column('tada', Integer),
+ Column('id', Integer,primary_key=True))
+ foo = relationship("Dest")
+
+ assert_raises_message(sa.exc.ArgumentError,
+ "Can't add additional column 'foobar' when "
+ "specifying __table__", go)
+
def test_table_in_model_overrides_different_typed_column_in_mixin(self):
class ColumnMixin:
@@ -2714,7 +2920,7 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
class MyMixin(object):
- @classproperty
+ @declared_attr
def prop_hoho(cls):
return column_property(Column('prop', String(50)))
@@ -2753,20 +2959,20 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
def test_doc(self):
"""test documentation transfer.
- the documentation situation with @classproperty is problematic.
+ the documentation situation with @declared_attr is problematic.
at least see if mapped subclasses get the doc.
"""
class MyMixin(object):
- @classproperty
+ @declared_attr
def type_(cls):
"""this is a document."""
return Column(String(50))
- @classproperty
+ @declared_attr
def t2(cls):
"""this is another document."""
@@ -2785,7 +2991,7 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
class MyMixin(object):
- @classproperty
+ @declared_attr
def type_(cls):
return Column(String(50))
__mapper_args__ = {'polymorphic_on': type_}
@@ -2804,7 +3010,7 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
class MyMixin(object):
- @classproperty
+ @declared_attr
def data(cls):
return deferred(Column('data', String(50)))
@@ -2828,19 +3034,19 @@ class DeclarativeMixinPropertyTest(DeclarativeTestBase):
class RefTargetMixin(object):
- @classproperty
+ @declared_attr
def target_id(cls):
return Column('target_id', ForeignKey('target.id'))
if usestring:
- @classproperty
+ @declared_attr
def target(cls):
return relationship('Target',
primaryjoin='Target.id==%s.target_id'
% cls.__name__)
else:
- @classproperty
+ @declared_attr
def target(cls):
return relationship('Target')
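For reference, the declared_attr idiom that replaces @classproperty throughout these tests — a minimal, self-contained sketch (class names are illustrative):

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base, declared_attr

    Base = declarative_base()

    class TablenameMixin(object):
        # invoked once per mapped subclass, receiving the class itself
        @declared_attr
        def __tablename__(cls):
            return cls.__name__.lower()

    class Widget(Base, TablenameMixin):
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    assert Widget.__table__.name == 'widget'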
diff --git a/test/orm/inheritance/test_magazine.py b/test/orm/inheritance/test_magazine.py
index 36ac7f919..125a5629c 100644
--- a/test/orm/inheritance/test_magazine.py
+++ b/test/orm/inheritance/test_magazine.py
@@ -187,17 +187,18 @@ def generate_round_trip_test(use_unions=False, use_joins=False):
pub = Publication(name='Test')
issue = Issue(issue=46,publication=pub)
-
location = Location(ref='ABC',name='London',issue=issue)
page_size = PageSize(name='A4',width=210,height=297)
magazine = Magazine(location=location,size=page_size)
+
page = ClassifiedPage(magazine=magazine,page_no=1)
page2 = MagazinePage(magazine=magazine,page_no=2)
page3 = ClassifiedPage(magazine=magazine,page_no=3)
session.add(pub)
+
session.flush()
print [x for x in session]
session.expunge_all()
diff --git a/test/orm/inheritance/test_single.py b/test/orm/inheritance/test_single.py
index 420430954..2efde2b32 100644
--- a/test/orm/inheritance/test_single.py
+++ b/test/orm/inheritance/test_single.py
@@ -252,10 +252,12 @@ class RelationshipFromSingleTest(testing.AssertsCompiledSQL, MappedTest):
'employee_stuff_name, anon_1.employee_id '
'AS anon_1_employee_id FROM (SELECT '
'employee.id AS employee_id FROM employee '
- 'WHERE employee.type IN (?)) AS anon_1 '
+ 'WHERE employee.type IN (:type_1)) AS anon_1 '
'JOIN employee_stuff ON anon_1.employee_id '
'= employee_stuff.employee_id ORDER BY '
- 'anon_1.employee_id')
+ 'anon_1.employee_id',
+ use_default_dialect=True
+ )
class RelationshipToSingleTest(MappedTest):
@classmethod
diff --git a/test/orm/test_cascade.py b/test/orm/test_cascade.py
index 1c935df13..75b9e22ec 100644
--- a/test/orm/test_cascade.py
+++ b/test/orm/test_cascade.py
@@ -4,7 +4,7 @@ from sqlalchemy import Integer, String, ForeignKey, Sequence, \
exc as sa_exc
from sqlalchemy.test.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, create_session, \
- sessionmaker, class_mapper, backref
+ sessionmaker, class_mapper, backref, Session
from sqlalchemy.orm import attributes, exc as orm_exc
from sqlalchemy.test import testing
from sqlalchemy.test.testing import eq_
@@ -939,6 +939,67 @@ class M2MCascadeTest(_base.MappedTest):
assert b1 not in a1.bs
assert b1 in a2.bs
+class NoBackrefCascadeTest(_fixtures.FixtureTest):
+ run_inserts = None
+
+ @classmethod
+ @testing.resolve_artifact_names
+ def setup_mappers(cls):
+ mapper(Address, addresses)
+ mapper(User, users, properties={
+ 'addresses':relationship(Address, backref='user',
+ cascade_backrefs=False)
+ })
+
+ mapper(Dingaling, dingalings, properties={
+ 'address' : relationship(Address, backref='dingalings',
+ cascade_backrefs=False)
+ })
+
+ @testing.resolve_artifact_names
+ def test_o2m(self):
+ sess = Session()
+
+ u1 = User(name='u1')
+ sess.add(u1)
+
+ a1 = Address(email_address='a1')
+ a1.user = u1
+ assert a1 not in sess
+
+ sess.commit()
+
+ assert a1 not in sess
+
+ sess.add(a1)
+
+ d1 = Dingaling()
+ d1.address = a1
+ assert d1 in a1.dingalings
+ assert d1 in sess
+
+ sess.commit()
+
+ @testing.resolve_artifact_names
+ def test_m2o(self):
+ sess = Session()
+
+ a1 = Address(email_address='a1')
+ d1 = Dingaling()
+ sess.add(d1)
+
+ a1.dingalings.append(d1)
+ assert a1 not in sess
+
+ a2 = Address(email_address='a2')
+ sess.add(a2)
+
+ u1 = User(name='u1')
+ u1.addresses.append(a2)
+ assert u1 in sess
+
+ sess.commit()
+
class UnsavedOrphansTest(_base.MappedTest):
"""Pending entities that are orphans"""
diff --git a/test/orm/test_load_on_fks.py b/test/orm/test_load_on_fks.py
index d32a35933..8e4f53b0d 100644
--- a/test/orm/test_load_on_fks.py
+++ b/test/orm/test_load_on_fks.py
@@ -6,6 +6,7 @@ from sqlalchemy.test.testing import TestBase, eq_, AssertsExecutionResults, asse
from sqlalchemy.test import testing
from sqlalchemy.orm.attributes import instance_state
from sqlalchemy.orm.exc import FlushError
+from sqlalchemy.test.schema import Table, Column
engine = testing.db
@@ -18,13 +19,13 @@ class FlushOnPendingTest(AssertsExecutionResults, TestBase):
class Parent(Base):
__tablename__ = 'parent'
- id= Column(Integer, primary_key=True)
+ id= Column(Integer, primary_key=True, test_needs_autoincrement=True)
name = Column(String(50), nullable=False)
children = relationship("Child", load_on_pending=True)
class Child(Base):
__tablename__ = 'child'
- id= Column(Integer, primary_key=True)
+ id= Column(Integer, primary_key=True, test_needs_autoincrement=True)
parent_id = Column(Integer, ForeignKey('parent.id'))
Base.metadata.create_all(engine)
@@ -66,13 +67,13 @@ class LoadOnFKsTest(AssertsExecutionResults, TestBase):
__tablename__ = 'parent'
__table_args__ = {'mysql_engine':'InnoDB'}
- id= Column(Integer, primary_key=True)
+ id= Column(Integer, primary_key=True, test_needs_autoincrement=True)
class Child(Base):
__tablename__ = 'child'
__table_args__ = {'mysql_engine':'InnoDB'}
- id= Column(Integer, primary_key=True)
+ id= Column(Integer, primary_key=True, test_needs_autoincrement=True)
parent_id = Column(Integer, ForeignKey('parent.id'))
parent = relationship(Parent, backref=backref("children"))
diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py
index 1b0aa14f9..05cf1fd31 100644
--- a/test/orm/test_mapper.py
+++ b/test/orm/test_mapper.py
@@ -88,14 +88,25 @@ class MapperTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_exceptions_sticky(self):
- """test preservation of mapper compile errors raised during hasattr()."""
+ """test preservation of mapper compile errors raised during hasattr(),
+ as well as for redundant mapper compile calls. Test that
+ repeated calls don't stack up error messages.
+
+ """
mapper(Address, addresses, properties={
'user':relationship(User)
})
hasattr(Address.user, 'property')
- assert_raises_message(sa.exc.InvalidRequestError, r"suppressed within a hasattr\(\)", compile_mappers)
+ for i in range(3):
+ assert_raises_message(sa.exc.InvalidRequestError,
+ "^One or more mappers failed to "
+ "initialize - can't proceed with "
+ "initialization of other mappers. "
+ "Original exception was: Class "
+ "'test.orm._fixtures.User' is not mapped$"
+ , compile_mappers)
@testing.resolve_artifact_names
def test_column_prefix(self):
@@ -157,13 +168,15 @@ class MapperTest(_fixtures.FixtureTest):
@testing.resolve_artifact_names
def test_column_not_present(self):
- assert_raises_message(sa.exc.ArgumentError, "not represented in the mapper's table", mapper, User, users, properties={
- 'foo':addresses.c.user_id
- })
+ assert_raises_message(sa.exc.ArgumentError,
+ "not represented in the mapper's table",
+ mapper, User, users, properties={'foo'
+ : addresses.c.user_id})
@testing.resolve_artifact_names
def test_bad_constructor(self):
"""If the construction of a mapped class fails, the instance does not get placed in the session"""
+
class Foo(object):
def __init__(self, one, two, _sa_session=None):
pass
@@ -487,7 +500,10 @@ class MapperTest(_fixtures.FixtureTest):
class HasDef(object):
def name(self):
pass
-
+ class Empty(object): pass
+
+ empty = mapper(Empty, t, properties={'empty_id' : t.c.id},
+ include_properties=[])
p_m = mapper(Person, t, polymorphic_on=t.c.type,
include_properties=('id', 'type', 'name'))
e_m = mapper(Employee, inherits=p_m,
@@ -546,6 +562,7 @@ class MapperTest(_fixtures.FixtureTest):
# excluding the discriminator column is currently not allowed
class Foo(Person):
pass
+ assert_props(Empty, ['empty_id'])
assert_raises(
sa.exc.InvalidRequestError,
diff --git a/test/orm/test_naturalpks.py b/test/orm/test_naturalpks.py
index d02ecb707..ca88251d7 100644
--- a/test/orm/test_naturalpks.py
+++ b/test/orm/test_naturalpks.py
@@ -972,7 +972,7 @@ class JoinedInheritanceTest(_base.MappedTest):
self._test_fk(True)
# PG etc. need passive=True to allow PK->PK cascade
- @testing.fails_on_everything_except('sqlite', 'mysql+zxjdbc',
+ @testing.fails_on_everything_except('sqlite', 'mysql+zxjdbc', 'oracle',
'postgresql+zxjdbc')
def test_fk_nonpassive(self):
self._test_fk(False)
@@ -1026,13 +1026,18 @@ class JoinedInheritanceTest(_base.MappedTest):
e1, e2, m1
])
sess.commit()
+
+ eq_(e1.boss_name, 'dogbert')
+ eq_(e2.boss_name, 'dogbert')
+ sess.expire_all()
m1.name = 'pointy haired'
e1.primary_language = 'scala'
e2.primary_language = 'cobol'
sess.commit()
-
+ eq_(e1.boss_name, 'pointy haired')
+ eq_(e2.boss_name, 'pointy haired')
diff --git a/test/orm/test_query.py b/test/orm/test_query.py
index 3a6436610..91c09be63 100644
--- a/test/orm/test_query.py
+++ b/test/orm/test_query.py
@@ -2543,6 +2543,30 @@ class JoinTest(QueryTest, AssertsCompiledSQL):
"Could not find a FROM",
sess.query(users.c.id).select_from(users).join, User
)
+
+ def test_select_from(self):
+ """Test that the left edge of the join can be set reliably with select_from()."""
+
+ sess = create_session()
+ self.assert_compile(
+ sess.query(Item.id).select_from(User).join(User.orders).join(Order.items),
+ "SELECT items.id AS items_id FROM users JOIN orders ON "
+ "users.id = orders.user_id JOIN order_items AS order_items_1 "
+ "ON orders.id = order_items_1.order_id JOIN items ON items.id = "
+ "order_items_1.item_id",
+ use_default_dialect=True
+ )
+
+ # here, the join would otherwise add a second FROM clause
+ # for "Item", but select_from() disallows that
+ self.assert_compile(
+ sess.query(Item.id).select_from(User).join((Item, User.id==Item.id)),
+ "SELECT items.id AS items_id FROM users JOIN items ON users.id = items.id",
+ use_default_dialect=True
+ )
+
+
+
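
For reference, a minimal sketch of what the two assertions above pin down
(User, Order and Item are the standard fixture classes; a sketch assuming
the 0.6 Query API):

    sess = create_session()
    # select_from(User) pins the left edge of the join chain, so the
    # statement begins with "FROM users" even though only Item.id
    # appears in the columns clause
    q = sess.query(Item.id).select_from(User).join(User.orders).join(Order.items)
    print q   # renders the first SELECT asserted above
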
def test_from_self_resets_joinpaths(self):
"""test a join from from_self() doesn't confuse joins inside the subquery
@@ -3618,6 +3642,89 @@ class CustomJoinTest(QueryTest):
[User(id=7)]
)
+class SelfRefMixedTest(_base.MappedTest, AssertsCompiledSQL):
+ run_setup_mappers = 'once'
+ __dialect__ = default.DefaultDialect()
+
+ @classmethod
+ def define_tables(cls, metadata):
+ nodes = Table('nodes', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('parent_id', Integer, ForeignKey('nodes.id'))
+ )
+
+ sub_table = Table('sub_table', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('node_id', Integer, ForeignKey('nodes.id')),
+ )
+
+ assoc_table = Table('assoc_table', metadata,
+ Column('left_id', Integer, ForeignKey('nodes.id')),
+ Column('right_id', Integer, ForeignKey('nodes.id'))
+ )
+
+ @classmethod
+ @testing.resolve_artifact_names
+ def setup_classes(cls):
+ class Node(Base):
+ pass
+
+ class Sub(Base):
+ pass
+
+ mapper(Node, nodes, properties={
+ 'children':relationship(Node, lazy='select', join_depth=3,
+ backref=backref('parent', remote_side=[nodes.c.id])
+ ),
+ 'subs': relationship(Sub),
+ 'assoc':relationship(Node,
+ secondary=assoc_table,
+ primaryjoin=nodes.c.id==assoc_table.c.left_id,
+ secondaryjoin=nodes.c.id==assoc_table.c.right_id)
+ })
+ mapper(Sub, sub_table)
+
+ @testing.resolve_artifact_names
+ def test_o2m_aliased_plus_o2m(self):
+ sess = create_session()
+ n1 = aliased(Node)
+
+ self.assert_compile(
+ sess.query(Node).join((n1, Node.children)).join((Sub, n1.subs)),
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
+ "FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
+ "JOIN sub_table ON nodes_1.id = sub_table.node_id"
+ )
+
+ self.assert_compile(
+ sess.query(Node).join((n1, Node.children)).join((Sub, Node.subs)),
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
+ "FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
+ "JOIN sub_table ON nodes.id = sub_table.node_id"
+ )
+
+ @testing.resolve_artifact_names
+ def test_m2m_aliased_plus_o2m(self):
+ sess = create_session()
+ n1 = aliased(Node)
+
+ self.assert_compile(
+ sess.query(Node).join((n1, Node.assoc)).join((Sub, n1.subs)),
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
+ "FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
+ "assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
+ "assoc_table_1.right_id JOIN sub_table ON nodes_1.id = sub_table.node_id",
+ )
+
+ self.assert_compile(
+ sess.query(Node).join((n1, Node.assoc)).join((Sub, Node.subs)),
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
+ "FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
+ "assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
+ "assoc_table_1.right_id JOIN sub_table ON nodes.id = sub_table.node_id",
+ )
+
+
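
The assertions above all use the tuple form of Query.join(); condensed,
the rule they establish is (a sketch, with Node and Sub mapped as in
setup_classes()):

    sess = create_session()
    n1 = aliased(Node)
    # in a (target, attribute) tuple, the class the attribute is drawn
    # from decides which node the next ON clause joins against
    sess.query(Node).join((n1, Node.children)).join((Sub, n1.subs))    # sub_table -> nodes_1
    sess.query(Node).join((n1, Node.children)).join((Sub, Node.subs))  # sub_table -> nodes
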
class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
run_setup_mappers = 'once'
run_inserts = 'once'
@@ -3630,20 +3737,23 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('parent_id', Integer, ForeignKey('nodes.id')),
Column('data', String(30)))
-
+
@classmethod
def insert_data(cls):
- global Node
-
+ # TODO: using setup_classes() here somehow
+ # breaks the other tests.
+
+ global Node, Sub
class Node(Base):
def append(self, node):
self.children.append(node)
-
+
mapper(Node, nodes, properties={
'children':relationship(Node, lazy='select', join_depth=3,
backref=backref('parent', remote_side=[nodes.c.id])
- )
+ ),
})
+
sess = create_session()
n1 = Node(data='n1')
n1.append(Node(data='n11'))
@@ -3656,6 +3766,7 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
sess.flush()
sess.close()
+ @testing.resolve_artifact_names
def test_join(self):
sess = create_session()
@@ -3673,6 +3784,7 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
join('parent', aliased=True, from_joinpoint=True).filter_by(data='n1').first()
assert node.data == 'n122'
+ @testing.resolve_artifact_names
def test_string_or_prop_aliased(self):
"""test that join('foo') behaves the same as join(Cls.foo) in a self
referential scenario.
@@ -3721,6 +3833,7 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
use_default_dialect=True
)
+ @testing.resolve_artifact_names
def test_from_self_inside_excludes_outside(self):
"""test the propagation of aliased() from inside to outside
on a from_self()..
@@ -3774,11 +3887,48 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
use_default_dialect=True
)
+ @testing.resolve_artifact_names
def test_explicit_join(self):
sess = create_session()
n1 = aliased(Node)
n2 = aliased(Node)
+
+ self.assert_compile(
+ join(Node, n1, 'children').join(n2, 'children'),
+ "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
+ use_default_dialect=True
+ )
+
+ self.assert_compile(
+ join(Node, n1, Node.children).join(n2, n1.children),
+ "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
+ use_default_dialect=True
+ )
+
+ # having to pass join_to_left=False here is unfortunate; the default
+ # for this flag should be False.
+ self.assert_compile(
+ join(Node, n1, Node.children).join(n2, Node.children, join_to_left=False),
+ "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
+ use_default_dialect=True
+ )
+
+ self.assert_compile(
+ sess.query(Node).join((n1, Node.children)).join((n2, n1.children)),
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, nodes.data AS "
+ "nodes_data FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
+ "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
+ use_default_dialect=True
+ )
+
+ self.assert_compile(
+ sess.query(Node).join((n1, Node.children)).join((n2, Node.children)),
+ "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, nodes.data AS "
+ "nodes_data FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
+ "JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
+ use_default_dialect=True
+ )
node = sess.query(Node).select_from(join(Node, n1, 'children')).filter(n1.data=='n122').first()
assert node.data=='n12'
@@ -3800,7 +3950,8 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
list(sess.query(Node).select_from(join(Node, n1, 'parent').join(n2, 'parent')).\
filter(and_(Node.data=='n122', n1.data=='n12', n2.data=='n1')).values(Node.data, n1.data, n2.data)),
[('n122', 'n12', 'n1')])
-
+
+ @testing.resolve_artifact_names
def test_join_to_nonaliased(self):
sess = create_session()
@@ -3819,6 +3970,7 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
)
+ @testing.resolve_artifact_names
def test_multiple_explicit_entities(self):
sess = create_session()
@@ -3868,6 +4020,7 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
)
+ @testing.resolve_artifact_names
def test_any(self):
sess = create_session()
eq_(sess.query(Node).filter(Node.children.any(Node.data=='n1')).all(), [])
@@ -3875,6 +4028,7 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id).all(),
[Node(data='n11'), Node(data='n13'),Node(data='n121'),Node(data='n122'),Node(data='n123'),])
+ @testing.resolve_artifact_names
def test_has(self):
sess = create_session()
@@ -3883,6 +4037,7 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n122')).all(), [])
eq_(sess.query(Node).filter(~Node.parent.has()).all(), [Node(data='n1')])
+ @testing.resolve_artifact_names
def test_contains(self):
sess = create_session()
@@ -3892,6 +4047,7 @@ class SelfReferentialTest(_base.MappedTest, AssertsCompiledSQL):
n13 = sess.query(Node).filter(Node.data=='n13').one()
eq_(sess.query(Node).filter(Node.children.contains(n13)).all(), [Node(data='n1')])
+ @testing.resolve_artifact_names
def test_eq_ne(self):
sess = create_session()
@@ -4202,7 +4358,8 @@ class UpdateDeleteTest(_base.MappedTest):
def setup_mappers(cls):
mapper(User, users)
mapper(Document, documents, properties={
- 'user': relationship(User, lazy='joined', backref=backref('documents', lazy='select'))
+ 'user': relationship(User, lazy='joined',
+ backref=backref('documents', lazy='select'))
})
@testing.resolve_artifact_names
@@ -4321,6 +4478,34 @@ class UpdateDeleteTest(_base.MappedTest):
eq_([john.age, jack.age, jill.age, jane.age], [15,27,19,27])
eq_(sess.query(User.age).order_by(User.id).all(), zip([15,27,19,27]))
+ @testing.resolve_artifact_names
+ @testing.provide_metadata
+ def test_update_attr_names(self):
+ data = Table('data', metadata,
+ Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
+ Column('counter', Integer, nullable=False, default=0)
+ )
+ class Data(_base.ComparableEntity):
+ pass
+
+ mapper(Data, data, properties={'cnt':data.c.counter})
+ metadata.create_all()
+ d1 = Data()
+ sess = Session()
+ sess.add(d1)
+ sess.commit()
+ eq_(d1.cnt, 0)
+
+ sess.query(Data).update({Data.cnt:Data.cnt + 1})
+ sess.flush()
+
+ eq_(d1.cnt, 1)
+
+ sess.query(Data).update({Data.cnt:Data.cnt + 1}, 'fetch')
+ sess.flush()
+
+ eq_(d1.cnt, 2)
+ sess.close()
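
A quick sketch of the contract the new test adds (Data and its 'cnt'
attribute as mapped above; the second positional argument of
Query.update() selects the synchronization strategy):

    sess = Session()
    # keys may be mapped attributes whose key ('cnt') differs from the
    # underlying column name ('counter')
    sess.query(Data).update({Data.cnt: Data.cnt + 1})
    # 'fetch' re-selects affected rows to refresh in-memory state
    sess.query(Data).update({Data.cnt: Data.cnt + 1}, 'fetch')
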
@testing.resolve_artifact_names
def test_update_with_bindparams(self):
diff --git a/test/orm/test_scoping.py b/test/orm/test_scoping.py
index 7db74308b..1682e0f7e 100644
--- a/test/orm/test_scoping.py
+++ b/test/orm/test_scoping.py
@@ -76,7 +76,22 @@ class ScopedSessionTest(_base.MappedTest):
assert not isinstance(SomeOtherObject.query, CustomQuery)
assert isinstance(SomeOtherObject.custom_query, query.Query)
-
+ def test_config_errors(self):
+ Session = scoped_session(sa.orm.sessionmaker())
+
+ s = Session()
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ "Scoped session is already present",
+ Session, bind=testing.db
+ )
+
+ assert_raises_message(
+ sa.exc.SAWarning,
+ "At least one scoped session is already present. ",
+ Session.configure, bind=testing.db
+ )
+
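
A minimal sketch of the configuration contract asserted above (assuming
the 0.6 scoped_session API; some_engine is a placeholder engine):

    Session = scoped_session(sa.orm.sessionmaker())
    s = Session()                         # a session now exists in the registry
    Session(bind=some_engine)             # raises InvalidRequestError
    Session.configure(bind=some_engine)   # emits SAWarning: already present
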
class ScopedMapperTest(_ScopedTest):
@classmethod
diff --git a/test/orm/test_session.py b/test/orm/test_session.py
index b7ae104a1..6ac42a6b3 100644
--- a/test/orm/test_session.py
+++ b/test/orm/test_session.py
@@ -713,12 +713,39 @@ class SessionTest(_fixtures.FixtureTest):
sess.flush()
sess.rollback()
assert_raises_message(sa.exc.InvalidRequestError,
- 'inactive due to a rollback in a '
- 'subtransaction', sess.begin,
- subtransactions=True)
+ "This Session's transaction has been "
+ r"rolled back by a nested rollback\(\) "
+ "call. To begin a new transaction, "
+ r"issue Session.rollback\(\) first.",
+ sess.begin, subtransactions=True)
sess.close()
@testing.resolve_artifact_names
+ def test_preserve_flush_error(self):
+ mapper(User, users)
+ sess = Session()
+
+ sess.add(User(id=5))
+ assert_raises(
+ sa.exc.DBAPIError,
+ sess.commit
+ )
+
+ for i in range(5):
+ assert_raises_message(sa.exc.InvalidRequestError,
+ "^This Session's transaction has been "
+ r"rolled back due to a previous exception during flush. To "
+ "begin a new transaction with this "
+ "Session, first issue "
+ r"Session.rollback\(\). Original exception "
+ "was:",
+ sess.commit)
+ sess.rollback()
+ sess.add(User(id=5, name='some name'))
+ sess.commit()
+
+
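
The idiom being locked down, as a hedged sketch (User(id=5) presumably
violates a NOT NULL constraint in the fixture schema):

    sess = Session()
    sess.add(User(id=5))
    try:
        sess.commit()        # flush fails, the transaction is now invalid
    except sa.exc.DBAPIError:
        pass
    # each further commit() re-raises InvalidRequestError carrying the
    # original flush error, until rollback() clears the transaction
    sess.rollback()
    sess.add(User(id=5, name='some name'))
    sess.commit()            # succeeds
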
+ @testing.resolve_artifact_names
def test_no_autocommit_with_explicit_commit(self):
mapper(User, users)
session = create_session(autocommit=False)
diff --git a/test/sql/test_compiler.py b/test/sql/test_compiler.py
index b7e5d0953..4c712ce38 100644
--- a/test/sql/test_compiler.py
+++ b/test/sql/test_compiler.py
@@ -89,7 +89,7 @@ class SelectTest(TestBase, AssertsCompiledSQL):
def test_invalid_col_argument(self):
assert_raises(exc.ArgumentError, select, table1)
assert_raises(exc.ArgumentError, select, table1.c.myid)
-
+
def test_from_subquery(self):
"""tests placing select statements in the column clause of another select, for the
purposes of selecting from the exported columns of that select."""
@@ -263,13 +263,27 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
s3 = select([s2], use_labels=True)
s4 = s3.alias()
s5 = select([s4], use_labels=True)
- self.assert_compile(s5, "SELECT anon_1.anon_2_myid AS anon_1_anon_2_myid, anon_1.anon_2_name AS anon_1_anon_2_name, "\
- "anon_1.anon_2_description AS anon_1_anon_2_description FROM (SELECT anon_2.myid AS anon_2_myid, anon_2.name AS anon_2_name, "\
- "anon_2.description AS anon_2_description FROM (SELECT mytable.myid AS myid, mytable.name AS name, mytable.description "\
- "AS description FROM mytable) AS anon_2) AS anon_1")
+ self.assert_compile(s5,
+ 'SELECT anon_1.anon_2_myid AS '
+ 'anon_1_anon_2_myid, anon_1.anon_2_name AS '
+ 'anon_1_anon_2_name, anon_1.anon_2_descript'
+ 'ion AS anon_1_anon_2_description FROM '
+ '(SELECT anon_2.myid AS anon_2_myid, '
+ 'anon_2.name AS anon_2_name, '
+ 'anon_2.description AS anon_2_description '
+ 'FROM (SELECT mytable.myid AS myid, '
+ 'mytable.name AS name, mytable.description '
+ 'AS description FROM mytable) AS anon_2) '
+ 'AS anon_1')
def test_dont_overcorrelate(self):
- self.assert_compile(select([table1], from_obj=[table1, table1.select()]), """SELECT mytable.myid, mytable.name, mytable.description FROM mytable, (SELECT mytable.myid AS myid, mytable.name AS name, mytable.description AS description FROM mytable)""")
+ self.assert_compile(select([table1], from_obj=[table1,
+ table1.select()]),
+ "SELECT mytable.myid, mytable.name, "
+ "mytable.description FROM mytable, (SELECT "
+ "mytable.myid AS myid, mytable.name AS "
+ "name, mytable.description AS description "
+ "FROM mytable)")
def test_full_correlate(self):
# intentional
@@ -301,31 +315,56 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
"EXISTS (SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1)"
)
- self.assert_compile(exists([table1.c.myid], table1.c.myid==5).select(), "SELECT EXISTS (SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1)", params={'mytable_myid':5})
-
- self.assert_compile(select([table1, exists([1], from_obj=table2)]), "SELECT mytable.myid, mytable.name, mytable.description, EXISTS (SELECT 1 FROM myothertable) FROM mytable", params={})
-
- self.assert_compile(select([table1, exists([1], from_obj=table2).label('foo')]), "SELECT mytable.myid, mytable.name, mytable.description, EXISTS (SELECT 1 FROM myothertable) AS foo FROM mytable", params={})
-
- self.assert_compile(
- table1.select(exists().where(table2.c.otherid == table1.c.myid).correlate(table1)),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE EXISTS (SELECT * FROM myothertable WHERE myothertable.otherid = mytable.myid)"
- )
-
- self.assert_compile(
- table1.select(exists().where(table2.c.otherid == table1.c.myid).correlate(table1)),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE EXISTS (SELECT * FROM myothertable WHERE myothertable.otherid = mytable.myid)"
- )
-
- self.assert_compile(
- table1.select(exists().where(table2.c.otherid == table1.c.myid).correlate(table1)).replace_selectable(table2, table2.alias()),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE EXISTS (SELECT * FROM myothertable AS myothertable_1 WHERE myothertable_1.otherid = mytable.myid)"
- )
-
- self.assert_compile(
- table1.select(exists().where(table2.c.otherid == table1.c.myid).correlate(table1)).select_from(table1.join(table2, table1.c.myid==table2.c.otherid)).replace_selectable(table2, table2.alias()),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable JOIN myothertable AS myothertable_1 ON mytable.myid = myothertable_1.otherid WHERE EXISTS (SELECT * FROM myothertable AS myothertable_1 WHERE myothertable_1.otherid = mytable.myid)"
- )
+ self.assert_compile(exists([table1.c.myid], table1.c.myid
+ == 5).select(),
+ 'SELECT EXISTS (SELECT mytable.myid FROM '
+ 'mytable WHERE mytable.myid = :myid_1)',
+ params={'mytable_myid': 5})
+ self.assert_compile(select([table1, exists([1],
+ from_obj=table2)]),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, EXISTS (SELECT 1 '
+ 'FROM myothertable) FROM mytable',
+ params={})
+ self.assert_compile(select([table1, exists([1],
+ from_obj=table2).label('foo')]),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, EXISTS (SELECT 1 '
+ 'FROM myothertable) AS foo FROM mytable',
+ params={})
+
+ self.assert_compile(table1.select(exists().where(table2.c.otherid
+ == table1.c.myid).correlate(table1)),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable WHERE '
+ 'EXISTS (SELECT * FROM myothertable WHERE '
+ 'myothertable.otherid = mytable.myid)')
+ self.assert_compile(table1.select(exists().where(table2.c.otherid
+ == table1.c.myid).correlate(table1)),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable WHERE '
+ 'EXISTS (SELECT * FROM myothertable WHERE '
+ 'myothertable.otherid = mytable.myid)')
+ self.assert_compile(table1.select(exists().where(table2.c.otherid
+ == table1.c.myid).correlate(table1)).replace_selectable(table2,
+ table2.alias()),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable WHERE '
+ 'EXISTS (SELECT * FROM myothertable AS '
+ 'myothertable_1 WHERE myothertable_1.otheri'
+ 'd = mytable.myid)')
+ self.assert_compile(table1.select(exists().where(table2.c.otherid
+ == table1.c.myid).correlate(table1)).select_from(table1.join(table2,
+ table1.c.myid
+ == table2.c.otherid)).replace_selectable(table2,
+ table2.alias()),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable JOIN '
+ 'myothertable AS myothertable_1 ON '
+ 'mytable.myid = myothertable_1.otherid '
+ 'WHERE EXISTS (SELECT * FROM myothertable '
+ 'AS myothertable_1 WHERE '
+ 'myothertable_1.otherid = mytable.myid)')
self.assert_compile(
select([
@@ -334,62 +373,93 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
exists().where(table2.c.otherid=='bar')
)
]),
- "SELECT (EXISTS (SELECT * FROM myothertable WHERE myothertable.otherid = :otherid_1)) "\
- "OR (EXISTS (SELECT * FROM myothertable WHERE myothertable.otherid = :otherid_2)) AS anon_1"
+ "SELECT (EXISTS (SELECT * FROM myothertable "
+ "WHERE myothertable.otherid = :otherid_1)) "
+ "OR (EXISTS (SELECT * FROM myothertable WHERE "
+ "myothertable.otherid = :otherid_2)) AS anon_1"
)
def test_where_subquery(self):
- s = select([addresses.c.street], addresses.c.user_id==users.c.user_id, correlate=True).alias('s')
- self.assert_compile(
- select([users, s.c.street], from_obj=s),
- """SELECT users.user_id, users.user_name, users.password, s.street FROM users, (SELECT addresses.street AS street FROM addresses WHERE addresses.user_id = users.user_id) AS s""")
-
- self.assert_compile(
- table1.select(table1.c.myid == select([table1.c.myid], table1.c.name=='jack')),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE mytable.myid = (SELECT mytable.myid FROM mytable WHERE mytable.name = :name_1)"
- )
-
- self.assert_compile(
- table1.select(table1.c.myid == select([table2.c.otherid], table1.c.name == table2.c.othername)),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE mytable.myid = (SELECT myothertable.otherid FROM myothertable WHERE mytable.name = myothertable.othername)"
- )
-
- self.assert_compile(
- table1.select(exists([1], table2.c.otherid == table1.c.myid)),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE EXISTS (SELECT 1 FROM myothertable WHERE myothertable.otherid = mytable.myid)"
- )
-
-
+ s = select([addresses.c.street], addresses.c.user_id
+ == users.c.user_id, correlate=True).alias('s')
+ self.assert_compile(select([users, s.c.street], from_obj=s),
+ "SELECT users.user_id, users.user_name, "
+ "users.password, s.street FROM users, "
+ "(SELECT addresses.street AS street FROM "
+ "addresses WHERE addresses.user_id = "
+ "users.user_id) AS s")
+ self.assert_compile(table1.select(table1.c.myid
+ == select([table1.c.myid], table1.c.name
+ == 'jack')),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable WHERE '
+ 'mytable.myid = (SELECT mytable.myid FROM '
+ 'mytable WHERE mytable.name = :name_1)')
+ self.assert_compile(table1.select(table1.c.myid
+ == select([table2.c.otherid], table1.c.name
+ == table2.c.othername)),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable WHERE '
+ 'mytable.myid = (SELECT '
+ 'myothertable.otherid FROM myothertable '
+ 'WHERE mytable.name = myothertable.othernam'
+ 'e)')
+ self.assert_compile(table1.select(exists([1], table2.c.otherid
+ == table1.c.myid)),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable WHERE '
+ 'EXISTS (SELECT 1 FROM myothertable WHERE '
+ 'myothertable.otherid = mytable.myid)')
talias = table1.alias('ta')
- s = subquery('sq2', [talias], exists([1], table2.c.otherid == talias.c.myid))
- self.assert_compile(
- select([s, table1])
- ,"SELECT sq2.myid, sq2.name, sq2.description, mytable.myid, mytable.name, mytable.description FROM (SELECT ta.myid AS myid, ta.name AS name, ta.description AS description FROM mytable AS ta WHERE EXISTS (SELECT 1 FROM myothertable WHERE myothertable.otherid = ta.myid)) AS sq2, mytable")
-
- s = select([addresses.c.street], addresses.c.user_id==users.c.user_id, correlate=True).alias('s')
- self.assert_compile(
- select([users, s.c.street], from_obj=s),
- """SELECT users.user_id, users.user_name, users.password, s.street FROM users, (SELECT addresses.street AS street FROM addresses WHERE addresses.user_id = users.user_id) AS s""")
-
- # test constructing the outer query via append_column(), which occurs in the ORM's Query object
- s = select([], exists([1], table2.c.otherid==table1.c.myid), from_obj=table1)
+ s = subquery('sq2', [talias], exists([1], table2.c.otherid
+ == talias.c.myid))
+ self.assert_compile(select([s, table1]),
+ 'SELECT sq2.myid, sq2.name, '
+ 'sq2.description, mytable.myid, '
+ 'mytable.name, mytable.description FROM '
+ '(SELECT ta.myid AS myid, ta.name AS name, '
+ 'ta.description AS description FROM '
+ 'mytable AS ta WHERE EXISTS (SELECT 1 FROM '
+ 'myothertable WHERE myothertable.otherid = '
+ 'ta.myid)) AS sq2, mytable')
+ s = select([addresses.c.street], addresses.c.user_id
+ == users.c.user_id, correlate=True).alias('s')
+ self.assert_compile(select([users, s.c.street], from_obj=s),
+ "SELECT users.user_id, users.user_name, "
+ "users.password, s.street FROM users, "
+ "(SELECT addresses.street AS street FROM "
+ "addresses WHERE addresses.user_id = "
+ "users.user_id) AS s")
+
+ # test constructing the outer query via append_column(), which
+ # occurs in the ORM's Query object
+
+ s = select([], exists([1], table2.c.otherid == table1.c.myid),
+ from_obj=table1)
s.append_column(table1)
- self.assert_compile(
- s,
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable WHERE EXISTS (SELECT 1 FROM myothertable WHERE myothertable.otherid = mytable.myid)"
- )
+ self.assert_compile(s,
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable WHERE '
+ 'EXISTS (SELECT 1 FROM myothertable WHERE '
+ 'myothertable.otherid = mytable.myid)')
def test_orderby_subquery(self):
- self.assert_compile(
- table1.select(order_by=[select([table2.c.otherid], table1.c.myid==table2.c.otherid)]),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable ORDER BY (SELECT myothertable.otherid FROM myothertable WHERE mytable.myid = myothertable.otherid)"
- )
- self.assert_compile(
- table1.select(order_by=[desc(select([table2.c.otherid], table1.c.myid==table2.c.otherid))]),
- "SELECT mytable.myid, mytable.name, mytable.description FROM mytable ORDER BY (SELECT myothertable.otherid FROM myothertable WHERE mytable.myid = myothertable.otherid) DESC"
- )
+ self.assert_compile(table1.select(order_by=[select([table2.c.otherid],
+ table1.c.myid == table2.c.otherid)]),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable ORDER BY '
+ '(SELECT myothertable.otherid FROM '
+ 'myothertable WHERE mytable.myid = '
+ 'myothertable.otherid)')
+ self.assert_compile(table1.select(order_by=[desc(select([table2.c.otherid],
+ table1.c.myid == table2.c.otherid))]),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description FROM mytable ORDER BY '
+ '(SELECT myothertable.otherid FROM '
+ 'myothertable WHERE mytable.myid = '
+ 'myothertable.otherid) DESC')
@testing.uses_deprecated('scalar option')
def test_scalar_select(self):
@@ -401,41 +471,76 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
)
s = select([table1.c.myid], correlate=False).as_scalar()
- self.assert_compile(select([table1, s]), "SELECT mytable.myid, mytable.name, mytable.description, (SELECT mytable.myid FROM mytable) AS anon_1 FROM mytable")
-
+ self.assert_compile(select([table1, s]),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, (SELECT mytable.myid '
+ 'FROM mytable) AS anon_1 FROM mytable')
s = select([table1.c.myid]).as_scalar()
- self.assert_compile(select([table2, s]), "SELECT myothertable.otherid, myothertable.othername, (SELECT mytable.myid FROM mytable) AS anon_1 FROM myothertable")
-
+ self.assert_compile(select([table2, s]),
+ 'SELECT myothertable.otherid, '
+ 'myothertable.othername, (SELECT '
+ 'mytable.myid FROM mytable) AS anon_1 FROM '
+ 'myothertable')
s = select([table1.c.myid]).correlate(None).as_scalar()
- self.assert_compile(select([table1, s]), "SELECT mytable.myid, mytable.name, mytable.description, (SELECT mytable.myid FROM mytable) AS anon_1 FROM mytable")
-
- # test that aliases use as_scalar() when used in an explicitly scalar context
- s = select([table1.c.myid]).alias()
- self.assert_compile(select([table1.c.myid]).where(table1.c.myid==s), "SELECT mytable.myid FROM mytable WHERE mytable.myid = (SELECT mytable.myid FROM mytable)")
- self.assert_compile(select([table1.c.myid]).where(s > table1.c.myid), "SELECT mytable.myid FROM mytable WHERE mytable.myid < (SELECT mytable.myid FROM mytable)")
+ self.assert_compile(select([table1, s]),
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, (SELECT mytable.myid '
+ 'FROM mytable) AS anon_1 FROM mytable')
+ # test that aliases use as_scalar() when used in an explicitly
+ # scalar context
+ s = select([table1.c.myid]).alias()
+ self.assert_compile(select([table1.c.myid]).where(table1.c.myid
+ == s),
+ 'SELECT mytable.myid FROM mytable WHERE '
+ 'mytable.myid = (SELECT mytable.myid FROM '
+ 'mytable)')
+ self.assert_compile(select([table1.c.myid]).where(s
+ > table1.c.myid),
+ 'SELECT mytable.myid FROM mytable WHERE '
+ 'mytable.myid < (SELECT mytable.myid FROM '
+ 'mytable)')
s = select([table1.c.myid]).as_scalar()
- self.assert_compile(select([table2, s]), "SELECT myothertable.otherid, myothertable.othername, (SELECT mytable.myid FROM mytable) AS anon_1 FROM myothertable")
+ self.assert_compile(select([table2, s]),
+ 'SELECT myothertable.otherid, '
+ 'myothertable.othername, (SELECT '
+ 'mytable.myid FROM mytable) AS anon_1 FROM '
+ 'myothertable')
# test expressions against scalar selects
- self.assert_compile(select([s - literal(8)]), "SELECT (SELECT mytable.myid FROM mytable) - :param_1 AS anon_1")
- self.assert_compile(select([select([table1.c.name]).as_scalar() + literal('x')]), "SELECT (SELECT mytable.name FROM mytable) || :param_1 AS anon_1")
- self.assert_compile(select([s > literal(8)]), "SELECT (SELECT mytable.myid FROM mytable) > :param_1 AS anon_1")
- self.assert_compile(select([select([table1.c.name]).label('foo')]), "SELECT (SELECT mytable.name FROM mytable) AS foo")
+ self.assert_compile(select([s - literal(8)]),
+ 'SELECT (SELECT mytable.myid FROM mytable) '
+ '- :param_1 AS anon_1')
+ self.assert_compile(select([select([table1.c.name]).as_scalar()
+ + literal('x')]),
+ 'SELECT (SELECT mytable.name FROM mytable) '
+ '|| :param_1 AS anon_1')
+ self.assert_compile(select([s > literal(8)]),
+ 'SELECT (SELECT mytable.myid FROM mytable) '
+ '> :param_1 AS anon_1')
+ self.assert_compile(select([select([table1.c.name]).label('foo'
+ )]),
+ 'SELECT (SELECT mytable.name FROM mytable) '
+ 'AS foo')
-        # scalar selects should not have any attributes on their 'c' or 'columns' attribute
+
+    # scalar selects should not have any attributes on their 'c' or
+    # 'columns' attribute
s = select([table1.c.myid]).as_scalar()
try:
s.c.foo
except exc.InvalidRequestError, err:
- assert str(err) == 'Scalar Select expression has no columns; use this object directly within a column-level expression.'
-
+ assert str(err) \
+ == 'Scalar Select expression has no columns; use this '\
+ 'object directly within a column-level expression.'
try:
s.columns.foo
except exc.InvalidRequestError, err:
- assert str(err) == 'Scalar Select expression has no columns; use this object directly within a column-level expression.'
+ assert str(err) \
+ == 'Scalar Select expression has no columns; use this '\
+ 'object directly within a column-level expression.'
zips = table('zips',
column('zipcode'),
@@ -455,29 +560,55 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
order_by = ['dist', places.c.nm]
)
- self.assert_compile(q,"SELECT places.id, places.nm, zips.zipcode, latlondist((SELECT zips.latitude FROM zips WHERE "
- "zips.zipcode = :zipcode_1), (SELECT zips.longitude FROM zips WHERE zips.zipcode = :zipcode_2)) AS dist "
- "FROM places, zips WHERE zips.zipcode = :zipcode_3 ORDER BY dist, places.nm")
+ self.assert_compile(q,
+ 'SELECT places.id, places.nm, '
+ 'zips.zipcode, latlondist((SELECT '
+ 'zips.latitude FROM zips WHERE '
+ 'zips.zipcode = :zipcode_1), (SELECT '
+ 'zips.longitude FROM zips WHERE '
+ 'zips.zipcode = :zipcode_2)) AS dist FROM '
+ 'places, zips WHERE zips.zipcode = '
+ ':zipcode_3 ORDER BY dist, places.nm')
zalias = zips.alias('main_zip')
qlat = select([zips.c.latitude], zips.c.zipcode == zalias.c.zipcode).as_scalar()
qlng = select([zips.c.longitude], zips.c.zipcode == zalias.c.zipcode).as_scalar()
- q = select([places.c.id, places.c.nm, zalias.c.zipcode, func.latlondist(qlat, qlng).label('dist')],
- order_by = ['dist', places.c.nm]
- )
- self.assert_compile(q, "SELECT places.id, places.nm, main_zip.zipcode, latlondist((SELECT zips.latitude FROM zips WHERE zips.zipcode = main_zip.zipcode), (SELECT zips.longitude FROM zips WHERE zips.zipcode = main_zip.zipcode)) AS dist FROM places, zips AS main_zip ORDER BY dist, places.nm")
+ q = select([places.c.id, places.c.nm, zalias.c.zipcode,
+ func.latlondist(qlat, qlng).label('dist')],
+ order_by=['dist', places.c.nm])
+ self.assert_compile(q,
+ 'SELECT places.id, places.nm, '
+ 'main_zip.zipcode, latlondist((SELECT '
+ 'zips.latitude FROM zips WHERE '
+ 'zips.zipcode = main_zip.zipcode), (SELECT '
+ 'zips.longitude FROM zips WHERE '
+ 'zips.zipcode = main_zip.zipcode)) AS dist '
+ 'FROM places, zips AS main_zip ORDER BY '
+ 'dist, places.nm')
a1 = table2.alias('t2alias')
s1 = select([a1.c.otherid], table1.c.myid==a1.c.otherid).as_scalar()
j1 = table1.join(table2, table1.c.myid==table2.c.otherid)
s2 = select([table1, s1], from_obj=j1)
- self.assert_compile(s2, "SELECT mytable.myid, mytable.name, mytable.description, (SELECT t2alias.otherid FROM myothertable AS t2alias WHERE mytable.myid = t2alias.otherid) AS anon_1 FROM mytable JOIN myothertable ON mytable.myid = myothertable.otherid")
+ self.assert_compile(s2,
+ 'SELECT mytable.myid, mytable.name, '
+ 'mytable.description, (SELECT '
+ 't2alias.otherid FROM myothertable AS '
+ 't2alias WHERE mytable.myid = '
+ 't2alias.otherid) AS anon_1 FROM mytable '
+ 'JOIN myothertable ON mytable.myid = '
+ 'myothertable.otherid')
def test_label_comparison(self):
x = func.lala(table1.c.myid).label('foo')
- self.assert_compile(select([x], x==5), "SELECT lala(mytable.myid) AS foo FROM mytable WHERE lala(mytable.myid) = :param_1")
+ self.assert_compile(select([x], x == 5),
+ 'SELECT lala(mytable.myid) AS foo FROM '
+ 'mytable WHERE lala(mytable.myid) = '
+ ':param_1')
- self.assert_compile(label('bar', column('foo', type_=String)) + "foo", "foo || :param_1")
+ self.assert_compile(
+ label('bar', column('foo', type_=String))+ 'foo',
+ 'foo || :param_1')
def test_conjunctions(self):
@@ -491,7 +622,8 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
)
self.assert_compile(
- and_(table1.c.myid == 12, table1.c.name=='asdf', table2.c.othername == 'foo', "sysdate() = today()"),
+ and_(table1.c.myid == 12, table1.c.name=='asdf',
+ table2.c.othername == 'foo', "sysdate() = today()"),
"mytable.myid = :myid_1 AND mytable.name = :name_1 "\
"AND myothertable.othername = :othername_1 AND sysdate() = today()"
)
@@ -499,11 +631,14 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
self.assert_compile(
and_(
table1.c.myid == 12,
- or_(table2.c.othername=='asdf', table2.c.othername == 'foo', table2.c.otherid == 9),
+ or_(table2.c.othername=='asdf',
+ table2.c.othername == 'foo', table2.c.otherid == 9),
"sysdate() = today()",
),
- "mytable.myid = :myid_1 AND (myothertable.othername = :othername_1 OR "\
- "myothertable.othername = :othername_2 OR myothertable.otherid = :otherid_1) AND sysdate() = today()",
+ 'mytable.myid = :myid_1 AND (myothertable.othername = '
+ ':othername_1 OR myothertable.othername = :othername_2 OR '
+ 'myothertable.otherid = :otherid_1) AND sysdate() = '
+ 'today()',
checkparams = {'othername_1': 'asdf', 'othername_2':'foo', 'otherid_1': 9, 'myid_1': 12}
)
@@ -1763,18 +1898,74 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A
self.assert_compile(table.select(between((table.c.field == table.c.field), False, True)),
"SELECT op.field FROM op WHERE (op.field = op.field) BETWEEN :param_1 AND :param_2")
+ def test_delayed_col_naming(self):
+ my_str = Column(String)
+
+ sel1 = select([my_str])
+
+ assert_raises_message(
+ exc.InvalidRequestError,
+ "Cannot initialize a sub-selectable with this Column",
+ lambda: sel1.c
+ )
+
+ # calling label() or as_scalar() doesn't compile
+ # anything yet.
+ sel2 = select([func.substr(my_str, 2, 3)]).label('my_substr')
+
+ assert_raises_message(
+ exc.CompileError,
+ "Cannot compile Column object until it's 'name' is assigned.",
+ str, sel2
+ )
+
+ sel3 = select([my_str]).as_scalar()
+ assert_raises_message(
+ exc.CompileError,
+ "Cannot compile Column object until it's 'name' is assigned.",
+ str, sel3
+ )
+
+ my_str.name = 'foo'
+
+ self.assert_compile(
+ sel1,
+ "SELECT foo",
+ )
+ self.assert_compile(
+ sel2,
+ '(SELECT substr(foo, :substr_2, :substr_3) AS substr_1)',
+ )
+
+ self.assert_compile(
+ sel3,
+ "(SELECT foo)"
+ )
+
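
In short, the delayed-naming behavior tested above (a sketch; this
pattern matters where Column objects are created before their attribute
name is known, e.g. in declarative):

    my_str = Column(String)      # no name yet
    sel = select([my_str])       # building the expression is legal
    # sel.c raises InvalidRequestError at this point, and compiling the
    # labeled or scalar forms raises CompileError, per the assertions above
    my_str.name = 'foo'
    print sel                    # now renders "SELECT foo"
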
def test_naming(self):
- s1 = select([table1.c.myid, table1.c.myid.label('foobar'), func.hoho(table1.c.name), func.lala(table1.c.name).label('gg')])
- assert s1.c.keys() == ['myid', 'foobar', 'hoho(mytable.name)', 'gg']
+ f1 = func.hoho(table1.c.name)
+ s1 = select([table1.c.myid, table1.c.myid.label('foobar'),
+ f1,
+ func.lala(table1.c.name).label('gg')])
+
+ eq_(
+ s1.c.keys(),
+ ['myid', 'foobar', str(f1), 'gg']
+ )
meta = MetaData()
t1 = Table('mytable', meta, Column('col1', Integer))
+ exprs = (
+ table1.c.myid==12,
+ func.hoho(table1.c.myid),
+ cast(table1.c.name, Numeric)
+ )
for col, key, expr, label in (
(table1.c.name, 'name', 'mytable.name', None),
- (table1.c.myid==12, 'mytable.myid = :myid_1', 'mytable.myid = :myid_1', 'anon_1'),
- (func.hoho(table1.c.myid), 'hoho(mytable.myid)', 'hoho(mytable.myid)', 'hoho_1'),
- (cast(table1.c.name, Numeric), 'CAST(mytable.name AS NUMERIC)', 'CAST(mytable.name AS NUMERIC)', 'anon_1'),
+ (exprs[0], str(exprs[0]), 'mytable.myid = :myid_1', 'anon_1'),
+ (exprs[1], str(exprs[1]), 'hoho(mytable.myid)', 'hoho_1'),
+ (exprs[2], str(exprs[2]), 'CAST(mytable.name AS NUMERIC)', 'anon_1'),
(t1.c.col1, 'col1', 'mytable.col1', None),
(column('some wacky thing'), 'some wacky thing', '"some wacky thing"', '')
):
diff --git a/test/sql/test_query.py b/test/sql/test_query.py
index 2093e1f69..410ff73a6 100644
--- a/test/sql/test_query.py
+++ b/test/sql/test_query.py
@@ -259,6 +259,7 @@ class QueryTest(TestBase):
)
concat = ("test: " + users.c.user_name).label('thedata')
+ print select([concat]).order_by("thedata")
eq_(
select([concat]).order_by("thedata").execute().fetchall(),
[("test: ed",), ("test: fred",), ("test: jack",)]
@@ -397,6 +398,7 @@ class QueryTest(TestBase):
@testing.fails_on("firebird", "see dialect.test_firebird:MiscTest.test_percents_in_text")
@testing.fails_on("oracle", "neither % nor %% are accepted")
+ @testing.fails_on("informix", "neither % nor %% are accepted")
@testing.fails_on("+pg8000", "can't interpret result column from '%%'")
@testing.emits_warning('.*now automatically escapes.*')
def test_percents_in_text(self):
@@ -619,6 +621,16 @@ class QueryTest(TestBase):
eq_(r[users.c.user_name], 'jack')
eq_(r.user_name, 'jack')
+ @testing.requires.dbapi_lastrowid
+ def test_native_lastrowid(self):
+ r = testing.db.execute(
+ users.insert(),
+ {'user_id':1, 'user_name':'ed'}
+ )
+
+ eq_(r.lastrowid, 1)
+
+
def test_graceful_fetch_on_non_rows(self):
"""test that calling fetchone() etc. on a result that doesn't
return rows fails gracefully.
@@ -651,7 +663,7 @@ class QueryTest(TestBase):
"This result object is closed.",
result.fetchone
)
-
+
def test_result_case_sensitivity(self):
"""test name normalization for result sets."""
@@ -785,7 +797,10 @@ class QueryTest(TestBase):
)
shadowed.create(checkfirst=True)
try:
- shadowed.insert().execute(shadow_id=1, shadow_name='The Shadow', parent='The Light', row='Without light there is no shadow', _parent='Hidden parent', _row='Hidden row')
+ shadowed.insert().execute(shadow_id=1, shadow_name='The Shadow', parent='The Light',
+ row='Without light there is no shadow',
+ _parent='Hidden parent',
+ _row='Hidden row')
r = shadowed.select(shadowed.c.shadow_id==1).execute().first()
self.assert_(r.shadow_id == r['shadow_id'] == r[shadowed.c.shadow_id] == 1)
self.assert_(r.shadow_name == r['shadow_name'] == r[shadowed.c.shadow_name] == 'The Shadow')
@@ -1166,6 +1181,7 @@ class CompoundTest(TestBase):
@testing.fails_on('firebird', "has trouble extracting anonymous column from union subquery")
@testing.fails_on('mysql', 'FIXME: unknown')
@testing.fails_on('sqlite', 'FIXME: unknown')
+ @testing.fails_on('informix', "FIXME: unknown (maybe the second alias isn't allowed)")
def test_union_all(self):
e = union_all(
select([t1.c.col3]),
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index af460628e..2a21ce034 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -738,10 +738,10 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
meta.create_all()
test_table.insert().execute({
- 'id':1,
- 'data':'somedata',
- 'atimestamp':datetime.date(2007, 10, 15),
- 'avalue':25, 'bvalue':'foo'})
+ 'id':1,
+ 'data':'somedata',
+ 'atimestamp':datetime.date(2007, 10, 15),
+ 'avalue':25, 'bvalue':'foo'})
@classmethod
def teardown_class(cls):
@@ -752,7 +752,8 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
eq_(
test_table.select().execute().fetchall(),
- [(1, 'somedata', datetime.date(2007, 10, 15), 25, "BIND_INfooBIND_OUT")]
+ [(1, 'somedata', datetime.date(2007, 10, 15), 25,
+ 'BIND_INfooBIND_OUT')]
)
def test_bind_adapt(self):
@@ -762,9 +763,9 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
eq_(
testing.db.execute(
- select([test_table.c.id, test_table.c.data, test_table.c.atimestamp])
- .where(expr),
- {"thedate":datetime.date(2007, 10, 15)}).fetchall(),
+ select([test_table.c.id, test_table.c.data, test_table.c.atimestamp])
+ .where(expr),
+ {"thedate":datetime.date(2007, 10, 15)}).fetchall(),
[(1, 'somedata', datetime.date(2007, 10, 15))]
)
@@ -772,21 +773,25 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
eq_(expr.right.type._type_affinity, MyCustomType)
eq_(
- testing.db.execute(test_table.select().where(expr), {"somevalue":25}).fetchall(),
- [(1, 'somedata', datetime.date(2007, 10, 15), 25, 'BIND_INfooBIND_OUT')]
+ testing.db.execute(test_table.select().where(expr),
+ {'somevalue': 25}).fetchall(),
+ [(1, 'somedata', datetime.date(2007, 10, 15), 25,
+ 'BIND_INfooBIND_OUT')]
)
expr = test_table.c.bvalue == bindparam("somevalue")
eq_(expr.right.type._type_affinity, String)
eq_(
- testing.db.execute(test_table.select().where(expr), {"somevalue":"foo"}).fetchall(),
- [(1, 'somedata', datetime.date(2007, 10, 15), 25, 'BIND_INfooBIND_OUT')]
+ testing.db.execute(test_table.select().where(expr),
+ {"somevalue":"foo"}).fetchall(),
+ [(1, 'somedata',
+ datetime.date(2007, 10, 15), 25, 'BIND_INfooBIND_OUT')]
)
def test_literal_adapt(self):
- # literals get typed based on the types dictionary, unless compatible
- # with the left side type
+ # literals get typed based on the types dictionary, unless
+ # compatible with the left side type
expr = column('foo', String) == 5
eq_(expr.right.type._type_affinity, Integer)
@@ -933,7 +938,13 @@ class ExpressionTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL):
)
assert isinstance(expr.type, types.Numeric)
+ def test_null_comparison(self):
+ eq_(
+ str(column('a', types.NullType()) + column('b', types.NullType())),
+ "a + b"
+ )
+
def test_expression_typing(self):
expr = column('bar', Integer) - 3
@@ -1107,7 +1118,7 @@ class NumericTest(TestBase):
def teardown(self):
metadata.drop_all()
- @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
+ @testing.emits_warning(r".*does \*not\* support Decimal objects natively")
def _do_test(self, type_, input_, output, filter_ = None):
t = Table('t', metadata, Column('x', type_))
t.create()