Diffstat (limited to 'lib/sqlalchemy/dialects')
-rw-r--r--  lib/sqlalchemy/dialects/__init__.py | 3
-rw-r--r--  lib/sqlalchemy/dialects/drizzle/base.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/firebird/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/firebird/base.py | 33
-rw-r--r--  lib/sqlalchemy/dialects/firebird/fdb.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/firebird/kinterbasdb.py | 11
-rw-r--r--  lib/sqlalchemy/dialects/informix/__init__.py | 9
-rw-r--r--  lib/sqlalchemy/dialects/informix/base.py | 590
-rw-r--r--  lib/sqlalchemy/dialects/informix/informixdb.py | 69
-rw-r--r--  lib/sqlalchemy/dialects/mssql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/adodbapi.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/base.py | 110
-rw-r--r--  lib/sqlalchemy/dialects/mssql/information_schema.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/mxodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pymssql.py | 3
-rw-r--r--  lib/sqlalchemy/dialects/mssql/pyodbc.py | 14
-rw-r--r--  lib/sqlalchemy/dialects/mssql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/base.py | 350
-rw-r--r--  lib/sqlalchemy/dialects/mysql/cymysql.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/mysql/gaerdbms.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 7
-rw-r--r--  lib/sqlalchemy/dialects/mysql/mysqldb.py | 28
-rw-r--r--  lib/sqlalchemy/dialects/mysql/oursql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pymysql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/pyodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/mysql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/oracle/base.py | 87
-rw-r--r--  lib/sqlalchemy/dialects/oracle/cx_oracle.py | 46
-rw-r--r--  lib/sqlalchemy/dialects/oracle/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgres.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/__init__.py | 8
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/base.py | 252
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/constraints.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/hstore.py | 13
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/json.py | 199
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/pg8000.py | 6
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/psycopg2.py | 52
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/pypostgresql.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/ranges.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/postgresql/zxjdbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/base.py | 34
-rw-r--r--  lib/sqlalchemy/dialects/sqlite/pysqlite.py | 8
-rw-r--r--  lib/sqlalchemy/dialects/sybase/__init__.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/base.py | 4
-rw-r--r--  lib/sqlalchemy/dialects/sybase/mxodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pyodbc.py | 2
-rw-r--r--  lib/sqlalchemy/dialects/sybase/pysybase.py | 2
50 files changed, 1016 insertions(+), 984 deletions(-)
diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py
index 7f5d34707..974d4f787 100644
--- a/lib/sqlalchemy/dialects/__init__.py
+++ b/lib/sqlalchemy/dialects/__init__.py
@@ -1,5 +1,5 @@
# dialects/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,7 +7,6 @@
__all__ = (
'drizzle',
'firebird',
-# 'informix',
'mssql',
'mysql',
'oracle',
diff --git a/lib/sqlalchemy/dialects/drizzle/base.py b/lib/sqlalchemy/dialects/drizzle/base.py
index efad13549..b5addb422 100644
--- a/lib/sqlalchemy/dialects/drizzle/base.py
+++ b/lib/sqlalchemy/dialects/drizzle/base.py
@@ -1,5 +1,5 @@
# drizzle/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# Copyright (C) 2010-2011 Monty Taylor <mordred@inaugust.com>
#
# This module is part of SQLAlchemy and is released under
@@ -417,6 +417,7 @@ class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer):
pass
+@log.class_logger
class DrizzleDialect(mysql_dialect.MySQLDialect):
"""Details of the Drizzle dialect.
@@ -495,4 +496,3 @@ class DrizzleDialect(mysql_dialect.MySQLDialect):
self._backslash_escapes = False
-log.class_logger(DrizzleDialect)
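
The hunk above moves logger setup from a post-class function call to a class decorator. A minimal standalone sketch of that decorator idiom (not SQLAlchemy's internal ``log`` module, just an illustration of the pattern being adopted)::

    import logging

    def class_logger(cls):
        # attach a logger named after the class, then return the class unchanged
        cls.logger = logging.getLogger(cls.__module__ + "." + cls.__name__)
        return cls

    @class_logger
    class DrizzleDialect(object):
        pass
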
diff --git a/lib/sqlalchemy/dialects/firebird/__init__.py b/lib/sqlalchemy/dialects/firebird/__init__.py
index e57457a39..094ac3e83 100644
--- a/lib/sqlalchemy/dialects/firebird/__init__.py
+++ b/lib/sqlalchemy/dialects/firebird/__init__.py
@@ -1,5 +1,5 @@
# firebird/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py
index dcaa68f4e..21db57b68 100644
--- a/lib/sqlalchemy/dialects/firebird/base.py
+++ b/lib/sqlalchemy/dialects/firebird/base.py
@@ -1,5 +1,5 @@
# firebird/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -359,6 +359,7 @@ class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
"""Install Firebird specific reserved words."""
reserved_words = RESERVED_WORDS
+ illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(['_'])
def __init__(self, dialect):
super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
@@ -401,6 +402,8 @@ class FBDialect(default.DefaultDialect):
colspecs = colspecs
ischema_names = ischema_names
+ construct_arguments = []
+
# defaults to dialect ver. 3,
# will be autodetected off upon
# first connect
@@ -474,18 +477,34 @@ class FBDialect(default.DefaultDialect):
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
+ # there are two queries commonly mentioned for this.
+ # this one, using view_blr, is at the Firebird FAQ among other places:
+ # http://www.firebirdfaq.org/faq174/
s = """
- SELECT DISTINCT rdb$relation_name
- FROM rdb$relation_fields
- WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
+ select rdb$relation_name
+ from rdb$relations
+ where rdb$view_blr is null
+ and (rdb$system_flag is null or rdb$system_flag = 0);
"""
+
+ # the other query is this one. It's not clear if there's really
+ # any difference between these two. This link:
+ # http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
+ # states them as interchangeable. Some discussion at [ticket:2898]
+ # SELECT DISTINCT rdb$relation_name
+ # FROM rdb$relation_fields
+ # WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
+
return [self.normalize_name(row[0]) for row in connection.execute(s)]
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
+ # see http://www.firebirdfaq.org/faq174/
s = """
- SELECT distinct rdb$view_name
- FROM rdb$view_relations
+ select rdb$relation_name
+ from rdb$relations
+ where rdb$view_blr is not null
+ and (rdb$system_flag is null or rdb$system_flag = 0);
"""
return [self.normalize_name(row[0]) for row in connection.execute(s)]
@@ -700,7 +719,7 @@ class FBDialect(default.DefaultDialect):
ic.rdb$index_name
WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
AND rdb$relation_constraints.rdb$constraint_type IS NULL
- ORDER BY index_name, field_name
+ ORDER BY index_name, ic.rdb$field_position
"""
c = connection.execute(qry, [self.denormalize_name(table_name)])
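
The reworked ``get_table_names()`` / ``get_view_names()`` queries above surface through SQLAlchemy's public inspection API. A minimal usage sketch, assuming a reachable Firebird server (the connection URL below is hypothetical)::

    from sqlalchemy import create_engine, inspect

    # hypothetical DSN; adjust user, password and database path for your server
    engine = create_engine("firebird+fdb://sysdba:masterkey@localhost/employee.fdb")

    insp = inspect(engine)
    print(insp.get_table_names())   # runs the rdb$relations query with rdb$view_blr IS NULL
    print(insp.get_view_names())    # runs the rdb$view_blr IS NOT NULL variant
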
diff --git a/lib/sqlalchemy/dialects/firebird/fdb.py b/lib/sqlalchemy/dialects/firebird/fdb.py
index 36b424d49..4d94ef0d5 100644
--- a/lib/sqlalchemy/dialects/firebird/fdb.py
+++ b/lib/sqlalchemy/dialects/firebird/fdb.py
@@ -1,5 +1,5 @@
# firebird/fdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
index c8d8e986f..b8a83a07b 100644
--- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py
@@ -1,5 +1,5 @@
# firebird/kinterbasdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -42,7 +42,7 @@ from re import match
import decimal
-class _FBNumeric_kinterbasdb(sqltypes.Numeric):
+class _kinterbasdb_numeric(object):
def bind_processor(self, dialect):
def process(value):
if isinstance(value, decimal.Decimal):
@@ -51,6 +51,12 @@ class _FBNumeric_kinterbasdb(sqltypes.Numeric):
return value
return process
+class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, sqltypes.Numeric):
+ pass
+
+class _FBFloat_kinterbasdb(_kinterbasdb_numeric, sqltypes.Float):
+ pass
+
class FBExecutionContext_kinterbasdb(FBExecutionContext):
@property
@@ -74,6 +80,7 @@ class FBDialect_kinterbasdb(FBDialect):
FBDialect.colspecs,
{
sqltypes.Numeric: _FBNumeric_kinterbasdb,
+ sqltypes.Float: _FBFloat_kinterbasdb,
}
)
diff --git a/lib/sqlalchemy/dialects/informix/__init__.py b/lib/sqlalchemy/dialects/informix/__init__.py
deleted file mode 100644
index a55277c9f..000000000
--- a/lib/sqlalchemy/dialects/informix/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# informix/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.informix import base, informixdb
-
-base.dialect = informixdb.dialect
diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py
deleted file mode 100644
index e13ea8819..000000000
--- a/lib/sqlalchemy/dialects/informix/base.py
+++ /dev/null
@@ -1,590 +0,0 @@
-# informix/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-# coding: gbk
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-.. dialect:: informix
- :name: Informix
-
-.. note::
-
- The Informix dialect functions on current SQLAlchemy versions
- but is not regularly tested, and may have many issues and
- caveats not currently handled.
-
-"""
-
-
-import datetime
-
-from sqlalchemy import sql, schema, exc, pool, util
-from sqlalchemy.sql import compiler, text
-from sqlalchemy.engine import default, reflection
-from sqlalchemy import types as sqltypes
-from functools import reduce
-
-RESERVED_WORDS = set(
- ["abs", "absolute", "access", "access_method", "acos", "active", "add",
- "address", "add_months", "admin", "after", "aggregate", "alignment",
- "all", "allocate", "all_rows", "alter", "and", "ansi", "any", "append",
- "array", "as", "asc", "ascii", "asin", "at", "atan", "atan2", "attach",
- "attributes", "audit", "authentication", "authid", "authorization",
- "authorized", "auto", "autofree", "auto_reprepare", "auto_stat_mode",
- "avg", "avoid_execute", "avoid_fact", "avoid_full", "avoid_hash",
- "avoid_index", "avoid_index_sj", "avoid_multi_index", "avoid_nl",
- "avoid_star_join", "avoid_subqf", "based", "before", "begin",
- "between", "bigint", "bigserial", "binary", "bitand", "bitandnot",
- "bitnot", "bitor", "bitxor", "blob", "blobdir", "boolean", "both",
- "bound_impl_pdq", "buffered", "builtin", "by", "byte", "cache", "call",
- "cannothash", "cardinality", "cascade", "case", "cast", "ceil", "char",
- "character", "character_length", "char_length", "check", "class",
- "class_origin", "client", "clob", "clobdir", "close", "cluster",
- "clustersize", "cobol", "codeset", "collation", "collection",
- "column", "columns", "commit", "committed", "commutator", "component",
- "components", "concat", "concurrent", "connect", "connection",
- "connection_name", "connect_by_iscycle", "connect_by_isleaf",
- "connect_by_rootconst", "constraint", "constraints", "constructor",
- "context", "continue", "copy", "cos", "costfunc", "count", "crcols",
- "create", "cross", "current", "current_role", "currval", "cursor",
- "cycle", "database", "datafiles", "dataskip", "date", "datetime",
- "day", "dba", "dbdate", "dbinfo", "dbpassword", "dbsecadm",
- "dbservername", "deallocate", "debug", "debugmode", "debug_env", "dec",
- "decimal", "declare", "decode", "decrypt_binary", "decrypt_char",
- "dec_t", "default", "default_role", "deferred", "deferred_prepare",
- "define", "delay", "delete", "deleting", "delimited", "delimiter",
- "deluxe", "desc", "describe", "descriptor", "detach", "diagnostics",
- "directives", "dirty", "disable", "disabled", "disconnect", "disk",
- "distinct", "distributebinary", "distributesreferences",
- "distributions", "document", "domain", "donotdistribute", "dormant",
- "double", "drop", "dtime_t", "each", "elif", "else", "enabled",
- "encryption", "encrypt_aes", "encrypt_tdes", "end", "enum",
- "environment", "error", "escape", "exception", "exclusive", "exec",
- "execute", "executeanywhere", "exemption", "exists", "exit", "exp",
- "explain", "explicit", "express", "expression", "extdirectives",
- "extend", "extent", "external", "fact", "false", "far", "fetch",
- "file", "filetoblob", "filetoclob", "fillfactor", "filtering", "first",
- "first_rows", "fixchar", "fixed", "float", "floor", "flush", "for",
- "force", "forced", "force_ddl_exec", "foreach", "foreign", "format",
- "format_units", "fortran", "found", "fraction", "fragment",
- "fragments", "free", "from", "full", "function", "general", "get",
- "gethint", "global", "go", "goto", "grant", "greaterthan",
- "greaterthanorequal", "group", "handlesnulls", "hash", "having", "hdr",
- "hex", "high", "hint", "hold", "home", "hour", "idslbacreadarray",
- "idslbacreadset", "idslbacreadtree", "idslbacrules",
- "idslbacwritearray", "idslbacwriteset", "idslbacwritetree",
- "idssecuritylabel", "if", "ifx_auto_reprepare", "ifx_batchedread_table",
- "ifx_int8_t", "ifx_lo_create_spec_t", "ifx_lo_stat_t", "immediate",
- "implicit", "implicit_pdq", "in", "inactive", "increment", "index",
- "indexes", "index_all", "index_sj", "indicator", "informix", "init",
- "initcap", "inline", "inner", "inout", "insert", "inserting", "instead",
- "int", "int8", "integ", "integer", "internal", "internallength",
- "interval", "into", "intrvl_t", "is", "iscanonical", "isolation",
- "item", "iterator", "java", "join", "keep", "key", "label", "labeleq",
- "labelge", "labelglb", "labelgt", "labelle", "labellt", "labellub",
- "labeltostring", "language", "last", "last_day", "leading", "left",
- "length", "lessthan", "lessthanorequal", "let", "level", "like",
- "limit", "list", "listing", "load", "local", "locator", "lock", "locks",
- "locopy", "loc_t", "log", "log10", "logn", "long", "loop", "lotofile",
- "low", "lower", "lpad", "ltrim", "lvarchar", "matched", "matches",
- "max", "maxerrors", "maxlen", "maxvalue", "mdy", "median", "medium",
- "memory", "memory_resident", "merge", "message_length", "message_text",
- "middle", "min", "minute", "minvalue", "mod", "mode", "moderate",
- "modify", "module", "money", "month", "months_between", "mounting",
- "multiset", "multi_index", "name", "nchar", "negator", "new", "next",
- "nextval", "next_day", "no", "nocache", "nocycle", "nomaxvalue",
- "nomigrate", "nominvalue", "none", "non_dim", "non_resident", "noorder",
- "normal", "not", "notemplatearg", "notequal", "null", "nullif",
- "numeric", "numrows", "numtodsinterval", "numtoyminterval", "nvarchar",
- "nvl", "octet_length", "of", "off", "old", "on", "online", "only",
- "opaque", "opclass", "open", "optcompind", "optical", "optimization",
- "option", "or", "order", "ordered", "out", "outer", "output",
- "override", "page", "parallelizable", "parameter", "partition",
- "pascal", "passedbyvalue", "password", "pdqpriority", "percaltl_cos",
- "pipe", "pli", "pload", "policy", "pow", "power", "precision",
- "prepare", "previous", "primary", "prior", "private", "privileges",
- "procedure", "properties", "public", "put", "raise", "range", "raw",
- "read", "real", "recordend", "references", "referencing", "register",
- "rejectfile", "relative", "release", "remainder", "rename",
- "reoptimization", "repeatable", "replace", "replication", "reserve",
- "resolution", "resource", "restart", "restrict", "resume", "retain",
- "retainupdatelocks", "return", "returned_sqlstate", "returning",
- "returns", "reuse", "revoke", "right", "robin", "role", "rollback",
- "rollforward", "root", "round", "routine", "row", "rowid", "rowids",
- "rows", "row_count", "rpad", "rtrim", "rule", "sameas", "samples",
- "sampling", "save", "savepoint", "schema", "scroll", "seclabel_by_comp",
- "seclabel_by_name", "seclabel_to_char", "second", "secondary",
- "section", "secured", "security", "selconst", "select", "selecting",
- "selfunc", "selfuncargs", "sequence", "serial", "serial8",
- "serializable", "serveruuid", "server_name", "session", "set",
- "setsessionauth", "share", "short", "siblings", "signed", "sin",
- "sitename", "size", "skall", "skinhibit", "skip", "skshow",
- "smallfloat", "smallint", "some", "specific", "sql", "sqlcode",
- "sqlcontext", "sqlerror", "sqlstate", "sqlwarning", "sqrt",
- "stability", "stack", "standard", "start", "star_join", "statchange",
- "statement", "static", "statistics", "statlevel", "status", "stdev",
- "step", "stop", "storage", "store", "strategies", "string",
- "stringtolabel", "struct", "style", "subclass_origin", "substr",
- "substring", "sum", "support", "sync", "synonym", "sysdate",
- "sysdbclose", "sysdbopen", "system", "sys_connect_by_path", "table",
- "tables", "tan", "task", "temp", "template", "test", "text", "then",
- "time", "timeout", "to", "today", "to_char", "to_date",
- "to_dsinterval", "to_number", "to_yminterval", "trace", "trailing",
- "transaction", "transition", "tree", "trigger", "triggers", "trim",
- "true", "trunc", "truncate", "trusted", "type", "typedef", "typeid",
- "typename", "typeof", "uid", "uncommitted", "under", "union",
- "unique", "units", "unknown", "unload", "unlock", "unsigned",
- "update", "updating", "upon", "upper", "usage", "use",
- "uselastcommitted", "user", "use_hash", "use_nl", "use_subqf",
- "using", "value", "values", "var", "varchar", "variable", "variance",
- "variant", "varying", "vercols", "view", "violations", "void",
- "volatile", "wait", "warning", "weekday", "when", "whenever", "where",
- "while", "with", "without", "work", "write", "writedown", "writeup",
- "xadatasource", "xid", "xload", "xunload", "year"
- ])
-
-
-class InfoDateTime(sqltypes.DateTime):
-
- def bind_processor(self, dialect):
- def process(value):
- if value is not None:
- if value.microsecond:
- value = value.replace(microsecond=0)
- return value
- return process
-
-
-class InfoTime(sqltypes.Time):
-
- def bind_processor(self, dialect):
- def process(value):
- if value is not None:
- if value.microsecond:
- value = value.replace(microsecond=0)
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- def process(value):
- if isinstance(value, datetime.datetime):
- return value.time()
- else:
- return value
- return process
-
-colspecs = {
- sqltypes.DateTime: InfoDateTime,
- sqltypes.TIMESTAMP: InfoDateTime,
- sqltypes.Time: InfoTime,
-}
-
-
-ischema_names = {
- 0: sqltypes.CHAR, # CHAR
- 1: sqltypes.SMALLINT, # SMALLINT
- 2: sqltypes.INTEGER, # INT
- 3: sqltypes.FLOAT, # Float
- 3: sqltypes.Float, # SmallFloat
- 5: sqltypes.DECIMAL, # DECIMAL
- 6: sqltypes.Integer, # Serial
- 7: sqltypes.DATE, # DATE
- 8: sqltypes.Numeric, # MONEY
- 10: sqltypes.DATETIME, # DATETIME
- 11: sqltypes.LargeBinary, # BYTE
- 12: sqltypes.TEXT, # TEXT
- 13: sqltypes.VARCHAR, # VARCHAR
- 15: sqltypes.NCHAR, # NCHAR
- 16: sqltypes.NVARCHAR, # NVARCHAR
- 17: sqltypes.Integer, # INT8
- 18: sqltypes.Integer, # Serial8
- 43: sqltypes.String, # LVARCHAR
- -1: sqltypes.BLOB, # BLOB
- -1: sqltypes.CLOB, # CLOB
-}
-
-
-class InfoTypeCompiler(compiler.GenericTypeCompiler):
- def visit_DATETIME(self, type_):
- return "DATETIME YEAR TO SECOND"
-
- def visit_TIME(self, type_):
- return "DATETIME HOUR TO SECOND"
-
- def visit_TIMESTAMP(self, type_):
- return "DATETIME YEAR TO SECOND"
-
- def visit_large_binary(self, type_):
- return "BYTE"
-
- def visit_boolean(self, type_):
- return "SMALLINT"
-
-
-class InfoSQLCompiler(compiler.SQLCompiler):
-
- def default_from(self):
- return " from systables where tabname = 'systables' "
-
- def get_select_precolumns(self, select):
- s = ""
- if select._offset:
- s += "SKIP %s " % select._offset
- if select._limit:
- s += "FIRST %s " % select._limit
- s += select._distinct and "DISTINCT " or ""
- return s
-
- def visit_select(self, select, asfrom=False, parens=True, **kw):
- text = compiler.SQLCompiler.visit_select(self, select, asfrom, parens, **kw)
- if asfrom and parens and self.dialect.server_version_info < (11,):
- #assuming that 11 version doesn't need this, not tested
- return "table(multiset" + text + ")"
- else:
- return text
-
- def limit_clause(self, select):
- return ""
-
- def visit_function(self, func, **kw):
- if func.name.lower() == 'current_date':
- return "today"
- elif func.name.lower() == 'current_time':
- return "CURRENT HOUR TO SECOND"
- elif func.name.lower() in ('current_timestamp', 'now'):
- return "CURRENT YEAR TO SECOND"
- else:
- return compiler.SQLCompiler.visit_function(self, func, **kw)
-
- def visit_mod_binary(self, binary, operator, **kw):
- return "MOD(%s, %s)" % (self.process(binary.left, **kw),
- self.process(binary.right, **kw))
-
-
-class InfoDDLCompiler(compiler.DDLCompiler):
-
- def visit_add_constraint(self, create):
- preparer = self.preparer
- return "ALTER TABLE %s ADD CONSTRAINT %s" % (
- self.preparer.format_table(create.element.table),
- self.process(create.element)
- )
-
- def get_column_specification(self, column, **kw):
- colspec = self.preparer.format_column(column)
- first = None
- if column.primary_key and column.autoincrement:
- try:
- first = [c for c in column.table.primary_key.columns
- if (c.autoincrement and
- isinstance(c.type, sqltypes.Integer) and
- not c.foreign_keys)].pop(0)
- except IndexError:
- pass
-
- if column is first:
- colspec += " SERIAL"
- else:
- colspec += " " + self.dialect.type_compiler.process(column.type)
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- if not column.nullable:
- colspec += " NOT NULL"
-
- return colspec
-
- def get_column_default_string(self, column):
- if (isinstance(column.server_default, schema.DefaultClause) and
- isinstance(column.server_default.arg, util.string_types)):
- if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
- return self.sql_compiler.process(text(column.server_default.arg))
-
- return super(InfoDDLCompiler, self).get_column_default_string(column)
-
- ### Informix wants the constraint name at the end, hence this ist c&p from sql/compiler.py
- def visit_primary_key_constraint(self, constraint):
- if len(constraint) == 0:
- return ''
- text = "PRIMARY KEY "
- text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
- for c in constraint)
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
- return text
-
- def visit_foreign_key_constraint(self, constraint):
- preparer = self.dialect.identifier_preparer
- remote_table = list(constraint._elements.values())[0].column.table
- text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
- for f in constraint._elements.values()),
- preparer.format_table(remote_table),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
- for f in constraint._elements.values())
- )
- text += self.define_constraint_cascades(constraint)
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += " CONSTRAINT %s " % \
- preparer.format_constraint(constraint)
- return text
-
- def visit_unique_constraint(self, constraint):
- text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
- return text
-
-
-class InformixIdentifierPreparer(compiler.IdentifierPreparer):
-
- reserved_words = RESERVED_WORDS
-
-
-class InformixDialect(default.DefaultDialect):
- name = 'informix'
-
- max_identifier_length = 128 # adjusts at runtime based on server version
-
- type_compiler = InfoTypeCompiler
- statement_compiler = InfoSQLCompiler
- ddl_compiler = InfoDDLCompiler
- colspecs = colspecs
- ischema_names = ischema_names
- preparer = InformixIdentifierPreparer
- default_paramstyle = 'qmark'
-
- def initialize(self, connection):
- super(InformixDialect, self).initialize(connection)
-
- # http://www.querix.com/support/knowledge-base/error_number_message/error_200
- if self.server_version_info < (9, 2):
- self.max_identifier_length = 18
- else:
- self.max_identifier_length = 128
-
- def _get_table_names(self, connection, schema, type, **kw):
- schema = schema or self.default_schema_name
- s = "select tabname, owner from systables where owner=? and tabtype=?"
- return [row[0] for row in connection.execute(s, schema, type)]
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- return self._get_table_names(connection, schema, 'T', **kw)
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- return self._get_table_names(connection, schema, 'V', **kw)
-
- @reflection.cache
- def get_schema_names(self, connection, **kw):
- s = "select owner from systables"
- return [row[0] for row in connection.execute(s)]
-
- def has_table(self, connection, table_name, schema=None):
- schema = schema or self.default_schema_name
- cursor = connection.execute(
- """select tabname from systables where tabname=? and owner=?""",
- table_name, schema)
- return cursor.first() is not None
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
- schema = schema or self.default_schema_name
- c = connection.execute(
- """select colname, coltype, collength, t3.default, t1.colno from
- syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
- where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
- and t3.tabid = t2.tabid and t3.colno = t1.colno
- order by t1.colno""", table_name, schema)
-
- pk_constraint = self.get_pk_constraint(connection, table_name, schema, **kw)
- primary_cols = pk_constraint['constrained_columns']
-
- columns = []
- rows = c.fetchall()
- for name, colattr, collength, default, colno in rows:
- name = name.lower()
-
- autoincrement = False
- primary_key = False
-
- if name in primary_cols:
- primary_key = True
-
- # in 7.31, coltype = 0x000
- # ^^-- column type
- # ^-- 1 not null, 0 null
- not_nullable, coltype = divmod(colattr, 256)
- if coltype not in (0, 13) and default:
- default = default.split()[-1]
-
- if coltype == 6: # Serial, mark as autoincrement
- autoincrement = True
-
- if coltype == 0 or coltype == 13: # char, varchar
- coltype = ischema_names[coltype](collength)
- if default:
- default = "'%s'" % default
- elif coltype == 5: # decimal
- precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
- if scale == 255:
- scale = 0
- coltype = sqltypes.Numeric(precision, scale)
- else:
- try:
- coltype = ischema_names[coltype]
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (coltype, name))
- coltype = sqltypes.NULLTYPE
-
- column_info = dict(name=name, type=coltype, nullable=not not_nullable,
- default=default, autoincrement=autoincrement,
- primary_key=primary_key)
- columns.append(column_info)
- return columns
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- schema_sel = schema or self.default_schema_name
- c = connection.execute(
- """select t1.constrname as cons_name,
- t4.colname as local_column, t7.tabname as remote_table,
- t6.colname as remote_column, t7.owner as remote_owner
- from sysconstraints as t1 , systables as t2 ,
- sysindexes as t3 , syscolumns as t4 ,
- sysreferences as t5 , syscolumns as t6 , systables as t7 ,
- sysconstraints as t8 , sysindexes as t9
- where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
- and t3.tabid = t2.tabid and t3.idxname = t1.idxname
- and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
- t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
- t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16)
- and t5.constrid = t1.constrid and t8.constrid = t5.primary
- and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
- t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
- t9.part11, t9.part11, t9.part12, t9.part13, t9.part4, t9.part15, t9.part16) and t9.idxname =
- t8.idxname
- and t7.tabid = t5.ptabid""", table_name, schema_sel)
-
- def fkey_rec():
- return {
- 'name': None,
- 'constrained_columns': [],
- 'referred_schema': None,
- 'referred_table': None,
- 'referred_columns': []
- }
-
- fkeys = util.defaultdict(fkey_rec)
-
- rows = c.fetchall()
- for cons_name, local_column, \
- remote_table, remote_column, remote_owner in rows:
-
- rec = fkeys[cons_name]
- rec['name'] = cons_name
- local_cols, remote_cols = \
- rec['constrained_columns'], rec['referred_columns']
-
- if not rec['referred_table']:
- rec['referred_table'] = remote_table
- if schema is not None:
- rec['referred_schema'] = remote_owner
-
- if local_column not in local_cols:
- local_cols.append(local_column)
- if remote_column not in remote_cols:
- remote_cols.append(remote_column)
-
- return list(fkeys.values())
-
- @reflection.cache
- def get_pk_constraint(self, connection, table_name, schema=None, **kw):
- schema = schema or self.default_schema_name
-
- # Select the column positions from sysindexes for sysconstraints
- data = connection.execute(
- """select t2.*
- from systables as t1, sysindexes as t2, sysconstraints as t3
- where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
- and t2.idxname=t3.idxname and t3.constrtype='P'""",
- table_name, schema
- ).fetchall()
-
- colpositions = set()
-
- for row in data:
- colpos = set([getattr(row, 'part%d' % x) for x in range(1, 16)])
- colpositions |= colpos
-
- if not len(colpositions):
- return {'constrained_columns': [], 'name': None}
-
- # Select the column names using the columnpositions
- # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
- place_holder = ','.join('?' * len(colpositions))
- c = connection.execute(
- """select t1.colname
- from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
- t1.colno in (%s)""" % place_holder,
- table_name, *colpositions
- ).fetchall()
-
- cols = reduce(lambda x, y: list(x) + list(y), c, [])
- return {'constrained_columns': cols, 'name': None}
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema, **kw):
- # TODO: schema...
- c = connection.execute(
- """select t1.*
- from sysindexes as t1 , systables as t2
- where t1.tabid = t2.tabid and t2.tabname=?""",
- table_name)
-
- indexes = []
- for row in c.fetchall():
- colnames = [getattr(row, 'part%d' % x) for x in range(1, 16)]
- colnames = [x for x in colnames if x]
- place_holder = ','.join('?' * len(colnames))
- c = connection.execute(
- """select t1.colname
- from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
- t1.colno in (%s)""" % place_holder,
- table_name, *colnames
- ).fetchall()
- c = reduce(lambda x, y: list(x) + list(y), c, [])
- indexes.append({
- 'name': row.idxname,
- 'unique': row.idxtype.lower() == 'u',
- 'column_names': c
- })
- return indexes
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- schema = schema or self.default_schema_name
- c = connection.execute(
- """select t1.viewtext
- from sysviews as t1 , systables as t2
- where t1.tabid=t2.tabid and t2.tabname=?
- and t2.owner=? order by seqno""",
- view_name, schema).fetchall()
-
- return ''.join([row[0] for row in c])
-
- def _get_default_schema_name(self, connection):
- return connection.execute('select CURRENT_ROLE from systables').scalar()
diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py
deleted file mode 100644
index f2f0d3e80..000000000
--- a/lib/sqlalchemy/dialects/informix/informixdb.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# informix/informixdb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-
-.. dialect:: informix+informixdb
- :name: informixdb
- :dbapi: informixdb
- :connectstring: informix+informixdb://user:password@host/dbname
- :url: http://informixdb.sourceforge.net/
-
-"""
-
-import re
-
-from sqlalchemy.dialects.informix.base import InformixDialect
-from sqlalchemy.engine import default
-
-VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
-
-
-class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
-
- def post_exec(self):
- if self.isinsert:
- self._lastrowid = self.cursor.sqlerrd[1]
-
- def get_lastrowid(self):
- return self._lastrowid
-
-
-class InformixDialect_informixdb(InformixDialect):
- driver = 'informixdb'
- execution_ctx_cls = InformixExecutionContext_informixdb
-
- @classmethod
- def dbapi(cls):
- return __import__('informixdb')
-
- def create_connect_args(self, url):
- if url.host:
- dsn = '%s@%s' % (url.database, url.host)
- else:
- dsn = url.database
-
- if url.username:
- opt = {'user': url.username, 'password': url.password}
- else:
- opt = {}
-
- return ([dsn], opt)
-
- def _get_server_version_info(self, connection):
- # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
- v = VERSION_RE.split(connection.connection.dbms_version)
- return (int(v[1]), int(v[2]), v[3])
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.OperationalError):
- return 'closed the connection' in str(e) \
- or 'connection not open' in str(e)
- else:
- return False
-
-
-dialect = InformixDialect_informixdb
diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py
index 0b81d6df9..7a2dfa60b 100644
--- a/lib/sqlalchemy/dialects/mssql/__init__.py
+++ b/lib/sqlalchemy/dialects/mssql/__init__.py
@@ -1,5 +1,5 @@
# mssql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py
index 167b4e807..95cf42423 100644
--- a/lib/sqlalchemy/dialects/mssql/adodbapi.py
+++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py
@@ -1,5 +1,5 @@
# mssql/adodbapi.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py
index 7621f4aab..522cb5ce3 100644
--- a/lib/sqlalchemy/dialects/mssql/base.py
+++ b/lib/sqlalchemy/dialects/mssql/base.py
@@ -1,5 +1,5 @@
# mssql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -103,22 +103,49 @@ for these types will be issued as DATETIME.
.. _mssql_indexes:
-MSSQL-Specific Index Options
------------------------------
-
-The MSSQL dialect supports special options for :class:`.Index`.
+Clustered Index Support
+-----------------------
-CLUSTERED
-^^^^^^^^^^
+The MSSQL dialect supports clustered indexes (and primary keys) via the
+``mssql_clustered`` option. This option is available to :class:`.Index`,
+:class:`.UniqueConstraint`. and :class:`.PrimaryKeyConstraint`.
-The ``mssql_clustered`` option adds the CLUSTERED keyword to the index::
+To generate a clustered index::
Index("my_index", table.c.x, mssql_clustered=True)
-would render the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``
+which renders the index as ``CREATE CLUSTERED INDEX my_index ON table (x)``.
.. versionadded:: 0.8
+To generate a clustered primary key use::
+
+ Table('my_table', metadata,
+ Column('x', ...),
+ Column('y', ...),
+ PrimaryKeyConstraint("x", "y", mssql_clustered=True))
+
+which will render the table, for example, as::
+
+ CREATE TABLE my_table (x INTEGER NOT NULL, y INTEGER NOT NULL, PRIMARY KEY CLUSTERED (x, y))
+
+Similarly, we can generate a clustered unique constraint using::
+
+ Table('my_table', metadata,
+ Column('x', ...),
+ Column('y', ...),
+ PrimaryKeyConstraint("x"),
+ UniqueConstraint("y", mssql_clustered=True),
+ )
+
+ .. versionadded:: 0.9.2
+
+MSSQL-Specific Index Options
+-----------------------------
+
+In addition to clustering, the MSSQL dialect supports other special options
+for :class:`.Index`.
+
INCLUDE
^^^^^^^
@@ -991,7 +1018,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
text += "UNIQUE "
# handle clustering option
- if index.kwargs.get("mssql_clustered"):
+ if index.dialect_options['mssql']['clustered']:
text += "CLUSTERED "
text += "INDEX %s ON %s (%s)" \
@@ -1001,29 +1028,61 @@ class MSDDLCompiler(compiler.DDLCompiler):
preparer.format_table(index.table),
', '.join(
self.sql_compiler.process(expr,
- include_table=False) for
+ include_table=False, literal_binds=True) for
expr in index.expressions)
)
# handle other included columns
- if index.kwargs.get("mssql_include"):
+ if index.dialect_options['mssql']['include']:
inclusions = [index.table.c[col]
if isinstance(col, util.string_types) else col
- for col in index.kwargs["mssql_include"]]
+ for col in index.dialect_options['mssql']['include']]
text += " INCLUDE (%s)" \
- % ', '.join([preparer.quote(c.name, c.quote)
+ % ', '.join([preparer.quote(c.name)
for c in inclusions])
return text
def visit_drop_index(self, drop):
- return "\nDROP INDEX %s.%s" % (
- self.preparer.quote_identifier(drop.element.table.name),
- self._prepared_index_name(drop.element,
- include_schema=True)
+ return "\nDROP INDEX %s ON %s" % (
+ self._prepared_index_name(drop.element, include_schema=False),
+ self.preparer.format_table(drop.element.table)
)
+ def visit_primary_key_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
+ text = ""
+ if constraint.name is not None:
+ text += "CONSTRAINT %s " % \
+ self.preparer.format_constraint(constraint)
+ text += "PRIMARY KEY "
+
+ if constraint.dialect_options['mssql']['clustered']:
+ text += "CLUSTERED "
+
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name)
+ for c in constraint)
+ text += self.define_constraint_deferrability(constraint)
+ return text
+
+ def visit_unique_constraint(self, constraint):
+ if len(constraint) == 0:
+ return ''
+ text = ""
+ if constraint.name is not None:
+ text += "CONSTRAINT %s " % \
+ self.preparer.format_constraint(constraint)
+ text += "UNIQUE "
+
+ if constraint.dialect_options['mssql']['clustered']:
+ text += "CLUSTERED "
+
+ text += "(%s)" % ', '.join(self.preparer.quote(c.name)
+ for c in constraint)
+ text += self.define_constraint_deferrability(constraint)
+ return text
class MSIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = RESERVED_WORDS
@@ -1035,7 +1094,7 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer):
def _escape_identifier(self, value):
return value
- def quote_schema(self, schema, force=True):
+ def quote_schema(self, schema, force=None):
"""Prepare a quoted table and schema name."""
result = '.'.join([self.quote(x, force) for x in schema.split('.')])
return result
@@ -1105,6 +1164,19 @@ class MSDialect(default.DefaultDialect):
type_compiler = MSTypeCompiler
preparer = MSIdentifierPreparer
+ construct_arguments = [
+ (sa_schema.PrimaryKeyConstraint, {
+ "clustered": False
+ }),
+ (sa_schema.UniqueConstraint, {
+ "clustered": False
+ }),
+ (sa_schema.Index, {
+ "clustered": False,
+ "include": None
+ })
+ ]
+
def __init__(self,
query_timeout=None,
use_scope_identity=True,
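
Pulling the documentation examples from this hunk together, a short sketch of the new ``mssql_clustered`` option alongside the existing ``mssql_include`` option (table and index names here are illustrative, not from the commit)::

    from sqlalchemy import (MetaData, Table, Column, Integer, Index,
                            PrimaryKeyConstraint)
    from sqlalchemy.schema import CreateTable
    from sqlalchemy.dialects import mssql

    metadata = MetaData()

    # clustered primary key; renders PRIMARY KEY CLUSTERED (x, y)
    my_table = Table(
        'my_table', metadata,
        Column('x', Integer, nullable=False),
        Column('y', Integer, nullable=False),
        PrimaryKeyConstraint('x', 'y', mssql_clustered=True),
    )

    # non-clustered index that INCLUDEs an extra column
    Index('ix_my_table_y', my_table.c.y, mssql_include=['x'])

    # print the CREATE TABLE statement as the MSSQL dialect would emit it
    print(CreateTable(my_table).compile(dialect=mssql.dialect()))
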
diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py
index 7ac1b703e..26e70f7f0 100644
--- a/lib/sqlalchemy/dialects/mssql/information_schema.py
+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py
@@ -1,5 +1,5 @@
# mssql/information_schema.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py
index ddf31100c..5b686c47a 100644
--- a/lib/sqlalchemy/dialects/mssql/mxodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py
@@ -1,5 +1,5 @@
# mssql/mxodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py
index b916612fb..021219cb9 100644
--- a/lib/sqlalchemy/dialects/mssql/pymssql.py
+++ b/lib/sqlalchemy/dialects/mssql/pymssql.py
@@ -1,5 +1,5 @@
# mssql/pymssql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -86,6 +86,7 @@ class MSDialect_pymssql(MSDialect):
def is_disconnect(self, e, connection, cursor):
for msg in (
"Adaptive Server connection timed out",
+ "Net-Lib error during Connection reset by peer",
"message 20003", # connection timeout
"Error 10054",
"Not connected to any MS SQL server",
diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py
index 5a359d179..8c43eb8a1 100644
--- a/lib/sqlalchemy/dialects/mssql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py
@@ -1,5 +1,5 @@
# mssql/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -116,8 +116,8 @@ from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
import decimal
+class _ms_numeric_pyodbc(object):
-class _MSNumeric_pyodbc(sqltypes.Numeric):
"""Turns Decimals with adjusted() < 0 or > 7 into strings.
The routines here are needed for older pyodbc versions
@@ -127,7 +127,7 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
def bind_processor(self, dialect):
- super_process = super(_MSNumeric_pyodbc, self).\
+ super_process = super(_ms_numeric_pyodbc, self).\
bind_processor(dialect)
if not dialect._need_decimal_fix:
@@ -180,6 +180,11 @@ class _MSNumeric_pyodbc(sqltypes.Numeric):
[str(s) for s in _int][0:value.adjusted() + 1]))
return result
+class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
+ pass
+
+class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
+ pass
class MSExecutionContext_pyodbc(MSExecutionContext):
_embedded_scope_identity = False
@@ -238,7 +243,8 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect):
colspecs = util.update_copy(
MSDialect.colspecs,
{
- sqltypes.Numeric: _MSNumeric_pyodbc
+ sqltypes.Numeric: _MSNumeric_pyodbc,
+ sqltypes.Float: _MSFloat_pyodbc
}
)
diff --git a/lib/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
index 9bf31e20c..706eef3a4 100644
--- a/lib/sqlalchemy/dialects/mssql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mssql/zxjdbc.py
@@ -1,5 +1,5 @@
# mssql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/__init__.py b/lib/sqlalchemy/dialects/mysql/__init__.py
index 2bb636ff3..4eb8cc6d2 100644
--- a/lib/sqlalchemy/dialects/mysql/__init__.py
+++ b/lib/sqlalchemy/dialects/mysql/__init__.py
@@ -1,5 +1,5 @@
# mysql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index d5e33c802..e45f6ecd8 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -1,5 +1,5 @@
# mysql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -20,6 +20,8 @@ example, they won't work in SQLAlchemy either.
See the official MySQL documentation for detailed information about features
supported in any given server release.
+.. _mysql_connection_timeouts:
+
Connection Timeouts
-------------------
@@ -263,6 +265,41 @@ http://dev.mysql.com/doc/refman/5.0/en/create-index.html
http://dev.mysql.com/doc/refman/5.0/en/create-table.html
+.. _mysql_foreign_keys:
+
+MySQL Foreign Key Options
+-------------------------
+
+MySQL does not support the foreign key arguments "DEFERRABLE", "INITIALLY",
+or "MATCH". Using the ``deferrable`` or ``initially`` keyword argument with
+:class:`.ForeignKeyConstraint` or :class:`.ForeignKey` will have the effect of these keywords being
+rendered in a DDL expression, which will then raise an error on MySQL.
+In order to use these keywords on a foreign key while having them ignored
+on a MySQL backend, use a custom compile rule::
+
+ from sqlalchemy.ext.compiler import compiles
+ from sqlalchemy.schema import ForeignKeyConstraint
+
+ @compiles(ForeignKeyConstraint, "mysql")
+ def process(element, compiler, **kw):
+ element.deferrable = element.initially = None
+ return compiler.visit_foreign_key_constraint(element, **kw)
+
+.. versionchanged:: 0.9.0 - the MySQL backend no longer silently ignores
+ the ``deferrable`` or ``initially`` keyword arguments of :class:`.ForeignKeyConstraint`
+ and :class:`.ForeignKey`.
+
+The "MATCH" keyword is in fact more insidious, and is explicitly disallowed
+by SQLAlchemy in conjunction with the MySQL backend. This argument is silently
+ignored by MySQL, but in addition has the effect of ON UPDATE and ON DELETE options
+also being ignored by the backend. Therefore MATCH should never be used with the
+MySQL backend; as is the case with DEFERRABLE and INITIALLY, custom compilation
+rules can be used to correct a MySQL ForeignKeyConstraint at DDL definition time.
+
+.. versionadded:: 0.9.0 - the MySQL backend will raise a :class:`.CompileError`
+ when the ``match`` keyword is used with :class:`.ForeignKeyConstraint`
+ or :class:`.ForeignKey`.
+
"""
import datetime
@@ -318,10 +355,20 @@ RESERVED_WORDS = set(
'union', 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use',
'using', 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary',
'varchar', 'varcharacter', 'varying', 'when', 'where', 'while', 'with',
+
'write', 'x509', 'xor', 'year_month', 'zerofill', # 5.0
+
'columns', 'fields', 'privileges', 'soname', 'tables', # 4.1
+
'accessible', 'linear', 'master_ssl_verify_server_cert', 'range',
'read_only', 'read_write', # 5.1
+
+ 'general', 'ignore_server_ids', 'master_heartbeat_period', 'maxvalue',
+ 'resignal', 'signal', 'slow', # 5.5
+
+ 'get', 'io_after_gtids', 'io_before_gtids', 'master_bind', 'one_shot',
+ 'partition', 'sql_after_gtids', 'sql_before_gtids', # 5.6
+
])
AUTOCOMMIT_RE = re.compile(
@@ -333,13 +380,21 @@ SET_RE = re.compile(
class _NumericType(object):
- """Base for MySQL numeric types."""
+ """Base for MySQL numeric types.
+
+ This is the base both for NUMERIC as well as INTEGER, hence
+ it's a mixin.
+
+ """
def __init__(self, unsigned=False, zerofill=False, **kw):
self.unsigned = unsigned
self.zerofill = zerofill
super(_NumericType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_NumericType, sqltypes.Numeric])
class _FloatType(_NumericType, sqltypes.Float):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
@@ -351,22 +406,27 @@ class _FloatType(_NumericType, sqltypes.Float):
raise exc.ArgumentError(
"You must specify both precision and scale or omit "
"both altogether.")
-
super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw)
self.scale = scale
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_FloatType, _NumericType, sqltypes.Float])
class _IntegerType(_NumericType, sqltypes.Integer):
def __init__(self, display_width=None, **kw):
self.display_width = display_width
super(_IntegerType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_IntegerType, _NumericType, sqltypes.Integer])
class _StringType(sqltypes.String):
"""Base for MySQL string types."""
def __init__(self, charset=None, collation=None,
- ascii=False, binary=False,
+ ascii=False, binary=False, unicode=False,
national=False, **kw):
self.charset = charset
@@ -374,16 +434,14 @@ class _StringType(sqltypes.String):
kw.setdefault('collation', kw.pop('collate', collation))
self.ascii = ascii
- # We have to munge the 'unicode' param strictly as a dict
- # otherwise 2to3 will turn it into str.
- self.__dict__['unicode'] = kw.get('unicode', False)
- # sqltypes.String does not accept the 'unicode' arg at all.
- if 'unicode' in kw:
- del kw['unicode']
+ self.unicode = unicode
self.binary = binary
self.national = national
super(_StringType, self).__init__(**kw)
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[_StringType, sqltypes.String])
class NUMERIC(_NumericType, sqltypes.NUMERIC):
"""MySQL NUMERIC type."""
@@ -443,6 +501,14 @@ class DOUBLE(_FloatType):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a DOUBLE.
+ .. note::
+
+ The :class:`.DOUBLE` type by default converts from float
+ to Decimal, using a truncation that defaults to 10 digits. Specify
+ either ``scale=n`` or ``decimal_return_scale=n`` in order to change
+ this scale, or ``asdecimal=False`` to return values directly as
+ Python floating points.
+
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
@@ -468,6 +534,14 @@ class REAL(_FloatType, sqltypes.REAL):
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
"""Construct a REAL.
+ .. note::
+
+ The :class:`.REAL` type by default converts from float
+ to Decimal, using a truncation that defaults to 10 digits. Specify
+ either ``scale=n`` or ``decimal_return_scale=n`` in order to change
+ this scale, or ``asdecimal=False`` to return values directly as
+ Python floating points.
+
:param precision: Total digits in this number. If scale and precision
are both None, values are stored to limits allowed by the server.
@@ -902,6 +976,25 @@ class CHAR(_StringType, sqltypes.CHAR):
"""
super(CHAR, self).__init__(length=length, **kwargs)
+ @classmethod
+ def _adapt_string_for_cast(self, type_):
+ # copy the given string type into a CHAR
+ # for the purposes of rendering a CAST expression
+ type_ = sqltypes.to_instance(type_)
+ if isinstance(type_, sqltypes.CHAR):
+ return type_
+ elif isinstance(type_, _StringType):
+ return CHAR(
+ length=type_.length,
+ charset=type_.charset,
+ collation=type_.collation,
+ ascii=type_.ascii,
+ binary=type_.binary,
+ unicode=type_.unicode,
+ national=False # not supported in CAST
+ )
+ else:
+ return CHAR(length=type_.length)
class NVARCHAR(_StringType, sqltypes.NVARCHAR):
"""MySQL NVARCHAR type.
@@ -972,8 +1065,49 @@ class LONGBLOB(sqltypes._Binary):
__visit_name__ = 'LONGBLOB'
+class _EnumeratedValues(_StringType):
+ def _init_values(self, values, kw):
+ self.quoting = kw.pop('quoting', 'auto')
-class ENUM(sqltypes.Enum, _StringType):
+ if self.quoting == 'auto' and len(values):
+ # What quoting character are we using?
+ q = None
+ for e in values:
+ if len(e) == 0:
+ self.quoting = 'unquoted'
+ break
+ elif q is None:
+ q = e[0]
+
+ if len(e) == 1 or e[0] != q or e[-1] != q:
+ self.quoting = 'unquoted'
+ break
+ else:
+ self.quoting = 'quoted'
+
+ if self.quoting == 'quoted':
+ util.warn_deprecated(
+ 'Manually quoting %s value literals is deprecated. Supply '
+ 'unquoted values and use the quoting= option in cases of '
+ 'ambiguity.' % self.__class__.__name__)
+
+ values = self._strip_values(values)
+
+ self._enumerated_values = values
+ length = max([len(v) for v in values] + [0])
+ return values, length
+
+ @classmethod
+ def _strip_values(cls, values):
+ strip_values = []
+ for a in values:
+ if a[0:1] == '"' or a[0:1] == "'":
+ # strip enclosing quotes and unquote interior
+ a = a[1:-1].replace(a[0] * 2, a[0])
+ strip_values.append(a)
+ return strip_values
+
+class ENUM(sqltypes.Enum, _EnumeratedValues):
"""MySQL ENUM type."""
__visit_name__ = 'ENUM'
@@ -981,9 +1115,9 @@ class ENUM(sqltypes.Enum, _StringType):
def __init__(self, *enums, **kw):
"""Construct an ENUM.
- Example:
+ E.g.::
- Column('myenum', MSEnum("foo", "bar", "baz"))
+ Column('myenum', ENUM("foo", "bar", "baz"))
:param enums: The range of valid values for this ENUM. Values will be
quoted when generating the schema according to the quoting flag (see
@@ -1027,33 +1161,8 @@ class ENUM(sqltypes.Enum, _StringType):
literals for you. This is a transitional option.
"""
- self.quoting = kw.pop('quoting', 'auto')
-
- if self.quoting == 'auto' and len(enums):
- # What quoting character are we using?
- q = None
- for e in enums:
- if len(e) == 0:
- self.quoting = 'unquoted'
- break
- elif q is None:
- q = e[0]
-
- if e[0] != q or e[-1] != q:
- self.quoting = 'unquoted'
- break
- else:
- self.quoting = 'quoted'
-
- if self.quoting == 'quoted':
- util.warn_deprecated(
- 'Manually quoting ENUM value literals is deprecated. Supply '
- 'unquoted values and use the quoting= option in cases of '
- 'ambiguity.')
- enums = self._strip_enums(enums)
-
+ values, length = self._init_values(enums, kw)
self.strict = kw.pop('strict', False)
- length = max([len(v) for v in enums] + [0])
kw.pop('metadata', None)
kw.pop('schema', None)
kw.pop('name', None)
@@ -1061,17 +1170,11 @@ class ENUM(sqltypes.Enum, _StringType):
kw.pop('native_enum', None)
kw.pop('inherit_schema', None)
_StringType.__init__(self, length=length, **kw)
- sqltypes.Enum.__init__(self, *enums)
+ sqltypes.Enum.__init__(self, *values)
- @classmethod
- def _strip_enums(cls, enums):
- strip_enums = []
- for a in enums:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_enums.append(a)
- return strip_enums
+ def __repr__(self):
+ return util.generic_repr(self,
+ to_inspect=[ENUM, _StringType, sqltypes.Enum])
def bind_processor(self, dialect):
super_convert = super(ENUM, self).bind_processor(dialect)
@@ -1091,7 +1194,7 @@ class ENUM(sqltypes.Enum, _StringType):
return sqltypes.Enum.adapt(self, impltype, **kw)
-class SET(_StringType):
+class SET(_EnumeratedValues):
"""MySQL SET type."""
__visit_name__ = 'SET'
@@ -1099,15 +1202,16 @@ class SET(_StringType):
def __init__(self, *values, **kw):
"""Construct a SET.
- Example::
+ E.g.::
- Column('myset', MSSet("'foo'", "'bar'", "'baz'"))
+ Column('myset', SET("foo", "bar", "baz"))
:param values: The range of valid values for this SET. Values will be
- used exactly as they appear when generating schemas. Strings must
- be quoted, as in the example above. Single-quotes are suggested for
- ANSI compatibility and are required for portability to servers with
- ANSI_QUOTES enabled.
+ quoted when generating the schema according to the quoting flag (see
+ below).
+
+ .. versionchanged:: 0.9.0 quoting is applied automatically to
+ :class:`.mysql.SET` in the same way as for :class:`.mysql.ENUM`.
:param charset: Optional, a column-level character set for this string
value. Takes precedence to 'ascii' or 'unicode' short-hand.
@@ -1126,18 +1230,27 @@ class SET(_StringType):
BINARY in schema. This does not affect the type of data stored,
only the collation of character data.
- """
- self._ddl_values = values
+ :param quoting: Defaults to 'auto': automatically determine enum value
+ quoting. If all enum values are surrounded by the same quoting
+ character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
- strip_values = []
- for a in values:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_values.append(a)
+ 'quoted': values in enums are already quoted, they will be used
+ directly when generating the schema - this usage is deprecated.
+
+ 'unquoted': values in enums are not quoted, they will be escaped and
+ surrounded by single quotes when generating the schema.
+
+ Previous versions of this type always required manually quoted
+ values to be supplied; future versions will always quote the string
+ literals for you. This is a transitional option.
+
+ .. versionadded:: 0.9.0
+
+ """
+ values, length = self._init_values(values, kw)
+ self.values = tuple(values)
- self.values = strip_values
- kw.setdefault('length', max([len(v) for v in strip_values] + [0]))
+ kw.setdefault('length', length)
super(SET, self).__init__(**kw)
def result_processor(self, dialect, coltype):
@@ -1209,6 +1322,9 @@ MSFloat = FLOAT
MSInteger = INTEGER
colspecs = {
+ _IntegerType: _IntegerType,
+ _NumericType: _NumericType,
+ _FloatType: _FloatType,
sqltypes.Numeric: NUMERIC,
sqltypes.Float: FLOAT,
sqltypes.Time: TIME,
@@ -1300,14 +1416,9 @@ class MySQLCompiler(compiler.SQLCompiler):
elif isinstance(type_, (sqltypes.DECIMAL, sqltypes.DateTime,
sqltypes.Date, sqltypes.Time)):
return self.dialect.type_compiler.process(type_)
- elif isinstance(type_, sqltypes.Text):
- return 'CHAR'
- elif (isinstance(type_, sqltypes.String) and not
- isinstance(type_, (ENUM, SET))):
- if getattr(type_, 'length'):
- return 'CHAR(%s)' % type_.length
- else:
- return 'CHAR'
+ elif isinstance(type_, sqltypes.String) and not isinstance(type_, (ENUM, SET)):
+ adapted = CHAR._adapt_string_for_cast(type_)
+ return self.dialect.type_compiler.process(adapted)
elif isinstance(type_, sqltypes._Binary):
return 'BINARY'
elif isinstance(type_, sqltypes.NUMERIC):
@@ -1359,10 +1470,10 @@ class MySQLCompiler(compiler.SQLCompiler):
self.process(join.onclause, **kwargs)))
def for_update_clause(self, select):
- if select.for_update == 'read':
- return ' LOCK IN SHARE MODE'
+ if select._for_update_arg.read:
+ return " LOCK IN SHARE MODE"
else:
- return super(MySQLCompiler, self).for_update_clause(select)
+ return " FOR UPDATE"
def limit_clause(self, select):
# MySQL supports:
@@ -1426,9 +1537,9 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
constraint_string = super(
MySQLDDLCompiler, self).create_table_constraints(table)
- engine_key = '%s_engine' % self.dialect.name
- is_innodb = engine_key in table.kwargs and \
- table.kwargs[engine_key].lower() == 'innodb'
+ # why self.dialect.name and not 'mysql'? because of drizzle
+ is_innodb = 'engine' in table.dialect_options[self.dialect.name] and \
+ table.dialect_options[self.dialect.name]['engine'].lower() == 'innodb'
auto_inc_column = table._autoincrement_column
@@ -1439,7 +1550,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
constraint_string += ", \n\t"
constraint_string += "KEY %s (%s)" % (
self.preparer.quote(
- "idx_autoinc_%s" % auto_inc_column.name, None
+ "idx_autoinc_%s" % auto_inc_column.name
),
self.preparer.format_column(auto_inc_column)
)
@@ -1511,7 +1622,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
self._verify_index_table(index)
preparer = self.preparer
table = preparer.format_table(index.table)
- columns = [self.sql_compiler.process(expr, include_table=False)
+ columns = [self.sql_compiler.process(expr, include_table=False,
+ literal_binds=True)
for expr in index.expressions]
name = self._prepared_index_name(index)
@@ -1521,8 +1633,8 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
text += "UNIQUE "
text += "INDEX %s ON %s " % (name, table)
- if 'mysql_length' in index.kwargs:
- length = index.kwargs['mysql_length']
+ length = index.dialect_options['mysql']['length']
+ if length is not None:
if isinstance(length, dict):
# length value can be a (column_name --> integer value) mapping
@@ -1543,19 +1655,18 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
columns = ', '.join(columns)
text += '(%s)' % columns
- if 'mysql_using' in index.kwargs:
- using = index.kwargs['mysql_using']
- text += " USING %s" % (preparer.quote(using, index.quote))
+ using = index.dialect_options['mysql']['using']
+ if using is not None:
+ text += " USING %s" % (preparer.quote(using))
return text
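Illustrative sketch (not part of the patch): the ``mysql_length`` and ``mysql_using`` keyword arguments consumed above via ``dialect_options`` are supplied on the ``Index`` construct; ``mysql_length`` may be a single integer or a per-column mapping. Hypothetical names::

    from sqlalchemy import Table, Column, MetaData, Index, String

    book = Table('book', MetaData(),
                 Column('title', String(255)),
                 Column('summary', String(1000)))
    Index('ix_book_title_summary', book.c.title, book.c.summary,
          mysql_length={'title': 10, 'summary': 30},
          mysql_using='btree')
    # DDL renders roughly as:
    #   CREATE INDEX ix_book_title_summary ON book (title(10), summary(30)) USING btree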
def visit_primary_key_constraint(self, constraint):
text = super(MySQLDDLCompiler, self).\
visit_primary_key_constraint(constraint)
- if "mysql_using" in constraint.kwargs:
- using = constraint.kwargs['mysql_using']
- text += " USING %s" % (
- self.preparer.quote(using, constraint.quote))
+ using = constraint.dialect_options['mysql']['using']
+ if using:
+ text += " USING %s" % (self.preparer.quote(using))
return text
def visit_drop_index(self, drop):
@@ -1584,7 +1695,11 @@ class MySQLDDLCompiler(compiler.DDLCompiler):
(self.preparer.format_table(constraint.table),
qual, const)
- def define_constraint_deferrability(self, constraint):
+ def define_constraint_match(self, constraint):
+ if constraint.match is not None:
+ raise exc.CompileError(
+ "MySQL ignores the 'MATCH' keyword while at the same time "
+ "causes ON UPDATE/ON DELETE clauses to be ignored.")
return ""
class MySQLTypeCompiler(compiler.GenericTypeCompiler):
@@ -1818,7 +1933,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
if not type_.native_enum:
return super(MySQLTypeCompiler, self).visit_enum(type_)
else:
- return self.visit_ENUM(type_)
+ return self._visit_enumerated_values("ENUM", type_, type_.enums)
def visit_BLOB(self, type_):
if type_.length:
@@ -1835,16 +1950,21 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler):
def visit_LONGBLOB(self, type_):
return "LONGBLOB"
- def visit_ENUM(self, type_):
+ def _visit_enumerated_values(self, name, type_, enumerated_values):
quoted_enums = []
- for e in type_.enums:
+ for e in enumerated_values:
quoted_enums.append("'%s'" % e.replace("'", "''"))
- return self._extend_string(type_, {}, "ENUM(%s)" %
- ",".join(quoted_enums))
+ return self._extend_string(type_, {}, "%s(%s)" % (
+ name, ",".join(quoted_enums))
+ )
+
+ def visit_ENUM(self, type_):
+ return self._visit_enumerated_values("ENUM", type_,
+ type_._enumerated_values)
def visit_SET(self, type_):
- return self._extend_string(type_, {}, "SET(%s)" %
- ",".join(type_._ddl_values))
+ return self._visit_enumerated_values("SET", type_,
+ type_._enumerated_values)
def visit_BOOLEAN(self, type):
return "BOOL"
@@ -1871,6 +1991,7 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
return tuple([self.quote_identifier(i) for i in ids if i is not None])
+@log.class_logger
class MySQLDialect(default.DefaultDialect):
"""Details of the MySQL dialect. Not used directly in application code."""
@@ -1902,6 +2023,22 @@ class MySQLDialect(default.DefaultDialect):
_backslash_escapes = True
_server_ansiquotes = False
+ construct_arguments = [
+ (sa_schema.Table, {
+ "*": None
+ }),
+ (sql.Update, {
+ "limit": None
+ }),
+ (sa_schema.PrimaryKeyConstraint, {
+ "using": None
+ }),
+ (sa_schema.Index, {
+ "using": None,
+ "length": None,
+ })
+ ]
+
def __init__(self, isolation_level=None, **kwargs):
kwargs.pop('use_ansiquotes', None) # legacy
default.DefaultDialect.__init__(self, **kwargs)
@@ -2058,7 +2195,6 @@ class MySQLDialect(default.DefaultDialect):
rs.close()
def initialize(self, connection):
- default.DefaultDialect.initialize(self, connection)
self._connection_charset = self._detect_charset(connection)
self._detect_ansiquotes(connection)
if self._server_ansiquotes:
@@ -2067,6 +2203,8 @@ class MySQLDialect(default.DefaultDialect):
self.identifier_preparer = self.preparer(self,
server_ansiquotes=self._server_ansiquotes)
+ default.DefaultDialect.initialize(self, connection)
+
@property
def _supports_cast(self):
return self.server_version_info is None or \
@@ -2163,7 +2301,7 @@ class MySQLDialect(default.DefaultDialect):
ref_names = spec['foreign']
con_kw = {}
- for opt in ('name', 'onupdate', 'ondelete'):
+ for opt in ('onupdate', 'ondelete'):
if spec.get(opt, False):
con_kw[opt] = spec[opt]
@@ -2336,6 +2474,7 @@ class MySQLDialect(default.DefaultDialect):
# as of MySQL 5.0.1
self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode
+
def _show_create_table(self, connection, table, charset=None,
full_name=None):
"""Run SHOW CREATE TABLE for a ``Table``."""
@@ -2394,6 +2533,7 @@ class ReflectedState(object):
self.constraints = []
+@log.class_logger
class MySQLTableDefinitionParser(object):
"""Parses the results of a SHOW CREATE TABLE statement."""
@@ -2558,8 +2698,8 @@ class MySQLTableDefinitionParser(object):
if spec.get(kw, False):
type_kw[kw] = spec[kw]
- if type_ == 'enum':
- type_args = ENUM._strip_enums(type_args)
+ if issubclass(col_type, _EnumeratedValues):
+ type_args = _EnumeratedValues._strip_values(type_args)
type_instance = col_type(*type_args, **type_kw)
@@ -2733,7 +2873,7 @@ class MySQLTableDefinitionParser(object):
#
# unique constraints come back as KEYs
kw = quotes.copy()
- kw['on'] = 'RESTRICT|CASCASDE|SET NULL|NOACTION'
+ kw['on'] = 'RESTRICT|CASCADE|SET NULL|NOACTION'
self._re_constraint = _re_compile(
r' '
r'CONSTRAINT +'
@@ -2796,8 +2936,6 @@ class MySQLTableDefinitionParser(object):
_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
'PASSWORD', 'CONNECTION')
-log.class_logger(MySQLTableDefinitionParser)
-log.class_logger(MySQLDialect)
class _DecodingRowProxy(object):
diff --git a/lib/sqlalchemy/dialects/mysql/cymysql.py b/lib/sqlalchemy/dialects/mysql/cymysql.py
index deb2de449..e81a79b85 100644
--- a/lib/sqlalchemy/dialects/mysql/cymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/cymysql.py
@@ -1,5 +1,5 @@
# mysql/cymysql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -37,7 +37,9 @@ class MySQLDialect_cymysql(MySQLDialect_mysqldb):
driver = 'cymysql'
description_encoding = None
- supports_sane_rowcount = False
+ supports_sane_rowcount = True
+ supports_sane_multi_rowcount = False
+ supports_unicode_statements = True
colspecs = util.update_copy(
MySQLDialect.colspecs,
diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
index c479e25e0..13203fce3 100644
--- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py
+++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py
@@ -1,5 +1,5 @@
# mysql/gaerdbms.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
index b1906d3b9..b6e7c75fb 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py
@@ -1,5 +1,5 @@
# mysql/mysqlconnector.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -79,12 +79,13 @@ class MySQLDialect_mysqlconnector(MySQLDialect):
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
+
opts.update(url.query)
util.coerce_kw_type(opts, 'buffered', bool)
util.coerce_kw_type(opts, 'raise_on_warnings', bool)
- opts['buffered'] = True
- opts['raise_on_warnings'] = True
+ opts.setdefault('buffered', True)
+ opts.setdefault('raise_on_warnings', True)
# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.
diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py
index 60e68e5ee..84e8299d5 100644
--- a/lib/sqlalchemy/dialects/mysql/mysqldb.py
+++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py
@@ -1,5 +1,5 @@
# mysql/mysqldb.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -56,7 +56,8 @@ from ...connectors.mysqldb import (
MySQLDBIdentifierPreparer,
MySQLDBConnector
)
-
+from .base import TEXT
+from ... import sql
class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext):
pass
@@ -75,4 +76,27 @@ class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
statement_compiler = MySQLCompiler_mysqldb
preparer = MySQLIdentifierPreparer_mysqldb
+ def _check_unicode_returns(self, connection):
+ # work around issue fixed in
+ # https://github.com/farcepest/MySQLdb1/commit/cd44524fef63bd3fcb71947392326e9742d520e8
+ # specific issue w/ the utf8_bin collation and unicode returns
+
+ has_utf8_bin = connection.scalar(
+ "show collation where %s = 'utf8' and %s = 'utf8_bin'"
+ % (
+ self.identifier_preparer.quote("Charset"),
+ self.identifier_preparer.quote("Collation")
+ ))
+ if has_utf8_bin:
+ additional_tests = [
+ sql.collate(sql.cast(
+ sql.literal_column(
+ "'test collated returns'"),
+ TEXT(charset='utf8')), "utf8_bin")
+ ]
+ else:
+ additional_tests = []
+ return super(MySQLDBConnector, self)._check_unicode_returns(
+ connection, additional_tests)
+
dialect = MySQLDialect_mysqldb
diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py
index 77370f91d..e6b50f335 100644
--- a/lib/sqlalchemy/dialects/mysql/oursql.py
+++ b/lib/sqlalchemy/dialects/mysql/oursql.py
@@ -1,5 +1,5 @@
# mysql/oursql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py
index ba48017ac..74de09c4d 100644
--- a/lib/sqlalchemy/dialects/mysql/pymysql.py
+++ b/lib/sqlalchemy/dialects/mysql/pymysql.py
@@ -1,5 +1,5 @@
# mysql/pymysql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py
index af3579665..e60e39cea 100644
--- a/lib/sqlalchemy/dialects/mysql/pyodbc.py
+++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py
@@ -1,5 +1,5 @@
# mysql/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
index 20f2e7359..b5fcfbdaf 100644
--- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py
@@ -1,5 +1,5 @@
# mysql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/oracle/__init__.py b/lib/sqlalchemy/dialects/oracle/__init__.py
index 576790707..070e387d0 100644
--- a/lib/sqlalchemy/dialects/oracle/__init__.py
+++ b/lib/sqlalchemy/dialects/oracle/__init__.py
@@ -1,5 +1,5 @@
# oracle/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py
index 272bd1740..218a7ccfc 100644
--- a/lib/sqlalchemy/dialects/oracle/base.py
+++ b/lib/sqlalchemy/dialects/oracle/base.py
@@ -1,5 +1,5 @@
# oracle/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -16,12 +16,12 @@ Connect Arguments
The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which
affect the behavior of the dialect regardless of driver in use.
-* *use_ansi* - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
+* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
to ``True``. If ``False``, Oracle-8 compatible constructs are used for joins.
-* *optimize_limits* - defaults to ``False``. see the section on LIMIT/OFFSET.
+* ``optimize_limits`` - defaults to ``False``. See the section on LIMIT/OFFSET.
-* *use_binds_for_limits* - defaults to ``True``. see the section on LIMIT/OFFSET.
+* ``use_binds_for_limits`` - defaults to ``True``. See the section on LIMIT/OFFSET.
Auto Increment Behavior
-----------------------
@@ -99,6 +99,41 @@ http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
which installs a select compiler that overrides the generation of limit/offset with
a window function.
+RETURNING Support
+-----------------
+
+The Oracle database supports a limited form of RETURNING, in order to retrieve result
+sets of matched rows from INSERT, UPDATE and DELETE statements. Oracle's
+RETURNING..INTO syntax only supports one row being returned, as it relies upon
+OUT parameters in order to function. In addition, supported DBAPIs have further
+limitations (see :ref:`cx_oracle_returning`).
+
+SQLAlchemy's "implicit returning" feature, which employs RETURNING within an INSERT
+and sometimes an UPDATE statement in order to fetch newly generated primary key values
+and other SQL defaults and expressions, is normally enabled on the Oracle
+backend. By default, "implicit returning" typically only fetches the value of a
+single ``nextval(some_seq)`` expression embedded into an INSERT in order to increment
+a sequence within an INSERT statement and get the value back at the same time.
+To disable this feature across the board, specify ``implicit_returning=False`` to
+:func:`.create_engine`::
+
+ engine = create_engine("oracle://scott:tiger@dsn", implicit_returning=False)
+
+Implicit returning can also be disabled on a table-by-table basis as a table option::
+
+ # Core Table
+ my_table = Table("my_table", metadata, ..., implicit_returning=False)
+
+
+ # declarative
+ class MyClass(Base):
+ __tablename__ = 'my_table'
+ __table_args__ = {"implicit_returning": False}
+
+.. seealso::
+
+ :ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on implicit returning.
+
ON UPDATE CASCADE
-----------------
@@ -133,9 +168,10 @@ Synonym/DBLINK Reflection
-------------------------
When using reflection with Table objects, the dialect can optionally search for tables
-indicated by synonyms that reference DBLINK-ed tables by passing the flag
-oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK
-is not in use this flag should be left off.
+indicated by synonyms, either in local or remote schemas or accessed over DBLINK,
+by passing the flag oracle_resolve_synonyms=True as a
+keyword argument to the Table construct. If synonyms are not in use
+this flag should be left off.
"""
@@ -145,7 +181,7 @@ from sqlalchemy import util, sql
from sqlalchemy.engine import default, base, reflection
from sqlalchemy.sql import compiler, visitors, expression
from sqlalchemy.sql import operators as sql_operators, functions as sql_functions
-from sqlalchemy import types as sqltypes
+from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, \
BLOB, CLOB, TIMESTAMP, FLOAT
@@ -362,7 +398,9 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler):
return self._visit_varchar(type_, '', '')
def _visit_varchar(self, type_, n, num):
- if not n and self.dialect._supports_char_length:
+ if not type_.length:
+ return "%(n)sVARCHAR%(two)s" % {'two': num, 'n': n}
+ elif not n and self.dialect._supports_char_length:
varchar = "VARCHAR%(two)s(%(length)s CHAR)"
return varchar % {'length': type_.length, 'two': num}
else:
@@ -521,7 +559,6 @@ class OracleCompiler(compiler.SQLCompiler):
return self.process(alias.original, **kwargs)
def returning_clause(self, stmt, returning_cols):
-
columns = []
binds = []
for i, column in enumerate(expression._select_iterables(returning_cols)):
@@ -595,7 +632,7 @@ class OracleCompiler(compiler.SQLCompiler):
# If needed, add the ora_rn, and wrap again with offset.
if select._offset is None:
- limitselect.for_update = select.for_update
+ limitselect._for_update_arg = select._for_update_arg
select = limitselect
else:
limitselect = limitselect.column(
@@ -614,7 +651,7 @@ class OracleCompiler(compiler.SQLCompiler):
offsetselect.append_whereclause(
sql.literal_column("ora_rn") > offset_value)
- offsetselect.for_update = select.for_update
+ offsetselect._for_update_arg = select._for_update_arg
select = offsetselect
kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)
@@ -626,10 +663,19 @@ class OracleCompiler(compiler.SQLCompiler):
def for_update_clause(self, select):
if self.is_subquery():
return ""
- elif select.for_update == "nowait":
- return " FOR UPDATE NOWAIT"
- else:
- return super(OracleCompiler, self).for_update_clause(select)
+
+ tmp = ' FOR UPDATE'
+
+ if select._for_update_arg.of:
+ tmp += ' OF ' + ', '.join(
+ self.process(elem) for elem in
+ select._for_update_arg.of
+ )
+
+ if select._for_update_arg.nowait:
+ tmp += " NOWAIT"
+
+ return tmp
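Illustrative sketch (not part of the patch): the OF / NOWAIT rendering above is driven by the 0.9 ``with_for_update()`` API. Hypothetical table::

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.dialects import oracle

    stuff = Table('stuff', MetaData(), Column('id', Integer, primary_key=True))
    stmt = select([stuff.c.id]).with_for_update(of=stuff.c.id, nowait=True)
    # compiles approximately to:
    #   SELECT stuff.id FROM stuff FOR UPDATE OF stuff.id NOWAIT
    print(stmt.compile(dialect=oracle.dialect()))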
class OracleDDLCompiler(compiler.DDLCompiler):
@@ -708,6 +754,10 @@ class OracleDialect(default.DefaultDialect):
reflection_options = ('oracle_resolve_synonyms', )
+ construct_arguments = [
+ (sa_schema.Table, {"resolve_synonyms": False})
+ ]
+
def __init__(self,
use_ansi=True,
optimize_limits=False,
@@ -800,14 +850,15 @@ class OracleDialect(default.DefaultDialect):
returns the actual name, owner, dblink name, and synonym name if found.
"""
- q = "SELECT owner, table_owner, table_name, db_link, synonym_name FROM all_synonyms WHERE "
+ q = "SELECT owner, table_owner, table_name, db_link, "\
+ "synonym_name FROM all_synonyms WHERE "
clauses = []
params = {}
if desired_synonym:
clauses.append("synonym_name = :synonym_name")
params['synonym_name'] = desired_synonym
if desired_owner:
- clauses.append("table_owner = :desired_owner")
+ clauses.append("owner = :desired_owner")
params['desired_owner'] = desired_owner
if desired_table:
clauses.append("table_name = :tname")
diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
index e013799db..599eb21a3 100644
--- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py
+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py
@@ -1,5 +1,5 @@
# oracle/cx_oracle.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -17,11 +17,11 @@ Additional Connect Arguments
When connecting with ``dbname`` present, the host, port, and dbname tokens are
converted to a TNS name using
-the cx_oracle :func:`makedsn()` function. Otherwise, the host token is taken
+the cx_oracle ``makedsn()`` function. Otherwise, the host token is taken
directly as a TNS name.
Additional arguments which may be specified either as query string arguments
-on the URL, or as keyword arguments to :func:`~sqlalchemy.create_engine()` are:
+on the URL, or as keyword arguments to :func:`.create_engine()` are:
* allow_twophase - enable two-phase transactions. Defaults to ``True``.
@@ -65,6 +65,27 @@ of the encoding to be used.
Note that this behavior is disabled when Oracle 8 is detected, as it has been
observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8.
+.. _cx_oracle_returning:
+
+RETURNING Support
+-----------------
+
+cx_oracle supports a limited subset of Oracle's already limited RETURNING support.
+Typically, results can only be guaranteed for at most one column being returned;
+this is the typical case when SQLAlchemy uses RETURNING to get just the value of a
+primary-key-associated sequence value. Additional column expressions will
+cause problems in a non-determinative way, due to cx_oracle's lack of support for
+the OCI_DATA_AT_EXEC API which is required for more complex RETURNING scenarios.
+
+.. seealso::
+
+ http://docs.oracle.com/cd/B10501_01/appdev.920/a96584/oci05bnd.htm#420693 - OCI documentation for RETURNING
+
+ http://sourceforge.net/mailarchive/message.php?msg_id=31338136 - cx_oracle developer commentary
+
+
+
+
LOB Objects
-----------
@@ -75,7 +96,7 @@ like result.fetchmany() and result.fetchall(). This means that by default, LOB
objects are fully fetched unconditionally by SQLAlchemy, and the linkage to a live
cursor is broken.
-To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_engine()`.
+To disable this processing, pass ``auto_convert_lobs=False`` to :func:`.create_engine()`.
Two Phase Transaction Support
-----------------------------
@@ -108,7 +129,7 @@ the application can make one of several choices:
* For ad-hoc two-phase operations without disabling pooling, the DBAPI
connection in use can be evicted from the connection pool using the
- :class:`.Connection.detach` method.
+ :meth:`.Connection.detach` method.
.. versionchanged:: 0.8.0b2,0.7.10
Support for cx_oracle prepared transactions has been implemented
@@ -211,10 +232,7 @@ class _OracleNumeric(sqltypes.Numeric):
if dialect.supports_native_decimal:
if self.asdecimal:
- if self.scale is None:
- fstring = "%.10f"
- else:
- fstring = "%%.%df" % self.scale
+ fstring = "%%.%df" % self._effective_decimal_return_scale
def to_decimal(value):
if value is None:
@@ -362,7 +380,8 @@ class _OracleRowid(oracle.ROWID):
class OracleCompiler_cx_oracle(OracleCompiler):
- def bindparam_string(self, name, quote=None, **kw):
+ def bindparam_string(self, name, **kw):
+ quote = getattr(name, 'quote', None)
if quote is True or quote is not False and \
self.preparer._bindparam_requires_quotes(name):
quoted_name = '"%s"' % name
@@ -506,7 +525,6 @@ class ReturningResultProxy(_result.FullyBufferedResultProxy):
def _cursor_description(self):
returning = self.context.compiled.returning
-
return [
("ret_%d" % i, None)
for i, col in enumerate(returning)
@@ -730,9 +748,6 @@ class OracleDialect_cx_oracle(OracleDialect):
255,
outconverter=self._detect_decimal,
arraysize=cursor.arraysize)
- # allow all strings to come back natively as Unicode
- elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
- return cursor.var(util.text_type, size, cursor.arraysize)
def on_connect(conn):
conn.outputtypehandler = output_type_handler
@@ -805,8 +820,9 @@ class OracleDialect_cx_oracle(OracleDialect):
# ORA-03113: end-of-file on communication channel
# ORA-03135: connection lost contact
# ORA-01033: ORACLE initialization or shutdown in progress
+ # ORA-02396: exceeded maximum idle time, please connect again
# TODO: Others ?
- return error.code in (28, 3114, 3113, 3135, 1033)
+ return error.code in (28, 3114, 3113, 3135, 1033, 2396)
else:
return False
diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
index ad53b89a1..710645b23 100644
--- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py
@@ -1,5 +1,5 @@
# oracle/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgres.py b/lib/sqlalchemy/dialects/postgres.py
index 82d1a39c2..6ed7e18bc 100644
--- a/lib/sqlalchemy/dialects/postgres.py
+++ b/lib/sqlalchemy/dialects/postgres.py
@@ -1,5 +1,5 @@
# dialects/postgres.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py
index 408b67846..180e9fc7e 100644
--- a/lib/sqlalchemy/dialects/postgresql/__init__.py
+++ b/lib/sqlalchemy/dialects/postgresql/__init__.py
@@ -1,5 +1,5 @@
# postgresql/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -11,9 +11,11 @@ base.dialect = psycopg2.dialect
from .base import \
INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, \
INET, CIDR, UUID, BIT, MACADDR, DOUBLE_PRECISION, TIMESTAMP, TIME, \
- DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All
+ DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect, array, Any, All, \
+ TSVECTOR
from .constraints import ExcludeConstraint
from .hstore import HSTORE, hstore
+from .json import JSON, JSONElement
from .ranges import INT4RANGE, INT8RANGE, NUMRANGE, DATERANGE, TSRANGE, \
TSTZRANGE
@@ -23,5 +25,5 @@ __all__ = (
'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA', 'BOOLEAN',
'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'Any', 'All', 'array', 'HSTORE',
'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
- 'TSRANGE', 'TSTZRANGE'
+ 'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONElement'
)
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 6ccf7190e..11bd3830d 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -1,5 +1,5 @@
# postgresql/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -131,6 +131,44 @@ use the :meth:`._UpdateBase.returning` method on a per-statement basis::
where(table.c.name=='foo')
print result.fetchall()
+.. _postgresql_match:
+
+Full Text Search
+----------------
+
+SQLAlchemy makes available the Postgresql ``@@`` operator via the
+:meth:`.ColumnElement.match` method on any textual column expression.
+On a Postgresql dialect, an expression like the following::
+
+ select([sometable.c.text.match("search string")])
+
+will emit to the database::
+
+ SELECT text @@ to_tsquery('search string') FROM table
+
+The Postgresql text search functions such as ``to_tsquery()``
+and ``to_tsvector()`` are available
+explicitly using the standard :attr:`.func` construct. For example::
+
+ select([
+ func.to_tsvector('fat cats ate rats').match('cat & rat')
+ ])
+
+Emits the equivalent of::
+
+ SELECT to_tsvector('fat cats ate rats') @@ to_tsquery('cat & rat')
+
+The :class:`.postgresql.TSVECTOR` type can provide for explicit CAST::
+
+ from sqlalchemy.dialects.postgresql import TSVECTOR
+ from sqlalchemy import select, cast
+ select([cast("some text", TSVECTOR)])
+
+produces a statement equivalent to::
+
+ SELECT CAST('some text' AS TSVECTOR) AS anon_1
+
+
FROM ONLY ...
------------------------
@@ -210,7 +248,7 @@ import re
from ... import sql, schema, exc, util
from ...engine import default, reflection
-from ...sql import compiler, expression, util as sql_util, operators
+from ...sql import compiler, expression, operators
from ... import types as sqltypes
try:
@@ -230,7 +268,7 @@ RESERVED_WORDS = set(
"default", "deferrable", "desc", "distinct", "do", "else", "end",
"except", "false", "fetch", "for", "foreign", "from", "grant", "group",
"having", "in", "initially", "intersect", "into", "leading", "limit",
- "localtime", "localtimestamp", "new", "not", "null", "off", "offset",
+ "localtime", "localtimestamp", "new", "not", "null", "of", "off", "offset",
"old", "on", "only", "or", "order", "placing", "primary", "references",
"returning", "select", "session_user", "some", "symmetric", "table",
"then", "to", "trailing", "true", "union", "unique", "user", "using",
@@ -368,6 +406,23 @@ class UUID(sqltypes.TypeEngine):
PGUuid = UUID
+class TSVECTOR(sqltypes.TypeEngine):
+ """The :class:`.postgresql.TSVECTOR` type implements the Postgresql
+ text search type TSVECTOR.
+
+ It can be used to do full text queries on natural language
+ documents.
+
+ .. versionadded:: 0.9.0
+
+ .. seealso::
+
+ :ref:`postgresql_match`
+
+ """
+ __visit_name__ = 'TSVECTOR'
+
+
class _Slice(expression.ColumnElement):
__visit_name__ = 'slice'
@@ -913,6 +968,7 @@ ischema_names = {
'interval': INTERVAL,
'interval year to month': INTERVAL,
'interval day to second': INTERVAL,
+ 'tsvector' : TSVECTOR
}
@@ -954,25 +1010,30 @@ class PGCompiler(compiler.SQLCompiler):
def visit_ilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
+
return '%s ILIKE %s' % \
(self.process(binary.left, **kw),
self.process(binary.right, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def visit_notilike_op_binary(self, binary, operator, **kw):
escape = binary.modifiers.get("escape", None)
return '%s NOT ILIKE %s' % \
(self.process(binary.left, **kw),
self.process(binary.right, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
+ + (
+ ' ESCAPE ' +
+ self.render_literal_value(escape, sqltypes.STRINGTYPE)
+ if escape else ''
+ )
def render_literal_value(self, value, type_):
value = super(PGCompiler, self).render_literal_value(value, type_)
- # TODO: need to inspect "standard_conforming_strings"
+
if self.dialect._backslash_escapes:
value = value.replace('\\', '\\\\')
return value
@@ -1009,14 +1070,25 @@ class PGCompiler(compiler.SQLCompiler):
return ""
def for_update_clause(self, select):
- if select.for_update == 'nowait':
- return " FOR UPDATE NOWAIT"
- elif select.for_update == 'read':
- return " FOR SHARE"
- elif select.for_update == 'read_nowait':
- return " FOR SHARE NOWAIT"
+
+ if select._for_update_arg.read:
+ tmp = " FOR SHARE"
else:
- return super(PGCompiler, self).for_update_clause(select)
+ tmp = " FOR UPDATE"
+
+ if select._for_update_arg.of:
+ tables = util.OrderedSet(
+ c.table if isinstance(c, expression.ColumnClause)
+ else c for c in select._for_update_arg.of)
+ tmp += " OF " + ", ".join(
+ self.process(table, ashint=True)
+ for table in tables
+ )
+
+ if select._for_update_arg.nowait:
+ tmp += " NOWAIT"
+
+ return tmp
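Illustrative sketch (not part of the patch): the FOR SHARE / OF / NOWAIT combinations above likewise come from the ``with_for_update()`` API. Hypothetical table::

    from sqlalchemy import Table, Column, Integer, MetaData, select
    from sqlalchemy.dialects import postgresql

    stuff = Table('stuff', MetaData(), Column('id', Integer, primary_key=True))
    stmt = select([stuff.c.id]).with_for_update(read=True, of=stuff, nowait=True)
    # compiles approximately to:
    #   SELECT stuff.id FROM stuff FOR SHARE OF stuff NOWAIT
    print(stmt.compile(dialect=postgresql.dialect()))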
def returning_clause(self, stmt, returning_cols):
@@ -1039,12 +1111,15 @@ class PGCompiler(compiler.SQLCompiler):
class PGDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs):
+
colspec = self.preparer.format_column(column)
impl_type = column.type.dialect_impl(self.dialect)
if column.primary_key and \
column is column.table._autoincrement_column and \
- not isinstance(impl_type, sqltypes.SmallInteger) and \
(
+ self.dialect.supports_smallserial or
+ not isinstance(impl_type, sqltypes.SmallInteger)
+ ) and (
column.default is None or
(
isinstance(column.default, schema.Sequence) and
@@ -1052,6 +1127,8 @@ class PGDDLCompiler(compiler.DDLCompiler):
)):
if isinstance(impl_type, sqltypes.BigInteger):
colspec += " BIGSERIAL"
+ elif isinstance(impl_type, sqltypes.SmallInteger):
+ colspec += " SMALLSERIAL"
else:
colspec += " SERIAL"
else:
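Illustrative sketch (not part of the patch): with ``supports_smallserial`` enabled (Postgresql 9.2 and above), a ``SmallInteger`` autoincrementing primary key now renders SMALLSERIAL. Hypothetical table::

    from sqlalchemy import Table, Column, MetaData, SmallInteger

    tiny = Table('tiny', MetaData(),
                 Column('id', SmallInteger, primary_key=True))
    # against a 9.2+ server, CREATE TABLE renders the column roughly as:
    #   id SMALLSERIAL NOT NULL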
@@ -1069,7 +1146,9 @@ class PGDDLCompiler(compiler.DDLCompiler):
return "CREATE TYPE %s AS ENUM (%s)" % (
self.preparer.format_type(type_),
- ",".join("'%s'" % e for e in type_.enums)
+ ", ".join(
+ self.sql_compiler.process(sql.literal(e), literal_binds=True)
+ for e in type_.enums)
)
def visit_drop_enum_type(self, drop):
@@ -1092,31 +1171,29 @@ class PGDDLCompiler(compiler.DDLCompiler):
preparer.format_table(index.table)
)
- if 'postgresql_using' in index.kwargs:
- using = index.kwargs['postgresql_using']
- text += "USING %s " % preparer.quote(using, index.quote)
+ using = index.dialect_options['postgresql']['using']
+ if using:
+ text += "USING %s " % preparer.quote(using)
- ops = index.kwargs.get('postgresql_ops', {})
+ ops = index.dialect_options["postgresql"]["ops"]
text += "(%s)" \
% (
', '.join([
- self.sql_compiler.process(expr, include_table=False) +
-
-
+ self.sql_compiler.process(
+ expr.self_group()
+ if not isinstance(expr, expression.ColumnClause)
+ else expr,
+ include_table=False, literal_binds=True) +
(c.key in ops and (' ' + ops[c.key]) or '')
-
-
for expr, c in zip(index.expressions, index.columns)])
)
- if 'postgresql_where' in index.kwargs:
- whereclause = index.kwargs['postgresql_where']
- else:
- whereclause = None
+ whereclause = index.dialect_options["postgresql"]["where"]
if whereclause is not None:
- whereclause = sql_util.expression_as_ddl(whereclause)
- where_compiled = self.sql_compiler.process(whereclause)
+ where_compiled = self.sql_compiler.process(
+ whereclause, include_table=False,
+ literal_binds=True)
text += " WHERE " + where_compiled
return text
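Illustrative sketch (not part of the patch): the ``postgresql_using``, ``postgresql_ops`` and ``postgresql_where`` keyword arguments consumed above are given on the ``Index`` construct; the operator class name here is hypothetical::

    from sqlalchemy import Table, Column, MetaData, Index, String

    doc = Table('doc', MetaData(), Column('body', String))
    Index('ix_doc_body', doc.c.body,
          postgresql_using='gin',
          postgresql_ops={'body': 'gin_trgm_ops'},
          postgresql_where=doc.c.body != '')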
@@ -1128,16 +1205,20 @@ class PGDDLCompiler(compiler.DDLCompiler):
elements = []
for c in constraint.columns:
op = constraint.operators[c.name]
- elements.append(self.preparer.quote(c.name, c.quote)+' WITH '+op)
+ elements.append(self.preparer.quote(c.name) + ' WITH '+op)
text += "EXCLUDE USING %s (%s)" % (constraint.using, ', '.join(elements))
if constraint.where is not None:
- sqltext = sql_util.expression_as_ddl(constraint.where)
- text += ' WHERE (%s)' % self.sql_compiler.process(sqltext)
+ text += ' WHERE (%s)' % self.sql_compiler.process(
+ constraint.where,
+ literal_binds=True)
text += self.define_constraint_deferrability(constraint)
return text
class PGTypeCompiler(compiler.GenericTypeCompiler):
+ def visit_TSVECTOR(self, type):
+ return "TSVECTOR"
+
def visit_INET(self, type_):
return "INET"
@@ -1162,6 +1243,9 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
def visit_HSTORE(self, type_):
return "HSTORE"
+ def visit_JSON(self, type_):
+ return "JSON"
+
def visit_INT4RANGE(self, type_):
return "INT4RANGE"
@@ -1250,9 +1334,9 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer):
if not type_.name:
raise exc.CompileError("Postgresql ENUM type requires a name.")
- name = self.quote(type_.name, type_.quote)
+ name = self.quote(type_.name)
if not self.omit_schema and use_schema and type_.schema is not None:
- name = self.quote_schema(type_.schema, type_.quote) + "." + name
+ name = self.quote_schema(type_.schema) + "." + name
return name
@@ -1328,6 +1412,7 @@ class PGDialect(default.DefaultDialect):
supports_native_enum = True
supports_native_boolean = True
+ supports_smallserial = True
supports_sequences = True
sequences_optional = True
@@ -1349,12 +1434,22 @@ class PGDialect(default.DefaultDialect):
inspector = PGInspector
isolation_level = None
- # TODO: need to inspect "standard_conforming_strings"
+ construct_arguments = [
+ (schema.Index, {
+ "using": False,
+ "where": None,
+ "ops": {}
+ })
+ ]
+
_backslash_escapes = True
- def __init__(self, isolation_level=None, **kwargs):
+ def __init__(self, isolation_level=None, json_serializer=None,
+ json_deserializer=None, **kwargs):
default.DefaultDialect.__init__(self, **kwargs)
self.isolation_level = isolation_level
+ self._json_deserializer = json_deserializer
+ self._json_serializer = json_serializer
def initialize(self, connection):
super(PGDialect, self).initialize(connection)
@@ -1368,6 +1463,13 @@ class PGDialect(default.DefaultDialect):
# psycopg2, others may have placed ENUM here as well
self.colspecs.pop(ENUM, None)
+ # http://www.postgresql.org/docs/9.3/static/release-9-2.html#AEN116689
+ self.supports_smallserial = self.server_version_info >= (9, 2)
+
+ self._backslash_escapes = connection.scalar(
+ "show standard_conforming_strings"
+ ) == 'off'
+
def on_connect(self):
if self.isolation_level is not None:
def connect(conn):
@@ -1515,12 +1617,6 @@ class PGDialect(default.DefaultDialect):
return bool(cursor.first())
def has_type(self, connection, type_name, schema=None):
- bindparams = [
- sql.bindparam('typname',
- util.text_type(type_name), type_=sqltypes.Unicode),
- sql.bindparam('nspname',
- util.text_type(schema), type_=sqltypes.Unicode),
- ]
if schema is not None:
query = """
SELECT EXISTS (
@@ -1530,6 +1626,7 @@ class PGDialect(default.DefaultDialect):
AND n.nspname = :nspname
)
"""
+ query = sql.text(query)
else:
query = """
SELECT EXISTS (
@@ -1538,13 +1635,23 @@ class PGDialect(default.DefaultDialect):
AND pg_type_is_visible(t.oid)
)
"""
- cursor = connection.execute(sql.text(query, bindparams=bindparams))
+ query = sql.text(query)
+ query = query.bindparams(
+ sql.bindparam('typname',
+ util.text_type(type_name), type_=sqltypes.Unicode),
+ )
+ if schema is not None:
+ query = query.bindparams(
+ sql.bindparam('nspname',
+ util.text_type(schema), type_=sqltypes.Unicode),
+ )
+ cursor = connection.execute(query)
return bool(cursor.scalar())
def _get_server_version_info(self, connection):
v = connection.execute("select version()").scalar()
m = re.match(
- '(?:PostgreSQL|EnterpriseDB) '
+ '.*(?:PostgreSQL|EnterpriseDB) '
'(\d+)\.(\d+)(?:\.(\d+))?(?:\.\d+)?(?:devel)?',
v)
if not m:
@@ -1578,12 +1685,10 @@ class PGDialect(default.DefaultDialect):
table_name = util.text_type(table_name)
if schema is not None:
schema = util.text_type(schema)
- s = sql.text(query, bindparams=[
- sql.bindparam('table_name', type_=sqltypes.Unicode),
- sql.bindparam('schema', type_=sqltypes.Unicode)
- ],
- typemap={'oid': sqltypes.Integer}
- )
+ s = sql.text(query).bindparams(table_name=sqltypes.Unicode)
+ s = s.columns(oid=sqltypes.Integer)
+ if schema:
+ s = s.bindparams(sql.bindparam('schema', type_=sqltypes.Unicode))
c = connection.execute(s, table_name=table_name, schema=schema)
table_oid = c.scalar()
if table_oid is None:
@@ -1675,8 +1780,7 @@ class PGDialect(default.DefaultDialect):
SQL_COLS = """
SELECT a.attname,
pg_catalog.format_type(a.atttypid, a.atttypmod),
- (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid)
- for 128)
+ (SELECT pg_catalog.pg_get_expr(d.adbin, d.adrelid)
FROM pg_catalog.pg_attrdef d
WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum
AND a.atthasdef)
@@ -1883,6 +1987,15 @@ class PGDialect(default.DefaultDialect):
n.oid = c.relnamespace
ORDER BY 1
"""
+ # http://www.postgresql.org/docs/9.0/static/sql-createtable.html
+ FK_REGEX = re.compile(
+ r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)'
+ r'[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?'
+ r'[\s]?(ON UPDATE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
+ r'[\s]?(ON DELETE (CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?'
+ r'[\s]?(DEFERRABLE|NOT DEFERRABLE)?'
+ r'[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?'
+ )
t = sql.text(FK_SQL, typemap={
'conname': sqltypes.Unicode,
@@ -1890,15 +2003,18 @@ class PGDialect(default.DefaultDialect):
c = connection.execute(t, table=table_oid)
fkeys = []
for conname, condef, conschema in c.fetchall():
- m = re.search('FOREIGN KEY \((.*?)\) REFERENCES '
- '(?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups()
+ m = re.search(FK_REGEX, condef).groups()
constrained_columns, referred_schema, \
- referred_table, referred_columns = m
+ referred_table, referred_columns, \
+ _, match, _, onupdate, _, ondelete, \
+ deferrable, _, initially = m
+ if deferrable is not None:
+ deferrable = True if deferrable == 'DEFERRABLE' else False
constrained_columns = [preparer._unquote_identifier(x)
for x in re.split(r'\s*,\s*', constrained_columns)]
if referred_schema:
- referred_schema =\
+ referred_schema = \
preparer._unquote_identifier(referred_schema)
elif schema is not None and schema == conschema:
# no schema was returned by pg_get_constraintdef(). This
@@ -1916,7 +2032,14 @@ class PGDialect(default.DefaultDialect):
'constrained_columns': constrained_columns,
'referred_schema': referred_schema,
'referred_table': referred_table,
- 'referred_columns': referred_columns
+ 'referred_columns': referred_columns,
+ 'options': {
+ 'onupdate': onupdate,
+ 'ondelete': ondelete,
+ 'deferrable': deferrable,
+ 'initially': initially,
+ 'match': match
+ }
}
fkeys.append(fkey_d)
return fkeys
@@ -1926,11 +2049,14 @@ class PGDialect(default.DefaultDialect):
table_oid = self.get_table_oid(connection, table_name, schema,
info_cache=kw.get('info_cache'))
+ # cast indkey as varchar since it's an int2vector,
+ # returned as a list by some drivers such as pypostgresql
+
IDX_SQL = """
SELECT
i.relname as relname,
ix.indisunique, ix.indexprs, ix.indpred,
- a.attname, a.attnum, ix.indkey
+ a.attname, a.attnum, ix.indkey::varchar
FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
diff --git a/lib/sqlalchemy/dialects/postgresql/constraints.py b/lib/sqlalchemy/dialects/postgresql/constraints.py
index 5b8bbe643..f45cef1a2 100644
--- a/lib/sqlalchemy/dialects/postgresql/constraints.py
+++ b/lib/sqlalchemy/dialects/postgresql/constraints.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -39,7 +39,7 @@ class ExcludeConstraint(ColumnCollectionConstraint):
:param using:
Optional string. If set, emit USING <index_method> when issuing DDL
for this constraint. Defaults to 'gist'.
-
+
:param where:
Optional string. If set, emit WHERE <predicate> when issuing DDL
for this constraint.
@@ -60,7 +60,7 @@ class ExcludeConstraint(ColumnCollectionConstraint):
where = kw.get('where')
if where:
self.where = expression._literal_as_text(where)
-
+
def copy(self, **kw):
elements = [(col, self.operators[col])
for col in self.columns.keys()]
diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py
index c645e25d2..76562088d 100644
--- a/lib/sqlalchemy/dialects/postgresql/hstore.py
+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py
@@ -1,5 +1,5 @@
# postgresql/hstore.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -144,8 +144,10 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
For usage with the SQLAlchemy ORM, it may be desirable to combine
the usage of :class:`.HSTORE` with :class:`.MutableDict` dictionary
now part of the :mod:`sqlalchemy.ext.mutable`
- extension. This extension will allow in-place changes to dictionary
- values to be detected by the unit of work::
+ extension. This extension will allow "in-place" changes to the
+ dictionary, e.g. addition of new keys or replacement/removal of existing
+ keys to/from the current dictionary, to produce events which will be detected
+ by the unit of work::
from sqlalchemy.ext.mutable import MutableDict
@@ -163,6 +165,11 @@ class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
session.commit()
+ When the :mod:`sqlalchemy.ext.mutable` extension is not used, the ORM
+ will not be alerted to any changes to the contents of an existing dictionary,
+ unless that dictionary value is re-assigned to the HSTORE-attribute itself,
+ thus generating a change event.
+
.. versionadded:: 0.8
.. seealso::
diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py
new file mode 100644
index 000000000..2e29185e8
--- /dev/null
+++ b/lib/sqlalchemy/dialects/postgresql/json.py
@@ -0,0 +1,199 @@
+# postgresql/json.py
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
+#
+# This module is part of SQLAlchemy and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from __future__ import absolute_import
+
+import json
+
+from .base import ischema_names
+from ... import types as sqltypes
+from ...sql.operators import custom_op
+from ... import sql
+from ...sql import elements
+from ... import util
+
+__all__ = ('JSON', 'JSONElement')
+
+
+class JSONElement(elements.BinaryExpression):
+ """Represents accessing an element of a :class:`.JSON` value.
+
+ The :class:`.JSONElement` is produced whenever using the Python index
+ operator on an expression that has the type :class:`.JSON`::
+
+ expr = mytable.c.json_data['some_key']
+
+ The expression typically compiles to a JSON access such as ``col -> key``.
+ Modifiers are then available for typing behavior, including :meth:`.JSONElement.cast`
+ and :attr:`.JSONElement.astext`.
+
+ """
+ def __init__(self, left, right, astext=False, opstring=None, result_type=None):
+ self._astext = astext
+ if opstring is None:
+ if hasattr(right, '__iter__') and \
+ not isinstance(right, util.string_types):
+ opstring = "#>"
+ right = "{%s}" % (", ".join(util.text_type(elem) for elem in right))
+ else:
+ opstring = "->"
+
+ self._json_opstring = opstring
+ operator = custom_op(opstring, precedence=5)
+ right = left._check_literal(left, operator, right)
+ super(JSONElement, self).__init__(left, right, operator, type_=result_type)
+
+ @property
+ def astext(self):
+ """Convert this :class:`.JSONElement` to use the 'astext' operator
+ when evaluated.
+
+ E.g.::
+
+ select([data_table.c.data['some key'].astext])
+
+ .. seealso::
+
+ :meth:`.JSONElement.cast`
+
+ """
+ if self._astext:
+ return self
+ else:
+ return JSONElement(
+ self.left,
+ self.right,
+ astext=True,
+ opstring=self._json_opstring + ">",
+ result_type=sqltypes.String(convert_unicode=True)
+ )
+
+ def cast(self, type_):
+ """Convert this :class:`.JSONElement` to apply both the 'astext' operator
+ as well as an explicit type cast when evaluated.
+
+ E.g.::
+
+ select([data_table.c.data['some key'].cast(Integer)])
+
+ .. seealso::
+
+ :attr:`.JSONElement.astext`
+
+ """
+ if not self._astext:
+ return self.astext.cast(type_)
+ else:
+ return sql.cast(self, type_)
+
+
+class JSON(sqltypes.TypeEngine):
+ """Represent the Postgresql JSON type.
+
+ The :class:`.JSON` type stores arbitrary JSON format data, e.g.::
+
+ data_table = Table('data_table', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('data', JSON)
+ )
+
+ with engine.connect() as conn:
+ conn.execute(
+ data_table.insert(),
+ data = {"key1": "value1", "key2": "value2"}
+ )
+
+ :class:`.JSON` provides several operations:
+
+ * Index operations::
+
+ data_table.c.data['some key']
+
+ * Index operations returning text (required for text comparison)::
+
+ data_table.c.data['some key'].astext == 'some value'
+
+ * Index operations with a built-in CAST call::
+
+ data_table.c.data['some key'].cast(Integer) == 5
+
+ * Path index operations::
+
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')]
+
+ * Path index operations returning text (required for text comparison)::
+
+ data_table.c.data[('key_1', 'key_2', ..., 'key_n')].astext == 'some value'
+
+ Index operations return an instance of :class:`.JSONElement`, which represents
+ an expression such as ``column -> index``. This element then defines
+ methods such as :attr:`.JSONElement.astext` and :meth:`.JSONElement.cast`
+ for setting up type behavior.
+
+ The :class:`.JSON` type, when used with the SQLAlchemy ORM, does not detect
+ in-place mutations to the structure. In order to detect these, the
+ :mod:`sqlalchemy.ext.mutable` extension must be used. This extension will
+ allow "in-place" changes to the datastructure to produce events which
+ will be detected by the unit of work. See the example at :class:`.HSTORE`
+ for a simple example involving a dictionary.
+
+ Custom serializers and deserializers are specified at the dialect level,
+ that is using :func:`.create_engine`. The reason for this is that when
+ using psycopg2, the DBAPI only allows serializers at the per-cursor
+ or per-connection level. E.g.::
+
+ engine = create_engine("postgresql://scott:tiger@localhost/test",
+ json_serializer=my_serialize_fn,
+ json_deserializer=my_deserialize_fn
+ )
+
+ When using the psycopg2 dialect, the json_deserializer is registered
+ against the database using ``psycopg2.extras.register_default_json``.
+
+ .. versionadded:: 0.9
+
+ """
+
+ __visit_name__ = 'JSON'
+
+ class comparator_factory(sqltypes.Concatenable.Comparator):
+ """Define comparison operations for :class:`.JSON`."""
+
+ def __getitem__(self, other):
+ """Get the value at a given key."""
+
+ return JSONElement(self.expr, other)
+
+ def _adapt_expression(self, op, other_comparator):
+ if isinstance(op, custom_op):
+ if op.opstring == '->':
+ return op, sqltypes.Text
+ return sqltypes.Concatenable.Comparator.\
+ _adapt_expression(self, op, other_comparator)
+
+ def bind_processor(self, dialect):
+ json_serializer = dialect._json_serializer or json.dumps
+ if util.py2k:
+ encoding = dialect.encoding
+ def process(value):
+ return json_serializer(value).encode(encoding)
+ else:
+ def process(value):
+ return json_serializer(value)
+ return process
+
+ def result_processor(self, dialect, coltype):
+ json_deserializer = dialect._json_deserializer or json.loads
+ if util.py2k:
+ encoding = dialect.encoding
+ def process(value):
+ return json_deserializer(value.decode(encoding))
+ else:
+ def process(value):
+ return json_deserializer(value)
+ return process
+
+
+ischema_names['json'] = JSON
diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py
index 0e503746c..bc73f9757 100644
--- a/lib/sqlalchemy/dialects/postgresql/pg8000.py
+++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py
@@ -1,5 +1,5 @@
# postgresql/pg8000.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -39,7 +39,9 @@ class _PGNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
- return processors.to_decimal_processor_factory(decimal.Decimal)
+ return processors.to_decimal_processor_factory(
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 02eda094e..e9f64f829 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -1,5 +1,5 @@
# postgresql/psycopg2.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -179,6 +179,7 @@ from .base import PGDialect, PGCompiler, \
ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
_INT_TYPES
from .hstore import HSTORE
+from .json import JSON
logger = logging.getLogger('sqlalchemy.dialects.postgresql')
@@ -191,7 +192,9 @@ class _PGNumeric(sqltypes.Numeric):
def result_processor(self, dialect, coltype):
if self.asdecimal:
if coltype in _FLOAT_TYPES:
- return processors.to_decimal_processor_factory(decimal.Decimal)
+ return processors.to_decimal_processor_factory(
+ decimal.Decimal,
+ self._effective_decimal_return_scale)
elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
# pg8000 returns Decimal natively for 1700
return None
@@ -210,23 +213,13 @@ class _PGNumeric(sqltypes.Numeric):
class _PGEnum(ENUM):
- def __init__(self, *arg, **kw):
- super(_PGEnum, self).__init__(*arg, **kw)
- if util.py2k:
- if self.convert_unicode:
- self.convert_unicode = "force"
-
-
-class _PGArray(ARRAY):
- def __init__(self, *arg, **kw):
- super(_PGArray, self).__init__(*arg, **kw)
- if util.py2k:
- # FIXME: this check won't work for setups that
- # have convert_unicode only on their create_engine().
- if isinstance(self.item_type, sqltypes.String) and \
- self.item_type.convert_unicode:
- self.item_type.convert_unicode = "force"
-
+ def result_processor(self, dialect, coltype):
+ if util.py2k and self.convert_unicode is True:
+            # we can't easily use PG's extensions here because
+            # the enum OID is generated on the fly, and we'd need to give
+            # it a Python function anyway - not really worth it.
+ self.convert_unicode = "force_nocheck"
+ return super(_PGEnum, self).result_processor(dialect, coltype)
class _PGHStore(HSTORE):
def bind_processor(self, dialect):
@@ -241,6 +234,15 @@ class _PGHStore(HSTORE):
else:
return super(_PGHStore, self).result_processor(dialect, coltype)
+
+class _PGJSON(JSON):
+
+ def result_processor(self, dialect, coltype):
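+        # psycopg2 2.5 and above deserializes ``json`` columns natively,
+        # so no result processing is needed at the SQLAlchemy level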
+ if dialect._has_native_json:
+ return None
+ else:
+ return super(_PGJSON, self).result_processor(dialect, coltype)
+
# When we're handed literal SQL, ensure it's a SELECT-query. Since
# 8.3, combining cursors and "FOR UPDATE" has been fine.
SERVER_SIDE_CURSOR_RE = re.compile(
@@ -325,6 +327,7 @@ class PGDialect_psycopg2(PGDialect):
psycopg2_version = (0, 0)
_has_native_hstore = False
+ _has_native_json = False
colspecs = util.update_copy(
PGDialect.colspecs,
@@ -332,8 +335,8 @@ class PGDialect_psycopg2(PGDialect):
sqltypes.Numeric: _PGNumeric,
ENUM: _PGEnum, # needs force_unicode
sqltypes.Enum: _PGEnum, # needs force_unicode
- ARRAY: _PGArray, # needs force_unicode
HSTORE: _PGHStore,
+ JSON: _PGJSON
}
)
@@ -361,6 +364,7 @@ class PGDialect_psycopg2(PGDialect):
self._has_native_hstore = self.use_native_hstore and \
self._hstore_oids(connection.connection) \
is not None
+ self._has_native_json = self.psycopg2_version >= (2, 5)
@classmethod
def dbapi(cls):
@@ -369,7 +373,7 @@ class PGDialect_psycopg2(PGDialect):
@util.memoized_property
def _isolation_lookup(self):
- extensions = __import__('psycopg2.extensions').extensions
+ from psycopg2 import extensions
return {
'AUTOCOMMIT': extensions.ISOLATION_LEVEL_AUTOCOMMIT,
'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED,
@@ -407,6 +411,7 @@ class PGDialect_psycopg2(PGDialect):
if self.dbapi and self.use_native_unicode:
def on_connect(conn):
extensions.register_type(extensions.UNICODE, conn)
+ extensions.register_type(extensions.UNICODEARRAY, conn)
fns.append(on_connect)
if self.dbapi and self.use_native_hstore:
@@ -423,6 +428,11 @@ class PGDialect_psycopg2(PGDialect):
array_oid=array_oid)
fns.append(on_connect)
+ if self.dbapi and self._json_deserializer:
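+            # pass the user-supplied deserializer through to psycopg2's
+            # native json handling for each new connection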
+ def on_connect(conn):
+ extras.register_default_json(conn, loads=self._json_deserializer)
+ fns.append(on_connect)
+
if fns:
def on_connect(conn):
for fn in fns:
diff --git a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
index 289bef114..f030d2c1b 100644
--- a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
+++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py
@@ -1,5 +1,5 @@
# postgresql/pypostgresql.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/ranges.py b/lib/sqlalchemy/dialects/postgresql/ranges.py
index d03f948a7..57b0c4c30 100644
--- a/lib/sqlalchemy/dialects/postgresql/ranges.py
+++ b/lib/sqlalchemy/dialects/postgresql/ranges.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2013-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
index 583afc23f..67e7d53e6 100644
--- a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
+++ b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py
@@ -1,5 +1,5 @@
# postgresql/zxjdbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py
index 0d06160ae..a9b23575b 100644
--- a/lib/sqlalchemy/dialects/sqlite/__init__.py
+++ b/lib/sqlalchemy/dialects/sqlite/__init__.py
@@ -1,5 +1,5 @@
# sqlite/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py
index fb7d968be..579a61046 100644
--- a/lib/sqlalchemy/dialects/sqlite/base.py
+++ b/lib/sqlalchemy/dialects/sqlite/base.py
@@ -1,5 +1,5 @@
# sqlite/base.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -130,14 +130,14 @@ for new connections through the usage of events::
import datetime
import re
-from sqlalchemy import sql, exc
-from sqlalchemy.engine import default, base, reflection
-from sqlalchemy import types as sqltypes
-from sqlalchemy import util
-from sqlalchemy.sql import compiler
-from sqlalchemy import processors
+from ... import sql, exc
+from ...engine import default, reflection
+from ... import types as sqltypes, schema as sa_schema
+from ... import util
+from ...sql import compiler
+from ... import processors
-from sqlalchemy.types import BIGINT, BLOB, BOOLEAN, CHAR,\
+from ...types import BIGINT, BLOB, BOOLEAN, CHAR,\
DECIMAL, FLOAT, REAL, INTEGER, NUMERIC, SMALLINT, TEXT,\
TIMESTAMP, VARCHAR
@@ -160,6 +160,13 @@ class _DateTimeMixin(object):
kw["regexp"] = self._reg
return util.constructor_copy(self, cls, **kw)
+ def literal_processor(self, dialect):
+ bp = self.bind_processor(dialect)
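+        # render the value using the same string format as bound
+        # parameters, wrapped in single quotes as an inline literal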
+ def process(value):
+ return "'%s'" % bp(value)
+ return process
+
+
class DATETIME(_DateTimeMixin, sqltypes.DateTime):
"""Represent a Python datetime object in SQLite using a string.
@@ -211,6 +218,7 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime):
"%(hour)02d:%(minute)02d:%(second)02d"
)
+
def bind_processor(self, dialect):
datetime_datetime = datetime.datetime
datetime_date = datetime.date
@@ -491,7 +499,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
colspec += " NOT NULL"
if (column.primary_key and
- column.table.kwargs.get('sqlite_autoincrement', False) and
+ column.table.dialect_options['sqlite']['autoincrement'] and
len(column.table.primary_key.columns) == 1 and
issubclass(column.type._type_affinity, sqltypes.Integer) and
not column.foreign_keys):
@@ -506,7 +514,7 @@ class SQLiteDDLCompiler(compiler.DDLCompiler):
if len(constraint.columns) == 1:
c = list(constraint)[0]
if c.primary_key and \
- c.table.kwargs.get('sqlite_autoincrement', False) and \
+ c.table.dialect_options['sqlite']['autoincrement'] and \
issubclass(c.type._type_affinity, sqltypes.Integer) and \
not c.foreign_keys:
return None
@@ -615,6 +623,12 @@ class SQLiteDialect(default.DefaultDialect):
supports_cast = True
supports_default_values = True
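+    # register "sqlite_autoincrement" as an accepted Table-level dialect
+    # keyword argument (defaults to False)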
+ construct_arguments = [
+ (sa_schema.Table, {
+ "autoincrement": False
+ })
+ ]
+
_broken_fk_pragma_quotes = False
def __init__(self, isolation_level=None, native_datetime=False, **kwargs):
diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
index ad0dd5292..b53f4d4a0 100644
--- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py
+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py
@@ -1,5 +1,5 @@
# sqlite/pysqlite.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -97,6 +97,8 @@ or result processing. Execution of "func.current_date()" will return a string.
"func.current_timestamp()" is registered as returning a DATETIME type in
SQLAlchemy, so this function still receives SQLAlchemy-level result processing.
+.. _pysqlite_threading_pooling:
+
Threading/Pooling Behavior
---------------------------
@@ -160,8 +162,8 @@ Using Temporary Tables with SQLite
Due to the way SQLite deals with temporary tables, if you wish to use a
temporary table in a file-based SQLite database across multiple checkouts
from the connection pool, such as when using an ORM :class:`.Session` where
-the temporary table should continue to remain after :meth:`.commit` or
-:meth:`.rollback` is called, a pool which maintains a single connection must
+the temporary table should remain after :meth:`.Session.commit` or
+:meth:`.Session.rollback` is called, a pool which maintains a single connection must
be used. Use :class:`.SingletonThreadPool` if the scope is only needed
 within the current thread, or :class:`.StaticPool` if scope is needed within
multiple threads for this case::
diff --git a/lib/sqlalchemy/dialects/sybase/__init__.py b/lib/sqlalchemy/dialects/sybase/__init__.py
index f61352ceb..85f9dd9c9 100644
--- a/lib/sqlalchemy/dialects/sybase/__init__.py
+++ b/lib/sqlalchemy/dialects/sybase/__init__.py
@@ -1,5 +1,5 @@
# sybase/__init__.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py
index 1c42d4846..501270778 100644
--- a/lib/sqlalchemy/dialects/sybase/base.py
+++ b/lib/sqlalchemy/dialects/sybase/base.py
@@ -1,5 +1,5 @@
# sybase/base.py
-# Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
# copyright (C) 2007 Fisch Asset Management
# AG http://www.fam.ch, with coding by Alexander Houben
@@ -440,6 +440,8 @@ class SybaseDialect(default.DefaultDialect):
preparer = SybaseIdentifierPreparer
inspector = SybaseInspector
+ construct_arguments = []
+
def _get_default_schema_name(self, connection):
return connection.scalar(
text("SELECT user_name() as user_name",
diff --git a/lib/sqlalchemy/dialects/sybase/mxodbc.py b/lib/sqlalchemy/dialects/sybase/mxodbc.py
index 829132bdc..f14d1c420 100644
--- a/lib/sqlalchemy/dialects/sybase/mxodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/mxodbc.py
@@ -1,5 +1,5 @@
# sybase/mxodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py
index 283c60da3..f773e5a6d 100644
--- a/lib/sqlalchemy/dialects/sybase/pyodbc.py
+++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py
@@ -1,5 +1,5 @@
# sybase/pyodbc.py
-# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py
index 8961ce8ef..664bd9ac0 100644
--- a/lib/sqlalchemy/dialects/sybase/pysybase.py
+++ b/lib/sqlalchemy/dialects/sybase/pysybase.py
@@ -1,5 +1,5 @@
# sybase/pysybase.py
-# Copyright (C) 2010-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
+# Copyright (C) 2010-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php