diff options
author | Mike Bayer <mike_mp@zzzcomputing.com> | 2012-11-20 11:03:01 -0500 |
---|---|---|
committer | Mike Bayer <mike_mp@zzzcomputing.com> | 2012-11-20 11:03:01 -0500 |
commit | fe8f8349c9f46abe37cd9de7876df0eeb5e12c94 (patch) | |
tree | 29b6b1f953d24f4b49a84069f13f3ef72e594212 /lib/sqlalchemy | |
parent | 2799a674038bef5a81102935ae43c70163f0c556 (diff) | |
parent | ead37e4ec0dd5750769226befea043d9527f869e (diff) | |
download | sqlalchemy-fe8f8349c9f46abe37cd9de7876df0eeb5e12c94.tar.gz |
- an enormous merge just because I committed a one line log entry. the joy of DVCS
Diffstat (limited to 'lib/sqlalchemy')
134 files changed, 3015 insertions, 1919 deletions
diff --git a/lib/sqlalchemy/connectors/__init__.py b/lib/sqlalchemy/connectors/__init__.py index 5a0e2eb24..97ac5232c 100644 --- a/lib/sqlalchemy/connectors/__init__.py +++ b/lib/sqlalchemy/connectors/__init__.py @@ -7,4 +7,3 @@ class Connector(object): pass - diff --git a/lib/sqlalchemy/connectors/mxodbc.py b/lib/sqlalchemy/connectors/mxodbc.py index d74e9639b..5f6fa8fc8 100644 --- a/lib/sqlalchemy/connectors/mxodbc.py +++ b/lib/sqlalchemy/connectors/mxodbc.py @@ -24,8 +24,9 @@ import warnings from . import Connector + class MxODBCConnector(Connector): - driver='mxodbc' + driver = 'mxodbc' supports_sane_multi_rowcount = False supports_unicode_statements = True @@ -47,7 +48,7 @@ class MxODBCConnector(Connector): elif platform == 'darwin': from mx.ODBC import iODBC as module else: - raise ImportError, "Unrecognized platform for mxODBC import" + raise ImportError("Unrecognized platform for mxODBC import") return module @classmethod @@ -73,8 +74,8 @@ class MxODBCConnector(Connector): emit Python standard warnings. 
""" from mx.ODBC.Error import Warning as MxOdbcWarning - def error_handler(connection, cursor, errorclass, errorvalue): + def error_handler(connection, cursor, errorclass, errorvalue): if issubclass(errorclass, MxOdbcWarning): errorclass.__bases__ = (Warning,) warnings.warn(message=str(errorvalue), @@ -141,7 +142,8 @@ class MxODBCConnector(Connector): return True def do_executemany(self, cursor, statement, parameters, context=None): - cursor.executemany(statement, parameters, direct=self._get_direct(context)) + cursor.executemany( + statement, parameters, direct=self._get_direct(context)) def do_execute(self, cursor, statement, parameters, context=None): cursor.execute(statement, parameters, direct=self._get_direct(context)) diff --git a/lib/sqlalchemy/connectors/mysqldb.py b/lib/sqlalchemy/connectors/mysqldb.py index 4479deafe..5f4b3e4d3 100644 --- a/lib/sqlalchemy/connectors/mysqldb.py +++ b/lib/sqlalchemy/connectors/mysqldb.py @@ -10,9 +10,11 @@ from ..sql import operators as sql_operators from .. import exc, log, schema, sql, types as sqltypes, util, processors import re + # the subclassing of Connector by all classes # here is not strictly necessary + class MySQLDBExecutionContext(Connector): @property @@ -22,6 +24,7 @@ class MySQLDBExecutionContext(Connector): else: return self.cursor.rowcount + class MySQLDBCompiler(Connector): def visit_mod_binary(self, binary, operator, **kw): return self.process(binary.left, **kw) + " %% " + \ @@ -30,12 +33,14 @@ class MySQLDBCompiler(Connector): def post_process_text(self, text): return text.replace('%', '%%') + class MySQLDBIdentifierPreparer(Connector): def _escape_identifier(self, value): value = value.replace(self.escape_quote, self.escape_to_quote) return value.replace("%", "%%") + class MySQLDBConnector(Connector): driver = 'mysqldb' supports_unicode_statements = False @@ -76,7 +81,8 @@ class MySQLDBConnector(Connector): # query string. 
ssl = {} - for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']: + keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher'] + for key in keys: if key in opts: ssl[key[4:]] = opts[key] util.coerce_kw_type(ssl, key[4:], str) @@ -148,4 +154,3 @@ class MySQLDBConnector(Connector): "combination of MySQL server and MySQL-python. " "MySQL-python >= 1.2.2 is recommended. Assuming latin1.") return 'latin1' - diff --git a/lib/sqlalchemy/connectors/pyodbc.py b/lib/sqlalchemy/connectors/pyodbc.py index 7ef0922cf..678d50f4c 100644 --- a/lib/sqlalchemy/connectors/pyodbc.py +++ b/lib/sqlalchemy/connectors/pyodbc.py @@ -11,8 +11,9 @@ import sys import re import urllib + class PyODBCConnector(Connector): - driver='pyodbc' + driver = 'pyodbc' supports_sane_multi_rowcount = False # PyODBC unicode is broken on UCS-4 builds @@ -63,7 +64,7 @@ class PyODBCConnector(Connector): dsn_connection = 'dsn' in keys or \ ('host' in keys and 'database' not in keys) if dsn_connection: - connectors= ['dsn=%s' % (keys.pop('host', '') or \ + connectors = ['dsn=%s' % (keys.pop('host', '') or \ keys.pop('dsn', ''))] else: port = '' @@ -73,7 +74,7 @@ class PyODBCConnector(Connector): connectors = ["DRIVER={%s}" % keys.pop('driver', self.pyodbc_driver_name), 'Server=%s%s' % (keys.pop('host', ''), port), - 'Database=%s' % keys.pop('database', '') ] + 'Database=%s' % keys.pop('database', '')] user = keys.pop("user", None) if user: @@ -90,8 +91,8 @@ class PyODBCConnector(Connector): connectors.append("AutoTranslate=%s" % keys.pop("odbc_autotranslate")) - connectors.extend(['%s=%s' % (k,v) for k,v in keys.iteritems()]) - return [[";".join (connectors)], connect_args] + connectors.extend(['%s=%s' % (k, v) for k, v in keys.iteritems()]) + return [[";".join(connectors)], connect_args] def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.ProgrammingError): @@ -117,18 +118,20 @@ class PyODBCConnector(Connector): )) if self.freetds: - 
self.freetds_driver_version = dbapi_con.getinfo(pyodbc.SQL_DRIVER_VER) + self.freetds_driver_version = dbapi_con.getinfo( + pyodbc.SQL_DRIVER_VER) # the "Py2K only" part here is theoretical. # have not tried pyodbc + python3.1 yet. # Py2K - self.supports_unicode_statements = not self.freetds and not self.easysoft + self.supports_unicode_statements = ( + not self.freetds and not self.easysoft) if self._user_supports_unicode_binds is not None: self.supports_unicode_binds = self._user_supports_unicode_binds else: - self.supports_unicode_binds = (not self.freetds or - self.freetds_driver_version >= '0.91' - ) and not self.easysoft + self.supports_unicode_binds = ( + not self.freetds or self.freetds_driver_version >= '0.91' + ) and not self.easysoft # end Py2K # run other initialization which asks for user name, etc. diff --git a/lib/sqlalchemy/connectors/zxJDBC.py b/lib/sqlalchemy/connectors/zxJDBC.py index 1db7a619d..433e02745 100644 --- a/lib/sqlalchemy/connectors/zxJDBC.py +++ b/lib/sqlalchemy/connectors/zxJDBC.py @@ -7,6 +7,7 @@ import sys from . import Connector + class ZxJDBCConnector(Connector): driver = 'zxjdbc' diff --git a/lib/sqlalchemy/dialects/__init__.py b/lib/sqlalchemy/dialects/__init__.py index 2d400aa60..61cb4e1a1 100644 --- a/lib/sqlalchemy/dialects/__init__.py +++ b/lib/sqlalchemy/dialects/__init__.py @@ -18,6 +18,7 @@ __all__ = ( from .. import util + def _auto_fn(name): """default dialect importer. @@ -42,4 +43,4 @@ def _auto_fn(name): else: return None -registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn)
\ No newline at end of file +registry = util.PluginLoader("sqlalchemy.dialects", auto_fn=_auto_fn) diff --git a/lib/sqlalchemy/dialects/firebird/__init__.py b/lib/sqlalchemy/dialects/firebird/__init__.py index f79588d24..6b2c6878d 100644 --- a/lib/sqlalchemy/dialects/firebird/__init__.py +++ b/lib/sqlalchemy/dialects/firebird/__init__.py @@ -18,5 +18,3 @@ __all__ = ( 'TEXT', 'NUMERIC', 'FLOAT', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB', 'dialect' ) - - diff --git a/lib/sqlalchemy/dialects/firebird/base.py b/lib/sqlalchemy/dialects/firebird/base.py index df20060e5..9a8e8044a 100644 --- a/lib/sqlalchemy/dialects/firebird/base.py +++ b/lib/sqlalchemy/dialects/firebird/base.py @@ -69,7 +69,7 @@ the SQLAlchemy ``returning()`` method, such as:: """ -import datetime, re +import datetime from sqlalchemy import schema as sa_schema from sqlalchemy import exc, types as sqltypes, sql, util @@ -130,6 +130,7 @@ class _StringType(sqltypes.String): self.charset = charset super(_StringType, self).__init__(**kw) + class VARCHAR(_StringType, sqltypes.VARCHAR): """Firebird VARCHAR type""" __visit_name__ = 'VARCHAR' @@ -137,6 +138,7 @@ class VARCHAR(_StringType, sqltypes.VARCHAR): def __init__(self, length=None, **kwargs): super(VARCHAR, self).__init__(length=length, **kwargs) + class CHAR(_StringType, sqltypes.CHAR): """Firebird CHAR type""" __visit_name__ = 'CHAR' @@ -211,7 +213,6 @@ class FBTypeCompiler(compiler.GenericTypeCompiler): return self._extend_string(type_, basic) - class FBCompiler(sql.compiler.SQLCompiler): """Firebird specific idiosyncrasies""" @@ -516,7 +517,7 @@ class FBDialect(default.DefaultDialect): # get primary key fields c = connection.execute(keyqry, ["PRIMARY KEY", tablename]) pkfields = [self.normalize_name(r['fname']) for r in c.fetchall()] - return {'constrained_columns':pkfields, 'name':None} + return {'constrained_columns': pkfields, 'name': None} @reflection.cache def get_column_sequence(self, connection, @@ -624,11 +625,11 @@ class 
FBDialect(default.DefaultDialect): # Redundant defvalue = None col_d = { - 'name' : name, - 'type' : coltype, - 'nullable' : not bool(row['null_flag']), - 'default' : defvalue, - 'autoincrement':defvalue is None + 'name': name, + 'type': coltype, + 'nullable': not bool(row['null_flag']), + 'default': defvalue, + 'autoincrement': defvalue is None } if orig_colname.lower() == orig_colname: @@ -636,7 +637,7 @@ class FBDialect(default.DefaultDialect): # if the PK is a single field, try to see if its linked to # a sequence thru a trigger - if len(pkey_cols)==1 and name==pkey_cols[0]: + if len(pkey_cols) == 1 and name == pkey_cols[0]: seq_d = self.get_column_sequence(connection, tablename, name) if seq_d is not None: col_d['sequence'] = seq_d @@ -666,12 +667,12 @@ class FBDialect(default.DefaultDialect): tablename = self.denormalize_name(table_name) c = connection.execute(fkqry, ["FOREIGN KEY", tablename]) - fks = util.defaultdict(lambda:{ - 'name' : None, - 'constrained_columns' : [], - 'referred_schema' : None, - 'referred_table' : None, - 'referred_columns' : [] + fks = util.defaultdict(lambda: { + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': None, + 'referred_columns': [] }) for row in c: diff --git a/lib/sqlalchemy/dialects/firebird/fdb.py b/lib/sqlalchemy/dialects/firebird/fdb.py index aac3579d6..b9356b3a3 100644 --- a/lib/sqlalchemy/dialects/firebird/fdb.py +++ b/lib/sqlalchemy/dialects/firebird/fdb.py @@ -26,6 +26,7 @@ The fdb dialect is new and not yet tested (can't get fdb to build). from .kinterbasdb import FBDialect_kinterbasdb from ... import util + class FBDialect_fdb(FBDialect_kinterbasdb): @classmethod @@ -63,4 +64,4 @@ class FBDialect_fdb(FBDialect_kinterbasdb): return self._parse_version_info(version) -dialect = FBDialect_fdb
\ No newline at end of file +dialect = FBDialect_fdb diff --git a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py index 90fadde50..f6c533cfc 100644 --- a/lib/sqlalchemy/dialects/firebird/kinterbasdb.py +++ b/lib/sqlalchemy/dialects/firebird/kinterbasdb.py @@ -63,6 +63,7 @@ class _FBNumeric_kinterbasdb(sqltypes.Numeric): return value return process + class FBExecutionContext_kinterbasdb(FBExecutionContext): @property def rowcount(self): @@ -72,6 +73,7 @@ class FBExecutionContext_kinterbasdb(FBExecutionContext): else: return -1 + class FBDialect_kinterbasdb(FBDialect): driver = 'kinterbasdb' supports_sane_rowcount = False @@ -83,7 +85,7 @@ class FBDialect_kinterbasdb(FBDialect): colspecs = util.update_copy( FBDialect.colspecs, { - sqltypes.Numeric:_FBNumeric_kinterbasdb, + sqltypes.Numeric: _FBNumeric_kinterbasdb, } ) diff --git a/lib/sqlalchemy/dialects/informix/__init__.py b/lib/sqlalchemy/dialects/informix/__init__.py index e500bea55..798ae94a6 100644 --- a/lib/sqlalchemy/dialects/informix/__init__.py +++ b/lib/sqlalchemy/dialects/informix/__init__.py @@ -6,4 +6,4 @@ from sqlalchemy.dialects.informix import base, informixdb -base.dialect = informixdb.dialect
\ No newline at end of file +base.dialect = informixdb.dialect diff --git a/lib/sqlalchemy/dialects/informix/base.py b/lib/sqlalchemy/dialects/informix/base.py index f54bf6d37..c3fbccb11 100644 --- a/lib/sqlalchemy/dialects/informix/base.py +++ b/lib/sqlalchemy/dialects/informix/base.py @@ -144,7 +144,9 @@ RESERVED_WORDS = set( "xadatasource", "xid", "xload", "xunload", "year" ]) + class InfoDateTime(sqltypes.DateTime): + def bind_processor(self, dialect): def process(value): if value is not None: @@ -153,7 +155,9 @@ class InfoDateTime(sqltypes.DateTime): return value return process + class InfoTime(sqltypes.Time): + def bind_processor(self, dialect): def process(value): if value is not None: @@ -171,33 +175,33 @@ class InfoTime(sqltypes.Time): return process colspecs = { - sqltypes.DateTime : InfoDateTime, + sqltypes.DateTime: InfoDateTime, sqltypes.TIMESTAMP: InfoDateTime, sqltypes.Time: InfoTime, } ischema_names = { - 0 : sqltypes.CHAR, # CHAR - 1 : sqltypes.SMALLINT, # SMALLINT - 2 : sqltypes.INTEGER, # INT - 3 : sqltypes.FLOAT, # Float - 3 : sqltypes.Float, # SmallFloat - 5 : sqltypes.DECIMAL, # DECIMAL - 6 : sqltypes.Integer, # Serial - 7 : sqltypes.DATE, # DATE - 8 : sqltypes.Numeric, # MONEY - 10 : sqltypes.DATETIME, # DATETIME - 11 : sqltypes.LargeBinary, # BYTE - 12 : sqltypes.TEXT, # TEXT - 13 : sqltypes.VARCHAR, # VARCHAR - 15 : sqltypes.NCHAR, # NCHAR - 16 : sqltypes.NVARCHAR, # NVARCHAR - 17 : sqltypes.Integer, # INT8 - 18 : sqltypes.Integer, # Serial8 - 43 : sqltypes.String, # LVARCHAR - -1 : sqltypes.BLOB, # BLOB - -1 : sqltypes.CLOB, # CLOB + 0: sqltypes.CHAR, # CHAR + 1: sqltypes.SMALLINT, # SMALLINT + 2: sqltypes.INTEGER, # INT + 3: sqltypes.FLOAT, # Float + 3: sqltypes.Float, # SmallFloat + 5: sqltypes.DECIMAL, # DECIMAL + 6: sqltypes.Integer, # Serial + 7: sqltypes.DATE, # DATE + 8: sqltypes.Numeric, # MONEY + 10: sqltypes.DATETIME, # DATETIME + 11: sqltypes.LargeBinary, # BYTE + 12: sqltypes.TEXT, # TEXT + 13: sqltypes.VARCHAR, # VARCHAR + 15: 
sqltypes.NCHAR, # NCHAR + 16: sqltypes.NVARCHAR, # NVARCHAR + 17: sqltypes.Integer, # INT8 + 18: sqltypes.Integer, # Serial8 + 43: sqltypes.String, # LVARCHAR + -1: sqltypes.BLOB, # BLOB + -1: sqltypes.CLOB, # CLOB } @@ -217,7 +221,9 @@ class InfoTypeCompiler(compiler.GenericTypeCompiler): def visit_boolean(self, type_): return "SMALLINT" + class InfoSQLCompiler(compiler.SQLCompiler): + def default_from(self): return " from systables where tabname = 'systables' " @@ -337,6 +343,7 @@ class InfoDDLCompiler(compiler.DDLCompiler): text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint) return text + class InformixIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = RESERVED_WORDS @@ -345,7 +352,7 @@ class InformixIdentifierPreparer(compiler.IdentifierPreparer): class InformixDialect(default.DefaultDialect): name = 'informix' - max_identifier_length = 128 # adjusts at runtime based on server version + max_identifier_length = 128 # adjusts at runtime based on server version type_compiler = InfoTypeCompiler statement_compiler = InfoSQLCompiler @@ -438,14 +445,14 @@ class InformixDialect(default.DefaultDialect): if coltype not in (0, 13) and default: default = default.split()[-1] - if coltype == 6: # Serial, mark as autoincrement + if coltype == 6: # Serial, mark as autoincrement autoincrement = True - if coltype == 0 or coltype == 13: # char, varchar + if coltype == 0 or coltype == 13: # char, varchar coltype = ischema_names[coltype](collength) if default: default = "'%s'" % default - elif coltype == 5: # decimal + elif coltype == 5: # decimal precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF if scale == 255: scale = 0 @@ -487,14 +494,13 @@ class InformixDialect(default.DefaultDialect): t8.idxname and t7.tabid = t5.ptabid""", table_name, schema_sel) - def fkey_rec(): return { - 'name' : None, - 'constrained_columns' : [], - 'referred_schema' : None, - 'referred_table' : None, - 'referred_columns' : [] + 'name': None, + 
'constrained_columns': [], + 'referred_schema': None, + 'referred_table': None, + 'referred_columns': [] } fkeys = util.defaultdict(fkey_rec) @@ -536,15 +542,15 @@ class InformixDialect(default.DefaultDialect): colpositions = set() for row in data: - colpos = set([getattr(row, 'part%d' % x) for x in range(1,16)]) + colpos = set([getattr(row, 'part%d' % x) for x in range(1, 16)]) colpositions |= colpos if not len(colpositions): - return {'constrained_columns':[], 'name':None} + return {'constrained_columns': [], 'name': None} # Select the column names using the columnpositions # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table) - place_holder = ','.join('?'*len(colpositions)) + place_holder = ','.join('?' * len(colpositions)) c = connection.execute( """select t1.colname from syscolumns as t1, systables as t2 @@ -553,8 +559,8 @@ class InformixDialect(default.DefaultDialect): table_name, *colpositions ).fetchall() - cols = reduce(lambda x,y: list(x)+list(y), c, []) - return {'constrained_columns':cols, 'name':None} + cols = reduce(lambda x, y: list(x) + list(y), c, []) + return {'constrained_columns': cols, 'name': None} @reflection.cache def get_indexes(self, connection, table_name, schema, **kw): @@ -567,9 +573,9 @@ class InformixDialect(default.DefaultDialect): indexes = [] for row in c.fetchall(): - colnames = [getattr(row, 'part%d' % x) for x in range(1,16)] + colnames = [getattr(row, 'part%d' % x) for x in range(1, 16)] colnames = [x for x in colnames if x] - place_holder = ','.join('?'*len(colnames)) + place_holder = ','.join('?' 
* len(colnames)) c = connection.execute( """select t1.colname from syscolumns as t1, systables as t2 @@ -577,7 +583,7 @@ class InformixDialect(default.DefaultDialect): t1.colno in (%s)""" % place_holder, table_name, *colnames ).fetchall() - c = reduce(lambda x,y: list(x)+list(y), c, []) + c = reduce(lambda x, y: list(x) + list(y), c, []) indexes.append({ 'name': row.idxname, 'unique': row.idxtype.lower() == 'u', diff --git a/lib/sqlalchemy/dialects/informix/informixdb.py b/lib/sqlalchemy/dialects/informix/informixdb.py index 474bc5f11..2fdccfdc2 100644 --- a/lib/sqlalchemy/dialects/informix/informixdb.py +++ b/lib/sqlalchemy/dialects/informix/informixdb.py @@ -21,7 +21,9 @@ from sqlalchemy.engine import default VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)') + class InformixExecutionContext_informixdb(default.DefaultExecutionContext): + def post_exec(self): if self.isinsert: self._lastrowid = self.cursor.sqlerrd[1] diff --git a/lib/sqlalchemy/dialects/mssql/__init__.py b/lib/sqlalchemy/dialects/mssql/__init__.py index e262d208b..bad9b72c8 100644 --- a/lib/sqlalchemy/dialects/mssql/__init__.py +++ b/lib/sqlalchemy/dialects/mssql/__init__.py @@ -23,4 +23,4 @@ __all__ = ( 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME', 'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP', 'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect' -)
\ No newline at end of file +) diff --git a/lib/sqlalchemy/dialects/mssql/adodbapi.py b/lib/sqlalchemy/dialects/mssql/adodbapi.py index 747ea17db..2b864db96 100644 --- a/lib/sqlalchemy/dialects/mssql/adodbapi.py +++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py @@ -22,6 +22,7 @@ from sqlalchemy import types as sqltypes, util from sqlalchemy.dialects.mssql.base import MSDateTime, MSDialect import sys + class MSDateTime_adodbapi(MSDateTime): def result_processor(self, dialect, coltype): def process(value): @@ -49,7 +50,7 @@ class MSDialect_adodbapi(MSDialect): colspecs = util.update_copy( MSDialect.colspecs, { - sqltypes.DateTime:MSDateTime_adodbapi + sqltypes.DateTime: MSDateTime_adodbapi } ) @@ -58,18 +59,18 @@ class MSDialect_adodbapi(MSDialect): connectors = ["Provider=SQLOLEDB"] if 'port' in keys: - connectors.append ("Data Source=%s, %s" % + connectors.append("Data Source=%s, %s" % (keys.get("host"), keys.get("port"))) else: - connectors.append ("Data Source=%s" % keys.get("host")) - connectors.append ("Initial Catalog=%s" % keys.get("database")) + connectors.append("Data Source=%s" % keys.get("host")) + connectors.append("Initial Catalog=%s" % keys.get("database")) user = keys.get("user") if user: connectors.append("User Id=%s" % user) connectors.append("Password=%s" % keys.get("password", "")) else: connectors.append("Integrated Security=SSPI") - return [[";".join (connectors)], {}] + return [[";".join(connectors)], {}] def is_disconnect(self, e, connection, cursor): return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \ diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index 09db05e1f..f55ae4644 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -216,6 +216,7 @@ RESERVED_WORDS = set( 'writetext', ]) + class REAL(sqltypes.REAL): __visit_name__ = 'REAL' @@ -224,6 +225,7 @@ class REAL(sqltypes.REAL): kw['precision'] = 24 super(REAL, self).__init__(**kw) + class 
TINYINT(sqltypes.Integer): __visit_name__ = 'TINYINT' @@ -243,6 +245,7 @@ class _MSDate(sqltypes.Date): return process _reg = re.compile(r"(\d+)-(\d+)-(\d+)") + def result_processor(self, dialect, coltype): def process(value): if isinstance(value, datetime.datetime): @@ -256,6 +259,7 @@ class _MSDate(sqltypes.Date): return value return process + class TIME(sqltypes.TIME): def __init__(self, precision=None, **kwargs): self.precision = precision @@ -274,6 +278,7 @@ class TIME(sqltypes.TIME): return process _reg = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d{0,6}))?") + def result_processor(self, dialect, coltype): def process(value): if isinstance(value, datetime.datetime): @@ -287,6 +292,7 @@ class TIME(sqltypes.TIME): return process _MSTime = TIME + class _DateTimeBase(object): def bind_processor(self, dialect): def process(value): @@ -296,12 +302,15 @@ class _DateTimeBase(object): return value return process + class _MSDateTime(_DateTimeBase, sqltypes.DateTime): pass + class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime): __visit_name__ = 'SMALLDATETIME' + class DATETIME2(_DateTimeBase, sqltypes.DateTime): __visit_name__ = 'DATETIME2' @@ -317,12 +326,14 @@ class DATETIMEOFFSET(sqltypes.TypeEngine): def __init__(self, precision=None, **kwargs): self.precision = precision + class _StringType(object): """Base for MSSQL string types.""" def __init__(self, collation=None): self.collation = collation + class TEXT(_StringType, sqltypes.TEXT): """MSSQL TEXT type, for variable-length text up to 2^31 characters.""" @@ -336,6 +347,7 @@ class TEXT(_StringType, sqltypes.TEXT): _StringType.__init__(self, collation) sqltypes.Text.__init__(self, length, **kw) + class NTEXT(_StringType, sqltypes.UnicodeText): """MSSQL NTEXT type, for variable-length unicode text up to 2^30 characters.""" @@ -381,6 +393,7 @@ class VARCHAR(_StringType, sqltypes.VARCHAR): _StringType.__init__(self, collation) sqltypes.VARCHAR.__init__(self, length, **kw) + class NVARCHAR(_StringType, sqltypes.NVARCHAR): 
"""MSSQL NVARCHAR type. @@ -398,6 +411,7 @@ class NVARCHAR(_StringType, sqltypes.NVARCHAR): _StringType.__init__(self, collation) sqltypes.NVARCHAR.__init__(self, length, **kw) + class CHAR(_StringType, sqltypes.CHAR): """MSSQL CHAR type, for fixed-length non-Unicode data with a maximum of 8,000 characters.""" @@ -426,6 +440,7 @@ class CHAR(_StringType, sqltypes.CHAR): _StringType.__init__(self, collation) sqltypes.CHAR.__init__(self, length, **kw) + class NCHAR(_StringType, sqltypes.NCHAR): """MSSQL NCHAR type. @@ -443,9 +458,11 @@ class NCHAR(_StringType, sqltypes.NCHAR): _StringType.__init__(self, collation) sqltypes.NCHAR.__init__(self, length, **kw) + class IMAGE(sqltypes.LargeBinary): __visit_name__ = 'IMAGE' + class BIT(sqltypes.TypeEngine): __visit_name__ = 'BIT' @@ -453,12 +470,15 @@ class BIT(sqltypes.TypeEngine): class MONEY(sqltypes.TypeEngine): __visit_name__ = 'MONEY' + class SMALLMONEY(sqltypes.TypeEngine): __visit_name__ = 'SMALLMONEY' + class UNIQUEIDENTIFIER(sqltypes.TypeEngine): __visit_name__ = "UNIQUEIDENTIFIER" + class SQL_VARIANT(sqltypes.TypeEngine): __visit_name__ = 'SQL_VARIANT' @@ -487,30 +507,30 @@ MSUniqueIdentifier = UNIQUEIDENTIFIER MSVariant = SQL_VARIANT ischema_names = { - 'int' : INTEGER, + 'int': INTEGER, 'bigint': BIGINT, - 'smallint' : SMALLINT, - 'tinyint' : TINYINT, - 'varchar' : VARCHAR, - 'nvarchar' : NVARCHAR, - 'char' : CHAR, - 'nchar' : NCHAR, - 'text' : TEXT, - 'ntext' : NTEXT, - 'decimal' : DECIMAL, - 'numeric' : NUMERIC, - 'float' : FLOAT, - 'datetime' : DATETIME, - 'datetime2' : DATETIME2, - 'datetimeoffset' : DATETIMEOFFSET, + 'smallint': SMALLINT, + 'tinyint': TINYINT, + 'varchar': VARCHAR, + 'nvarchar': NVARCHAR, + 'char': CHAR, + 'nchar': NCHAR, + 'text': TEXT, + 'ntext': NTEXT, + 'decimal': DECIMAL, + 'numeric': NUMERIC, + 'float': FLOAT, + 'datetime': DATETIME, + 'datetime2': DATETIME2, + 'datetimeoffset': DATETIMEOFFSET, 'date': DATE, 'time': TIME, - 'smalldatetime' : SMALLDATETIME, - 'binary' : BINARY, - 
'varbinary' : VARBINARY, + 'smalldatetime': SMALLDATETIME, + 'binary': BINARY, + 'varbinary': VARBINARY, 'bit': BIT, - 'real' : REAL, - 'image' : IMAGE, + 'real': REAL, + 'image': IMAGE, 'timestamp': TIMESTAMP, 'money': MONEY, 'smallmoney': SMALLMONEY, @@ -586,8 +606,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): return self._extend("TEXT", type_) def visit_VARCHAR(self, type_): - return self._extend("VARCHAR", type_, - length = type_.length or 'max') + return self._extend("VARCHAR", type_, length=type_.length or 'max') def visit_CHAR(self, type_): return self._extend("CHAR", type_) @@ -596,8 +615,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): return self._extend("NCHAR", type_) def visit_NVARCHAR(self, type_): - return self._extend("NVARCHAR", type_, - length = type_.length or 'max') + return self._extend("NVARCHAR", type_, length=type_.length or 'max') def visit_date(self, type_): if self.dialect.server_version_info < MS_2008_VERSION: @@ -641,6 +659,7 @@ class MSTypeCompiler(compiler.GenericTypeCompiler): def visit_SQL_VARIANT(self, type_): return 'SQL_VARIANT' + class MSExecutionContext(default.DefaultExecutionContext): _enable_identity_insert = False _select_lastrowid = False @@ -718,6 +737,7 @@ class MSExecutionContext(default.DefaultExecutionContext): else: return engine.ResultProxy(self) + class MSSQLCompiler(compiler.SQLCompiler): returning_precedes_values = True @@ -947,6 +967,7 @@ class MSSQLCompiler(compiler.SQLCompiler): fromhints=from_hints, **kw) for t in [from_table] + extra_froms) + class MSSQLStrictCompiler(MSSQLCompiler): """A subclass of MSSQLCompiler which disables the usage of bind parameters where not allowed natively by MS-SQL. 
@@ -990,6 +1011,7 @@ class MSSQLStrictCompiler(MSSQLCompiler): return super(MSSQLStrictCompiler, self).\ render_literal_value(value, type_) + class MSDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): colspec = (self.preparer.format_column(column) + " " @@ -1049,6 +1071,7 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer): result = '.'.join([self.quote(x, force) for x in schema.split('.')]) return result + def _db_plus_owner_listing(fn): def wrap(dialect, connection, schema=None, **kw): dbname, owner = _owner_plus_db(dialect, schema) @@ -1056,6 +1079,7 @@ def _db_plus_owner_listing(fn): dbname, owner, schema, **kw) return update_wrapper(wrap, fn) + def _db_plus_owner(fn): def wrap(dialect, connection, tablename, schema=None, **kw): dbname, owner = _owner_plus_db(dialect, schema) @@ -1063,6 +1087,7 @@ def _db_plus_owner(fn): tablename, dbname, owner, schema, **kw) return update_wrapper(wrap, fn) + def _switch_db(dbname, connection, fn, *arg, **kw): if dbname: current_db = connection.scalar("select db_name()") @@ -1073,6 +1098,7 @@ def _switch_db(dbname, connection, fn, *arg, **kw): if dbname: connection.execute("use %s" % current_db) + def _owner_plus_db(dialect, schema): if not schema: return None, dialect.default_schema_name @@ -1081,6 +1107,7 @@ def _owner_plus_db(dialect, schema): else: return None, schema + class MSDialect(default.DefaultDialect): name = 'mssql' supports_default_values = True @@ -1091,9 +1118,9 @@ class MSDialect(default.DefaultDialect): schema_name = "dbo" colspecs = { - sqltypes.DateTime : _MSDateTime, - sqltypes.Date : _MSDate, - sqltypes.Time : TIME, + sqltypes.DateTime: _MSDateTime, + sqltypes.Date: _MSDate, + sqltypes.Time: TIME, } ischema_names = ischema_names @@ -1142,7 +1169,7 @@ class MSDialect(default.DefaultDialect): "behaviors may not function properly. If using ODBC " "with FreeTDS, ensure server version 7.0 or 8.0, not 4.2, " "is configured in the FreeTDS configuration." 
% - ".".join(str(x) for x in self.server_version_info) ) + ".".join(str(x) for x in self.server_version_info)) if self.server_version_info >= MS_2005_VERSION and \ 'implicit_returning' not in self.__dict__: self.implicit_returning = True @@ -1241,7 +1268,7 @@ class MSDialect(default.DefaultDialect): sql.bindparam('schname', owner, sqltypes.String(convert_unicode=True)) ], - typemap = { + typemap={ 'name': sqltypes.Unicode() } ) @@ -1270,9 +1297,7 @@ class MSDialect(default.DefaultDialect): sql.bindparam('schname', owner, sqltypes.String(convert_unicode=True)) ], - typemap = { - 'name': sqltypes.Unicode() - } + typemap={'name': sqltypes.Unicode()} ), ) for row in rp: @@ -1362,11 +1387,11 @@ class MSDialect(default.DefaultDialect): coltype = coltype(**kwargs) cdict = { - 'name' : name, - 'type' : coltype, - 'nullable' : nullable, - 'default' : default, - 'autoincrement':False, + 'name': name, + 'type': coltype, + 'nullable': nullable, + 'default': default, + 'autoincrement': False, } cols.append(cdict) # autoincrement and identity @@ -1449,10 +1474,8 @@ class MSDialect(default.DefaultDialect): RR.c.unique_constraint_name, C.c.ordinal_position == R.c.ordinal_position ), - order_by= [ - RR.c.constraint_name, - R.c.ordinal_position]) - + order_by=[RR.c.constraint_name, R.c.ordinal_position] + ) # group rows by constraint ID, to handle multi-column FKs fkeys = [] @@ -1489,4 +1512,3 @@ class MSDialect(default.DefaultDialect): remote_cols.append(rcol) return fkeys.values() - diff --git a/lib/sqlalchemy/dialects/mssql/information_schema.py b/lib/sqlalchemy/dialects/mssql/information_schema.py index 30849e0ef..85d9e501d 100644 --- a/lib/sqlalchemy/dialects/mssql/information_schema.py +++ b/lib/sqlalchemy/dialects/mssql/information_schema.py @@ -11,6 +11,7 @@ from ...types import String, Unicode, Integer, TypeDecorator ischema = MetaData() + class CoerceUnicode(TypeDecorator): impl = Unicode @@ -93,4 +94,3 @@ views = Table("VIEWS", ischema, Column("CHECK_OPTION", String, 
key="check_option"), Column("IS_UPDATABLE", String, key="is_updatable"), schema="INFORMATION_SCHEMA") - diff --git a/lib/sqlalchemy/dialects/mssql/mxodbc.py b/lib/sqlalchemy/dialects/mssql/mxodbc.py index 91922a442..e210d97de 100644 --- a/lib/sqlalchemy/dialects/mssql/mxodbc.py +++ b/lib/sqlalchemy/dialects/mssql/mxodbc.py @@ -54,6 +54,7 @@ class _MSNumeric_mxodbc(_MSNumeric_pyodbc): """Include pyodbc's numeric processor. """ + class _MSDate_mxodbc(_MSDate): def bind_processor(self, dialect): def process(value): @@ -63,6 +64,7 @@ class _MSDate_mxodbc(_MSDate): return None return process + class _MSTime_mxodbc(_MSTime): def bind_processor(self, dialect): def process(value): @@ -72,6 +74,7 @@ class _MSTime_mxodbc(_MSTime): return None return process + class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc): """ The pyodbc execution context is useful for enabling @@ -82,6 +85,7 @@ class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc): # is really only being used in cases where OUTPUT # won't work. + class MSDialect_mxodbc(MxODBCConnector, MSDialect): # this is only needed if "native ODBC" mode is used, @@ -94,16 +98,14 @@ class MSDialect_mxodbc(MxODBCConnector, MSDialect): _need_decimal_fix = True colspecs = { - sqltypes.Numeric : _MSNumeric_mxodbc, - sqltypes.DateTime : _MSDateTime, - sqltypes.Date : _MSDate_mxodbc, - sqltypes.Time : _MSTime_mxodbc, + sqltypes.Numeric: _MSNumeric_mxodbc, + sqltypes.DateTime: _MSDateTime, + sqltypes.Date: _MSDate_mxodbc, + sqltypes.Time: _MSTime_mxodbc, } - def __init__(self, description_encoding=None, **params): super(MSDialect_mxodbc, self).__init__(**params) self.description_encoding = description_encoding dialect = MSDialect_mxodbc - diff --git a/lib/sqlalchemy/dialects/mssql/pymssql.py b/lib/sqlalchemy/dialects/mssql/pymssql.py index 881893422..eb795200d 100644 --- a/lib/sqlalchemy/dialects/mssql/pymssql.py +++ b/lib/sqlalchemy/dialects/mssql/pymssql.py @@ -28,6 +28,7 @@ from .base import MSDialect from ... 
import types as sqltypes, util, processors import re + class _MSNumeric_pymssql(sqltypes.Numeric): def result_processor(self, dialect, type_): if not self.asdecimal: @@ -35,6 +36,7 @@ class _MSNumeric_pymssql(sqltypes.Numeric): else: return sqltypes.Numeric.result_processor(self, dialect, type_) + class MSDialect_pymssql(MSDialect): supports_sane_rowcount = False driver = 'pymssql' @@ -42,10 +44,11 @@ class MSDialect_pymssql(MSDialect): colspecs = util.update_copy( MSDialect.colspecs, { - sqltypes.Numeric:_MSNumeric_pymssql, - sqltypes.Float:sqltypes.Float, + sqltypes.Numeric: _MSNumeric_pymssql, + sqltypes.Float: sqltypes.Float, } ) + @classmethod def dbapi(cls): module = __import__('pymssql') diff --git a/lib/sqlalchemy/dialects/mssql/pyodbc.py b/lib/sqlalchemy/dialects/mssql/pyodbc.py index 47a4851b0..771586524 100644 --- a/lib/sqlalchemy/dialects/mssql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py @@ -116,6 +116,7 @@ from ...connectors.pyodbc import PyODBCConnector from ... import types as sqltypes, util from ...util.compat import decimal + class _MSNumeric_pyodbc(sqltypes.Numeric): """Turns Decimals with adjusted() < 0 or > 7 into strings. 
@@ -163,7 +164,7 @@ class _MSNumeric_pyodbc(sqltypes.Numeric): result = "%s%s%s" % ( (value < 0 and '-' or ''), "".join([str(s) for s in _int]), - "0" * (value.adjusted() - (len(_int)-1))) + "0" * (value.adjusted() - (len(_int) - 1))) else: if (len(_int) - 1) > value.adjusted(): result = "%s%s.%s" % ( @@ -237,7 +238,7 @@ class MSDialect_pyodbc(PyODBCConnector, MSDialect): colspecs = util.update_copy( MSDialect.colspecs, { - sqltypes.Numeric:_MSNumeric_pyodbc + sqltypes.Numeric: _MSNumeric_pyodbc } ) diff --git a/lib/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/sqlalchemy/dialects/mssql/zxjdbc.py index 1b36075b0..751af6efe 100644 --- a/lib/sqlalchemy/dialects/mssql/zxjdbc.py +++ b/lib/sqlalchemy/dialects/mssql/zxjdbc.py @@ -17,6 +17,7 @@ from ...connectors.zxJDBC import ZxJDBCConnector from .base import MSDialect, MSExecutionContext from ... import engine + class MSExecutionContext_zxjdbc(MSExecutionContext): _embedded_scope_identity = False diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index 92c6f58a0..dc2ae7515 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -339,10 +339,10 @@ RESERVED_WORDS = set( 'union', 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use', 'using', 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary', 'varchar', 'varcharacter', 'varying', 'when', 'where', 'while', 'with', - 'write', 'x509', 'xor', 'year_month', 'zerofill', # 5.0 - 'columns', 'fields', 'privileges', 'soname', 'tables', # 4.1 + 'write', 'x509', 'xor', 'year_month', 'zerofill', # 5.0 + 'columns', 'fields', 'privileges', 'soname', 'tables', # 4.1 'accessible', 'linear', 'master_ssl_verify_server_cert', 'range', - 'read_only', 'read_write', # 5.1 + 'read_only', 'read_write', # 5.1 ]) AUTOCOMMIT_RE = re.compile( @@ -361,6 +361,7 @@ class _NumericType(object): self.zerofill = zerofill super(_NumericType, self).__init__(**kw) + class _FloatType(_NumericType, sqltypes.Float): def 
__init__(self, precision=None, scale=None, asdecimal=True, **kw): if isinstance(self, (REAL, DOUBLE)) and \ @@ -375,11 +376,13 @@ class _FloatType(_NumericType, sqltypes.Float): super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw) self.scale = scale + class _IntegerType(_NumericType, sqltypes.Integer): def __init__(self, display_width=None, **kw): self.display_width = display_width super(_IntegerType, self).__init__(**kw) + class _StringType(sqltypes.String): """Base for MySQL string types.""" @@ -476,6 +479,7 @@ class DOUBLE(_FloatType): super(DOUBLE, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw) + class REAL(_FloatType, sqltypes.REAL): """MySQL REAL type.""" @@ -500,6 +504,7 @@ class REAL(_FloatType, sqltypes.REAL): super(REAL, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw) + class FLOAT(_FloatType, sqltypes.FLOAT): """MySQL FLOAT type.""" @@ -527,6 +532,7 @@ class FLOAT(_FloatType, sqltypes.FLOAT): def bind_processor(self, dialect): return None + class INTEGER(_IntegerType, sqltypes.INTEGER): """MySQL INTEGER type.""" @@ -547,6 +553,7 @@ class INTEGER(_IntegerType, sqltypes.INTEGER): """ super(INTEGER, self).__init__(display_width=display_width, **kw) + class BIGINT(_IntegerType, sqltypes.BIGINT): """MySQL BIGINTEGER type.""" @@ -567,6 +574,7 @@ class BIGINT(_IntegerType, sqltypes.BIGINT): """ super(BIGINT, self).__init__(display_width=display_width, **kw) + class MEDIUMINT(_IntegerType): """MySQL MEDIUMINTEGER type.""" @@ -587,6 +595,7 @@ class MEDIUMINT(_IntegerType): """ super(MEDIUMINT, self).__init__(display_width=display_width, **kw) + class TINYINT(_IntegerType): """MySQL TINYINT type.""" @@ -607,6 +616,7 @@ class TINYINT(_IntegerType): """ super(TINYINT, self).__init__(display_width=display_width, **kw) + class SMALLINT(_IntegerType, sqltypes.SMALLINT): """MySQL SMALLINTEGER type.""" @@ -627,6 +637,7 @@ class SMALLINT(_IntegerType, sqltypes.SMALLINT): """ 
super(SMALLINT, self).__init__(display_width=display_width, **kw) + class BIT(sqltypes.TypeEngine): """MySQL BIT type. @@ -663,6 +674,7 @@ class BIT(sqltypes.TypeEngine): return value return process + class TIME(sqltypes.TIME): """MySQL TIME type. @@ -696,6 +708,7 @@ class TIME(sqltypes.TIME): def result_processor(self, dialect, coltype): time = datetime.time + def process(value): # convert from a timedelta value if value is not None: @@ -710,10 +723,12 @@ class TIME(sqltypes.TIME): return None return process + class TIMESTAMP(sqltypes.TIMESTAMP): """MySQL TIMESTAMP type.""" __visit_name__ = 'TIMESTAMP' + class YEAR(sqltypes.TypeEngine): """MySQL YEAR type, for single byte storage of years 1901-2155.""" @@ -722,6 +737,7 @@ class YEAR(sqltypes.TypeEngine): def __init__(self, display_width=None): self.display_width = display_width + class TEXT(_StringType, sqltypes.TEXT): """MySQL TEXT type, for text up to 2^16 characters.""" @@ -757,6 +773,7 @@ class TEXT(_StringType, sqltypes.TEXT): """ super(TEXT, self).__init__(length=length, **kw) + class TINYTEXT(_StringType): """MySQL TINYTEXT type, for text up to 2^8 characters.""" @@ -788,6 +805,7 @@ class TINYTEXT(_StringType): """ super(TINYTEXT, self).__init__(**kwargs) + class MEDIUMTEXT(_StringType): """MySQL MEDIUMTEXT type, for text up to 2^24 characters.""" @@ -819,6 +837,7 @@ class MEDIUMTEXT(_StringType): """ super(MEDIUMTEXT, self).__init__(**kwargs) + class LONGTEXT(_StringType): """MySQL LONGTEXT type, for text up to 2^32 characters.""" @@ -882,6 +901,7 @@ class VARCHAR(_StringType, sqltypes.VARCHAR): """ super(VARCHAR, self).__init__(length=length, **kwargs) + class CHAR(_StringType, sqltypes.CHAR): """MySQL CHAR type, for fixed-length character data.""" @@ -902,6 +922,7 @@ class CHAR(_StringType, sqltypes.CHAR): """ super(CHAR, self).__init__(length=length, **kwargs) + class NVARCHAR(_StringType, sqltypes.NVARCHAR): """MySQL NVARCHAR type. 
@@ -954,23 +975,24 @@ class NCHAR(_StringType, sqltypes.NCHAR): super(NCHAR, self).__init__(length=length, **kwargs) - - class TINYBLOB(sqltypes._Binary): """MySQL TINYBLOB type, for binary data up to 2^8 bytes.""" __visit_name__ = 'TINYBLOB' + class MEDIUMBLOB(sqltypes._Binary): """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes.""" __visit_name__ = 'MEDIUMBLOB' + class LONGBLOB(sqltypes._Binary): """MySQL LONGBLOB type, for binary data up to 2^32 bytes.""" __visit_name__ = 'LONGBLOB' + class ENUM(sqltypes.Enum, _StringType): """MySQL ENUM type.""" @@ -1072,6 +1094,7 @@ class ENUM(sqltypes.Enum, _StringType): def bind_processor(self, dialect): super_convert = super(ENUM, self).bind_processor(dialect) + def process(value): if self.strict and value is not None and value not in self.enums: raise exc.InvalidRequestError('"%s" not a valid value for ' @@ -1086,6 +1109,7 @@ class ENUM(sqltypes.Enum, _StringType): kw['strict'] = self.strict return sqltypes.Enum.adapt(self, impltype, **kw) + class SET(_StringType): """MySQL SET type.""" @@ -1158,6 +1182,7 @@ class SET(_StringType): def bind_processor(self, dialect): super_convert = super(SET, self).bind_processor(dialect) + def process(value): if value is None or isinstance(value, (int, long, basestring)): pass @@ -1250,20 +1275,20 @@ ischema_names = { 'year': YEAR, } + class MySQLExecutionContext(default.DefaultExecutionContext): def should_autocommit_text(self, statement): return AUTOCOMMIT_RE.match(statement) + class MySQLCompiler(compiler.SQLCompiler): render_table_with_column_in_update_from = True """Overridden from base SQLCompiler value""" extract_map = compiler.SQLCompiler.extract_map.copy() - extract_map.update ({ - 'milliseconds': 'millisecond', - }) + extract_map.update({'milliseconds': 'millisecond'}) def visit_random_func(self, fn, **kw): return "rand%s" % self.function_argspec(fn) @@ -1445,7 +1470,6 @@ class MySQLDDLCompiler(compiler.DDLCompiler): return constraint_string - def 
get_column_specification(self, column, **kw): """Builds column DDL.""" @@ -1505,7 +1529,6 @@ class MySQLDDLCompiler(compiler.DDLCompiler): table_opts.append(joiner.join((opt, arg))) return ' '.join(table_opts) - def visit_create_index(self, create): index = create.element preparer = self.preparer @@ -1569,6 +1592,7 @@ class MySQLDDLCompiler(compiler.DDLCompiler): (self.preparer.format_table(constraint.table), qual, const) + class MySQLTypeCompiler(compiler.GenericTypeCompiler): def _extend_numeric(self, type_, spec): "Extend a numeric-type declaration with MySQL specific extensions." @@ -1627,7 +1651,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_numeric(type_, "NUMERIC(%(precision)s, %(scale)s)" % - {'precision': type_.precision, 'scale' : type_.scale}) + {'precision': type_.precision, 'scale': type_.scale}) def visit_DECIMAL(self, type_): if type_.precision is None: @@ -1639,13 +1663,13 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): else: return self._extend_numeric(type_, "DECIMAL(%(precision)s, %(scale)s)" % - {'precision': type_.precision, 'scale' : type_.scale}) + {'precision': type_.precision, 'scale': type_.scale}) def visit_DOUBLE(self, type_): if type_.precision is not None and type_.scale is not None: return self._extend_numeric(type_, "DOUBLE(%(precision)s, %(scale)s)" % {'precision': type_.precision, - 'scale' : type_.scale}) + 'scale': type_.scale}) else: return self._extend_numeric(type_, 'DOUBLE') @@ -1653,7 +1677,7 @@ class MySQLTypeCompiler(compiler.GenericTypeCompiler): if type_.precision is not None and type_.scale is not None: return self._extend_numeric(type_, "REAL(%(precision)s, %(scale)s)" % {'precision': type_.precision, - 'scale' : type_.scale}) + 'scale': type_.scale}) else: return self._extend_numeric(type_, 'REAL') @@ -1843,6 +1867,7 @@ class MySQLIdentifierPreparer(compiler.IdentifierPreparer): return tuple([self.quote_identifier(i) for i in ids if i is not None]) + class 
MySQLDialect(default.DefaultDialect): """Details of the MySQL dialect. Not used directly in application code.""" @@ -1992,7 +2017,6 @@ class MySQLDialect(default.DefaultDialect): def _get_default_schema_name(self, connection): return connection.execute('SELECT DATABASE()').scalar() - def has_table(self, connection, table_name, schema=None): # SHOW TABLE STATUS LIKE and SHOW TABLES LIKE do not function properly # on macosx (and maybe win?) with multibyte table names. @@ -2004,7 +2028,6 @@ class MySQLDialect(default.DefaultDialect): # full_name = self.identifier_preparer.format_table(table, # use_schema=True) - full_name = '.'.join(self.identifier_preparer._quote_free_identifiers( schema, table_name)) @@ -2096,8 +2119,8 @@ class MySQLDialect(default.DefaultDialect): if key['type'] == 'PRIMARY': # There can be only one. cols = [s[0] for s in key['columns']] - return {'constrained_columns':cols, 'name':None} - return {'constrained_columns':[], 'name':None} + return {'constrained_columns': cols, 'name': None} + return {'constrained_columns': [], 'name': None} @reflection.cache def get_foreign_keys(self, connection, table_name, schema=None, **kw): @@ -2128,12 +2151,12 @@ class MySQLDialect(default.DefaultDialect): con_kw[opt] = spec[opt] fkey_d = { - 'name' : spec['name'], - 'constrained_columns' : loc_names, - 'referred_schema' : ref_schema, - 'referred_table' : ref_name, - 'referred_columns' : ref_names, - 'options' : con_kw + 'name': spec['name'], + 'constrained_columns': loc_names, + 'referred_schema': ref_schema, + 'referred_table': ref_name, + 'referred_columns': ref_names, + 'options': con_kw } fkeys.append(fkey_d) return fkeys @@ -2325,6 +2348,7 @@ class MySQLDialect(default.DefaultDialect): rp.close() return rows + class ReflectedState(object): """Stores raw information about a SHOW CREATE TABLE statement.""" @@ -2335,6 +2359,7 @@ class ReflectedState(object): self.keys = [] self.constraints = [] + class MySQLTableDefinitionParser(object): """Parses the results 
of a SHOW CREATE TABLE statement.""" @@ -2611,7 +2636,6 @@ class MySQLTableDefinitionParser(object): # 123 or 123,456 self._re_csv_int = _re_compile(r'\d+') - # `colname` <type> [type opts] # (NOT NULL | NULL) # DEFAULT ('value' | CURRENT_TIMESTAMP...) @@ -2720,7 +2744,7 @@ class MySQLTableDefinitionParser(object): r"'(?P<val>(?:[^']|'')*?)'(?!')" % (re.escape(directive), self._optional_equals)) self._pr_options.append( - _pr_compile(regex, lambda v: v.replace("\\\\","\\").replace("''", "'"))) + _pr_compile(regex, lambda v: v.replace("\\\\", "\\").replace("''", "'"))) def _add_option_word(self, directive): regex = (r'(?P<directive>%s)%s' @@ -2790,8 +2814,8 @@ def _pr_compile(regex, cleanup=None): return (_re_compile(regex), cleanup) + def _re_compile(regex): """Compile a string to regex, I and UNICODE.""" return re.compile(regex, re.I | re.UNICODE) - diff --git a/lib/sqlalchemy/dialects/mysql/gaerdbms.py b/lib/sqlalchemy/dialects/mysql/gaerdbms.py index 66180e233..25201fee6 100644 --- a/lib/sqlalchemy/dialects/mysql/gaerdbms.py +++ b/lib/sqlalchemy/dialects/mysql/gaerdbms.py @@ -52,4 +52,4 @@ class MySQLDialect_gaerdbms(MySQLDialect_mysqldb): if code: return int(code) -dialect = MySQLDialect_gaerdbms
\ No newline at end of file +dialect = MySQLDialect_gaerdbms diff --git a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py index 82d906785..45dfb1f54 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqlconnector.py +++ b/lib/sqlalchemy/dialects/mysql/mysqlconnector.py @@ -20,6 +20,7 @@ from .base import (MySQLDialect, from ... import util + class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext): def get_lastrowid(self): @@ -34,18 +35,21 @@ class MySQLCompiler_mysqlconnector(MySQLCompiler): def post_process_text(self, text): return text.replace('%', '%%') + class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): def _escape_identifier(self, value): value = value.replace(self.escape_quote, self.escape_to_quote) return value.replace("%", "%%") + class _myconnpyBIT(BIT): def result_processor(self, dialect, coltype): """MySQL-connector already converts mysql bits, so.""" return None + class MySQLDialect_mysqlconnector(MySQLDialect): driver = 'mysqlconnector' supports_unicode_statements = True @@ -107,7 +111,7 @@ class MySQLDialect_mysqlconnector(MySQLDialect): def is_disconnect(self, e, connection, cursor): errnos = (2006, 2013, 2014, 2045, 2055, 2048) - exceptions = (self.dbapi.OperationalError,self.dbapi.InterfaceError) + exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError) if isinstance(e, exceptions): return e.errno in errnos else: diff --git a/lib/sqlalchemy/dialects/mysql/mysqldb.py b/lib/sqlalchemy/dialects/mysql/mysqldb.py index 7385f6e60..d4938540b 100644 --- a/lib/sqlalchemy/dialects/mysql/mysqldb.py +++ b/lib/sqlalchemy/dialects/mysql/mysqldb.py @@ -57,6 +57,7 @@ from ...connectors.mysqldb import ( MySQLDBConnector ) + class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext): pass @@ -68,6 +69,7 @@ class MySQLCompiler_mysqldb(MySQLDBCompiler, MySQLCompiler): class MySQLIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer, 
MySQLIdentifierPreparer): pass + class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect): execution_ctx_cls = MySQLExecutionContext_mysqldb statement_compiler = MySQLCompiler_mysqldb diff --git a/lib/sqlalchemy/dialects/mysql/oursql.py b/lib/sqlalchemy/dialects/mysql/oursql.py index cc4e3b5f2..ca4986d62 100644 --- a/lib/sqlalchemy/dialects/mysql/oursql.py +++ b/lib/sqlalchemy/dialects/mysql/oursql.py @@ -39,7 +39,6 @@ from .base import (BIT, MySQLDialect, MySQLExecutionContext) from ... import types as sqltypes, util - class _oursqlBIT(BIT): def result_processor(self, dialect, coltype): """oursql already converts mysql bits, so.""" @@ -53,6 +52,7 @@ class MySQLExecutionContext_oursql(MySQLExecutionContext): def plain_query(self): return self.execution_options.get('_oursql_plain_query', False) + class MySQLDialect_oursql(MySQLDialect): driver = 'oursql' # Py2K @@ -126,64 +126,67 @@ class MySQLDialect_oursql(MySQLDialect): # Q: why didn't we need all these "plain_query" overrides earlier ? # am i on a newer/older version of OurSQL ? 
def has_table(self, connection, table_name, schema=None): - return MySQLDialect.has_table(self, - connection.connect().\ - execution_options(_oursql_plain_query=True), - table_name, schema) + return MySQLDialect.has_table( + self, + connection.connect().execution_options(_oursql_plain_query=True), + table_name, + schema + ) def get_table_options(self, connection, table_name, schema=None, **kw): - return MySQLDialect.get_table_options(self, - connection.connect().\ - execution_options(_oursql_plain_query=True), - table_name, - schema = schema, - **kw + return MySQLDialect.get_table_options( + self, + connection.connect().execution_options(_oursql_plain_query=True), + table_name, + schema=schema, + **kw ) - def get_columns(self, connection, table_name, schema=None, **kw): - return MySQLDialect.get_columns(self, - connection.connect().\ - execution_options(_oursql_plain_query=True), - table_name, - schema=schema, - **kw + return MySQLDialect.get_columns( + self, + connection.connect().execution_options(_oursql_plain_query=True), + table_name, + schema=schema, + **kw ) def get_view_names(self, connection, schema=None, **kw): - return MySQLDialect.get_view_names(self, - connection.connect().\ - execution_options(_oursql_plain_query=True), - schema=schema, - **kw + return MySQLDialect.get_view_names( + self, + connection.connect().execution_options(_oursql_plain_query=True), + schema=schema, + **kw ) def get_table_names(self, connection, schema=None, **kw): - return MySQLDialect.get_table_names(self, - connection.connect().\ - execution_options(_oursql_plain_query=True), - schema + return MySQLDialect.get_table_names( + self, + connection.connect().execution_options(_oursql_plain_query=True), + schema ) def get_schema_names(self, connection, **kw): - return MySQLDialect.get_schema_names(self, - connection.connect().\ - execution_options(_oursql_plain_query=True), - **kw + return MySQLDialect.get_schema_names( + self, + 
connection.connect().execution_options(_oursql_plain_query=True), + **kw ) def initialize(self, connection): return MySQLDialect.initialize( - self, - connection.execution_options(_oursql_plain_query=True) - ) + self, + connection.execution_options(_oursql_plain_query=True) + ) def _show_create_table(self, connection, table, charset=None, full_name=None): - return MySQLDialect._show_create_table(self, - connection.contextual_connect(close_with_result=True). - execution_options(_oursql_plain_query=True), - table, charset, full_name) + return MySQLDialect._show_create_table( + self, + connection.contextual_connect(close_with_result=True). + execution_options(_oursql_plain_query=True), + table, charset, full_name + ) def is_disconnect(self, e, connection, cursor): if isinstance(e, self.dbapi.ProgrammingError): diff --git a/lib/sqlalchemy/dialects/mysql/pymysql.py b/lib/sqlalchemy/dialects/mysql/pymysql.py index 36b49ba3b..3989d037c 100644 --- a/lib/sqlalchemy/dialects/mysql/pymysql.py +++ b/lib/sqlalchemy/dialects/mysql/pymysql.py @@ -23,12 +23,14 @@ the pymysql driver as well. from .mysqldb import MySQLDialect_mysqldb + class MySQLDialect_pymysql(MySQLDialect_mysqldb): driver = 'pymysql' description_encoding = None + @classmethod def dbapi(cls): return __import__('pymysql') -dialect = MySQLDialect_pymysql
\ No newline at end of file +dialect = MySQLDialect_pymysql diff --git a/lib/sqlalchemy/dialects/mysql/pyodbc.py b/lib/sqlalchemy/dialects/mysql/pyodbc.py index 2736ef7a4..f1dcae6d1 100644 --- a/lib/sqlalchemy/dialects/mysql/pyodbc.py +++ b/lib/sqlalchemy/dialects/mysql/pyodbc.py @@ -29,6 +29,7 @@ from ...connectors.pyodbc import PyODBCConnector from ... import util import re + class MySQLExecutionContext_pyodbc(MySQLExecutionContext): def get_lastrowid(self): @@ -38,6 +39,7 @@ class MySQLExecutionContext_pyodbc(MySQLExecutionContext): cursor.close() return lastrowid + class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect): supports_unicode_statements = False execution_ctx_cls = MySQLExecutionContext_pyodbc diff --git a/lib/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/sqlalchemy/dialects/mysql/zxjdbc.py index 955044a58..cda33c8c9 100644 --- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py +++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py @@ -28,6 +28,7 @@ from ... import types as sqltypes, util from ...connectors.zxJDBC import ZxJDBCConnector from .base import BIT, MySQLDialect, MySQLExecutionContext + class _ZxJDBCBit(BIT): def result_processor(self, dialect, coltype): """Converts boolean or byte arrays from MySQL Connector/J to longs.""" @@ -96,7 +97,7 @@ class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect): if c: return int(c) - def _get_server_version_info(self,connection): + def _get_server_version_info(self, connection): dbapi_con = connection.connection version = [] r = re.compile('[.\-]') diff --git a/lib/sqlalchemy/dialects/oracle/base.py b/lib/sqlalchemy/dialects/oracle/base.py index 7a4d835c9..baea4815b 100644 --- a/lib/sqlalchemy/dialects/oracle/base.py +++ b/lib/sqlalchemy/dialects/oracle/base.py @@ -139,10 +139,9 @@ is not in use this flag should be left off. 
""" -import random, re +import re -from sqlalchemy import schema as sa_schema -from sqlalchemy import util, sql, log +from sqlalchemy import util, sql from sqlalchemy.engine import default, base, reflection from sqlalchemy.sql import compiler, visitors, expression from sqlalchemy.sql import operators as sql_operators, functions as sql_functions @@ -164,18 +163,22 @@ RESERVED_WORDS = \ NO_ARG_FNS = set('UID CURRENT_DATE SYSDATE USER ' 'CURRENT_TIME CURRENT_TIMESTAMP'.split()) + class RAW(sqltypes._Binary): __visit_name__ = 'RAW' OracleRaw = RAW + class NCLOB(sqltypes.Text): __visit_name__ = 'NCLOB' + class VARCHAR2(VARCHAR): __visit_name__ = 'VARCHAR2' NVARCHAR2 = NVARCHAR + class NUMBER(sqltypes.Numeric, sqltypes.Integer): __visit_name__ = 'NUMBER' @@ -201,18 +204,22 @@ class NUMBER(sqltypes.Numeric, sqltypes.Integer): class DOUBLE_PRECISION(sqltypes.Numeric): __visit_name__ = 'DOUBLE_PRECISION' + def __init__(self, precision=None, scale=None, asdecimal=None): if asdecimal is None: asdecimal = False super(DOUBLE_PRECISION, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal) + class BFILE(sqltypes.LargeBinary): __visit_name__ = 'BFILE' + class LONG(sqltypes.Text): __visit_name__ = 'LONG' + class INTERVAL(sqltypes.TypeEngine): __visit_name__ = 'INTERVAL' @@ -243,6 +250,7 @@ class INTERVAL(sqltypes.TypeEngine): def _type_affinity(self): return sqltypes.Interval + class ROWID(sqltypes.TypeEngine): """Oracle ROWID type. 
@@ -252,33 +260,32 @@ class ROWID(sqltypes.TypeEngine): __visit_name__ = 'ROWID' - class _OracleBoolean(sqltypes.Boolean): def get_dbapi_type(self, dbapi): return dbapi.NUMBER colspecs = { - sqltypes.Boolean : _OracleBoolean, - sqltypes.Interval : INTERVAL, + sqltypes.Boolean: _OracleBoolean, + sqltypes.Interval: INTERVAL, } ischema_names = { - 'VARCHAR2' : VARCHAR, - 'NVARCHAR2' : NVARCHAR, - 'CHAR' : CHAR, - 'DATE' : DATE, - 'NUMBER' : NUMBER, - 'BLOB' : BLOB, - 'BFILE' : BFILE, - 'CLOB' : CLOB, - 'NCLOB' : NCLOB, - 'TIMESTAMP' : TIMESTAMP, - 'TIMESTAMP WITH TIME ZONE' : TIMESTAMP, - 'INTERVAL DAY TO SECOND' : INTERVAL, - 'RAW' : RAW, - 'FLOAT' : FLOAT, - 'DOUBLE PRECISION' : DOUBLE_PRECISION, - 'LONG' : LONG, + 'VARCHAR2': VARCHAR, + 'NVARCHAR2': NVARCHAR, + 'CHAR': CHAR, + 'DATE': DATE, + 'NUMBER': NUMBER, + 'BLOB': BLOB, + 'BFILE': BFILE, + 'CLOB': CLOB, + 'NCLOB': NCLOB, + 'TIMESTAMP': TIMESTAMP, + 'TIMESTAMP WITH TIME ZONE': TIMESTAMP, + 'INTERVAL DAY TO SECOND': INTERVAL, + 'RAW': RAW, + 'FLOAT': FLOAT, + 'DOUBLE PRECISION': DOUBLE_PRECISION, + 'LONG': LONG, } @@ -335,9 +342,11 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): if precision is None: return name elif scale is None: - return "%(name)s(%(precision)s)" % {'name':name,'precision': precision} + n = "%(name)s(%(precision)s)" + return n % {'name': name, 'precision': precision} else: - return "%(name)s(%(precision)s, %(scale)s)" % {'name':name,'precision': precision, 'scale' : scale} + n = "%(name)s(%(precision)s, %(scale)s)" + return n % {'name': name, 'precision': precision, 'scale': scale} def visit_string(self, type_): return self.visit_VARCHAR2(type_) @@ -354,12 +363,11 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): def _visit_varchar(self, type_, n, num): if not n and self.dialect._supports_char_length: - return "VARCHAR%(two)s(%(length)s CHAR)" % { - 'length' : type_.length, - 'two':num} + varchar = "VARCHAR%(two)s(%(length)s CHAR)" + return varchar % {'length': 
type_.length, 'two': num} else: - return "%(n)sVARCHAR%(two)s(%(length)s)" % {'length' : type_.length, - 'two':num, 'n':n} + varchar = "%(n)sVARCHAR%(two)s(%(length)s)" + return varchar % {'length': type_.length, 'two': num, 'n': n} def visit_text(self, type_): return self.visit_CLOB(type_) @@ -381,13 +389,14 @@ class OracleTypeCompiler(compiler.GenericTypeCompiler): def visit_RAW(self, type_): if type_.length: - return "RAW(%(length)s)" % {'length' : type_.length} + return "RAW(%(length)s)" % {'length': type_.length} else: return "RAW" def visit_ROWID(self, type_): return "ROWID" + class OracleCompiler(compiler.SQLCompiler): """Oracle compiler modifies the lexical structure of Select statements to work under non-ANSI configured Oracle databases, if @@ -460,7 +469,7 @@ class OracleCompiler(compiler.SQLCompiler): elif binary.right.table is join.right: binary.right = _OuterJoinColumn(binary.right) clauses.append(visitors.cloned_traverse(join.onclause, {}, - {'binary':visit_binary})) + {'binary': visit_binary})) else: clauses.append(join.onclause) @@ -613,6 +622,7 @@ class OracleCompiler(compiler.SQLCompiler): else: return super(OracleCompiler, self).for_update_clause(select) + class OracleDDLCompiler(compiler.DDLCompiler): def define_constraint_cascades(self, constraint): @@ -634,6 +644,7 @@ class OracleDDLCompiler(compiler.DDLCompiler): return super(OracleDDLCompiler, self).\ visit_create_index(create, include_schema=True) + class OracleIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = set([x.lower() for x in RESERVED_WORDS]) @@ -658,6 +669,7 @@ class OracleExecutionContext(default.DefaultExecutionContext): self.dialect.identifier_preparer.format_sequence(seq) + ".nextval FROM DUAL", type_) + class OracleDialect(default.DefaultDialect): name = 'oracle' supports_alter = True @@ -854,7 +866,6 @@ class OracleDialect(default.DefaultDialect): cursor = connection.execute(s, owner=schema) return [self.normalize_name(row[0]) for row in cursor] - 
@reflection.cache def get_view_names(self, connection, schema=None, **kw): schema = self.denormalize_name(schema or self.default_schema_name) @@ -892,14 +903,14 @@ class OracleDialect(default.DefaultDialect): "SELECT column_name, data_type, %(char_length_col)s, data_precision, data_scale, " "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s " "WHERE table_name = :table_name AND owner = :owner " - "ORDER BY column_id" % {'dblink': dblink, 'char_length_col':char_length_col}), + "ORDER BY column_id" % {'dblink': dblink, 'char_length_col': char_length_col}), table_name=table_name, owner=schema) for row in c: (colname, orig_colname, coltype, length, precision, scale, nullable, default) = \ - (self.normalize_name(row[0]), row[0], row[1], row[2], row[3], row[4], row[5]=='Y', row[6]) + (self.normalize_name(row[0]), row[0], row[1], row[2], row[3], row[4], row[5] == 'Y', row[6]) - if coltype == 'NUMBER' : + if coltype == 'NUMBER': coltype = NUMBER(precision, scale) elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'): coltype = self.ischema_names.get(coltype)(length) @@ -919,7 +930,7 @@ class OracleDialect(default.DefaultDialect): 'type': coltype, 'nullable': nullable, 'default': default, - 'autoincrement':default is None + 'autoincrement': default is None } if orig_colname.lower() == orig_colname: cdict['quote'] = True @@ -931,7 +942,6 @@ class OracleDialect(default.DefaultDialect): def get_indexes(self, connection, table_name, schema=None, resolve_synonyms=False, dblink='', **kw): - info_cache = kw.get('info_cache') (table_name, schema, dblink, synonym) = \ self._prepare_reflection_args(connection, table_name, schema, @@ -1042,7 +1052,7 @@ class OracleDialect(default.DefaultDialect): if constraint_name is None: constraint_name = self.normalize_name(cons_name) pkeys.append(local_column) - return {'constrained_columns':pkeys, 'name':constraint_name} + return {'constrained_columns': pkeys, 'name': constraint_name} @reflection.cache def get_foreign_keys(self, connection, 
table_name, schema=None, **kw): @@ -1056,7 +1066,7 @@ class OracleDialect(default.DefaultDialect): """ - requested_schema = schema # to check later on + requested_schema = schema # to check later on resolve_synonyms = kw.get('oracle_resolve_synonyms', False) dblink = kw.get('dblink', '') info_cache = kw.get('info_cache') @@ -1072,11 +1082,11 @@ class OracleDialect(default.DefaultDialect): def fkey_rec(): return { - 'name' : None, - 'constrained_columns' : [], - 'referred_schema' : None, - 'referred_table' : None, - 'referred_columns' : [] + 'name': None, + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': None, + 'referred_columns': [] } fkeys = util.defaultdict(fkey_rec) @@ -1091,7 +1101,7 @@ class OracleDialect(default.DefaultDialect): util.warn( ("Got 'None' querying 'table_name' from " "all_cons_columns%(dblink)s - does the user have " - "proper rights to the table?") % {'dblink':dblink}) + "proper rights to the table?") % {'dblink': dblink}) continue rec = fkeys[cons_name] @@ -1141,12 +1151,8 @@ class OracleDialect(default.DefaultDialect): return None - class _OuterJoinColumn(sql.ClauseElement): __visit_name__ = 'outer_join_column' def __init__(self, column): self.column = column - - - diff --git a/lib/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/sqlalchemy/dialects/oracle/cx_oracle.py index 233f3cb27..bee730800 100644 --- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py +++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py @@ -155,12 +155,12 @@ from .base import OracleCompiler, OracleDialect, \ from . 
import base as oracle from ...engine import result as _result from sqlalchemy import types as sqltypes, util, exc, processors -from datetime import datetime import random import collections from sqlalchemy.util.compat import decimal import re + class _OracleNumeric(sqltypes.Numeric): def bind_processor(self, dialect): # cx_oracle accepts Decimal objects and floats @@ -182,6 +182,7 @@ class _OracleNumeric(sqltypes.Numeric): fstring = "%.10f" else: fstring = "%%.%df" % self.scale + def to_decimal(value): if value is None: return None @@ -189,6 +190,7 @@ class _OracleNumeric(sqltypes.Numeric): return value else: return decimal.Decimal(fstring % value) + return to_decimal else: if self.precision is None and self.scale is None: @@ -204,6 +206,7 @@ class _OracleNumeric(sqltypes.Numeric): return super(_OracleNumeric, self).\ result_processor(dialect, coltype) + class _OracleDate(sqltypes.Date): def bind_processor(self, dialect): return None @@ -216,6 +219,7 @@ class _OracleDate(sqltypes.Date): return value return process + class _LOBMixin(object): def result_processor(self, dialect, coltype): if not dialect.auto_convert_lobs: @@ -229,6 +233,7 @@ class _LOBMixin(object): return value return process + class _NativeUnicodeMixin(object): # Py3K #pass @@ -249,21 +254,26 @@ class _NativeUnicodeMixin(object): # unicode in all cases, so the "native_unicode" flag # will be set for the default String.result_processor. 
+ class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR): def get_dbapi_type(self, dbapi): return dbapi.FIXED_CHAR + class _OracleNVarChar(_NativeUnicodeMixin, sqltypes.NVARCHAR): def get_dbapi_type(self, dbapi): return getattr(dbapi, 'UNICODE', dbapi.STRING) + class _OracleText(_LOBMixin, sqltypes.Text): def get_dbapi_type(self, dbapi): return dbapi.CLOB + class _OracleString(_NativeUnicodeMixin, sqltypes.String): pass + class _OracleUnicodeText(_LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText): def get_dbapi_type(self, dbapi): return dbapi.NCLOB @@ -282,6 +292,7 @@ class _OracleUnicodeText(_LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText): return string_processor(lob_processor(value)) return process + class _OracleInteger(sqltypes.Integer): def result_processor(self, dialect, coltype): def to_int(val): @@ -290,6 +301,7 @@ class _OracleInteger(sqltypes.Integer): return val return to_int + class _OracleBinary(_LOBMixin, sqltypes.LargeBinary): def get_dbapi_type(self, dbapi): return dbapi.BLOB @@ -297,17 +309,21 @@ class _OracleBinary(_LOBMixin, sqltypes.LargeBinary): def bind_processor(self, dialect): return None + class _OracleInterval(oracle.INTERVAL): def get_dbapi_type(self, dbapi): return dbapi.INTERVAL + class _OracleRaw(oracle.RAW): pass + class _OracleRowid(oracle.ROWID): def get_dbapi_type(self, dbapi): return dbapi.ROWID + class OracleCompiler_cx_oracle(OracleCompiler): def bindparam_string(self, name, quote=None, **kw): if quote is True or quote is not False and \ @@ -421,6 +437,7 @@ class OracleExecutionContext_cx_oracle(OracleExecutionContext): return result + class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle): """Support WITH_UNICODE in Python 2.xx. 
@@ -442,6 +459,7 @@ class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_or return super(OracleExecutionContext_cx_oracle_with_unicode, self).\ _execute_scalar(unicode(stmt)) + class ReturningResultProxy(_result.FullyBufferedResultProxy): """Result proxy which stuffs the _returning clause + outparams into the fetch.""" @@ -461,6 +479,7 @@ class ReturningResultProxy(_result.FullyBufferedResultProxy): return collections.deque([tuple(self._returning_params["ret_%d" % i] for i, c in enumerate(self._returning_params))]) + class OracleDialect_cx_oracle(OracleDialect): execution_ctx_cls = OracleExecutionContext_cx_oracle statement_compiler = OracleCompiler_cx_oracle @@ -469,25 +488,27 @@ class OracleDialect_cx_oracle(OracleDialect): colspecs = colspecs = { sqltypes.Numeric: _OracleNumeric, - sqltypes.Date : _OracleDate, # generic type, assume datetime.date is desired + sqltypes.Date: _OracleDate, # generic type, assume datetime.date is desired oracle.DATE: oracle.DATE, # non generic type - passthru - sqltypes.LargeBinary : _OracleBinary, - sqltypes.Boolean : oracle._OracleBoolean, - sqltypes.Interval : _OracleInterval, - oracle.INTERVAL : _OracleInterval, - sqltypes.Text : _OracleText, - sqltypes.String : _OracleString, - sqltypes.UnicodeText : _OracleUnicodeText, - sqltypes.CHAR : _OracleChar, - sqltypes.Integer : _OracleInteger, # this is only needed for OUT parameters. - # it would be nice if we could not use it otherwise. + sqltypes.LargeBinary: _OracleBinary, + sqltypes.Boolean: oracle._OracleBoolean, + sqltypes.Interval: _OracleInterval, + oracle.INTERVAL: _OracleInterval, + sqltypes.Text: _OracleText, + sqltypes.String: _OracleString, + sqltypes.UnicodeText: _OracleUnicodeText, + sqltypes.CHAR: _OracleChar, + + # this is only needed for OUT parameters. + # it would be nice if we could not use it otherwise. 
+ sqltypes.Integer: _OracleInteger, + oracle.RAW: _OracleRaw, sqltypes.Unicode: _OracleNVarChar, - sqltypes.NVARCHAR : _OracleNVarChar, + sqltypes.NVARCHAR: _OracleNVarChar, oracle.ROWID: _OracleRowid, } - execute_sequence_format = list def __init__(self, @@ -568,10 +589,11 @@ class OracleDialect_cx_oracle(OracleDialect): # expect encoded strings or unicodes, etc. self.dbapi_type_map = { self.dbapi.CLOB: oracle.CLOB(), - self.dbapi.NCLOB:oracle.NCLOB(), + self.dbapi.NCLOB: oracle.NCLOB(), self.dbapi.BLOB: oracle.BLOB(), self.dbapi.BINARY: oracle.RAW(), } + @classmethod def dbapi(cls): import cx_Oracle @@ -637,6 +659,7 @@ class OracleDialect_cx_oracle(OracleDialect): return cx_Oracle = self.dbapi + def output_type_handler(cursor, name, defaultType, size, precision, scale): # convert all NUMBER with precision + positive scale to Decimal diff --git a/lib/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/sqlalchemy/dialects/oracle/zxjdbc.py index 54608969b..94d8f8219 100644 --- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py +++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py @@ -24,6 +24,7 @@ import collections SQLException = zxJDBC = None + class _ZxJDBCDate(sqltypes.Date): def result_processor(self, dialect, coltype): @@ -77,7 +78,7 @@ class OracleCompiler_zxjdbc(OracleCompiler): self.binds[bindparam.key] = bindparam binds.append(self.bindparam_string(self._truncate_bindparam(bindparam))) - return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds) + return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds) class OracleExecutionContext_zxjdbc(OracleExecutionContext): @@ -177,7 +178,7 @@ class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect): colspecs = util.update_copy( OracleDialect.colspecs, { - sqltypes.Date : _ZxJDBCDate, + sqltypes.Date: _ZxJDBCDate, sqltypes.Numeric: _ZxJDBCNumeric } ) @@ -188,17 +189,19 @@ class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect): from java.sql import SQLException from com.ziclix.python.sql import zxJDBC from 
com.ziclix.python.sql.handler import OracleDataHandler - class OracleReturningDataHandler(OracleDataHandler): + class OracleReturningDataHandler(OracleDataHandler): """zxJDBC DataHandler that specially handles ReturningParam.""" def setJDBCObject(self, statement, index, object, dbtype=None): if type(object) is ReturningParam: statement.registerReturnParameter(index, object.type) elif dbtype is None: - OracleDataHandler.setJDBCObject(self, statement, index, object) + OracleDataHandler.setJDBCObject( + self, statement, index, object) else: - OracleDataHandler.setJDBCObject(self, statement, index, object, dbtype) + OracleDataHandler.setJDBCObject( + self, statement, index, object, dbtype) self.DataHandler = OracleReturningDataHandler def initialize(self, connection): diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py index f1061c90b..c7e84751d 100644 --- a/lib/sqlalchemy/dialects/postgresql/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -154,9 +154,10 @@ Operator Classes ^^^^^^^^^^^^^^^^^ PostgreSQL allows the specification of an *operator class* for each column of -an index (see http://www.postgresql.org/docs/8.3/interactive/indexes-opclass.html). -The :class:`.Index` construct allows these to be specified via the ``postgresql_ops`` -keyword argument:: +an index (see +http://www.postgresql.org/docs/8.3/interactive/indexes-opclass.html). +The :class:`.Index` construct allows these to be specified via the +``postgresql_ops`` keyword argument:: Index('my_index', my_table.c.id, my_table.c.data, postgresql_ops={ @@ -168,15 +169,15 @@ keyword argument:: ``postgresql_ops`` keyword argument to :class:`.Index` construct. Note that the keys in the ``postgresql_ops`` dictionary are the "key" name of -the :class:`.Column`, i.e. the name used to access it from the ``.c`` collection -of :class:`.Table`, which can be configured to be different than the actual -name of the column as expressed in the database. 
+the :class:`.Column`, i.e. the name used to access it from the ``.c`` +collection of :class:`.Table`, which can be configured to be different than +the actual name of the column as expressed in the database. Index Types ^^^^^^^^^^^^ -PostgreSQL provides several index types: B-Tree, Hash, GiST, and GIN, as well as -the ability for users to create their own (see +PostgreSQL provides several index types: B-Tree, Hash, GiST, and GIN, as well +as the ability for users to create their own (see http://www.postgresql.org/docs/8.3/static/indexes-types.html). These can be specified on :class:`.Index` using the ``postgresql_using`` keyword argument:: @@ -226,24 +227,30 @@ _DECIMAL_TYPES = (1231, 1700) _FLOAT_TYPES = (700, 701, 1021, 1022) _INT_TYPES = (20, 21, 23, 26, 1005, 1007, 1016) + class BYTEA(sqltypes.LargeBinary): __visit_name__ = 'BYTEA' + class DOUBLE_PRECISION(sqltypes.Float): __visit_name__ = 'DOUBLE_PRECISION' + class INET(sqltypes.TypeEngine): __visit_name__ = "INET" PGInet = INET + class CIDR(sqltypes.TypeEngine): __visit_name__ = "CIDR" PGCidr = CIDR + class MACADDR(sqltypes.TypeEngine): __visit_name__ = "MACADDR" PGMacAddr = MACADDR + class TIMESTAMP(sqltypes.TIMESTAMP): def __init__(self, timezone=False, precision=None): super(TIMESTAMP, self).__init__(timezone=timezone) @@ -255,6 +262,7 @@ class TIME(sqltypes.TIME): super(TIME, self).__init__(timezone=timezone) self.precision = precision + class INTERVAL(sqltypes.TypeEngine): """Postgresql INTERVAL type. 
@@ -263,6 +271,7 @@ class INTERVAL(sqltypes.TypeEngine): """ __visit_name__ = 'INTERVAL' + def __init__(self, precision=None): self.precision = precision @@ -276,8 +285,10 @@ class INTERVAL(sqltypes.TypeEngine): PGInterval = INTERVAL + class BIT(sqltypes.TypeEngine): __visit_name__ = 'BIT' + def __init__(self, length=None, varying=False): if not varying: # BIT without VARYING defaults to length 1 @@ -289,6 +300,7 @@ class BIT(sqltypes.TypeEngine): PGBit = BIT + class UUID(sqltypes.TypeEngine): """Postgresql UUID type. @@ -313,8 +325,8 @@ class UUID(sqltypes.TypeEngine): """ if as_uuid and _python_UUID is None: raise NotImplementedError( - "This version of Python does not support the native UUID type." - ) + "This version of Python does not support the native UUID type." + ) self.as_uuid = as_uuid def bind_processor(self, dialect): @@ -339,9 +351,11 @@ class UUID(sqltypes.TypeEngine): PGUuid = UUID + class _Slice(expression.ColumnElement): __visit_name__ = 'slice' type = sqltypes.NULLTYPE + def __init__(self, slice_, source_comparator): self.start = source_comparator._check_literal( source_comparator.expr, @@ -350,6 +364,7 @@ class _Slice(expression.ColumnElement): source_comparator.expr, operators.getitem, slice_.stop) + class array(expression.Tuple): """A Postgresql ARRAY literal. @@ -399,6 +414,7 @@ class array(expression.Tuple): def self_group(self, against): return self + class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): """Postgresql ARRAY type. @@ -436,8 +452,8 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): to optimize itself to expect exactly that number of dimensions. Note that Postgresql itself still allows N dimensions with such a type. - SQL expressions of type :class:`.ARRAY` have support for "index" and "slice" - behavior. The Python ``[]`` operator works normally here, given + SQL expressions of type :class:`.ARRAY` have support for "index" and + "slice" behavior. 
The Python ``[]`` operator works normally here, given integer indexes or slices. Note that Postgresql arrays default to 1-based indexing. The operator produces binary expression constructs which will produce the appropriate SQL, both for @@ -539,6 +555,7 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): item_proc = self.item_type.\ dialect_impl(dialect).\ bind_processor(dialect) + def process(value): if value is None: return value @@ -554,6 +571,7 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): item_proc = self.item_type.\ dialect_impl(dialect).\ result_processor(dialect, coltype) + def process(value): if value is None: return value @@ -567,6 +585,7 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine): PGArray = ARRAY + class ENUM(sqltypes.Enum): """Postgresql ENUM type. @@ -703,45 +722,44 @@ class ENUM(sqltypes.Enum): self.drop(bind=bind, checkfirst=checkfirst) colspecs = { - sqltypes.Interval:INTERVAL, - sqltypes.Enum:ENUM, + sqltypes.Interval: INTERVAL, + sqltypes.Enum: ENUM, } ischema_names = { - 'integer' : INTEGER, - 'bigint' : BIGINT, - 'smallint' : SMALLINT, - 'character varying' : VARCHAR, - 'character' : CHAR, - '"char"' : sqltypes.String, - 'name' : sqltypes.String, - 'text' : TEXT, - 'numeric' : NUMERIC, - 'float' : FLOAT, - 'real' : REAL, + 'integer': INTEGER, + 'bigint': BIGINT, + 'smallint': SMALLINT, + 'character varying': VARCHAR, + 'character': CHAR, + '"char"': sqltypes.String, + 'name': sqltypes.String, + 'text': TEXT, + 'numeric': NUMERIC, + 'float': FLOAT, + 'real': REAL, 'inet': INET, 'cidr': CIDR, 'uuid': UUID, 'bit': BIT, 'bit varying': BIT, 'macaddr': MACADDR, - 'double precision' : DOUBLE_PRECISION, - 'timestamp' : TIMESTAMP, - 'timestamp with time zone' : TIMESTAMP, - 'timestamp without time zone' : TIMESTAMP, - 'time with time zone' : TIME, - 'time without time zone' : TIME, - 'date' : DATE, + 'double precision': DOUBLE_PRECISION, + 'timestamp': TIMESTAMP, + 'timestamp with time zone': TIMESTAMP, + 'timestamp 
without time zone': TIMESTAMP, + 'time with time zone': TIME, + 'time without time zone': TIME, + 'date': DATE, 'time': TIME, - 'bytea' : BYTEA, - 'boolean' : BOOLEAN, - 'interval':INTERVAL, - 'interval year to month':INTERVAL, - 'interval day to second':INTERVAL, + 'bytea': BYTEA, + 'boolean': BOOLEAN, + 'interval': INTERVAL, + 'interval year to month': INTERVAL, + 'interval day to second': INTERVAL, } - class PGCompiler(compiler.SQLCompiler): def visit_array(self, element, **kw): @@ -814,7 +832,7 @@ class PGCompiler(compiler.SQLCompiler): elif isinstance(select._distinct, (list, tuple)): return "DISTINCT ON (" + ', '.join( [self.process(col) for col in select._distinct] - )+ ") " + ) + ") " else: return "DISTINCT ON (" + self.process(select._distinct) + ") " else: @@ -861,6 +879,7 @@ class PGCompiler(compiler.SQLCompiler): return "EXTRACT(%s FROM %s)" % ( field, self.process(expr)) + class PGDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): colspec = self.preparer.format_column(column) @@ -1046,6 +1065,7 @@ class PGIdentifierPreparer(compiler.IdentifierPreparer): name = self.quote_schema(type_.schema, type_.quote) + "." 
+ name return name + class PGInspector(reflection.Inspector): def __init__(self, conn): @@ -1057,11 +1077,14 @@ class PGInspector(reflection.Inspector): return self.dialect.get_table_oid(self.bind, table_name, schema, info_cache=self.info_cache) + class CreateEnumType(schema._CreateDropBase): - __visit_name__ = "create_enum_type" + __visit_name__ = "create_enum_type" + class DropEnumType(schema._CreateDropBase): - __visit_name__ = "drop_enum_type" + __visit_name__ = "drop_enum_type" + class PGExecutionContext(default.DefaultExecutionContext): def fire_sequence(self, seq, type_): @@ -1091,7 +1114,8 @@ class PGExecutionContext(default.DefaultExecutionContext): col = column.name tab = tab[0:29 + max(0, (29 - len(col)))] col = col[0:29 + max(0, (29 - len(tab)))] - column._postgresql_seq_name = seq_name = "%s_%s_seq" % (tab, col) + name = "%s_%s_seq" % (tab, col) + column._postgresql_seq_name = seq_name = name sch = column.table.schema if sch is not None: @@ -1105,6 +1129,7 @@ class PGExecutionContext(default.DefaultExecutionContext): return super(PGExecutionContext, self).get_insert_default(column) + class PGDialect(default.DefaultDialect): name = 'postgresql' supports_alter = True @@ -1226,9 +1251,10 @@ class PGDialect(default.DefaultDialect): return connection.scalar("select current_schema()") def has_schema(self, connection, schema): + query = "select nspname from pg_namespace where lower(nspname)=:schema" cursor = connection.execute( sql.text( - "select nspname from pg_namespace where lower(nspname)=:schema", + query, bindparams=[ sql.bindparam( 'schema', unicode(schema.lower()), @@ -1365,7 +1391,7 @@ class PGDialect(default.DefaultDialect): sql.bindparam('table_name', type_=sqltypes.Unicode), sql.bindparam('schema', type_=sqltypes.Unicode) ], - typemap={'oid':sqltypes.Integer} + typemap={'oid': sqltypes.Integer} ) c = connection.execute(s, table_name=table_name, schema=schema) table_oid = c.scalar() @@ -1404,12 +1430,11 @@ class PGDialect(default.DefaultDialect): 
"AND '%s' = (select nspname from pg_namespace n " "where n.oid = c.relnamespace) " % current_schema, - typemap = {'relname':sqltypes.Unicode} + typemap={'relname': sqltypes.Unicode} ) ) return [row[0] for row in result] - @reflection.cache def get_view_names(self, connection, schema=None, **kw): if schema is not None: @@ -1484,8 +1509,8 @@ class PGDialect(default.DefaultDialect): # format columns columns = [] for name, format_type, default, notnull, attnum, table_oid in rows: - column_info = self._get_column_info(name, format_type, default, - notnull, domains, enums, schema) + column_info = self._get_column_info( + name, format_type, default, notnull, domains, enums, schema) columns.append(column_info) return columns @@ -1670,8 +1695,8 @@ class PGDialect(default.DefaultDialect): """ t = sql.text(FK_SQL, typemap={ - 'conname':sqltypes.Unicode, - 'condef':sqltypes.Unicode}) + 'conname': sqltypes.Unicode, + 'condef': sqltypes.Unicode}) c = connection.execute(t, table=table_oid) fkeys = [] for conname, condef, conschema in c.fetchall(): @@ -1697,11 +1722,11 @@ class PGDialect(default.DefaultDialect): referred_columns = [preparer._unquote_identifier(x) for x in re.split(r'\s*,\s', referred_columns)] fkey_d = { - 'name' : conname, - 'constrained_columns' : constrained_columns, - 'referred_schema' : referred_schema, - 'referred_table' : referred_table, - 'referred_columns' : referred_columns + 'name': conname, + 'constrained_columns': constrained_columns, + 'referred_schema': referred_schema, + 'referred_table': referred_table, + 'referred_columns': referred_columns } fkeys.append(fkey_d) return fkeys @@ -1732,7 +1757,7 @@ class PGDialect(default.DefaultDialect): i.relname """ - t = sql.text(IDX_SQL, typemap={'attname':sqltypes.Unicode}) + t = sql.text(IDX_SQL, typemap={'attname': sqltypes.Unicode}) c = connection.execute(t, table_oid=table_oid) index_names = {} @@ -1756,7 +1781,7 @@ class PGDialect(default.DefaultDialect): if idx_name in index_names: index_d = 
index_names[idx_name] else: - index_d = {'column_names':[]} + index_d = {'column_names': []} indexes.append(index_d) index_names[idx_name] = index_d index_d['name'] = idx_name @@ -1785,8 +1810,8 @@ class PGDialect(default.DefaultDialect): """ s = sql.text(SQL_ENUMS, typemap={ - 'attname':sqltypes.Unicode, - 'label':sqltypes.Unicode}) + 'attname': sqltypes.Unicode, + 'label': sqltypes.Unicode}) c = connection.execute(s) enums = {} @@ -1823,7 +1848,7 @@ class PGDialect(default.DefaultDialect): WHERE t.typtype = 'd' """ - s = sql.text(SQL_DOMAINS, typemap={'attname':sqltypes.Unicode}) + s = sql.text(SQL_DOMAINS, typemap={'attname': sqltypes.Unicode}) c = connection.execute(s) domains = {} @@ -1840,10 +1865,9 @@ class PGDialect(default.DefaultDialect): name = "%s.%s" % (domain['schema'], domain['name']) domains[name] = { - 'attype':attype, + 'attype': attype, 'nullable': domain['nullable'], 'default': domain['default'] } return domains - diff --git a/lib/sqlalchemy/dialects/postgresql/hstore.py b/lib/sqlalchemy/dialects/postgresql/hstore.py index c20b23038..8ac65b912 100644 --- a/lib/sqlalchemy/dialects/postgresql/hstore.py +++ b/lib/sqlalchemy/dialects/postgresql/hstore.py @@ -32,7 +32,6 @@ HSTORE_DELIMITER_RE = re.compile(r""" """, re.VERBOSE) - def _parse_error(hstore_str, pos): """format an unmarshalling error.""" diff --git a/lib/sqlalchemy/dialects/postgresql/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py index 6a7c5cecb..d016bf7e1 100644 --- a/lib/sqlalchemy/dialects/postgresql/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -34,6 +34,7 @@ from .base import PGDialect, \ PGCompiler, PGIdentifierPreparer, PGExecutionContext,\ _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES + class _PGNumeric(sqltypes.Numeric): def result_processor(self, dialect, coltype): if self.asdecimal: @@ -60,6 +61,7 @@ class _PGNumericNoBind(_PGNumeric): def bind_processor(self, dialect): return None + class PGExecutionContext_pg8000(PGExecutionContext): pass @@ -71,7 
+73,8 @@ class PGCompiler_pg8000(PGCompiler): def post_process_text(self, text): if '%%' in text: - util.warn("The SQLAlchemy postgresql dialect now automatically escapes '%' in text() " + util.warn("The SQLAlchemy postgresql dialect " + "now automatically escapes '%' in text() " "expressions to '%%'.") return text.replace('%', '%%') @@ -99,8 +102,8 @@ class PGDialect_pg8000(PGDialect): colspecs = util.update_copy( PGDialect.colspecs, { - sqltypes.Numeric : _PGNumericNoBind, - sqltypes.Float : _PGNumeric + sqltypes.Numeric: _PGNumericNoBind, + sqltypes.Float: _PGNumeric } ) diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py index 73f712328..ebac014db 100644 --- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -59,11 +59,12 @@ The following DBAPI-specific options are respected when used with :meth:`.Query.execution_options`, in addition to those not specific to DBAPIs: * isolation_level - Set the transaction isolation level for the lifespan of a - :class:`.Connection` (can only be set on a connection, not a statement or query). - This includes the options ``SERIALIZABLE``, ``READ COMMITTED``, + :class:`.Connection` (can only be set on a connection, not a statement + or query). This includes the options ``SERIALIZABLE``, ``READ COMMITTED``, ``READ UNCOMMITTED`` and ``REPEATABLE READ``. * stream_results - Enable or disable usage of server side cursors. - If ``None`` or not set, the ``server_side_cursors`` option of the :class:`.Engine` is used. + If ``None`` or not set, the ``server_side_cursors`` option of the + :class:`.Engine` is used. Unicode ------- @@ -91,13 +92,14 @@ on all new connections based on the value passed to This overrides the encoding specified in the Postgresql client configuration. .. versionadded:: 0.7.3 - The psycopg2-specific ``client_encoding`` parameter to :func:`.create_engine`. 
+ The psycopg2-specific ``client_encoding`` parameter to + :func:`.create_engine`. SQLAlchemy can also be instructed to skip the usage of the psycopg2 ``UNICODE`` extension and to instead utilize it's own unicode encode/decode services, which are normally reserved only for those DBAPIs that don't -fully support unicode directly. Passing ``use_native_unicode=False`` -to :func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``. +fully support unicode directly. Passing ``use_native_unicode=False`` to +:func:`.create_engine` will disable usage of ``psycopg2.extensions.UNICODE``. SQLAlchemy will instead encode data itself into Python bytestrings on the way in and coerce from bytes on the way back, using the value of the :func:`.create_engine` ``encoding`` parameter, which @@ -184,6 +186,7 @@ class _PGNumeric(sqltypes.Numeric): raise exc.InvalidRequestError( "Unknown PG numeric type: %d" % coltype) + class _PGEnum(ENUM): def __init__(self, *arg, **kw): super(_PGEnum, self).__init__(*arg, **kw) @@ -192,6 +195,7 @@ class _PGEnum(ENUM): self.convert_unicode = "force" # end Py2K + class _PGArray(ARRAY): def __init__(self, *arg, **kw): super(_PGArray, self).__init__(*arg, **kw) @@ -203,6 +207,7 @@ class _PGArray(ARRAY): self.item_type.convert_unicode = "force" # end Py2K + class _PGHStore(HSTORE): def bind_processor(self, dialect): if dialect._has_native_hstore: @@ -224,6 +229,7 @@ SERVER_SIDE_CURSOR_RE = re.compile( _server_side_id = util.counter() + class PGExecutionContext_psycopg2(PGExecutionContext): def create_cursor(self): # TODO: coverage for server side cursors + select.for_update() @@ -240,7 +246,8 @@ class PGExecutionContext_psycopg2(PGExecutionContext): ) ) else: - is_server_side = self.execution_options.get('stream_results', False) + is_server_side = \ + self.execution_options.get('stream_results', False) self.__is_server_side = is_server_side if is_server_side: @@ -284,6 +291,7 @@ class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer): 
value = value.replace(self.escape_quote, self.escape_to_quote) return value.replace('%', '%%') + class PGDialect_psycopg2(PGDialect): driver = 'psycopg2' # Py2K @@ -301,11 +309,11 @@ class PGDialect_psycopg2(PGDialect): colspecs = util.update_copy( PGDialect.colspecs, { - sqltypes.Numeric : _PGNumeric, - ENUM : _PGEnum, # needs force_unicode - sqltypes.Enum : _PGEnum, # needs force_unicode - ARRAY : _PGArray, # needs force_unicode - HSTORE : _PGHStore, + sqltypes.Numeric: _PGNumeric, + ENUM: _PGEnum, # needs force_unicode + sqltypes.Enum: _PGEnum, # needs force_unicode + ARRAY: _PGArray, # needs force_unicode + HSTORE: _PGHStore, } ) @@ -328,7 +336,6 @@ class PGDialect_psycopg2(PGDialect): for x in m.group(1, 2, 3) if x is not None) - def initialize(self, connection): super(PGDialect_psycopg2, self).initialize(connection) self._has_native_hstore = self.use_native_hstore and \ @@ -344,10 +351,10 @@ class PGDialect_psycopg2(PGDialect): def _isolation_lookup(self): extensions = __import__('psycopg2.extensions').extensions return { - 'READ COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED, - 'READ UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, - 'REPEATABLE READ':extensions.ISOLATION_LEVEL_REPEATABLE_READ, - 'SERIALIZABLE':extensions.ISOLATION_LEVEL_SERIALIZABLE + 'READ COMMITTED': extensions.ISOLATION_LEVEL_READ_COMMITTED, + 'READ UNCOMMITTED': extensions.ISOLATION_LEVEL_READ_UNCOMMITTED, + 'REPEATABLE READ': extensions.ISOLATION_LEVEL_REPEATABLE_READ, + 'SERIALIZABLE': extensions.ISOLATION_LEVEL_SERIALIZABLE } def set_isolation_level(self, connection, level): @@ -434,4 +441,3 @@ class PGDialect_psycopg2(PGDialect): return False dialect = PGDialect_psycopg2 - diff --git a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py index e7023610b..879bced45 100644 --- a/lib/sqlalchemy/dialects/postgresql/pypostgresql.py +++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py @@ -18,6 +18,7 @@ from ... 
import types as sqltypes from .base import PGDialect, PGExecutionContext from ... import processors + class PGNumeric(sqltypes.Numeric): def bind_processor(self, dialect): return processors.to_str @@ -28,9 +29,11 @@ class PGNumeric(sqltypes.Numeric): else: return processors.to_float + class PGExecutionContext_pypostgresql(PGExecutionContext): pass + class PGDialect_pypostgresql(PGDialect): driver = 'pypostgresql' @@ -48,8 +51,10 @@ class PGDialect_pypostgresql(PGDialect): colspecs = util.update_copy( PGDialect.colspecs, { - sqltypes.Numeric : PGNumeric, - sqltypes.Float: sqltypes.Float, # prevents PGNumeric from being used + sqltypes.Numeric: PGNumeric, + + # prevents PGNumeric from being used + sqltypes.Float: sqltypes.Float, } ) diff --git a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py index 196d77aaa..7b19872cd 100644 --- a/lib/sqlalchemy/dialects/postgresql/zxjdbc.py +++ b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py @@ -16,6 +16,7 @@ from ...connectors.zxJDBC import ZxJDBCConnector from .base import PGDialect, PGExecutionContext + class PGExecutionContext_zxjdbc(PGExecutionContext): def create_cursor(self): @@ -38,6 +39,7 @@ class PGDialect_zxjdbc(ZxJDBCConnector, PGDialect): self.DataHandler = PostgresqlDataHandler def _get_server_version_info(self, connection): - return tuple(int(x) for x in connection.connection.dbversion.split('.')) + parts = connection.connection.dbversion.split('.') + return tuple(int(x) for x in parts) dialect = PGDialect_zxjdbc diff --git a/lib/sqlalchemy/dialects/sqlite/__init__.py b/lib/sqlalchemy/dialects/sqlite/__init__.py index 0958c813c..e11923b44 100644 --- a/lib/sqlalchemy/dialects/sqlite/__init__.py +++ b/lib/sqlalchemy/dialects/sqlite/__init__.py @@ -15,6 +15,7 @@ from sqlalchemy.dialects.sqlite.base import \ NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, dialect __all__ = ( - 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'FLOAT', 'INTEGER', - 'NUMERIC', 
'SMALLINT', 'TEXT', 'TIME', 'TIMESTAMP', 'VARCHAR', 'dialect', 'REAL' -)
\ No newline at end of file + 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'FLOAT', + 'INTEGER', 'NUMERIC', 'SMALLINT', 'TEXT', 'TIME', 'TIMESTAMP', 'VARCHAR', + 'REAL', 'dialect' +) diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index c38baa738..6d2d0e89d 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -12,14 +12,15 @@ Date and Time Types ------------------- -SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide -out of the box functionality for translating values between Python `datetime` objects -and a SQLite-supported format. SQLAlchemy's own :class:`~sqlalchemy.types.DateTime` -and related types provide date formatting and parsing functionality when SQlite is used. -The implementation classes are :class:`~.sqlite.DATETIME`, :class:`~.sqlite.DATE` and :class:`~.sqlite.TIME`. -These types represent dates and times as ISO formatted strings, which also nicely -support ordering. There's no reliance on typical "libc" internals for these functions -so historical dates are fully supported. +SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite +does not provide out of the box functionality for translating values between +Python `datetime` objects and a SQLite-supported format. SQLAlchemy's own +:class:`~sqlalchemy.types.DateTime` and related types provide date formatting +and parsing functionality when SQlite is used. The implementation classes are +:class:`~.sqlite.DATETIME`, :class:`~.sqlite.DATE` and :class:`~.sqlite.TIME`. +These types represent dates and times as ISO formatted strings, which also +nicely support ordering. There's no reliance on typical "libc" internals +for these functions so historical dates are fully supported. 
Auto Incrementing Behavior -------------------------- @@ -46,44 +47,47 @@ to the Table construct:: Transaction Isolation Level --------------------------- -:func:`.create_engine` accepts an ``isolation_level`` parameter which results in -the command ``PRAGMA read_uncommitted <level>`` being invoked for every new -connection. Valid values for this parameter are ``SERIALIZABLE`` and -``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively. +:func:`.create_engine` accepts an ``isolation_level`` parameter which +results in the command ``PRAGMA read_uncommitted <level>`` being invoked for +every new connection. Valid values for this parameter are ``SERIALIZABLE`` +and ``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively. See the section :ref:`pysqlite_serializable` for an important workaround when using serializable isolation with Pysqlite. Database Locking Behavior / Concurrency --------------------------------------- -Note that SQLite is not designed for a high level of concurrency. The database -itself, being a file, is locked completely during write operations and within -transactions, meaning exactly one connection has exclusive access to the database -during this period - all other connections will be blocked during this time. +Note that SQLite is not designed for a high level of concurrency. The +database itself, being a file, is locked completely during write operations +and within transactions, meaning exactly one connection has exclusive access +to the database during this period - all other connections will be blocked +during this time. The Python DBAPI specification also calls for a connection model that is always -in a transaction; there is no BEGIN method, only commit and rollback. This implies -that a SQLite DBAPI driver would technically allow only serialized access to a -particular database file at all times. 
The pysqlite driver attempts to ameliorate this by -deferring the actual BEGIN statement until the first DML (INSERT, UPDATE, or -DELETE) is received within a transaction. While this breaks serializable isolation, -it at least delays the exclusive locking inherent in SQLite's design. +in a transaction; there is no BEGIN method, only commit and rollback. This +implies that a SQLite DBAPI driver would technically allow only serialized +access to a particular database file at all times. The pysqlite driver +attempts to ameliorate this by deferring the actual BEGIN statement until +the first DML (INSERT, UPDATE, or DELETE) is received within a +transaction. While this breaks serializable isolation, it at least delays +the exclusive locking inherent in SQLite's design. SQLAlchemy's default mode of usage with the ORM is known -as "autocommit=False", which means the moment the :class:`.Session` begins to be -used, a transaction is begun. As the :class:`.Session` is used, the autoflush -feature, also on by default, will flush out pending changes to the database -before each query. The effect of this is that a :class:`.Session` used in its -default mode will often emit DML early on, long before the transaction is actually -committed. This again will have the effect of serializing access to the SQLite -database. If highly concurrent reads are desired against the SQLite database, -it is advised that the autoflush feature be disabled, and potentially even -that autocommit be re-enabled, which has the effect of each SQL statement and -flush committing changes immediately. +as "autocommit=False", which means the moment the :class:`.Session` begins to +be used, a transaction is begun. As the :class:`.Session` is used, the +autoflush feature, also on by default, will flush out pending changes to the +database before each query. The effect of this is that a :class:`.Session` +used in its default mode will often emit DML early on, long before the +transaction is actually committed. 
This again will have the effect of +serializing access to the SQLite database. If highly concurrent reads are +desired against the SQLite database, it is advised that the autoflush feature +be disabled, and potentially even that autocommit be re-enabled, which has +the effect of each SQL statement and flush committing changes immediately. For more information on SQLite's lack of concurrency by design, please -see `Situations Where Another RDBMS May Work Better - High Concurrency <http://www.sqlite.org/whentouse.html>`_ -near the bottom of the page. +see `Situations Where Another RDBMS May Work Better - High +Concurrency <http://www.sqlite.org/whentouse.html>`_ near the bottom of + the page. .. _sqlite_foreign_keys: @@ -123,7 +127,8 @@ for new connections through the usage of events:: """ -import datetime, re +import datetime +import re from sqlalchemy import sql, exc from sqlalchemy.engine import default, base, reflection @@ -135,6 +140,7 @@ from sqlalchemy import processors from sqlalchemy.types import BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL,\ FLOAT, REAL, INTEGER, NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR + class _DateTimeMixin(object): _reg = None _storage_format = None @@ -146,6 +152,7 @@ class _DateTimeMixin(object): if storage_format is not None: self._storage_format = storage_format + class DATETIME(_DateTimeMixin, sqltypes.DateTime): """Represent a Python datetime object in SQLite using a string. 
@@ -164,9 +171,9 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime): from sqlalchemy.dialects.sqlite import DATETIME dt = DATETIME( - storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d", - regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)") - ) + storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d", + regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)") + ) :param storage_format: format string which will be applied to the dict with keys year, month, day, hour, minute, second, and microsecond. @@ -201,6 +208,7 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime): datetime_datetime = datetime.datetime datetime_date = datetime.date format = self._storage_format + def process(value): if value is None: return None @@ -236,6 +244,7 @@ class DATETIME(_DateTimeMixin, sqltypes.DateTime): else: return processors.str_to_datetime + class DATE(_DateTimeMixin, sqltypes.Date): """Represent a Python date object in SQLite using a string. @@ -274,6 +283,7 @@ class DATE(_DateTimeMixin, sqltypes.Date): def bind_processor(self, dialect): datetime_date = datetime.date format = self._storage_format + def process(value): if value is None: return None @@ -295,6 +305,7 @@ class DATE(_DateTimeMixin, sqltypes.Date): else: return processors.str_to_date + class TIME(_DateTimeMixin, sqltypes.Time): """Represent a Python time object in SQLite using a string. @@ -313,9 +324,9 @@ class TIME(_DateTimeMixin, sqltypes.Time): from sqlalchemy.dialects.sqlite import TIME t = TIME( - storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d", - regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?") - ) + storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d", + regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?") + ) :param storage_format: format string which will be applied to the dict with keys hour, minute, second, and microsecond. 
@@ -343,6 +354,7 @@ class TIME(_DateTimeMixin, sqltypes.Time): def bind_processor(self, dialect): datetime_time = datetime.time format = self._storage_format + def process(value): if value is None: return None @@ -394,7 +406,6 @@ ischema_names = { } - class SQLiteCompiler(compiler.SQLCompiler): extract_map = util.update_copy( compiler.SQLCompiler.extract_map, @@ -435,7 +446,9 @@ class SQLiteCompiler(compiler.SQLCompiler): def visit_extract(self, extract, **kw): try: return "CAST(STRFTIME('%s', %s) AS INTEGER)" % ( - self.extract_map[extract.field], self.process(extract.expr, **kw)) + self.extract_map[extract.field], + self.process(extract.expr, **kw) + ) except KeyError: raise exc.CompileError( "%s is not a valid extract argument." % extract.field) @@ -443,7 +456,7 @@ class SQLiteCompiler(compiler.SQLCompiler): def limit_clause(self, select): text = "" if select._limit is not None: - text += "\n LIMIT " + self.process(sql.literal(select._limit)) + text += "\n LIMIT " + self.process(sql.literal(select._limit)) if select._offset is not None: if select._limit is None: text += "\n LIMIT " + self.process(sql.literal(-1)) @@ -460,7 +473,8 @@ class SQLiteCompiler(compiler.SQLCompiler): class SQLiteDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): - colspec = self.preparer.format_column(column) + " " + self.dialect.type_compiler.process(column.type) + coltype = self.dialect.type_compiler.process(column.type) + colspec = self.preparer.format_column(column) + " " + coltype default = self.get_column_default_string(column) if default is not None: colspec += " DEFAULT " + default @@ -468,12 +482,12 @@ class SQLiteDDLCompiler(compiler.DDLCompiler): if not column.nullable: colspec += " NOT NULL" - if column.primary_key and \ - column.table.kwargs.get('sqlite_autoincrement', False) and \ - len(column.table.primary_key.columns) == 1 and \ - issubclass(column.type._type_affinity, sqltypes.Integer) and \ - not column.foreign_keys: - colspec += " 
PRIMARY KEY AUTOINCREMENT" + if (column.primary_key and + column.table.kwargs.get('sqlite_autoincrement', False) and + len(column.table.primary_key.columns) == 1 and + issubclass(column.type._type_affinity, sqltypes.Integer) and + not column.foreign_keys): + colspec += " PRIMARY KEY AUTOINCREMENT" return colspec @@ -521,10 +535,12 @@ class SQLiteDDLCompiler(compiler.DDLCompiler): for c in index.columns)) return text + class SQLiteTypeCompiler(compiler.GenericTypeCompiler): def visit_large_binary(self, type_): return self.visit_BLOB(type_) + class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = set([ 'add', 'after', 'all', 'alter', 'analyze', 'and', 'as', 'asc', @@ -536,14 +552,15 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): 'drop', 'each', 'else', 'end', 'escape', 'except', 'exclusive', 'explain', 'false', 'fail', 'for', 'foreign', 'from', 'full', 'glob', 'group', 'having', 'if', 'ignore', 'immediate', 'in', 'index', - 'indexed', 'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is', - 'isnull', 'join', 'key', 'left', 'like', 'limit', 'match', 'natural', - 'not', 'notnull', 'null', 'of', 'offset', 'on', 'or', 'order', 'outer', - 'plan', 'pragma', 'primary', 'query', 'raise', 'references', - 'reindex', 'rename', 'replace', 'restrict', 'right', 'rollback', - 'row', 'select', 'set', 'table', 'temp', 'temporary', 'then', 'to', - 'transaction', 'trigger', 'true', 'union', 'unique', 'update', 'using', - 'vacuum', 'values', 'view', 'virtual', 'when', 'where', + 'indexed', 'initially', 'inner', 'insert', 'instead', 'intersect', + 'into', 'is', 'isnull', 'join', 'key', 'left', 'like', 'limit', + 'match', 'natural', 'not', 'notnull', 'null', 'of', 'offset', 'on', + 'or', 'order', 'outer', 'plan', 'pragma', 'primary', 'query', + 'raise', 'references', 'reindex', 'rename', 'replace', 'restrict', + 'right', 'rollback', 'row', 'select', 'set', 'table', 'temp', + 'temporary', 'then', 'to', 'transaction', 'trigger', 'true', 
'union', + 'unique', 'update', 'using', 'vacuum', 'values', 'view', 'virtual', + 'when', 'where', ]) def format_index(self, index, use_schema=True, name=None): @@ -552,10 +569,14 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): if name is None: name = index.name result = self.quote(name, index.quote) - if not self.omit_schema and use_schema and getattr(index.table, "schema", None): - result = self.quote_schema(index.table.schema, index.table.quote_schema) + "." + result + if (not self.omit_schema and + use_schema and + getattr(index.table, "schema", None)): + result = self.quote_schema( + index.table.schema, index.table.quote_schema) + "." + result return result + class SQLiteExecutionContext(default.DefaultExecutionContext): @util.memoized_property def _preserve_raw_colnames(self): @@ -611,9 +632,10 @@ class SQLiteDialect(default.DefaultDialect): self.dbapi.sqlite_version_info >= (3, 2, 3) _isolation_lookup = { - 'READ UNCOMMITTED':1, - 'SERIALIZABLE':0 + 'READ UNCOMMITTED': 1, + 'SERIALIZABLE': 0 } + def set_isolation_level(self, connection, level): try: isolation_level = self._isolation_lookup[level.replace('_', ' ')] @@ -686,7 +708,8 @@ class SQLiteDialect(default.DefaultDialect): else: pragma = "PRAGMA " qtable = quote(table_name) - cursor = _pragma_cursor(connection.execute("%stable_info(%s)" % (pragma, qtable))) + statement = "%stable_info(%s)" % (pragma, qtable) + cursor = _pragma_cursor(connection.execute(statement)) row = cursor.fetchone() # consume remaining rows, to work around @@ -752,9 +775,8 @@ class SQLiteDialect(default.DefaultDialect): else: pragma = "PRAGMA " qtable = quote(table_name) - c = _pragma_cursor( - connection.execute("%stable_info(%s)" % - (pragma, qtable))) + statement = "%stable_info(%s)" % (pragma, qtable) + c = _pragma_cursor(connection.execute(statement)) rows = c.fetchall() columns = [] @@ -806,7 +828,7 @@ class SQLiteDialect(default.DefaultDialect): for col in cols: if col['primary_key']: pkeys.append(col['name']) 
- return {'constrained_columns':pkeys, 'name':None} + return {'constrained_columns': pkeys, 'name': None} @reflection.cache def get_foreign_keys(self, connection, table_name, schema=None, **kw): @@ -816,7 +838,8 @@ class SQLiteDialect(default.DefaultDialect): else: pragma = "PRAGMA " qtable = quote(table_name) - c = _pragma_cursor(connection.execute("%sforeign_key_list(%s)" % (pragma, qtable))) + statement = "%sforeign_key_list(%s)" % (pragma, qtable) + c = _pragma_cursor(connection.execute(statement)) fkeys = [] fks = {} while True: @@ -839,10 +862,10 @@ class SQLiteDialect(default.DefaultDialect): except KeyError: fk = { 'name': None, - 'constrained_columns' : [], - 'referred_schema' : None, - 'referred_table' : rtbl, - 'referred_columns' : [] + 'constrained_columns': [], + 'referred_schema': None, + 'referred_table': rtbl, + 'referred_columns': [] } fkeys.append(fk) fks[numerical_id] = fk @@ -864,7 +887,8 @@ class SQLiteDialect(default.DefaultDialect): pragma = "PRAGMA " include_auto_indexes = kw.pop('include_auto_indexes', False) qtable = quote(table_name) - c = _pragma_cursor(connection.execute("%sindex_list(%s)" % (pragma, qtable))) + statement = "%sindex_list(%s)" % (pragma, qtable) + c = _pragma_cursor(connection.execute(statement)) indexes = [] while True: row = c.fetchone() @@ -872,13 +896,15 @@ class SQLiteDialect(default.DefaultDialect): break # ignore implicit primary key index. # http://www.mail-archive.com/sqlite-users@sqlite.org/msg30517.html - elif not include_auto_indexes and row[1].startswith('sqlite_autoindex'): + elif (not include_auto_indexes and + row[1].startswith('sqlite_autoindex')): continue indexes.append(dict(name=row[1], column_names=[], unique=row[2])) # loop thru unique indexes to get the column names. 
for idx in indexes: - c = connection.execute("%sindex_info(%s)" % (pragma, quote(idx['name']))) + statement = "%sindex_info(%s)" % (pragma, quote(idx['name'])) + c = connection.execute(statement) cols = idx['column_names'] while True: row = c.fetchone() diff --git a/lib/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/sqlalchemy/dialects/sqlite/pysqlite.py index 558f1016b..bb77b27b6 100644 --- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py +++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py @@ -35,31 +35,32 @@ this explicitly:: Connect Strings --------------- -The file specification for the SQLite database is taken as the "database" portion of -the URL. Note that the format of a SQLAlchemy url is:: +The file specification for the SQLite database is taken as the "database" +portion of the URL. Note that the format of a SQLAlchemy url is:: driver://user:pass@host/database -This means that the actual filename to be used starts with the characters to the -**right** of the third slash. So connecting to a relative filepath looks like:: +This means that the actual filename to be used starts with the characters to +the **right** of the third slash. So connecting to a relative filepath +looks like:: # relative path e = create_engine('sqlite:///path/to/database.db') -An absolute path, which is denoted by starting with a slash, means you need **four** -slashes:: +An absolute path, which is denoted by starting with a slash, means you +need **four** slashes:: # absolute path e = create_engine('sqlite:////path/to/database.db') -To use a Windows path, regular drive specifications and backslashes can be used. -Double backslashes are probably needed:: +To use a Windows path, regular drive specifications and backslashes can be +used. Double backslashes are probably needed:: # absolute path on Windows e = create_engine('sqlite:///C:\\\\path\\\\to\\\\database.db') -The sqlite ``:memory:`` identifier is the default if no filepath is present. 
Specify -``sqlite://`` and nothing else:: +The sqlite ``:memory:`` identifier is the default if no filepath is +present. Specify ``sqlite://`` and nothing else:: # in-memory database e = create_engine('sqlite://') @@ -86,13 +87,13 @@ nor should be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES can be forced if one configures "native_datetime=True" on create_engine():: engine = create_engine('sqlite://', - connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, - native_datetime=True - ) + connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES}, + native_datetime=True + ) -With this flag enabled, the DATE and TIMESTAMP types (but note - not the DATETIME -or TIME types...confused yet ?) will not perform any bind parameter or result -processing. Execution of "func.current_date()" will return a string. +With this flag enabled, the DATE and TIMESTAMP types (but note - not the +DATETIME or TIME types...confused yet ?) will not perform any bind parameter +or result processing. Execution of "func.current_date()" will return a string. "func.current_timestamp()" is registered as returning a DATETIME type in SQLAlchemy, so this function still receives SQLAlchemy-level result processing. @@ -100,8 +101,8 @@ Threading/Pooling Behavior --------------------------- Pysqlite's default behavior is to prohibit the usage of a single connection -in more than one thread. This is originally intended to work with older versions -of SQLite that did not support multithreaded operation under +in more than one thread. This is originally intended to work with older +versions of SQLite that did not support multithreaded operation under various circumstances. In particular, older SQLite versions did not allow a ``:memory:`` database to be used in multiple threads under any circumstances. @@ -117,17 +118,17 @@ thread-safety to make this usage worth it. 
SQLAlchemy sets up pooling to work with Pysqlite's default behavior: -* When a ``:memory:`` SQLite database is specified, the dialect by default will use - :class:`.SingletonThreadPool`. This pool maintains a single connection per - thread, so that all access to the engine within the current thread use the - same ``:memory:`` database - other threads would access a different - ``:memory:`` database. -* When a file-based database is specified, the dialect will use :class:`.NullPool` - as the source of connections. This pool closes and discards connections - which are returned to the pool immediately. SQLite file-based connections - have extremely low overhead, so pooling is not necessary. The scheme also - prevents a connection from being used again in a different thread and works - best with SQLite's coarse-grained file locking. +* When a ``:memory:`` SQLite database is specified, the dialect by default + will use :class:`.SingletonThreadPool`. This pool maintains a single + connection per thread, so that all access to the engine within the current + thread use the same ``:memory:`` database - other threads would access a + different ``:memory:`` database. +* When a file-based database is specified, the dialect will use + :class:`.NullPool` as the source of connections. This pool closes and + discards connections which are returned to the pool immediately. SQLite + file-based connections have extremely low overhead, so pooling is not + necessary. The scheme also prevents a connection from being used again in + a different thread and works best with SQLite's coarse-grained file locking. .. versionchanged:: 0.7 Default selection of :class:`.NullPool` for SQLite file-based databases. @@ -140,9 +141,10 @@ Using a Memory Database in Multiple Threads To use a ``:memory:`` database in a multithreaded scenario, the same connection object must be shared among threads, since the database exists -only within the scope of that connection. 
The :class:`.StaticPool` implementation -will maintain a single connection globally, and the ``check_same_thread`` flag -can be passed to Pysqlite as ``False``:: +only within the scope of that connection. The +:class:`.StaticPool` implementation will maintain a single connection +globally, and the ``check_same_thread`` flag can be passed to Pysqlite +as ``False``:: from sqlalchemy.pool import StaticPool engine = create_engine('sqlite://', @@ -155,13 +157,14 @@ version of SQLite. Using Temporary Tables with SQLite ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Due to the way SQLite deals with temporary tables, if you wish to use a temporary table -in a file-based SQLite database across multiple checkouts from the connection pool, such -as when using an ORM :class:`.Session` where the temporary table should continue to remain -after :meth:`.commit` or :meth:`.rollback` is called, -a pool which maintains a single connection must be used. Use :class:`.SingletonThreadPool` -if the scope is only needed within the current thread, or :class:`.StaticPool` is scope is -needed within multiple threads for this case:: +Due to the way SQLite deals with temporary tables, if you wish to use a +temporary table in a file-based SQLite database across multiple checkouts +from the connection pool, such as when using an ORM :class:`.Session` where +the temporary table should continue to remain after :meth:`.commit` or +:meth:`.rollback` is called, a pool which maintains a single connection must +be used. 
Use :class:`.SingletonThreadPool` if the scope is only needed +within the current thread, or :class:`.StaticPool` is scope is needed within +multiple threads for this case:: # maintain the same connection per thread from sqlalchemy.pool import SingletonThreadPool @@ -174,17 +177,17 @@ needed within multiple threads for this case:: engine = create_engine('sqlite:///mydb.db', poolclass=StaticPool) -Note that :class:`.SingletonThreadPool` should be configured for the number of threads -that are to be used; beyond that number, connections will be closed out in a non deterministic -way. +Note that :class:`.SingletonThreadPool` should be configured for the number +of threads that are to be used; beyond that number, connections will be +closed out in a non deterministic way. Unicode ------- -The pysqlite driver only returns Python ``unicode`` objects in result sets, never -plain strings, and accommodates ``unicode`` objects within bound parameter -values in all cases. Regardless of the SQLAlchemy string type in use, -string-based result values will by Python ``unicode`` in Python 2. +The pysqlite driver only returns Python ``unicode`` objects in result sets, +never plain strings, and accommodates ``unicode`` objects within bound +parameter values in all cases. Regardless of the SQLAlchemy string type in +use, string-based result values will by Python ``unicode`` in Python 2. The :class:`.Unicode` type should still be used to indicate those columns that require unicode, however, so that non-``unicode`` values passed inadvertently will emit a warning. 
Pysqlite will emit an error if a non-``unicode`` string @@ -224,6 +227,7 @@ from sqlalchemy import util import os + class _SQLite_pysqliteTimeStamp(DATETIME): def bind_processor(self, dialect): if dialect.native_datetime: @@ -237,6 +241,7 @@ class _SQLite_pysqliteTimeStamp(DATETIME): else: return DATETIME.result_processor(self, dialect, coltype) + class _SQLite_pysqliteDate(DATE): def bind_processor(self, dialect): if dialect.native_datetime: @@ -250,14 +255,15 @@ class _SQLite_pysqliteDate(DATE): else: return DATE.result_processor(self, dialect, coltype) + class SQLiteDialect_pysqlite(SQLiteDialect): default_paramstyle = 'qmark' colspecs = util.update_copy( SQLiteDialect.colspecs, { - sqltypes.Date:_SQLite_pysqliteDate, - sqltypes.TIMESTAMP:_SQLite_pysqliteTimeStamp, + sqltypes.Date: _SQLite_pysqliteDate, + sqltypes.TIMESTAMP: _SQLite_pysqliteTimeStamp, } ) @@ -284,7 +290,7 @@ class SQLiteDialect_pysqlite(SQLiteDialect): from pysqlite2 import dbapi2 as sqlite except ImportError, e: try: - from sqlite3 import dbapi2 as sqlite #try the 2.5+ stdlib name. + from sqlite3 import dbapi2 as sqlite # try 2.5+ stdlib name. 
except ImportError: raise e return sqlite diff --git a/lib/sqlalchemy/dialects/sybase/__init__.py b/lib/sqlalchemy/dialects/sybase/__init__.py index 4502c8f66..2a79023dd 100644 --- a/lib/sqlalchemy/dialects/sybase/__init__.py +++ b/lib/sqlalchemy/dialects/sybase/__init__.py @@ -10,18 +10,18 @@ from sqlalchemy.dialects.sybase import base, pysybase, pyodbc base.dialect = pyodbc.dialect from base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\ - TEXT,DATE,DATETIME, FLOAT, NUMERIC,\ - BIGINT,INT, INTEGER, SMALLINT, BINARY,\ - VARBINARY,UNITEXT,UNICHAR,UNIVARCHAR,\ - IMAGE,BIT,MONEY,SMALLMONEY,TINYINT,\ - dialect + TEXT, DATE, DATETIME, FLOAT, NUMERIC,\ + BIGINT, INT, INTEGER, SMALLINT, BINARY,\ + VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\ + IMAGE, BIT, MONEY, SMALLMONEY, TINYINT,\ + dialect __all__ = ( - 'CHAR', 'VARCHAR', 'TIME', 'NCHAR', 'NVARCHAR', - 'TEXT','DATE','DATETIME', 'FLOAT', 'NUMERIC', - 'BIGINT','INT', 'INTEGER', 'SMALLINT', 'BINARY', - 'VARBINARY','UNITEXT','UNICHAR','UNIVARCHAR', - 'IMAGE','BIT','MONEY','SMALLMONEY','TINYINT', - 'dialect' + 'CHAR', 'VARCHAR', 'TIME', 'NCHAR', 'NVARCHAR', + 'TEXT', 'DATE', 'DATETIME', 'FLOAT', 'NUMERIC', + 'BIGINT', 'INT', 'INTEGER', 'SMALLINT', 'BINARY', + 'VARBINARY', 'UNITEXT', 'UNICHAR', 'UNIVARCHAR', + 'IMAGE', 'BIT', 'MONEY', 'SMALLMONEY', 'TINYINT', + 'dialect' ) diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py index dfa26a170..5173bb241 100644 --- a/lib/sqlalchemy/dialects/sybase/base.py +++ b/lib/sqlalchemy/dialects/sybase/base.py @@ -102,35 +102,44 @@ class _SybaseUnitypeMixin(object): def result_processor(self, dialect, coltype): def process(value): if value is not None: - return str(value) #.decode("ucs-2") + return str(value) # decode("ucs-2") else: return None return process + class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode): __visit_name__ = 'UNICHAR' + class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode): __visit_name__ = 'UNIVARCHAR' + class 
UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText): __visit_name__ = 'UNITEXT' + class TINYINT(sqltypes.Integer): __visit_name__ = 'TINYINT' + class BIT(sqltypes.TypeEngine): __visit_name__ = 'BIT' + class MONEY(sqltypes.TypeEngine): __visit_name__ = "MONEY" + class SMALLMONEY(sqltypes.TypeEngine): __visit_name__ = "SMALLMONEY" + class UNIQUEIDENTIFIER(sqltypes.TypeEngine): __visit_name__ = "UNIQUEIDENTIFIER" + class IMAGE(sqltypes.LargeBinary): __visit_name__ = 'IMAGE' @@ -174,19 +183,19 @@ class SybaseTypeCompiler(compiler.GenericTypeCompiler): ischema_names = { 'bigint': BIGINT, - 'int' : INTEGER, - 'integer' : INTEGER, - 'smallint' : SMALLINT, - 'tinyint' : TINYINT, - 'unsigned bigint' : BIGINT, # TODO: unsigned flags - 'unsigned int' : INTEGER, # TODO: unsigned flags - 'unsigned smallint' : SMALLINT, # TODO: unsigned flags - 'numeric' : NUMERIC, - 'decimal' : DECIMAL, - 'dec' : DECIMAL, - 'float' : FLOAT, - 'double' : NUMERIC, # TODO - 'double precision' : NUMERIC, # TODO + 'int': INTEGER, + 'integer': INTEGER, + 'smallint': SMALLINT, + 'tinyint': TINYINT, + 'unsigned bigint': BIGINT, # TODO: unsigned flags + 'unsigned int': INTEGER, # TODO: unsigned flags + 'unsigned smallint': SMALLINT, # TODO: unsigned flags + 'numeric': NUMERIC, + 'decimal': DECIMAL, + 'dec': DECIMAL, + 'float': FLOAT, + 'double': NUMERIC, # TODO + 'double precision': NUMERIC, # TODO 'real': REAL, 'smallmoney': SMALLMONEY, 'money': MONEY, @@ -194,13 +203,13 @@ ischema_names = { 'datetime': DATETIME, 'date': DATE, 'time': TIME, - 'char' : CHAR, - 'character' : CHAR, - 'varchar' : VARCHAR, - 'character varying' : VARCHAR, - 'char varying' : VARCHAR, - 'unichar' : UNICHAR, - 'unicode character' : UNIVARCHAR, + 'char': CHAR, + 'character': CHAR, + 'varchar': VARCHAR, + 'character varying': VARCHAR, + 'char varying': VARCHAR, + 'unichar': UNICHAR, + 'unicode character': UNIVARCHAR, 'nchar': NCHAR, 'national char': NCHAR, 'national character': NCHAR, @@ -210,13 +219,13 @@ ischema_names = { 
'national character varying': NVARCHAR, 'text': TEXT, 'unitext': UNITEXT, - 'binary' : BINARY, - 'varbinary' : VARBINARY, - 'image' : IMAGE, + 'binary': BINARY, + 'varbinary': VARBINARY, + 'image': IMAGE, 'bit': BIT, # not in documentation for ASE 15.7 - 'long varchar' : TEXT, # TODO + 'long varchar': TEXT, # TODO 'timestamp': TIMESTAMP, 'uniqueidentifier': UNIQUEIDENTIFIER, @@ -300,6 +309,7 @@ class SybaseExecutionContext(default.DefaultExecutionContext): cursor.close() return lastrowid + class SybaseSQLCompiler(compiler.SQLCompiler): ansi_bind_rules = True @@ -326,7 +336,7 @@ class SybaseSQLCompiler(compiler.SQLCompiler): # FIXME: sybase doesn't allow an offset without a limit # so use a huge value for TOP here s += "TOP 1000000 " - s += "START AT %s " % (select._offset+1,) + s += "START AT %s " % (select._offset + 1,) return s def get_from_hint_text(self, table, text): @@ -406,9 +416,11 @@ class SybaseDDLCompiler(compiler.DDLCompiler): self._index_identifier(index.name), index.quote) ) + class SybaseIdentifierPreparer(compiler.IdentifierPreparer): reserved_words = RESERVED_WORDS + class SybaseDialect(default.DefaultDialect): name = 'sybase' supports_unicode_statements = False @@ -431,7 +443,7 @@ class SybaseDialect(default.DefaultDialect): def _get_default_schema_name(self, connection): return connection.scalar( text("SELECT user_name() as user_name", - typemap={'user_name':Unicode}) + typemap={'user_name': Unicode}) ) def initialize(self, connection): @@ -484,16 +496,16 @@ class SybaseDialect(default.DefaultDialect): COLUMN_SQL = text(""" SELECT col.name AS name, - t.name AS type, + t.name AS type, (col.status & 8) AS nullable, (col.status & 128) AS autoincrement, com.text AS 'default', col.prec AS precision, col.scale AS scale, col.length AS length - FROM systypes t, syscolumns col LEFT OUTER JOIN syscomments com ON + FROM systypes t, syscolumns col LEFT OUTER JOIN syscomments com ON col.cdefault = com.id - WHERE col.usertype = t.usertype + WHERE col.usertype 
= t.usertype AND col.id = :table_id ORDER BY col.colid """) @@ -501,7 +513,7 @@ class SybaseDialect(default.DefaultDialect): results = connection.execute(COLUMN_SQL, table_id=table_id) columns = [] - for (name, type_, nullable, autoincrement, default, precision, scale, + for (name, type_, nullable, autoincrement, default, precision, scale, length) in results: col_info = self._get_column_info(name, type_, bool(nullable), bool(autoincrement), default, precision, scale, @@ -541,7 +553,7 @@ class SybaseDialect(default.DefaultDialect): default = re.sub("^'(.*)'$", lambda m: m.group(1), default) else: default = None - + column_info = dict(name=name, type=coltype, nullable=nullable, default=default, autoincrement=autoincrement) return column_info @@ -551,25 +563,25 @@ class SybaseDialect(default.DefaultDialect): table_id = self.get_table_id(connection, table_name, schema, info_cache=kw.get("info_cache")) - + table_cache = {} column_cache = {} foreign_keys = [] - + table_cache[table_id] = {"name": table_name, "schema": schema} - + COLUMN_SQL = text(""" SELECT c.colid AS id, c.name AS name FROM syscolumns c WHERE c.id = :table_id """) - + results = connection.execute(COLUMN_SQL, table_id=table_id) columns = {} for col in results: columns[col["id"]] = col["name"] column_cache[table_id] = columns - + REFCONSTRAINT_SQL = text(""" SELECT o.name AS name, r.reftabid AS reftable_id, r.keycnt AS 'count', @@ -591,7 +603,7 @@ class SybaseDialect(default.DefaultDialect): """) referential_constraints = connection.execute(REFCONSTRAINT_SQL, table_id=table_id) - + REFTABLE_SQL = text(""" SELECT o.name AS name, u.name AS 'schema' FROM sysobjects o JOIN sysusers u ON o.uid = u.uid @@ -599,9 +611,8 @@ class SybaseDialect(default.DefaultDialect): """) for r in referential_constraints: - reftable_id = r["reftable_id"] - + if reftable_id not in table_cache: c = connection.execute(REFTABLE_SQL, table_id=reftable_id) reftable = c.fetchone() @@ -617,16 +628,16 @@ class 
SybaseDialect(default.DefaultDialect): for col in results: reftable_columns[col["id"]] = col["name"] column_cache[reftable_id] = reftable_columns - + reftable = table_cache[reftable_id] reftable_columns = column_cache[reftable_id] - + constrained_columns = [] referred_columns = [] - for i in range(1, r["count"]+1): + for i in range(1, r["count"] + 1): constrained_columns.append(columns[r["fokey%i" % i]]) referred_columns.append(reftable_columns[r["refkey%i" % i]]) - + fk_info = { "constrained_columns": constrained_columns, "referred_schema": reftable["schema"], @@ -634,9 +645,9 @@ class SybaseDialect(default.DefaultDialect): "referred_columns": referred_columns, "name": r["name"] } - + foreign_keys.append(fk_info) - + return foreign_keys @reflection.cache @@ -720,9 +731,9 @@ class SybaseDialect(default.DefaultDialect): results = connection.execute(PK_SQL, table_id=table_id) pks = results.fetchone() results.close() - + constrained_columns = [] - for i in range(1, pks["count"]+1): + for i in range(1, pks["count"] + 1): constrained_columns.append(pks["pk_%i" % (i,)]) return {"constrained_columns": constrained_columns, "name": pks["name"]} diff --git a/lib/sqlalchemy/dialects/sybase/mxodbc.py b/lib/sqlalchemy/dialects/sybase/mxodbc.py index 2bf4071dd..5d5e5443e 100644 --- a/lib/sqlalchemy/dialects/sybase/mxodbc.py +++ b/lib/sqlalchemy/dialects/sybase/mxodbc.py @@ -17,12 +17,15 @@ """ -from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext +from sqlalchemy.dialects.sybase.base import SybaseDialect +from sqlalchemy.dialects.sybase.base import SybaseExecutionContext from sqlalchemy.connectors.mxodbc import MxODBCConnector + class SybaseExecutionContext_mxodbc(SybaseExecutionContext): pass + class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect): execution_ctx_cls = SybaseExecutionContext_mxodbc diff --git a/lib/sqlalchemy/dialects/sybase/pyodbc.py b/lib/sqlalchemy/dialects/sybase/pyodbc.py index c4badd3e5..0b68aec2d 100644 --- 
a/lib/sqlalchemy/dialects/sybase/pyodbc.py +++ b/lib/sqlalchemy/dialects/sybase/pyodbc.py @@ -35,9 +35,10 @@ Currently *not* supported are:: from sqlalchemy.dialects.sybase.base import SybaseDialect,\ SybaseExecutionContext from sqlalchemy.connectors.pyodbc import PyODBCConnector -from sqlalchemy import types as sqltypes, util, processors +from sqlalchemy import types as sqltypes, processors from sqlalchemy.util.compat import decimal + class _SybNumeric_pyodbc(sqltypes.Numeric): """Turns Decimals with adjusted() < -6 into floats. @@ -48,7 +49,7 @@ class _SybNumeric_pyodbc(sqltypes.Numeric): """ def bind_processor(self, dialect): - super_process = super(_SybNumeric_pyodbc,self).\ + super_process = super(_SybNumeric_pyodbc, self).\ bind_processor(dialect) def process(value): @@ -64,6 +65,7 @@ class _SybNumeric_pyodbc(sqltypes.Numeric): return value return process + class SybaseExecutionContext_pyodbc(SybaseExecutionContext): def set_ddl_autocommit(self, connection, value): if value: @@ -71,11 +73,12 @@ class SybaseExecutionContext_pyodbc(SybaseExecutionContext): else: connection.autocommit = False + class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect): execution_ctx_cls = SybaseExecutionContext_pyodbc colspecs = { - sqltypes.Numeric:_SybNumeric_pyodbc, + sqltypes.Numeric: _SybNumeric_pyodbc, } dialect = SybaseDialect_pyodbc diff --git a/lib/sqlalchemy/dialects/sybase/pysybase.py b/lib/sqlalchemy/dialects/sybase/pysybase.py index 58a669be0..e83130840 100644 --- a/lib/sqlalchemy/dialects/sybase/pysybase.py +++ b/lib/sqlalchemy/dialects/sybase/pysybase.py @@ -31,6 +31,7 @@ class _SybNumeric(sqltypes.Numeric): else: return sqltypes.Numeric.result_processor(self, dialect, type_) + class SybaseExecutionContext_pysybase(SybaseExecutionContext): def set_ddl_autocommit(self, dbapi_connection, value): @@ -53,14 +54,15 @@ class SybaseSQLCompiler_pysybase(SybaseSQLCompiler): def bindparam_string(self, name, **kw): return "@" + name + class 
SybaseDialect_pysybase(SybaseDialect): driver = 'pysybase' execution_ctx_cls = SybaseExecutionContext_pysybase statement_compiler = SybaseSQLCompiler_pysybase - colspecs={ - sqltypes.Numeric:_SybNumeric, - sqltypes.Float:sqltypes.Float + colspecs = { + sqltypes.Numeric: _SybNumeric, + sqltypes.Float: sqltypes.Float } @classmethod diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index f02e79f33..5ef190394 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -87,6 +87,7 @@ from . import util, strategies default_strategy = 'plain' + def create_engine(*args, **kwargs): """Create a new :class:`.Engine` instance. @@ -253,13 +254,13 @@ def create_engine(*args, **kwargs): opened above and beyond the pool_size setting, which defaults to five. this is only used with :class:`~sqlalchemy.pool.QueuePool`. - :param module=None: reference to a Python module object (the module itself, not - its string name). Specifies an alternate DBAPI module to be used - by the engine's dialect. Each sub-dialect references a specific DBAPI which - will be imported before first connect. This parameter causes the - import to be bypassed, and the given module to be used instead. - Can be used for testing of DBAPIs as well as to inject "mock" - DBAPI implementations into the :class:`.Engine`. + :param module=None: reference to a Python module object (the module + itself, not its string name). Specifies an alternate DBAPI module to + be used by the engine's dialect. Each sub-dialect references a + specific DBAPI which will be imported before first connect. This + parameter causes the import to be bypassed, and the given module to + be used instead. Can be used for testing of DBAPIs as well as to + inject "mock" DBAPI implementations into the :class:`.Engine`. 
:param pool=None: an already-constructed instance of :class:`~sqlalchemy.pool.Pool`, such as a @@ -353,5 +354,3 @@ __all__ = ( 'create_engine', 'engine_from_config', ) - - diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index 800742a11..a3a1bcfc2 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -17,6 +17,7 @@ from .interfaces import Connectable, Compiled from .util import _distill_params import contextlib + class Connection(Connectable): """Provides high-level functionality for a wrapped DB-API connection. @@ -118,10 +119,11 @@ class Connection(Connectable): result = connection.execution_options(stream_results=True).\\ execute(stmt) - Note that any key/value can be passed to :meth:`.Connection.execution_options`, - and it will be stored in the ``_execution_options`` dictionary of - the :class:`.Connnection`. It is suitable for usage by end-user - schemes to communicate with event listeners, for example. + Note that any key/value can be passed to + :meth:`.Connection.execution_options`, and it will be stored in the + ``_execution_options`` dictionary of the :class:`.Connnection`. It + is suitable for usage by end-user schemes to communicate with + event listeners, for example. The keywords that are currently recognized by SQLAlchemy itself include all those listed under :meth:`.Executable.execution_options`, @@ -328,7 +330,6 @@ class Connection(Connectable): del self.__connection self.__invalid = True - def detach(self): """Detach the underlying DB-API connection from its connection pool. @@ -377,7 +378,8 @@ class Connection(Connectable): :meth:`.Connection.begin_twophase` - use a two phase /XID transaction - :meth:`.Engine.begin` - context manager available from :class:`.Engine`. + :meth:`.Engine.begin` - context manager available from + :class:`.Engine`. 
""" @@ -414,8 +416,8 @@ class Connection(Connectable): The returned object is an instance of :class:`.TwoPhaseTransaction`, which in addition to the methods provided by - :class:`.Transaction`, also provides a :meth:`~.TwoPhaseTransaction.prepare` - method. + :class:`.Transaction`, also provides a + :meth:`~.TwoPhaseTransaction.prepare` method. :param xid: the two phase transaction id. If not supplied, a random id will be generated. @@ -595,7 +597,8 @@ class Connection(Connectable): return self.execute(object, *multiparams, **params).scalar() def execute(self, object, *multiparams, **params): - """Executes the a SQL statement construct and returns a :class:`.ResultProxy`. + """Executes the a SQL statement construct and returns a + :class:`.ResultProxy`. :param object: The statement to be executed. May be one of: @@ -664,7 +667,6 @@ class Connection(Connectable): "Unexecutable object type: %s" % type(object)) - def _execute_function(self, func, multiparams, params): """Execute a sql.FunctionElement object.""" @@ -754,7 +756,6 @@ class Connection(Connectable): dialect=dialect, column_keys=keys, inline=len(distilled_params) > 1) - ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_compiled, @@ -877,7 +878,6 @@ class Connection(Connectable): context) raise - if self._has_events: self.dispatch.after_cursor_execute(self, cursor, statement, @@ -1007,8 +1007,7 @@ class Connection(Connectable): context.handle_dbapi_exception(e) is_disconnect = isinstance(e, self.dialect.dbapi.Error) and \ - self.dialect.is_disconnect(e, self.__connection, cursor) - + self.dialect.is_disconnect(e, self.__connection, cursor) if is_disconnect: dbapi_conn_wrapper = self.connection @@ -1057,7 +1056,6 @@ class Connection(Connectable): basestring: _execute_text } - def default_schema_name(self): return self.engine.dialect.get_default_schema_name(self) @@ -1221,6 +1219,7 @@ class Transaction(object): else: self.rollback() + class RootTransaction(Transaction): def __init__(self, 
connection): super(RootTransaction, self).__init__(connection, None) @@ -1385,10 +1384,10 @@ class Engine(Connectable, log.Identified): shard1 = primary_engine.execution_options(shard_id="shard1") shard2 = primary_engine.execution_options(shard_id="shard2") - Above, the ``shard1`` engine serves as a factory for :class:`.Connection` - objects that will contain the execution option ``shard_id=shard1``, - and ``shard2`` will produce :class:`.Connection` objects that contain - the execution option ``shard_id=shard2``. + Above, the ``shard1`` engine serves as a factory for + :class:`.Connection` objects that will contain the execution option + ``shard_id=shard1``, and ``shard2`` will produce :class:`.Connection` + objects that contain the execution option ``shard_id=shard2``. An event handler can consume the above execution option to perform a schema switch or other operation, given a connection. Below @@ -1469,7 +1468,6 @@ class Engine(Connectable, log.Identified): """ self.pool = self.pool._replace() - def _execute_default(self, default): with self.contextual_connect() as conn: return conn._execute_default(default, (), {}) @@ -1504,7 +1502,6 @@ class Engine(Connectable, log.Identified): if not self.close_with_result: self.conn.close() - def begin(self, close_with_result=False): """Return a context manager delivering a :class:`.Connection` with a :class:`.Transaction` established. @@ -1521,11 +1518,11 @@ class Engine(Connectable, log.Identified): The ``close_with_result`` flag is normally ``False``, and indicates that the :class:`.Connection` will be closed when the operation - is complete. When set to ``True``, it indicates the :class:`.Connection` - is in "single use" mode, where the :class:`.ResultProxy` - returned by the first call to :meth:`.Connection.execute` will - close the :class:`.Connection` when that :class:`.ResultProxy` - has exhausted all result rows. + is complete. 
When set to ``True``, it indicates the + :class:`.Connection` is in "single use" mode, where the + :class:`.ResultProxy` returned by the first call to + :meth:`.Connection.execute` will close the :class:`.Connection` when + that :class:`.ResultProxy` has exhausted all result rows. .. versionadded:: 0.7.6 @@ -1637,29 +1634,33 @@ class Engine(Connectable, log.Identified): def connect(self, **kwargs): """Return a new :class:`.Connection` object. - The :class:`.Connection` object is a facade that uses a DBAPI connection internally - in order to communicate with the database. This connection is procured - from the connection-holding :class:`.Pool` referenced by this :class:`.Engine`. - When the :meth:`~.Connection.close` method of the :class:`.Connection` object is called, - the underlying DBAPI connection is then returned to the connection pool, - where it may be used again in a subsequent call to :meth:`~.Engine.connect`. + The :class:`.Connection` object is a facade that uses a DBAPI + connection internally in order to communicate with the database. This + connection is procured from the connection-holding :class:`.Pool` + referenced by this :class:`.Engine`. When the + :meth:`~.Connection.close` method of the :class:`.Connection` object + is called, the underlying DBAPI connection is then returned to the + connection pool, where it may be used again in a subsequent call to + :meth:`~.Engine.connect`. """ return self._connection_cls(self, **kwargs) def contextual_connect(self, close_with_result=False, **kwargs): - """Return a :class:`.Connection` object which may be part of some ongoing context. + """Return a :class:`.Connection` object which may be part of some + ongoing context. By default, this method does the same thing as :meth:`.Engine.connect`. Subclasses of :class:`.Engine` may override this method to provide contextual behavior. 
- :param close_with_result: When True, the first :class:`.ResultProxy` created - by the :class:`.Connection` will call the :meth:`.Connection.close` method - of that connection as soon as any pending result rows are exhausted. - This is used to supply the "connectionless execution" behavior provided - by the :meth:`.Engine.execute` method. + :param close_with_result: When True, the first :class:`.ResultProxy` + created by the :class:`.Connection` will call the + :meth:`.Connection.close` method of that connection as soon as any + pending result rows are exhausted. This is used to supply the + "connectionless execution" behavior provided by the + :meth:`.Engine.execute` method. """ @@ -1704,6 +1705,7 @@ class Engine(Connectable, log.Identified): return self.pool.unique_connection() + class OptionEngine(Engine): def __init__(self, proxied, execution_options): self._proxied = proxied diff --git a/lib/sqlalchemy/engine/ddl.py b/lib/sqlalchemy/engine/ddl.py index f95e549b0..8da678525 100644 --- a/lib/sqlalchemy/engine/ddl.py +++ b/lib/sqlalchemy/engine/ddl.py @@ -14,8 +14,11 @@ class DDLBase(schema.SchemaVisitor): def __init__(self, connection): self.connection = connection + class SchemaGenerator(DDLBase): - def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs): + + def __init__(self, dialect, connection, checkfirst=False, + tables=None, **kwargs): super(SchemaGenerator, self).__init__(connection, **kwargs) self.checkfirst = checkfirst self.tables = tables and set(tables) or None @@ -103,7 +106,9 @@ class SchemaGenerator(DDLBase): class SchemaDropper(DDLBase): - def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs): + + def __init__(self, dialect, connection, checkfirst=False, + tables=None, **kwargs): super(SchemaDropper, self).__init__(connection, **kwargs) self.checkfirst = checkfirst self.tables = tables @@ -116,15 +121,22 @@ class SchemaDropper(DDLBase): tables = self.tables else: tables = 
metadata.tables.values() - collection = [t for t in reversed(sql_util.sort_tables(tables)) - if self._can_drop_table(t)] - seq_coll = [s for s in metadata._sequences.values() - if s.column is None and self._can_drop_sequence(s)] - metadata.dispatch.before_drop(metadata, self.connection, - tables=collection, - checkfirst=self.checkfirst, - _ddl_runner=self) + collection = [ + t + for t in reversed(sql_util.sort_tables(tables)) + if self._can_drop_table(t) + ] + + seq_coll = [ + s + for s in metadata._sequences.values() + if s.column is None and self._can_drop_sequence(s) + ] + + metadata.dispatch.before_drop( + metadata, self.connection, tables=collection, + checkfirst=self.checkfirst, _ddl_runner=self) for table in collection: self.traverse_single(table, drop_ok=True) @@ -132,10 +144,9 @@ class SchemaDropper(DDLBase): for seq in seq_coll: self.traverse_single(seq, drop_ok=True) - metadata.dispatch.after_drop(metadata, self.connection, - tables=collection, - checkfirst=self.checkfirst, - _ddl_runner=self) + metadata.dispatch.after_drop( + metadata, self.connection, tables=collection, + checkfirst=self.checkfirst, _ddl_runner=self) def _can_drop_table(self, table): self.dialect.validate_identifier(table.name) diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py index c536d1a4c..335515a7b 100644 --- a/lib/sqlalchemy/engine/default.py +++ b/lib/sqlalchemy/engine/default.py @@ -45,7 +45,6 @@ class DefaultDialect(interfaces.Dialect): postfetch_lastrowid = True implicit_returning = False - supports_native_enum = False supports_native_boolean = False @@ -66,7 +65,6 @@ class DefaultDialect(interfaces.Dialect): description_encoding = 'use_encoding' # end Py2K - name = 'default' # length at which to truncate @@ -341,6 +339,7 @@ class DefaultDialect(interfaces.Dialect): # the configured default of this dialect. 
self.set_isolation_level(dbapi_conn, self.default_isolation_level) + class DefaultExecutionContext(interfaces.ExecutionContext): isinsert = False isupdate = False diff --git a/lib/sqlalchemy/engine/interfaces.py b/lib/sqlalchemy/engine/interfaces.py index 6d0b1cb61..66856a00e 100644 --- a/lib/sqlalchemy/engine/interfaces.py +++ b/lib/sqlalchemy/engine/interfaces.py @@ -8,6 +8,7 @@ from .. import util, event, events + class Dialect(object): """Define the behavior of a specific database and DB-API combination. @@ -485,7 +486,8 @@ class Dialect(object): raise NotImplementedError() - def do_execute_no_params(self, cursor, statement, parameters, context=None): + def do_execute_no_params(self, cursor, statement, parameters, + context=None): """Provide an implementation of ``cursor.execute(statement)``. The parameter collection should not be sent. @@ -767,8 +769,8 @@ class TypeCompiler(object): class Connectable(object): """Interface for an object which supports execution of SQL constructs. - The two implementations of :class:`.Connectable` are :class:`.Connection` and - :class:`.Engine`. + The two implementations of :class:`.Connectable` are + :class:`.Connection` and :class:`.Engine`. Connectable must also implement the 'dialect' member which references a :class:`.Dialect` instance. @@ -777,7 +779,6 @@ class Connectable(object): dispatch = event.dispatcher(events.ConnectionEvents) - def connect(self, **kwargs): """Return a :class:`.Connection` object. @@ -801,17 +802,19 @@ class Connectable(object): raise NotImplementedError() - @util.deprecated("0.7", "Use the create() method on the given schema " - "object directly, i.e. :meth:`.Table.create`, " - ":meth:`.Index.create`, :meth:`.MetaData.create_all`") + @util.deprecated("0.7", + "Use the create() method on the given schema " + "object directly, i.e. 
:meth:`.Table.create`, " + ":meth:`.Index.create`, :meth:`.MetaData.create_all`") def create(self, entity, **kwargs): """Emit CREATE statements for the given schema entity.""" raise NotImplementedError() - @util.deprecated("0.7", "Use the drop() method on the given schema " - "object directly, i.e. :meth:`.Table.drop`, " - ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`") + @util.deprecated("0.7", + "Use the drop() method on the given schema " + "object directly, i.e. :meth:`.Table.drop`, " + ":meth:`.Index.drop`, :meth:`.MetaData.drop_all`") def drop(self, entity, **kwargs): """Emit DROP statements for the given schema entity.""" @@ -834,4 +837,3 @@ class Connectable(object): def _execute_clauseelement(self, elem, multiparams=None, params=None): raise NotImplementedError() - diff --git a/lib/sqlalchemy/engine/reflection.py b/lib/sqlalchemy/engine/reflection.py index 4505aa18a..8367d8761 100644 --- a/lib/sqlalchemy/engine/reflection.py +++ b/lib/sqlalchemy/engine/reflection.py @@ -33,6 +33,7 @@ from ..util import topological from .. import inspection from .base import Connectable + @util.decorator def cache(fn, self, con, *args, **kw): info_cache = kw.get('info_cache', None) @@ -107,17 +108,19 @@ class Inspector(object): @classmethod def from_engine(cls, bind): - """Construct a new dialect-specific Inspector object from the given engine or connection. + """Construct a new dialect-specific Inspector object from the given + engine or connection. :param bind: a :class:`~sqlalchemy.engine.base.Connectable`, which is typically an instance of :class:`~sqlalchemy.engine.Engine` or :class:`~sqlalchemy.engine.Connection`. - This method differs from direct a direct constructor call of :class:`.Inspector` - in that the :class:`~sqlalchemy.engine.base.Dialect` is given a chance to provide - a dialect-specific :class:`.Inspector` instance, which may provide additional - methods. 
+ This method differs from direct a direct constructor call of + :class:`.Inspector` in that the + :class:`~sqlalchemy.engine.base.Dialect` is given a chance to provide + a dialect-specific :class:`.Inspector` instance, which may provide + additional methods. See the example at :class:`.Inspector`. @@ -194,15 +197,16 @@ class Inspector(object): return tnames def get_table_options(self, table_name, schema=None, **kw): - """Return a dictionary of options specified when the table of the given name was created. + """Return a dictionary of options specified when the table of the + given name was created. This currently includes some options that apply to MySQL tables. """ if hasattr(self.dialect, 'get_table_options'): - return self.dialect.get_table_options(self.bind, table_name, schema, - info_cache=self.info_cache, - **kw) + return self.dialect.get_table_options( + self.bind, table_name, schema, + info_cache=self.info_cache, **kw) return {} def get_view_names(self, schema=None): @@ -285,7 +289,6 @@ class Inspector(object): info_cache=self.info_cache, **kw) - def get_foreign_keys(self, table_name, schema=None, **kw): """Return information about foreign_keys in `table_name`. @@ -341,7 +344,8 @@ class Inspector(object): info_cache=self.info_cache, **kw) def reflecttable(self, table, include_columns, exclude_columns=()): - """Given a Table object, load its internal constructs based on introspection. + """Given a Table object, load its internal constructs based on + introspection. This is the underlying method used by most dialects to produce table reflection. Direct usage is like:: @@ -410,8 +414,9 @@ class Inspector(object): colargs = [] if col_d.get('default') is not None: - # the "default" value is assumed to be a literal SQL expression, - # so is wrapped in text() so that no quoting occurs on re-issuance. + # the "default" value is assumed to be a literal SQL + # expression, so is wrapped in text() so that no quoting + # occurs on re-issuance. 
colargs.append( sa_schema.DefaultClause( sql.text(col_d['default']), _reflected=True @@ -437,11 +442,18 @@ class Inspector(object): # Primary keys pk_cons = self.get_pk_constraint(table_name, schema, **tblkw) if pk_cons: - pk_cols = [table.c[pk] - for pk in pk_cons['constrained_columns'] - if pk in table.c and pk not in exclude_columns - ] + [pk for pk in table.primary_key if pk.key in exclude_columns] - primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), + pk_cols = [ + table.c[pk] + for pk in pk_cons['constrained_columns'] + if pk in table.c and pk not in exclude_columns + ] + pk_cols += [ + pk + for pk in table.primary_key + if pk.key in exclude_columns + ] + primary_key_constraint = sa_schema.PrimaryKeyConstraint( + name=pk_cons.get('name'), *pk_cols ) diff --git a/lib/sqlalchemy/engine/result.py b/lib/sqlalchemy/engine/result.py index 6962a4d1e..98b0ea4b2 100644 --- a/lib/sqlalchemy/engine/result.py +++ b/lib/sqlalchemy/engine/result.py @@ -831,6 +831,7 @@ class ResultProxy(object): else: return None + class BufferedRowResultProxy(ResultProxy): """A ResultProxy with row buffering behavior. 
@@ -934,6 +935,7 @@ class FullyBufferedResultProxy(ResultProxy): self.__rowbuffer = collections.deque() return ret + class BufferedColumnRow(RowProxy): def __init__(self, parent, row, processors, keymap): # preprocess row diff --git a/lib/sqlalchemy/engine/strategies.py b/lib/sqlalchemy/engine/strategies.py index 1e321603e..2db1bfcc5 100644 --- a/lib/sqlalchemy/engine/strategies.py +++ b/lib/sqlalchemy/engine/strategies.py @@ -79,19 +79,18 @@ class DefaultEngineStrategy(EngineStrategy): try: return dialect.connect(*cargs, **cparams) except Exception, e: + invalidated = dialect.is_disconnect(e, None, None) # Py3K #raise exc.DBAPIError.instance(None, None, - # e, dialect.dbapi.Error, - # connection_invalidated= - # dialect.is_disconnect(e, None, None) - # ) from e + # e, dialect.dbapi.Error, + # connection_invalidated=invalidated + #) from e # Py2K import sys raise exc.DBAPIError.instance( - None, None, e, dialect.dbapi.Error, - connection_invalidated= - dialect.is_disconnect(e, None, None)), \ - None, sys.exc_info()[2] + None, None, e, dialect.dbapi.Error, + connection_invalidated=invalidated + ), None, sys.exc_info()[2] # end Py2K creator = kwargs.pop('creator', connect) @@ -107,9 +106,9 @@ class DefaultEngineStrategy(EngineStrategy): 'echo': 'echo_pool', 'timeout': 'pool_timeout', 'recycle': 'pool_recycle', - 'events':'pool_events', - 'use_threadlocal':'pool_threadlocal', - 'reset_on_return':'pool_reset_on_return'} + 'events': 'pool_events', + 'use_threadlocal': 'pool_threadlocal', + 'reset_on_return': 'pool_reset_on_return'} for k in util.get_cls_kwargs(poolclass): tk = translate.get(k, k) if tk in kwargs: @@ -147,7 +146,8 @@ class DefaultEngineStrategy(EngineStrategy): do_on_connect = dialect.on_connect() if do_on_connect: def on_connect(dbapi_connection, connection_record): - conn = getattr(dbapi_connection, '_sqla_unwrap', dbapi_connection) + conn = getattr( + dbapi_connection, '_sqla_unwrap', dbapi_connection) if conn is None: return do_on_connect(conn) @@ 
-238,12 +238,14 @@ class MockEngineStrategy(EngineStrategy): kwargs['checkfirst'] = False from sqlalchemy.engine import ddl - ddl.SchemaGenerator(self.dialect, self, **kwargs).traverse_single(entity) + ddl.SchemaGenerator( + self.dialect, self, **kwargs).traverse_single(entity) def drop(self, entity, **kwargs): kwargs['checkfirst'] = False from sqlalchemy.engine import ddl - ddl.SchemaDropper(self.dialect, self, **kwargs).traverse_single(entity) + ddl.SchemaDropper( + self.dialect, self, **kwargs).traverse_single(entity) def _run_visitor(self, visitorcallable, element, connection=None, diff --git a/lib/sqlalchemy/engine/threadlocal.py b/lib/sqlalchemy/engine/threadlocal.py index 2ff498db5..06246e854 100644 --- a/lib/sqlalchemy/engine/threadlocal.py +++ b/lib/sqlalchemy/engine/threadlocal.py @@ -6,16 +6,19 @@ """Provides a thread-local transactional wrapper around the root Engine class. -The ``threadlocal`` module is invoked when using the ``strategy="threadlocal"`` flag -with :func:`~sqlalchemy.engine.create_engine`. This module is semi-private and is -invoked automatically when the threadlocal engine strategy is used. +The ``threadlocal`` module is invoked when using the +``strategy="threadlocal"`` flag with :func:`~sqlalchemy.engine.create_engine`. +This module is semi-private and is invoked automatically when the threadlocal +engine strategy is used. """ -from .. import util, event +from .. import util from . import base import weakref + class TLConnection(base.Connection): + def __init__(self, *arg, **kw): super(TLConnection, self).__init__(*arg, **kw) self.__opencount = 0 @@ -33,16 +36,18 @@ class TLConnection(base.Connection): self.__opencount = 0 base.Connection.close(self) + class TLEngine(base.Engine): - """An Engine that includes support for thread-local managed transactions.""" + """An Engine that includes support for thread-local managed + transactions. 
+ """ _tl_connection_cls = TLConnection def __init__(self, *args, **kwargs): super(TLEngine, self).__init__(*args, **kwargs) self._connections = util.threading.local() - def contextual_connect(self, **kw): if not hasattr(self._connections, 'conn'): connection = None @@ -52,21 +57,24 @@ class TLEngine(base.Engine): if connection is None or connection.closed: # guards against pool-level reapers, if desired. # or not connection.connection.is_valid: - connection = self._tl_connection_cls(self, self.pool.connect(), **kw) - self._connections.conn = conn = weakref.ref(connection) + connection = self._tl_connection_cls( + self, self.pool.connect(), **kw) + self._connections.conn = weakref.ref(connection) return connection._increment_connect() def begin_twophase(self, xid=None): if not hasattr(self._connections, 'trans'): self._connections.trans = [] - self._connections.trans.append(self.contextual_connect().begin_twophase(xid=xid)) + self._connections.trans.append( + self.contextual_connect().begin_twophase(xid=xid)) return self def begin_nested(self): if not hasattr(self._connections, 'trans'): self._connections.trans = [] - self._connections.trans.append(self.contextual_connect().begin_nested()) + self._connections.trans.append( + self.contextual_connect().begin_nested()) return self def begin(self): diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 5947deec3..42b5de7db 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -7,8 +7,9 @@ """Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates information about a database connection specification. 
-The URL object is created automatically when :func:`~sqlalchemy.engine.create_engine` is called -with a string argument; alternatively, the URL is a public-facing construct which can +The URL object is created automatically when +:func:`~sqlalchemy.engine.create_engine` is called with a string +argument; alternatively, the URL is a public-facing construct which can be used directly and is also accepted directly by ``create_engine()``. """ @@ -124,8 +125,8 @@ class URL(object): :param \**kw: Optional, alternate key names for url attributes. - :param names: Deprecated. Same purpose as the keyword-based alternate names, - but correlates the name to the original positionally. + :param names: Deprecated. Same purpose as the keyword-based alternate + names, but correlates the name to the original positionally. """ translated = {} @@ -141,6 +142,7 @@ class URL(object): translated[name] = getattr(self, sname) return translated + def make_url(name_or_url): """Given a string or unicode instance, produce a new URL instance. @@ -153,6 +155,7 @@ def make_url(name_or_url): else: return name_or_url + def _parse_rfc1738_args(name): pattern = re.compile(r''' (?P<name>[\w\+]+):// @@ -165,8 +168,7 @@ def _parse_rfc1738_args(name): (?::(?P<port>[^/]*))? )? (?:/(?P<database>.*))? 
- ''' - , re.X) + ''', re.X) m = pattern.match(name) if m is not None: @@ -184,7 +186,8 @@ def _parse_rfc1738_args(name): components['query'] = query if components['password'] is not None: - components['password'] = urllib.unquote_plus(components['password']) + components['password'] = \ + urllib.unquote_plus(components['password']) name = components.pop('name') return URL(name, **components) @@ -192,11 +195,12 @@ def _parse_rfc1738_args(name): raise exc.ArgumentError( "Could not parse rfc1738 URL from string '%s'" % name) + def _parse_keyvalue_args(name): - m = re.match( r'(\w+)://(.*)', name) + m = re.match(r'(\w+)://(.*)', name) if m is not None: (name, args) = m.group(1, 2) - opts = dict( util.parse_qsl( args ) ) + opts = dict(util.parse_qsl(args)) return URL(name, *opts) else: return None diff --git a/lib/sqlalchemy/engine/util.py b/lib/sqlalchemy/engine/util.py index 6bf8f2d3f..8cdfd5cdf 100644 --- a/lib/sqlalchemy/engine/util.py +++ b/lib/sqlalchemy/engine/util.py @@ -6,6 +6,7 @@ from .. import util + def _coerce_config(configuration, prefix): """Convert configuration values to expected types.""" @@ -26,6 +27,7 @@ def _coerce_config(configuration, prefix): util.coerce_kw_type(options, option, type_) return options + def connection_memoize(key): """Decorator, memoize a function in a connection.info stash. 
@@ -44,6 +46,7 @@ def connection_memoize(key): return decorated + def py_fallback(): def _distill_params(multiparams, params): """Given arguments from the calling form *multiparams, **params, @@ -89,5 +92,3 @@ try: from sqlalchemy.cutils import _distill_params except ImportError: globals().update(py_fallback()) - - diff --git a/lib/sqlalchemy/event.py b/lib/sqlalchemy/event.py index c702d9d34..bf996ae3c 100644 --- a/lib/sqlalchemy/event.py +++ b/lib/sqlalchemy/event.py @@ -12,6 +12,7 @@ from itertools import chain CANCEL = util.symbol('CANCEL') NO_RETVAL = util.symbol('NO_RETVAL') + def listen(target, identifier, fn, *args, **kw): """Register a listener function for the given target. @@ -40,6 +41,7 @@ def listen(target, identifier, fn, *args, **kw): raise exc.InvalidRequestError("No such event '%s' for target '%s'" % (identifier, target)) + def listens_for(target, identifier, *args, **kw): """Decorate a function as a listener for the given target + identifier. @@ -60,6 +62,7 @@ def listens_for(target, identifier, *args, **kw): return fn return decorate + def remove(target, identifier, fn): """Remove an event listener. @@ -75,12 +78,14 @@ def remove(target, identifier, fn): _registrars = util.defaultdict(list) + def _is_event_name(name): return not name.startswith('_') and name != 'dispatch' + class _UnpickleDispatch(object): - """Serializable callable that re-generates an instance of :class:`_Dispatch` - given a particular :class:`.Events` subclass. + """Serializable callable that re-generates an instance of + :class:`_Dispatch` given a particular :class:`.Events` subclass. """ def __call__(self, _parent_cls): @@ -90,6 +95,7 @@ class _UnpickleDispatch(object): else: raise AttributeError("No class with a 'dispatch' member present.") + class _Dispatch(object): """Mirror the event listening definitions of an Events class with listener collections. 
@@ -154,9 +160,11 @@ class _Dispatch(object): if _is_event_name(attr): getattr(self, attr).for_modify(self).clear() + def _event_descriptors(target): return [getattr(target, k) for k in dir(target) if _is_event_name(k)] + class _EventMeta(type): """Intercept new Event subclasses and create associated _Dispatch classes.""" @@ -165,6 +173,7 @@ class _EventMeta(type): _create_dispatcher_class(cls, classname, bases, dict_) return type.__init__(cls, classname, bases, dict_) + def _create_dispatcher_class(cls, classname, bases, dict_): """Create a :class:`._Dispatch` class corresponding to an :class:`.Events` class.""" @@ -182,6 +191,7 @@ def _create_dispatcher_class(cls, classname, bases, dict_): setattr(dispatch_cls, k, _DispatchDescriptor(dict_[k])) _registrars[k].append(cls) + def _remove_dispatcher(cls): for k in dir(cls): if _is_event_name(k): @@ -189,10 +199,10 @@ def _remove_dispatcher(cls): if not _registrars[k]: del _registrars[k] + class Events(object): """Define event listening functions for a particular target type.""" - __metaclass__ = _EventMeta @classmethod @@ -225,6 +235,7 @@ class Events(object): def _clear(cls): cls.dispatch._clear() + class _DispatchDescriptor(object): """Class-level attributes on :class:`._Dispatch` classes.""" @@ -357,6 +368,7 @@ class _EmptyListener(object): def __nonzero__(self): return bool(self.parent_listeners) + class _CompoundListener(object): _exec_once = False @@ -479,8 +491,10 @@ class _JoinedDispatchDescriptor(object): ) return ret + class _JoinedListener(_CompoundListener): _exec_once = False + def __init__(self, parent, name, local): self.parent = parent self.name = name @@ -527,4 +541,3 @@ class dispatcher(object): return self.dispatch_cls obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls) return disp - diff --git a/lib/sqlalchemy/events.py b/lib/sqlalchemy/events.py index 61392ea62..07661c919 100644 --- a/lib/sqlalchemy/events.py +++ b/lib/sqlalchemy/events.py @@ -216,6 +216,7 @@ class DDLEvents(event.Events): 
""" + class SchemaEventTarget(object): """Base class for elements that are the targets of :class:`.DDLEvents` events. @@ -235,6 +236,7 @@ class SchemaEventTarget(object): self._set_parent(parent) self.dispatch.after_parent_attach(self, parent) + class PoolEvents(event.Events): """Available events for :class:`.Pool`. @@ -334,6 +336,7 @@ class PoolEvents(event.Events): """ + class ConnectionEvents(event.Events): """Available events for :class:`.Connectable`, which includes :class:`.Connection` and :class:`.Engine`. @@ -661,4 +664,3 @@ class ConnectionEvents(event.Events): :meth:`.TwoPhaseTransaction.prepare` was called. """ - diff --git a/lib/sqlalchemy/exc.py b/lib/sqlalchemy/exc.py index 3c4a64704..1334d63f2 100644 --- a/lib/sqlalchemy/exc.py +++ b/lib/sqlalchemy/exc.py @@ -6,14 +6,15 @@ """Exceptions used with SQLAlchemy. -The base exception class is :class:`.SQLAlchemyError`. Exceptions which are raised as a -result of DBAPI exceptions are all subclasses of +The base exception class is :class:`.SQLAlchemyError`. Exceptions which are +raised as a result of DBAPI exceptions are all subclasses of :class:`.DBAPIError`. """ import traceback + class SQLAlchemyError(Exception): """Generic error class.""" @@ -25,14 +26,17 @@ class ArgumentError(SQLAlchemyError): """ + class NoForeignKeysError(ArgumentError): """Raised when no foreign keys can be located between two selectables during a join.""" + class AmbiguousForeignKeysError(ArgumentError): """Raised when more than one foreign key matching can be located between two selectables during a join.""" + class CircularDependencyError(SQLAlchemyError): """Raised by topological sorts when a circular dependency is detected. 
@@ -64,9 +68,11 @@ class CircularDependencyError(SQLAlchemyError): return self.__class__, (None, self.cycles, self.edges, self.args[0]) + class CompileError(SQLAlchemyError): """Raised when an error occurs during SQL compilation""" + class IdentifierError(SQLAlchemyError): """Raised when a schema name is beyond the max character limit""" @@ -75,15 +81,14 @@ class DisconnectionError(SQLAlchemyError): """A disconnect is detected on a raw DB-API connection. This error is raised and consumed internally by a connection pool. It can - be raised by the :meth:`.PoolEvents.checkout` event - so that the host pool forces a retry; the exception will be caught - three times in a row before the pool gives up and raises - :class:`~sqlalchemy.exc.InvalidRequestError` regarding the connection attempt. + be raised by the :meth:`.PoolEvents.checkout` event so that the host pool + forces a retry; the exception will be caught three times in a row before + the pool gives up and raises :class:`~sqlalchemy.exc.InvalidRequestError` + regarding the connection attempt. """ - class TimeoutError(SQLAlchemyError): """Raised when a connection pool times out on getting a connection.""" @@ -95,23 +100,30 @@ class InvalidRequestError(SQLAlchemyError): """ + class NoInspectionAvailable(InvalidRequestError): """A subject passed to :func:`sqlalchemy.inspection.inspect` produced no context for inspection.""" + class ResourceClosedError(InvalidRequestError): """An operation was requested from a connection, cursor, or other object that's in a closed state.""" + class NoSuchColumnError(KeyError, InvalidRequestError): """A nonexistent column is requested from a ``RowProxy``.""" + class NoReferenceError(InvalidRequestError): """Raised by ``ForeignKey`` to indicate a reference cannot be resolved.""" + class NoReferencedTableError(NoReferenceError): - """Raised by ``ForeignKey`` when the referred ``Table`` cannot be located.""" + """Raised by ``ForeignKey`` when the referred ``Table`` cannot be + located. 
+ """ def __init__(self, message, tname): NoReferenceError.__init__(self, message) self.table_name = tname @@ -119,9 +131,12 @@ class NoReferencedTableError(NoReferenceError): def __reduce__(self): return self.__class__, (self.args[0], self.table_name) + class NoReferencedColumnError(NoReferenceError): - """Raised by ``ForeignKey`` when the referred ``Column`` cannot be located.""" + """Raised by ``ForeignKey`` when the referred ``Column`` cannot be + located. + """ def __init__(self, message, tname, cname): NoReferenceError.__init__(self, message) self.table_name = tname @@ -131,6 +146,7 @@ class NoReferencedColumnError(NoReferenceError): return self.__class__, (self.args[0], self.table_name, self.column_name) + class NoSuchTableError(InvalidRequestError): """Table does not exist or is not visible to a connection.""" @@ -166,6 +182,7 @@ if sys.version_info < (2, 5): # Moved to orm.exc; compatibility definition installed by orm import until 0.6 UnmappedColumnError = None + class StatementError(SQLAlchemyError): """An error occurred during execution of a SQL statement. @@ -207,6 +224,7 @@ class StatementError(SQLAlchemyError): def __unicode__(self): return self.__str__() + class DBAPIError(StatementError): """Raised when the execution of a database operation fails. @@ -219,13 +237,14 @@ class DBAPIError(StatementError): raise the same exception type for any given error condition. :class:`DBAPIError` features :attr:`~.StatementError.statement` - and :attr:`~.StatementError.params` attributes which supply context regarding - the specifics of the statement which had an issue, for the + and :attr:`~.StatementError.params` attributes which supply context + regarding the specifics of the statement which had an issue, for the typical case when the error was raised within the context of emitting a SQL statement. - The wrapped exception object is available in the :attr:`~.StatementError.orig` attribute. - Its type and properties are DB-API implementation specific. 
+ The wrapped exception object is available in the + :attr:`~.StatementError.orig` attribute. Its type and properties are + DB-API implementation specific. """ @@ -243,11 +262,12 @@ class DBAPIError(StatementError): # not a DBAPI error, statement is present. # raise a StatementError if not isinstance(orig, dbapi_base_err) and statement: + msg = traceback.format_exception_only( + orig.__class__, orig)[-1].strip() return StatementError( - "%s (original cause: %s)" % ( - str(orig), - traceback.format_exception_only(orig.__class__, orig)[-1].strip() - ), statement, params, orig) + "%s (original cause: %s)" % (str(orig), msg), + statement, params, orig + ) name, glob = orig.__class__.__name__, globals() if name in glob and issubclass(glob[name], DBAPIError): diff --git a/lib/sqlalchemy/ext/__init__.py b/lib/sqlalchemy/ext/__init__.py index 7558ac268..bb99e60fc 100644 --- a/lib/sqlalchemy/ext/__init__.py +++ b/lib/sqlalchemy/ext/__init__.py @@ -3,4 +3,3 @@ # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php - diff --git a/lib/sqlalchemy/ext/associationproxy.py b/lib/sqlalchemy/ext/associationproxy.py index 27c76eb6b..f6c0764e4 100644 --- a/lib/sqlalchemy/ext/associationproxy.py +++ b/lib/sqlalchemy/ext/associationproxy.py @@ -27,24 +27,25 @@ def association_proxy(target_collection, attr, **kw): The returned value is an instance of :class:`.AssociationProxy`. - Implements a Python property representing a relationship as a collection of - simpler values, or a scalar value. The proxied property will mimic the collection type of - the target (list, dict or set), or, in the case of a one to one relationship, - a simple scalar value. + Implements a Python property representing a relationship as a collection + of simpler values, or a scalar value. 
The proxied property will mimic + the collection type of the target (list, dict or set), or, in the case of + a one to one relationship, a simple scalar value. :param target_collection: Name of the attribute we'll proxy to. This attribute is typically mapped by :func:`~sqlalchemy.orm.relationship` to link to a target collection, but can also be a many-to-one or non-scalar relationship. - :param attr: Attribute on the associated instance or instances we'll proxy for. + :param attr: Attribute on the associated instance or instances we'll + proxy for. For example, given a target collection of [obj1, obj2], a list created by this proxy property would look like [getattr(obj1, *attr*), getattr(obj2, *attr*)] - If the relationship is one-to-one or otherwise uselist=False, then simply: - getattr(obj, *attr*) + If the relationship is one-to-one or otherwise uselist=False, then + simply: getattr(obj, *attr*) :param creator: optional. @@ -89,34 +90,36 @@ class AssociationProxy(object): :param target_collection: Name of the collection we'll proxy to, usually created with :func:`.relationship`. - :param attr: Attribute on the collected instances we'll proxy for. For example, - given a target collection of [obj1, obj2], a list created by this - proxy property would look like [getattr(obj1, attr), getattr(obj2, - attr)] + :param attr: Attribute on the collected instances we'll proxy + for. For example, given a target collection of [obj1, obj2], a + list created by this proxy property would look like + [getattr(obj1, attr), getattr(obj2, attr)] - :param creator: Optional. When new items are added to this proxied collection, new - instances of the class collected by the target collection will be - created. For list and set collections, the target class constructor - will be called with the 'value' for the new instance. For dict - types, two arguments are passed: key and value. + :param creator: Optional. 
When new items are added to this proxied + collection, new instances of the class collected by the target + collection will be created. For list and set collections, the + target class constructor will be called with the 'value' for the + new instance. For dict types, two arguments are passed: + key and value. If you want to construct instances differently, supply a 'creator' function that takes arguments as above and returns instances. - :param getset_factory: Optional. Proxied attribute access is automatically handled by - routines that get and set values based on the `attr` argument for - this proxy. + :param getset_factory: Optional. Proxied attribute access is + automatically handled by routines that get and set values based on + the `attr` argument for this proxy. If you would like to customize this behavior, you may supply a `getset_factory` callable that produces a tuple of `getter` and `setter` functions. The factory is called with two arguments, the abstract type of the underlying collection and this proxy instance. - :param proxy_factory: Optional. The type of collection to emulate is determined by - sniffing the target collection. If your collection type can't be - determined by duck typing or you'd like to use a different - collection implementation, you may supply a factory function to - produce those collections. Only applicable to non-scalar relationships. + :param proxy_factory: Optional. The type of collection to emulate is + determined by sniffing the target collection. If your collection + type can't be determined by duck typing or you'd like to use a + different collection implementation, you may supply a factory + function to produce those collections. Only applicable to + non-scalar relationships. :param proxy_bulk_set: Optional, use with proxy_factory. See the _set() method for details. 
@@ -279,7 +282,8 @@ class AssociationProxy(object): self.collection_class = util.duck_type_collection(lazy_collection()) if self.proxy_factory: - return self.proxy_factory(lazy_collection, creator, self.value_attr, self) + return self.proxy_factory( + lazy_collection, creator, self.value_attr, self) if self.getset_factory: getter, setter = self.getset_factory(self.collection_class, self) @@ -287,11 +291,14 @@ class AssociationProxy(object): getter, setter = self._default_getset(self.collection_class) if self.collection_class is list: - return _AssociationList(lazy_collection, creator, getter, setter, self) + return _AssociationList( + lazy_collection, creator, getter, setter, self) elif self.collection_class is dict: - return _AssociationDict(lazy_collection, creator, getter, setter, self) + return _AssociationDict( + lazy_collection, creator, getter, setter, self) elif self.collection_class is set: - return _AssociationSet(lazy_collection, creator, getter, setter, self) + return _AssociationSet( + lazy_collection, creator, getter, setter, self) else: raise exc.ArgumentError( 'could not guess which interface to use for ' @@ -340,9 +347,11 @@ class AssociationProxy(object): """ if self._value_is_scalar: - value_expr = getattr(self.target_class, self.value_attr).has(criterion, **kwargs) + value_expr = getattr( + self.target_class, self.value_attr).has(criterion, **kwargs) else: - value_expr = getattr(self.target_class, self.value_attr).any(criterion, **kwargs) + value_expr = getattr( + self.target_class, self.value_attr).any(criterion, **kwargs) # check _value_is_scalar here, otherwise # we're scalar->scalar - call .any() so that @@ -409,12 +418,13 @@ class _lazy_collection(object): return getattr(obj, self.target) def __getstate__(self): - return {'obj':self.ref(), 'target':self.target} + return {'obj': self.ref(), 'target': self.target} def __setstate__(self, state): self.ref = weakref.ref(state['obj']) self.target = state['target'] + class 
_AssociationCollection(object): def __init__(self, lazy_collection, creator, getter, setter, parent): """Constructs an _AssociationCollection. @@ -456,13 +466,14 @@ class _AssociationCollection(object): return bool(self.col) def __getstate__(self): - return {'parent':self.parent, 'lazy_collection':self.lazy_collection} + return {'parent': self.parent, 'lazy_collection': self.lazy_collection} def __setstate__(self, state): self.parent = state['parent'] self.lazy_collection = state['lazy_collection'] self.parent._inflate(self) + class _AssociationList(_AssociationCollection): """Generic, converting, list-to-list proxy.""" @@ -652,6 +663,8 @@ class _AssociationList(_AssociationCollection): _NotProvided = util.symbol('_NotProvided') + + class _AssociationDict(_AssociationCollection): """Generic, converting, dict-to-dict proxy.""" @@ -734,7 +747,7 @@ class _AssociationDict(_AssociationCollection): return self.col.iterkeys() def values(self): - return [ self._get(member) for member in self.col.values() ] + return [self._get(member) for member in self.col.values()] def itervalues(self): for key in self.col: @@ -766,8 +779,8 @@ class _AssociationDict(_AssociationCollection): len(a)) elif len(a) == 1: seq_or_map = a[0] - # discern dict from sequence - took the advice - # from http://www.voidspace.org.uk/python/articles/duck_typing.shtml + # discern dict from sequence - took the advice from + # http://www.voidspace.org.uk/python/articles/duck_typing.shtml # still not perfect :( if hasattr(seq_or_map, 'keys'): for item in seq_or_map: diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index e3e668364..93984d0d1 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -9,8 +9,9 @@ Synopsis ======== -Usage involves the creation of one or more :class:`~sqlalchemy.sql.expression.ClauseElement` -subclasses and one or more callables defining its compilation:: +Usage involves the creation of one or more 
+:class:`~sqlalchemy.sql.expression.ClauseElement` subclasses and one or +more callables defining its compilation:: from sqlalchemy.ext.compiler import compiles from sqlalchemy.sql.expression import ColumnClause @@ -58,7 +59,8 @@ invoked for the dialect in use:: def visit_alter_column(element, compiler, **kw): return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name) -The second ``visit_alter_table`` will be invoked when any ``postgresql`` dialect is used. +The second ``visit_alter_table`` will be invoked when any ``postgresql`` +dialect is used. Compiling sub-elements of a custom expression construct ======================================================= @@ -99,10 +101,11 @@ Produces:: Cross Compiling between SQL and DDL compilers --------------------------------------------- -SQL and DDL constructs are each compiled using different base compilers - ``SQLCompiler`` -and ``DDLCompiler``. A common need is to access the compilation rules of SQL expressions -from within a DDL expression. The ``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as below where we generate a CHECK -constraint that embeds a SQL expression:: +SQL and DDL constructs are each compiled using different base compilers - +``SQLCompiler`` and ``DDLCompiler``. A common need is to access the +compilation rules of SQL expressions from within a DDL expression. 
The +``DDLCompiler`` includes an accessor ``sql_compiler`` for this reason, such as +below where we generate a CHECK constraint that embeds a SQL expression:: @compiles(MyConstraint) def compile_my_constraint(constraint, ddlcompiler, **kw): @@ -116,20 +119,22 @@ constraint that embeds a SQL expression:: Enabling Autocommit on a Construct ================================== -Recall from the section :ref:`autocommit` that the :class:`.Engine`, when asked to execute -a construct in the absence of a user-defined transaction, detects if the given -construct represents DML or DDL, that is, a data modification or data definition statement, which -requires (or may require, in the case of DDL) that the transaction generated by the DBAPI be committed -(recall that DBAPI always has a transaction going on regardless of what SQLAlchemy does). Checking -for this is actually accomplished -by checking for the "autocommit" execution option on the construct. When building a construct like -an INSERT derivation, a new DDL type, or perhaps a stored procedure that alters data, the "autocommit" -option needs to be set in order for the statement to function with "connectionless" execution +Recall from the section :ref:`autocommit` that the :class:`.Engine`, when +asked to execute a construct in the absence of a user-defined transaction, +detects if the given construct represents DML or DDL, that is, a data +modification or data definition statement, which requires (or may require, +in the case of DDL) that the transaction generated by the DBAPI be committed +(recall that DBAPI always has a transaction going on regardless of what +SQLAlchemy does). Checking for this is actually accomplished by checking for +the "autocommit" execution option on the construct. 
When building a +construct like an INSERT derivation, a new DDL type, or perhaps a stored +procedure that alters data, the "autocommit" option needs to be set in order +for the statement to function with "connectionless" execution (as described in :ref:`dbengine_implicit`). -Currently a quick way to do this is to subclass :class:`.Executable`, then add the "autocommit" flag -to the ``_execution_options`` dictionary (note this is a "frozen" dictionary which supplies a generative -``union()`` method):: +Currently a quick way to do this is to subclass :class:`.Executable`, then +add the "autocommit" flag to the ``_execution_options`` dictionary (note this +is a "frozen" dictionary which supplies a generative ``union()`` method):: from sqlalchemy.sql.expression import Executable, ClauseElement @@ -137,8 +142,9 @@ to the ``_execution_options`` dictionary (note this is a "frozen" dictionary whi _execution_options = \\ Executable._execution_options.union({'autocommit': True}) -More succinctly, if the construct is truly similar to an INSERT, UPDATE, or DELETE, :class:`.UpdateBase` -can be used, which already is a subclass of :class:`.Executable`, :class:`.ClauseElement` and includes the +More succinctly, if the construct is truly similar to an INSERT, UPDATE, or +DELETE, :class:`.UpdateBase` can be used, which already is a subclass +of :class:`.Executable`, :class:`.ClauseElement` and includes the ``autocommit`` flag:: from sqlalchemy.sql.expression import UpdateBase @@ -150,7 +156,8 @@ can be used, which already is a subclass of :class:`.Executable`, :class:`.Claus -DDL elements that subclass :class:`.DDLElement` already have the "autocommit" flag turned on. +DDL elements that subclass :class:`.DDLElement` already have the +"autocommit" flag turned on. 
@@ -158,13 +165,16 @@ DDL elements that subclass :class:`.DDLElement` already have the "autocommit" fl Changing the default compilation of existing constructs ======================================================= -The compiler extension applies just as well to the existing constructs. When overriding -the compilation of a built in SQL construct, the @compiles decorator is invoked upon -the appropriate class (be sure to use the class, i.e. ``Insert`` or ``Select``, instead of the creation function such as ``insert()`` or ``select()``). +The compiler extension applies just as well to the existing constructs. When +overriding the compilation of a built in SQL construct, the @compiles +decorator is invoked upon the appropriate class (be sure to use the class, +i.e. ``Insert`` or ``Select``, instead of the creation function such +as ``insert()`` or ``select()``). -Within the new compilation function, to get at the "original" compilation routine, -use the appropriate visit_XXX method - this because compiler.process() will call upon the -overriding routine and cause an endless loop. Such as, to add "prefix" to all insert statements:: +Within the new compilation function, to get at the "original" compilation +routine, use the appropriate visit_XXX method - this +because compiler.process() will call upon the overriding routine and cause +an endless loop. Such as, to add "prefix" to all insert statements:: from sqlalchemy.sql.expression import Insert @@ -172,14 +182,16 @@ overriding routine and cause an endless loop. Such as, to add "prefix" to all def prefix_inserts(insert, compiler, **kw): return compiler.visit_insert(insert.prefix_with("some prefix"), **kw) -The above compiler will prefix all INSERT statements with "some prefix" when compiled. +The above compiler will prefix all INSERT statements with "some prefix" when +compiled. .. 
_type_compilation_extension: Changing Compilation of Types ============================= -``compiler`` works for types, too, such as below where we implement the MS-SQL specific 'max' keyword for ``String``/``VARCHAR``:: +``compiler`` works for types, too, such as below where we implement the +MS-SQL specific 'max' keyword for ``String``/``VARCHAR``:: @compiles(String, 'mssql') @compiles(VARCHAR, 'mssql') @@ -248,10 +260,10 @@ A synopsis is as follows: ``execute_at()`` method, allowing the construct to be invoked during CREATE TABLE and DROP TABLE sequences. -* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which should be - used with any expression class that represents a "standalone" SQL statement that - can be passed directly to an ``execute()`` method. It is already implicit - within ``DDLElement`` and ``FunctionElement``. +* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which + should be used with any expression class that represents a "standalone" + SQL statement that can be passed directly to an ``execute()`` method. It + is already implicit within ``DDLElement`` and ``FunctionElement``. Further Examples ================ @@ -259,12 +271,13 @@ Further Examples "UTC timestamp" function ------------------------- -A function that works like "CURRENT_TIMESTAMP" except applies the appropriate conversions -so that the time is in UTC time. Timestamps are best stored in relational databases -as UTC, without time zones. UTC so that your database doesn't think time has gone -backwards in the hour when daylight savings ends, without timezones because timezones -are like character encodings - they're best applied only at the endpoints of an -application (i.e. convert to UTC upon user input, re-apply desired timezone upon display). +A function that works like "CURRENT_TIMESTAMP" except applies the +appropriate conversions so that the time is in UTC time. Timestamps are best +stored in relational databases as UTC, without time zones. 
UTC so that your +database doesn't think time has gone backwards in the hour when daylight +savings ends, without timezones because timezones are like character +encodings - they're best applied only at the endpoints of an application +(i.e. convert to UTC upon user input, re-apply desired timezone upon display). For Postgresql and Microsoft SQL Server:: @@ -298,10 +311,10 @@ Example usage:: "GREATEST" function ------------------- -The "GREATEST" function is given any number of arguments and returns the one that is -of the highest value - it's equivalent to Python's ``max`` function. A SQL -standard version versus a CASE based version which only accommodates two -arguments:: +The "GREATEST" function is given any number of arguments and returns the one +that is of the highest value - it's equivalent to Python's ``max`` +function. A SQL standard version versus a CASE based version which only +accommodates two arguments:: from sqlalchemy.sql import expression from sqlalchemy.ext.compiler import compiles @@ -339,7 +352,8 @@ Example usage:: "false" expression ------------------ -Render a "false" constant expression, rendering as "0" on platforms that don't have a "false" constant:: +Render a "false" constant expression, rendering as "0" on platforms that +don't have a "false" constant:: from sqlalchemy.sql import expression from sqlalchemy.ext.compiler import compiles @@ -370,6 +384,7 @@ Example usage:: from .. import exc from ..sql import visitors + def compiles(class_, *specs): """Register a function as a compiler for a given :class:`.ClauseElement` type.""" @@ -384,7 +399,8 @@ def compiles(class_, *specs): existing.specs['default'] = existing_dispatch # TODO: why is the lambda needed ? 
- setattr(class_, '_compiler_dispatch', lambda *arg, **kw: existing(*arg, **kw)) + setattr(class_, '_compiler_dispatch', + lambda *arg, **kw: existing(*arg, **kw)) setattr(class_, '_compiler_dispatcher', existing) if specs: @@ -396,6 +412,7 @@ def compiles(class_, *specs): return fn return decorate + def deregister(class_): """Remove all custom compilers associated with a given :class:`.ClauseElement` type.""" @@ -422,4 +439,3 @@ class _dispatcher(object): "%s construct has no default " "compilation handler." % type(element)) return fn(element, compiler, **kw) - diff --git a/lib/sqlalchemy/ext/declarative/__init__.py b/lib/sqlalchemy/ext/declarative/__init__.py index bf6e6786e..1fb3feb6a 100644 --- a/lib/sqlalchemy/ext/declarative/__init__.py +++ b/lib/sqlalchemy/ext/declarative/__init__.py @@ -51,7 +51,8 @@ assigned. To name columns explicitly with a name distinct from their mapped attribute, just give the column a name. Below, column "some_table_id" is mapped to the -"id" attribute of `SomeClass`, but in SQL will be represented as "some_table_id":: +"id" attribute of `SomeClass`, but in SQL will be represented as +"some_table_id":: class SomeClass(Base): __tablename__ = 'some_table' @@ -312,7 +313,8 @@ such as those which already take advantage of the data-driven nature of Note that when the ``__table__`` approach is used, the object is immediately usable as a plain :class:`.Table` within the class declaration body itself, as a Python class is only another syntactical block. 
Below this is illustrated -by using the ``id`` column in the ``primaryjoin`` condition of a :func:`.relationship`:: +by using the ``id`` column in the ``primaryjoin`` condition of a +:func:`.relationship`:: class MyClass(Base): __table__ = Table('my_table', Base.metadata, @@ -324,8 +326,8 @@ by using the ``id`` column in the ``primaryjoin`` condition of a :func:`.relatio primaryjoin=Widget.myclass_id==__table__.c.id) Similarly, mapped attributes which refer to ``__table__`` can be placed inline, -as below where we assign the ``name`` column to the attribute ``_name``, generating -a synonym for ``name``:: +as below where we assign the ``name`` column to the attribute ``_name``, +generating a synonym for ``name``:: from sqlalchemy.ext.declarative import synonym_for @@ -383,9 +385,9 @@ Mapper Configuration Declarative makes use of the :func:`~.orm.mapper` function internally when it creates the mapping to the declared table. The options -for :func:`~.orm.mapper` are passed directly through via the ``__mapper_args__`` -class attribute. As always, arguments which reference locally -mapped columns can reference them directly from within the +for :func:`~.orm.mapper` are passed directly through via the +``__mapper_args__`` class attribute. As always, arguments which reference +locally mapped columns can reference them directly from within the class declaration:: from datetime import datetime @@ -521,8 +523,8 @@ In a situation like this, Declarative can't be sure of the intent, especially if the ``start_date`` columns had, for example, different types. 
A situation like this can be resolved by using :class:`.declared_attr` to define the :class:`.Column` conditionally, taking -care to return the **existing column** via the parent ``__table__`` if it already -exists:: +care to return the **existing column** via the parent ``__table__`` if it +already exists:: from sqlalchemy.ext.declarative import declared_attr @@ -654,12 +656,13 @@ Using the Concrete Helpers ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Helper classes provides a simpler pattern for concrete inheritance. -With these objects, the ``__declare_last__`` helper is used to configure the "polymorphic" -loader for the mapper after all subclasses have been declared. +With these objects, the ``__declare_last__`` helper is used to configure the +"polymorphic" loader for the mapper after all subclasses have been declared. .. versionadded:: 0.7.3 -An abstract base can be declared using the :class:`.AbstractConcreteBase` class:: +An abstract base can be declared using the +:class:`.AbstractConcreteBase` class:: from sqlalchemy.ext.declarative import AbstractConcreteBase @@ -757,8 +760,8 @@ Augmenting the Base In addition to using a pure mixin, most of the techniques in this section can also be applied to the base class itself, for patterns that -should apply to all classes derived from a particular base. This -is achieved using the ``cls`` argument of the :func:`.declarative_base` function:: +should apply to all classes derived from a particular base. This is achieved +using the ``cls`` argument of the :func:`.declarative_base` function:: from sqlalchemy.ext.declarative import declared_attr @@ -778,9 +781,9 @@ is achieved using the ``cls`` argument of the :func:`.declarative_base` function class MyModel(Base): name = Column(String(1000)) -Where above, ``MyModel`` and all other classes that derive from ``Base`` will have -a table name derived from the class name, an ``id`` primary key column, as well as -the "InnoDB" engine for MySQL. 
+Where above, ``MyModel`` and all other classes that derive from ``Base`` will +have a table name derived from the class name, an ``id`` primary key column, +as well as the "InnoDB" engine for MySQL. Mixing in Columns ~~~~~~~~~~~~~~~~~ @@ -840,7 +843,8 @@ extension can use the resulting :class:`.Column` object as returned by the method without the need to copy it. .. versionchanged:: > 0.6.5 - Rename 0.6.5 ``sqlalchemy.util.classproperty`` into :class:`~.declared_attr`. + Rename 0.6.5 ``sqlalchemy.util.classproperty`` + into :class:`~.declared_attr`. Columns generated by :class:`~.declared_attr` can also be referenced by ``__mapper_args__`` to a limited degree, currently @@ -933,12 +937,13 @@ Mixing in Association Proxy and Other Attributes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Mixins can specify user-defined attributes as well as other extension -units such as :func:`.association_proxy`. The usage of :class:`.declared_attr` -is required in those cases where the attribute must be tailored specifically -to the target subclass. An example is when constructing multiple -:func:`.association_proxy` attributes which each target a different type -of child object. Below is an :func:`.association_proxy` / mixin example -which provides a scalar list of string values to an implementing class:: +units such as :func:`.association_proxy`. The usage of +:class:`.declared_attr` is required in those cases where the attribute must +be tailored specifically to the target subclass. An example is when +constructing multiple :func:`.association_proxy` attributes which each +target a different type of child object. 
Below is an +:func:`.association_proxy` / mixin example which provides a scalar list of +string values to an implementing class:: from sqlalchemy import Column, Integer, ForeignKey, String from sqlalchemy.orm import relationship @@ -1138,8 +1143,8 @@ Creating Indexes with Mixins ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To define a named, potentially multicolumn :class:`.Index` that applies to all -tables derived from a mixin, use the "inline" form of :class:`.Index` and establish -it as part of ``__table_args__``:: +tables derived from a mixin, use the "inline" form of :class:`.Index` and +establish it as part of ``__table_args__``:: class MyMixin(object): a = Column(Integer) @@ -1160,9 +1165,9 @@ Special Directives ~~~~~~~~~~~~~~~~~~~~~~ The ``__declare_last__()`` hook allows definition of -a class level function that is automatically called by the :meth:`.MapperEvents.after_configured` -event, which occurs after mappings are assumed to be completed and the 'configure' step -has finished:: +a class level function that is automatically called by the +:meth:`.MapperEvents.after_configured` event, which occurs after mappings are +assumed to be completed and the 'configure' step has finished:: class MyClass(Base): @classmethod @@ -1178,9 +1183,9 @@ has finished:: ~~~~~~~~~~~~~~~~~~~ ``__abstract__`` causes declarative to skip the production -of a table or mapper for the class entirely. A class can be added within a hierarchy -in the same way as mixin (see :ref:`declarative_mixins`), allowing subclasses to extend -just from the special class:: +of a table or mapper for the class entirely. 
A class can be added within a +hierarchy in the same way as mixin (see :ref:`declarative_mixins`), allowing +subclasses to extend just from the special class:: class SomeAbstractBase(Base): __abstract__ = True @@ -1195,8 +1200,8 @@ just from the special class:: class MyMappedClass(SomeAbstractBase): "" -One possible use of ``__abstract__`` is to use a distinct :class:`.MetaData` for different -bases:: +One possible use of ``__abstract__`` is to use a distinct +:class:`.MetaData` for different bases:: Base = declarative_base() @@ -1208,9 +1213,10 @@ bases:: __abstract__ = True metadata = MetaData() -Above, classes which inherit from ``DefaultBase`` will use one :class:`.MetaData` as the -registry of tables, and those which inherit from ``OtherBase`` will use a different one. -The tables themselves can then be created perhaps within distinct databases:: +Above, classes which inherit from ``DefaultBase`` will use one +:class:`.MetaData` as the registry of tables, and those which inherit from +``OtherBase`` will use a different one. The tables themselves can then be +created perhaps within distinct databases:: DefaultBase.metadata.create_all(some_engine) OtherBase.metadata_create_all(some_other_engine) diff --git a/lib/sqlalchemy/ext/declarative/api.py b/lib/sqlalchemy/ext/declarative/api.py index 0db3f4e6b..b4b6f733b 100644 --- a/lib/sqlalchemy/ext/declarative/api.py +++ b/lib/sqlalchemy/ext/declarative/api.py @@ -33,6 +33,7 @@ def instrument_declarative(cls, registry, metadata): cls.metadata = metadata _as_declarative(cls, cls.__name__, cls.__dict__) + def has_inherited_table(cls): """Given a class, return True if any of the classes it inherits from has a mapped table, otherwise return False. 
@@ -42,6 +43,7 @@ def has_inherited_table(cls): return True return False + class DeclarativeMeta(type): def __init__(cls, classname, bases, dict_): if '_decl_class_registry' not in cls.__dict__: @@ -51,6 +53,7 @@ class DeclarativeMeta(type): def __setattr__(cls, key, value): _add_attribute(cls, key, value) + def synonym_for(name, map_column=False): """Decorator, make a Python @property a query synonym for a column. @@ -73,6 +76,7 @@ def synonym_for(name, map_column=False): return _orm_synonym(name, map_column=map_column, descriptor=fn) return decorate + def comparable_using(comparator_factory): """Decorator, allow a Python @property to be used in query criteria. @@ -95,6 +99,7 @@ def comparable_using(comparator_factory): return comparable_property(comparator_factory, fn) return decorate + class declared_attr(interfaces._MappedAttribute, property): """Mark a class-level method as representing the definition of a mapped property or special declarative member name. @@ -154,6 +159,7 @@ class declared_attr(interfaces._MappedAttribute, property): def __get__(desc, self, cls): return desc.fget(cls) + def declarative_base(bind=None, metadata=None, mapper=None, cls=object, name='Base', constructor=_declarative_constructor, class_registry=None, @@ -231,6 +237,7 @@ def declarative_base(bind=None, metadata=None, mapper=None, cls=object, return metaclass(name, bases, class_dict) + class ConcreteBase(object): """A helper class for 'concrete' declarative mappings. @@ -285,6 +292,7 @@ class ConcreteBase(object): m._set_with_polymorphic(("*", pjoin)) m._set_polymorphic_on(pjoin.c.type) + class AbstractConcreteBase(ConcreteBase): """A helper class for 'concrete' declarative mappings. @@ -362,7 +370,8 @@ class DeferredReflection(object): method is called which first reflects all :class:`.Table` objects created so far. 
Classes can define it as such:: - from sqlalchemy.ext.declarative import declarative_base, DeferredReflection + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.ext.declarative import DeferredReflection Base = declarative_base() class MyClass(DeferredReflection, Base): @@ -370,7 +379,8 @@ class DeferredReflection(object): Above, ``MyClass`` is not yet mapped. After a series of classes have been defined in the above fashion, all tables - can be reflected and mappings created using :meth:`.DeferredReflection.prepare`:: + can be reflected and mappings created using + :meth:`.DeferredReflection.prepare`:: engine = create_engine("someengine://...") DeferredReflection.prepare(engine) diff --git a/lib/sqlalchemy/ext/declarative/base.py b/lib/sqlalchemy/ext/declarative/base.py index 8e8f5626c..954a9abfe 100644 --- a/lib/sqlalchemy/ext/declarative/base.py +++ b/lib/sqlalchemy/ext/declarative/base.py @@ -15,6 +15,7 @@ from ...sql import expression from ... import event from . import clsregistry + def _declared_mapping_info(cls): # deferred mapping if cls in _MapperConfig.configs: @@ -192,7 +193,8 @@ def _as_declarative(cls, classname, dict_): # in multi-column ColumnProperties. 
if key == c.key: del our_stuff[key] - declared_columns = sorted(declared_columns, key=lambda c: c._creation_order) + declared_columns = sorted( + declared_columns, key=lambda c: c._creation_order) table = None if hasattr(cls, '__table_cls__'): @@ -217,9 +219,10 @@ def _as_declarative(cls, classname, dict_): if autoload: table_kw['autoload'] = True - cls.__table__ = table = table_cls(tablename, cls.metadata, - *(tuple(declared_columns) + tuple(args)), - **table_kw) + cls.__table__ = table = table_cls( + tablename, cls.metadata, + *(tuple(declared_columns) + tuple(args)), + **table_kw) else: table = cls.__table__ if declared_columns: @@ -291,6 +294,7 @@ def _as_declarative(cls, classname, dict_): if not hasattr(cls, '_sa_decl_prepare'): mt.map() + class _MapperConfig(object): configs = util.OrderedDict() mapped_table = None @@ -375,6 +379,7 @@ class _MapperConfig(object): **mapper_args ) + def _add_attribute(cls, key, value): """add an attribute to an existing declarative class. @@ -395,14 +400,15 @@ def _add_attribute(cls, key, value): cls.__mapper__.add_property(key, value) elif isinstance(value, MapperProperty): cls.__mapper__.add_property( - key, - clsregistry._deferred_relationship(cls, value) - ) + key, + clsregistry._deferred_relationship(cls, value) + ) else: type.__setattr__(cls, key, value) else: type.__setattr__(cls, key, value) + def _declarative_constructor(self, **kwargs): """A simple constructor that allows initialization from kwargs. diff --git a/lib/sqlalchemy/ext/declarative/clsregistry.py b/lib/sqlalchemy/ext/declarative/clsregistry.py index 47450c5b7..a0e177f77 100644 --- a/lib/sqlalchemy/ext/declarative/clsregistry.py +++ b/lib/sqlalchemy/ext/declarative/clsregistry.py @@ -22,6 +22,7 @@ import weakref # themselves when all references to contained classes are removed. _registries = set() + def add_class(classname, cls): """Add a class to the _decl_class_registry associated with the given declarative class. 
@@ -111,6 +112,7 @@ class _MultipleClassMarker(object): ) self.contents.add(weakref.ref(item, self._remove_item)) + class _ModuleMarker(object): """"refers to a module name within _decl_class_registry. @@ -160,7 +162,6 @@ class _ModuleMarker(object): on_remove=lambda: self._remove_item(name)) - class _ModNS(object): def __init__(self, parent): self.__parent = parent @@ -180,6 +181,7 @@ class _ModNS(object): raise AttributeError("Module %r has no mapped classes " "registered under the name %r" % (self.__parent.name, key)) + class _GetColumns(object): def __init__(self, cls): self.cls = cls @@ -200,6 +202,7 @@ class _GetColumns(object): " directly to a Column)." % key) return getattr(self.cls, key) + class _GetTable(object): def __init__(self, key, metadata): self.key = key @@ -210,11 +213,13 @@ class _GetTable(object): _get_table_key(key, self.key) ] + def _determine_container(key, value): if isinstance(value, _MultipleClassMarker): value = value.attempt_get([], key) return _GetColumns(value) + def _resolver(cls, prop): def resolve_arg(arg): import sqlalchemy @@ -232,11 +237,13 @@ def _resolver(cls, prop): return _GetTable(key, cls.metadata) elif '_sa_module_registry' in cls._decl_class_registry and \ key in cls._decl_class_registry['_sa_module_registry']: - return cls._decl_class_registry['_sa_module_registry'].resolve_attr(key) + registry = cls._decl_class_registry['_sa_module_registry'] + return registry.resolve_attr(key) else: return fallback[key] d = util.PopulateDict(access_cls) + def return_cls(): try: x = eval(arg, globals(), d) @@ -256,6 +263,7 @@ def _resolver(cls, prop): return return_cls return resolve_arg + def _deferred_relationship(cls, prop): if isinstance(prop, RelationshipProperty): diff --git a/lib/sqlalchemy/ext/horizontal_shard.py b/lib/sqlalchemy/ext/horizontal_shard.py index f55cfae0a..1edc4d4c2 100644 --- a/lib/sqlalchemy/ext/horizontal_shard.py +++ b/lib/sqlalchemy/ext/horizontal_shard.py @@ -14,13 +14,13 @@ the source distribution. 
""" -from .. import exc as sa_exc from .. import util from ..orm.session import Session from ..orm.query import Query __all__ = ['ShardedSession', 'ShardedQuery'] + class ShardedQuery(Query): def __init__(self, *args, **kwargs): super(ShardedQuery, self).__init__(*args, **kwargs) @@ -72,28 +72,29 @@ class ShardedQuery(Query): else: return None + class ShardedSession(Session): def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None, query_cls=ShardedQuery, **kwargs): """Construct a ShardedSession. - :param shard_chooser: A callable which, passed a Mapper, a mapped instance, and possibly a - SQL clause, returns a shard ID. This id may be based off of the - attributes present within the object, or on some round-robin - scheme. If the scheme is based on a selection, it should set - whatever state on the instance to mark it in the future as + :param shard_chooser: A callable which, passed a Mapper, a mapped + instance, and possibly a SQL clause, returns a shard ID. This id + may be based off of the attributes present within the object, or on + some round-robin scheme. If the scheme is based on a selection, it + should set whatever state on the instance to mark it in the future as participating in that shard. - :param id_chooser: A callable, passed a query and a tuple of identity values, which - should return a list of shard ids where the ID might reside. The - databases will be queried in the order of this listing. + :param id_chooser: A callable, passed a query and a tuple of identity + values, which should return a list of shard ids where the ID might + reside. The databases will be queried in the order of this listing. - :param query_chooser: For a given Query, returns the list of shard_ids where the query - should be issued. Results from all shards returned will be combined - together into a single listing. + :param query_chooser: For a given Query, returns the list of shard_ids + where the query should be issued. 
Results from all shards returned + will be combined together into a single listing. - :param shards: A dictionary of string shard names to :class:`~sqlalchemy.engine.Engine` - objects. + :param shards: A dictionary of string shard names + to :class:`~sqlalchemy.engine.Engine` objects. """ super(ShardedSession, self).__init__(query_cls=query_cls, **kwargs) @@ -117,12 +118,11 @@ class ShardedSession(Session): shard_id=shard_id, instance=instance).contextual_connect(**kwargs) - def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw): + def get_bind(self, mapper, shard_id=None, + instance=None, clause=None, **kw): if shard_id is None: shard_id = self.shard_chooser(mapper, instance, clause=clause) return self.__binds[shard_id] def bind_shard(self, shard_id, bind): self.__binds[shard_id] = bind - - diff --git a/lib/sqlalchemy/ext/hybrid.py b/lib/sqlalchemy/ext/hybrid.py index 57d39866c..047b2ff95 100644 --- a/lib/sqlalchemy/ext/hybrid.py +++ b/lib/sqlalchemy/ext/hybrid.py @@ -240,8 +240,8 @@ The above hybrid property ``balance`` works with the first in-Python getter/setter methods can treat ``accounts`` as a Python list available on ``self``. -However, at the expression level, it's expected that the ``User`` class will be used -in an appropriate context such that an appropriate join to +However, at the expression level, it's expected that the ``User`` class will +be used in an appropriate context such that an appropriate join to ``SavingsAccount`` will be present:: >>> print Session().query(User, User.balance).\\ @@ -268,11 +268,10 @@ Correlated Subquery Relationship Hybrid ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ We can, of course, forego being dependent on the enclosing query's usage -of joins in favor of the correlated -subquery, which can portably be packed into a single colunn expression. -A correlated subquery is more portable, but often performs more poorly -at the SQL level. 
-Using the same technique illustrated at :ref:`mapper_column_property_sql_expressions`, +of joins in favor of the correlated subquery, which can portably be packed +into a single colunn expression. A correlated subquery is more portable, but +often performs more poorly at the SQL level. Using the same technique +illustrated at :ref:`mapper_column_property_sql_expressions`, we can adjust our ``SavingsAccount`` example to aggregate the balances for *all* accounts, and use a correlated subquery for the column expression:: @@ -629,6 +628,7 @@ there's probably a whole lot of amazing things it can be used for. from .. import util from ..orm import attributes, interfaces + class hybrid_method(object): """A decorator which allows definition of a Python object method with both instance-level and class-level behavior. @@ -668,6 +668,7 @@ class hybrid_method(object): self.expr = expr return self + class hybrid_property(object): """A decorator which allows definition of a Python descriptor with both instance-level and class-level behavior. @@ -750,6 +751,7 @@ class hybrid_property(object): self.expr = expr return self + class Comparator(interfaces.PropComparator): """A helper class that allows easy construction of custom :class:`~.orm.interfaces.PropComparator` diff --git a/lib/sqlalchemy/ext/instrumentation.py b/lib/sqlalchemy/ext/instrumentation.py index f840ad066..bb44a492c 100644 --- a/lib/sqlalchemy/ext/instrumentation.py +++ b/lib/sqlalchemy/ext/instrumentation.py @@ -61,6 +61,7 @@ attribute. """ + def find_native_user_instrumentation_hook(cls): """Find user-specified instrumentation management for a class.""" return getattr(cls, INSTRUMENTATION_MANAGER, None) @@ -81,6 +82,7 @@ ClassManager instrumentation is used. 
""" + class ExtendedInstrumentationRegistry(InstrumentationFactory): """Extends :class:`.InstrumentationFactory` with additional bookkeeping, to accommodate multiple types of @@ -169,17 +171,21 @@ class ExtendedInstrumentationRegistry(InstrumentationFactory): def state_of(self, instance): if instance is None: raise AttributeError("None has no persistent state.") - return self._state_finders.get(instance.__class__, _default_state_getter)(instance) + return self._state_finders.get( + instance.__class__, _default_state_getter)(instance) def dict_of(self, instance): if instance is None: raise AttributeError("None has no persistent state.") - return self._dict_finders.get(instance.__class__, _default_dict_getter)(instance) + return self._dict_finders.get( + instance.__class__, _default_dict_getter)(instance) + orm_instrumentation._instrumentation_factory = \ _instrumentation_factory = ExtendedInstrumentationRegistry() orm_instrumentation.instrumentation_finders = instrumentation_finders + class InstrumentationManager(object): """User-defined class instrumentation extension. 
@@ -259,6 +265,7 @@ class InstrumentationManager(object): def dict_getter(self, class_): return lambda inst: self.get_instance_dict(class_, inst) + class _ClassInstrumentationAdapter(ClassManager): """Adapts a user-defined InstrumentationManager to a ClassManager.""" @@ -353,6 +360,7 @@ class _ClassInstrumentationAdapter(ClassManager): def dict_getter(self): return self._get_dict + def _install_instrumented_lookups(): """Replace global class/object management functions with ExtendedInstrumentationRegistry implementations, which @@ -368,22 +376,24 @@ def _install_instrumented_lookups(): """ _install_lookups( dict( - instance_state = _instrumentation_factory.state_of, - instance_dict = _instrumentation_factory.dict_of, - manager_of_class = _instrumentation_factory.manager_of_class + instance_state=_instrumentation_factory.state_of, + instance_dict=_instrumentation_factory.dict_of, + manager_of_class=_instrumentation_factory.manager_of_class ) ) + def _reinstall_default_lookups(): """Restore simplified lookups.""" _install_lookups( dict( - instance_state = _default_state_getter, - instance_dict = _default_dict_getter, - manager_of_class = _default_manager_getter + instance_state=_default_state_getter, + instance_dict=_default_dict_getter, + manager_of_class=_default_manager_getter ) ) + def _install_lookups(lookups): global instance_state, instance_dict, manager_of_class instance_state = lookups['instance_state'] diff --git a/lib/sqlalchemy/ext/mutable.py b/lib/sqlalchemy/ext/mutable.py index db05a82b4..36d60d6d5 100644 --- a/lib/sqlalchemy/ext/mutable.py +++ b/lib/sqlalchemy/ext/mutable.py @@ -7,13 +7,13 @@ """Provide support for tracking of in-place changes to scalar values, which are propagated into ORM change events on owning parent objects. 
-The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy approach to in-place -mutations of scalar values, established by the :class:`.types.MutableType` -class as well as the ``mutable=True`` type flag, with a system that allows -change events to be propagated from the value to the owning parent, thereby -removing the need for the ORM to maintain copies of values as well as the very -expensive requirement of scanning through all "mutable" values on each flush -call, looking for changes. +The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy +approach to in-place mutations of scalar values, established by the +:class:`.types.MutableType` class as well as the ``mutable=True`` type flag, +with a system that allows change events to be propagated from the value to +the owning parent, thereby removing the need for the ORM to maintain copies +of values as well as the very expensive requirement of scanning through all +"mutable" values on each flush call, looking for changes. .. _mutable_scalars: @@ -43,8 +43,8 @@ JSON strings before being persisted:: value = json.loads(value) return value -The usage of ``json`` is only for the purposes of example. The :mod:`sqlalchemy.ext.mutable` -extension can be used +The usage of ``json`` is only for the purposes of example. The +:mod:`sqlalchemy.ext.mutable` extension can be used with any type whose target Python type may be mutable, including :class:`.PickleType`, :class:`.postgresql.ARRAY`, etc. @@ -86,19 +86,19 @@ The above dictionary class takes the approach of subclassing the Python built-in ``dict`` to produce a dict subclass which routes all mutation events through ``__setitem__``. There are variants on this approach, such as subclassing ``UserDict.UserDict`` or -``collections.MutableMapping``; the part that's important to this -example is that the :meth:`.Mutable.changed` method is called whenever an in-place change to the -datastructure takes place. 
+``collections.MutableMapping``; the part that's important to this example is +that the :meth:`.Mutable.changed` method is called whenever an in-place +change to the datastructure takes place. We also redefine the :meth:`.Mutable.coerce` method which will be used to convert any values that are not instances of ``MutableDict``, such as the plain dictionaries returned by the ``json`` module, into the -appropriate type. Defining this method is optional; we could just as well created our -``JSONEncodedDict`` such that it always returns an instance of ``MutableDict``, -and additionally ensured that all calling code uses ``MutableDict`` -explicitly. When :meth:`.Mutable.coerce` is not overridden, any values -applied to a parent object which are not instances of the mutable type -will raise a ``ValueError``. +appropriate type. Defining this method is optional; we could just as well +created our ``JSONEncodedDict`` such that it always returns an instance +of ``MutableDict``, and additionally ensured that all calling code +uses ``MutableDict`` explicitly. When :meth:`.Mutable.coerce` is not +overridden, any values applied to a parent object which are not instances +of the mutable type will raise a ``ValueError``. Our new ``MutableDict`` type offers a class method :meth:`~.Mutable.as_mutable` which we can use within column metadata @@ -156,9 +156,10 @@ will flag the attribute as "dirty" on the parent object:: True The ``MutableDict`` can be associated with all future instances -of ``JSONEncodedDict`` in one step, using :meth:`~.Mutable.associate_with`. This -is similar to :meth:`~.Mutable.as_mutable` except it will intercept -all occurrences of ``MutableDict`` in all mappings unconditionally, without +of ``JSONEncodedDict`` in one step, using +:meth:`~.Mutable.associate_with`. 
This is similar to +:meth:`~.Mutable.as_mutable` except it will intercept all occurrences +of ``MutableDict`` in all mappings unconditionally, without the need to declare it individually:: MutableDict.associate_with(JSONEncodedDict) @@ -330,11 +331,14 @@ from ..orm.attributes import flag_modified from .. import event, types from ..orm import mapper, object_mapper from ..util import memoized_property -from .. import exc import weakref + class MutableBase(object): - """Common base class to :class:`.Mutable` and :class:`.MutableComposite`.""" + """Common base class to :class:`.Mutable` + and :class:`.MutableComposite`. + + """ @memoized_property def _parents(self): @@ -356,7 +360,8 @@ class MutableBase(object): """ if value is None: return None - raise ValueError("Attribute '%s' does not accept objects of type %s" % (key, type(value))) + msg = "Attribute '%s' does not accept objects of type %s" + raise ValueError(msg % (key, type(value))) @classmethod def _listen_on_attribute(cls, attribute, coerce, parent_cls): @@ -414,12 +419,17 @@ class MutableBase(object): for val in state_dict['ext.mutable.values']: val._parents[state.obj()] = key + event.listen(parent_cls, 'load', load, + raw=True, propagate=True) + event.listen(parent_cls, 'refresh', load, + raw=True, propagate=True) + event.listen(attribute, 'set', set, + raw=True, retval=True, propagate=True) + event.listen(parent_cls, 'pickle', pickle, + raw=True, propagate=True) + event.listen(parent_cls, 'unpickle', unpickle, + raw=True, propagate=True) - event.listen(parent_cls, 'load', load, raw=True, propagate=True) - event.listen(parent_cls, 'refresh', load, raw=True, propagate=True) - event.listen(attribute, 'set', set, raw=True, retval=True, propagate=True) - event.listen(parent_cls, 'pickle', pickle, raw=True, propagate=True) - event.listen(parent_cls, 'unpickle', unpickle, raw=True, propagate=True) class Mutable(MutableBase): """Mixin that defines transparent propagation of change @@ -448,15 +458,16 @@ class 
Mutable(MutableBase): """Associate this wrapper with all future mapped columns of the given type. - This is a convenience method that calls ``associate_with_attribute`` automatically. + This is a convenience method that calls + ``associate_with_attribute`` automatically. .. warning:: The listeners established by this method are *global* to all mappers, and are *not* garbage collected. Only use - :meth:`.associate_with` for types that are permanent to an application, - not with ad-hoc types else this will cause unbounded growth - in memory usage. + :meth:`.associate_with` for types that are permanent to an + application, not with ad-hoc types else this will cause unbounded + growth in memory usage. """ @@ -483,8 +494,8 @@ class Mutable(MutableBase): ) Note that the returned type is always an instance, even if a class - is given, and that only columns which are declared specifically with that - type instance receive additional instrumentation. + is given, and that only columns which are declared specifically with + that type instance receive additional instrumentation. To associate a particular mutable type with all occurrences of a particular type, use the :meth:`.Mutable.associate_with` classmethod @@ -511,11 +522,13 @@ class Mutable(MutableBase): return sqltype + class _MutableCompositeMeta(type): def __init__(cls, classname, bases, dict_): cls._setup_listeners() return type.__init__(cls, classname, bases, dict_) + class MutableComposite(MutableBase): """Mixin that defines transparent propagation of change events on a SQLAlchemy "composite" object to its @@ -526,10 +539,10 @@ class MutableComposite(MutableBase): .. warning:: The listeners established by the :class:`.MutableComposite` - class are *global* to all mappers, and are *not* garbage collected. Only use - :class:`.MutableComposite` for types that are permanent to an application, - not with ad-hoc types else this will cause unbounded growth - in memory usage. 
+ class are *global* to all mappers, and are *not* garbage + collected. Only use :class:`.MutableComposite` for types that are + permanent to an application, not with ad-hoc types else this will + cause unbounded growth in memory usage. """ __metaclass__ = _MutableCompositeMeta @@ -550,19 +563,21 @@ class MutableComposite(MutableBase): """Associate this wrapper with all future mapped composites of the given type. - This is a convenience method that calls ``associate_with_attribute`` automatically. + This is a convenience method that calls ``associate_with_attribute`` + automatically. """ def listen_for_type(mapper, class_): for prop in mapper.iterate_properties: - if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls): - cls._listen_on_attribute(getattr(class_, prop.key), False, class_) + if (hasattr(prop, 'composite_class') and + issubclass(prop.composite_class, cls)): + cls._listen_on_attribute( + getattr(class_, prop.key), False, class_) event.listen(mapper, 'mapper_configured', listen_for_type) - class MutableDict(Mutable, dict): """A dictionary type that implements :class:`.Mutable`. diff --git a/lib/sqlalchemy/ext/orderinglist.py b/lib/sqlalchemy/ext/orderinglist.py index 968c0a4a9..a2604c379 100644 --- a/lib/sqlalchemy/ext/orderinglist.py +++ b/lib/sqlalchemy/ext/orderinglist.py @@ -8,10 +8,11 @@ :author: Jason Kirtland -``orderinglist`` is a helper for mutable ordered relationships. It will intercept -list operations performed on a relationship collection and automatically -synchronize changes in list position with an attribute on the related objects. -(See :ref:`advdatamapping_entitycollections` for more information on the general pattern.) +``orderinglist`` is a helper for mutable ordered relationships. It will +intercept list operations performed on a relationship collection and +automatically synchronize changes in list position with an attribute on the +related objects. 
(See :ref:`advdatamapping_entitycollections` for more +information on the general pattern.) Example: Two tables that store slides in a presentation. Each slide has a number of bullet points, displayed in order by the 'position' @@ -41,15 +42,15 @@ affected rows when changes are made. }) mapper(Bullet, bullets_table) -The standard relationship mapping will produce a list-like attribute on each Slide -containing all related Bullets, but coping with changes in ordering is totally -your responsibility. If you insert a Bullet into that list, there is no -magic- it won't have a position attribute unless you assign it it one, and +The standard relationship mapping will produce a list-like attribute on each +Slide containing all related Bullets, but coping with changes in ordering is +totally your responsibility. If you insert a Bullet into that list, there is +no magic - it won't have a position attribute unless you assign it it one, and you'll need to manually renumber all the subsequent Bullets in the list to accommodate the insert. -An ``orderinglist`` can automate this and manage the 'position' attribute on all -related bullets for you. +An ``orderinglist`` can automate this and manage the 'position' attribute on +all related bullets for you. .. sourcecode:: python+sql @@ -69,18 +70,20 @@ related bullets for you. s.bullets[2].position >>> 2 -Use the ``ordering_list`` function to set up the ``collection_class`` on relationships -(as in the mapper example above). This implementation depends on the list -starting in the proper order, so be SURE to put an order_by on your relationship. +Use the ``ordering_list`` function to set up the ``collection_class`` on +relationships (as in the mapper example above). This implementation depends +on the list starting in the proper order, so be SURE to put an order_by on +your relationship. .. warning:: ``ordering_list`` only provides limited functionality when a primary - key column or unique column is the target of the sort. 
Since changing the order of - entries often means that two rows must trade values, this is not possible when - the value is constrained by a primary key or unique constraint, since one of the rows - would temporarily have to point to a third available value so that the other row - could take its old value. ``ordering_list`` doesn't do any of this for you, + key column or unique column is the target of the sort. Since changing the + order of entries often means that two rows must trade values, this is not + possible when the value is constrained by a primary key or unique + constraint, since one of the rows would temporarily have to point to a + third available value so that the other row could take its old + value. ``ordering_list`` doesn't do any of this for you, nor does SQLAlchemy itself. ``ordering_list`` takes the name of the related object's ordering attribute as @@ -100,14 +103,14 @@ index to any value you require. from ..orm.collections import collection from .. import util -__all__ = [ 'ordering_list' ] +__all__ = ['ordering_list'] def ordering_list(attr, count_from=None, **kw): """Prepares an OrderingList factory for use in mapper definitions. - Returns an object suitable for use as an argument to a Mapper relationship's - ``collection_class`` option. Arguments are: + Returns an object suitable for use as an argument to a Mapper + relationship's ``collection_class`` option. 
Arguments are: attr Name of the mapped attribute to use for storage and retrieval of @@ -125,17 +128,22 @@ def ordering_list(attr, count_from=None, **kw): kw = _unsugar_count_from(count_from=count_from, **kw) return lambda: OrderingList(attr, **kw) + # Ordering utility functions + + def count_from_0(index, collection): """Numbering function: consecutive integers starting at 0.""" return index + def count_from_1(index, collection): """Numbering function: consecutive integers starting at 1.""" return index + 1 + def count_from_n_factory(start): """Numbering function: consecutive integers starting at arbitrary start.""" @@ -147,6 +155,7 @@ def count_from_n_factory(start): pass return f + def _unsugar_count_from(**kw): """Builds counting functions from keyword arguments. @@ -164,6 +173,7 @@ def _unsugar_count_from(**kw): kw['ordering_func'] = count_from_n_factory(count_from) return kw + class OrderingList(list): """A custom list that manages position information for its children. @@ -188,9 +198,10 @@ class OrderingList(list): Name of the attribute that stores the object's order in the relationship. - :param ordering_func: Optional. A function that maps the position in the Python list to a - value to store in the ``ordering_attr``. Values returned are - usually (but need not be!) integers. + :param ordering_func: Optional. A function that maps the position in + the Python list to a value to store in the + ``ordering_attr``. Values returned are usually (but need not be!) + integers. An ``ordering_func`` is called with two positional parameters: the index of the element in the list, and the list itself. @@ -323,6 +334,7 @@ class OrderingList(list): func.__doc__ = getattr(list, func_name).__doc__ del func_name, func + def _reconstitute(cls, dict_, items): """ Reconstitute an ``OrderingList``. 
diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py index 8a5882107..3ed41f48a 100644 --- a/lib/sqlalchemy/ext/serializer.py +++ b/lib/sqlalchemy/ext/serializer.py @@ -39,18 +39,19 @@ The serializer module is only appropriate for query structures. It is not needed for: * instances of user-defined classes. These contain no references to engines, - sessions or expression constructs in the typical case and can be serialized directly. + sessions or expression constructs in the typical case and can be serialized + directly. -* Table metadata that is to be loaded entirely from the serialized structure (i.e. is - not already declared in the application). Regular pickle.loads()/dumps() can - be used to fully dump any ``MetaData`` object, typically one which was reflected - from an existing database at some previous point in time. The serializer module - is specifically for the opposite case, where the Table metadata is already present - in memory. +* Table metadata that is to be loaded entirely from the serialized structure + (i.e. is not already declared in the application). Regular + pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object, + typically one which was reflected from an existing database at some previous + point in time. The serializer module is specifically for the opposite case, + where the Table metadata is already present in memory. 
""" -from ..orm import class_mapper, Query +from ..orm import class_mapper from ..orm.session import Session from ..orm.mapper import Mapper from ..orm.attributes import QueryableAttribute @@ -78,7 +79,6 @@ b64decode = base64.b64decode __all__ = ['Serializer', 'Deserializer', 'dumps', 'loads'] - def Serializer(*args, **kw): pickler = pickle.Pickler(*args, **kw) @@ -107,6 +107,7 @@ def Serializer(*args, **kw): our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)') + def Deserializer(file, metadata=None, scoped_session=None, engine=None): unpickler = pickle.Unpickler(file) @@ -147,15 +148,15 @@ def Deserializer(file, metadata=None, scoped_session=None, engine=None): unpickler.persistent_load = persistent_load return unpickler + def dumps(obj, protocol=0): buf = byte_buffer() pickler = Serializer(buf, protocol) pickler.dump(obj) return buf.getvalue() + def loads(data, metadata=None, scoped_session=None, engine=None): buf = byte_buffer(data) unpickler = Deserializer(buf, metadata, scoped_session, engine) return unpickler.load() - - diff --git a/lib/sqlalchemy/inspection.py b/lib/sqlalchemy/inspection.py index f9c345dce..905ec1e4d 100644 --- a/lib/sqlalchemy/inspection.py +++ b/lib/sqlalchemy/inspection.py @@ -33,6 +33,7 @@ in a forwards-compatible way. from . import util, exc _registrars = util.defaultdict(list) + def inspect(subject, raiseerr=True): """Produce an inspection object for the given target. @@ -73,6 +74,7 @@ def inspect(subject, raiseerr=True): type_) return ret + def _inspects(*types): def decorate(fn_or_cls): for type_ in types: @@ -84,5 +86,6 @@ def _inspects(*types): return fn_or_cls return decorate + def _self_inspects(*types): - _inspects(*types)(True)
\ No newline at end of file + _inspects(*types)(True) diff --git a/lib/sqlalchemy/interfaces.py b/lib/sqlalchemy/interfaces.py index f904bdf31..4a06f05a8 100644 --- a/lib/sqlalchemy/interfaces.py +++ b/lib/sqlalchemy/interfaces.py @@ -14,6 +14,7 @@ event system. from . import event, util + class PoolListener(object): """Hooks into the lifecycle of connections in a :class:`.Pool`. @@ -89,7 +90,6 @@ class PoolListener(object): if hasattr(listener, 'checkin'): event.listen(self, 'checkin', listener.checkin) - def connect(self, dbapi_con, con_record): """Called once for each new DB-API connection or Pool's ``creator()``. @@ -148,6 +148,7 @@ class PoolListener(object): """ + class ConnectionProxy(object): """Allows interception of statement execution by Connections. @@ -161,11 +162,13 @@ class ConnectionProxy(object): cursor level executions, e.g.:: class MyProxy(ConnectionProxy): - def execute(self, conn, execute, clauseelement, *multiparams, **params): + def execute(self, conn, execute, clauseelement, + *multiparams, **params): print "compiled statement:", clauseelement return execute(clauseelement, *multiparams, **params) - def cursor_execute(self, execute, cursor, statement, parameters, context, executemany): + def cursor_execute(self, execute, cursor, statement, + parameters, context, executemany): print "raw statement:", statement return execute(cursor, statement, parameters, context) @@ -195,7 +198,7 @@ class ConnectionProxy(object): event.listen(self, 'before_execute', adapt_execute) def adapt_cursor_execute(conn, cursor, statement, - parameters,context, executemany, ): + parameters, context, executemany): def execute_wrapper( cursor, @@ -245,14 +248,13 @@ class ConnectionProxy(object): event.listen(self, 'commit_twophase', adapt_listener(listener.commit_twophase)) - def execute(self, conn, execute, clauseelement, *multiparams, **params): """Intercept high level execute() events.""" - return execute(clauseelement, *multiparams, **params) - def 
cursor_execute(self, execute, cursor, statement, parameters, context, executemany): + def cursor_execute(self, execute, cursor, statement, parameters, + context, executemany): """Intercept low-level cursor execute() events.""" return execute(cursor, statement, parameters, context) @@ -306,4 +308,3 @@ class ConnectionProxy(object): """Intercept commit_twophase() events.""" return commit_twophase(xid, is_prepared) - diff --git a/lib/sqlalchemy/log.py b/lib/sqlalchemy/log.py index 5370c6431..463ca972f 100644 --- a/lib/sqlalchemy/log.py +++ b/lib/sqlalchemy/log.py @@ -19,7 +19,6 @@ instance only. import logging import sys -from . import util # set initial level to WARN. This so that # log statements don't occur in the absense of explicit @@ -28,13 +27,17 @@ rootlogger = logging.getLogger('sqlalchemy') if rootlogger.level == logging.NOTSET: rootlogger.setLevel(logging.WARN) + def _add_default_handler(logger): handler = logging.StreamHandler(sys.stdout) handler.setFormatter(logging.Formatter( '%(asctime)s %(levelname)s %(name)s %(message)s')) logger.addHandler(handler) + _logged_classes = set() + + def class_logger(cls, enable=False): logger = logging.getLogger(cls.__module__ + "." + cls.__name__) if enable == 'debug': @@ -56,6 +59,7 @@ class Identified(object): def _should_log_info(self): return self.logger.isEnabledFor(logging.INFO) + class InstanceLogger(object): """A logger adapter (wrapper) for :class:`.Identified` subclasses. @@ -167,6 +171,7 @@ class InstanceLogger(object): level = self.logger.getEffectiveLevel() return level + def instance_logger(instance, echoflag=None): """create a logger for an instance that implements :class:`.Identified`.""" @@ -191,6 +196,7 @@ def instance_logger(instance, echoflag=None): instance.logger = logger + class echo_property(object): __doc__ = """\ When ``True``, enable log output for this element. 
diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index b8085ca3c..c69fabf14 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -165,6 +165,7 @@ def create_session(bind=None, **kwargs): kwargs.setdefault('expire_on_commit', False) return Session(bind=bind, **kwargs) + def relationship(argument, secondary=None, **kwargs): """Provide a relationship of a primary Mapper to a secondary Mapper. @@ -179,10 +180,10 @@ def relationship(argument, secondary=None, **kwargs): Some arguments accepted by :func:`.relationship` optionally accept a callable function, which when called produces the desired value. - The callable is invoked by the parent :class:`.Mapper` at "mapper initialization" - time, which happens only when mappers are first used, and is assumed - to be after all mappings have been constructed. This can be used - to resolve order-of-declaration and other dependency issues, such as + The callable is invoked by the parent :class:`.Mapper` at "mapper + initialization" time, which happens only when mappers are first used, and + is assumed to be after all mappings have been constructed. This can be + used to resolve order-of-declaration and other dependency issues, such as if ``Child`` is declared below ``Parent`` in the same file:: mapper(Parent, properties={ @@ -195,8 +196,8 @@ def relationship(argument, secondary=None, **kwargs): These string arguments are converted into callables that evaluate the string as Python code, using the Declarative class-registry as a namespace. 
This allows the lookup of related - classes to be automatic via their string name, and removes the need to import - related classes at all into the local module space:: + classes to be automatic via their string name, and removes the need to + import related classes at all into the local module space:: from sqlalchemy.ext.declarative import declarative_base @@ -211,8 +212,8 @@ def relationship(argument, secondary=None, **kwargs): :func:`.relationship` is at :ref:`relationship_config_toplevel`. :param argument: - a mapped class, or actual :class:`.Mapper` instance, representing the target of - the relationship. + a mapped class, or actual :class:`.Mapper` instance, representing the + target of the relationship. ``argument`` may also be passed as a callable function which is evaluated at mapper initialization time, and may be passed as a @@ -362,8 +363,8 @@ def relationship(argument, secondary=None, **kwargs): There are only two use cases for ``foreign_keys`` - one, when it is not convenient for :class:`.Table` metadata to contain its own foreign key - metadata (which should be almost never, unless reflecting a large amount of - tables from a MySQL MyISAM schema, or a schema that doesn't actually + metadata (which should be almost never, unless reflecting a large amount + of tables from a MySQL MyISAM schema, or a schema that doesn't actually have foreign keys on it). The other is for extremely rare and exotic composite foreign key setups where some columns should artificially not be considered as foreign. @@ -621,16 +622,19 @@ def relationship(argument, secondary=None, **kwargs): case, use an alternative method. .. versionchanged:: 0.6 - :func:`relationship` was renamed from its previous name :func:`relation`. + :func:`relationship` was renamed from its previous name + :func:`relation`. 
""" return RelationshipProperty(argument, secondary=secondary, **kwargs) + def relation(*arg, **kw): """A synonym for :func:`relationship`.""" return relationship(*arg, **kw) + def dynamic_loader(argument, **kw): """Construct a dynamically-loading mapper property. @@ -650,6 +654,7 @@ def dynamic_loader(argument, **kw): kw['lazy'] = 'dynamic' return relationship(argument, **kw) + def column_property(*cols, **kw): """Provide a column-level property for use with a Mapper. @@ -728,6 +733,7 @@ def column_property(*cols, **kw): return ColumnProperty(*cols, **kw) + def composite(class_, *cols, **kwargs): """Return a composite column-based property for use with a Mapper. @@ -784,8 +790,8 @@ def composite(class_, *cols, **kwargs): def backref(name, **kwargs): - """Create a back reference with explicit keyword arguments, which are the same - arguments one can send to :func:`relationship`. + """Create a back reference with explicit keyword arguments, which are the + same arguments one can send to :func:`relationship`. Used with the ``backref`` keyword argument to :func:`relationship` in place of a string argument, e.g.:: @@ -795,6 +801,7 @@ def backref(name, **kwargs): """ return (name, kwargs) + def deferred(*columns, **kwargs): """Return a :class:`.DeferredColumnProperty`, which indicates this object attributes should only be loaded from its corresponding @@ -809,6 +816,7 @@ def deferred(*columns, **kwargs): """ return ColumnProperty(deferred=True, *columns, **kwargs) + def mapper(class_, local_table=None, *args, **params): """Return a new :class:`~.Mapper` object. @@ -864,9 +872,9 @@ def mapper(class_, local_table=None, *args, **params): this mapper inherits from another mapper using single-table inheritance. When using Declarative, this argument is automatically passed by the extension, based on what - is configured via the ``__table__`` argument or via the :class:`.Table` - produced as a result of the ``__tablename__`` and :class:`.Column` - arguments present. 
+ is configured via the ``__table__`` argument or via the + :class:`.Table` produced as a result of the ``__tablename__`` + and :class:`.Column` arguments present. :param always_refresh: If True, all query operations for this mapped class will overwrite all data within object instances that already @@ -910,9 +918,9 @@ def mapper(class_, local_table=None, *args, **params): See :ref:`include_exclude_cols` for an example. :param extension: A :class:`.MapperExtension` instance or - list of :class:`.MapperExtension` - instances which will be applied to all operations by this - :class:`.Mapper`. **Deprecated.** Please see :class:`.MapperEvents`. + list of :class:`.MapperExtension` instances which will be applied + to all operations by this :class:`.Mapper`. **Deprecated.** + Please see :class:`.MapperEvents`. :param include_properties: An inclusive list or set of string column names to map. @@ -921,8 +929,8 @@ def mapper(class_, local_table=None, *args, **params): :param inherits: A mapped class or the corresponding :class:`.Mapper` of one indicating a superclass to which this :class:`.Mapper` - should *inherit* from. The mapped class here must be a subclass of the - other mapper's class. When using Declarative, this argument + should *inherit* from. The mapped class here must be a subclass + of the other mapper's class. When using Declarative, this argument is passed automatically as a result of the natural class hierarchy of the declared classes. @@ -955,8 +963,8 @@ def mapper(class_, local_table=None, *args, **params): ordering. :param passive_updates: Indicates UPDATE behavior of foreign key - columns when a primary key column changes on a joined-table inheritance - mapping. Defaults to ``True``. + columns when a primary key column changes on a joined-table + inheritance mapping. Defaults to ``True``. 
When True, it is assumed that ON UPDATE CASCADE is configured on the foreign key in the database, and that the database will handle @@ -1092,8 +1100,8 @@ def mapper(class_, local_table=None, *args, **params): that will be used to keep a running version id of mapped entities in the database. This is used during save operations to ensure that no other thread or process has updated the instance during the - lifetime of the entity, else a :class:`~sqlalchemy.orm.exc.StaleDataError` - exception is + lifetime of the entity, else a + :class:`~sqlalchemy.orm.exc.StaleDataError` exception is thrown. By default the column must be of :class:`.Integer` type, unless ``version_id_generator`` specifies a new generation algorithm. @@ -1139,6 +1147,7 @@ def mapper(class_, local_table=None, *args, **params): """ return Mapper(class_, local_table, *args, **params) + def synonym(name, map_column=False, descriptor=None, comparator_factory=None, doc=None): """Denote an attribute name as a synonym to a mapped property. @@ -1182,6 +1191,7 @@ def synonym(name, map_column=False, descriptor=None, comparator_factory=comparator_factory, doc=doc) + def comparable_property(comparator_factory, descriptor=None): """Provides a method of applying a :class:`.PropComparator` to any Python descriptor attribute. 
@@ -1222,8 +1232,8 @@ def comparable_property(comparator_factory, descriptor=None): id = Column(Integer, primary_key=True) word = Column(String) word_insensitive = comparable_property(lambda prop, mapper: - CaseInsensitiveComparator(mapper.c.word, mapper) - ) + CaseInsensitiveComparator(mapper.c.word, mapper) + ) A mapping like the above allows the ``word_insensitive`` attribute @@ -1246,13 +1256,17 @@ def comparable_property(comparator_factory, descriptor=None): """ return ComparableProperty(comparator_factory, descriptor) + @sa_util.deprecated("0.7", message=":func:`.compile_mappers` " "is renamed to :func:`.configure_mappers`") def compile_mappers(): - """Initialize the inter-mapper relationships of all mappers that have been defined.""" + """Initialize the inter-mapper relationships of all mappers that have + been defined. + """ configure_mappers() + def clear_mappers(): """Remove all mappers from all classes. @@ -1285,6 +1299,7 @@ def clear_mappers(): finally: mapperlib._CONFIGURE_MUTEX.release() + def joinedload(*keys, **kw): """Return a ``MapperOption`` that will convert the property of the given name or series of mapped attributes into an joined eager load. @@ -1324,12 +1339,13 @@ def joinedload(*keys, **kw): it **does not affect the query results**. An :meth:`.Query.order_by` or :meth:`.Query.filter` call **cannot** reference these aliased tables - so-called "user space" joins are constructed using - :meth:`.Query.join`. The rationale for this is that :func:`joinedload` is only - applied in order to affect how related objects or collections are loaded - as an optimizing detail - it can be added or removed with no impact - on actual results. See the section :ref:`zen_of_eager_loading` for - a detailed description of how this is used, including how to use a single - explicit JOIN for filtering/ordering and eager loading simultaneously. + :meth:`.Query.join`. 
The rationale for this is that + :func:`joinedload` is only applied in order to affect how related + objects or collections are loaded as an optimizing detail - it can be + added or removed with no impact on actual results. See the section + :ref:`zen_of_eager_loading` for a detailed description of how this is + used, including how to use a single explicit JOIN for + filtering/ordering and eager loading simultaneously. See also: :func:`subqueryload`, :func:`lazyload` @@ -1343,6 +1359,7 @@ def joinedload(*keys, **kw): else: return strategies.EagerLazyOption(keys, lazy='joined') + def joinedload_all(*keys, **kw): """Return a ``MapperOption`` that will convert all properties along the given dot-separated path or series of mapped attributes @@ -1360,8 +1377,8 @@ def joinedload_all(*keys, **kw): query.options(joinedload_all('orders.items.keywords'))... - will set all of ``orders``, ``orders.items``, and ``orders.items.keywords`` to - load in one joined eager load. + will set all of ``orders``, ``orders.items``, and + ``orders.items.keywords`` to load in one joined eager load. Individual descriptors are accepted as arguments as well:: @@ -1388,10 +1405,12 @@ def eagerload(*args, **kwargs): """A synonym for :func:`joinedload()`.""" return joinedload(*args, **kwargs) + def eagerload_all(*args, **kwargs): """A synonym for :func:`joinedload_all()`""" return joinedload_all(*args, **kwargs) + def subqueryload(*keys): """Return a ``MapperOption`` that will convert the property of the given name or series of mapped attributes @@ -1420,6 +1439,7 @@ def subqueryload(*keys): """ return strategies.EagerLazyOption(keys, lazy="subquery") + def subqueryload_all(*keys): """Return a ``MapperOption`` that will convert all properties along the given dot-separated path or series of mapped attributes @@ -1431,8 +1451,8 @@ def subqueryload_all(*keys): query.options(subqueryload_all('orders.items.keywords'))... 
- will set all of ``orders``, ``orders.items``, and ``orders.items.keywords`` to - load in one subquery eager load. + will set all of ``orders``, ``orders.items``, and + ``orders.items.keywords`` to load in one subquery eager load. Individual descriptors are accepted as arguments as well:: @@ -1444,6 +1464,7 @@ def subqueryload_all(*keys): """ return strategies.EagerLazyOption(keys, lazy="subquery", chained=True) + def lazyload(*keys): """Return a ``MapperOption`` that will convert the property of the given name or series of mapped attributes into a lazy load. @@ -1455,6 +1476,7 @@ def lazyload(*keys): """ return strategies.EagerLazyOption(keys, lazy=True) + def lazyload_all(*keys): """Return a ``MapperOption`` that will convert all the properties along the given dot-separated path or series of mapped attributes @@ -1467,6 +1489,7 @@ def lazyload_all(*keys): """ return strategies.EagerLazyOption(keys, lazy=True, chained=True) + def noload(*keys): """Return a ``MapperOption`` that will convert the property of the given name or series of mapped attributes into a non-load. @@ -1479,6 +1502,7 @@ def noload(*keys): """ return strategies.EagerLazyOption(keys, lazy=None) + def immediateload(*keys): """Return a ``MapperOption`` that will convert the property of the given name or series of mapped attributes into an immediate load. @@ -1503,6 +1527,7 @@ def immediateload(*keys): """ return strategies.EagerLazyOption(keys, lazy='immediate') + def contains_alias(alias): """Return a :class:`.MapperOption` that will indicate to the query that the main table has been aliased. 
@@ -1537,6 +1562,7 @@ def contains_alias(alias): """ return AliasOption(alias) + def contains_eager(*keys, **kwargs): """Return a ``MapperOption`` that will indicate to the query that the given attribute should be eagerly loaded from columns currently @@ -1580,6 +1606,7 @@ def contains_eager(*keys, **kwargs): propagate_to_loaders=False, chained=True), \ strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True) + def defer(*key): """Return a :class:`.MapperOption` that will convert the column property of the given name into a deferred load. @@ -1624,6 +1651,7 @@ def defer(*key): """ return strategies.DeferredOption(key, defer=True) + def undefer(*key): """Return a :class:`.MapperOption` that will convert the column property of the given name into a non-deferred (regular column) load. @@ -1634,28 +1662,29 @@ def undefer(*key): from sqlalchemy.orm import undefer - query(MyClass).options(undefer("attribute_one"), - undefer("attribute_two")) + query(MyClass).options( + undefer("attribute_one"), + undefer("attribute_two")) A class bound descriptor is also accepted:: query(MyClass).options( - undefer(MyClass.attribute_one), - undefer(MyClass.attribute_two)) + undefer(MyClass.attribute_one), + undefer(MyClass.attribute_two)) A "path" can be specified onto a related or collection object using a dotted name. 
The :func:`.orm.undefer` option will be applied to that object when loaded:: query(MyClass).options( - undefer("related.attribute_one"), - undefer("related.attribute_two")) + undefer("related.attribute_one"), + undefer("related.attribute_two")) To specify a path via class, send multiple arguments:: query(MyClass).options( - undefer(MyClass.related, MyOtherClass.attribute_one), - undefer(MyClass.related, MyOtherClass.attribute_two)) + undefer(MyClass.related, MyOtherClass.attribute_one), + undefer(MyClass.related, MyOtherClass.attribute_two)) See also: @@ -1671,9 +1700,10 @@ def undefer(*key): """ return strategies.DeferredOption(key, defer=False) + def undefer_group(name): - """Return a :class:`.MapperOption` that will convert the given group of deferred - column properties into a non-deferred (regular column) load. + """Return a :class:`.MapperOption` that will convert the given group of + deferred column properties into a non-deferred (regular column) load. Used with :meth:`.Query.options`. 
diff --git a/lib/sqlalchemy/orm/attributes.py b/lib/sqlalchemy/orm/attributes.py index 983b4dfec..060eaa6e5 100644 --- a/lib/sqlalchemy/orm/attributes.py +++ b/lib/sqlalchemy/orm/attributes.py @@ -85,8 +85,8 @@ canonical=16 ) LOAD_AGAINST_COMMITTED = util.symbol("LOAD_AGAINST_COMMITTED", -"""callables should use committed values as primary/foreign keys during a load""", -canonical=32 +"""callables should use committed values as primary/foreign keys during a load +""", canonical=32 ) # pre-packaged sets of flags used as inputs @@ -229,6 +229,7 @@ class QueryableAttribute(interfaces._MappedAttribute, inspection._self_inspects(QueryableAttribute) + class InstrumentedAttribute(QueryableAttribute): """Class bound instrumented attribute which adds descriptor methods.""" @@ -247,7 +248,8 @@ class InstrumentedAttribute(QueryableAttribute): if self._supports_population and self.key in dict_: return dict_[self.key] else: - return self.impl.get(instance_state(instance),dict_) + return self.impl.get(instance_state(instance), dict_) + def create_proxied_attribute(descriptor): """Create an QueryableAttribute / user descriptor hybrid. @@ -291,8 +293,10 @@ def create_proxied_attribute(descriptor): return self._comparator def adapted(self, adapter): - """Proxy adapted() for the use case of AliasedClass calling adapted.""" + """Proxy adapted() for the use case of AliasedClass calling + adapted. 
+ """ return self.__class__(self.class_, self.key, self.descriptor, self._comparator, adapter) @@ -325,7 +329,6 @@ def create_proxied_attribute(descriptor): attribute) ) - Proxy.__name__ = type(descriptor).__name__ + 'Proxy' util.monkeypatch_proxied_specials(Proxy, type(descriptor), @@ -333,6 +336,7 @@ def create_proxied_attribute(descriptor): from_instance=descriptor) return Proxy + class AttributeImpl(object): """internal implementation for instrumented attributes.""" @@ -417,7 +421,6 @@ class AttributeImpl(object): active_history = property(_get_active_history, _set_active_history) - def hasparent(self, state, optimistic=False): """Return the boolean value of a `hasparent` flag attached to the given state. @@ -434,7 +437,8 @@ class AttributeImpl(object): will also not have a `hasparent` flag. """ - assert self.trackparent, "This AttributeImpl is not configured to track parents." + msg = "This AttributeImpl is not configured to track parents." + assert self.trackparent, msg return state.parents.get(id(self.parent_token), optimistic) \ is not False @@ -445,7 +449,8 @@ class AttributeImpl(object): attribute represented by this ``InstrumentedAttribute``. """ - assert self.trackparent, "This AttributeImpl is not configured to track parents." + msg = "This AttributeImpl is not configured to track parents." + assert self.trackparent, msg id_ = id(self.parent_token) if value: @@ -472,7 +477,6 @@ class AttributeImpl(object): state.parents[id_] = False - def set_callable(self, state, callable_): """Set a callable function for this attribute on the given object. 
@@ -596,6 +600,7 @@ class AttributeImpl(object): state._commit(dict_, [self.key]) return value + class ScalarAttributeImpl(AttributeImpl): """represents a scalar value-holding InstrumentedAttribute.""" @@ -651,8 +656,6 @@ class ScalarAttributeImpl(AttributeImpl): self.property.columns[0].type - - class ScalarObjectAttributeImpl(ScalarAttributeImpl): """represents a scalar-holding InstrumentedAttribute, where the target object is also instrumented. @@ -834,7 +837,6 @@ class CollectionAttributeImpl(AttributeImpl): return [(instance_state(o), o) for o in current] - def fire_append_event(self, state, dict_, value, initiator): for fn in self.dispatch.append: value = fn(state, value, initiator or self) @@ -1011,6 +1013,7 @@ class CollectionAttributeImpl(AttributeImpl): return getattr(user_data, '_sa_adapter') + def backref_listeners(attribute, key, uselist): """Apply listeners to synchronize a two-way relationship.""" @@ -1110,6 +1113,7 @@ History = util.namedtuple("History", [ "added", "unchanged", "deleted" ]) + class History(History): """A 3-tuple of added, unchanged and deleted values, representing the changes which have occurred on an instrumented @@ -1273,6 +1277,7 @@ class History(History): HISTORY_BLANK = History(None, None, None) + def get_history(obj, key, passive=PASSIVE_OFF): """Return a :class:`.History` record for the given object and attribute key. 
@@ -1300,6 +1305,7 @@ def get_history(obj, key, passive=PASSIVE_OFF): return get_state_history(instance_state(obj), key, passive) + def get_state_history(state, key, passive=PASSIVE_OFF): return state.get_history(key, passive) @@ -1310,6 +1316,7 @@ def has_parent(cls, obj, key, optimistic=False): state = instance_state(obj) return manager.has_parent(state, key, optimistic) + def register_attribute(class_, key, **kw): comparator = kw.pop('comparator', None) parententity = kw.pop('parententity', None) @@ -1319,6 +1326,7 @@ def register_attribute(class_, key, **kw): register_attribute_impl(class_, key, **kw) return desc + def register_attribute_impl(class_, key, uselist=False, callable_=None, useobject=False, @@ -1341,7 +1349,7 @@ def register_attribute_impl(class_, key, typecallable=typecallable, **kw) elif useobject: impl = ScalarObjectAttributeImpl(class_, key, callable_, - dispatch,**kw) + dispatch, **kw) else: impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw) @@ -1353,6 +1361,7 @@ def register_attribute_impl(class_, key, manager.post_configure_attribute(key) return manager[key] + def register_descriptor(class_, key, comparator=None, parententity=None, doc=None): manager = manager_of_class(class_) @@ -1365,9 +1374,11 @@ def register_descriptor(class_, key, comparator=None, manager.instrument_attribute(key, descriptor) return descriptor + def unregister_attribute(class_, key): manager_of_class(class_).uninstrument_attribute(key) + def init_collection(obj, key): """Initialize a collection attribute and return the collection adapter. 
@@ -1390,6 +1401,7 @@ def init_collection(obj, key): dict_ = state.dict return init_state_collection(state, dict_, key) + def init_state_collection(state, dict_, key): """Initialize a collection attribute and return the collection adapter.""" @@ -1397,6 +1409,7 @@ def init_state_collection(state, dict_, key): user_data = attr.initialize(state, dict_) return attr.get_collection(state, dict_, user_data) + def set_committed_value(instance, key, value): """Set the value of an attribute with no history events. @@ -1415,6 +1428,7 @@ def set_committed_value(instance, key, value): state, dict_ = instance_state(instance), instance_dict(instance) state.manager[key].impl.set_committed_value(state, dict_, value) + def set_attribute(instance, key, value): """Set the value of an attribute, firing history events. @@ -1428,6 +1442,7 @@ def set_attribute(instance, key, value): state, dict_ = instance_state(instance), instance_dict(instance) state.manager[key].impl.set(state, dict_, value, None) + def get_attribute(instance, key): """Get the value of an attribute, firing any callables required. @@ -1441,6 +1456,7 @@ def get_attribute(instance, key): state, dict_ = instance_state(instance), instance_dict(instance) return state.manager[key].impl.get(state, dict_) + def del_attribute(instance, key): """Delete the value of an attribute, firing history events. @@ -1454,6 +1470,7 @@ def del_attribute(instance, key): state, dict_ = instance_state(instance), instance_dict(instance) state.manager[key].impl.delete(state, dict_) + def flag_modified(instance, key): """Mark an attribute on an instance as 'modified'. 
@@ -1464,4 +1481,3 @@ def flag_modified(instance, key): state, dict_ = instance_state(instance), instance_dict(instance) impl = state.manager[key].impl state._modified_event(dict_, impl, NO_VALUE) - diff --git a/lib/sqlalchemy/orm/collections.py b/lib/sqlalchemy/orm/collections.py index a57ef5e68..80206011c 100644 --- a/lib/sqlalchemy/orm/collections.py +++ b/lib/sqlalchemy/orm/collections.py @@ -153,6 +153,7 @@ class _PlainColumnGetter(object): else: return key[0] + class _SerializableColumnGetter(object): """Column-based getter used in version 0.7.6 only. @@ -178,6 +179,7 @@ class _SerializableColumnGetter(object): else: return key[0] + class _SerializableColumnGetterV2(_PlainColumnGetter): """Updated serializable getter which deals with multi-table mapped classes. @@ -239,6 +241,7 @@ def column_mapped_collection(mapping_spec): keyfunc = _PlainColumnGetter(cols) return lambda: MappedCollection(keyfunc) + class _SerializableAttrGetter(object): def __init__(self, name): self.name = name @@ -250,6 +253,7 @@ class _SerializableAttrGetter(object): def __reduce__(self): return _SerializableAttrGetter, (self.name, ) + def attribute_mapped_collection(attr_name): """A dictionary-based collection type with attribute-based keying. @@ -282,6 +286,7 @@ def mapped_collection(keyfunc): """ return lambda: MappedCollection(keyfunc) + class collection(object): """Decorators for entity collection classes. @@ -551,6 +556,7 @@ def collection_adapter(collection): return getattr(collection, '_sa_adapter', None) + def collection_iter(collection): """Iterate over an object supporting the @iterator or __iter__ protocols. @@ -803,6 +809,7 @@ def bulk_replace(values, existing_adapter, new_adapter): for member in removals: existing_adapter.remove_with_event(member) + def prepare_instrumentation(factory): """Prepare a callable for future use as a collection class factory. 
@@ -837,6 +844,7 @@ def prepare_instrumentation(factory): return factory + def __converting_factory(original_factory): """Convert the type returned by collection factories on the fly. @@ -864,6 +872,7 @@ def __converting_factory(original_factory): pass return wrapper + def _instrument_class(cls): """Modify methods in a class and install instrumentation.""" @@ -973,6 +982,7 @@ def _instrument_class(cls): setattr(cls, '_sa_instrumented', id(cls)) + def _instrument_membership_mutator(method, before, argument, after): """Route method args and/or return value through the collection adapter.""" # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))' @@ -1029,6 +1039,7 @@ def _instrument_membership_mutator(method, before, argument, after): pass return wrapper + def __set(collection, item, _sa_initiator=None): """Run set events, may eventually be inlined into decorators.""" @@ -1038,6 +1049,7 @@ def __set(collection, item, _sa_initiator=None): item = getattr(executor, 'fire_append_event')(item, _sa_initiator) return item + def __del(collection, item, _sa_initiator=None): """Run del events, may eventually be inlined into decorators.""" if _sa_initiator is not False: @@ -1045,12 +1057,14 @@ def __del(collection, item, _sa_initiator=None): if executor: getattr(executor, 'fire_remove_event')(item, _sa_initiator) + def __before_delete(collection, _sa_initiator=None): """Special method to run 'commit existing value' methods""" executor = getattr(collection, '_sa_adapter', None) if executor: getattr(executor, 'fire_pre_remove_event')(_sa_initiator) + def _list_decorators(): """Tailored instrumentation wrappers for any list-like class.""" @@ -1188,6 +1202,7 @@ def _list_decorators(): l.pop('_tidy') return l + def _dict_decorators(): """Tailored instrumentation wrappers for any dict-like mapping class.""" @@ -1281,11 +1296,13 @@ else: import sets _set_binop_bases = (set, frozenset, sets.BaseSet) + def _set_binops_check_strict(self, obj): """Allow only set, frozenset 
and self.__class__-derived objects in binops.""" return isinstance(obj, _set_binop_bases + (self.__class__,)) + def _set_binops_check_loose(self, obj): """Allow anything set-like to participate in set binops.""" return (isinstance(obj, _set_binop_bases + (self.__class__,)) or @@ -1448,6 +1465,7 @@ class InstrumentedList(list): 'remover': 'remove', 'iterator': '__iter__', } + class InstrumentedSet(set): """An instrumented version of the built-in set.""" @@ -1456,6 +1474,7 @@ class InstrumentedSet(set): 'remover': 'remove', 'iterator': '__iter__', } + class InstrumentedDict(dict): """An instrumented version of the built-in dict.""" @@ -1494,6 +1513,7 @@ __interfaces = { None: {} } + class MappedCollection(dict): """A basic dictionary-based collection class. @@ -1577,4 +1597,3 @@ class MappedCollection(dict): _instrument_class(MappedCollection) _instrument_class(InstrumentedList) _instrument_class(InstrumentedSet) - diff --git a/lib/sqlalchemy/orm/dependency.py b/lib/sqlalchemy/orm/dependency.py index 5c9efb398..2f2658bb9 100644 --- a/lib/sqlalchemy/orm/dependency.py +++ b/lib/sqlalchemy/orm/dependency.py @@ -13,6 +13,7 @@ from . import attributes, exc, sync, unitofwork, \ util as mapperutil from .interfaces import ONETOMANY, MANYTOONE, MANYTOMANY + class DependencyProcessor(object): def __init__(self, prop): self.prop = prop @@ -63,7 +64,6 @@ class DependencyProcessor(object): """ uow.register_preprocessor(self, True) - def per_property_flush_actions(self, uow): after_save = unitofwork.ProcessAll(uow, self, False, True) before_delete = unitofwork.ProcessAll(uow, self, True, True) @@ -95,7 +95,6 @@ class DependencyProcessor(object): before_delete ) - def per_state_flush_actions(self, uow, states, isdelete): """establish actions and dependencies related to a flush. @@ -159,7 +158,8 @@ class DependencyProcessor(object): # detect if there's anything changed or loaded # by a preprocessor on this state/attribute. if not, # we should be able to skip it entirely. 
- sum_ = state.manager[self.key].impl.get_all_pending(state, state.dict) + sum_ = state.manager[self.key].impl.get_all_pending( + state, state.dict) if not sum_: continue @@ -210,7 +210,6 @@ class DependencyProcessor(object): after_save, before_delete, isdelete, childisdelete) - def presort_deletes(self, uowcommit, states): return False @@ -314,6 +313,7 @@ class DependencyProcessor(object): def __repr__(self): return "%s(%s)" % (self.__class__.__name__, self.prop) + class OneToManyDP(DependencyProcessor): def per_property_dependencies(self, uow, parent_saves, @@ -437,8 +437,6 @@ class OneToManyDP(DependencyProcessor): uowcommit.register_object(child, operation="delete", prop=self.prop) - - def presort_saves(self, uowcommit, states): children_added = uowcommit.memo(('children_added', self), set) @@ -581,6 +579,7 @@ class OneToManyDP(DependencyProcessor): self.parent, self.prop.synchronize_pairs) + class ManyToOneDP(DependencyProcessor): def __init__(self, prop): DependencyProcessor.__init__(self, prop) @@ -694,8 +693,8 @@ class ManyToOneDP(DependencyProcessor): continue uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) - for c, m, st_, dct_ in self.mapper.cascade_iterator( - 'delete', child): + t = self.mapper.cascade_iterator('delete', child) + for c, m, st_, dct_ in t: uowcommit.register_object( st_, isdelete=True) @@ -713,11 +712,9 @@ class ManyToOneDP(DependencyProcessor): uowcommit.register_object(child, isdelete=True, operation="delete", prop=self.prop) - for c, m, st_, dct_ in self.mapper.cascade_iterator( - 'delete', child): - uowcommit.register_object( - st_, - isdelete=True) + t = self.mapper.cascade_iterator('delete', child) + for c, m, st_, dct_ in t: + uowcommit.register_object(st_, isdelete=True) def process_deletes(self, uowcommit, states): if self.post_update and \ @@ -776,6 +773,7 @@ class ManyToOneDP(DependencyProcessor): uowcommit, False) + class DetectKeySwitch(DependencyProcessor): """For many-to-one 
relationships with no one-to-many backref, searches for parents through the unit of work when a primary @@ -1100,11 +1098,11 @@ class ManyToManyDP(DependencyProcessor): if result.supports_sane_multi_rowcount() and \ result.rowcount != len(secondary_delete): raise exc.StaleDataError( - "DELETE statement on table '%s' expected to delete %d row(s); " - "Only %d were matched." % - (self.secondary.description, len(secondary_delete), - result.rowcount) - ) + "DELETE statement on table '%s' expected to delete " + "%d row(s); Only %d were matched." % + (self.secondary.description, len(secondary_delete), + result.rowcount) + ) if secondary_update: associationrow = secondary_update[0] @@ -1117,11 +1115,11 @@ class ManyToManyDP(DependencyProcessor): if result.supports_sane_multi_rowcount() and \ result.rowcount != len(secondary_update): raise exc.StaleDataError( - "UPDATE statement on table '%s' expected to update %d row(s); " - "Only %d were matched." % - (self.secondary.description, len(secondary_update), - result.rowcount) - ) + "UPDATE statement on table '%s' expected to update " + "%d row(s); Only %d were matched." 
% + (self.secondary.description, len(secondary_update), + result.rowcount) + ) if secondary_insert: statement = self.secondary.insert() @@ -1157,8 +1155,7 @@ class ManyToManyDP(DependencyProcessor): self.prop.synchronize_pairs) _direction_to_processor = { - ONETOMANY : OneToManyDP, + ONETOMANY: OneToManyDP, MANYTOONE: ManyToOneDP, - MANYTOMANY : ManyToManyDP, + MANYTOMANY: ManyToManyDP, } - diff --git a/lib/sqlalchemy/orm/deprecated_interfaces.py b/lib/sqlalchemy/orm/deprecated_interfaces.py index cee15f1b6..bc9b352d4 100644 --- a/lib/sqlalchemy/orm/deprecated_interfaces.py +++ b/lib/sqlalchemy/orm/deprecated_interfaces.py @@ -116,7 +116,6 @@ class MapperExtension(object): event.listen(self, "%s" % meth, ls_meth, raw=False, retval=True, propagate=True) - def instrument_class(self, mapper, class_): """Receive a class when the mapper is first constructed, and has applied instrumentation to the mapped class. @@ -374,6 +373,7 @@ class MapperExtension(object): return EXT_CONTINUE + class SessionExtension(object): """Base implementation for :class:`.Session` event hooks. @@ -439,7 +439,7 @@ class SessionExtension(object): Note that this may not be per-flush if a longer running transaction is ongoing.""" - def before_flush( self, session, flush_context, instances): + def before_flush(self, session, flush_context, instances): """Execute before flush process has started. `instances` is an optional list of objects which were passed to @@ -462,7 +462,7 @@ class SessionExtension(object): occurred, depending on whether or not the flush started its own transaction or participated in a larger transaction. """ - def after_begin( self, session, transaction, connection): + def after_begin(self, session, transaction, connection): """Execute after a transaction is begun on a connection `transaction` is the SessionTransaction. This method is called @@ -473,7 +473,7 @@ class SessionExtension(object): This is called after an add, delete or merge. 
""" - def after_bulk_update( self, session, query, query_context, result): + def after_bulk_update(self, session, query, query_context, result): """Execute after a bulk update operation to the session. This is called after a session.query(...).update() @@ -483,7 +483,7 @@ class SessionExtension(object): `result` is the result object returned from the bulk operation. """ - def after_bulk_delete( self, session, query, query_context, result): + def after_bulk_delete(self, session, query, query_context, result): """Execute after a bulk delete operation to the session. This is called after a session.query(...).delete() @@ -586,5 +586,3 @@ class AttributeExtension(object): """ return value - - diff --git a/lib/sqlalchemy/orm/descriptor_props.py b/lib/sqlalchemy/orm/descriptor_props.py index fdbe44c6c..a058fc812 100644 --- a/lib/sqlalchemy/orm/descriptor_props.py +++ b/lib/sqlalchemy/orm/descriptor_props.py @@ -17,6 +17,7 @@ from .. import util, sql, exc as sa_exc, event, schema from ..sql import expression properties = util.importlater('sqlalchemy.orm', 'properties') + class DescriptorProperty(MapperProperty): """:class:`.MapperProperty` which proxies access to a user-defined descriptor.""" @@ -47,8 +48,10 @@ class DescriptorProperty(MapperProperty): if self.descriptor is None: def fset(obj, value): setattr(obj, self.name, value) + def fdel(obj): delattr(obj, self.name) + def fget(obj): return getattr(obj, self.name) @@ -124,7 +127,10 @@ class CompositeProperty(DescriptorProperty): # key not present. Iterate through related # attributes, retrieve their values. This # ensures they all load. 
- values = [getattr(instance, key) for key in self._attribute_keys] + values = [ + getattr(instance, key) + for key in self._attribute_keys + ] # current expected behavior here is that the composite is # created on access if the object is persistent or if @@ -239,12 +245,15 @@ class CompositeProperty(DescriptorProperty): state.dict.pop(self.key, None) event.listen(self.parent, 'after_insert', - insert_update_handler, raw=True) + insert_update_handler, raw=True) event.listen(self.parent, 'after_update', - insert_update_handler, raw=True) - event.listen(self.parent, 'load', load_handler, raw=True, propagate=True) - event.listen(self.parent, 'refresh', load_handler, raw=True, propagate=True) - event.listen(self.parent, "expire", expire_handler, raw=True, propagate=True) + insert_update_handler, raw=True) + event.listen(self.parent, 'load', + load_handler, raw=True, propagate=True) + event.listen(self.parent, 'refresh', + load_handler, raw=True, propagate=True) + event.listen(self.parent, 'expire', + expire_handler, raw=True, propagate=True) # TODO: need a deserialize hook here @@ -285,7 +294,7 @@ class CompositeProperty(DescriptorProperty): ) else: return attributes.History( - (),[self.composite_class(*added)], () + (), [self.composite_class(*added)], () ) def _comparator_factory(self, mapper): @@ -317,7 +326,7 @@ class CompositeProperty(DescriptorProperty): if self.adapter: # TODO: test coverage for adapted composite comparison return expression.ClauseList( - *[self.adapter(x) for x in self.prop._comparable_elements]) + *[self.adapter(x) for x in self.prop._comparable_elements]) else: return expression.ClauseList(*self.prop._comparable_elements) @@ -329,7 +338,9 @@ class CompositeProperty(DescriptorProperty): else: values = other.__composite_values__() return sql.and_( - *[a==b for a, b in zip(self.prop._comparable_elements, values)]) + *[a == b + for a, b in zip(self.prop._comparable_elements, values)] + ) def __ne__(self, other): return sql.not_(self.__eq__(other)) 
@@ -337,6 +348,7 @@ class CompositeProperty(DescriptorProperty): def __str__(self): return str(self.parent.class_.__name__) + "." + self.key + class ConcreteInheritedProperty(DescriptorProperty): """A 'do nothing' :class:`.MapperProperty` that disables an attribute on a concrete subclass that is only present @@ -374,8 +386,10 @@ class ConcreteInheritedProperty(DescriptorProperty): class NoninheritedConcreteProp(object): def __set__(s, obj, value): warn() + def __delete__(s, obj): warn() + def __get__(s, obj, owner): if obj is None: return self.descriptor @@ -440,6 +454,7 @@ class SynonymProperty(DescriptorProperty): self.parent = parent + class ComparableProperty(DescriptorProperty): """Instruments a Python property for use in query expressions.""" diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index 598575576..c1dea95f1 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ b/lib/sqlalchemy/orm/dynamic.py @@ -19,14 +19,15 @@ from . import ( ) from .query import Query + class DynaLoader(strategies.AbstractRelationshipLoader): def init_class_attribute(self, mapper): self.is_class_level = True if not self.uselist: raise exc.InvalidRequestError( - "On relationship %s, 'dynamic' loaders cannot be used with " - "many-to-one/one-to-one relationships and/or " - "uselist=False." % self.parent_property) + "On relationship %s, 'dynamic' loaders cannot be used with " + "many-to-one/one-to-one relationships and/or " + "uselist=False." 
% self.parent_property) strategies._register_attribute(self, mapper, useobject=True, @@ -38,6 +39,7 @@ class DynaLoader(strategies.AbstractRelationshipLoader): log.class_logger(DynaLoader) + class DynamicAttributeImpl(attributes.AttributeImpl): uses_objects = True accepts_scalar_loader = False @@ -118,7 +120,6 @@ class DynamicAttributeImpl(attributes.AttributeImpl): return self._set_iterable(state, dict_, value) - def _set_iterable(self, state, dict_, iterable, adapter=None): collection_history = self._modified_event(state, dict_) new_values = list(iterable) @@ -144,7 +145,8 @@ class DynamicAttributeImpl(attributes.AttributeImpl): c.deleted_items) def get_all_pending(self, state, dict_): - c = self._get_collection_history(state, attributes.PASSIVE_NO_INITIALIZE) + c = self._get_collection_history( + state, attributes.PASSIVE_NO_INITIALIZE) return [ (attributes.instance_state(x), x) for x in @@ -174,6 +176,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl): if initiator is not self: self.fire_remove_event(state, dict_, value, initiator) + class DynCollectionAdapter(object): """the dynamic analogue to orm.collections.CollectionAdapter""" @@ -197,6 +200,7 @@ class DynCollectionAdapter(object): def remove_without_event(self, item): pass + class AppenderMixin(object): query_class = None @@ -228,7 +232,7 @@ class AppenderMixin(object): def session(self): return self.__session() - session = property(session, lambda s, x:None) + session = property(session, lambda s, x: None) def __iter__(self): sess = self.__session() @@ -302,6 +306,7 @@ def mixin_user_query(cls): name = 'Appender' + cls.__name__ return type(name, (AppenderMixin, cls), {'query_class': cls}) + class CollectionHistory(object): """Overrides AttributeHistory to receive append/remove events directly.""" @@ -318,4 +323,3 @@ class CollectionHistory(object): self.deleted_items = [] self.added_items = [] self.unchanged_items = [] - diff --git a/lib/sqlalchemy/orm/evaluator.py 
b/lib/sqlalchemy/orm/evaluator.py index 0bc635db6..894ac139e 100644 --- a/lib/sqlalchemy/orm/evaluator.py +++ b/lib/sqlalchemy/orm/evaluator.py @@ -7,6 +7,7 @@ import operator from ..sql import operators + class UnevaluatableError(Exception): pass @@ -24,6 +25,7 @@ _notimplemented_ops = set(getattr(operators, op) 'notilike_op', 'between_op', 'in_op', 'notin_op', 'endswith_op', 'concat_op')) + class EvaluatorCompiler(object): def process(self, clause): meth = getattr(self, "visit_%s" % clause.__visit_name__, None) @@ -77,7 +79,7 @@ class EvaluatorCompiler(object): return evaluate def visit_binary(self, clause): - eval_left,eval_right = map(self.process, + eval_left, eval_right = map(self.process, [clause.left, clause.right]) operator = clause.operator if operator is operators.is_: diff --git a/lib/sqlalchemy/orm/events.py b/lib/sqlalchemy/orm/events.py index 72c430b38..291e79ead 100644 --- a/lib/sqlalchemy/orm/events.py +++ b/lib/sqlalchemy/orm/events.py @@ -94,13 +94,13 @@ class InstrumentationEvents(event.Events): """ - def attribute_instrument(self, cls, key, inst): """Called when an attribute is instrumented.""" + class _InstrumentationEventsHold(object): - """temporary marker object used to transfer from _accept_with() to _listen() - on the InstrumentationEvents class. + """temporary marker object used to transfer from _accept_with() to + _listen() on the InstrumentationEvents class. 
""" def __init__(self, class_): @@ -174,6 +174,7 @@ class InstanceEvents(event.Events): def _listen(cls, target, identifier, fn, raw=False, propagate=False): if not raw: orig_fn = fn + def wrap(state, *arg, **kw): return orig_fn(state.obj(), *arg, **kw) fn = wrap @@ -185,7 +186,8 @@ class InstanceEvents(event.Events): @classmethod def _remove(cls, identifier, target, fn): - raise NotImplementedError("Removal of instance events not yet implemented") + msg = "Removal of instance events not yet implemented" + raise NotImplementedError(msg) @classmethod def _clear(cls): @@ -314,6 +316,7 @@ class InstanceEvents(event.Events): """ + class _EventsHold(object): """Hold onto listeners against unmapped, uninstrumented classes. @@ -361,6 +364,7 @@ class _EventsHold(object): subject.dispatch._listen(subject, ident, fn, raw, propagate) + class _InstanceEventsHold(_EventsHold): all_holds = weakref.WeakKeyDictionary() @@ -389,7 +393,8 @@ class MapperEvents(event.Events): # associate the listener function with SomeMappedClass, # to execute during the "before_insert" hook - event.listen(SomeMappedClass, 'before_insert', my_before_insert_listener) + event.listen( + SomeMappedClass, 'before_insert', my_before_insert_listener) Available targets include mapped classes, instances of :class:`.Mapper` (i.e. returned by :func:`.mapper`, @@ -469,11 +474,13 @@ class MapperEvents(event.Events): if not raw: meth = getattr(cls, identifier) try: - target_index = inspect.getargspec(meth)[0].index('target') - 1 + target_index = \ + inspect.getargspec(meth)[0].index('target') - 1 except ValueError: target_index = None wrapped_fn = fn + def wrap(*arg, **kw): if not raw and target_index is not None: arg = list(arg) @@ -516,9 +523,9 @@ class MapperEvents(event.Events): """Called when the mapper for the class is fully configured. This event is the latest phase of mapper construction, and - is invoked when the mapped classes are first used, so that relationships - between mappers can be resolved. 
When the event is called, - the mapper should be in its final state. + is invoked when the mapped classes are first used, so that + relationships between mappers can be resolved. When the event is + called, the mapper should be in its final state. While the configuration event normally occurs automatically, it can be forced to occur ahead of time, in the case where the event @@ -542,9 +549,9 @@ class MapperEvents(event.Events): Theoretically this event is called once per application, but is actually called any time new mappers - have been affected by a :func:`.orm.configure_mappers` call. If new mappings - are constructed after existing ones have already been used, - this event can be called again. + have been affected by a :func:`.orm.configure_mappers` + call. If new mappings are constructed after existing ones have + already been used, this event can be called again. """ @@ -632,7 +639,6 @@ class MapperEvents(event.Events): """ - def populate_instance(self, mapper, context, row, target, **flags): """Receive an instance before that instance has @@ -688,24 +694,26 @@ class MapperEvents(event.Events): .. warning:: Mapper-level flush events are designed to operate **on attributes local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of - the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. 
- Operations that **are not supported in mapper events** include: + and via SQL operations with the given** + :class:`.Connection` **only.** Handlers here should **not** make + alterations to the state of the :class:`.Session` overall, and + in general should not affect any :func:`.relationship` -mapped + attributes, as session cascade rules will not function properly, + nor is it always known if the related class has already been + handled. Operations that **are not supported in mapper + events** include: * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` + * Mapped relationship attribute set/del events, + i.e. ``someobject.related = someotherobject`` Operations which manipulate the state of the object relative to other objects are better handled: - * In the ``__init__()`` method of the mapped object itself, or another method - designed to establish some particular state. + * In the ``__init__()`` method of the mapped object itself, or + another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` * Within the :meth:`.SessionEvents.before_flush` event. @@ -744,24 +752,26 @@ class MapperEvents(event.Events): .. warning:: Mapper-level flush events are designed to operate **on attributes local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of - the :class:`.Session` overall, and in general should not - affect any :func:`.relationship` -mapped attributes, as - session cascade rules will not function properly, nor is it - always known if the related class has already been handled. 
- Operations that **are not supported in mapper events** include: + and via SQL operations with the given** + :class:`.Connection` **only.** Handlers here should **not** make + alterations to the state of the :class:`.Session` overall, and in + general should not affect any :func:`.relationship` -mapped + attributes, as session cascade rules will not function properly, + nor is it always known if the related class has already been + handled. Operations that **are not supported in mapper + events** include: * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` + * Mapped relationship attribute set/del events, + i.e. ``someobject.related = someotherobject`` Operations which manipulate the state of the object relative to other objects are better handled: - * In the ``__init__()`` method of the mapped object itself, or another method - designed to establish some particular state. + * In the ``__init__()`` method of the mapped object itself, + or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` * Within the :meth:`.SessionEvents.before_flush` event. @@ -819,9 +829,9 @@ class MapperEvents(event.Events): .. 
warning:: Mapper-level flush events are designed to operate **on attributes local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of - the :class:`.Session` overall, and in general should not + and via SQL operations with the given** :class:`.Connection` + **only.** Handlers here should **not** make alterations to the + state of the :class:`.Session` overall, and in general should not affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it always known if the related class has already been handled. @@ -830,13 +840,14 @@ class MapperEvents(event.Events): * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` + * Mapped relationship attribute set/del events, + i.e. ``someobject.related = someotherobject`` Operations which manipulate the state of the object relative to other objects are better handled: - * In the ``__init__()`` method of the mapped object itself, or another method - designed to establish some particular state. + * In the ``__init__()`` method of the mapped object itself, + or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` * Within the :meth:`.SessionEvents.before_flush` event. @@ -892,9 +903,9 @@ class MapperEvents(event.Events): .. 
warning:: Mapper-level flush events are designed to operate **on attributes local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of - the :class:`.Session` overall, and in general should not + and via SQL operations with the given** :class:`.Connection` + **only.** Handlers here should **not** make alterations to the + state of the :class:`.Session` overall, and in general should not affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it always known if the related class has already been handled. @@ -903,13 +914,14 @@ class MapperEvents(event.Events): * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` + * Mapped relationship attribute set/del events, + i.e. ``someobject.related = someotherobject`` Operations which manipulate the state of the object relative to other objects are better handled: - * In the ``__init__()`` method of the mapped object itself, or another method - designed to establish some particular state. + * In the ``__init__()`` method of the mapped object itself, + or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` * Within the :meth:`.SessionEvents.before_flush` event. @@ -942,9 +954,9 @@ class MapperEvents(event.Events): .. 
warning:: Mapper-level flush events are designed to operate **on attributes local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of - the :class:`.Session` overall, and in general should not + and via SQL operations with the given** :class:`.Connection` + **only.** Handlers here should **not** make alterations to the + state of the :class:`.Session` overall, and in general should not affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it always known if the related class has already been handled. @@ -953,13 +965,14 @@ class MapperEvents(event.Events): * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` + * Mapped relationship attribute set/del events, + i.e. ``someobject.related = someotherobject`` Operations which manipulate the state of the object relative to other objects are better handled: - * In the ``__init__()`` method of the mapped object itself, or another method - designed to establish some particular state. + * In the ``__init__()`` method of the mapped object itself, + or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` * Within the :meth:`.SessionEvents.before_flush` event. @@ -992,9 +1005,9 @@ class MapperEvents(event.Events): .. 
warning:: Mapper-level flush events are designed to operate **on attributes local to the immediate object being handled - and via SQL operations with the given** :class:`.Connection` **only.** - Handlers here should **not** make alterations to the state of - the :class:`.Session` overall, and in general should not + and via SQL operations with the given** :class:`.Connection` + **only.** Handlers here should **not** make alterations to the + state of the :class:`.Session` overall, and in general should not affect any :func:`.relationship` -mapped attributes, as session cascade rules will not function properly, nor is it always known if the related class has already been handled. @@ -1003,13 +1016,14 @@ class MapperEvents(event.Events): * :meth:`.Session.add` * :meth:`.Session.delete` * Mapped collection append, add, remove, delete, discard, etc. - * Mapped relationship attribute set/del events, i.e. ``someobject.related = someotherobject`` + * Mapped relationship attribute set/del events, + i.e. ``someobject.related = someotherobject`` Operations which manipulate the state of the object relative to other objects are better handled: - * In the ``__init__()`` method of the mapped object itself, or another method - designed to establish some particular state. + * In the ``__init__()`` method of the mapped object itself, + or another method designed to establish some particular state. * In a ``@validates`` handler, see :ref:`simple_validators` * Within the :meth:`.SessionEvents.before_flush` event. 
@@ -1029,7 +1043,9 @@ class MapperEvents(event.Events): @classmethod def _remove(cls, identifier, target, fn): - raise NotImplementedError("Removal of mapper events not yet implemented") + msg = "Removal of mapper events not yet implemented" + raise NotImplementedError(msg) + class _MapperEventsHold(_EventsHold): all_holds = weakref.WeakKeyDictionary() @@ -1042,6 +1058,7 @@ class _MapperEventsHold(_EventsHold): dispatch = event.dispatcher(HoldMapperEvents) + class SessionEvents(event.Events): """Define events specific to :class:`.Session` lifecycle. @@ -1066,7 +1083,6 @@ class SessionEvents(event.Events): globally. """ - @classmethod def _accept_with(cls, target): if isinstance(target, orm.scoped_session): @@ -1082,7 +1098,6 @@ class SessionEvents(event.Events): "requires that its creation callable " "is associated with the Session class.") - if isinstance(target, orm.sessionmaker): return target.class_ elif isinstance(target, type): @@ -1097,7 +1112,8 @@ class SessionEvents(event.Events): @classmethod def _remove(cls, identifier, target, fn): - raise NotImplementedError("Removal of session events not yet implemented") + msg = "Removal of session events not yet implemented" + raise NotImplementedError(msg) def after_transaction_create(self, session, transaction): """Execute when a new :class:`.SessionTransaction` is created. @@ -1176,16 +1192,16 @@ class SessionEvents(event.Events): session.execute("select * from some_table") :param session: The target :class:`.Session`. - :param previous_transaction: The :class:`.SessionTransaction` transactional - marker object which was just closed. The current :class:`.SessionTransaction` - for the given :class:`.Session` is available via the - :attr:`.Session.transaction` attribute. + :param previous_transaction: The :class:`.SessionTransaction` + transactional marker object which was just closed. The current + :class:`.SessionTransaction` for the given :class:`.Session` is + available via the :attr:`.Session.transaction` attribute. 
.. versionadded:: 0.7.3 """ - def before_flush( self, session, flush_context, instances): + def before_flush(self, session, flush_context, instances): """Execute before flush process has started. :param session: The target :class:`.Session`. @@ -1225,7 +1241,7 @@ class SessionEvents(event.Events): which handles the details of the flush. """ - def after_begin( self, session, transaction, connection): + def after_begin(self, session, transaction, connection): """Execute after a transaction is begun on a connection :param session: The target :class:`.Session`. @@ -1266,7 +1282,7 @@ class SessionEvents(event.Events): """ - def after_bulk_update( self, session, query, query_context, result): + def after_bulk_update(self, session, query, query_context, result): """Execute after a bulk update operation to the session. This is called as a result of the :meth:`.Query.update` method. @@ -1280,7 +1296,7 @@ class SessionEvents(event.Events): """ - def after_bulk_delete( self, session, query, query_context, result): + def after_bulk_delete(self, session, query, query_context, result): """Execute after a bulk delete operation to the session. This is called as a result of the :meth:`.Query.delete` method. @@ -1372,6 +1388,7 @@ class AttributeEvents(event.Events): if not raw or not retval: orig_fn = fn + def wrap(target, value, *arg): if not raw: target = target.obj() @@ -1392,7 +1409,8 @@ class AttributeEvents(event.Events): @classmethod def _remove(cls, identifier, target, fn): - raise NotImplementedError("Removal of attribute events not yet implemented") + msg = "Removal of attribute events not yet implemented" + raise NotImplementedError(msg) def append(self, target, value, initiator): """Receive a collection append event. @@ -1445,4 +1463,3 @@ class AttributeEvents(event.Events): the given value, or a new effective value, should be returned. 
""" - diff --git a/lib/sqlalchemy/orm/exc.py b/lib/sqlalchemy/orm/exc.py index 783434504..d081970da 100644 --- a/lib/sqlalchemy/orm/exc.py +++ b/lib/sqlalchemy/orm/exc.py @@ -12,6 +12,7 @@ attributes = util.importlater('sqlalchemy.orm', 'attributes') NO_STATE = (AttributeError, KeyError) """Exception types that may be raised by instrumentation implementations.""" + class StaleDataError(sa_exc.SQLAlchemyError): """An operation encountered database state that is unaccounted for. @@ -48,13 +49,19 @@ class FlushError(sa_exc.SQLAlchemyError): class UnmappedError(sa_exc.InvalidRequestError): """Base for exceptions that involve expected mappings not present.""" + class ObjectDereferencedError(sa_exc.SQLAlchemyError): - """An operation cannot complete due to an object being garbage collected.""" + """An operation cannot complete due to an object being garbage + collected. + + """ + class DetachedInstanceError(sa_exc.SQLAlchemyError): """An attempt to access unloaded attributes on a mapped instance that is detached.""" + class UnmappedInstanceError(UnmappedError): """An mapping operation was requested for an unknown instance.""" @@ -64,8 +71,9 @@ class UnmappedInstanceError(UnmappedError): mapper = orm_util.class_mapper(type(obj)) name = _safe_cls_name(type(obj)) msg = ("Class %r is mapped, but this instance lacks " - "instrumentation. This occurs when the instance is created " - "before sqlalchemy.orm.mapper(%s) was called." % (name, name)) + "instrumentation. This occurs when the instance" + "is created before sqlalchemy.orm.mapper(%s) " + "was called." 
% (name, name)) except UnmappedClassError: msg = _default_unmapped(type(obj)) if isinstance(obj, type): @@ -77,6 +85,7 @@ class UnmappedInstanceError(UnmappedError): def __reduce__(self): return self.__class__, (None, self.args[0]) + class UnmappedClassError(UnmappedError): """An mapping operation was requested for an unknown class.""" @@ -88,6 +97,7 @@ class UnmappedClassError(UnmappedError): def __reduce__(self): return self.__class__, (None, self.args[0]) + class ObjectDeletedError(sa_exc.InvalidRequestError): """A refresh operation failed to retrieve the database row corresponding to an object's known primary key identity. @@ -117,6 +127,7 @@ class ObjectDeletedError(sa_exc.InvalidRequestError): def __reduce__(self): return self.__class__, (None, self.args[0]) + class UnmappedColumnError(sa_exc.InvalidRequestError): """Mapping operation was requested on an unknown column.""" @@ -138,6 +149,7 @@ def _safe_cls_name(cls): cls_name = repr(cls) return cls_name + def _default_unmapped(cls): try: mappers = attributes.manager_of_class(cls).mappers diff --git a/lib/sqlalchemy/orm/identity.py b/lib/sqlalchemy/orm/identity.py index b58aa14a6..e5a2dbb87 100644 --- a/lib/sqlalchemy/orm/identity.py +++ b/lib/sqlalchemy/orm/identity.py @@ -39,8 +39,10 @@ class IdentityMap(dict): return self._modified def check_modified(self): - """return True if any InstanceStates present have been marked as 'modified'.""" + """return True if any InstanceStates present have been marked + as 'modified'. 
+ """ return bool(self._modified) def has_key(self, key): @@ -64,6 +66,7 @@ class IdentityMap(dict): def __delitem__(self, key): raise NotImplementedError("IdentityMap uses remove() to remove data") + class WeakInstanceDict(IdentityMap): def __init__(self): IdentityMap.__init__(self) @@ -110,9 +113,10 @@ class WeakInstanceDict(IdentityMap): if existing_state is not state: o = existing_state.obj() if o is not None: - raise AssertionError("A conflicting state is already " - "present in the identity map for key %r" - % (key, )) + raise AssertionError( + "A conflicting state is already " + "present in the identity map for key %r" + % (key, )) else: return except KeyError: @@ -156,10 +160,12 @@ class WeakInstanceDict(IdentityMap): # return iter(self._values()) # Py2K items = _items + def iteritems(self): return iter(self.items()) values = _values + def itervalues(self): return iter(self.values()) # end Py2K @@ -180,12 +186,15 @@ class WeakInstanceDict(IdentityMap): def prune(self): return 0 + class StrongInstanceDict(IdentityMap): def all_states(self): return [attributes.instance_state(o) for o in self.itervalues()] def contains_state(self, state): - return state.key in self and attributes.instance_state(self[state.key]) is state + return ( + state.key in self and + attributes.instance_state(self[state.key]) is state) def replace(self, state): if dict.__contains__(self, state.key): @@ -232,4 +241,3 @@ class StrongInstanceDict(IdentityMap): dict.update(self, keepers) self.modified = bool(dirty) return ref_count - len(self) - diff --git a/lib/sqlalchemy/orm/instrumentation.py b/lib/sqlalchemy/orm/instrumentation.py index 0e828ce87..5a4fc2093 100644 --- a/lib/sqlalchemy/orm/instrumentation.py +++ b/lib/sqlalchemy/orm/instrumentation.py @@ -32,9 +32,9 @@ alternate instrumentation forms. from . import exc, collections, events from operator import attrgetter from .. 
import event, util -import weakref state = util.importlater("sqlalchemy.orm", "state") + class ClassManager(dict): """tracks state information at the class level.""" @@ -308,6 +308,7 @@ class ClassManager(dict): return '<%s of %r at %x>' % ( self.__class__.__name__, self.class_, id(self)) + class InstrumentationFactory(object): """Factory for new ClassManager instances.""" @@ -352,6 +353,7 @@ class InstrumentationFactory(object): # when importred. _instrumentation_factory = InstrumentationFactory() + def register_class(class_): """Register class instrumentation. @@ -364,6 +366,7 @@ def register_class(class_): manager = _instrumentation_factory.create_manager_for_cls(class_) return manager + def unregister_class(class_): """Unregister class instrumentation.""" @@ -390,6 +393,7 @@ instance_dict = _default_dict_getter = ClassManager.dict_getter() manager_of_class = _default_manager_getter = ClassManager.manager_getter() + def _generate_init(class_, class_manager): """Build an __init__ decorator that triggers ClassManager events.""" @@ -433,4 +437,3 @@ def __init__(%(apply_pos)s): #if func_kw_defaults: # __init__.__kwdefaults__ = func_kw_defaults return __init__ - diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index b30630434..c91746da0 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -16,7 +16,6 @@ classes within should be considered mostly private. """ from __future__ import absolute_import -from itertools import chain from .. 
import exc as sa_exc, util, inspect from ..sql import operators @@ -53,6 +52,7 @@ from .deprecated_interfaces import AttributeExtension, \ SessionExtension, \ MapperExtension + class _InspectionAttr(object): """Define a series of attributes that all ORM inspection targets need to have.""" @@ -65,11 +65,14 @@ class _InspectionAttr(object): is_attribute = False is_clause_element = False + class _MappedAttribute(object): """Mixin for attributes which should be replaced by mapper-assigned attributes. """ + + class MapperProperty(_MappedAttribute, _InspectionAttr): """Manage the relationship of a ``Mapper`` to a single class attribute, as well as that attribute as it appears on individual @@ -80,7 +83,8 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): mapped :class:`.Column`, which is represented in a mapping as an instance of :class:`.ColumnProperty`, and a reference to another class produced by :func:`.relationship`, - represented in the mapping as an instance of :class:`.RelationshipProperty`. + represented in the mapping as an instance of + :class:`.RelationshipProperty`. """ @@ -185,7 +189,6 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): """ pass - def is_primary(self): """Return True if this ``MapperProperty``'s mapper is the primary mapper for its class. @@ -216,6 +219,7 @@ class MapperProperty(_MappedAttribute, _InspectionAttr): return operator(self.comparator, value) + class PropComparator(operators.ColumnOperators): """Defines boolean, comparison, and other operators for :class:`.MapperProperty` objects. @@ -223,8 +227,8 @@ class PropComparator(operators.ColumnOperators): SQLAlchemy allows for operators to be redefined at both the Core and ORM level. :class:`.PropComparator` is the base class of operator redefinition for ORM-level operations, - including those of :class:`.ColumnProperty`, :class:`.RelationshipProperty`, - and :class:`.CompositeProperty`. 
+ including those of :class:`.ColumnProperty`, + :class:`.RelationshipProperty`, and :class:`.CompositeProperty`. .. note:: With the advent of Hybrid properties introduced in SQLAlchemy 0.7, as well as Core-level operator redefinition in @@ -274,10 +278,10 @@ class PropComparator(operators.ColumnOperators): class SomeMappedClass(Base): some_column = column_property(Column("some_column", String), - comparator_factory=MyColumnComparator) + comparator_factory=MyColumnComparator) some_relationship = relationship(SomeOtherClass, - comparator_factory=MyRelationshipComparator) + comparator_factory=MyRelationshipComparator) some_composite = composite( Column("a", String), Column("b", String), @@ -310,7 +314,6 @@ class PropComparator(operators.ColumnOperators): self._parentmapper = parentmapper self.adapter = adapter - def __clause_element__(self): raise NotImplementedError("%r" % self) @@ -345,8 +348,8 @@ class PropComparator(operators.ColumnOperators): query.join(Company.employees.of_type(Engineer)).\\ filter(Engineer.name=='foo') - :param \class_: a class or mapper indicating that criterion will be against - this specific subclass. + :param \class_: a class or mapper indicating that criterion will be + against this specific subclass. """ @@ -363,9 +366,9 @@ class PropComparator(operators.ColumnOperators): :param criterion: an optional ClauseElement formulated against the member class' table or attributes. - :param \**kwargs: key/value pairs corresponding to member class attribute - names which will be compared via equality to the corresponding - values. + :param \**kwargs: key/value pairs corresponding to member class + attribute names which will be compared via equality to the + corresponding values. """ @@ -381,9 +384,9 @@ class PropComparator(operators.ColumnOperators): :param criterion: an optional ClauseElement formulated against the member class' table or attributes. 
- :param \**kwargs: key/value pairs corresponding to member class attribute - names which will be compared via equality to the corresponding - values. + :param \**kwargs: key/value pairs corresponding to member class + attribute names which will be compared via equality to the + corresponding values. """ @@ -456,6 +459,7 @@ class StrategizedProperty(MapperProperty): not mapper.class_manager._attr_has_impl(self.key): self.strategy.init_class_attribute(mapper) + class MapperOption(object): """Describe a modification to a Query.""" @@ -476,6 +480,7 @@ class MapperOption(object): self.process_query(query) + class PropertyOption(MapperOption): """A MapperOption that is applied to a property off the mapper or one of its child mappers, identified by a dot-separated key @@ -685,6 +690,7 @@ class PropertyOption(MapperOption): return paths + class StrategizedOption(PropertyOption): """A MapperOption that affects which LoaderStrategy will be used for an operation by a StrategizedProperty. @@ -711,6 +717,7 @@ class StrategizedOption(PropertyOption): def get_strategy_class(self): raise NotImplementedError() + class LoaderStrategy(object): """Describe the loading behavior of a StrategizedProperty object. 
@@ -758,5 +765,3 @@ class LoaderStrategy(object): def __str__(self): return str(self.parent_property) - - diff --git a/lib/sqlalchemy/orm/loading.py b/lib/sqlalchemy/orm/loading.py index df1477210..a5d156a1f 100644 --- a/lib/sqlalchemy/orm/loading.py +++ b/lib/sqlalchemy/orm/loading.py @@ -23,6 +23,7 @@ sessionlib = util.importlater("sqlalchemy.orm", "session") _new_runid = util.counter() + def instances(query, cursor, context): """Return an ORM result as an iterator.""" session = query.session @@ -96,6 +97,7 @@ def instances(query, cursor, context): if not query._yield_per: break + def merge_result(query, iterator, load=True): """Merge a result into this :class:`.Query` object's Session.""" @@ -137,6 +139,7 @@ def merge_result(query, iterator, load=True): finally: session.autoflush = autoflush + def get_from_identity(session, key, passive): """Look up the given key in the given session's identity map, check the object for expired state if found. @@ -165,6 +168,7 @@ def get_from_identity(session, key, passive): else: return None + def load_on_ident(query, key, refresh_state=None, lockmode=None, only_load_props=None): @@ -222,6 +226,7 @@ def load_on_ident(query, key, except orm_exc.NoResultFound: return None + def instance_processor(mapper, context, path, adapter, polymorphic_from=None, only_load_props=None, @@ -475,7 +480,6 @@ def instance_processor(mapper, context, path, adapter, if isnew: state.manager.dispatch.refresh(state, context, attrs) - if result is not None: if append_result: for fn in append_result: @@ -491,6 +495,7 @@ def instance_processor(mapper, context, path, adapter, return instance return _instance + def _populators(mapper, context, path, row, adapter, new_populators, existing_populators, eager_populators): """Produce a collection of attribute level row processor @@ -509,6 +514,7 @@ def _populators(mapper, context, path, row, adapter, if delayed_populators: new_populators.extend(delayed_populators) + def _configure_subclass_mapper(mapper, context, 
path, adapter): """Produce a mapper level row processor callable factory for mappers inheriting this one.""" @@ -538,6 +544,7 @@ def _configure_subclass_mapper(mapper, context, path, adapter): polymorphic_from=mapper) return configure_subclass_mapper + def load_scalar_attributes(mapper, state, attribute_names): """initiate a column-based attribute refresh operation.""" @@ -599,4 +606,3 @@ def load_scalar_attributes(mapper, state, attribute_names): # may not complete (even if PK attributes are assigned) if has_key and result is None: raise orm_exc.ObjectDeletedError(state) - diff --git a/lib/sqlalchemy/orm/mapper.py b/lib/sqlalchemy/orm/mapper.py index dfd8a12b7..b89163340 100644 --- a/lib/sqlalchemy/orm/mapper.py +++ b/lib/sqlalchemy/orm/mapper.py @@ -54,6 +54,7 @@ NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE') # lock used to synchronize the "mapper configure" step _CONFIGURE_MUTEX = util.threading.RLock() + class Mapper(_InspectionAttr): """Define the correlation of class attributes to database table columns. 
@@ -629,7 +630,6 @@ class Mapper(_InspectionAttr): column=None): self._adapt_inherited_property(key, prop, False) - def _set_polymorphic_on(self, polymorphic_on): self.polymorphic_on = polymorphic_on self._configure_polymorphic_setter(True) @@ -745,7 +745,6 @@ class Mapper(_InspectionAttr): configure_mappers() return self - @property @util.deprecated("0.7", message=":attr:`.Mapper.compiled` " "is replaced by :attr:`.Mapper.configured`") @@ -1051,8 +1050,6 @@ class Mapper(_InspectionAttr): else: self._set_polymorphic_identity = None - - def _adapt_inherited_property(self, key, prop, init): if not self.concrete: self._configure_property(key, prop, init=False, setparent=False) @@ -1409,7 +1406,6 @@ class Mapper(_InspectionAttr): return [self] return self._mappers_from_spec(*self.with_polymorphic) - @_memoized_configured_property def _with_polymorphic_selectable(self): if not self.with_polymorphic: @@ -1822,7 +1818,6 @@ class Mapper(_InspectionAttr): return state.manager[prop.key].impl.\ get_committed_value(state, dict_, passive=passive) - def _optimized_get_statement(self, state, attribute_names): """assemble a WHERE clause which retrieves a given state by primary key, using a minimized set of tables. @@ -2008,6 +2003,7 @@ class Mapper(_InspectionAttr): inspection._self_inspects(Mapper) log.class_logger(Mapper) + def configure_mappers(): """Initialize the inter-mapper relationships of all mappers that have been constructed thus far. @@ -2068,6 +2064,7 @@ def configure_mappers(): if _call_configured is not None: _call_configured.dispatch.after_configured() + def reconstructor(fn): """Decorate a method as the 'reconstructor' hook. @@ -2087,6 +2084,7 @@ def reconstructor(fn): fn.__sa_reconstructor__ = True return fn + def validates(*names, **kw): """Decorate a method as a 'validator' for one or more named properties. 
@@ -2120,11 +2118,13 @@ def validates(*names, **kw): return fn return wrap + def _event_on_load(state, ctx): instrumenting_mapper = state.manager.info[_INSTRUMENTOR] if instrumenting_mapper._reconstructor: instrumenting_mapper._reconstructor(state.obj()) + def _event_on_first_init(manager, cls): """Initial mapper compilation trigger. @@ -2138,6 +2138,7 @@ def _event_on_first_init(manager, cls): if _new_mappers: configure_mappers() + def _event_on_init(state, args, kwargs): """Run init_instance hooks. @@ -2154,6 +2155,7 @@ def _event_on_init(state, args, kwargs): if instrumenting_mapper._set_polymorphic_identity: instrumenting_mapper._set_polymorphic_identity(state) + def _event_on_resurrect(state): # re-populate the primary key elements # of the dict based on the mapping. diff --git a/lib/sqlalchemy/orm/persistence.py b/lib/sqlalchemy/orm/persistence.py index 5945addc4..4774667b2 100644 --- a/lib/sqlalchemy/orm/persistence.py +++ b/lib/sqlalchemy/orm/persistence.py @@ -20,6 +20,7 @@ from . import attributes, sync, exc as orm_exc, evaluator from .util import _state_mapper, state_str, _attr_as_key from ..sql import expression + def save_obj(base_mapper, states, uowtransaction, single=False): """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects. @@ -64,6 +65,7 @@ def save_obj(base_mapper, states, uowtransaction, single=False): _finalize_insert_update_commands(base_mapper, uowtransaction, states_to_insert, states_to_update) + def post_update(base_mapper, states, uowtransaction, post_update_cols): """Issue UPDATE statements on behalf of a relationship() which specifies post_update. 
@@ -75,7 +77,6 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols): base_mapper, states, uowtransaction) - for table, mapper in base_mapper._sorted_tables.iteritems(): update = _collect_post_update_commands(base_mapper, uowtransaction, table, states_to_update, @@ -86,6 +87,7 @@ def post_update(base_mapper, states, uowtransaction, post_update_cols): cached_connections, mapper, table, update) + def delete_obj(base_mapper, states, uowtransaction): """Issue ``DELETE`` statements for a list of objects. @@ -116,6 +118,7 @@ def delete_obj(base_mapper, states, uowtransaction): in states_to_delete: mapper.dispatch.after_delete(mapper, connection, state) + def _organize_states_for_save(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for INSERT or UPDATE. @@ -185,6 +188,7 @@ def _organize_states_for_save(base_mapper, states, uowtransaction): return states_to_insert, states_to_update + def _organize_states_for_post_update(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for UPDATE @@ -198,6 +202,7 @@ def _organize_states_for_post_update(base_mapper, states, return list(_connections_for_states(base_mapper, uowtransaction, states)) + def _organize_states_for_delete(base_mapper, states, uowtransaction): """Make an initial pass across a set of states for DELETE. 
@@ -218,6 +223,7 @@ def _organize_states_for_delete(base_mapper, states, uowtransaction): bool(state.key), connection)) return states_to_delete + def _collect_insert_commands(base_mapper, uowtransaction, table, states_to_insert): """Identify sets of values to use in INSERT statements for a @@ -261,6 +267,7 @@ def _collect_insert_commands(base_mapper, uowtransaction, table, connection, value_params, has_all_pks)) return insert + def _collect_update_commands(base_mapper, uowtransaction, table, states_to_update): """Identify sets of values to use in UPDATE statements for a @@ -412,6 +419,7 @@ def _collect_post_update_commands(base_mapper, uowtransaction, table, connection)) return update + def _collect_delete_commands(base_mapper, uowtransaction, table, states_to_delete): """Identify values to use in DELETE statements for a list of @@ -507,6 +515,7 @@ def _emit_update_statements(base_mapper, uowtransaction, c.dialect.dialect_description, stacklevel=12) + def _emit_insert_statements(base_mapper, uowtransaction, cached_connections, table, insert): """Emit INSERT statements corresponding to value lists collected @@ -582,7 +591,6 @@ def _emit_insert_statements(base_mapper, uowtransaction, value_params) - def _emit_post_update_statements(base_mapper, uowtransaction, cached_connections, mapper, table, update): """Emit UPDATE statements corresponding to value lists collected @@ -703,6 +711,7 @@ def _finalize_insert_update_commands(base_mapper, uowtransaction, else: mapper.dispatch.after_update(mapper, connection, state) + def _postfetch(mapper, uowtransaction, table, state, dict_, prefetch_cols, postfetch_cols, params, value_params): @@ -733,6 +742,7 @@ def _postfetch(mapper, uowtransaction, table, uowtransaction, mapper.passive_updates) + def _connections_for_states(base_mapper, uowtransaction, states): """Return an iterator of (state, state.dict, mapper, connection). 
@@ -762,6 +772,7 @@ def _connections_for_states(base_mapper, uowtransaction, states): yield state, state.dict, mapper, connection + def _cached_connection_dict(base_mapper): # dictionary of connection->connection_with_cache_options. return util.PopulateDict( @@ -769,6 +780,7 @@ def _cached_connection_dict(base_mapper): compiled_cache=base_mapper._compiled_cache )) + def _sort_states(states): pending = set(states) persistent = set(s for s in pending if s.key is not None) @@ -776,6 +788,7 @@ def _sort_states(states): return sorted(pending, key=operator.attrgetter("insert_order")) + \ sorted(persistent, key=lambda q: q.key[1]) + class BulkUD(object): """Handle bulk update and deletes via a :class:`.Query`.""" @@ -825,6 +838,7 @@ class BulkUD(object): def _do_post_synchronize(self): pass + class BulkEvaluate(BulkUD): """BulkUD which does the 'evaluate' method of session state resolution.""" @@ -858,6 +872,7 @@ class BulkEvaluate(BulkUD): if issubclass(cls, target_cls) and eval_condition(obj)] + class BulkFetch(BulkUD): """BulkUD which does the 'fetch' method of session state resolution.""" @@ -870,6 +885,7 @@ class BulkFetch(BulkUD): select_stmt, params=query._params).fetchall() + class BulkUpdate(BulkUD): """BulkUD which handles UPDATEs.""" @@ -899,6 +915,7 @@ class BulkUpdate(BulkUD): session.dispatch.after_bulk_update(session, self.query, self.context, self.result) + class BulkDelete(BulkUD): """BulkUD which handles DELETEs.""" @@ -927,6 +944,7 @@ class BulkDelete(BulkUD): session.dispatch.after_bulk_delete(session, self.query, self.context, self.result) + class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate): """BulkUD which handles UPDATEs using the "evaluate" method of session resolution.""" @@ -962,6 +980,7 @@ class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate): states.add(state) session._register_altered(states) + class BulkDeleteEvaluate(BulkEvaluate, BulkDelete): """BulkUD which handles DELETEs using the "evaluate" method of session resolution.""" @@ -971,6 
+990,7 @@ class BulkDeleteEvaluate(BulkEvaluate, BulkDelete): [attributes.instance_state(obj) for obj in self.matched_objects]) + class BulkUpdateFetch(BulkFetch, BulkUpdate): """BulkUD which handles UPDATEs using the "fetch" method of session resolution.""" @@ -993,6 +1013,7 @@ class BulkUpdateFetch(BulkFetch, BulkUpdate): session._expire_state(state, attrib) session._register_altered(states) + class BulkDeleteFetch(BulkFetch, BulkDelete): """BulkUD which handles DELETEs using the "fetch" method of session resolution.""" @@ -1011,4 +1032,3 @@ class BulkDeleteFetch(BulkFetch, BulkDelete): session.identity_map[identity_key] )] ) - diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index e2b5e94e0..12656952a 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -44,7 +44,8 @@ class ColumnProperty(StrategizedProperty): def __init__(self, *columns, **kwargs): """Construct a ColumnProperty. - Note the public constructor is the :func:`.orm.column_property` function. + Note the public constructor is the :func:`.orm.column_property` + function. :param \*columns: The list of `columns` describes a single object property. If there are multiple tables joined @@ -108,7 +109,6 @@ class ColumnProperty(StrategizedProperty): else: self.strategy_class = strategies.ColumnLoader - @property def expression(self): """Return the primary column or expression for this ColumnProperty. @@ -170,7 +170,8 @@ class ColumnProperty(StrategizedProperty): """Produce boolean, comparison, and other operators for :class:`.ColumnProperty` attributes. - See the documentation for :class:`.PropComparator` for a brief overview. + See the documentation for :class:`.PropComparator` for a brief + overview. 
See also: @@ -189,8 +190,8 @@ class ColumnProperty(StrategizedProperty): return self.adapter(self.prop.columns[0]) else: return self.prop.columns[0]._annotate({ - "parententity": self._parentmapper, - "parentmapper": self._parentmapper}) + "parententity": self._parentmapper, + "parentmapper": self._parentmapper}) def __getattr__(self, key): """proxy attribute access down to the mapped column. @@ -214,6 +215,7 @@ class ColumnProperty(StrategizedProperty): log.class_logger(ColumnProperty) + class RelationshipProperty(StrategizedProperty): """Describes an object property that holds a single item or list of items that correspond to a related database table. @@ -541,7 +543,8 @@ class RelationshipProperty(StrategizedProperty): # should not correlate or otherwise reach out # to anything in the enclosing query. if criterion is not None: - criterion = criterion._annotate({'no_replacement_traverse': True}) + criterion = criterion._annotate( + {'no_replacement_traverse': True}) crit = j & criterion @@ -582,7 +585,8 @@ class RelationshipProperty(StrategizedProperty): will produce:: SELECT * FROM my_table WHERE - NOT EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id) + NOT EXISTS (SELECT 1 FROM related WHERE + related.my_id=my_table.id) :meth:`~.RelationshipProperty.Comparator.any` is only valid for collections, i.e. 
a :func:`.relationship` @@ -612,8 +616,8 @@ class RelationshipProperty(StrategizedProperty): Will produce a query like:: SELECT * FROM my_table WHERE - EXISTS (SELECT 1 FROM related WHERE related.id==my_table.related_id - AND related.x=2) + EXISTS (SELECT 1 FROM related WHERE + related.id==my_table.related_id AND related.x=2) Because :meth:`~.RelationshipProperty.Comparator.has` uses a correlated subquery, its performance is not nearly as @@ -706,10 +710,9 @@ class RelationshipProperty(StrategizedProperty): state = attributes.instance_state(other) def state_bindparam(x, state, col): - o = state.obj() # strong ref - return sql.bindparam(x, unique=True, callable_=lambda : \ - self.property.mapper._get_committed_attr_by_column(o, - col)) + o = state.obj() # strong ref + return sql.bindparam(x, unique=True, callable_=lambda: \ + self.property.mapper._get_committed_attr_by_column(o, col)) def adapt(col): if self.adapter: @@ -724,7 +727,7 @@ class RelationshipProperty(StrategizedProperty): adapt(x) == None) for (x, y) in self.property.local_remote_pairs]) - criterion = sql.and_(*[x==y for (x, y) in + criterion = sql.and_(*[x == y for (x, y) in zip( self.property.mapper.primary_key, self.property.\ @@ -835,7 +838,6 @@ class RelationshipProperty(StrategizedProperty): if (source_state, r) in _recursive: return - if not "merge" in self.cascade: return @@ -912,8 +914,8 @@ class RelationshipProperty(StrategizedProperty): else: return [(attributes.instance_state(x), x)] - - def cascade_iterator(self, type_, state, dict_, visited_states, halt_on=None): + def cascade_iterator(self, type_, state, dict_, + visited_states, halt_on=None): #assert type_ in self.cascade # only actively lazy load on the 'delete' cascade @@ -967,7 +969,6 @@ class RelationshipProperty(StrategizedProperty): yield c, instance_mapper, instance_state, instance_dict - def _add_reverse_property(self, key): other = self.mapper.get_property(key, _configure_mappers=False) self._reverse_property.add(other) @@ 
-1140,7 +1141,6 @@ class RelationshipProperty(StrategizedProperty): "cause dependency issues during flush" % (self.key, self.parent, inheriting)) - def _check_cascade_settings(self): if self.cascade.delete_orphan and not self.single_parent \ and (self.direction is MANYTOMANY or self.direction @@ -1288,4 +1288,3 @@ class RelationshipProperty(StrategizedProperty): PropertyLoader = RelationProperty = RelationshipProperty log.class_logger(RelationshipProperty) - diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index a34fd882a..ca334e273 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -27,7 +27,7 @@ from . import ( from .util import ( AliasedClass, ORMAdapter, _entity_descriptor, PathRegistry, _is_aliased_class, _is_mapped_class, _orm_columns, - join as orm_join,with_parent, aliased + join as orm_join, with_parent, aliased ) from .. import sql, util, log, exc as sa_exc, inspect, inspection, \ types as sqltypes @@ -54,6 +54,7 @@ def _generative(*assertions): _path_registry = PathRegistry.root + class Query(object): """ORM-level SQL construction object. @@ -177,7 +178,6 @@ class Query(object): self._from_obj_alias = sql_util.ColumnAdapter( self._from_obj[0], equivs) - def _reset_polymorphic_adapter(self, mapper): for m2 in mapper._with_polymorphic_mappers: self._polymorphic_adapters.pop(m2, None) @@ -274,7 +274,6 @@ class Query(object): return self._select_from_entity or \ self._entity_zero().entity_zero - @property def _mapper_entities(self): # TODO: this is wrong, its hardcoded to "primary entity" when @@ -325,7 +324,6 @@ class Query(object): ) return self._entity_zero() - def __all_equivs(self): equivs = {} for ent in self._mapper_entities: @@ -540,10 +538,9 @@ class Query(object): return self.enable_eagerloads(False).statement.label(name) - def as_scalar(self): - """Return the full SELECT statement represented by this :class:`.Query`, converted - to a scalar subquery. 
+ """Return the full SELECT statement represented by this + :class:`.Query`, converted to a scalar subquery. Analogous to :meth:`sqlalchemy.sql.SelectBaseMixin.as_scalar`. @@ -618,7 +615,8 @@ class Query(object): @property def whereclause(self): - """A readonly attribute which returns the current WHERE criterion for this Query. + """A readonly attribute which returns the current WHERE criterion for + this Query. This returned value is a SQL expression construct, or ``None`` if no criterion has been established. @@ -648,11 +646,11 @@ class Query(object): :meth:`.Query.with_polymorphic` applies transformations to the "main" mapped class represented by this :class:`.Query`. The "main" mapped class here means the :class:`.Query` - object's first argument is a full class, i.e. ``session.query(SomeClass)``. - These transformations allow additional tables to be present - in the FROM clause so that columns for a joined-inheritance - subclass are available in the query, both for the purposes - of load-time efficiency as well as the ability to use + object's first argument is a full class, i.e. + ``session.query(SomeClass)``. These transformations allow additional + tables to be present in the FROM clause so that columns for a + joined-inheritance subclass are available in the query, both for the + purposes of load-time efficiency as well as the ability to use these columns at query time. See the documentation section :ref:`with_polymorphic` for @@ -783,7 +781,8 @@ class Query(object): not mapper.always_refresh and \ self._lockmode is None: - instance = loading.get_from_identity(self.session, key, attributes.PASSIVE_OFF) + instance = loading.get_from_identity( + self.session, key, attributes.PASSIVE_OFF) if instance is not None: # reject calls for id in identity map but class # mismatch. @@ -980,8 +979,8 @@ class Query(object): @_generative() def with_entities(self, *entities): - """Return a new :class:`.Query` replacing the SELECT list with the given - entities. 
+ """Return a new :class:`.Query` replacing the SELECT list with the + given entities. e.g.:: @@ -1006,7 +1005,6 @@ class Query(object): """ self._set_entities(entities) - @_generative() def add_columns(self, *column): """Add one or more column expressions to the list @@ -1024,13 +1022,13 @@ class Query(object): ":meth:`.add_column` is superseded by :meth:`.add_columns`", False) def add_column(self, column): - """Add a column expression to the list of result columns to be returned. + """Add a column expression to the list of result columns to be + returned. Pending deprecation: :meth:`.add_column` will be superseded by :meth:`.add_columns`. """ - return self.add_columns(column) def options(self, *args): @@ -1340,9 +1338,8 @@ class Query(object): """ - return self._from_selectable( - expression.union(*([self]+ list(q)))) + expression.union(*([self] + list(q)))) def union_all(self, *q): """Produce a UNION ALL of this Query against one or more queries. @@ -1352,7 +1349,7 @@ class Query(object): """ return self._from_selectable( - expression.union_all(*([self]+ list(q))) + expression.union_all(*([self] + list(q))) ) def intersect(self, *q): @@ -1363,7 +1360,7 @@ class Query(object): """ return self._from_selectable( - expression.intersect(*([self]+ list(q))) + expression.intersect(*([self] + list(q))) ) def intersect_all(self, *q): @@ -1374,7 +1371,7 @@ class Query(object): """ return self._from_selectable( - expression.intersect_all(*([self]+ list(q))) + expression.intersect_all(*([self] + list(q))) ) def except_(self, *q): @@ -1385,7 +1382,7 @@ class Query(object): """ return self._from_selectable( - expression.except_(*([self]+ list(q))) + expression.except_(*([self] + list(q))) ) def except_all(self, *q): @@ -1396,7 +1393,7 @@ class Query(object): """ return self._from_selectable( - expression.except_all(*([self]+ list(q))) + expression.except_all(*([self] + list(q))) ) def join(self, *props, **kwargs): @@ -1422,9 +1419,9 @@ class Query(object): In the above 
example we refer to ``User.addresses`` as passed to :meth:`~.Query.join` as the *on clause*, that is, it indicates how the "ON" portion of the JOIN should be constructed. For a - single-entity query such as the one above (i.e. we start by selecting only from - ``User`` and nothing else), the relationship can also be specified by its - string name:: + single-entity query such as the one above (i.e. we start by selecting + only from ``User`` and nothing else), the relationship can also be + specified by its string name:: q = session.query(User).join("addresses") @@ -1434,8 +1431,9 @@ class Query(object): q = session.query(User).join("orders", "items", "keywords") - The above would be shorthand for three separate calls to :meth:`~.Query.join`, - each using an explicit attribute to indicate the source entity:: + The above would be shorthand for three separate calls to + :meth:`~.Query.join`, each using an explicit attribute to indicate + the source entity:: q = session.query(User).\\ join(User.orders).\\ @@ -1511,25 +1509,26 @@ class Query(object): There is a lot of flexibility in what the "target" can be when using :meth:`~.Query.join`. As noted previously, it also accepts - :class:`.Table` constructs and other selectables such as :func:`.alias` - and :func:`.select` constructs, with either the one or two-argument forms:: + :class:`.Table` constructs and other selectables such as + :func:`.alias` and :func:`.select` constructs, with either the one + or two-argument forms:: addresses_q = select([Address.user_id]).\\ - where(Address.email_address.endswith("@bar.com")).\\ - alias() + where(Address.email_address.endswith("@bar.com")).\\ + alias() q = session.query(User).\\ join(addresses_q, addresses_q.c.user_id==User.id) :meth:`~.Query.join` also features the ability to *adapt* a - :meth:`~sqlalchemy.orm.relationship` -driven ON clause to the target selectable. 
- Below we construct a JOIN from ``User`` to a subquery against ``Address``, allowing - the relationship denoted by ``User.addresses`` to *adapt* itself - to the altered target:: + :meth:`~sqlalchemy.orm.relationship` -driven ON clause to the target + selectable. Below we construct a JOIN from ``User`` to a subquery + against ``Address``, allowing the relationship denoted by + ``User.addresses`` to *adapt* itself to the altered target:: address_subq = session.query(Address).\\ - filter(Address.email_address == 'ed@foo.com').\\ - subquery() + filter(Address.email_address == 'ed@foo.com').\\ + subquery() q = session.query(User).join(address_subq, User.addresses) @@ -1811,7 +1810,7 @@ class Query(object): if not create_aliases and prop: self._update_joinpoint({ '_joinpoint_entity': right, - 'prev':((left, right, prop.key), self._joinpoint) + 'prev': ((left, right, prop.key), self._joinpoint) }) else: self._joinpoint = { @@ -2051,7 +2050,7 @@ class Query(object): if item == -1: return list(self)[-1] else: - return list(self[item:item+1])[0] + return list(self[item:item + 1])[0] @_generative(_no_statement_condition) def slice(self, start, stop): @@ -2261,8 +2260,8 @@ class Query(object): def _execute_and_instances(self, querycontext): conn = self._connection_from_session( - mapper = self._mapper_zero_or_none(), - clause = querycontext.statement, + mapper=self._mapper_zero_or_none(), + clause=querycontext.statement, close_with_result=True) result = conn.execute(querycontext.statement, self._params) @@ -2330,20 +2329,20 @@ class Query(object): return loading.instances(self, cursor, context) - def merge_result(self, iterator, load=True): """Merge a result into this :class:`.Query` object's Session. - Given an iterator returned by a :class:`.Query` of the same structure as this - one, return an identical iterator of results, with all mapped - instances merged into the session using :meth:`.Session.merge`. 
This is an - optimized method which will merge all mapped instances, preserving the - structure of the result rows and unmapped columns with less method - overhead than that of calling :meth:`.Session.merge` explicitly for each - value. + Given an iterator returned by a :class:`.Query` of the same structure + as this one, return an identical iterator of results, with all mapped + instances merged into the session using :meth:`.Session.merge`. This + is an optimized method which will merge all mapped instances, + preserving the structure of the result rows and unmapped columns with + less method overhead than that of calling :meth:`.Session.merge` + explicitly for each value. The structure of the results is determined based on the column list of - this :class:`.Query` - if these do not correspond, unchecked errors will occur. + this :class:`.Query` - if these do not correspond, unchecked errors + will occur. The 'load' argument is the same as that of :meth:`.Session.merge`. @@ -2359,12 +2358,12 @@ class Query(object): @property def _select_args(self): return { - 'limit':self._limit, - 'offset':self._offset, - 'distinct':self._distinct, - 'prefixes':self._prefixes, - 'group_by':self._group_by or None, - 'having':self._having + 'limit': self._limit, + 'offset': self._offset, + 'distinct': self._distinct, + 'prefixes': self._prefixes, + 'group_by': self._group_by or None, + 'having': self._having } @property @@ -2435,8 +2434,8 @@ class Query(object): removed from the session. Matched objects are removed from the session. - ``'evaluate'`` - Evaluate the query's criteria in Python straight on - the objects in the session. If evaluation of the criteria isn't + ``'evaluate'`` - Evaluate the query's criteria in Python straight + on the objects in the session. If evaluation of the criteria isn't implemented, an error is raised. In that case you probably want to use the 'fetch' strategy as a fallback. 
@@ -2487,8 +2486,8 @@ class Query(object): objects that are matched by the update query. The updated attributes are expired on matched objects. - ``'evaluate'`` - Evaluate the Query's criteria in Python straight on - the objects in the session. If evaluation of the criteria isn't + ``'evaluate'`` - Evaluate the Query's criteria in Python straight + on the objects in the session. If evaluation of the criteria isn't implemented, an exception is raised. The expression evaluator currently doesn't account for differing @@ -2522,7 +2521,6 @@ class Query(object): update_op.exec_() return update_op.rowcount - _lockmode_lookup = { 'read': 'read', 'read_nowait': 'read_nowait', @@ -2708,6 +2706,7 @@ class Query(object): inspection._self_inspects(Query) + class _QueryEntity(object): """represent an entity column returned within a Query result.""" @@ -2726,6 +2725,7 @@ class _QueryEntity(object): q.__dict__ = self.__dict__.copy() return q + class _MapperEntity(_QueryEntity): """mapper/class/AliasedClass entity""" @@ -2739,7 +2739,7 @@ class _MapperEntity(_QueryEntity): def setup_entity(self, ext_info, aliased_adapter): self.mapper = ext_info.mapper self.aliased_adapter = aliased_adapter - self.selectable = ext_info.selectable + self.selectable = ext_info.selectable self.is_aliased_class = ext_info.is_aliased_class self._with_polymorphic = ext_info.with_polymorphic_mappers self._polymorphic_discriminator = \ @@ -2829,28 +2829,27 @@ class _MapperEntity(_QueryEntity): # require row aliasing unconditionally. 
if not adapter and self.mapper._requires_row_aliasing: adapter = sql_util.ColumnAdapter( - self.selectable, - self.mapper._equivalent_columns) + self.selectable, + self.mapper._equivalent_columns) if self.primary_entity: _instance = loading.instance_processor( - self.mapper, - context, - self.path, - adapter, - only_load_props=query._only_load_props, - refresh_state=context.refresh_state, - polymorphic_discriminator= - self._polymorphic_discriminator + self.mapper, + context, + self.path, + adapter, + only_load_props=query._only_load_props, + refresh_state=context.refresh_state, + polymorphic_discriminator=self._polymorphic_discriminator ) else: _instance = loading.instance_processor( - self.mapper, - context, - self.path, - adapter, - polymorphic_discriminator= - self._polymorphic_discriminator) + self.mapper, + context, + self.path, + adapter, + polymorphic_discriminator=self._polymorphic_discriminator + ) return _instance, self._label_name @@ -2902,6 +2901,7 @@ class _MapperEntity(_QueryEntity): def __str__(self): return str(self.mapper) + class _ColumnEntity(_QueryEntity): """Column/expression based entity.""" @@ -2931,7 +2931,6 @@ class _ColumnEntity(_QueryEntity): if c is not column: return - if not isinstance(column, sql.ColumnElement): raise sa_exc.InvalidRequestError( "SQL expression, column, or mapped entity " @@ -2981,7 +2980,6 @@ class _ColumnEntity(_QueryEntity): else: self.entity_zero = None - @property def entity_zero_or_selectable(self): if self.entity_zero is not None: @@ -2995,7 +2993,6 @@ class _ColumnEntity(_QueryEntity): def type(self): return self.column.type - def adapt_to_selectable(self, query, sel): c = _ColumnEntity(query, sel.corresponding_column(self.column)) c._label_name = self._label_name @@ -3040,8 +3037,10 @@ class _ColumnEntity(_QueryEntity): def __str__(self): return str(self.column) + log.class_logger(Query) + class QueryContext(object): multi_row_eager_loaders = False adapter = None @@ -3089,5 +3088,3 @@ class 
AliasOption(interfaces.MapperOption): else: alias = self.alias query._from_obj_alias = sql_util.ColumnAdapter(alias) - - diff --git a/lib/sqlalchemy/orm/relationships.py b/lib/sqlalchemy/orm/relationships.py index 373fba785..24ca32d9a 100644 --- a/lib/sqlalchemy/orm/relationships.py +++ b/lib/sqlalchemy/orm/relationships.py @@ -22,6 +22,7 @@ from ..sql.util import ( from ..sql import operators, expression, visitors from .interfaces import MANYTOMANY, MANYTOONE, ONETOMANY + def remote(expr): """Annotate a portion of a primaryjoin expression with a 'remote' annotation. @@ -41,6 +42,7 @@ def remote(expr): return _annotate_columns(expression._clause_element_as_expr(expr), {"remote": True}) + def foreign(expr): """Annotate a portion of a primaryjoin expression with a 'foreign' annotation. @@ -73,6 +75,7 @@ def _annotate_columns(element, annotations): element = clone(element) return element + class JoinCondition(object): def __init__(self, parent_selectable, @@ -166,35 +169,33 @@ class JoinCondition(object): # general mapped table, which in the case of inheritance is # a join. 
try: + consider_as_foreign_keys = self.consider_as_foreign_keys or None if self.secondary is not None: if self.secondaryjoin is None: self.secondaryjoin = \ join_condition( - self.child_selectable, - self.secondary, - a_subset=self.child_local_selectable, - consider_as_foreign_keys=\ - self.consider_as_foreign_keys or None - ) + self.child_selectable, + self.secondary, + a_subset=self.child_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys + ) if self.primaryjoin is None: self.primaryjoin = \ join_condition( - self.parent_selectable, - self.secondary, - a_subset=self.parent_local_selectable, - consider_as_foreign_keys=\ - self.consider_as_foreign_keys or None - ) + self.parent_selectable, + self.secondary, + a_subset=self.parent_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys + ) else: if self.primaryjoin is None: self.primaryjoin = \ join_condition( - self.parent_selectable, - self.child_selectable, - a_subset=self.parent_local_selectable, - consider_as_foreign_keys=\ - self.consider_as_foreign_keys or None - ) + self.parent_selectable, + self.child_selectable, + a_subset=self.parent_local_selectable, + consider_as_foreign_keys=consider_as_foreign_keys + ) except sa_exc.NoForeignKeysError: if self.secondary is not None: raise sa_exc.NoForeignKeysError("Could not determine join " @@ -312,7 +313,7 @@ class JoinCondition(object): def _annotate_from_fk_list(self): def check_fk(col): if col in self.consider_as_foreign_keys: - return col._annotate({"foreign":True}) + return col._annotate({"foreign": True}) self.primaryjoin = visitors.replacement_traverse( self.primaryjoin, {}, @@ -330,6 +331,7 @@ class JoinCondition(object): secondarycols = util.column_set(self.secondary.c) else: secondarycols = set() + def is_foreign(a, b): if isinstance(a, schema.Column) and \ isinstance(b, schema.Column): @@ -355,21 +357,21 @@ class JoinCondition(object): if col is not None: if col.compare(binary.left): binary.left = binary.left._annotate( - 
{"foreign":True}) + {"foreign": True}) elif col.compare(binary.right): binary.right = binary.right._annotate( - {"foreign":True}) + {"foreign": True}) self.primaryjoin = visitors.cloned_traverse( self.primaryjoin, {}, - {"binary":visit_binary} + {"binary": visit_binary} ) if self.secondaryjoin is not None: self.secondaryjoin = visitors.cloned_traverse( self.secondaryjoin, {}, - {"binary":visit_binary} + {"binary": visit_binary} ) def _refers_to_parent_table(self): @@ -380,6 +382,7 @@ class JoinCondition(object): pt = self.parent_selectable mt = self.child_selectable result = [False] + def visit_binary(binary): c, f = binary.left, binary.right if ( @@ -394,7 +397,7 @@ class JoinCondition(object): visitors.traverse( self.primaryjoin, {}, - {"binary":visit_binary} + {"binary": visit_binary} ) return result[0] @@ -421,7 +424,7 @@ class JoinCondition(object): elif self._local_remote_pairs or self._remote_side: self._annotate_remote_from_args() elif self._refers_to_parent_table(): - self._annotate_selfref(lambda col:"foreign" in col._annotations) + self._annotate_selfref(lambda col: "foreign" in col._annotations) elif self._tables_overlap(): self._annotate_remote_with_overlap() else: @@ -434,7 +437,7 @@ class JoinCondition(object): """ def repl(element): if self.secondary.c.contains_column(element): - return element._annotate({"remote":True}) + return element._annotate({"remote": True}) self.primaryjoin = visitors.replacement_traverse( self.primaryjoin, {}, repl) self.secondaryjoin = visitors.replacement_traverse( @@ -451,17 +454,17 @@ class JoinCondition(object): isinstance(binary.right, expression.ColumnClause): # assume one to many - FKs are "remote" if fn(binary.left): - binary.left = binary.left._annotate({"remote":True}) + binary.left = binary.left._annotate({"remote": True}) if fn(binary.right) and \ not equated: binary.right = binary.right._annotate( - {"remote":True}) + {"remote": True}) else: self._warn_non_column_elements() self.primaryjoin = 
visitors.cloned_traverse( self.primaryjoin, {}, - {"binary":visit_binary}) + {"binary": visit_binary}) def _annotate_remote_from_args(self): """annotate 'remote' in primaryjoin, secondaryjoin @@ -481,11 +484,11 @@ class JoinCondition(object): remote_side = self._remote_side if self._refers_to_parent_table(): - self._annotate_selfref(lambda col:col in remote_side) + self._annotate_selfref(lambda col: col in remote_side) else: def repl(element): if element in remote_side: - return element._annotate({"remote":True}) + return element._annotate({"remote": True}) self.primaryjoin = visitors.replacement_traverse( self.primaryjoin, {}, repl) @@ -501,20 +504,21 @@ class JoinCondition(object): binary.right) binary.right, binary.left = proc_left_right(binary.right, binary.left) + def proc_left_right(left, right): if isinstance(left, expression.ColumnClause) and \ isinstance(right, expression.ColumnClause): if self.child_selectable.c.contains_column(right) and \ self.parent_selectable.c.contains_column(left): - right = right._annotate({"remote":True}) + right = right._annotate({"remote": True}) else: - self._warn_non_column_elements() + self._warn_non_column_elements() return left, right self.primaryjoin = visitors.cloned_traverse( self.primaryjoin, {}, - {"binary":visit_binary}) + {"binary": visit_binary}) def _annotate_remote_distinct_selectables(self): """annotate 'remote' in primaryjoin, secondaryjoin @@ -530,7 +534,7 @@ class JoinCondition(object): or self.child_local_selectable.c.\ contains_column(element) ): - return element._annotate({"remote":True}) + return element._annotate({"remote": True}) self.primaryjoin = visitors.replacement_traverse( self.primaryjoin, {}, repl) @@ -565,7 +569,7 @@ class JoinCondition(object): def locals_(elem): if "remote" not in elem._annotations and \ elem in local_side: - return elem._annotate({"local":True}) + return elem._annotate({"local": True}) self.primaryjoin = visitors.replacement_traverse( self.primaryjoin, {}, locals_ ) @@ -736,7 
+740,8 @@ class JoinCondition(object): self.local_remote_pairs = self._deannotate_pairs(lrp) self.synchronize_pairs = self._deannotate_pairs(sync_pairs) - self.secondary_synchronize_pairs = self._deannotate_pairs(secondary_sync_pairs) + self.secondary_synchronize_pairs = \ + self._deannotate_pairs(secondary_sync_pairs) @util.memoized_property def remote_columns(self): @@ -780,7 +785,6 @@ class JoinCondition(object): if annotation.issubset(col._annotations) ]) - def join_targets(self, source_selectable, dest_selectable, aliased, @@ -801,7 +805,7 @@ class JoinCondition(object): # regardless of context. dest_selectable = _shallow_annotate( dest_selectable, - {'no_replacement_traverse':True}) + {'no_replacement_traverse': True}) primaryjoin, secondaryjoin, secondary = self.primaryjoin, \ self.secondaryjoin, self.secondary @@ -894,6 +898,3 @@ class JoinCondition(object): bind_to_col = dict((binds[col].key, col) for col in binds) return lazywhere, bind_to_col, equated_columns - - - diff --git a/lib/sqlalchemy/orm/scoping.py b/lib/sqlalchemy/orm/scoping.py index 3c4a0e5d8..e0ee62012 100644 --- a/lib/sqlalchemy/orm/scoping.py +++ b/lib/sqlalchemy/orm/scoping.py @@ -87,7 +87,8 @@ class scoped_session(object): self.registry.clear() def configure(self, **kwargs): - """reconfigure the :class:`.sessionmaker` used by this :class:`.scoped_session`. + """reconfigure the :class:`.sessionmaker` used by this + :class:`.scoped_session`. See :meth:`.sessionmaker.configure`. 
@@ -142,27 +143,34 @@ class scoped_session(object): ScopedSession = scoped_session """Old name for backwards compatibility.""" + def instrument(name): def do(self, *args, **kwargs): return getattr(self.registry(), name)(*args, **kwargs) return do + for meth in Session.public_methods: setattr(scoped_session, meth, instrument(meth)) + def makeprop(name): def set(self, attr): setattr(self.registry(), name, attr) + def get(self): return getattr(self.registry(), name) + return property(get, set) + for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map', - 'is_active', 'autoflush', 'no_autoflush'): + 'is_active', 'autoflush', 'no_autoflush'): setattr(scoped_session, prop, makeprop(prop)) + def clslevel(name): def do(cls, *args, **kwargs): return getattr(Session, name)(*args, **kwargs) return classmethod(do) + for prop in ('close_all', 'object_session', 'identity_key'): setattr(scoped_session, prop, clslevel(prop)) - diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index 5a8b086d9..e4cb90847 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -56,6 +56,7 @@ class _SessionClassMethods(object): return object_session(instance) + class SessionTransaction(object): """A :class:`.Session`-level transaction. @@ -66,52 +67,55 @@ class SessionTransaction(object): back all at once. It also provides optional two-phase commit behavior which can augment this coordination operation. - The :attr:`.Session.transaction` attribute of :class:`.Session` refers to the - current :class:`.SessionTransaction` object in use, if any. + The :attr:`.Session.transaction` attribute of :class:`.Session` + refers to the current :class:`.SessionTransaction` object in use, if any. A :class:`.SessionTransaction` is associated with a :class:`.Session` in its default mode of ``autocommit=False`` immediately, associated with no database connections. 
As the :class:`.Session` is called upon to emit SQL on behalf of various :class:`.Engine` or :class:`.Connection` - objects, a corresponding :class:`.Connection` and associated :class:`.Transaction` - is added to a collection within the :class:`.SessionTransaction` object, - becoming one of the connection/transaction pairs maintained by the + objects, a corresponding :class:`.Connection` and associated + :class:`.Transaction` is added to a collection within the + :class:`.SessionTransaction` object, becoming one of the + connection/transaction pairs maintained by the :class:`.SessionTransaction`. The lifespan of the :class:`.SessionTransaction` ends when the - :meth:`.Session.commit`, :meth:`.Session.rollback` or :meth:`.Session.close` - methods are called. At this point, the :class:`.SessionTransaction` removes - its association with its parent :class:`.Session`. A :class:`.Session` - that is in ``autocommit=False`` mode will create a new - :class:`.SessionTransaction` to replace it immediately, whereas a - :class:`.Session` that's in ``autocommit=True`` + :meth:`.Session.commit`, :meth:`.Session.rollback` or + :meth:`.Session.close` methods are called. At this point, the + :class:`.SessionTransaction` removes its association with its parent + :class:`.Session`. A :class:`.Session` that is in ``autocommit=False`` + mode will create a new :class:`.SessionTransaction` to replace it + immediately, whereas a :class:`.Session` that's in ``autocommit=True`` mode will remain without a :class:`.SessionTransaction` until the :meth:`.Session.begin` method is called. Another detail of :class:`.SessionTransaction` behavior is that it is - capable of "nesting". This means that the :meth:`.Session.begin` method can - be called while an existing :class:`.SessionTransaction` is already present, - producing a new :class:`.SessionTransaction` that temporarily replaces - the parent :class:`.SessionTransaction`. 
When a :class:`.SessionTransaction` - is produced as nested, it assigns itself to the :attr:`.Session.transaction` - attribute. When it is ended via :meth:`.Session.commit` or :meth:`.Session.rollback`, - it restores its parent :class:`.SessionTransaction` back onto the - :attr:`.Session.transaction` attribute. The - behavior is effectively a stack, where :attr:`.Session.transaction` refers - to the current head of the stack. - - The purpose of this stack is to allow nesting of :meth:`.Session.rollback` or - :meth:`.Session.commit` calls in context with various flavors of :meth:`.Session.begin`. - This nesting behavior applies to when :meth:`.Session.begin_nested` - is used to emit a SAVEPOINT transaction, and is also used to produce - a so-called "subtransaction" which allows a block of code to use a + capable of "nesting". This means that the :meth:`.Session.begin` method + can be called while an existing :class:`.SessionTransaction` is already + present, producing a new :class:`.SessionTransaction` that temporarily + replaces the parent :class:`.SessionTransaction`. When a + :class:`.SessionTransaction` is produced as nested, it assigns itself to + the :attr:`.Session.transaction` attribute. When it is ended via + :meth:`.Session.commit` or :meth:`.Session.rollback`, it restores its + parent :class:`.SessionTransaction` back onto the + :attr:`.Session.transaction` attribute. The behavior is effectively a + stack, where :attr:`.Session.transaction` refers to the current head of + the stack. + + The purpose of this stack is to allow nesting of + :meth:`.Session.rollback` or :meth:`.Session.commit` calls in context + with various flavors of :meth:`.Session.begin`. 
This nesting behavior + applies to when :meth:`.Session.begin_nested` is used to emit a + SAVEPOINT transaction, and is also used to produce a so-called + "subtransaction" which allows a block of code to use a begin/rollback/commit sequence regardless of whether or not its enclosing - code block has begun a transaction. The :meth:`.flush` method, whether called - explicitly or via autoflush, is the primary consumer of the "subtransaction" - feature, in that it wishes to guarantee that it works within in a transaction block - regardless of whether or not the :class:`.Session` is in transactional mode - when the method is called. + code block has begun a transaction. The :meth:`.flush` method, whether + called explicitly or via autoflush, is the primary consumer of the + "subtransaction" feature, in that it wishes to guarantee that it works + within in a transaction block regardless of whether or not the + :class:`.Session` is in transactional mode when the method is called. See also: @@ -425,6 +429,7 @@ class SessionTransaction(object): else: self.rollback() + class Session(_SessionClassMethods): """Manages persistence operations for ORM-mapped objects. @@ -441,7 +446,6 @@ class Session(_SessionClassMethods): 'merge', 'query', 'refresh', 'rollback', 'scalar') - def __init__(self, bind=None, autoflush=True, expire_on_commit=True, _enable_transaction_accounting=True, autocommit=False, twophase=False, @@ -453,42 +457,46 @@ class Session(_SessionClassMethods): generate a :class:`.Session`-producing callable with a given set of arguments. - :param autocommit: Defaults to ``False``. When ``True``, the ``Session`` - does not keep a persistent transaction running, and will acquire - connections from the engine on an as-needed basis, returning them - immediately after their use. Flushes will begin and commit (or possibly - rollback) their own transaction if no transaction is present. 
When using - this mode, the `session.begin()` method may be used to begin a - transaction explicitly. - - Leaving it on its default value of ``False`` means that the ``Session`` - will acquire a connection and begin a transaction the first time it is - used, which it will maintain persistently until ``rollback()``, - ``commit()``, or ``close()`` is called. When the transaction is released - by any of these methods, the ``Session`` is ready for the next usage, - which will again acquire and maintain a new connection/transaction. + :param autocommit: Defaults to ``False``. When ``True``, the + ``Session`` does not keep a persistent transaction running, and + will acquire connections from the engine on an as-needed basis, + returning them immediately after their use. Flushes will begin and + commit (or possibly rollback) their own transaction if no + transaction is present. When using this mode, the + `session.begin()` method may be used to begin a transaction + explicitly. + + Leaving it on its default value of ``False`` means that the + ``Session`` will acquire a connection and begin a transaction the + first time it is used, which it will maintain persistently until + ``rollback()``, ``commit()``, or ``close()`` is called. When the + transaction is released by any of these methods, the ``Session`` + is ready for the next usage, which will again acquire and maintain + a new connection/transaction. :param autoflush: When ``True``, all query operations will issue a ``flush()`` call to this ``Session`` before proceeding. This is a - convenience feature so that ``flush()`` need not be called repeatedly - in order for database queries to retrieve results. It's typical that - ``autoflush`` is used in conjunction with ``autocommit=False``. In this - scenario, explicit calls to ``flush()`` are rarely needed; you usually - only need to call ``commit()`` (which flushes) to finalize changes. 
+ convenience feature so that ``flush()`` need not be called + repeatedly in order for database queries to retrieve results. It's + typical that ``autoflush`` is used in conjunction with + ``autocommit=False``. In this scenario, explicit calls to + ``flush()`` are rarely needed; you usually only need to call + ``commit()`` (which flushes) to finalize changes. :param bind: An optional ``Engine`` or ``Connection`` to which this ``Session`` should be bound. When specified, all SQL operations performed by this session will execute via this connectable. - :param binds: An optional dictionary which contains more granular "bind" - information than the ``bind`` parameter provides. This dictionary can - map individual ``Table`` instances as well as ``Mapper`` instances to - individual ``Engine`` or ``Connection`` objects. Operations which - proceed relative to a particular ``Mapper`` will consult this - dictionary for the direct ``Mapper`` instance as well as the mapper's - ``mapped_table`` attribute in order to locate an connectable to use. - The full resolution is described in the ``get_bind()`` method of - ``Session``. Usage looks like:: + :param binds: An optional dictionary which contains more granular + "bind" information than the ``bind`` parameter provides. This + dictionary can map individual ``Table`` instances as well as + ``Mapper`` instances to individual ``Engine`` or ``Connection`` + objects. Operations which proceed relative to a particular + ``Mapper`` will consult this dictionary for the direct ``Mapper`` + instance as well as the mapper's ``mapped_table`` attribute in + order to locate an connectable to use. The full resolution is + described in the ``get_bind()`` method of ``Session``. 
+ Usage looks like:: Session = sessionmaker(binds={ SomeMappedClass: create_engine('postgresql://engine1'), @@ -496,42 +504,43 @@ class Session(_SessionClassMethods): some_table: create_engine('postgresql://engine3'), }) - Also see the :meth:`.Session.bind_mapper` and :meth:`.Session.bind_table` methods. + Also see the :meth:`.Session.bind_mapper` + and :meth:`.Session.bind_table` methods. :param \class_: Specify an alternate class other than - ``sqlalchemy.orm.session.Session`` which should be used by the returned - class. This is the only argument that is local to the + ``sqlalchemy.orm.session.Session`` which should be used by the + returned class. This is the only argument that is local to the ``sessionmaker()`` function, and is not sent directly to the constructor for ``Session``. :param _enable_transaction_accounting: Defaults to ``True``. A - legacy-only flag which when ``False`` disables *all* 0.5-style object - accounting on transaction boundaries, including auto-expiry of - instances on rollback and commit, maintenance of the "new" and + legacy-only flag which when ``False`` disables *all* 0.5-style + object accounting on transaction boundaries, including auto-expiry + of instances on rollback and commit, maintenance of the "new" and "deleted" lists upon rollback, and autoflush of pending changes upon begin(), all of which are interdependent. :param expire_on_commit: Defaults to ``True``. When ``True``, all - instances will be fully expired after each ``commit()``, so that all - attribute/object access subsequent to a completed transaction will load - from the most recent database state. + instances will be fully expired after each ``commit()``, so that + all attribute/object access subsequent to a completed transaction + will load from the most recent database state. 
:param extension: An optional :class:`~.SessionExtension` instance, or a list - of such instances, which will receive pre- and post- commit and flush - events, as well as a post-rollback event. **Deprecated.** + of such instances, which will receive pre- and post- commit and + flush events, as well as a post-rollback event. **Deprecated.** Please see :class:`.SessionEvents`. - :param query_cls: Class which should be used to create new Query objects, - as returned by the ``query()`` method. Defaults to + :param query_cls: Class which should be used to create new Query + objects, as returned by the ``query()`` method. Defaults to :class:`~sqlalchemy.orm.query.Query`. :param twophase: When ``True``, all transactions will be started as a "two phase" transaction, i.e. using the "two phase" semantics of the database in use along with an XID. During a ``commit()``, after ``flush()`` has been issued for all attached databases, the - ``prepare()`` method on each database's ``TwoPhaseTransaction`` will - be called. This allows each database to roll back the entire + ``prepare()`` method on each database's ``TwoPhaseTransaction`` + will be called. This allows each database to roll back the entire transaction, before each transaction is committed. :param weak_identity_map: Defaults to ``True`` - when set to @@ -594,13 +603,14 @@ class Session(_SessionClassMethods): transaction or nested transaction, an error is raised, unless ``subtransactions=True`` or ``nested=True`` is specified. - The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin` - can create a subtransaction if a transaction is already in progress. - For documentation on subtransactions, please see :ref:`session_subtransactions`. + The ``subtransactions=True`` flag indicates that this + :meth:`~.Session.begin` can create a subtransaction if a transaction + is already in progress. For documentation on subtransactions, please + see :ref:`session_subtransactions`. 
The ``nested`` flag begins a SAVEPOINT transaction and is equivalent - to calling :meth:`~.Session.begin_nested`. For documentation on SAVEPOINT - transactions, please see :ref:`session_begin_nested`. + to calling :meth:`~.Session.begin_nested`. For documentation on + SAVEPOINT transactions, please see :ref:`session_begin_nested`. """ if self.transaction is not None: @@ -609,8 +619,8 @@ class Session(_SessionClassMethods): nested=nested) else: raise sa_exc.InvalidRequestError( - "A transaction is already begun. Use subtransactions=True " - "to allow subtransactions.") + "A transaction is already begun. Use " + "subtransactions=True to allow subtransactions.") else: self.transaction = SessionTransaction( self, nested=nested) @@ -702,18 +712,20 @@ class Session(_SessionClassMethods): :class:`.Session` object's transactional state. If this :class:`.Session` is configured with ``autocommit=False``, - either the :class:`.Connection` corresponding to the current transaction - is returned, or if no transaction is in progress, a new one is begun - and the :class:`.Connection` returned (note that no transactional state - is established with the DBAPI until the first SQL statement is emitted). + either the :class:`.Connection` corresponding to the current + transaction is returned, or if no transaction is in progress, a new + one is begun and the :class:`.Connection` returned (note that no + transactional state is established with the DBAPI until the first + SQL statement is emitted). - Alternatively, if this :class:`.Session` is configured with ``autocommit=True``, - an ad-hoc :class:`.Connection` is returned using :meth:`.Engine.contextual_connect` - on the underlying :class:`.Engine`. + Alternatively, if this :class:`.Session` is configured with + ``autocommit=True``, an ad-hoc :class:`.Connection` is returned + using :meth:`.Engine.contextual_connect` on the underlying + :class:`.Engine`. 
- Ambiguity in multi-bind or unbound :class:`.Session` objects can be resolved through - any of the optional keyword arguments. This ultimately makes usage of the - :meth:`.get_bind` method for resolution. + Ambiguity in multi-bind or unbound :class:`.Session` objects can be + resolved through any of the optional keyword arguments. This + ultimately makes usage of the :meth:`.get_bind` method for resolution. :param bind: Optional :class:`.Engine` to be used as the bind. If @@ -733,10 +745,11 @@ class Session(_SessionClassMethods): cannot otherwise be identified. :param close_with_result: Passed to :meth:`Engine.connect`, indicating - the :class:`.Connection` should be considered "single use", automatically - closing when the first result set is closed. This flag only has - an effect if this :class:`.Session` is configured with ``autocommit=True`` - and does not already have a transaction in progress. + the :class:`.Connection` should be considered "single use", + automatically closing when the first result set is closed. This + flag only has an effect if this :class:`.Session` is configured with + ``autocommit=True`` and does not already have a transaction + in progress. :param \**kw: Additional keyword arguments are sent to :meth:`get_bind()`, @@ -778,8 +791,8 @@ class Session(_SessionClassMethods): :func:`~.sql.expression.delete`, and :func:`~.sql.expression.text`. Plain SQL strings can be passed as well, which in the case of :meth:`.Session.execute` only - will be interpreted the same as if it were passed via a :func:`~.expression.text` - construct. That is, the following usage:: + will be interpreted the same as if it were passed via a + :func:`~.expression.text` construct. That is, the following usage:: result = session.execute( "SELECT * FROM user WHERE id=:param", @@ -795,9 +808,10 @@ class Session(_SessionClassMethods): ) The second positional argument to :meth:`.Session.execute` is an - optional parameter set. 
Similar to that of :meth:`.Connection.execute`, whether this - is passed as a single dictionary, or a list of dictionaries, determines - whether the DBAPI cursor's ``execute()`` or ``executemany()`` is used to execute the + optional parameter set. Similar to that of + :meth:`.Connection.execute`, whether this is passed as a single + dictionary, or a list of dictionaries, determines whether the DBAPI + cursor's ``execute()`` or ``executemany()`` is used to execute the statement. An INSERT construct may be invoked for a single row:: result = session.execute(users.insert(), {"id": 7, "name": "somename"}) @@ -826,12 +840,13 @@ class Session(_SessionClassMethods): The :class:`.ResultProxy` returned by the :meth:`.Session.execute` method is returned with the "close_with_result" flag set to true; the significance of this flag is that if this :class:`.Session` is - autocommitting and does not have a transaction-dedicated :class:`.Connection` - available, a temporary :class:`.Connection` is established for the - statement execution, which is closed (meaning, returned to the connection - pool) when the :class:`.ResultProxy` has consumed all available data. - This applies *only* when the :class:`.Session` is configured with - autocommit=True and no transaction has been started. + autocommitting and does not have a transaction-dedicated + :class:`.Connection` available, a temporary :class:`.Connection` is + established for the statement execution, which is closed (meaning, + returned to the connection pool) when the :class:`.ResultProxy` has + consumed all available data. This applies *only* when the + :class:`.Session` is configured with autocommit=True and no + transaction has been started. :param clause: An executable statement (i.e. 
an :class:`.Executable` expression @@ -886,7 +901,8 @@ class Session(_SessionClassMethods): def scalar(self, clause, params=None, mapper=None, bind=None, **kw): """Like :meth:`~.Session.execute` but return a scalar result.""" - return self.execute(clause, params=params, mapper=mapper, bind=bind, **kw).scalar() + return self.execute( + clause, params=params, mapper=mapper, bind=bind, **kw).scalar() def close(self): """Close this Session. @@ -918,7 +934,8 @@ class Session(_SessionClassMethods): self._deleted = {} # TODO: need much more test coverage for bind_mapper() and similar ! - # TODO: + crystalize + document resolution order vis. bind_mapper/bind_table + # TODO: + crystalize + document resolution order + # vis. bind_mapper/bind_table def bind_mapper(self, mapper, bind): """Bind operations for a mapper to a Connectable. @@ -1001,10 +1018,10 @@ class Session(_SessionClassMethods): :param clause: A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`, :func:`~.sql.expression.text`, - etc.). If the ``mapper`` argument is not present or could not produce - a bind, the given expression construct will be searched for a bound - element, typically a :class:`.Table` associated with bound - :class:`.MetaData`. + etc.). If the ``mapper`` argument is not present or could not + produce a bind, the given expression construct will be searched + for a bound element, typically a :class:`.Table` associated with + bound :class:`.MetaData`. """ if mapper is clause is None: @@ -1274,11 +1291,12 @@ class Session(_SessionClassMethods): raise exc.FlushError( "Instance %s has a NULL identity key. If this is an " "auto-generated value, check that the database table " - "allows generation of new primary key values, and that " - "the mapped Column object is configured to expect these " - "generated values. Ensure also that this flush() is " - "not occurring at an inappropriate time, such as within " - "a load() event." 
% orm_util.state_str(state) + "allows generation of new primary key values, and " + "that the mapped Column object is configured to " + "expect these generated values. Ensure also that " + "this flush() is not occurring at an inappropriate " + "time, such aswithin a load() event." + % orm_util.state_str(state) ) if state.key is None: @@ -1292,7 +1310,8 @@ class Session(_SessionClassMethods): orig_key = self.transaction._key_switches[state][0] else: orig_key = state.key - self.transaction._key_switches[state] = (orig_key, instance_key) + self.transaction._key_switches[state] = ( + orig_key, instance_key) state.key = instance_key self.identity_map.replace(state) @@ -1410,8 +1429,8 @@ class Session(_SessionClassMethods): source instance, and attempts to reconcile it with an instance of the same primary key in the session. If not found locally, it attempts to load the object from the database based on primary key, and if - none can be located, creates a new instance. The state of each attribute - on the source instance is then copied to the target instance. + none can be located, creates a new instance. The state of each + attribute on the source instance is then copied to the target instance. The resulting target instance is then returned by the method; the original source instance is left unmodified, and un-associated with the :class:`.Session` if not already. 
@@ -1455,7 +1474,7 @@ class Session(_SessionClassMethods): # flush current contents if we expect to load data self._autoflush() - object_mapper(instance) # verify mapped + object_mapper(instance) # verify mapped autoflush = self.autoflush try: self.autoflush = False @@ -1564,7 +1583,6 @@ class Session(_SessionClassMethods): merged_state.manager.dispatch.load(merged_state, None) return merged - def _validate_persistent(self, state): if not self.identity_map.contains_state(state): raise sa_exc.InvalidRequestError( @@ -1661,10 +1679,10 @@ class Session(_SessionClassMethods): it to the :class:`.Session` using :meth:`.Session.add` normally. :meth:`.Session.enable_relationship_loading` does not improve - behavior when the ORM is used normally - object references should be constructed - at the object level, not at the foreign key level, so that they - are present in an ordinary way before flush() proceeds. This method - is not intended for general use. + behavior when the ORM is used normally - object references should be + constructed at the object level, not at the foreign key level, so + that they are present in an ordinary way before flush() + proceeds. This method is not intended for general use. .. versionadded:: 0.8 @@ -1717,8 +1735,10 @@ class Session(_SessionClassMethods): return self._contains_state(state) def __iter__(self): - """Iterate over all pending or persistent instances within this Session.""" + """Iterate over all pending or persistent instances within this + Session. + """ return iter(list(self._new.values()) + self.identity_map.values()) def _contains_state(self, state): @@ -1763,11 +1783,12 @@ class Session(_SessionClassMethods): self._flushing = False def _flush_warning(self, method): - util.warn("Usage of the '%s' operation is not currently supported " - "within the execution stage of the flush process. " - "Results may not be consistent. Consider using alternative " - "event listeners or connection-level operations instead." 
- % method) + util.warn( + "Usage of the '%s' operation is not currently supported " + "within the execution stage of the flush process. " + "Results may not be consistent. Consider using alternative " + "event listeners or connection-level operations instead." + % method) def _is_clean(self): return not self.identity_map.check_modified() and \ @@ -1818,7 +1839,8 @@ class Session(_SessionClassMethods): proc = new.union(dirty).difference(deleted) for state in proc: - is_orphan = _state_mapper(state)._is_orphan(state) and state.has_identity + is_orphan = ( + _state_mapper(state)._is_orphan(state) and state.has_identity) flush_context.register_object(state, isdelete=is_orphan) processed.add(state) @@ -1876,7 +1898,6 @@ class Session(_SessionClassMethods): transaction.rollback(_capture_exception=True) raise - def is_modified(self, instance, include_collections=True, passive=True): """Return ``True`` if the given instance has locally @@ -1923,19 +1944,19 @@ class Session(_SessionClassMethods): usually needed, and in those few cases where it isn't, is less expensive on average than issuing a defensive SELECT. - The "old" value is fetched unconditionally upon set only if the attribute - container has the ``active_history`` flag set to ``True``. This flag - is set typically for primary key attributes and scalar object references - that are not a simple many-to-one. To set this flag for - any arbitrary mapped column, use the ``active_history`` argument - with :func:`.column_property`. + The "old" value is fetched unconditionally upon set only if the + attribute container has the ``active_history`` flag set to ``True``. + This flag is set typically for primary key attributes and scalar + object references that are not a simple many-to-one. To set this + flag for any arbitrary mapped column, use the ``active_history`` + argument with :func:`.column_property`. :param instance: mapped instance to be tested for pending changes. 
- :param include_collections: Indicates if multivalued collections should be - included in the operation. Setting this to ``False`` is a way to detect - only local-column based properties (i.e. scalar columns or many-to-one - foreign keys) that would result in an UPDATE for this instance upon - flush. + :param include_collections: Indicates if multivalued collections + should be included in the operation. Setting this to ``False`` is a + way to detect only local-column based properties (i.e. scalar columns + or many-to-one foreign keys) that would result in an UPDATE for this + instance upon flush. :param passive: .. versionchanged:: 0.8 Ignored for backwards compatibility. @@ -1991,34 +2012,36 @@ class Session(_SessionClassMethods): target :class:`.Connection` to a user-defined event listener. The "partial rollback" state refers to when an "inner" transaction, - typically used during a flush, encounters an error and emits - a rollback of the DBAPI connection. At this point, the :class:`.Session` - is in "partial rollback" and awaits for the user to call :meth:`.Session.rollback`, - in order to close out the transaction stack. It is in this "partial - rollback" period that the :attr:`.is_active` flag returns False. After - the call to :meth:`.Session.rollback`, the :class:`.SessionTransaction` is replaced + typically used during a flush, encounters an error and emits a + rollback of the DBAPI connection. At this point, the + :class:`.Session` is in "partial rollback" and awaits for the user to + call :meth:`.Session.rollback`, in order to close out the + transaction stack. It is in this "partial rollback" period that the + :attr:`.is_active` flag returns False. After the call to + :meth:`.Session.rollback`, the :class:`.SessionTransaction` is replaced with a new one and :attr:`.is_active` returns ``True`` again. 
When a :class:`.Session` is used in ``autocommit=True`` mode, the :class:`.SessionTransaction` is only instantiated within the scope of a flush call, or when :meth:`.Session.begin` is called. So :attr:`.is_active` will always be ``False`` outside of a flush or - :meth:`.Session.begin` block in this mode, and will be ``True`` within the - :meth:`.Session.begin` block as long as it doesn't enter "partial rollback" - state. + :meth:`.Session.begin` block in this mode, and will be ``True`` + within the :meth:`.Session.begin` block as long as it doesn't enter + "partial rollback" state. From all the above, it follows that the only purpose to this flag is for application frameworks that wish to detect is a "rollback" is - necessary within a generic error handling routine, for :class:`.Session` - objects that would otherwise be in "partial rollback" mode. In - a typical integration case, this is also not necessary as it is standard - practice to emit :meth:`.Session.rollback` unconditionally within the - outermost exception catch. + necessary within a generic error handling routine, for + :class:`.Session` objects that would otherwise be in + "partial rollback" mode. In a typical integration case, this is also + not necessary as it is standard practice to emit + :meth:`.Session.rollback` unconditionally within the outermost + exception catch. To track the transactional state of a :class:`.Session` fully, use event listeners, primarily the :meth:`.SessionEvents.after_begin`, - :meth:`.SessionEvents.after_commit`, :meth:`.SessionEvents.after_rollback` - and related events. + :meth:`.SessionEvents.after_commit`, + :meth:`.SessionEvents.after_rollback` and related events. """ return self.transaction and self.transaction.is_active @@ -2087,6 +2110,7 @@ class Session(_SessionClassMethods): return util.IdentitySet(self._new.values()) + class sessionmaker(_SessionClassMethods): """A configurable :class:`.Session` factory. 
@@ -2200,6 +2224,7 @@ class sessionmaker(_SessionClassMethods): _sessions = weakref.WeakValueDictionary() + def make_transient(instance): """Make the given instance 'transient'. @@ -2228,6 +2253,7 @@ def make_transient(instance): if state.deleted: del state.deleted + def object_session(instance): """Return the ``Session`` to which instance belongs. diff --git a/lib/sqlalchemy/orm/state.py b/lib/sqlalchemy/orm/state.py index 9ef27f0d4..523c3a980 100644 --- a/lib/sqlalchemy/orm/state.py +++ b/lib/sqlalchemy/orm/state.py @@ -214,7 +214,7 @@ class InstanceState(interfaces._InspectionAttr): return self._pending_mutations[key] def __getstate__(self): - d = {'instance':self.obj()} + d = {'instance': self.obj()} d.update( (k, self.__dict__[k]) for k in ( 'committed_state', '_pending_mutations', 'modified', 'expired', @@ -396,7 +396,6 @@ class InstanceState(interfaces._InspectionAttr): return set(keys).intersection(self.manager).\ difference(self.committed_state) - @property def unloaded(self): """Return the set of keys which do not have a loaded value. @@ -515,6 +514,7 @@ class InstanceState(interfaces._InspectionAttr): state.modified = state.expired = False state._strong_obj = None + class AttributeState(object): """Provide an inspection interface corresponding to a particular attribute on a particular mapped object. 
@@ -589,4 +589,3 @@ class PendingCollection(object): self.added_items.remove(value) else: self.deleted_items.add(value) - diff --git a/lib/sqlalchemy/orm/strategies.py b/lib/sqlalchemy/orm/strategies.py index 35ef7667b..39329f9b1 100644 --- a/lib/sqlalchemy/orm/strategies.py +++ b/lib/sqlalchemy/orm/strategies.py @@ -22,6 +22,7 @@ from .interfaces import ( from .session import _state_session import itertools + def _register_attribute(strategy, mapper, useobject, compare_function=None, typecallable=None, @@ -86,6 +87,7 @@ def _register_attribute(strategy, mapper, useobject, for hook in listen_hooks: hook(desc, prop) + class UninstrumentedColumnLoader(LoaderStrategy): """Represent the a non-instrumented MapperProperty. @@ -107,6 +109,7 @@ class UninstrumentedColumnLoader(LoaderStrategy): def create_row_processor(self, context, path, mapper, row, adapter): return None, None, None + class ColumnLoader(LoaderStrategy): """Provide loading behavior for a :class:`.ColumnProperty`.""" @@ -131,7 +134,7 @@ class ColumnLoader(LoaderStrategy): _register_attribute(self, mapper, useobject=False, compare_function=coltype.compare_values, - active_history = active_history + active_history=active_history ) def create_row_processor(self, context, path, @@ -151,8 +154,10 @@ class ColumnLoader(LoaderStrategy): state._expire_attribute_pre_commit(dict_, key) return expire_for_non_present_col, None, None + log.class_logger(ColumnLoader) + class DeferredColumnLoader(LoaderStrategy): """Provide loading behavior for a deferred :class:`.ColumnProperty`.""" @@ -177,7 +182,8 @@ class DeferredColumnLoader(LoaderStrategy): elif not self.is_class_level: def set_deferred_for_local_state(state, dict_, row): - state._set_callable(dict_, key, LoadDeferredColumns(state, key)) + state._set_callable( + dict_, key, LoadDeferredColumns(state, key)) return set_deferred_for_local_state, None, None else: def reset_col_for_deferred(state, dict_, row): @@ -220,7 +226,7 @@ class 
DeferredColumnLoader(LoaderStrategy): localparent.iterate_properties if isinstance(p, StrategizedProperty) and isinstance(p.strategy, DeferredColumnLoader) and - p.group==self.group + p.group == self.group ] else: toload = [self.key] @@ -243,8 +249,10 @@ class DeferredColumnLoader(LoaderStrategy): return attributes.ATTR_WAS_SET + log.class_logger(DeferredColumnLoader) + class LoadDeferredColumns(object): """serializable loader object used by DeferredColumnLoader""" @@ -260,6 +268,7 @@ class LoadDeferredColumns(object): strategy = prop._strategies[DeferredColumnLoader] return strategy._load_for_state(state, passive) + class DeferredOption(StrategizedOption): propagate_to_loaders = True @@ -273,6 +282,7 @@ class DeferredOption(StrategizedOption): else: return ColumnLoader + class UndeferGroupOption(MapperOption): propagate_to_loaders = True @@ -282,6 +292,7 @@ class UndeferGroupOption(MapperOption): def process_query(self, query): query._attributes[("undefer", self.group)] = True + class AbstractRelationshipLoader(LoaderStrategy): """LoaderStratgies which deal with related objects.""" @@ -291,6 +302,7 @@ class AbstractRelationshipLoader(LoaderStrategy): self.target = self.parent_property.target self.uselist = self.parent_property.uselist + class NoLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` with "lazy=None". 
@@ -303,7 +315,7 @@ class NoLoader(AbstractRelationshipLoader): _register_attribute(self, mapper, useobject=True, uselist=self.parent_property.uselist, - typecallable = self.parent_property.collection_class, + typecallable=self.parent_property.collection_class, ) def create_row_processor(self, context, path, mapper, row, adapter): @@ -311,8 +323,10 @@ class NoLoader(AbstractRelationshipLoader): state._initialize(self.key) return invoke_no_load, None, None + log.class_logger(NoLoader) + class LazyLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` with "lazy=True", that is loads when first accessed. @@ -355,6 +369,12 @@ class LazyLoader(AbstractRelationshipLoader): def init_class_attribute(self, mapper): self.is_class_level = True + active_history = ( + self.parent_property.active_history or + self.parent_property.direction is not interfaces.MANYTOONE or + not self.use_get + ) + # MANYTOONE currently only needs the # "old" value for delete-orphan # cascades. the required _SingleParentValidator @@ -362,18 +382,14 @@ class LazyLoader(AbstractRelationshipLoader): # in that case. otherwise we don't need the # "old" value during backref operations. 
_register_attribute(self, - mapper, - useobject=True, - callable_=self._load_for_state, - uselist = self.parent_property.uselist, - backref = self.parent_property.back_populates, - typecallable = self.parent_property.collection_class, - active_history = \ - self.parent_property.active_history or \ - self.parent_property.direction is not \ - interfaces.MANYTOONE or \ - not self.use_get, - ) + mapper, + useobject=True, + callable_=self._load_for_state, + uselist=self.parent_property.uselist, + backref=self.parent_property.back_populates, + typecallable=self.parent_property.collection_class, + active_history=active_history + ) def lazy_clause(self, state, reverse_direction=False, alias_secondary=False, @@ -421,7 +437,6 @@ class LazyLoader(AbstractRelationshipLoader): state, dict_, bind_to_col[bindparam._identifying_key]) - if self.parent_property.secondary is not None and alias_secondary: criterion = sql_util.ClauseAdapter( self.parent_property.secondary.alias()).\ @@ -582,7 +597,6 @@ class LazyLoader(AbstractRelationshipLoader): else: return None - def create_row_processor(self, context, path, mapper, row, adapter): key = self.key @@ -615,6 +629,7 @@ class LazyLoader(AbstractRelationshipLoader): log.class_logger(LazyLoader) + class LoadLazyAttribute(object): """serializable loader object used by LazyLoader""" @@ -649,6 +664,7 @@ class ImmediateLoader(AbstractRelationshipLoader): return None, None, load_immediate + class SubqueryLoader(AbstractRelationshipLoader): def __init__(self, parent): super(SubqueryLoader, self).__init__(parent) @@ -713,7 +729,7 @@ class SubqueryLoader(AbstractRelationshipLoader): q = orig_query.session.query(effective_entity) q._attributes = { ("orig_query", SubqueryLoader): orig_query, - ('subquery_path', None) : subq_path + ('subquery_path', None): subq_path } q = q._enable_single_crit(False) @@ -779,13 +795,12 @@ class SubqueryLoader(AbstractRelationshipLoader): left_alias = orm_util.AliasedClass(leftmost_mapper, embed_q) return left_alias 
- def _prep_for_joins(self, left_alias, subq_path): subq_path = subq_path.path # figure out what's being joined. a.k.a. the fun part to_join = [ - (subq_path[i], subq_path[i+1]) + (subq_path[i], subq_path[i + 1]) for i in xrange(0, len(subq_path), 2) ] @@ -905,7 +920,7 @@ class SubqueryLoader(AbstractRelationshipLoader): (k, [v[0] for v in v]) for k, v in itertools.groupby( subq, - lambda x:x[1:] + lambda x: x[1:] )) path.set(context, 'collections', collections) @@ -946,8 +961,10 @@ class SubqueryLoader(AbstractRelationshipLoader): return load_scalar_from_subq, None, None + log.class_logger(SubqueryLoader) + class JoinedLoader(AbstractRelationshipLoader): """Provide loading behavior for a :class:`.RelationshipProperty` using joined eager loading. @@ -1173,7 +1190,6 @@ class JoinedLoader(AbstractRelationshipLoader): ) ) - def _create_eager_adapter(self, context, row, adapter, path): user_defined_adapter = path.get(context, "user_defined_eager_row_processor", @@ -1291,8 +1307,10 @@ class JoinedLoader(AbstractRelationshipLoader): load_scalar_from_joined_existing_row, \ None, load_scalar_from_joined_exec + log.class_logger(JoinedLoader) + class EagerLazyOption(StrategizedOption): def __init__(self, key, lazy=True, chained=False, propagate_to_loaders=True @@ -1314,18 +1332,21 @@ class EagerLazyOption(StrategizedOption): return self.strategy_cls _factory = { - False:JoinedLoader, - "joined":JoinedLoader, - None:NoLoader, - "noload":NoLoader, - "select":LazyLoader, - True:LazyLoader, - "subquery":SubqueryLoader, - "immediate":ImmediateLoader + False: JoinedLoader, + "joined": JoinedLoader, + None: NoLoader, + "noload": NoLoader, + "select": LazyLoader, + True: LazyLoader, + "subquery": SubqueryLoader, + "immediate": ImmediateLoader } + + def factory(identifier): return _factory.get(identifier, LazyLoader) + class EagerJoinOption(PropertyOption): def __init__(self, key, innerjoin, chained=False): @@ -1340,6 +1361,7 @@ class EagerJoinOption(PropertyOption): else: 
paths[-1].set(query, "eager_join_type", self.innerjoin) + class LoadEagerFromAliasOption(PropertyOption): def __init__(self, key, alias=None, chained=False): @@ -1382,6 +1404,7 @@ class LoadEagerFromAliasOption(PropertyOption): paths[-1].set(query, "user_defined_eager_row_processor", adapter) + def single_parent_validator(desc, prop): def _do_check(state, value, oldvalue, initiator): if value is not None and initiator.key == prop.key: @@ -1405,4 +1428,3 @@ def single_parent_validator(desc, prop): active_history=True) event.listen(desc, 'set', set_, raw=True, retval=True, active_history=True) - diff --git a/lib/sqlalchemy/orm/sync.py b/lib/sqlalchemy/orm/sync.py index 2b2f6d092..847bce30e 100644 --- a/lib/sqlalchemy/orm/sync.py +++ b/lib/sqlalchemy/orm/sync.py @@ -11,6 +11,7 @@ between instances based on join conditions. from . import exc, util as orm_util, attributes + def populate(source, source_mapper, dest, dest_mapper, synchronize_pairs, uowcommit, flag_cascaded_pks): source_dict = source.dict @@ -42,38 +43,45 @@ def populate(source, source_mapper, dest, dest_mapper, r.references(l): uowcommit.attributes[("pk_cascaded", dest, r)] = True + def clear(dest, dest_mapper, synchronize_pairs): for l, r in synchronize_pairs: if r.primary_key: raise AssertionError( - "Dependency rule tried to blank-out primary key " - "column '%s' on instance '%s'" % - (r, orm_util.state_str(dest)) - ) + "Dependency rule tried to blank-out primary key " + "column '%s' on instance '%s'" % + (r, orm_util.state_str(dest)) + ) try: dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None) except exc.UnmappedColumnError: _raise_col_to_prop(True, None, l, dest_mapper, r) + def update(source, source_mapper, dest, old_prefix, synchronize_pairs): for l, r in synchronize_pairs: try: - oldvalue = source_mapper._get_committed_attr_by_column(source.obj(), l) - value = source_mapper._get_state_attr_by_column(source, source.dict, l) + oldvalue = source_mapper._get_committed_attr_by_column( + 
source.obj(), l) + value = source_mapper._get_state_attr_by_column( + source, source.dict, l) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) dest[r.key] = value dest[old_prefix + r.key] = oldvalue + def populate_dict(source, source_mapper, dict_, synchronize_pairs): for l, r in synchronize_pairs: try: - value = source_mapper._get_state_attr_by_column(source, source.dict, l) + value = source_mapper._get_state_attr_by_column( + source, source.dict, l) except exc.UnmappedColumnError: _raise_col_to_prop(False, source_mapper, l, None, r) dict_[r.key] = value + def source_modified(uowcommit, source, source_mapper, synchronize_pairs): """return true if the source object has changes from an old to a new value on the given synchronize pairs @@ -90,6 +98,7 @@ def source_modified(uowcommit, source, source_mapper, synchronize_pairs): else: return False + def _raise_col_to_prop(isdest, source_mapper, source_column, dest_mapper, dest_column): if isdest: diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py index 1cba58321..4d0c70ea3 100644 --- a/lib/sqlalchemy/orm/unitofwork.py +++ b/lib/sqlalchemy/orm/unitofwork.py @@ -18,6 +18,7 @@ from . import attributes, persistence, util as orm_util sessionlib = util.importlater("sqlalchemy.orm", "session") + def track_cascade_events(descriptor, prop): """Establish event listeners on object attributes which handle cascade-on-set/append. @@ -370,7 +371,6 @@ class UOWTransaction(object): postsort_actions): rec.execute(self) - def finalize_flush_changes(self): """mark processed objects as clean / deleted after a successful flush(). 
@@ -388,6 +388,7 @@ class UOWTransaction(object): self.session._remove_newly_deleted(isdel) self.session._register_newly_persistent(other) + class IterateMappersMixin(object): def _mappers(self, uow): if self.fromparent: @@ -399,6 +400,7 @@ class IterateMappersMixin(object): else: return self.dependency_processor.mapper.self_and_descendants + class Preprocess(IterateMappersMixin): def __init__(self, dependency_processor, fromparent): self.dependency_processor = dependency_processor @@ -439,6 +441,7 @@ class Preprocess(IterateMappersMixin): else: return False + class PostSortRec(object): disabled = False @@ -461,6 +464,7 @@ class PostSortRec(object): ",".join(str(x) for x in self.__dict__.values()) ) + class ProcessAll(IterateMappersMixin, PostSortRec): def __init__(self, uow, dependency_processor, delete, fromparent): self.dependency_processor = dependency_processor @@ -497,6 +501,7 @@ class ProcessAll(IterateMappersMixin, PostSortRec): if isdelete == self.delete and not listonly: yield state + class IssuePostUpdate(PostSortRec): def __init__(self, uow, mapper, isdelete): self.mapper = mapper @@ -508,6 +513,7 @@ class IssuePostUpdate(PostSortRec): persistence.post_update(self.mapper, states, uow, cols) + class SaveUpdateAll(PostSortRec): def __init__(self, uow, mapper): self.mapper = mapper @@ -519,7 +525,6 @@ class SaveUpdateAll(PostSortRec): uow ) - def per_state_flush_actions(self, uow): states = list(uow.states_for_mapper_hierarchy( self.mapper, False, False)) @@ -536,6 +541,7 @@ class SaveUpdateAll(PostSortRec): states_for_prop = uow.filter_states_for_dep(dep, states) dep.per_state_flush_actions(uow, states_for_prop, False) + class DeleteAll(PostSortRec): def __init__(self, uow, mapper): self.mapper = mapper @@ -563,6 +569,7 @@ class DeleteAll(PostSortRec): states_for_prop = uow.filter_states_for_dep(dep, states) dep.per_state_flush_actions(uow, states_for_prop, True) + class ProcessState(PostSortRec): def __init__(self, uow, dependency_processor, delete, 
state): self.dependency_processor = dependency_processor @@ -592,6 +599,7 @@ class ProcessState(PostSortRec): self.delete ) + class SaveUpdateState(PostSortRec): def __init__(self, uow, state, mapper): self.state = state @@ -615,6 +623,7 @@ class SaveUpdateState(PostSortRec): orm_util.state_str(self.state) ) + class DeleteState(PostSortRec): def __init__(self, uow, state, mapper): self.state = state @@ -637,4 +646,3 @@ class DeleteState(PostSortRec): self.__class__.__name__, orm_util.state_str(self.state) ) - diff --git a/lib/sqlalchemy/orm/util.py b/lib/sqlalchemy/orm/util.py index 43632ff13..97baffc9a 100644 --- a/lib/sqlalchemy/orm/util.py +++ b/lib/sqlalchemy/orm/util.py @@ -22,6 +22,7 @@ _INSTRUMENTOR = ('mapper', 'instrumentor') _none_set = frozenset([None]) + class CascadeOptions(frozenset): """Keeps track of the options sent to relationship().cascade""" @@ -69,6 +70,7 @@ class CascadeOptions(frozenset): ",".join([x for x in sorted(self)]) ) + def _validator_events(desc, key, validator, include_removes): """Runs a validation method on an attribute value to be set or appended.""" @@ -93,6 +95,7 @@ def _validator_events(desc, key, validator, include_removes): if include_removes: event.listen(desc, "remove", remove, raw=True, retval=True) + def polymorphic_union(table_map, typecolname, aliasname='p_union', cast_nulls=True): """Create a ``UNION`` statement used by a polymorphic mapper. @@ -155,6 +158,7 @@ def polymorphic_union(table_map, typecolname, from_obj=[table])) return sql.union_all(*result).alias(aliasname) + def identity_key(*args, **kwargs): """Get an identity key. @@ -211,6 +215,7 @@ def identity_key(*args, **kwargs): mapper = object_mapper(instance) return mapper.identity_key_from_instance(instance) + class ORMAdapter(sql_util.ColumnAdapter): """Extends ColumnAdapter to accept ORM entities. 
@@ -240,6 +245,7 @@ class ORMAdapter(sql_util.ColumnAdapter): else: return None + class PathRegistry(object): """Represent query load paths and registry functions. @@ -335,6 +341,7 @@ class PathRegistry(object): def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self.path, ) + class RootRegistry(PathRegistry): """Root registry, defers to mappers so that paths are maintained per-root-mapper. @@ -347,6 +354,7 @@ class RootRegistry(PathRegistry): return mapper._sa_path_registry PathRegistry.root = RootRegistry() + class KeyRegistry(PathRegistry): def __init__(self, parent, key): self.key = key @@ -362,6 +370,7 @@ class KeyRegistry(PathRegistry): self, entity ) + class EntityRegistry(PathRegistry, dict): is_aliased_class = False @@ -542,6 +551,7 @@ class AliasedClass(object): return '<AliasedClass at 0x%x; %s>' % ( id(self), self.__target.__name__) + AliasedInsp = util.namedtuple("AliasedInsp", [ "entity", "mapper", @@ -551,6 +561,7 @@ AliasedInsp = util.namedtuple("AliasedInsp", [ "polymorphic_on" ]) + class AliasedInsp(_InspectionAttr, AliasedInsp): """Provide an inspection interface for an :class:`.AliasedClass` object. @@ -596,8 +607,10 @@ class AliasedInsp(_InspectionAttr, AliasedInsp): :class:`.AliasedInsp`.""" return self.mapper.class_ + inspection._inspects(AliasedClass)(lambda target: target._aliased_insp) + def aliased(element, alias=None, name=None, adapt_on_names=False): """Produce an alias of the given element, usually an :class:`.AliasedClass` instance. 
@@ -677,6 +690,7 @@ def aliased(element, alias=None, name=None, adapt_on_names=False): return AliasedClass(element, alias=alias, name=name, adapt_on_names=adapt_on_names) + def with_polymorphic(base, classes, selectable=False, polymorphic_on=None, aliased=False, innerjoin=False): @@ -750,6 +764,7 @@ def _orm_annotate(element, exclude=None): """ return sql_util._deep_annotate(element, {'_orm_adapt': True}, exclude) + def _orm_deannotate(element): """Remove annotations that link a column to a particular mapping. @@ -763,9 +778,11 @@ def _orm_deannotate(element): values=("_orm_adapt", "parententity") ) + def _orm_full_deannotate(element): return sql_util._deep_deannotate(element) + class _ORMJoin(expression.Join): """Extend Join to support ORM constructs as input.""" @@ -836,6 +853,7 @@ class _ORMJoin(expression.Join): def outerjoin(self, right, onclause=None, join_to_left=True): return _ORMJoin(self, right, onclause, True, join_to_left) + def join(left, right, onclause=None, isouter=False, join_to_left=True): """Produce an inner join between left and right clauses. @@ -878,6 +896,7 @@ def join(left, right, onclause=None, isouter=False, join_to_left=True): """ return _ORMJoin(left, right, onclause, isouter, join_to_left) + def outerjoin(left, right, onclause=None, join_to_left=True): """Produce a left outer join between left and right clauses. 
@@ -888,6 +907,7 @@ def outerjoin(left, right, onclause=None, join_to_left=True): """ return _ORMJoin(left, right, onclause, True, join_to_left) + def with_parent(instance, prop): """Create filtering criterion that relates this query's primary entity to the given related instance, using established :func:`.relationship()` @@ -932,8 +952,10 @@ def _attr_as_key(attr): else: return expression._column_as_key(attr) + _state_mapper = util.dottedgetter('manager.mapper') + @inspection._inspects(object) def _inspect_mapped_object(instance): try: @@ -945,6 +967,7 @@ def _inspect_mapped_object(instance): except exc.NO_STATE: return None + @inspection._inspects(type) def _inspect_mapped_class(class_, configure=False): try: @@ -978,6 +1001,7 @@ def object_mapper(instance): """ return object_state(instance).mapper + def object_state(instance): """Given an object, return the :class:`.InstanceState` associated with the object. @@ -1001,6 +1025,7 @@ def object_state(instance): else: return state + def class_mapper(class_, configure=True): """Given a class, return the primary :class:`.Mapper` associated with the key. 
@@ -1027,6 +1052,7 @@ def class_mapper(class_, configure=True): else: return mapper + def _class_to_mapper(class_or_mapper): insp = inspection.inspect(class_or_mapper, False) if insp is not None: @@ -1034,6 +1060,7 @@ def _class_to_mapper(class_or_mapper): else: raise exc.UnmappedClassError(class_or_mapper) + def _mapper_or_none(entity): """Return the :class:`.Mapper` for the given class or None if the class is not mapped.""" @@ -1044,6 +1071,7 @@ def _mapper_or_none(entity): else: return None + def _is_mapped_class(entity): """Return True if the given object is a mapped class, :class:`.Mapper`, or :class:`.AliasedClass`.""" @@ -1089,6 +1117,7 @@ def _entity_descriptor(entity, key): (description, key) ) + def _orm_columns(entity): insp = inspection.inspect(entity, False) if hasattr(insp, 'selectable'): @@ -1096,15 +1125,18 @@ def _orm_columns(entity): else: return [entity] + def has_identity(object): state = attributes.instance_state(object) return state.has_identity + def instance_str(instance): """Return a string describing an instance.""" return state_str(attributes.instance_state(instance)) + def state_str(state): """Return a string describing an instance via its InstanceState.""" @@ -1113,6 +1145,7 @@ def state_str(state): else: return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj())) + def state_class_str(state): """Return a string describing an instance's class via its InstanceState.""" @@ -1121,9 +1154,10 @@ def state_class_str(state): else: return '<%s>' % (state.class_.__name__, ) + def attribute_str(instance, attribute): return instance_str(instance) + "." + attribute + def state_attribute_str(state, attribute): return state_str(state) + "." + attribute - diff --git a/lib/sqlalchemy/pool.py b/lib/sqlalchemy/pool.py index caef60c2f..5518b1e22 100644 --- a/lib/sqlalchemy/pool.py +++ b/lib/sqlalchemy/pool.py @@ -16,14 +16,18 @@ regular DB-API connect() methods to be transparently managed by a SQLAlchemy connection pool. 
""" -import weakref, time, traceback +import time +import traceback +import weakref from . import exc, log, event, events, interfaces, util from .util import queue as sqla_queue from .util import threading, memoized_property, \ chop_traceback + proxies = {} + def manage(module, **params): """Return a proxy for a DB-API module that automatically pools connections. @@ -46,6 +50,7 @@ def manage(module, **params): except KeyError: return proxies.setdefault(module, _DBProxy(module, **params)) + def clear_managers(): """Remove all current DB-API 2.0 managers. @@ -159,7 +164,8 @@ class Pool(log.Identified): dispatch = event.dispatcher(events.PoolEvents) - @util.deprecated(2.7, "Pool.add_listener is deprecated. Use event.listen()") + @util.deprecated( + 2.7, "Pool.add_listener is deprecated. Use event.listen()") def add_listener(self, listener): """Add a :class:`.PoolListener`-like object to this pool. @@ -384,8 +390,10 @@ def _finalize_fairy(connection, connection_record, pool, ref, echo): pool.dispatch.checkin(connection, connection_record) pool._return_conn(connection_record) + _refs = set() + class _ConnectionFairy(object): """Proxies a DB-API connection and provides return-on-dereference support.""" @@ -515,6 +523,7 @@ class _ConnectionFairy(object): self.connection = None self._connection_record = None + class SingletonThreadPool(Pool): """A Pool that maintains one connection per thread. @@ -590,12 +599,16 @@ class SingletonThreadPool(Pool): self._cleanup() return c + class DummyLock(object): + def acquire(self, wait=True): return True + def release(self): pass + class QueuePool(Pool): """A :class:`.Pool` that imposes a limit on the number of open connections. @@ -794,6 +807,7 @@ class QueuePool(Pool): def checkedout(self): return self._pool.maxsize - self._pool.qsize() + self._overflow + class NullPool(Pool): """A Pool which does not pool connections. 
@@ -878,9 +892,10 @@ class StaticPool(Pool): def _do_get(self): return self.connection + class AssertionPool(Pool): - """A :class:`.Pool` that allows at most one checked out connection at any given - time. + """A :class:`.Pool` that allows at most one checked out connection at + any given time. This will raise an exception if more than one connection is checked out at a time. Useful for debugging code that is using more connections @@ -936,6 +951,7 @@ class AssertionPool(Pool): self._checkout_traceback = traceback.format_stack() return self._conn + class _DBProxy(object): """Layers connection pooling behavior on top of a standard DB-API module. diff --git a/lib/sqlalchemy/processors.py b/lib/sqlalchemy/processors.py index ddca43a6c..ddea44dab 100644 --- a/lib/sqlalchemy/processors.py +++ b/lib/sqlalchemy/processors.py @@ -16,11 +16,13 @@ import codecs import re import datetime + def str_to_datetime_processor_factory(regexp, type_): rmatch = regexp.match # Even on python2.6 datetime.strptime is both slower than this code # and it does not support microseconds. 
has_named_groups = bool(regexp.groupindex) + def process(value): if value is None: return None @@ -42,12 +44,14 @@ def str_to_datetime_processor_factory(regexp, type_): return type_(*map(int, m.groups(0))) return process + def boolean_to_int(value): if value is None: return None else: return int(value) + def py_fallback(): def to_unicode_processor_factory(encoding, errors=None): decoder = codecs.getdecoder(encoding) @@ -125,4 +129,3 @@ try: except ImportError: globals().update(py_fallback()) - diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index 9aa742177..f6a6b83b4 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -50,6 +50,7 @@ __all__.sort() RETAIN_SCHEMA = util.symbol('retain_schema') + class SchemaItem(events.SchemaEventTarget, visitors.Visitable): """Base class for items that define a database schema.""" @@ -82,12 +83,14 @@ class SchemaItem(events.SchemaEventTarget, visitors.Visitable): """ return {} + def _get_table_key(name, schema): if schema is None: return name else: return schema + "." + name + def _validate_dialect_kwargs(kwargs, name): # validate remaining kwargs that they all specify DB prefixes if len([k for k in kwargs @@ -101,6 +104,7 @@ def _validate_dialect_kwargs(kwargs, name): inspection._self_inspects(SchemaItem) + class Table(SchemaItem, expression.TableClause): """Represent a table in a database. @@ -111,9 +115,9 @@ class Table(SchemaItem, expression.TableClause): Column('value', String(50)) ) - The :class:`.Table` object constructs a unique instance of itself based on its - name and optional schema name within the given :class:`.MetaData` object. - Calling the :class:`.Table` + The :class:`.Table` object constructs a unique instance of itself based + on its name and optional schema name within the given + :class:`.MetaData` object. 
Calling the :class:`.Table` constructor with the same name and same :class:`.MetaData` argument a second time will return the *same* :class:`.Table` object - in this way the :class:`.Table` constructor acts as a registry function. @@ -170,9 +174,10 @@ class Table(SchemaItem, expression.TableClause): or Connection instance to be used for the table reflection. If ``None``, the underlying MetaData's bound connectable will be used. - :param extend_existing: When ``True``, indicates that if this :class:`.Table` is already - present in the given :class:`.MetaData`, apply further arguments within - the constructor to the existing :class:`.Table`. + :param extend_existing: When ``True``, indicates that if this + :class:`.Table` is already present in the given :class:`.MetaData`, + apply further arguments within the constructor to the existing + :class:`.Table`. If ``extend_existing`` or ``keep_existing`` are not set, an error is raised if additional table modifiers are specified when @@ -292,15 +297,16 @@ class Table(SchemaItem, expression.TableClause): keep_existing = kw.pop('keep_existing', False) extend_existing = kw.pop('extend_existing', False) if 'useexisting' in kw: - util.warn_deprecated("useexisting is deprecated. Use extend_existing.") + msg = "useexisting is deprecated. Use extend_existing." + util.warn_deprecated(msg) if extend_existing: - raise exc.ArgumentError("useexisting is synonymous " - "with extend_existing.") + msg = "useexisting is synonymous with extend_existing." + raise exc.ArgumentError(msg) extend_existing = kw.pop('useexisting', False) if keep_existing and extend_existing: - raise exc.ArgumentError("keep_existing and extend_existing " - "are mutually exclusive.") + msg = "keep_existing and extend_existing are mutually exclusive." 
+ raise exc.ArgumentError(msg) mustexist = kw.pop('mustexist', False) key = _get_table_key(name, schema) @@ -348,7 +354,8 @@ class Table(SchemaItem, expression.TableClause): self.schema = kwargs.pop('schema', None) if self.schema is None: self.schema = metadata.schema - self.quote_schema = kwargs.pop('quote_schema', metadata.quote_schema) + self.quote_schema = kwargs.pop( + 'quote_schema', metadata.quote_schema) else: self.quote_schema = kwargs.pop('quote_schema', None) @@ -393,7 +400,8 @@ class Table(SchemaItem, expression.TableClause): # allow user-overrides self._init_items(*args) - def _autoload(self, metadata, autoload_with, include_columns, exclude_columns=()): + def _autoload(self, metadata, autoload_with, include_columns, + exclude_columns=()): if self.primary_key.columns: PrimaryKeyConstraint(*[ c for c in self.primary_key.columns @@ -419,9 +427,11 @@ class Table(SchemaItem, expression.TableClause): @property def _sorted_constraints(self): - """Return the set of constraints as a list, sorted by creation order.""" + """Return the set of constraints as a list, sorted by creation + order. 
- return sorted(self.constraints, key=lambda c:c._creation_order) + """ + return sorted(self.constraints, key=lambda c: c._creation_order) def _init_existing(self, *args, **kwargs): autoload = kwargs.pop('autoload', False) @@ -438,7 +448,7 @@ class Table(SchemaItem, expression.TableClause): if include_columns is not None: for c in self.c: if c.name not in include_columns: - self._columns.remove(c) + self._columns.remove(c) for key in ('quote', 'quote_schema'): if key in kwargs: @@ -452,7 +462,8 @@ class Table(SchemaItem, expression.TableClause): exclude_columns = [c.name for c in self.c] else: exclude_columns = () - self._autoload(self.metadata, autoload_with, include_columns, exclude_columns) + self._autoload( + self.metadata, autoload_with, include_columns, exclude_columns) self._extra_kwargs(**kwargs) self._init_items(*args) @@ -532,19 +543,21 @@ class Table(SchemaItem, expression.TableClause): column._set_parent_with_dispatch(self) def append_constraint(self, constraint): - """Append a :class:`~.schema.Constraint` to this :class:`~.schema.Table`. + """Append a :class:`~.schema.Constraint` to this + :class:`~.schema.Table`. This has the effect of the constraint being included in any future CREATE TABLE statement, assuming specific DDL creation - events have not been associated with the given :class:`~.schema.Constraint` - object. + events have not been associated with the given + :class:`~.schema.Constraint` object. Note that this does **not** produce the constraint within the relational database automatically, for a table that already exists in the database. To add a constraint to an existing relational database table, the SQL ALTER command must - be used. SQLAlchemy also provides the :class:`.AddConstraint` construct - which can produce this SQL when invoked as an executable clause. + be used. SQLAlchemy also provides the + :class:`.AddConstraint` construct which can produce this SQL when + invoked as an executable clause. 
""" @@ -601,7 +614,6 @@ class Table(SchemaItem, expression.TableClause): self, checkfirst=checkfirst) - def drop(self, bind=None, checkfirst=False): """Issue a ``DROP`` statement for this :class:`.Table`, using the given :class:`.Connectable` @@ -616,7 +628,6 @@ class Table(SchemaItem, expression.TableClause): self, checkfirst=checkfirst) - def tometadata(self, metadata, schema=RETAIN_SCHEMA): """Return a copy of this :class:`.Table` associated with a different :class:`.MetaData`. @@ -667,6 +678,7 @@ class Table(SchemaItem, expression.TableClause): table.dispatch._update(self.dispatch) return table + class Column(SchemaItem, expression.ColumnClause): """Represents a column in a database table.""" @@ -768,8 +780,8 @@ class Column(SchemaItem, expression.ColumnClause): .. versionchanged:: 0.7.4 ``autoincrement`` accepts a special value ``'ignore_fk'`` - to indicate that autoincrementing status regardless of foreign key - references. This applies to certain composite foreign key + to indicate that autoincrementing status regardless of foreign + key references. This applies to certain composite foreign key setups, such as the one demonstrated in the ORM documentation at :ref:`post_update`. @@ -1219,8 +1231,9 @@ class ForeignKey(SchemaItem): ``True`` in which case the rendered name of the column is used. .. versionadded:: 0.7.4 - Note that if the schema name is not included, and the underlying - :class:`.MetaData` has a "schema", that value will be used. + Note that if the schema name is not included, and the + underlying :class:`.MetaData` has a "schema", that value will + be used. :param name: Optional string. An in-database name for the key if `constraint` is not provided. @@ -1263,7 +1276,6 @@ class ForeignKey(SchemaItem): # markers. 
self.constraint = _constraint - self.use_alter = use_alter self.name = name self.onupdate = onupdate @@ -1307,7 +1319,8 @@ class ForeignKey(SchemaItem): return fk def _get_colspec(self, schema=None): - """Return a string based 'column specification' for this :class:`.ForeignKey`. + """Return a string based 'column specification' for this + :class:`.ForeignKey`. This is usually the equivalent of the string-based "tablename.colname" argument first passed to the object's constructor. @@ -1328,7 +1341,8 @@ class ForeignKey(SchemaItem): target_fullname = property(_get_colspec) def references(self, table): - """Return True if the given :class:`.Table` is referenced by this :class:`.ForeignKey`.""" + """Return True if the given :class:`.Table` is referenced by this + :class:`.ForeignKey`.""" return table.corresponding_column(self.column) is not None @@ -1345,7 +1359,8 @@ class ForeignKey(SchemaItem): @util.memoized_property def column(self): - """Return the target :class:`.Column` referenced by this :class:`.ForeignKey`. + """Return the target :class:`.Column` referenced by this + :class:`.ForeignKey`. If this :class:`.ForeignKey` was created using a string-based target column specification, this @@ -1482,6 +1497,7 @@ class ForeignKey(SchemaItem): self.constraint._set_parent_with_dispatch(table) table.foreign_keys.add(self) + class _NotAColumnExpr(object): def _not_a_column_expr(self): raise exc.InvalidRequestError( @@ -1491,6 +1507,7 @@ class _NotAColumnExpr(object): __clause_element__ = self_group = lambda self: self._not_a_column_expr() _from_objects = property(lambda self: self._not_a_column_expr()) + class DefaultGenerator(_NotAColumnExpr, SchemaItem): """Base class for column *default* values.""" @@ -1647,6 +1664,7 @@ class ColumnDefault(DefaultGenerator): def __repr__(self): return "ColumnDefault(%r)" % self.arg + class Sequence(DefaultGenerator): """Represents a named database sequence. 
@@ -1707,9 +1725,9 @@ class Sequence(DefaultGenerator): :param metadata: optional :class:`.MetaData` object which will be associated with this :class:`.Sequence`. A :class:`.Sequence` that is associated with a :class:`.MetaData` gains access to the - ``bind`` of that :class:`.MetaData`, meaning the :meth:`.Sequence.create` - and :meth:`.Sequence.drop` methods will make usage of that engine - automatically. + ``bind`` of that :class:`.MetaData`, meaning the + :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will + make usage of that engine automatically. .. versionchanged:: 0.7 Additionally, the appropriate CREATE SEQUENCE/ @@ -1846,6 +1864,7 @@ class FetchedValue(_NotAColumnExpr, events.SchemaEventTarget): inspection._self_inspects(FetchedValue) + class DefaultClause(FetchedValue): """A DDL-specified DEFAULT column value. @@ -1882,6 +1901,7 @@ class DefaultClause(FetchedValue): return "DefaultClause(%r, for_update=%r)" % \ (self.arg, self.for_update) + class PassiveDefault(DefaultClause): """A DDL-specified DEFAULT column value. 
@@ -1896,6 +1916,7 @@ class PassiveDefault(DefaultClause): def __init__(self, *arg, **kw): DefaultClause.__init__(self, *arg, **kw) + class Constraint(SchemaItem): """A table-level SQL constraint.""" @@ -1966,6 +1987,7 @@ class Constraint(SchemaItem): def copy(self, **kw): raise NotImplementedError() + class ColumnCollectionMixin(object): def __init__(self, *columns): self.columns = expression.ColumnCollection() @@ -1982,6 +2004,7 @@ class ColumnCollectionMixin(object): col = table.c[col] self.columns.add(col) + class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint): """A constraint that proxies a ColumnCollection.""" @@ -2072,7 +2095,6 @@ class CheckConstraint(Constraint): self._set_parent_with_dispatch( tables.pop()) - def __visit_name__(self): if isinstance(self.parent, Table): return "check_constraint" @@ -2089,6 +2111,7 @@ class CheckConstraint(Constraint): c.dispatch._update(self.dispatch) return c + class ForeignKeyConstraint(Constraint): """A table-level FOREIGN KEY constraint. @@ -2186,7 +2209,6 @@ class ForeignKeyConstraint(Constraint): columns[0].table is not None: self._set_parent_with_dispatch(columns[0].table) - @property def _col_description(self): return ", ".join(self._elements) @@ -2210,9 +2232,9 @@ class ForeignKeyConstraint(Constraint): col = table.c[col] except KeyError: raise exc.ArgumentError( - "Can't create ForeignKeyConstraint " - "on table '%s': no column " - "named '%s' is present." % (table.description, col)) + "Can't create ForeignKeyConstraint " + "on table '%s': no column " + "named '%s' is present." 
% (table.description, col)) if not hasattr(fk, 'parent') or \ fk.parent is not col: @@ -2223,9 +2245,10 @@ class ForeignKeyConstraint(Constraint): return table in set(kw['tables']) and \ bind.dialect.supports_alter - event.listen(table.metadata, "after_create", AddConstraint(self, on=supports_alter)) - event.listen(table.metadata, "before_drop", DropConstraint(self, on=supports_alter)) - + event.listen(table.metadata, "after_create", + AddConstraint(self, on=supports_alter)) + event.listen(table.metadata, "before_drop", + DropConstraint(self, on=supports_alter)) def copy(self, **kw): fkc = ForeignKeyConstraint( @@ -2243,6 +2266,7 @@ class ForeignKeyConstraint(Constraint): fkc.dispatch._update(self.dispatch) return fkc + class PrimaryKeyConstraint(ColumnCollectionConstraint): """A table-level PRIMARY KEY constraint. @@ -2268,6 +2292,7 @@ class PrimaryKeyConstraint(ColumnCollectionConstraint): def _replace(self, col): self.columns.replace(col) + class UniqueConstraint(ColumnCollectionConstraint): """A table-level UNIQUE constraint. @@ -2279,6 +2304,7 @@ class UniqueConstraint(ColumnCollectionConstraint): __visit_name__ = 'unique_constraint' + class Index(ColumnCollectionMixin, SchemaItem): """A table-level INDEX. @@ -2382,8 +2408,10 @@ class Index(ColumnCollectionMixin, SchemaItem): (self.unique and ["unique=True"] or []) )) + class MetaData(SchemaItem): - """A collection of :class:`.Table` objects and their associated schema constructs. + """A collection of :class:`.Table` objects and their associated schema + constructs. Holds a collection of :class:`.Table` objects as well as an optional binding to an :class:`.Engine` or @@ -2391,8 +2419,8 @@ class MetaData(SchemaItem): in the collection and their columns may participate in implicit SQL execution. - The :class:`.Table` objects themselves are stored in the ``metadata.tables`` - dictionary. + The :class:`.Table` objects themselves are stored in the + ``metadata.tables`` dictionary. 
The ``bind`` property may be assigned to dynamically. A common pattern is to start unbound and then bind later when an engine is available:: @@ -2418,7 +2446,8 @@ class MetaData(SchemaItem): __visit_name__ = 'metadata' - def __init__(self, bind=None, reflect=False, schema=None, quote_schema=None): + def __init__(self, bind=None, reflect=False, schema=None, + quote_schema=None): """Create a new MetaData object. :param bind: @@ -2434,13 +2463,14 @@ class MetaData(SchemaItem): Please use the :meth:`.MetaData.reflect` method. :param schema: - The default schema to use for the :class:`.Table`, :class:`.Sequence`, and other - objects associated with this :class:`.MetaData`. - Defaults to ``None``. + The default schema to use for the :class:`.Table`, + :class:`.Sequence`, and other objects associated with this + :class:`.MetaData`. Defaults to ``None``. :param quote_schema: - Sets the ``quote_schema`` flag for those :class:`.Table`, :class:`.Sequence`, - and other objects which make usage of the local ``schema`` name. + Sets the ``quote_schema`` flag for those :class:`.Table`, + :class:`.Sequence`, and other objects which make usage of the + local ``schema`` name. .. versionadded:: 0.7.4 ``schema`` and ``quote_schema`` parameters. @@ -2484,10 +2514,11 @@ class MetaData(SchemaItem): if t.schema is not None]) def __getstate__(self): - return {'tables': self.tables, 'schema':self.schema, - 'quote_schema':self.quote_schema, - 'schemas':self._schemas, - 'sequences':self._sequences} + return {'tables': self.tables, + 'schema': self.schema, + 'quote_schema': self.quote_schema, + 'schemas': self._schemas, + 'sequences': self._sequences} def __setstate__(self, state): self.tables = state['tables'] @@ -2705,6 +2736,7 @@ class MetaData(SchemaItem): checkfirst=checkfirst, tables=tables) + class ThreadLocalMetaData(MetaData): """A MetaData variant that presents a different ``bind`` in every thread. 
@@ -2769,6 +2801,7 @@ class ThreadLocalMetaData(MetaData): if hasattr(e, 'dispose'): e.dispose() + class SchemaVisitor(visitors.ClauseVisitor): """Define the visiting for ``SchemaItem`` objects.""" @@ -2782,6 +2815,7 @@ class _DDLCompiles(expression.ClauseElement): return dialect.ddl_compiler(dialect, self, **kw) + class DDLElement(expression.Executable, _DDLCompiles): """Base class for DDL expression constructs. @@ -2813,7 +2847,7 @@ class DDLElement(expression.Executable, _DDLCompiles): """ _execution_options = expression.Executable.\ - _execution_options.union({'autocommit':True}) + _execution_options.union({'autocommit': True}) target = None on = None @@ -2925,8 +2959,9 @@ class DDLElement(expression.Executable, _DDLCompiles): This DDL element. :target: - The :class:`.Table` or :class:`.MetaData` object which is the target of - this event. May be None if the DDL is executed explicitly. + The :class:`.Table` or :class:`.MetaData` object which is the + target of this event. May be None if the DDL is executed + explicitly. :bind: The :class:`.Connection` being used for DDL execution @@ -3007,6 +3042,7 @@ class DDLElement(expression.Executable, _DDLCompiles): def bind(self): if self._bind: return self._bind + def _set_bind(self, bind): self._bind = bind bind = property(bind, _set_bind) @@ -3022,9 +3058,9 @@ class DDL(DDLElement): Specifies literal SQL DDL to be executed by the database. DDL objects function as DDL event listeners, and can be subscribed to those events - listed in :class:`.DDLEvents`, using either :class:`.Table` or :class:`.MetaData` - objects as targets. Basic templating support allows a single DDL instance - to handle repetitive tasks for multiple tables. + listed in :class:`.DDLEvents`, using either :class:`.Table` or + :class:`.MetaData` objects as targets. Basic templating support allows + a single DDL instance to handle repetitive tasks for multiple tables. 
Examples:: @@ -3133,7 +3169,6 @@ class DDL(DDLElement): self.on = on self._bind = bind - def __repr__(self): return '<%s@%s; %s>' % ( type(self).__name__, id(self), @@ -3142,20 +3177,24 @@ class DDL(DDLElement): for key in ('on', 'context') if getattr(self, key)])) + def _to_schema_column(element): - if hasattr(element, '__clause_element__'): - element = element.__clause_element__() - if not isinstance(element, Column): - raise exc.ArgumentError("schema.Column object expected") - return element + if hasattr(element, '__clause_element__'): + element = element.__clause_element__() + if not isinstance(element, Column): + raise exc.ArgumentError("schema.Column object expected") + return element + def _to_schema_column_or_string(element): if hasattr(element, '__clause_element__'): element = element.__clause_element__() if not isinstance(element, (basestring, expression.ColumnElement)): - raise exc.ArgumentError("Element %r is not a string name or column element" % element) + msg = "Element %r is not a string name or column element" + raise exc.ArgumentError(msg % element) return element + class _CreateDropBase(DDLElement): """Base class for DDL constucts that represent CREATE and DROP or equivalents. @@ -3182,6 +3221,7 @@ class _CreateDropBase(DDLElement): """ return False + class CreateSchema(_CreateDropBase): """Represent a CREATE SCHEMA statement. @@ -3199,6 +3239,7 @@ class CreateSchema(_CreateDropBase): self.quote = quote super(CreateSchema, self).__init__(name, **kw) + class DropSchema(_CreateDropBase): """Represent a DROP SCHEMA statement. 
@@ -3214,7 +3255,7 @@ class DropSchema(_CreateDropBase): """Create a new :class:`.DropSchema` construct.""" self.quote = quote - self.cascade=cascade + self.cascade = cascade super(DropSchema, self).__init__(name, **kw) @@ -3247,6 +3288,7 @@ class _DropView(_CreateDropBase): """ __visit_name__ = "drop_view" + class CreateColumn(_DDLCompiles): """Represent a :class:`.Column` as rendered in a CREATE TABLE statement, via the :class:`.CreateTable` construct. @@ -3321,31 +3363,37 @@ class CreateColumn(_DDLCompiles): def __init__(self, element): self.element = element + class DropTable(_CreateDropBase): """Represent a DROP TABLE statement.""" __visit_name__ = "drop_table" + class CreateSequence(_CreateDropBase): """Represent a CREATE SEQUENCE statement.""" __visit_name__ = "create_sequence" + class DropSequence(_CreateDropBase): """Represent a DROP SEQUENCE statement.""" __visit_name__ = "drop_sequence" + class CreateIndex(_CreateDropBase): """Represent a CREATE INDEX statement.""" __visit_name__ = "create_index" + class DropIndex(_CreateDropBase): """Represent a DROP INDEX statement.""" __visit_name__ = "drop_index" + class AddConstraint(_CreateDropBase): """Represent an ALTER TABLE ADD CONSTRAINT statement.""" @@ -3356,6 +3404,7 @@ class AddConstraint(_CreateDropBase): element._create_rule = util.portable_instancemethod( self._create_rule_disable) + class DropConstraint(_CreateDropBase): """Represent an ALTER TABLE DROP CONSTRAINT statement.""" @@ -3367,6 +3416,7 @@ class DropConstraint(_CreateDropBase): element._create_rule = util.portable_instancemethod( self._create_rule_disable) + def _bind_or_error(schemaitem, msg=None): bind = schemaitem.bind if not bind: @@ -3390,4 +3440,3 @@ def _bind_or_error(schemaitem, msg=None): (item, bindable) raise exc.UnboundExecutionError(msg) return bind - diff --git a/lib/sqlalchemy/sql/__init__.py b/lib/sqlalchemy/sql/__init__.py index e25bb3160..d0ffd8076 100644 --- a/lib/sqlalchemy/sql/__init__.py +++ 
b/lib/sqlalchemy/sql/__init__.py @@ -66,4 +66,3 @@ from .visitors import ClauseVisitor __tmp = locals().keys() __all__ = sorted([i for i in __tmp if not i.startswith('__')]) - diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 74127c86a..102b44a7e 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -57,59 +57,59 @@ BIND_PARAMS = re.compile(r'(?<![:\w\$\x5c]):([\w\$]+)(?![:\w\$])', re.UNICODE) BIND_PARAMS_ESC = re.compile(r'\x5c(:[\w\$]+)(?![:\w\$])', re.UNICODE) BIND_TEMPLATES = { - 'pyformat':"%%(%(name)s)s", - 'qmark':"?", - 'format':"%%s", - 'numeric':":[_POSITION]", - 'named':":%(name)s" + 'pyformat': "%%(%(name)s)s", + 'qmark': "?", + 'format': "%%s", + 'numeric': ":[_POSITION]", + 'named': ":%(name)s" } OPERATORS = { # binary - operators.and_ : ' AND ', - operators.or_ : ' OR ', - operators.add : ' + ', - operators.mul : ' * ', - operators.sub : ' - ', + operators.and_: ' AND ', + operators.or_: ' OR ', + operators.add: ' + ', + operators.mul: ' * ', + operators.sub: ' - ', # Py2K - operators.div : ' / ', + operators.div: ' / ', # end Py2K - operators.mod : ' % ', - operators.truediv : ' / ', - operators.neg : '-', - operators.lt : ' < ', - operators.le : ' <= ', - operators.ne : ' != ', - operators.gt : ' > ', - operators.ge : ' >= ', - operators.eq : ' = ', - operators.concat_op : ' || ', - operators.between_op : ' BETWEEN ', - operators.match_op : ' MATCH ', - operators.in_op : ' IN ', - operators.notin_op : ' NOT IN ', - operators.comma_op : ', ', - operators.from_ : ' FROM ', - operators.as_ : ' AS ', - operators.is_ : ' IS ', - operators.isnot : ' IS NOT ', - operators.collate : ' COLLATE ', + operators.mod: ' % ', + operators.truediv: ' / ', + operators.neg: '-', + operators.lt: ' < ', + operators.le: ' <= ', + operators.ne: ' != ', + operators.gt: ' > ', + operators.ge: ' >= ', + operators.eq: ' = ', + operators.concat_op: ' || ', + operators.between_op: ' BETWEEN ', + operators.match_op: 
' MATCH ', + operators.in_op: ' IN ', + operators.notin_op: ' NOT IN ', + operators.comma_op: ', ', + operators.from_: ' FROM ', + operators.as_: ' AS ', + operators.is_: ' IS ', + operators.isnot: ' IS NOT ', + operators.collate: ' COLLATE ', # unary - operators.exists : 'EXISTS ', - operators.distinct_op : 'DISTINCT ', - operators.inv : 'NOT ', + operators.exists: 'EXISTS ', + operators.distinct_op: 'DISTINCT ', + operators.inv: 'NOT ', # modifiers - operators.desc_op : ' DESC', - operators.asc_op : ' ASC', - operators.nullsfirst_op : ' NULLS FIRST', - operators.nullslast_op : ' NULLS LAST', + operators.desc_op: ' DESC', + operators.asc_op: ' ASC', + operators.nullsfirst_op: ' NULLS FIRST', + operators.nullslast_op: ' NULLS LAST', } FUNCTIONS = { - functions.coalesce : 'coalesce%(expr)s', + functions.coalesce: 'coalesce%(expr)s', functions.current_date: 'CURRENT_DATE', functions.current_time: 'CURRENT_TIME', functions.current_timestamp: 'CURRENT_TIMESTAMP', @@ -118,7 +118,7 @@ FUNCTIONS = { functions.localtimestamp: 'LOCALTIMESTAMP', functions.random: 'random%(expr)s', functions.sysdate: 'sysdate', - functions.session_user :'SESSION_USER', + functions.session_user: 'SESSION_USER', functions.user: 'USER' } @@ -141,14 +141,15 @@ EXTRACT_MAP = { } COMPOUND_KEYWORDS = { - sql.CompoundSelect.UNION : 'UNION', - sql.CompoundSelect.UNION_ALL : 'UNION ALL', - sql.CompoundSelect.EXCEPT : 'EXCEPT', - sql.CompoundSelect.EXCEPT_ALL : 'EXCEPT ALL', - sql.CompoundSelect.INTERSECT : 'INTERSECT', - sql.CompoundSelect.INTERSECT_ALL : 'INTERSECT ALL' + sql.CompoundSelect.UNION: 'UNION', + sql.CompoundSelect.UNION_ALL: 'UNION ALL', + sql.CompoundSelect.EXCEPT: 'EXCEPT', + sql.CompoundSelect.EXCEPT_ALL: 'EXCEPT ALL', + sql.CompoundSelect.INTERSECT: 'INTERSECT', + sql.CompoundSelect.INTERSECT_ALL: 'INTERSECT ALL' } + class _CompileLabel(visitors.Visitable): """lightweight label object which acts as an expression.Label.""" @@ -297,16 +298,16 @@ class SQLCompiler(engine.Compiled): 
poscount = itertools.count(1) self.string = re.sub( r'\[_POSITION\]', - lambda m:str(util.next(poscount)), + lambda m: str(util.next(poscount)), self.string) @util.memoized_property def _bind_processors(self): return dict( (key, value) for key, value in - ( (self.bind_names[bindparam], + ((self.bind_names[bindparam], bindparam.type._cached_bind_processor(self.dialect)) - for bindparam in self.bind_names ) + for bindparam in self.bind_names) if value is not None ) @@ -750,7 +751,6 @@ class SQLCompiler(engine.Compiled): (' ESCAPE ' + self.render_literal_value(escape, None)) or '') - def visit_bindparam(self, bindparam, within_columns_clause=False, literal_binds=False, skip_bind_expression=False, @@ -873,7 +873,7 @@ class SQLCompiler(engine.Compiled): positional_names.append(name) else: self.positiontup.append(name) - return self.bindtemplate % {'name':name} + return self.bindtemplate % {'name': name} def visit_cte(self, cte, asfrom=False, ashint=False, fromhints=None, @@ -1240,7 +1240,7 @@ class SQLCompiler(engine.Compiled): def limit_clause(self, select): text = "" if select._limit is not None: - text += "\n LIMIT " + self.process(sql.literal(select._limit)) + text += "\n LIMIT " + self.process(sql.literal(select._limit)) if select._offset is not None: if select._limit is None: text += "\n LIMIT -1" @@ -1449,7 +1449,6 @@ class SQLCompiler(engine.Compiled): bindparam._is_crud = True return bindparam._compiler_dispatch(self) - def _get_colparams(self, stmt, extra_tables=None): """create a set of tuples representing column/string pairs for use in an INSERT or UPDATE statement. 
@@ -1505,7 +1504,6 @@ class SQLCompiler(engine.Compiled): values.append((k, v)) - need_pks = self.isinsert and \ not self.inline and \ not stmt._returning @@ -1534,7 +1532,7 @@ class SQLCompiler(engine.Compiled): value = normalized_params[c] if sql._is_literal(value): value = self._create_crud_bind_param( - c, value, required=value is required) + c, value, required=value is required) else: self.postfetch.append(c) value = self.process(value.self_group()) @@ -1775,10 +1773,12 @@ class DDLCompiler(engine.Compiled): return self.sql_compiler.post_process_text(ddl.statement % context) def visit_create_schema(self, create): - return "CREATE SCHEMA " + self.preparer.format_schema(create.element, create.quote) + schema = self.preparer.format_schema(create.element, create.quote) + return "CREATE SCHEMA " + schema def visit_drop_schema(self, drop): - text = "DROP SCHEMA " + self.preparer.format_schema(drop.element, drop.quote) + schema = self.preparer.format_schema(drop.element, drop.quote) + text = "DROP SCHEMA " + schema if drop.cascade: text += " CASCADE" return text @@ -1921,9 +1921,7 @@ class DDLCompiler(engine.Compiled): index_name = schema_name + "." 
+ index_name return index_name - def visit_add_constraint(self, create): - preparer = self.preparer return "ALTER TABLE %s ADD %s" % ( self.preparer.format_table(create.element.table), self.process(create.element) @@ -1943,7 +1941,6 @@ class DDLCompiler(engine.Compiled): self.preparer.format_sequence(drop.element) def visit_drop_constraint(self, drop): - preparer = self.preparer return "ALTER TABLE %s DROP CONSTRAINT %s%s" % ( self.preparer.format_table(drop.element.table), self.preparer.format_constraint(drop.element), @@ -2084,7 +2081,7 @@ class GenericTypeCompiler(engine.TypeCompiler): else: return "NUMERIC(%(precision)s, %(scale)s)" % \ {'precision': type_.precision, - 'scale' : type_.scale} + 'scale': type_.scale} def visit_DECIMAL(self, type_): return "DECIMAL" @@ -2152,7 +2149,6 @@ class GenericTypeCompiler(engine.TypeCompiler): def visit_BOOLEAN(self, type_): return "BOOLEAN" - def visit_large_binary(self, type_): return self.visit_BLOB(type_) @@ -2210,6 +2206,7 @@ class GenericTypeCompiler(engine.TypeCompiler): def visit_user_defined(self, type_): return type_.get_col_spec() + class IdentifierPreparer(object): """Handle quoting and case-folding of identifiers based on options.""" @@ -2388,9 +2385,9 @@ class IdentifierPreparer(object): r'(?:' r'(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s' r'|([^\.]+))(?=\.|$))+' % - { 'initial': initial, - 'final': final, - 'escaped': escaped_final }) + {'initial': initial, + 'final': final, + 'escaped': escaped_final}) return r def unformat_identifiers(self, identifiers): diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index 1d3be7de1..3dc8dfea4 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -57,6 +57,7 @@ __all__ = [ PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT') NO_ARG = util.symbol('NO_ARG') + def nullsfirst(column): """Return a NULLS FIRST ``ORDER BY`` clause element. 
@@ -71,6 +72,7 @@ def nullsfirst(column): """ return UnaryExpression(column, modifier=operators.nullsfirst_op) + def nullslast(column): """Return a NULLS LAST ``ORDER BY`` clause element. @@ -85,6 +87,7 @@ def nullslast(column): """ return UnaryExpression(column, modifier=operators.nullslast_op) + def desc(column): """Return a descending ``ORDER BY`` clause element. @@ -99,6 +102,7 @@ def desc(column): """ return UnaryExpression(column, modifier=operators.desc_op) + def asc(column): """Return an ascending ``ORDER BY`` clause element. @@ -113,6 +117,7 @@ def asc(column): """ return UnaryExpression(column, modifier=operators.asc_op) + def outerjoin(left, right, onclause=None): """Return an ``OUTER JOIN`` clause element. @@ -137,6 +142,7 @@ def outerjoin(left, right, onclause=None): """ return Join(left, right, onclause, isouter=True) + def join(left, right, onclause=None, isouter=False): """Return a ``JOIN`` clause element (regular inner join). @@ -162,6 +168,7 @@ def join(left, right, onclause=None, isouter=False): """ return Join(left, right, onclause, isouter) + def select(columns=None, whereclause=None, from_obj=[], **kwargs): """Returns a ``SELECT`` clause element. @@ -297,6 +304,7 @@ def select(columns=None, whereclause=None, from_obj=[], **kwargs): return Select(columns, whereclause=whereclause, from_obj=from_obj, **kwargs) + def subquery(alias, *args, **kwargs): """Return an :class:`.Alias` object derived from a :class:`.Select`. @@ -312,6 +320,7 @@ def subquery(alias, *args, **kwargs): """ return Select(*args, **kwargs).alias(alias) + def insert(table, values=None, inline=False, **kwargs): """Represent an ``INSERT`` statement via the :class:`.Insert` SQL construct. @@ -358,6 +367,7 @@ def insert(table, values=None, inline=False, **kwargs): """ return Insert(table, values, inline=inline, **kwargs) + def update(table, whereclause=None, values=None, inline=False, **kwargs): """Represent an ``UPDATE`` statement via the :class:`.Update` SQL construct. 
@@ -470,6 +480,7 @@ def update(table, whereclause=None, values=None, inline=False, **kwargs): inline=inline, **kwargs) + def delete(table, whereclause=None, **kwargs): """Represent a ``DELETE`` statement via the :class:`.Delete` SQL construct. @@ -491,6 +502,7 @@ def delete(table, whereclause=None, **kwargs): """ return Delete(table, whereclause, **kwargs) + def and_(*clauses): """Join a list of clauses together using the ``AND`` operator. @@ -503,6 +515,7 @@ def and_(*clauses): return clauses[0] return BooleanClauseList(operator=operators.and_, *clauses) + def or_(*clauses): """Join a list of clauses together using the ``OR`` operator. @@ -515,6 +528,7 @@ def or_(*clauses): return clauses[0] return BooleanClauseList(operator=operators.or_, *clauses) + def not_(clause): """Return a negation of the given clause, i.e. ``NOT(clause)``. @@ -525,6 +539,7 @@ def not_(clause): """ return operators.inv(_literal_as_binds(clause)) + def distinct(expr): """Return a ``DISTINCT`` clause. @@ -541,6 +556,7 @@ def distinct(expr): return UnaryExpression(expr, operator=operators.distinct_op, type_=expr.type) + def between(ctest, cleft, cright): """Return a ``BETWEEN`` predicate clause. @@ -554,6 +570,7 @@ def between(ctest, cleft, cright): ctest = _literal_as_binds(ctest) return ctest.between(cleft, cright) + def case(whens, value=None, else_=None): """Produce a ``CASE`` statement. @@ -608,6 +625,7 @@ def case(whens, value=None, else_=None): return Case(whens, value=value, else_=else_) + def cast(clause, totype, **kwargs): """Return a ``CAST`` function. @@ -624,11 +642,13 @@ def cast(clause, totype, **kwargs): """ return Cast(clause, totype, **kwargs) + def extract(field, expr): """Return the clause ``extract(field FROM expr)``.""" return Extract(field, expr) + def collate(expression, collation): """Return the clause ``expression COLLATE collation``. 
@@ -648,6 +668,7 @@ def collate(expression, collation): _literal_as_text(collation), operators.collate, type_=expr.type) + def exists(*args, **kwargs): """Return an ``EXISTS`` clause as applied to a :class:`.Select` object. @@ -667,6 +688,7 @@ def exists(*args, **kwargs): """ return Exists(*args, **kwargs) + def union(*selects, **kwargs): """Return a ``UNION`` of multiple selectables. @@ -686,6 +708,7 @@ def union(*selects, **kwargs): """ return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs) + def union_all(*selects, **kwargs): """Return a ``UNION ALL`` of multiple selectables. @@ -705,6 +728,7 @@ def union_all(*selects, **kwargs): """ return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs) + def except_(*selects, **kwargs): """Return an ``EXCEPT`` of multiple selectables. @@ -721,6 +745,7 @@ def except_(*selects, **kwargs): """ return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs) + def except_all(*selects, **kwargs): """Return an ``EXCEPT ALL`` of multiple selectables. @@ -737,6 +762,7 @@ def except_all(*selects, **kwargs): """ return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs) + def intersect(*selects, **kwargs): """Return an ``INTERSECT`` of multiple selectables. @@ -753,6 +779,7 @@ def intersect(*selects, **kwargs): """ return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs) + def intersect_all(*selects, **kwargs): """Return an ``INTERSECT ALL`` of multiple selectables. @@ -769,6 +796,7 @@ def intersect_all(*selects, **kwargs): """ return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs) + def alias(selectable, name=None): """Return an :class:`.Alias` object. @@ -826,6 +854,7 @@ def literal(value, type_=None): """ return BindParameter(None, value, type_=type_, unique=True) + def tuple_(*expr): """Return a SQL tuple. 
@@ -846,6 +875,7 @@ def tuple_(*expr): """ return Tuple(*expr) + def type_coerce(expr, type_): """Coerce the given expression into the given type, on the Python side only. @@ -919,6 +949,7 @@ def label(name, obj): """ return Label(name, obj) + def column(text, type_=None): """Return a textual column clause, as would be in the columns clause of a ``SELECT`` statement. @@ -947,6 +978,7 @@ def column(text, type_=None): """ return ColumnClause(text, type_=type_) + def literal_column(text, type_=None): """Return a textual column expression, as would be in the columns clause of a ``SELECT`` statement. @@ -970,15 +1002,18 @@ def literal_column(text, type_=None): """ return ColumnClause(text, type_=type_, is_literal=True) + def table(name, *columns): """Represent a textual table clause. - The object returned is an instance of :class:`.TableClause`, which represents the - "syntactical" portion of the schema-level :class:`~.schema.Table` object. + The object returned is an instance of :class:`.TableClause`, which + represents the "syntactical" portion of the schema-level + :class:`~.schema.Table` object. It may be used to construct lightweight table constructs. Note that the :func:`~.expression.table` function is not part of - the ``sqlalchemy`` namespace. It must be imported from the ``sql`` package:: + the ``sqlalchemy`` namespace. It must be imported from the + ``sql`` package:: from sqlalchemy.sql import table, column @@ -991,6 +1026,7 @@ def table(name, *columns): """ return TableClause(name, *columns) + def bindparam(key, value=NO_ARG, type_=None, unique=False, required=NO_ARG, quote=None, callable_=None): """Create a bind parameter clause with the given key. @@ -1009,8 +1045,8 @@ def bindparam(key, value=NO_ARG, type_=None, unique=False, required=NO_ARG, compilation/execution. 
Defaults to ``None``, however if neither ``value`` nor - ``callable`` are passed explicitly, the ``required`` flag will be set to - ``True`` which has the effect of requiring a value be present + ``callable`` are passed explicitly, the ``required`` flag will be + set to ``True`` which has the effect of requiring a value be present when the statement is actually executed. .. versionchanged:: 0.8 The ``required`` flag is set to ``True`` @@ -1062,6 +1098,7 @@ def bindparam(key, value=NO_ARG, type_=None, unique=False, required=NO_ARG, unique=unique, required=required, quote=quote) + def outparam(key, type_=None): """Create an 'OUT' parameter for usage in functions (stored procedures), for databases which support them. @@ -1075,6 +1112,7 @@ def outparam(key, type_=None): return BindParameter( key, None, type_=type_, unique=False, isoutparam=True) + def text(text, bind=None, *args, **kwargs): """Create a SQL construct that is represented by a literal string. @@ -1171,6 +1209,7 @@ def text(text, bind=None, *args, **kwargs): """ return TextClause(text, bind=bind, *args, **kwargs) + def over(func, partition_by=None, order_by=None): """Produce an OVER clause against a function. @@ -1201,12 +1240,14 @@ def over(func, partition_by=None, order_by=None): """ return Over(func, partition_by=partition_by, order_by=order_by) + def null(): """Return a :class:`Null` object, which compiles to ``NULL``. """ return Null() + def true(): """Return a :class:`True_` object, which compiles to ``true``, or the boolean equivalent for the target dialect. @@ -1214,6 +1255,7 @@ def true(): """ return True_() + def false(): """Return a :class:`False_` object, which compiles to ``false``, or the boolean equivalent for the target dialect. @@ -1221,6 +1263,7 @@ def false(): """ return False_() + class _FunctionGenerator(object): """Generate :class:`.Function` objects based on getattr calls.""" @@ -1333,6 +1376,7 @@ func = _FunctionGenerator() # TODO: use UnaryExpression for this instead ? 
modifier = _FunctionGenerator(group=False) + class _truncated_label(unicode): """A unicode subclass used to identify symbolic " "names that may require truncation.""" @@ -1346,6 +1390,7 @@ class _truncated_label(unicode): # compiler _generated_label = _truncated_label + class _anonymous_label(_truncated_label): """A unicode subclass used to identify anonymously generated names.""" @@ -1363,6 +1408,7 @@ class _anonymous_label(_truncated_label): def apply_map(self, map_): return self % map_ + def _as_truncated(value): """coerce the given value to :class:`._truncated_label`. @@ -1376,6 +1422,7 @@ def _as_truncated(value): else: return _truncated_label(value) + def _string_or_unprintable(element): if isinstance(element, basestring): return element @@ -1385,9 +1432,11 @@ def _string_or_unprintable(element): except: return "unprintable element %r" % element + def _clone(element, **kw): return element._clone() + def _expand_cloned(elements): """expand the given set of ClauseElements to be the set of all 'cloned' predecessors. @@ -1395,6 +1444,7 @@ def _expand_cloned(elements): """ return itertools.chain(*[x._cloned_set for x in elements]) + def _select_iterables(elements): """expand tables into individual columns in the given list of column expressions. @@ -1402,6 +1452,7 @@ def _select_iterables(elements): """ return itertools.chain(*[c._select_iterable for c in elements]) + def _cloned_intersection(a, b): """return the intersection of sets a and b, counting any overlap between 'cloned' predecessors. @@ -1413,15 +1464,18 @@ def _cloned_intersection(a, b): return set(elem for elem in a if all_overlap.intersection(elem._cloned_set)) + def _from_objects(*elements): return itertools.chain(*[element._from_objects for element in elements]) + def _labeled(element): if not hasattr(element, 'name'): return element.label(None) else: return element + # there is some inconsistency here between the usage of # inspect() vs. checking for Visitable and __clause_element__. 
# Ideally all functions here would derive from inspect(), @@ -1432,6 +1486,7 @@ def _labeled(element): # _interpret_as_from() where we'd like to be able to receive ORM entities # that have no defined namespace, hence inspect() is needed there. + def _column_as_key(element): if isinstance(element, basestring): return element @@ -1442,12 +1497,14 @@ def _column_as_key(element): except AttributeError: return None + def _clause_element_as_expr(element): if hasattr(element, '__clause_element__'): return element.__clause_element__() else: return element + def _literal_as_text(element): if isinstance(element, Visitable): return element @@ -1462,6 +1519,7 @@ def _literal_as_text(element): "SQL expression object or string expected." ) + def _no_literals(element): if hasattr(element, '__clause_element__'): return element.__clause_element__() @@ -1473,16 +1531,19 @@ def _no_literals(element): else: return element + def _is_literal(element): return not isinstance(element, Visitable) and \ not hasattr(element, '__clause_element__') + def _only_column_elements_or_none(element, name): if element is None: return None else: return _only_column_elements(element, name) + def _only_column_elements(element, name): if hasattr(element, '__clause_element__'): element = element.__clause_element__() @@ -1504,6 +1565,7 @@ def _literal_as_binds(element, name=None, type_=None): else: return element + def _interpret_as_column_or_from(element): if isinstance(element, Visitable): return element @@ -1519,6 +1581,7 @@ def _interpret_as_column_or_from(element): return literal_column(str(element)) + def _interpret_as_from(element): insp = inspection.inspect(element, raiseerr=False) if insp is None: @@ -1528,6 +1591,7 @@ def _interpret_as_from(element): return insp.selectable raise exc.ArgumentError("FROM expression expected") + def _const_expr(element): if element is None: return null() @@ -1564,6 +1628,7 @@ def _corresponding_column_or_error(fromclause, column, ) return c + @util.decorator def 
_generative(fn, *args, **kw): """Mark a method as generative.""" @@ -1798,7 +1863,6 @@ class ClauseElement(Visitable): """ return self - def compile(self, bind=None, dialect=None, **kw): """Compile this SQL expression. @@ -2007,15 +2071,14 @@ class _DefaultColumnComparator(operators.ColumnOperators): # as_scalar() to produce a multi- column selectable that # does not export itself as a FROM clause - return self._boolean_compare(expr, op, seq_or_selectable.as_scalar(), - negate=negate_op, **kw) + return self._boolean_compare( + expr, op, seq_or_selectable.as_scalar(), + negate=negate_op, **kw) elif isinstance(seq_or_selectable, (Selectable, TextClause)): return self._boolean_compare(expr, op, seq_or_selectable, negate=negate_op, **kw) - # Handle non selectable arguments as sequences - args = [] for o in seq_or_selectable: if not _is_literal(o): @@ -2120,7 +2183,6 @@ class _DefaultColumnComparator(operators.ColumnOperators): "rshift": (_unsupported_impl,), } - def _check_literal(self, expr, operator, other): if isinstance(other, (ColumnElement, TextClause)): if isinstance(other, BindParameter) and \ @@ -2152,15 +2214,15 @@ class ColumnElement(ClauseElement, ColumnOperators): :class:`.Column` object, :class:`.ColumnElement` serves as the basis for any unit that may be present in a SQL expression, including the expressions themselves, SQL functions, bound parameters, - literal expressions, keywords such as ``NULL``, etc. :class:`.ColumnElement` - is the ultimate base class for all such elements. + literal expressions, keywords such as ``NULL``, etc. + :class:`.ColumnElement` is the ultimate base class for all such elements. A :class:`.ColumnElement` provides the ability to generate new :class:`.ColumnElement` objects using Python expressions. 
This means that Python operators such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations, - and allow the instantiation of further :class:`.ColumnElement` instances which - are composed from other, more fundamental :class:`.ColumnElement` + and allow the instantiation of further :class:`.ColumnElement` instances + which are composed from other, more fundamental :class:`.ColumnElement` objects. For example, two :class:`.ColumnClause` objects can be added together with the addition operator ``+`` to produce a :class:`.BinaryExpression`. @@ -2181,7 +2243,6 @@ class ColumnElement(ClauseElement, ColumnOperators): discussion of this concept can be found at `Expression Transformations <http://techspot.zzzeek.org/2008/01/23/expression-transformations/>`_. - """ __visit_name__ = 'column' @@ -2338,6 +2399,7 @@ class ColumnElement(ClauseElement, ColumnOperators): return _anonymous_label('%%(%d %s)s' % (id(self), getattr(self, 'name', 'anon'))) + class ColumnCollection(util.OrderedProperties): """An ordered dictionary that stores a list of ColumnElement instances. @@ -2459,6 +2521,7 @@ class ColumnCollection(util.OrderedProperties): def as_immutable(self): return ImmutableColumnCollection(self._data, self._all_cols) + class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection): def __init__(self, data, colset): util.ImmutableProperties.__init__(self, data) @@ -2489,6 +2552,7 @@ class ColumnSet(util.ordered_column_set): def __hash__(self): return hash(tuple(x for x in self)) + class Selectable(ClauseElement): """mark a class as being selectable""" __visit_name__ = 'selectable' @@ -2499,6 +2563,7 @@ class Selectable(ClauseElement): def selectable(self): return self + class FromClause(Selectable): """Represent an element that can be used within the ``FROM`` clause of a ``SELECT`` statement. @@ -2790,6 +2855,7 @@ class FromClause(Selectable): else: return None + class BindParameter(ColumnElement): """Represent a bind parameter. 
@@ -2938,6 +3004,7 @@ class BindParameter(ColumnElement): return 'BindParameter(%r, %r, type_=%r)' % (self.key, self.value, self.type) + class TypeClause(ClauseElement): """Handle a type keyword in a SQL statement. @@ -2983,8 +3050,9 @@ class Executable(Generative): Execution options can be set on a per-statement or per :class:`.Connection` basis. Additionally, the - :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide access - to execution options which they in turn configure upon connections. + :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide + access to execution options which they in turn configure upon + connections. The :meth:`execution_options` method is generative. A new instance of this statement is returned that contains the options:: @@ -3064,6 +3132,7 @@ class Executable(Generative): # legacy, some outside users may be calling this _Executable = Executable + class TextClause(Executable, ClauseElement): """Represent a literal SQL text fragment. @@ -3162,6 +3231,7 @@ class Null(ColumnElement): def compare(self, other): return isinstance(other, Null) + class False_(ColumnElement): """Represent the ``false`` keyword in a SQL statement. @@ -3174,6 +3244,7 @@ class False_(ColumnElement): def __init__(self): self.type = sqltypes.BOOLEANTYPE + class True_(ColumnElement): """Represent the ``true`` keyword in a SQL statement. 
@@ -3262,6 +3333,7 @@ class ClauseList(ClauseElement): else: return False + class BooleanClauseList(ClauseList, ColumnElement): __visit_name__ = 'clauselist' @@ -3280,6 +3352,7 @@ class BooleanClauseList(ClauseList, ColumnElement): else: return super(BooleanClauseList, self).self_group(against=against) + class Tuple(ClauseList, ColumnElement): def __init__(self, *clauses, **kw): @@ -3360,6 +3433,7 @@ class Case(ColumnElement): return list(itertools.chain(*[x._from_objects for x in self.get_children()])) + class FunctionElement(Executable, ColumnElement, FromClause): """Base for SQL function-oriented constructs. @@ -3717,6 +3791,7 @@ class BinaryExpression(ColumnElement): else: return super(BinaryExpression, self)._negate() + class Exists(UnaryExpression): __visit_name__ = UnaryExpression.__visit_name__ _from_objects = [] @@ -3746,9 +3821,9 @@ class Exists(UnaryExpression): return e def select_from(self, clause): - """return a new :class:`.Exists` construct, applying the given expression - to the :meth:`.Select.select_from` method of the select statement - contained. + """return a new :class:`.Exists` construct, applying the given + expression to the :meth:`.Select.select_from` method of the select + statement contained. """ e = self._clone() @@ -3764,6 +3839,7 @@ class Exists(UnaryExpression): e.element = self.element.where(clause).self_group() return e + class Join(FromClause): """represent a ``JOIN`` construct between two :class:`.FromClause` elements. @@ -3916,6 +3992,7 @@ class Join(FromClause): self.left._from_objects + \ self.right._from_objects + class Alias(FromClause): """Represents an table or selectable alias (AS). @@ -4009,6 +4086,7 @@ class Alias(FromClause): def bind(self): return self.element.bind + class CTE(Alias): """Represent a Common Table Expression. 
@@ -4093,6 +4171,7 @@ class Grouping(ColumnElement): return isinstance(other, Grouping) and \ self.element.compare(other.element) + class FromGrouping(FromClause): """Represent a grouping of a FROM clause""" __visit_name__ = 'grouping' @@ -4141,6 +4220,7 @@ class FromGrouping(FromClause): def __setstate__(self, state): self.element = state['element'] + class Over(ColumnElement): """Represent an OVER clause. @@ -4187,6 +4267,7 @@ class Over(ColumnElement): if c is not None] )) + class Label(ColumnElement): """Represents a column label (AS). @@ -4260,6 +4341,7 @@ class Label(ColumnElement): e.type = self._type return e + class ColumnClause(Immutable, ColumnElement): """Represents a generic column expression from any textual string. @@ -4403,7 +4485,6 @@ class ColumnClause(Immutable, ColumnElement): else: return name - def _bind_param(self, operator, obj): return BindParameter(self.name, obj, _compared_to_operator=operator, @@ -4431,6 +4512,7 @@ class ColumnClause(Immutable, ColumnElement): selectable._columns[c.key] = c return c + class TableClause(Immutable, FromClause): """Represents a minimal "table" construct. @@ -4564,6 +4646,7 @@ class TableClause(Immutable, FromClause): def _from_objects(self): return [self] + class SelectBase(Executable, FromClause): """Base class for :class:`.Select` and ``CompoundSelects``.""" @@ -4871,6 +4954,7 @@ class ScalarSelect(Generative, Grouping): def self_group(self, **kwargs): return self + class CompoundSelect(SelectBase): """Forms the basis of ``UNION``, ``UNION ALL``, and other SELECT-based set operations.""" @@ -4984,6 +5068,7 @@ class CompoundSelect(SelectBase): self._bind = bind bind = property(bind, _set_bind) + class HasPrefixes(object): _prefixes = () @@ -5020,6 +5105,7 @@ class HasPrefixes(object): self._prefixes = self._prefixes + tuple( [(_literal_as_text(p), dialect) for p in prefixes]) + class Select(HasPrefixes, SelectBase): """Represents a ``SELECT`` statement. 
@@ -5332,8 +5418,8 @@ class Select(HasPrefixes, SelectBase): other either based on foreign key, or via a simple equality comparison in the WHERE clause of the statement. The primary purpose of this method is to automatically construct a select statement - with all uniquely-named columns, without the need to use table-qualified - labels as :meth:`.apply_labels` does. + with all uniquely-named columns, without the need to use + table-qualified labels as :meth:`.apply_labels` does. When columns are omitted based on foreign key, the referred-to column is the one that's kept. When columns are omitted based on @@ -5488,7 +5574,6 @@ class Select(HasPrefixes, SelectBase): else: self._distinct = True - @_generative def select_from(self, fromclause): """return a new :func:`.select` construct with the @@ -5733,6 +5818,7 @@ class Select(HasPrefixes, SelectBase): self._bind = bind bind = property(bind, _set_bind) + class UpdateBase(HasPrefixes, Executable, ClauseElement): """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements. @@ -5779,7 +5865,6 @@ class UpdateBase(HasPrefixes, Executable, ClauseElement): self._bind = bind bind = property(bind, _set_bind) - _returning_re = re.compile(r'(?:firebird|postgres(?:ql)?)_returning') def _process_deprecated_kw(self, kwargs): @@ -5866,6 +5951,7 @@ class UpdateBase(HasPrefixes, Executable, ClauseElement): self._hints = self._hints.union( {(selectable, dialect_name): text}) + class ValuesBase(UpdateBase): """Supplies support for :meth:`.ValuesBase.values` to INSERT and UPDATE constructs.""" @@ -5923,6 +6009,7 @@ class ValuesBase(UpdateBase): self.parameters.update(self._process_colparams(v)) self.parameters.update(kwargs) + class Insert(ValuesBase): """Represent an INSERT construct. @@ -5936,7 +6023,6 @@ class Insert(ValuesBase): """ __visit_name__ = 'insert' - def __init__(self, table, values=None, @@ -6032,6 +6118,7 @@ class Update(ValuesBase): return froms + class Delete(UpdateBase): """Represent a DELETE construct. 
@@ -6083,6 +6170,7 @@ class Delete(UpdateBase): # TODO: coverage self._whereclause = clone(self._whereclause, **kw) + class _IdentifiedClause(Executable, ClauseElement): __visit_name__ = 'identified' @@ -6093,12 +6181,15 @@ class _IdentifiedClause(Executable, ClauseElement): def __init__(self, ident): self.ident = ident + class SavepointClause(_IdentifiedClause): __visit_name__ = 'savepoint' + class RollbackToSavepointClause(_IdentifiedClause): __visit_name__ = 'rollback_to_savepoint' + class ReleaseSavepointClause(_IdentifiedClause): __visit_name__ = 'release_savepoint' @@ -6123,4 +6214,3 @@ _Exists = Exists _Grouping = Grouping _FromGrouping = FromGrouping _ScalarSelect = ScalarSelect - diff --git a/lib/sqlalchemy/sql/functions.py b/lib/sqlalchemy/sql/functions.py index d26589bd9..fd6607be0 100644 --- a/lib/sqlalchemy/sql/functions.py +++ b/lib/sqlalchemy/sql/functions.py @@ -15,6 +15,7 @@ from .. import util _registry = util.defaultdict(dict) + def register_function(identifier, fn, package="_default"): """Associate a callable with a particular func. name. @@ -39,6 +40,7 @@ class _GenericMeta(VisitableType): register_function(identifier, cls, package) super(_GenericMeta, cls).__init__(clsname, bases, clsdict) + class GenericFunction(Function): """Define a 'generic' function. @@ -113,6 +115,7 @@ class GenericFunction(Function): __metaclass__ = _GenericMeta coerce_arguments = True + def __init__(self, *args, **kwargs): parsed_args = kwargs.pop('_parsed_args', None) if parsed_args is None: @@ -129,6 +132,7 @@ class GenericFunction(Function): register_function("cast", cast) register_function("extract", extract) + class next_value(GenericFunction): """Represent the 'next value', given a :class:`.Sequence` as it's single argument. 
@@ -151,10 +155,12 @@ class next_value(GenericFunction): def _from_objects(self): return [] + class AnsiFunction(GenericFunction): def __init__(self, **kwargs): GenericFunction.__init__(self, **kwargs) + class ReturnTypeFromArgs(GenericFunction): """Define a function whose return type is the same as its arguments.""" @@ -164,15 +170,19 @@ class ReturnTypeFromArgs(GenericFunction): kwargs['_parsed_args'] = args GenericFunction.__init__(self, *args, **kwargs) + class coalesce(ReturnTypeFromArgs): pass + class max(ReturnTypeFromArgs): pass + class min(ReturnTypeFromArgs): pass + class sum(ReturnTypeFromArgs): pass @@ -180,21 +190,27 @@ class sum(ReturnTypeFromArgs): class now(GenericFunction): type = sqltypes.DateTime + class concat(GenericFunction): type = sqltypes.String + class char_length(GenericFunction): type = sqltypes.Integer def __init__(self, arg, **kwargs): GenericFunction.__init__(self, arg, **kwargs) + class random(GenericFunction): pass + class count(GenericFunction): - """The ANSI COUNT aggregate function. With no arguments, emits COUNT \*.""" + """The ANSI COUNT aggregate function. With no arguments, + emits COUNT \*. 
+ """ type = sqltypes.Integer def __init__(self, expression=None, **kwargs): @@ -202,30 +218,38 @@ class count(GenericFunction): expression = literal_column('*') GenericFunction.__init__(self, expression, **kwargs) + class current_date(AnsiFunction): type = sqltypes.Date + class current_time(AnsiFunction): type = sqltypes.Time + class current_timestamp(AnsiFunction): type = sqltypes.DateTime + class current_user(AnsiFunction): type = sqltypes.String + class localtime(AnsiFunction): type = sqltypes.DateTime + class localtimestamp(AnsiFunction): type = sqltypes.DateTime + class session_user(AnsiFunction): type = sqltypes.String + class sysdate(AnsiFunction): type = sqltypes.DateTime + class user(AnsiFunction): type = sqltypes.String - diff --git a/lib/sqlalchemy/sql/operators.py b/lib/sqlalchemy/sql/operators.py index 7513c0b82..0f90f50ab 100644 --- a/lib/sqlalchemy/sql/operators.py +++ b/lib/sqlalchemy/sql/operators.py @@ -395,8 +395,8 @@ class ColumnOperators(Operators): def notlike(self, other, escape=None): """implement the ``NOT LIKE`` operator. - This is equivalent to using negation with :meth:`.ColumnOperators.like`, - i.e. ``~x.like(y)``. + This is equivalent to using negation with + :meth:`.ColumnOperators.like`, i.e. ``~x.like(y)``. .. versionadded:: 0.8 @@ -410,8 +410,8 @@ class ColumnOperators(Operators): def notilike(self, other, escape=None): """implement the ``NOT ILIKE`` operator. - This is equivalent to using negation with :meth:`.ColumnOperators.ilike`, - i.e. ``~x.ilike(y)``. + This is equivalent to using negation with + :meth:`.ColumnOperators.ilike`, i.e. ``~x.ilike(y)``. .. versionadded:: 0.8 @@ -549,7 +549,10 @@ class ColumnOperators(Operators): return self.operate(between_op, cleft, cright) def distinct(self): - """Produce a :func:`~.expression.distinct` clause against the parent object.""" + """Produce a :func:`~.expression.distinct` clause against the + parent object. 
+ + """ return self.operate(distinct_op) def __add__(self, other): @@ -612,100 +615,132 @@ class ColumnOperators(Operators): """ return self.reverse_operate(truediv, other) + def from_(): raise NotImplementedError() + def as_(): raise NotImplementedError() + def exists(): raise NotImplementedError() + def is_(a, b): return a.is_(b) + def isnot(a, b): return a.isnot(b) + def collate(a, b): return a.collate(b) + def op(a, opstring, b): return a.op(opstring)(b) + def like_op(a, b, escape=None): return a.like(b, escape=escape) + def notlike_op(a, b, escape=None): return a.notlike(b, escape=escape) + def ilike_op(a, b, escape=None): return a.ilike(b, escape=escape) + def notilike_op(a, b, escape=None): return a.notilike(b, escape=escape) + def between_op(a, b, c): return a.between(b, c) + def in_op(a, b): return a.in_(b) + def notin_op(a, b): return a.notin_(b) + def distinct_op(a): return a.distinct() + def startswith_op(a, b, escape=None): return a.startswith(b, escape=escape) + def notstartswith_op(a, b, escape=None): return ~a.startswith(b, escape=escape) + def endswith_op(a, b, escape=None): return a.endswith(b, escape=escape) + def notendswith_op(a, b, escape=None): return ~a.endswith(b, escape=escape) + def contains_op(a, b, escape=None): return a.contains(b, escape=escape) + def notcontains_op(a, b, escape=None): return ~a.contains(b, escape=escape) + def match_op(a, b): return a.match(b) + def comma_op(a, b): raise NotImplementedError() + def concat_op(a, b): return a.concat(b) + def desc_op(a): return a.desc() + def asc_op(a): return a.asc() + def nullsfirst_op(a): return a.nullsfirst() + def nullslast_op(a): return a.nullslast() + _commutative = set([eq, ne, add, mul]) _comparison = set([eq, ne, lt, gt, ge, le]) + def is_comparison(op): return op in _comparison + def is_commutative(op): return op in _commutative + def is_ordering_modifier(op): return op in (asc_op, desc_op, nullsfirst_op, nullslast_op) diff --git a/lib/sqlalchemy/sql/util.py 
b/lib/sqlalchemy/sql/util.py index 2c0769012..29504cd71 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -12,12 +12,14 @@ from collections import deque """Utility functions that build upon SQL and Schema constructs.""" + def sort_tables(tables, skip_fn=None): """sort a collection of Table objects in order of their foreign-key dependency.""" tables = list(tables) tuples = [] + def visit_foreign_key(fkey): if fkey.use_alter: return @@ -40,6 +42,7 @@ def sort_tables(tables, skip_fn=None): return list(topological.sort(tuples, tables)) + def find_join_source(clauses, join_to): """Given a list of FROM clauses and a selectable, return the first index and element from the list of @@ -102,6 +105,7 @@ def visit_binary_product(fn, expr): """ stack = [] + def visit(element): if isinstance(element, (expression.ScalarSelect)): # we dont want to dig into correlated subqueries, @@ -124,6 +128,7 @@ def visit_binary_product(fn, expr): yield e list(visit(expr)) + def find_tables(clause, check_columns=False, include_aliases=False, include_joins=False, include_selects=False, include_crud=False): @@ -139,7 +144,7 @@ def find_tables(clause, check_columns=False, _visitors['join'] = tables.append if include_aliases: - _visitors['alias'] = tables.append + _visitors['alias'] = tables.append if include_crud: _visitors['insert'] = _visitors['update'] = \ @@ -152,16 +157,18 @@ def find_tables(clause, check_columns=False, _visitors['table'] = tables.append - visitors.traverse(clause, {'column_collections':False}, _visitors) + visitors.traverse(clause, {'column_collections': False}, _visitors) return tables + def find_columns(clause): """locate Column objects within the given expression.""" cols = util.column_set() - visitors.traverse(clause, {}, {'column':cols.add}) + visitors.traverse(clause, {}, {'column': cols.add}) return cols + def unwrap_order_by(clause): """Break up an 'order by' expression into individual column-expressions, without DESC/ASC/NULLS FIRST/NULLS 
LAST""" @@ -181,6 +188,7 @@ def unwrap_order_by(clause): stack.append(c) return cols + def clause_is_present(clause, search): """Given a target clause and a second to search within, return True if the target is plainly present in the search without any @@ -213,12 +221,14 @@ def bind_values(clause): """ v = [] + def visit_bindparam(bind): v.append(bind.effective_value) - visitors.traverse(clause, {}, {'bindparam':visit_bindparam}) + visitors.traverse(clause, {}, {'bindparam': visit_bindparam}) return v + def _quote_ddl_expr(element): if isinstance(element, basestring): element = element.replace("'", "''") @@ -226,6 +236,7 @@ def _quote_ddl_expr(element): else: return repr(element) + class _repr_params(object): """A string view of bound parameters, truncating display to the given number of 'multi' parameter sets. @@ -239,9 +250,10 @@ class _repr_params(object): if isinstance(self.params, (list, tuple)) and \ len(self.params) > self.batches and \ isinstance(self.params[0], (list, dict, tuple)): + msg = " ... displaying %i of %i total bound parameter sets ... " return ' '.join(( repr(self.params[:self.batches - 2])[0:-1], - " ... displaying %i of %i total bound parameter sets ... " % (self.batches, len(self.params)), + msg % (self.batches, len(self.params)), repr(self.params[-2:])[1:] )) else: @@ -268,8 +280,12 @@ def expression_as_ddl(clause): return visitors.replacement_traverse(clause, {}, repl) + def adapt_criterion_to_null(crit, nulls): - """given criterion containing bind params, convert selected elements to IS NULL.""" + """given criterion containing bind params, convert selected elements + to IS NULL. 
+ + """ def visit_binary(binary): if isinstance(binary.left, expression.BindParameter) \ @@ -285,7 +301,7 @@ def adapt_criterion_to_null(crit, nulls): binary.operator = operators.is_ binary.negate = operators.isnot - return visitors.cloned_traverse(crit, {}, {'binary':visit_binary}) + return visitors.cloned_traverse(crit, {}, {'binary': visit_binary}) def join_condition(a, b, ignore_nonexistent_tables=False, @@ -325,7 +341,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, continue for fk in sorted( b.foreign_keys, - key=lambda fk:fk.parent._creation_order): + key=lambda fk: fk.parent._creation_order): if consider_as_foreign_keys is not None and \ fk.parent not in consider_as_foreign_keys: continue @@ -343,7 +359,7 @@ def join_condition(a, b, ignore_nonexistent_tables=False, if left is not b: for fk in sorted( left.foreign_keys, - key=lambda fk:fk.parent._creation_order): + key=lambda fk: fk.parent._creation_order): if consider_as_foreign_keys is not None and \ fk.parent not in consider_as_foreign_keys: continue @@ -473,6 +489,7 @@ class Annotated(object): else: return hash(other) == hash(self) + class AnnotatedColumnElement(Annotated): def __init__(self, element, values): Annotated.__init__(self, element, values) @@ -506,6 +523,7 @@ for cls in expression.__dict__.values() + [schema.Column, schema.Table]: " pass" % (cls.__name__, annotation_cls) in locals() exec "annotated_classes[cls] = Annotated%s" % (cls.__name__,) + def _deep_annotate(element, annotations, exclude=None): """Deep copy the given ClauseElement, annotating each element with the given annotations dictionary. 
@@ -529,6 +547,7 @@ def _deep_annotate(element, annotations, exclude=None): element = clone(element) return element + def _deep_deannotate(element, values=None): """Deep copy the given element, removing annotations.""" @@ -554,6 +573,7 @@ def _deep_deannotate(element, values=None): element = clone(element) return element + def _shallow_annotate(element, annotations): """Annotate the given ClauseElement and copy its internals so that internal objects refer to the new annotated object. @@ -566,6 +586,7 @@ def _shallow_annotate(element, annotations): element._copy_internals() return element + def splice_joins(left, right, stop_on=None): if left is None: return right @@ -590,12 +611,15 @@ def splice_joins(left, right, stop_on=None): return ret + def reduce_columns(columns, *clauses, **kw): - """given a list of columns, return a 'reduced' set based on natural equivalents. + """given a list of columns, return a 'reduced' set based on natural + equivalents. the set is reduced to the smallest list of columns which have no natural - equivalent present in the list. A "natural equivalent" means that two columns - will ultimately represent the same value because they are related by a foreign key. + equivalent present in the list. A "natural equivalent" means that two + columns will ultimately represent the same value because they are related + by a foreign key. \*clauses is an optional list of join clauses which will be traversed to further identify columns that are "equivalent". 
@@ -659,6 +683,7 @@ def reduce_columns(columns, *clauses, **kw): return expression.ColumnSet(columns.difference(omit)) + def criterion_as_pairs(expression, consider_as_foreign_keys=None, consider_as_referenced_keys=None, any_operator=False): """traverse an expression and locate binary criterion pairs.""" @@ -705,7 +730,7 @@ def criterion_as_pairs(expression, consider_as_foreign_keys=None, elif binary.right.references(binary.left): pairs.append((binary.left, binary.right)) pairs = [] - visitors.traverse(expression, {}, {'binary':visit_binary}) + visitors.traverse(expression, {}, {'binary': visit_binary}) return pairs @@ -768,7 +793,7 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor): include=None, exclude=None, include_fn=None, exclude_fn=None, adapt_on_names=False): - self.__traverse_options__ = {'stop_on':[selectable]} + self.__traverse_options__ = {'stop_on': [selectable]} self.selectable = selectable if include: assert not include_fn @@ -783,7 +808,8 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor): self.equivalents = util.column_dict(equivalents or {}) self.adapt_on_names = adapt_on_names - def _corresponding_column(self, col, require_embedded, _seen=util.EMPTY_SET): + def _corresponding_column(self, col, require_embedded, + _seen=util.EMPTY_SET): newcol = self.selectable.corresponding_column( col, require_embedded=require_embedded) @@ -811,6 +837,7 @@ class ClauseAdapter(visitors.ReplacingCloningVisitor): else: return self._corresponding_column(col, True) + class ColumnAdapter(ClauseAdapter): """Extends ClauseAdapter with extra utility functions. diff --git a/lib/sqlalchemy/sql/visitors.py b/lib/sqlalchemy/sql/visitors.py index 6f2c82992..09c50a934 100644 --- a/lib/sqlalchemy/sql/visitors.py +++ b/lib/sqlalchemy/sql/visitors.py @@ -24,7 +24,6 @@ http://techspot.zzzeek.org/2008/01/23/expression-transformations/ """ from collections import deque -import re from .. 
import util import operator @@ -33,6 +32,7 @@ __all__ = ['VisitableType', 'Visitable', 'ClauseVisitor', 'iterate_depthfirst', 'traverse_using', 'traverse', 'cloned_traverse', 'replacement_traverse'] + class VisitableType(type): """Metaclass which assigns a `_compiler_dispatch` method to classes having a `__visit_name__` attribute. @@ -43,7 +43,8 @@ class VisitableType(type): def _compiler_dispatch (self, visitor, **kw): '''Look for an attribute named "visit_" + self.__visit_name__ on the visitor, and call it with the same kw params.''' - return getattr(visitor, 'visit_%s' % self.__visit_name__)(self, **kw) + visit_attr = 'visit_%s' % self.__visit_name__ + return getattr(visitor, visit_attr)(self, **kw) Classes having no __visit_name__ attribute will remain unaffected. """ @@ -68,6 +69,7 @@ def _generate_dispatch(cls): # the string name of the class's __visit_name__ is known at # this early stage (import time) so it can be pre-constructed. getter = operator.attrgetter("visit_%s" % visit_name) + def _compiler_dispatch(self, visitor, **kw): return getter(visitor)(self, **kw) else: @@ -75,14 +77,16 @@ def _generate_dispatch(cls): # __visit_name__ is not yet a string. As a result, the visit # string has to be recalculated with each compilation. def _compiler_dispatch(self, visitor, **kw): - return getattr(visitor, 'visit_%s' % self.__visit_name__)(self, **kw) + visit_attr = 'visit_%s' % self.__visit_name__ + return getattr(visitor, visit_attr)(self, **kw) - _compiler_dispatch.__doc__ = \ + _compiler_dispatch.__doc__ = \ """Look for an attribute named "visit_" + self.__visit_name__ on the visitor, and call it with the same kw params. """ cls._compiler_dispatch = _compiler_dispatch + class Visitable(object): """Base class for visitable objects, applies the ``VisitableType`` metaclass. @@ -91,6 +95,7 @@ class Visitable(object): __metaclass__ = VisitableType + class ClauseVisitor(object): """Base class for visitor objects which can traverse using the traverse() function. 
@@ -106,8 +111,10 @@ class ClauseVisitor(object): return meth(obj, **kw) def iterate(self, obj): - """traverse the given expression structure, returning an iterator of all elements.""" + """traverse the given expression structure, returning an iterator + of all elements. + """ return iterate(obj, self.__traverse_options__) def traverse(self, obj): @@ -143,6 +150,7 @@ class ClauseVisitor(object): tail._next = visitor return self + class CloningVisitor(ClauseVisitor): """Base class for visitor objects which can traverse using the cloned_traverse() function. @@ -150,14 +158,18 @@ class CloningVisitor(ClauseVisitor): """ def copy_and_process(self, list_): - """Apply cloned traversal to the given list of elements, and return the new list.""" + """Apply cloned traversal to the given list of elements, and return + the new list. + """ return [self.traverse(x) for x in list_] def traverse(self, obj): """traverse and visit the given expression structure.""" - return cloned_traverse(obj, self.__traverse_options__, self._visitor_dict) + return cloned_traverse( + obj, self.__traverse_options__, self._visitor_dict) + class ReplacingCloningVisitor(CloningVisitor): """Base class for visitor objects which can traverse using @@ -184,6 +196,7 @@ class ReplacingCloningVisitor(CloningVisitor): return e return replacement_traverse(obj, self.__traverse_options__, replace) + def iterate(obj, opts): """traverse the given expression structure, returning an iterator. @@ -197,6 +210,7 @@ def iterate(obj, opts): for c in t.get_children(**opts): stack.append(c) + def iterate_depthfirst(obj, opts): """traverse the given expression structure, returning an iterator. @@ -212,25 +226,35 @@ def iterate_depthfirst(obj, opts): stack.append(c) return iter(traversal) + def traverse_using(iterator, obj, visitors): - """visit the given expression structure using the given iterator of objects.""" + """visit the given expression structure using the given iterator of + objects. 
+ """ for target in iterator: meth = visitors.get(target.__visit_name__, None) if meth: meth(target) return obj + def traverse(obj, opts, visitors): - """traverse and visit the given expression structure using the default iterator.""" + """traverse and visit the given expression structure using the default + iterator. + """ return traverse_using(iterate(obj, opts), obj, visitors) + def traverse_depthfirst(obj, opts, visitors): - """traverse and visit the given expression structure using the depth-first iterator.""" + """traverse and visit the given expression structure using the + depth-first iterator. + """ return traverse_using(iterate_depthfirst(obj, opts), obj, visitors) + def cloned_traverse(obj, opts, visitors): """clone the given expression structure, allowing modifications by visitors.""" diff --git a/lib/sqlalchemy/testing/__init__.py b/lib/sqlalchemy/testing/__init__.py index 15b3471aa..e571a5045 100644 --- a/lib/sqlalchemy/testing/__init__.py +++ b/lib/sqlalchemy/testing/__init__.py @@ -10,12 +10,11 @@ from .exclusions import db_spec, _is_excluded, fails_if, skip_if, future,\ from .assertions import emits_warning, emits_warning_on, uses_deprecated, \ eq_, ne_, is_, is_not_, startswith_, assert_raises, \ - assert_raises_message, AssertsCompiledSQL, ComparesTables, AssertsExecutionResults + assert_raises_message, AssertsCompiledSQL, ComparesTables, \ + AssertsExecutionResults from .util import run_as_contextmanager, rowset, fail, provide_metadata, adict crashes = skip from .config import db, requirements as requires - - diff --git a/lib/sqlalchemy/testing/assertions.py b/lib/sqlalchemy/testing/assertions.py index e74d13a97..ebd10b130 100644 --- a/lib/sqlalchemy/testing/assertions.py +++ b/lib/sqlalchemy/testing/assertions.py @@ -16,6 +16,7 @@ import itertools from .util import fail import contextlib + def emits_warning(*messages): """Mark a test as emitting a warning. 
@@ -50,6 +51,7 @@ def emits_warning(*messages): resetwarnings() return decorate + def emits_warning_on(db, *warnings): """Mark a test as emitting a warning on a specific dialect. @@ -115,7 +117,6 @@ def uses_deprecated(*messages): return decorate - def global_cleanup_assertions(): """Check things that have to be finalized at the end of a test suite. @@ -129,28 +130,32 @@ def global_cleanup_assertions(): assert not pool._refs, str(pool._refs) - def eq_(a, b, msg=None): """Assert a == b, with repr messaging on failure.""" assert a == b, msg or "%r != %r" % (a, b) + def ne_(a, b, msg=None): """Assert a != b, with repr messaging on failure.""" assert a != b, msg or "%r == %r" % (a, b) + def is_(a, b, msg=None): """Assert a is b, with repr messaging on failure.""" assert a is b, msg or "%r is not %r" % (a, b) + def is_not_(a, b, msg=None): """Assert a is not b, with repr messaging on failure.""" assert a is not b, msg or "%r is %r" % (a, b) + def startswith_(a, fragment, msg=None): """Assert a.startswith(fragment), with repr messaging on failure.""" assert a.startswith(fragment), msg or "%r does not start with %r" % ( a, fragment) + def assert_raises(except_cls, callable_, *args, **kw): try: callable_(*args, **kw) @@ -161,6 +166,7 @@ def assert_raises(except_cls, callable_, *args, **kw): # assert outside the block so it works for AssertionError too ! 
assert success, "Callable did not raise an exception" + def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): try: callable_(*args, **kwargs) @@ -214,7 +220,9 @@ class AssertsCompiledSQL(object): p = c.construct_params(params) eq_(tuple([p[x] for x in c.positiontup]), checkpositional) + class ComparesTables(object): + def assert_tables_equal(self, table, reflected_table, strict_types=False): assert len(table.c) == len(reflected_table.c) for c, reflected_c in zip(table.c, reflected_table.c): @@ -224,15 +232,19 @@ class ComparesTables(object): eq_(c.nullable, reflected_c.nullable) if strict_types: + msg = "Type '%s' doesn't correspond to type '%s'" assert type(reflected_c.type) is type(c.type), \ - "Type '%s' doesn't correspond to type '%s'" % (reflected_c.type, c.type) + msg % (reflected_c.type, c.type) else: self.assert_types_base(reflected_c, c) if isinstance(c.type, sqltypes.String): eq_(c.type.length, reflected_c.type.length) - eq_(set([f.column.name for f in c.foreign_keys]), set([f.column.name for f in reflected_c.foreign_keys])) + eq_( + set([f.column.name for f in c.foreign_keys]), + set([f.column.name for f in reflected_c.foreign_keys]) + ) if c.server_default: assert isinstance(reflected_c.server_default, schema.FetchedValue) @@ -246,6 +258,7 @@ class ComparesTables(object): "On column %r, type '%s' doesn't correspond to type '%s'" % \ (c1.name, c1.type, c2.type) + class AssertsExecutionResults(object): def assert_result(self, result, class_, *objects): result = list(result) @@ -296,6 +309,7 @@ class AssertsExecutionResults(object): len(found), len(expected))) NOVALUE = object() + def _compare_item(obj, spec): for key, value in spec.iteritems(): if isinstance(value, tuple): @@ -347,7 +361,8 @@ class AssertsExecutionResults(object): self.assert_sql_execution(db, callable_, *newrules) def assert_sql_count(self, db, callable_, count): - self.assert_sql_execution(db, callable_, assertsql.CountStatements(count)) + self.assert_sql_execution( + 
db, callable_, assertsql.CountStatements(count)) @contextlib.contextmanager def assert_execution(self, *rules): @@ -359,4 +374,4 @@ class AssertsExecutionResults(object): assertsql.asserter.clear_rules() def assert_statement_count(self, count): - return self.assert_execution(assertsql.CountStatements(count))
\ No newline at end of file + return self.assert_execution(assertsql.CountStatements(count)) diff --git a/lib/sqlalchemy/testing/assertsql.py b/lib/sqlalchemy/testing/assertsql.py index 08ee55d57..d955d1554 100644 --- a/lib/sqlalchemy/testing/assertsql.py +++ b/lib/sqlalchemy/testing/assertsql.py @@ -3,6 +3,7 @@ from ..engine.default import DefaultDialect from .. import util import re + class AssertRule(object): def process_execute(self, clauseelement, *multiparams, **params): @@ -40,6 +41,7 @@ class AssertRule(object): assert False, 'Rule has not been consumed' return self.is_consumed() + class SQLMatchRule(AssertRule): def __init__(self): self._result = None @@ -56,6 +58,7 @@ class SQLMatchRule(AssertRule): return True + class ExactSQL(SQLMatchRule): def __init__(self, sql, params=None): @@ -138,6 +141,7 @@ class RegexSQL(SQLMatchRule): _received_statement, _received_parameters) + class CompiledSQL(SQLMatchRule): def __init__(self, statement, params): @@ -217,6 +221,7 @@ class CountStatements(AssertRule): % (self.count, self._statement_count) return True + class AllOf(AssertRule): def __init__(self, *rules): @@ -244,6 +249,7 @@ class AllOf(AssertRule): def consume_final(self): return len(self.rules) == 0 + def _process_engine_statement(query, context): if util.jython: @@ -256,6 +262,7 @@ def _process_engine_statement(query, context): query = re.sub(r'\n', '', query) return query + def _process_assertion_statement(query, context): paramstyle = context.dialect.paramstyle if paramstyle == 'named': @@ -275,6 +282,7 @@ def _process_assertion_statement(query, context): return query + class SQLAssert(object): rules = None @@ -311,4 +319,3 @@ class SQLAssert(object): executemany) asserter = SQLAssert() - diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py index 2945bd456..ae4f585e1 100644 --- a/lib/sqlalchemy/testing/config.py +++ b/lib/sqlalchemy/testing/config.py @@ -1,3 +1,2 @@ requirements = None db = None - diff --git 
a/lib/sqlalchemy/testing/engines.py b/lib/sqlalchemy/testing/engines.py index 9d15c5078..20bcf0317 100644 --- a/lib/sqlalchemy/testing/engines.py +++ b/lib/sqlalchemy/testing/engines.py @@ -9,7 +9,9 @@ from .. import event, pool import re import warnings + class ConnectionKiller(object): + def __init__(self): self.proxy_refs = weakref.WeakKeyDictionary() self.testing_engines = weakref.WeakKeyDictionary() @@ -83,12 +85,14 @@ class ConnectionKiller(object): testing_reaper = ConnectionKiller() + def drop_all_tables(metadata, bind): testing_reaper.close_all() if hasattr(bind, 'close'): bind.close() metadata.drop_all(bind) + @decorator def assert_conns_closed(fn, *args, **kw): try: @@ -96,6 +100,7 @@ def assert_conns_closed(fn, *args, **kw): finally: testing_reaper.assert_all_closed() + @decorator def rollback_open_connections(fn, *args, **kw): """Decorator that rolls back all open connections after fn execution.""" @@ -105,6 +110,7 @@ def rollback_open_connections(fn, *args, **kw): finally: testing_reaper.rollback_all() + @decorator def close_first(fn, *args, **kw): """Decorator that closes all connections before fn execution.""" @@ -121,6 +127,7 @@ def close_open_connections(fn, *args, **kw): finally: testing_reaper.close_all() + def all_dialects(exclude=None): import sqlalchemy.databases as d for name in d.__all__: @@ -129,10 +136,13 @@ def all_dialects(exclude=None): continue mod = getattr(d, name, None) if not mod: - mod = getattr(__import__('sqlalchemy.databases.%s' % name).databases, name) + mod = getattr(__import__( + 'sqlalchemy.databases.%s' % name).databases, name) yield mod.dialect() + class ReconnectFixture(object): + def __init__(self, dbapi): self.dbapi = dbapi self.connections = [] @@ -165,6 +175,7 @@ class ReconnectFixture(object): self._safe(c.close) self.connections = [] + def reconnecting_engine(url=None, options=None): url = url or config.db_url dbapi = config.db.dialect.dbapi @@ -173,9 +184,11 @@ def reconnecting_engine(url=None, options=None): 
options['module'] = ReconnectFixture(dbapi) engine = testing_engine(url, options) _dispose = engine.dispose + def dispose(): engine.dialect.dbapi.shutdown() _dispose() + engine.test_shutdown = engine.dialect.dbapi.shutdown engine.dispose = dispose return engine @@ -209,6 +222,7 @@ def testing_engine(url=None, options=None): return engine + def utf8_engine(url=None, options=None): """Hook for dialects or drivers that don't handle utf8 by default.""" @@ -226,6 +240,7 @@ def utf8_engine(url=None, options=None): return testing_engine(url, options) + def mock_engine(dialect_name=None): """Provides a mocking engine based on the current testing.db. @@ -244,17 +259,21 @@ def mock_engine(dialect_name=None): dialect_name = config.db.name buffer = [] + def executor(sql, *a, **kw): buffer.append(sql) + def assert_sql(stmts): recv = [re.sub(r'[\n\t]', '', str(s)) for s in buffer] assert recv == stmts, recv + def print_sql(): d = engine.dialect return "\n".join( str(s.compile(dialect=d)) for s in engine.mock ) + engine = create_engine(dialect_name + '://', strategy='mock', executor=executor) assert not hasattr(engine, 'mock') @@ -263,6 +282,7 @@ def mock_engine(dialect_name=None): engine.print_sql = print_sql return engine + class DBAPIProxyCursor(object): """Proxy a DBAPI cursor. @@ -287,6 +307,7 @@ class DBAPIProxyCursor(object): def __getattr__(self, key): return getattr(self.cursor, key) + class DBAPIProxyConnection(object): """Proxy a DBAPI connection. @@ -308,14 +329,17 @@ class DBAPIProxyConnection(object): def __getattr__(self, key): return getattr(self.conn, key) -def proxying_engine(conn_cls=DBAPIProxyConnection, cursor_cls=DBAPIProxyCursor): + +def proxying_engine(conn_cls=DBAPIProxyConnection, + cursor_cls=DBAPIProxyCursor): """Produce an engine that provides proxy hooks for common methods. 
""" def mock_conn(): return conn_cls(config.db, cursor_cls) - return testing_engine(options={'creator':mock_conn}) + return testing_engine(options={'creator': mock_conn}) + class ReplayableSession(object): """A simple record/playback tool. @@ -427,4 +451,3 @@ class ReplayableSession(object): raise AttributeError(key) else: return result - diff --git a/lib/sqlalchemy/testing/entities.py b/lib/sqlalchemy/testing/entities.py index 1b24e73b7..5c5e69154 100644 --- a/lib/sqlalchemy/testing/entities.py +++ b/lib/sqlalchemy/testing/entities.py @@ -2,7 +2,10 @@ import sqlalchemy as sa from sqlalchemy import exc as sa_exc _repr_stack = set() + + class BasicEntity(object): + def __init__(self, **kw): for key, value in kw.iteritems(): setattr(self, key, value) @@ -21,7 +24,10 @@ class BasicEntity(object): _repr_stack.remove(id(self)) _recursion_stack = set() + + class ComparableEntity(BasicEntity): + def __hash__(self): return hash(self.__class__) diff --git a/lib/sqlalchemy/testing/exclusions.py b/lib/sqlalchemy/testing/exclusions.py index 3c70ec8d9..f105c8b6a 100644 --- a/lib/sqlalchemy/testing/exclusions.py +++ b/lib/sqlalchemy/testing/exclusions.py @@ -61,6 +61,7 @@ class skip_if(object): self._fails_on = skip_if(other, reason) return self + class fails_if(skip_if): def __call__(self, fn): @decorator @@ -69,14 +70,17 @@ class fails_if(skip_if): return fn(*args, **kw) return decorate(fn) + def only_if(predicate, reason=None): predicate = _as_predicate(predicate) return skip_if(NotPredicate(predicate), reason) + def succeeds_if(predicate, reason=None): predicate = _as_predicate(predicate) return fails_if(NotPredicate(predicate), reason) + class Predicate(object): @classmethod def as_predicate(cls, predicate): @@ -93,6 +97,7 @@ class Predicate(object): else: assert False, "unknown predicate type: %s" % predicate + class BooleanPredicate(Predicate): def __init__(self, value, description=None): self.value = value @@ -110,6 +115,7 @@ class BooleanPredicate(Predicate): def 
__str__(self): return self._as_string() + class SpecPredicate(Predicate): def __init__(self, db, op=None, spec=None, description=None): self.db = db @@ -177,6 +183,7 @@ class SpecPredicate(Predicate): def __str__(self): return self._as_string() + class LambdaPredicate(Predicate): def __init__(self, lambda_, description=None, args=None, kw=None): self.lambda_ = lambda_ @@ -201,6 +208,7 @@ class LambdaPredicate(Predicate): def __str__(self): return self._as_string() + class NotPredicate(Predicate): def __init__(self, predicate): self.predicate = predicate @@ -211,6 +219,7 @@ class NotPredicate(Predicate): def __str__(self): return self.predicate._as_string(True) + class OrPredicate(Predicate): def __init__(self, predicates, description=None): self.predicates = predicates @@ -256,9 +265,11 @@ class OrPredicate(Predicate): _as_predicate = Predicate.as_predicate + def _is_excluded(db, op, spec): return SpecPredicate(db, op, spec)() + def _server_version(engine): """Return a server_version_info tuple.""" @@ -268,24 +279,30 @@ def _server_version(engine): conn.close() return version + def db_spec(*dbs): return OrPredicate( Predicate.as_predicate(db) for db in dbs ) + def open(): return skip_if(BooleanPredicate(False, "mark as execute")) + def closed(): return skip_if(BooleanPredicate(True, "marked as skip")) + @decorator def future(fn, *args, **kw): return fails_if(LambdaPredicate(fn, *args, **kw), "Future feature") + def fails_on(db, reason=None): return fails_if(SpecPredicate(db), reason) + def fails_on_everything_except(*dbs): return succeeds_if( OrPredicate([ @@ -293,9 +310,11 @@ def fails_on_everything_except(*dbs): ]) ) + def skip(db, reason=None): return skip_if(SpecPredicate(db), reason) + def only_on(dbs, reason=None): return only_if( OrPredicate([SpecPredicate(db) for db in util.to_list(dbs)]) diff --git a/lib/sqlalchemy/testing/fixtures.py b/lib/sqlalchemy/testing/fixtures.py index 1a1204898..5c587cb2f 100644 --- a/lib/sqlalchemy/testing/fixtures.py +++ 
b/lib/sqlalchemy/testing/fixtures.py @@ -7,6 +7,7 @@ import sys import sqlalchemy as sa from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta + class TestBase(object): # A sequence of database names to always run, regardless of the # constraints below. @@ -29,6 +30,7 @@ class TestBase(object): def assert_(self, val, msg=None): assert val, msg + class TablesTest(TestBase): # 'once', None @@ -208,9 +210,11 @@ class _ORMTest(object): sa.orm.session.Session.close_all() sa.orm.clear_mappers() + class ORMTest(_ORMTest, TestBase): pass + class MappedTest(_ORMTest, TablesTest, assertions.AssertsExecutionResults): # 'once', 'each', None run_setup_classes = 'once' @@ -252,7 +256,6 @@ class MappedTest(_ORMTest, TablesTest, assertions.AssertsExecutionResults): cls.classes.clear() _ORMTest.teardown_class() - @classmethod def _setup_once_classes(cls): if cls.run_setup_classes == 'once': @@ -275,18 +278,21 @@ class MappedTest(_ORMTest, TablesTest, assertions.AssertsExecutionResults): """ cls_registry = cls.classes + class FindFixture(type): def __init__(cls, classname, bases, dict_): cls_registry[classname] = cls return type.__init__(cls, classname, bases, dict_) - class _Base(object): __metaclass__ = FindFixture + class Basic(BasicEntity, _Base): pass + class Comparable(ComparableEntity, _Base): pass + cls.Basic = Basic cls.Comparable = Comparable fn() @@ -306,6 +312,7 @@ class MappedTest(_ORMTest, TablesTest, assertions.AssertsExecutionResults): def setup_mappers(cls): pass + class DeclarativeMappedTest(MappedTest): run_setup_classes = 'once' run_setup_mappers = 'once' @@ -317,17 +324,21 @@ class DeclarativeMappedTest(MappedTest): @classmethod def _with_register_classes(cls, fn): cls_registry = cls.classes + class FindFixtureDeclarative(DeclarativeMeta): def __init__(cls, classname, bases, dict_): cls_registry[classname] = cls return DeclarativeMeta.__init__( cls, classname, bases, dict_) + class DeclarativeBasic(object): __table_cls__ = schema.Table + 
_DeclBase = declarative_base(metadata=cls.metadata, metaclass=FindFixtureDeclarative, cls=DeclarativeBasic) cls.DeclarativeBasic = _DeclBase fn() + if cls.metadata.tables: cls.metadata.create_all(config.db) diff --git a/lib/sqlalchemy/testing/pickleable.py b/lib/sqlalchemy/testing/pickleable.py index f5b8b827c..09d51b5fa 100644 --- a/lib/sqlalchemy/testing/pickleable.py +++ b/lib/sqlalchemy/testing/pickleable.py @@ -1,43 +1,59 @@ -"""Classes used in pickling tests, need to be at the module level for unpickling.""" +"""Classes used in pickling tests, need to be at the module level for +unpickling. +""" from . import fixtures + class User(fixtures.ComparableEntity): pass + class Order(fixtures.ComparableEntity): pass + class Dingaling(fixtures.ComparableEntity): pass + class EmailUser(User): pass + class Address(fixtures.ComparableEntity): pass + # TODO: these are kind of arbitrary.... class Child1(fixtures.ComparableEntity): pass + class Child2(fixtures.ComparableEntity): pass + class Parent(fixtures.ComparableEntity): pass + class Screen(object): + def __init__(self, obj, parent=None): self.obj = obj self.parent = parent + class Foo(object): + def __init__(self, moredata): self.data = 'im data' self.stuff = 'im stuff' self.moredata = moredata + __hash__ = object.__hash__ + def __eq__(self, other): return other.data == self.data and \ other.stuff == self.stuff and \ @@ -45,40 +61,53 @@ class Foo(object): class Bar(object): + def __init__(self, x, y): self.x = x self.y = y + __hash__ = object.__hash__ + def __eq__(self, other): return other.__class__ is self.__class__ and \ other.x == self.x and \ other.y == self.y + def __str__(self): return "Bar(%d, %d)" % (self.x, self.y) + class OldSchool: + def __init__(self, x, y): self.x = x self.y = y + def __eq__(self, other): return other.__class__ is self.__class__ and \ other.x == self.x and \ other.y == self.y + class OldSchoolWithoutCompare: + def __init__(self, x, y): self.x = x self.y = y + class 
BarWithoutCompare(object): + def __init__(self, x, y): self.x = x self.y = y + def __str__(self): return "Bar(%d, %d)" % (self.x, self.y) class NotComparable(object): + def __init__(self, data): self.data = data @@ -93,6 +122,7 @@ class NotComparable(object): class BrokenComparable(object): + def __init__(self, data): self.data = data @@ -104,4 +134,3 @@ class BrokenComparable(object): def __ne__(self, other): raise NotImplementedError - diff --git a/lib/sqlalchemy/testing/plugin/noseplugin.py b/lib/sqlalchemy/testing/plugin/noseplugin.py index 37f7b29f5..c104c4614 100644 --- a/lib/sqlalchemy/testing/plugin/noseplugin.py +++ b/lib/sqlalchemy/testing/plugin/noseplugin.py @@ -41,6 +41,7 @@ db_opts = {} options = None _existing_engine = None + def _log(option, opt_str, value, parser): global logging if not logging: @@ -59,34 +60,42 @@ def _list_dbs(*args): print "%20s\t%s" % (macro, file_config.get('db', macro)) sys.exit(0) + def _server_side_cursors(options, opt_str, value, parser): db_opts['server_side_cursors'] = True + def _engine_strategy(options, opt_str, value, parser): if value: db_opts['strategy'] = value pre_configure = [] post_configure = [] + + def pre(fn): pre_configure.append(fn) return fn + + def post(fn): post_configure.append(fn) return fn + @pre def _setup_options(opt, file_config): global options options = opt + @pre def _monkeypatch_cdecimal(options, file_config): if options.cdecimal: - import sys import cdecimal sys.modules['decimal'] = cdecimal + @post def _engine_uri(options, file_config): global db_label, db_url @@ -105,6 +114,7 @@ def _engine_uri(options, file_config): % db_label) db_url = file_config.get('db', db_label) + @post def _require(options, file_config): if not(options.require or @@ -131,12 +141,14 @@ def _require(options, file_config): continue pkg_resources.require(requirement) + @post def _engine_pool(options, file_config): if options.mockpool: from sqlalchemy import pool db_opts['poolclass'] = pool.AssertionPool + @post def 
_create_testing_engine(options, file_config): from sqlalchemy.testing import engines, config @@ -199,6 +211,7 @@ def _set_table_options(options, file_config): if options.mysql_engine: table_options['mysql_engine'] = options.mysql_engine + @post def _reverse_topological(options, file_config): if options.reversetop: @@ -208,6 +221,7 @@ def _reverse_topological(options, file_config): topological.set = unitofwork.set = session.set = mapper.set = \ dependency.set = RandomSet + @post def _requirements(options, file_config): from sqlalchemy.testing import config @@ -230,6 +244,7 @@ def _post_setup_options(opt, file_config): from sqlalchemy.testing import config config.options = options + @post def _setup_profiling(options, file_config): from sqlalchemy.testing import profiling diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py index a22e83cbc..ae9d176b7 100644 --- a/lib/sqlalchemy/testing/profiling.py +++ b/lib/sqlalchemy/testing/profiling.py @@ -22,6 +22,7 @@ from ..util.compat import jython, pypy, win32 _current_test = None + def profiled(target=None, **target_opts): """Function profiling. 
@@ -69,13 +70,13 @@ def profiled(target=None, **target_opts): else: stats.print_stats() - print_callers = target_opts.get('print_callers', - profile_config['print_callers']) + print_callers = target_opts.get( + 'print_callers', profile_config['print_callers']) if print_callers: stats.print_callers() - print_callees = target_opts.get('print_callees', - profile_config['print_callees']) + print_callees = target_opts.get( + 'print_callees', profile_config['print_callees']) if print_callees: stats.print_callees() @@ -92,10 +93,14 @@ class ProfileStatsFile(object): """ def __init__(self, filename): - self.write = config.options is not None and config.options.write_profiles + self.write = ( + config.options is not None and + config.options.write_profiles + ) self.fname = os.path.abspath(filename) self.short_fname = os.path.split(self.fname)[-1] - self.data = collections.defaultdict(lambda: collections.defaultdict(dict)) + self.data = collections.defaultdict( + lambda: collections.defaultdict(dict)) self._read() if self.write: # rewrite for the case where features changed, @@ -124,7 +129,10 @@ class ProfileStatsFile(object): def has_stats(self): test_key = _current_test - return test_key in self.data and self.platform_key in self.data[test_key] + return ( + test_key in self.data and + self.platform_key in self.data[test_key] + ) def result(self, callcount): test_key = _current_test @@ -153,7 +161,6 @@ class ProfileStatsFile(object): per_platform['current_count'] += 1 return result - def _header(self): return \ "# %s\n"\ @@ -165,8 +172,8 @@ class ProfileStatsFile(object): "# assertions are raised if the counts do not match.\n"\ "# \n"\ "# To add a new callcount test, apply the function_call_count \n"\ - "# decorator and re-run the tests using the --write-profiles option - \n"\ - "# this file will be rewritten including the new count.\n"\ + "# decorator and re-run the tests using the --write-profiles \n"\ + "# option - this file will be rewritten including the new count.\n"\ 
"# \n"\ "" % (self.fname) @@ -183,7 +190,8 @@ class ProfileStatsFile(object): test_key, platform_key, counts = line.split() per_fn = self.data[test_key] per_platform = per_fn[platform_key] - per_platform['counts'] = [int(count) for count in counts.split(",")] + c = [int(count) for count in counts.split(",")] + per_platform['counts'] = c per_platform['lineno'] = lineno + 1 per_platform['current_count'] = 0 profile_f.close() @@ -198,16 +206,13 @@ class ProfileStatsFile(object): profile_f.write("\n# TEST: %s\n\n" % test_key) for platform_key in sorted(per_fn): per_platform = per_fn[platform_key] - profile_f.write( - "%s %s %s\n" % ( - test_key, - platform_key, ",".join(str(count) for count in per_platform['counts']) - ) - ) + c = ",".join(str(count) for count in per_platform['counts']) + profile_f.write("%s %s %s\n" % (test_key, platform_key, c)) profile_f.close() from sqlalchemy.util.compat import update_wrapper + def function_call_count(variance=0.05): """Assert a target for a test case's function call count. @@ -222,7 +227,6 @@ def function_call_count(variance=0.05): def decorate(fn): def wrap(*args, **kw): - if cProfile is None: raise SkipTest("cProfile is not installed") @@ -237,7 +241,6 @@ def function_call_count(variance=0.05): gc_collect() - timespent, load_stats, fn_result = _profile( fn, *args, **kw ) @@ -263,8 +266,9 @@ def function_call_count(variance=0.05): if abs(callcount - expected_count) > deviance: raise AssertionError( "Adjusted function call count %s not within %s%% " - "of expected %s. (Delete line %d of file %s to regenerate " - "this callcount, when tests are run with --write-profiles.)" + "of expected %s. 
(Delete line %d of file %s to " + "regenerate this callcount, when tests are run " + "with --write-profiles.)" % ( callcount, (variance * 100), expected_count, line_no, @@ -288,4 +292,3 @@ def _profile(fn, *args, **kw): ended = time.time() return ended - began, load_stats, locals()['result'] - diff --git a/lib/sqlalchemy/testing/requirements.py b/lib/sqlalchemy/testing/requirements.py index d58538db9..68659a855 100644 --- a/lib/sqlalchemy/testing/requirements.py +++ b/lib/sqlalchemy/testing/requirements.py @@ -10,6 +10,7 @@ to provide specific inclusion/exlusions. from . import exclusions + class Requirements(object): def __init__(self, db, config): self.db = db @@ -178,7 +179,6 @@ class SuiteRequirements(Requirements): """ return exclusions.open() - @property def datetime(self): """target dialect supports representation of Python @@ -237,8 +237,10 @@ class SuiteRequirements(Requirements): @property def empty_strings_varchar(self): - """target database can persist/return an empty string with a varchar.""" + """target database can persist/return an empty string with a + varchar. 
+ """ return exclusions.open() @property @@ -248,7 +250,6 @@ class SuiteRequirements(Requirements): return exclusions.open() - @property def update_from(self): """Target must support UPDATE..FROM syntax""" diff --git a/lib/sqlalchemy/testing/runner.py b/lib/sqlalchemy/testing/runner.py index 1a4ba5212..6ec73d7c8 100644 --- a/lib/sqlalchemy/testing/runner.py +++ b/lib/sqlalchemy/testing/runner.py @@ -28,5 +28,6 @@ from sqlalchemy.testing.plugin.noseplugin import NoseSQLAlchemy import nose + def main(): nose.main(addplugins=[NoseSQLAlchemy()]) diff --git a/lib/sqlalchemy/testing/schema.py b/lib/sqlalchemy/testing/schema.py index 5dfdc0e07..ad233ec22 100644 --- a/lib/sqlalchemy/testing/schema.py +++ b/lib/sqlalchemy/testing/schema.py @@ -7,6 +7,7 @@ __all__ = 'Table', 'Column', table_options = {} + def Table(*args, **kw): """A schema.Table wrapper/hook for dialect-specific tweaks.""" @@ -76,10 +77,10 @@ def Column(*args, **kw): event.listen(col, 'after_parent_attach', add_seq, propagate=True) return col + def _truncate_name(dialect, name): if len(name) > dialect.max_identifier_length: return name[0:max(dialect.max_identifier_length - 6, 0)] + \ "_" + hex(hash(name) % 64)[2:] else: return name - diff --git a/lib/sqlalchemy/testing/suite/test_ddl.py b/lib/sqlalchemy/testing/suite/test_ddl.py index 466429aa5..c5b162413 100644 --- a/lib/sqlalchemy/testing/suite/test_ddl.py +++ b/lib/sqlalchemy/testing/suite/test_ddl.py @@ -25,7 +25,6 @@ class TableDDLTest(fixtures.TestBase): (1, 'some data') ) - @requirements.create_table @util.provide_metadata def test_create_table(self): @@ -35,7 +34,6 @@ class TableDDLTest(fixtures.TestBase): ) self._simple_roundtrip() - @requirements.drop_table @util.provide_metadata def test_drop_table(self): @@ -48,4 +46,4 @@ class TableDDLTest(fixtures.TestBase): ) -__all__ = ('TableDDLTest', )
\ No newline at end of file +__all__ = ('TableDDLTest', ) diff --git a/lib/sqlalchemy/testing/suite/test_insert.py b/lib/sqlalchemy/testing/suite/test_insert.py index 3cd7d39bc..b2b2a0aa8 100644 --- a/lib/sqlalchemy/testing/suite/test_insert.py +++ b/lib/sqlalchemy/testing/suite/test_insert.py @@ -8,6 +8,7 @@ from sqlalchemy import Integer, String, select, util from ..schema import Table, Column + class LastrowidTest(fixtures.TablesTest): run_deletes = 'each' @@ -88,7 +89,6 @@ class InsertBehaviorTest(fixtures.TablesTest): else: engine = config.db - r = engine.execute( self.tables.autoinc_pk.insert(), data="some data" @@ -107,6 +107,7 @@ class InsertBehaviorTest(fixtures.TablesTest): assert r.is_insert assert not r.returns_rows + class ReturningTest(fixtures.TablesTest): run_deletes = 'each' __requires__ = 'returning', 'autoincrement_insert' @@ -162,5 +163,3 @@ class ReturningTest(fixtures.TablesTest): __all__ = ('LastrowidTest', 'InsertBehaviorTest', 'ReturningTest') - - diff --git a/lib/sqlalchemy/testing/suite/test_reflection.py b/lib/sqlalchemy/testing/suite/test_reflection.py index a7c814db7..b9894347a 100644 --- a/lib/sqlalchemy/testing/suite/test_reflection.py +++ b/lib/sqlalchemy/testing/suite/test_reflection.py @@ -18,6 +18,7 @@ from sqlalchemy import event metadata, users = None, None + class HasTableTest(fixtures.TablesTest): @classmethod def define_tables(cls, metadata): @@ -31,6 +32,7 @@ class HasTableTest(fixtures.TablesTest): assert config.db.dialect.has_table(conn, "test_table") assert not config.db.dialect.has_table(conn, "nonexistent_table") + class HasSequenceTest(fixtures.TestBase): __requires__ = 'sequences', @@ -425,4 +427,4 @@ class ComponentReflectionTest(fixtures.TablesTest): self._test_get_table_oid('users', schema='test_schema') -__all__ = ('ComponentReflectionTest', 'HasSequenceTest', 'HasTableTest')
\ No newline at end of file +__all__ = ('ComponentReflectionTest', 'HasSequenceTest', 'HasTableTest') diff --git a/lib/sqlalchemy/testing/suite/test_types.py b/lib/sqlalchemy/testing/suite/test_types.py index 74cb52c6e..8d0500d71 100644 --- a/lib/sqlalchemy/testing/suite/test_types.py +++ b/lib/sqlalchemy/testing/suite/test_types.py @@ -8,6 +8,7 @@ from sqlalchemy import Date, DateTime, Time, MetaData, String from ..schema import Table, Column import datetime + class _UnicodeFixture(object): __requires__ = 'unicode_data', @@ -70,7 +71,6 @@ class _UnicodeFixture(object): for row in rows: assert isinstance(row[0], unicode) - def _test_empty_strings(self): unicode_table = self.tables.unicode_table @@ -83,16 +83,17 @@ class _UnicodeFixture(object): ).first() eq_(row, (u'',)) + class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest): __requires__ = 'unicode_data', datatype = Unicode(255) - @requirements.empty_strings_varchar def test_empty_strings_varchar(self): self._test_empty_strings() + class UnicodeTextTest(_UnicodeFixture, fixtures.TablesTest): __requires__ = 'unicode_data', 'text_type' @@ -114,6 +115,7 @@ class StringTest(fixtures.TestBase): foo.create(config.db) foo.drop(config.db) + class _DateFixture(object): compare = None @@ -165,37 +167,44 @@ class DateTimeTest(_DateFixture, fixtures.TablesTest): datatype = DateTime data = datetime.datetime(2012, 10, 15, 12, 57, 18) + class DateTimeMicrosecondsTest(_DateFixture, fixtures.TablesTest): __requires__ = 'datetime_microseconds', datatype = DateTime data = datetime.datetime(2012, 10, 15, 12, 57, 18, 396) + class TimeTest(_DateFixture, fixtures.TablesTest): __requires__ = 'time', datatype = Time data = datetime.time(12, 57, 18) + class TimeMicrosecondsTest(_DateFixture, fixtures.TablesTest): __requires__ = 'time_microseconds', datatype = Time data = datetime.time(12, 57, 18, 396) + class DateTest(_DateFixture, fixtures.TablesTest): __requires__ = 'date', datatype = Date data = datetime.date(2012, 10, 15) + 
class DateTimeCoercedToDateTimeTest(_DateFixture, fixtures.TablesTest): __requires__ = 'date', datatype = Date data = datetime.datetime(2012, 10, 15, 12, 57, 18) compare = datetime.date(2012, 10, 15) + class DateTimeHistoricTest(_DateFixture, fixtures.TablesTest): __requires__ = 'datetime_historic', datatype = DateTime data = datetime.datetime(1850, 11, 10, 11, 52, 35) + class DateHistoricTest(_DateFixture, fixtures.TablesTest): __requires__ = 'date_historic', datatype = Date @@ -207,6 +216,3 @@ __all__ = ('UnicodeVarcharTest', 'UnicodeTextTest', 'DateTimeHistoricTest', 'DateTimeCoercedToDateTimeTest', 'TimeMicrosecondsTest', 'TimeTest', 'DateTimeMicrosecondsTest', 'DateHistoricTest', 'StringTest') - - - diff --git a/lib/sqlalchemy/testing/suite/test_update_delete.py b/lib/sqlalchemy/testing/suite/test_update_delete.py index e73b05485..a3456ac2a 100644 --- a/lib/sqlalchemy/testing/suite/test_update_delete.py +++ b/lib/sqlalchemy/testing/suite/test_update_delete.py @@ -1,9 +1,7 @@ from .. import fixtures, config -from ..config import requirements from ..assertions import eq_ -from .. import engines -from sqlalchemy import Integer, String, select +from sqlalchemy import Integer, String from ..schema import Table, Column @@ -61,4 +59,4 @@ class SimpleUpdateDeleteTest(fixtures.TablesTest): ] ) -__all__ = ('SimpleUpdateDeleteTest', )
\ No newline at end of file +__all__ = ('SimpleUpdateDeleteTest', ) diff --git a/lib/sqlalchemy/testing/util.py b/lib/sqlalchemy/testing/util.py index 41b0a30b3..2592c341e 100644 --- a/lib/sqlalchemy/testing/util.py +++ b/lib/sqlalchemy/testing/util.py @@ -26,9 +26,11 @@ elif pypy: else: # assume CPython - straight gc.collect, lazy_gc() is a pass gc_collect = gc.collect + def lazy_gc(): pass + def picklers(): picklers = set() # Py2K @@ -56,6 +58,7 @@ def round_decimal(value, prec): ).to_integral(decimal.ROUND_FLOOR) / \ pow(10, prec) + class RandomSet(set): def __iter__(self): l = list(set.__iter__(self)) @@ -80,6 +83,7 @@ class RandomSet(set): def copy(self): return RandomSet(self) + def conforms_partial_ordering(tuples, sorted_elements): """True if the given sorting conforms to the given partial ordering.""" @@ -93,6 +97,7 @@ def conforms_partial_ordering(tuples, sorted_elements): else: return True + def all_partial_orderings(tuples, elements): edges = defaultdict(set) for parent, child in tuples: @@ -131,7 +136,6 @@ def function_named(fn, name): return fn - def run_as_contextmanager(ctx, fn, *arg, **kw): """Run the given function under the given contextmanager, simulating the behavior of 'with' to support older @@ -152,6 +156,7 @@ def run_as_contextmanager(ctx, fn, *arg, **kw): else: return raise_ + def rowset(results): """Converts the results of sql execution into a plain set of column tuples. @@ -182,6 +187,7 @@ def provide_metadata(fn, *args, **kw): metadata.drop_all() self.metadata = prev_meta + class adict(dict): """Dict keys available as attributes. Shadows.""" def __getattribute__(self, key): @@ -192,5 +198,3 @@ class adict(dict): def get_all(self, *keys): return tuple([self[key] for key in keys]) - - diff --git a/lib/sqlalchemy/testing/warnings.py b/lib/sqlalchemy/testing/warnings.py index 7afcc63c5..41f3dbfed 100644 --- a/lib/sqlalchemy/testing/warnings.py +++ b/lib/sqlalchemy/testing/warnings.py @@ -4,6 +4,7 @@ import warnings from .. 
import exc as sa_exc from .. import util + def testing_warn(msg, stacklevel=3): """Replaces sqlalchemy.util.warn during tests.""" @@ -14,6 +15,7 @@ def testing_warn(msg, stacklevel=3): else: warnings.warn_explicit(msg, filename, lineno) + def resetwarnings(): """Reset warning behavior to testing defaults.""" @@ -24,6 +26,7 @@ def resetwarnings(): warnings.filterwarnings('error', category=sa_exc.SADeprecationWarning) warnings.filterwarnings('error', category=sa_exc.SAWarning) + def assert_warnings(fn, warnings): """Assert that each of the given warnings are emitted by fn.""" @@ -31,6 +34,7 @@ def assert_warnings(fn, warnings): canary = [] orig_warn = util.warn + def capture_warnings(*args, **kw): orig_warn(*args, **kw) popwarn = warnings.pop(0) diff --git a/lib/sqlalchemy/types.py b/lib/sqlalchemy/types.py index 579bd354d..6f4cf69d1 100644 --- a/lib/sqlalchemy/types.py +++ b/lib/sqlalchemy/types.py @@ -5,8 +5,8 @@ # the MIT License: http://www.opensource.org/licenses/mit-license.php """defines genericized SQL types, each represented by a subclass of -:class:`~sqlalchemy.types.AbstractType`. Dialects define further subclasses of these -types. +:class:`~sqlalchemy.types.AbstractType`. Dialects define further subclasses +of these types. For more information see the SQLAlchemy documentation on types. @@ -14,11 +14,11 @@ For more information see the SQLAlchemy documentation on types. 
__all__ = ['TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType', 'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text', 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME', - 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT', 'SMALLINT', - 'INTEGER', 'DATE', 'TIME', 'String', 'Integer', 'SmallInteger', - 'BigInteger', 'Numeric', 'Float', 'DateTime', 'Date', 'Time', - 'LargeBinary', 'Binary', 'Boolean', 'Unicode', 'Concatenable', - 'UnicodeText', 'PickleType', 'Interval', 'Enum'] + 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT', + 'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer', + 'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime', + 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode', + 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum'] import datetime as dt import codecs @@ -35,10 +35,12 @@ NoneType = type(None) if util.jython: import array + class AbstractType(Visitable): """Base for all types - not needed except for backwards compatibility.""" + class TypeEngine(AbstractType): """Base for built-in types.""" @@ -158,8 +160,8 @@ class TypeEngine(AbstractType): parameter within the statement. It is used for special data types that require literals being wrapped in some special database function in order to coerce an application-level value into a database-specific - format. It is the SQL analogue of the :meth:`.TypeEngine.bind_processor` - method. + format. It is the SQL analogue of the + :meth:`.TypeEngine.bind_processor` method. The method is evaluated at statement compile time, as opposed to statement construction time. @@ -230,8 +232,8 @@ class TypeEngine(AbstractType): The construction of :meth:`.TypeEngine.with_variant` is always from the "fallback" type to that which is dialect specific. The returned type is an instance of :class:`.Variant`, which - itself provides a :meth:`~sqlalchemy.types.Variant.with_variant` that can - be called repeatedly. 
+ itself provides a :meth:`~sqlalchemy.types.Variant.with_variant` + that can be called repeatedly. :param type_: a :class:`.TypeEngine` that will be selected as a variant from the originating type, when a dialect @@ -259,8 +261,10 @@ class TypeEngine(AbstractType): return self.__class__ def dialect_impl(self, dialect): - """Return a dialect-specific implementation for this :class:`.TypeEngine`.""" + """Return a dialect-specific implementation for this + :class:`.TypeEngine`. + """ try: return dialect._type_memos[self]['impl'] except KeyError: @@ -390,9 +394,11 @@ class TypeEngine(AbstractType): def __repr__(self): return util.generic_repr(self) + def _reconstitute_comparator(expression): return expression.comparator + class UserDefinedType(TypeEngine): """Base for user defined types. @@ -450,12 +456,13 @@ class UserDefinedType(TypeEngine): Default behavior for :class:`.UserDefinedType` is the same as that of :class:`.TypeDecorator`; by default it returns ``self``, assuming the compared value should be coerced into - the same type as this one. See :meth:`.TypeDecorator.coerce_compared_value` - for more detail. + the same type as this one. See + :meth:`.TypeDecorator.coerce_compared_value` for more detail. .. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value` now returns ``self`` by default, rather than falling onto the - more fundamental behavior of :meth:`.TypeEngine.coerce_compared_value`. + more fundamental behavior of + :meth:`.TypeEngine.coerce_compared_value`. """ @@ -610,7 +617,8 @@ class TypeDecorator(TypeEngine): the :class:`.TypeEngine` type represented by ``self.impl``. Makes usage of :meth:`dialect_impl` but also traverses into wrapped :class:`.TypeDecorator` instances. - Behavior can be customized here by overriding :meth:`load_dialect_impl`. + Behavior can be customized here by overriding + :meth:`load_dialect_impl`. 
""" adapted = dialect.type_descriptor(self) @@ -727,7 +735,8 @@ class TypeDecorator(TypeEngine): return self.impl.bind_processor(dialect) def result_processor(self, dialect, coltype): - """Provide a result value processing function for the given :class:`.Dialect`. + """Provide a result value processing function for the given + :class:`.Dialect`. This is the method that fulfills the :class:`.TypeEngine` contract for result value conversion. :class:`.TypeDecorator` @@ -795,7 +804,8 @@ class TypeDecorator(TypeEngine): return instance def get_dbapi_type(self, dbapi): - """Return the DBAPI type object represented by this :class:`.TypeDecorator`. + """Return the DBAPI type object represented by this + :class:`.TypeDecorator`. By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the underlying "impl". @@ -836,8 +846,8 @@ class Variant(TypeDecorator): """Construct a new :class:`.Variant`. :param base: the base 'fallback' type - :param mapping: dictionary of string dialect names to :class:`.TypeEngine` - instances. + :param mapping: dictionary of string dialect names to + :class:`.TypeEngine` instances. """ self.impl = base @@ -880,6 +890,7 @@ def to_instance(typeobj, *arg, **kw): else: return typeobj + def adapt_type(typeobj, colspecs): if isinstance(typeobj, type): typeobj = typeobj() @@ -928,6 +939,7 @@ class NullType(TypeEngine): NullTypeEngine = NullType + class Concatenable(object): """A mixin that marks a type as supporting 'concatenation', typically strings.""" @@ -958,6 +970,7 @@ class _DateAffinity(object): class Comparator(TypeEngine.Comparator): _blank_dict = util.immutabledict() + def _adapt_expression(self, op, other_comparator): othertype = other_comparator.type._type_affinity return op, \ @@ -965,6 +978,7 @@ class _DateAffinity(object): get(othertype, NULLTYPE) comparator_factory = Comparator + class String(Concatenable, TypeEngine): """The base for all string and character types. 
@@ -1077,6 +1091,7 @@ class String(Concatenable, TypeEngine): else: encoder = codecs.getencoder(dialect.encoding) warn_on_bytestring = self._warn_on_bytestring + def process(value): if isinstance(value, unicode): return encoder(value, self.unicode_error)[0] @@ -1126,6 +1141,7 @@ class String(Concatenable, TypeEngine): def get_dbapi_type(self, dbapi): return dbapi.STRING + class Text(String): """A variably sized string type. @@ -1136,6 +1152,7 @@ class Text(String): """ __visit_name__ = 'text' + class Unicode(String): """A variable length Unicode string type. @@ -1206,6 +1223,7 @@ class Unicode(String): kwargs.setdefault('_warn_on_bytestring', True) super(Unicode, self).__init__(length=length, **kwargs) + class UnicodeText(Text): """An unbounded-length Unicode string type. @@ -1251,32 +1269,33 @@ class Integer(_DateAffinity, TypeEngine): # TODO: need a dictionary object that will # handle operators generically here, this is incomplete return { - operators.add:{ - Date:Date, - Integer:self.__class__, - Numeric:Numeric, + operators.add: { + Date: Date, + Integer: self.__class__, + Numeric: Numeric, }, - operators.mul:{ - Interval:Interval, - Integer:self.__class__, - Numeric:Numeric, + operators.mul: { + Interval: Interval, + Integer: self.__class__, + Numeric: Numeric, }, # Py2K - operators.div:{ - Integer:self.__class__, - Numeric:Numeric, + operators.div: { + Integer: self.__class__, + Numeric: Numeric, }, # end Py2K - operators.truediv:{ - Integer:self.__class__, - Numeric:Numeric, + operators.truediv: { + Integer: self.__class__, + Numeric: Numeric, }, - operators.sub:{ - Integer:self.__class__, - Numeric:Numeric, + operators.sub: { + Integer: self.__class__, + Numeric: Numeric, }, } + class SmallInteger(Integer): """A type for smaller ``int`` integers. 
@@ -1426,31 +1445,32 @@ class Numeric(_DateAffinity, TypeEngine): @util.memoized_property def _expression_adaptations(self): return { - operators.mul:{ - Interval:Interval, - Numeric:self.__class__, - Integer:self.__class__, + operators.mul: { + Interval: Interval, + Numeric: self.__class__, + Integer: self.__class__, }, # Py2K - operators.div:{ - Numeric:self.__class__, - Integer:self.__class__, + operators.div: { + Numeric: self.__class__, + Integer: self.__class__, }, # end Py2K - operators.truediv:{ - Numeric:self.__class__, - Integer:self.__class__, + operators.truediv: { + Numeric: self.__class__, + Integer: self.__class__, }, - operators.add:{ - Numeric:self.__class__, - Integer:self.__class__, + operators.add: { + Numeric: self.__class__, + Integer: self.__class__, }, - operators.sub:{ - Numeric:self.__class__, - Integer:self.__class__, + operators.sub: { + Numeric: self.__class__, + Integer: self.__class__, } } + class Float(Numeric): """A type for ``float`` numbers. @@ -1477,7 +1497,8 @@ class Float(Numeric): :param \**kwargs: deprecated. Additional arguments here are ignored by the default :class:`.Float` type. For database specific floats that support additional arguments, see that dialect's - documentation for details, such as :class:`sqlalchemy.dialects.mysql.FLOAT`. + documentation for details, such as + :class:`sqlalchemy.dialects.mysql.FLOAT`. 
""" self.precision = precision @@ -1495,23 +1516,23 @@ class Float(Numeric): @util.memoized_property def _expression_adaptations(self): return { - operators.mul:{ - Interval:Interval, - Numeric:self.__class__, + operators.mul: { + Interval: Interval, + Numeric: self.__class__, }, # Py2K - operators.div:{ - Numeric:self.__class__, + operators.div: { + Numeric: self.__class__, }, # end Py2K - operators.truediv:{ - Numeric:self.__class__, + operators.truediv: { + Numeric: self.__class__, }, - operators.add:{ - Numeric:self.__class__, + operators.add: { + Numeric: self.__class__, }, - operators.sub:{ - Numeric:self.__class__, + operators.sub: { + Numeric: self.__class__, } } @@ -1550,17 +1571,17 @@ class DateTime(_DateAffinity, TypeEngine): @util.memoized_property def _expression_adaptations(self): return { - operators.add:{ - Interval:self.__class__, + operators.add: { + Interval: self.__class__, }, - operators.sub:{ - Interval:self.__class__, - DateTime:Interval, + operators.sub: { + Interval: self.__class__, + DateTime: Interval, }, } -class Date(_DateAffinity,TypeEngine): +class Date(_DateAffinity, TypeEngine): """A type for ``datetime.date()`` objects.""" __visit_name__ = 'date' @@ -1575,29 +1596,29 @@ class Date(_DateAffinity,TypeEngine): @util.memoized_property def _expression_adaptations(self): return { - operators.add:{ - Integer:self.__class__, - Interval:DateTime, - Time:DateTime, + operators.add: { + Integer: self.__class__, + Interval: DateTime, + Time: DateTime, }, - operators.sub:{ + operators.sub: { # date - integer = date - Integer:self.__class__, + Integer: self.__class__, # date - date = integer. 
- Date:Integer, + Date: Integer, - Interval:DateTime, + Interval: DateTime, # date - datetime = interval, # this one is not in the PG docs # but works - DateTime:Interval, + DateTime: Interval, }, } -class Time(_DateAffinity,TypeEngine): +class Time(_DateAffinity, TypeEngine): """A type for ``datetime.time()`` objects.""" __visit_name__ = 'time' @@ -1615,13 +1636,13 @@ class Time(_DateAffinity,TypeEngine): @util.memoized_property def _expression_adaptations(self): return { - operators.add:{ - Date:DateTime, - Interval:self.__class__ + operators.add: { + Date: DateTime, + Interval: self.__class__ }, - operators.sub:{ - Time:Interval, - Interval:self.__class__, + operators.sub: { + Time: Interval, + Interval: self.__class__, }, } @@ -1644,6 +1665,7 @@ class _Binary(TypeEngine): # here, though pg8000 does to indicate "bytea" def bind_processor(self, dialect): DBAPIBinary = dialect.dbapi.Binary + def process(value): x = self if value is not None: @@ -1681,6 +1703,7 @@ class _Binary(TypeEngine): def get_dbapi_type(self, dbapi): return dbapi.BINARY + class LargeBinary(_Binary): """A type for large binary byte data. @@ -1708,6 +1731,7 @@ class LargeBinary(_Binary): """ _Binary.__init__(self, length=length) + class Binary(LargeBinary): """Deprecated. Renamed to LargeBinary.""" @@ -1716,6 +1740,7 @@ class Binary(LargeBinary): 'LargeBinary.') LargeBinary.__init__(self, *arg, **kw) + class SchemaType(events.SchemaEventTarget): """Mark a type as possibly requiring schema-level DDL for usage. @@ -1724,9 +1749,10 @@ class SchemaType(events.SchemaEventTarget): constraints, triggers, and other rules. :class:`.SchemaType` classes can also be targets for the - :meth:`.DDLEvents.before_parent_attach` and :meth:`.DDLEvents.after_parent_attach` - events, where the events fire off surrounding the association of - the type object with a parent :class:`.Column`. 
+ :meth:`.DDLEvents.before_parent_attach` and + :meth:`.DDLEvents.after_parent_attach` events, where the events fire off + surrounding the association of the type object with a parent + :class:`.Column`. """ @@ -1818,6 +1844,7 @@ class SchemaType(events.SchemaEventTarget): if t.__class__ is not self.__class__ and isinstance(t, SchemaType): t._on_metadata_drop(target, bind, **kw) + class Enum(String, SchemaType): """Generic Enum Type. @@ -1915,7 +1942,6 @@ class Enum(String, SchemaType): if self.native_enum: SchemaType._set_table(self, column, table) - e = schema.CheckConstraint( column.in_(self.enums), name=self.name, @@ -1938,6 +1964,7 @@ class Enum(String, SchemaType): else: return super(Enum, self).adapt(impltype, **kw) + class PickleType(TypeDecorator): """Holds Python objects, which are serialized using pickle. @@ -2073,6 +2100,7 @@ class Boolean(TypeEngine, SchemaType): else: return processors.int_to_boolean + class Interval(_DateAffinity, TypeDecorator): """A type for ``datetime.timedelta()`` objects. 
@@ -2165,24 +2193,24 @@ class Interval(_DateAffinity, TypeDecorator): @util.memoized_property def _expression_adaptations(self): return { - operators.add:{ - Date:DateTime, - Interval:self.__class__, - DateTime:DateTime, - Time:Time, + operators.add: { + Date: DateTime, + Interval: self.__class__, + DateTime: DateTime, + Time: Time, }, - operators.sub:{ - Interval:self.__class__ + operators.sub: { + Interval: self.__class__ }, - operators.mul:{ - Numeric:self.__class__ + operators.mul: { + Numeric: self.__class__ }, operators.truediv: { - Numeric:self.__class__ + Numeric: self.__class__ }, # Py2K operators.div: { - Numeric:self.__class__ + Numeric: self.__class__ } # end Py2K } @@ -2202,11 +2230,13 @@ class REAL(Float): __visit_name__ = 'REAL' + class FLOAT(Float): """The SQL FLOAT type.""" __visit_name__ = 'FLOAT' + class NUMERIC(Numeric): """The SQL NUMERIC type.""" @@ -2237,6 +2267,7 @@ class BIGINT(BigInteger): __visit_name__ = 'BIGINT' + class TIMESTAMP(DateTime): """The SQL TIMESTAMP type.""" @@ -2245,6 +2276,7 @@ class TIMESTAMP(DateTime): def get_dbapi_type(self, dbapi): return dbapi.TIMESTAMP + class DATETIME(DateTime): """The SQL DATETIME type.""" @@ -2262,11 +2294,13 @@ class TIME(Time): __visit_name__ = 'TIME' + class TEXT(Text): """The SQL TEXT type.""" __visit_name__ = 'TEXT' + class CLOB(Text): """The CLOB type. 
@@ -2275,16 +2309,19 @@ class CLOB(Text): __visit_name__ = 'CLOB' + class VARCHAR(String): """The SQL VARCHAR type.""" __visit_name__ = 'VARCHAR' + class NVARCHAR(Unicode): """The SQL NVARCHAR type.""" __visit_name__ = 'NVARCHAR' + class CHAR(String): """The SQL CHAR type.""" @@ -2302,11 +2339,13 @@ class BLOB(LargeBinary): __visit_name__ = 'BLOB' + class BINARY(_Binary): """The SQL BINARY type.""" __visit_name__ = 'BINARY' + class VARBINARY(_Binary): """The SQL VARBINARY type.""" @@ -2325,18 +2364,17 @@ STRINGTYPE = String() _type_map = { str: String(), # Py3K - #bytes : LargeBinary(), + #bytes: LargeBinary(), # Py2K - unicode : Unicode(), + unicode: Unicode(), # end Py2K - int : Integer(), - float : Numeric(), + int: Integer(), + float: Numeric(), bool: BOOLEANTYPE, - decimal.Decimal : Numeric(), - dt.date : Date(), - dt.datetime : DateTime(), - dt.time : Time(), - dt.timedelta : Interval(), + decimal.Decimal: Numeric(), + dt.date: Date(), + dt.datetime: DateTime(), + dt.time: Time(), + dt.timedelta: Interval(), NoneType: NULLTYPE } - |