summaryrefslogtreecommitdiff
path: root/lib/sqlalchemy/dialects/postgresql/base.py
diff options
context:
space:
mode:
authorMike Bayer <mike_mp@zzzcomputing.com>2012-07-22 16:36:29 -0400
committerMike Bayer <mike_mp@zzzcomputing.com>2012-07-22 16:36:29 -0400
commitfaa9b2c8da63aa116579fc6c43a30ce479b92ac2 (patch)
tree30a75f94f18a9f51197658e5b0a8d5ae1b09753d /lib/sqlalchemy/dialects/postgresql/base.py
parent2bee05098e09dcdf09f7c8ff1c7efeba0c2fc9f2 (diff)
downloadsqlalchemy-faa9b2c8da63aa116579fc6c43a30ce479b92ac2.tar.gz
- [feature] Revised the rules used to determine
the operator precedence for user-defined operators, i.e. those granted using the ``op()`` method. Previously, the smallest precedence was applied in all cases; now the default precedence is zero, lower than all operators except "comma" (such as used in the argument list of a ``func`` call) and "AS", and is also customizable via the "precedence" argument on the ``op()`` method. [ticket:2537]
Diffstat (limited to 'lib/sqlalchemy/dialects/postgresql/base.py')
-rw-r--r--lib/sqlalchemy/dialects/postgresql/base.py203
1 file changed, 102 insertions, 101 deletions
diff --git a/lib/sqlalchemy/dialects/postgresql/base.py b/lib/sqlalchemy/dialects/postgresql/base.py
index 248d39ed6..c13e0e1b3 100644
--- a/lib/sqlalchemy/dialects/postgresql/base.py
+++ b/lib/sqlalchemy/dialects/postgresql/base.py
@@ -21,7 +21,7 @@ default corresponding to the column.
To specify a specific named sequence to be used for primary key generation,
use the :func:`~sqlalchemy.schema.Sequence` construct::
- Table('sometable', metadata,
+ Table('sometable', metadata,
Column('id', Integer, Sequence('some_id_seq'), primary_key=True)
)
@@ -51,7 +51,7 @@ parameter are ``READ COMMITTED``, ``READ UNCOMMITTED``, ``REPEATABLE READ``,
and ``SERIALIZABLE``::
engine = create_engine(
- "postgresql+pg8000://scott:tiger@localhost/test",
+ "postgresql+pg8000://scott:tiger@localhost/test",
isolation_level="READ UNCOMMITTED"
)
@@ -75,7 +75,7 @@ the current ``search_path``, the "schema" attribute of the resulting
remote table matches that of the referencing table, and the "schema" argument
was explicitly stated on the referencing table.
-The best practice here is to not use the ``schema`` argument
+The best practice here is to not use the ``schema`` argument
on :class:`.Table` for any schemas that are present in ``search_path``.
``search_path`` defaults to "public", but care should be taken
to inspect the actual value using::
@@ -87,7 +87,7 @@ to inspect the actual value using::
were also in the ``search_path`` could make an incorrect assumption
if the schemas were explicitly stated on each :class:`.Table`.
-Background on PG's ``search_path`` is at:
+Background on PG's ``search_path`` is at:
http://www.postgresql.org/docs/9.0/static/ddl-schemas.html#DDL-SCHEMAS-PATH
INSERT/UPDATE...RETURNING
@@ -144,7 +144,7 @@ to the PostgreSQL dialect.
Partial Indexes
^^^^^^^^^^^^^^^^
-Partial indexes add criterion to the index definition so that the index is
+Partial indexes add criterion to the index definition so that the index is
applied to a subset of rows. These can be specified on :class:`.Index`
using the ``postgresql_where`` keyword argument::
@@ -158,11 +158,11 @@ an index (see http://www.postgresql.org/docs/8.3/interactive/indexes-opclass.htm
The :class:`.Index` construct allows these to be specified via the ``postgresql_ops``
keyword argument::
- Index('my_index', my_table.c.id, my_table.c.data,
+ Index('my_index', my_table.c.id, my_table.c.data,
postgresql_ops={
- 'data': 'text_pattern_ops',
+ 'data': 'text_pattern_ops',
'id': 'int4_ops'
- })
+ })
.. versionadded:: 0.7.2
``postgresql_ops`` keyword argument to :class:`.Index` construct.
@@ -374,7 +374,7 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
:param dimensions: if non-None, the ARRAY will assume a fixed
number of dimensions. This will cause the DDL emitted for this
ARRAY to include the exact number of bracket clauses ``[]``,
- and will also optimize the performance of the type overall.
+ and will also optimize the performance of the type overall.
Note that PG arrays are always implicitly "non-dimensioned",
meaning they can store any number of dimensions no matter how
they were declared.
@@ -404,9 +404,9 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
else:
return collection(
self._proc_array(
- x, itemproc,
- dim - 1 if dim is not None else None,
- collection)
+ x, itemproc,
+ dim - 1 if dim is not None else None,
+ collection)
for x in arr
)
@@ -419,9 +419,9 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
return value
else:
return self._proc_array(
- value,
- item_proc,
- self.dimensions,
+ value,
+ item_proc,
+ self.dimensions,
list)
return process
@@ -434,9 +434,9 @@ class ARRAY(sqltypes.Concatenable, sqltypes.TypeEngine):
return value
else:
return self._proc_array(
- value,
- item_proc,
- self.dimensions,
+ value,
+ item_proc,
+ self.dimensions,
tuple if self.as_tuple else list)
return process
@@ -444,37 +444,37 @@ PGArray = ARRAY
class ENUM(sqltypes.Enum):
"""Postgresql ENUM type.
-
+
This is a subclass of :class:`.types.Enum` which includes
support for PG's ``CREATE TYPE``.
-
- :class:`~.postgresql.ENUM` is used automatically when
+
+ :class:`~.postgresql.ENUM` is used automatically when
using the :class:`.types.Enum` type on PG assuming
- the ``native_enum`` is left as ``True``. However, the
+ the ``native_enum`` is left as ``True``. However, the
:class:`~.postgresql.ENUM` class can also be instantiated
directly in order to access some additional Postgresql-specific
- options, namely finer control over whether or not
+ options, namely finer control over whether or not
``CREATE TYPE`` should be emitted.
-
- Note that both :class:`.types.Enum` as well as
+
+ Note that both :class:`.types.Enum` as well as
:class:`~.postgresql.ENUM` feature create/drop
methods; the base :class:`.types.Enum` type ultimately
delegates to the :meth:`~.postgresql.ENUM.create` and
:meth:`~.postgresql.ENUM.drop` methods present here.
-
+
"""
def __init__(self, *enums, **kw):
"""Construct an :class:`~.postgresql.ENUM`.
-
+
Arguments are the same as that of
:class:`.types.Enum`, but also including
the following parameters.
-
- :param create_type: Defaults to True.
- Indicates that ``CREATE TYPE`` should be
- emitted, after optionally checking for the
- presence of the type, when the parent
+
+ :param create_type: Defaults to True.
+ Indicates that ``CREATE TYPE`` should be
+ emitted, after optionally checking for the
+ presence of the type, when the parent
table is being created; and additionally
that ``DROP TYPE`` is called when the table
is dropped. When ``False``, no check
@@ -485,7 +485,7 @@ class ENUM(sqltypes.Enum):
are called directly.
Setting to ``False`` is helpful
when invoking a creation scheme to a SQL file
- without access to the actual database -
+ without access to the actual database -
the :meth:`~.postgresql.ENUM.create` and
:meth:`~.postgresql.ENUM.drop` methods can
be used to emit SQL to a target bind.
@@ -497,20 +497,20 @@ class ENUM(sqltypes.Enum):
super(ENUM, self).__init__(*enums, **kw)
def create(self, bind=None, checkfirst=True):
- """Emit ``CREATE TYPE`` for this
+ """Emit ``CREATE TYPE`` for this
:class:`~.postgresql.ENUM`.
-
+
If the underlying dialect does not support
Postgresql CREATE TYPE, no action is taken.
-
+
:param bind: a connectable :class:`.Engine`,
:class:`.Connection`, or similar object to emit
SQL.
- :param checkfirst: if ``True``, a query against
+ :param checkfirst: if ``True``, a query against
the PG catalog will be first performed to see
if the type does not exist already before
creating.
-
+
"""
if not bind.dialect.supports_native_enum:
return
@@ -520,19 +520,19 @@ class ENUM(sqltypes.Enum):
bind.execute(CreateEnumType(self))
def drop(self, bind=None, checkfirst=True):
- """Emit ``DROP TYPE`` for this
+ """Emit ``DROP TYPE`` for this
:class:`~.postgresql.ENUM`.
-
+
If the underlying dialect does not support
Postgresql DROP TYPE, no action is taken.
-
+
:param bind: a connectable :class:`.Engine`,
:class:`.Connection`, or similar object to emit
SQL.
- :param checkfirst: if ``True``, a query against
+ :param checkfirst: if ``True``, a query against
the PG catalog will be first performed to see
if the type actually exists before dropping.
-
+
"""
if not bind.dialect.supports_native_enum:
return
@@ -544,7 +544,7 @@ class ENUM(sqltypes.Enum):
def _check_for_name_in_memos(self, checkfirst, kw):
"""Look in the 'ddl runner' for 'memos', then
note our name in that collection.
-
+
This to ensure a particular named enum is operated
upon only once within any kind of create/drop
sequence without relying upon "checkfirst".
@@ -621,14 +621,14 @@ class PGCompiler(compiler.SQLCompiler):
def visit_match_op(self, binary, **kw):
return "%s @@ to_tsquery(%s)" % (
- self.process(binary.left),
+ self.process(binary.left),
self.process(binary.right))
def visit_ilike_op(self, binary, **kw):
escape = binary.modifiers.get("escape", None)
return '%s ILIKE %s' % \
(self.process(binary.left), self.process(binary.right)) \
- + (escape and
+ + (escape and
(' ESCAPE ' + self.render_literal_value(escape, None))
or '')
@@ -636,7 +636,7 @@ class PGCompiler(compiler.SQLCompiler):
escape = binary.modifiers.get("escape", None)
return '%s NOT ILIKE %s' % \
(self.process(binary.left), self.process(binary.right)) \
- + (escape and
+ + (escape and
(' ESCAPE ' + self.render_literal_value(escape, None))
or '')
@@ -692,9 +692,9 @@ class PGCompiler(compiler.SQLCompiler):
columns = [
self.process(
- self.label_select_column(None, c, asfrom=False),
- within_columns_clause=True,
- result_map=self.result_map)
+ self.label_select_column(None, c, asfrom=False),
+ within_columns_clause=True,
+ result_map=self.result_map)
for c in expression._select_iterables(returning_cols)
]
@@ -708,13 +708,14 @@ class PGCompiler(compiler.SQLCompiler):
affinity = None
casts = {
- sqltypes.Date:'date',
- sqltypes.DateTime:'timestamp',
+ sqltypes.Date:'date',
+ sqltypes.DateTime:'timestamp',
sqltypes.Interval:'interval', sqltypes.Time:'time'
}
cast = casts.get(affinity, None)
if isinstance(extract.expr, sql.ColumnElement) and cast is not None:
- expr = extract.expr.op('::')(sql.literal_column(cast))
+ expr = extract.expr.op('::', precedence=100)(
+ sql.literal_column(cast))
else:
expr = extract.expr
return "EXTRACT(%s FROM %s)" % (
@@ -728,7 +729,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
column is column.table._autoincrement_column and \
not isinstance(impl_type, sqltypes.SmallInteger) and \
(
- column.default is None or
+ column.default is None or
(
isinstance(column.default, schema.Sequence) and
column.default.optional
@@ -783,7 +784,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
text += "(%s)" \
% (
', '.join([
- preparer.format_column(c) +
+ preparer.format_column(c) +
(c.key in ops and (' ' + ops[c.key]) or '')
for c in index.columns])
)
@@ -841,14 +842,14 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
def visit_TIMESTAMP(self, type_):
return "TIMESTAMP%s %s" % (
- getattr(type_, 'precision', None) and "(%d)" %
+ getattr(type_, 'precision', None) and "(%d)" %
type_.precision or "",
(type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE"
)
def visit_TIME(self, type_):
return "TIME%s %s" % (
- getattr(type_, 'precision', None) and "(%d)" %
+ getattr(type_, 'precision', None) and "(%d)" %
type_.precision or "",
(type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE"
)
@@ -878,8 +879,8 @@ class PGTypeCompiler(compiler.GenericTypeCompiler):
return "BYTEA"
def visit_ARRAY(self, type_):
- return self.process(type_.item_type) + ('[]' * (type_.dimensions
- if type_.dimensions
+ return self.process(type_.item_type) + ('[]' * (type_.dimensions
+ if type_.dimensions
is not None else 1))
@@ -932,21 +933,21 @@ class PGExecutionContext(default.DefaultExecutionContext):
return self._execute_scalar("select %s" %
column.server_default.arg, column.type)
- elif (column.default is None or
+ elif (column.default is None or
(column.default.is_sequence and
column.default.optional)):
- # execute the sequence associated with a SERIAL primary
+ # execute the sequence associated with a SERIAL primary
# key column. for non-primary-key SERIAL, the ID just
# generates server side.
try:
seq_name = column._postgresql_seq_name
except AttributeError:
- tab = column.table.name
- col = column.name
- tab = tab[0:29 + max(0, (29 - len(col)))]
- col = col[0:29 + max(0, (29 - len(tab)))]
+ tab = column.table.name
+ col = column.name
+ tab = tab[0:29 + max(0, (29 - len(col)))]
+ col = col[0:29 + max(0, (29 - len(tab)))]
column._postgresql_seq_name = seq_name = "%s_%s_seq" % (tab, col)
sch = column.table.schema
@@ -1016,7 +1017,7 @@ class PGDialect(default.DefaultDialect):
else:
return None
- _isolation_lookup = set(['SERIALIZABLE',
+ _isolation_lookup = set(['SERIALIZABLE',
'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ'])
def set_isolation_level(self, connection, level):
@@ -1024,9 +1025,9 @@ class PGDialect(default.DefaultDialect):
if level not in self._isolation_lookup:
raise exc.ArgumentError(
"Invalid value '%s' for isolation_level. "
- "Valid isolation levels for %s are %s" %
+ "Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
- )
+ )
cursor = connection.cursor()
cursor.execute(
"SET SESSION CHARACTERISTICS AS TRANSACTION "
@@ -1047,13 +1048,13 @@ class PGDialect(default.DefaultDialect):
def do_prepare_twophase(self, connection, xid):
connection.execute("PREPARE TRANSACTION '%s'" % xid)
- def do_rollback_twophase(self, connection, xid,
+ def do_rollback_twophase(self, connection, xid,
is_prepared=True, recover=False):
if is_prepared:
if recover:
- #FIXME: ugly hack to get out of transaction
+ #FIXME: ugly hack to get out of transaction
# context when committing recoverable transactions
- # Must find out a way how to make the dbapi not
+ # Must find out a way how to make the dbapi not
# open a transaction.
connection.execute("ROLLBACK")
connection.execute("ROLLBACK PREPARED '%s'" % xid)
@@ -1062,7 +1063,7 @@ class PGDialect(default.DefaultDialect):
else:
self.do_rollback(connection.connection)
- def do_commit_twophase(self, connection, xid,
+ def do_commit_twophase(self, connection, xid,
is_prepared=True, recover=False):
if is_prepared:
if recover:
@@ -1114,10 +1115,10 @@ class PGDialect(default.DefaultDialect):
"n.oid=c.relnamespace where n.nspname=:schema and "
"relname=:name",
bindparams=[
- sql.bindparam('name',
+ sql.bindparam('name',
unicode(table_name), type_=sqltypes.Unicode),
- sql.bindparam('schema',
- unicode(schema), type_=sqltypes.Unicode)]
+ sql.bindparam('schema',
+ unicode(schema), type_=sqltypes.Unicode)]
)
)
return bool(cursor.first())
@@ -1133,7 +1134,7 @@ class PGDialect(default.DefaultDialect):
bindparams=[
sql.bindparam('name', unicode(sequence_name),
type_=sqltypes.Unicode)
- ]
+ ]
)
)
else:
@@ -1145,7 +1146,7 @@ class PGDialect(default.DefaultDialect):
bindparams=[
sql.bindparam('name', unicode(sequence_name),
type_=sqltypes.Unicode),
- sql.bindparam('schema',
+ sql.bindparam('schema',
unicode(schema), type_=sqltypes.Unicode)
]
)
@@ -1273,13 +1274,13 @@ class PGDialect(default.DefaultDialect):
SELECT relname
FROM pg_class c
WHERE relkind = 'v'
- AND '%(schema)s' = (select nspname from pg_namespace n
+ AND '%(schema)s' = (select nspname from pg_namespace n
where n.oid = c.relnamespace)
""" % dict(schema=current_schema)
# Py3K
#view_names = [row[0] for row in connection.execute(s)]
# Py2K
- view_names = [row[0].decode(self.encoding)
+ view_names = [row[0].decode(self.encoding)
for row in connection.execute(s)]
# end Py2K
return view_names
@@ -1313,10 +1314,10 @@ class PGDialect(default.DefaultDialect):
SQL_COLS = """
SELECT a.attname,
pg_catalog.format_type(a.atttypid, a.atttypmod),
- (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid)
- for 128)
+ (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid)
+ for 128)
FROM pg_catalog.pg_attrdef d
- WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum
+ WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum
AND a.atthasdef)
AS DEFAULT,
a.attnotnull, a.attnum, a.attrelid as table_oid
@@ -1325,8 +1326,8 @@ class PGDialect(default.DefaultDialect):
AND a.attnum > 0 AND NOT a.attisdropped
ORDER BY a.attnum
"""
- s = sql.text(SQL_COLS,
- bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)],
+ s = sql.text(SQL_COLS,
+ bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)],
typemap={'attname':sqltypes.Unicode, 'default':sqltypes.Unicode}
)
c = connection.execute(s, table_oid=table_oid)
@@ -1337,7 +1338,7 @@ class PGDialect(default.DefaultDialect):
# format columns
columns = []
for name, format_type, default, notnull, attnum, table_oid in rows:
- ## strip (5) from character varying(5), timestamp(5)
+ ## strip (5) from character varying(5), timestamp(5)
# with time zone, etc
attype = re.sub(r'\([\d,]+\)', '', format_type)
@@ -1362,13 +1363,13 @@ class PGDialect(default.DefaultDialect):
args = (53, )
elif attype == 'integer':
args = ()
- elif attype in ('timestamp with time zone',
+ elif attype in ('timestamp with time zone',
'time with time zone'):
kwargs['timezone'] = True
if charlen:
kwargs['precision'] = int(charlen)
args = ()
- elif attype in ('timestamp without time zone',
+ elif attype in ('timestamp without time zone',
'time without time zone', 'time'):
kwargs['timezone'] = False
if charlen:
@@ -1409,7 +1410,7 @@ class PGDialect(default.DefaultDialect):
# A table can't override whether the domain is nullable.
nullable = domain['nullable']
if domain['default'] and not default:
- # It can, however, override the default
+ # It can, however, override the default
# value, but can't set it to null.
default = domain['default']
continue
@@ -1435,7 +1436,7 @@ class PGDialect(default.DefaultDialect):
sch = schema
if '.' not in match.group(2) and sch is not None:
# unconditionally quote the schema name. this could
- # later be enhanced to obey quoting rules /
+ # later be enhanced to obey quoting rules /
# "quote schema"
default = match.group(1) + \
('"%s"' % sch) + '.' + \
@@ -1453,10 +1454,10 @@ class PGDialect(default.DefaultDialect):
PK_SQL = """
SELECT a.attname
- FROM
+ FROM
pg_class t
join pg_index ix on t.oid = ix.indrelid
- join pg_attribute a
+ join pg_attribute a
on t.oid=a.attrelid and a.attnum=ANY(ix.indkey)
WHERE
t.oid = :table_oid and
@@ -1487,14 +1488,14 @@ class PGDialect(default.DefaultDialect):
info_cache=kw.get('info_cache'))
FK_SQL = """
- SELECT r.conname,
+ SELECT r.conname,
pg_catalog.pg_get_constraintdef(r.oid, true) as condef,
n.nspname as conschema
FROM pg_catalog.pg_constraint r,
pg_namespace n,
pg_class c
- WHERE r.conrelid = :table AND
+ WHERE r.conrelid = :table AND
r.contype = 'f' AND
c.oid = confrelid AND
n.oid = c.relnamespace
@@ -1511,7 +1512,7 @@ class PGDialect(default.DefaultDialect):
'(?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups()
constrained_columns, referred_schema, \
referred_table, referred_columns = m
- constrained_columns = [preparer._unquote_identifier(x)
+ constrained_columns = [preparer._unquote_identifier(x)
for x in re.split(r'\s*,\s*', constrained_columns)]
if referred_schema:
@@ -1526,7 +1527,7 @@ class PGDialect(default.DefaultDialect):
# and an explicit schema was given for the referencing table.
referred_schema = schema
referred_table = preparer._unquote_identifier(referred_table)
- referred_columns = [preparer._unquote_identifier(x)
+ referred_columns = [preparer._unquote_identifier(x)
for x in re.split(r'\s*,\s', referred_columns)]
fkey_d = {
'name' : conname,
@@ -1549,11 +1550,11 @@ class PGDialect(default.DefaultDialect):
ix.indisunique, ix.indexprs, ix.indpred,
a.attname
FROM
- pg_class t
+ pg_class t
join pg_index ix on t.oid = ix.indrelid
join pg_class i on i.oid=ix.indexrelid
- left outer join
- pg_attribute a
+ left outer join
+ pg_attribute a
on t.oid=a.attrelid and a.attnum=ANY(ix.indkey)
WHERE
t.relkind = 'r'
@@ -1605,7 +1606,7 @@ class PGDialect(default.DefaultDialect):
SQL_ENUMS = """
SELECT t.typname as "name",
-- no enum defaults in 8.4 at least
- -- t.typdefault as "default",
+ -- t.typdefault as "default",
pg_catalog.pg_type_is_visible(t.oid) as "visible",
n.nspname as "schema",
e.enumlabel as "label"
@@ -1672,8 +1673,8 @@ class PGDialect(default.DefaultDialect):
name = "%s.%s" % (domain['schema'], domain['name'])
domains[name] = {
- 'attype':attype,
- 'nullable': domain['nullable'],
+ 'attype':attype,
+ 'nullable': domain['nullable'],
'default': domain['default']
}