author    | Mike Bayer <mike_mp@zzzcomputing.com> | 2019-01-06 01:14:26 -0500
committer | mike bayer <mike_mp@zzzcomputing.com> | 2019-01-06 17:34:50 +0000
commit    | 1e1a38e7801f410f244e4bbb44ec795ae152e04e (patch)
tree      | 28e725c5c8188bd0cfd133d1e268dbca9b524978 /lib/sqlalchemy/sql/ddl.py
parent    | 404e69426b05a82d905cbb3ad33adafccddb00dd (diff)
download  | sqlalchemy-1e1a38e7801f410f244e4bbb44ec795ae152e04e.tar.gz
Run black -l 79 against all source files
This is a straight reformat run using black as-is, with no
manual edits applied at all.

The black run formats code consistently; however, in some
patterns that are prevalent in SQLAlchemy code it produces
too-long lines. Those lines will be resolved in the following
commit, which will address all remaining flake8 issues,
including shadowed builtins, long lines, import order, unused
imports, duplicate imports, and docstring issues.
Change-Id: I7eda77fed3d8e73df84b3651fd6cfcfe858d4dc9
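For reference, the pass described above can be reproduced from a short
script; the sketch below is illustrative only (the single target path is an
example, the actual run covered all source files) and assumes black is
installed on the PATH:

    import subprocess

    # Same invocation as the commit title: black with a 79-character limit.
    subprocess.run(
        ["black", "-l", "79", "lib/sqlalchemy/sql/ddl.py"],
        check=True,
    )

    # Optionally verify without rewriting; --check exits non-zero if any
    # file would still be reformatted.
    subprocess.run(
        ["black", "--check", "-l", "79", "lib/sqlalchemy/sql/ddl.py"],
        check=True,
    )

The remaining flake8 violations that black cannot fix (long lines, import
order, and so on) are deliberately left to the follow-up commit described
above.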
Diffstat (limited to 'lib/sqlalchemy/sql/ddl.py')
-rw-r--r-- | lib/sqlalchemy/sql/ddl.py | 306
1 file changed, 180 insertions, 126 deletions
diff --git a/lib/sqlalchemy/sql/ddl.py b/lib/sqlalchemy/sql/ddl.py
index 91e93efe7..f21b3d7f0 100644
--- a/lib/sqlalchemy/sql/ddl.py
+++ b/lib/sqlalchemy/sql/ddl.py
@@ -56,8 +56,9 @@ class DDLElement(Executable, _DDLCompiles):
 
     """
 
-    _execution_options = Executable.\
-        _execution_options.union({'autocommit': True})
+    _execution_options = Executable._execution_options.union(
+        {"autocommit": True}
+    )
 
     target = None
     on = None
@@ -95,11 +96,13 @@ class DDLElement(Executable, _DDLCompiles):
         if self._should_execute(target, bind):
             return bind.execute(self.against(target))
         else:
-            bind.engine.logger.info(
-                "DDL execution skipped, criteria not met.")
+            bind.engine.logger.info("DDL execution skipped, criteria not met.")
 
-    @util.deprecated("0.7", "See :class:`.DDLEvents`, as well as "
-                     ":meth:`.DDLElement.execute_if`.")
+    @util.deprecated(
+        "0.7",
+        "See :class:`.DDLEvents`, as well as "
+        ":meth:`.DDLElement.execute_if`.",
+    )
     def execute_at(self, event_name, target):
         """Link execution of this DDL to the DDL lifecycle of a SchemaItem.
 
@@ -129,11 +132,12 @@ class DDLElement(Executable, _DDLCompiles):
         """
 
         def call_event(target, connection, **kw):
-            if self._should_execute_deprecated(event_name,
-                                               target, connection, **kw):
+            if self._should_execute_deprecated(
+                event_name, target, connection, **kw
+            ):
                 return connection.execute(self.against(target))
 
-        event.listen(target, "" + event_name.replace('-', '_'), call_event)
+        event.listen(target, "" + event_name.replace("-", "_"), call_event)
 
     @_generative
     def against(self, target):
@@ -211,8 +215,9 @@ class DDLElement(Executable, _DDLCompiles):
         self.state = state
 
     def _should_execute(self, target, bind, **kw):
-        if self.on is not None and \
-                not self._should_execute_deprecated(None, target, bind, **kw):
+        if self.on is not None and not self._should_execute_deprecated(
+            None, target, bind, **kw
+        ):
             return False
 
         if isinstance(self.dialect, util.string_types):
@@ -221,9 +226,9 @@ class DDLElement(Executable, _DDLCompiles):
         elif isinstance(self.dialect, (tuple, list, set)):
             if bind.engine.name not in self.dialect:
                 return False
-        if (self.callable_ is not None and
-                not self.callable_(self, target, bind,
-                                   state=self.state, **kw)):
+        if self.callable_ is not None and not self.callable_(
+            self, target, bind, state=self.state, **kw
+        ):
             return False
 
         return True
@@ -245,13 +250,15 @@ class DDLElement(Executable, _DDLCompiles):
             return bind.execute(self.against(target))
 
     def _check_ddl_on(self, on):
-        if (on is not None and
-            (not isinstance(on, util.string_types + (tuple, list, set)) and
-             not util.callable(on))):
+        if on is not None and (
+            not isinstance(on, util.string_types + (tuple, list, set))
+            and not util.callable(on)
+        ):
             raise exc.ArgumentError(
                 "Expected the name of a database dialect, a tuple "
                 "of names, or a callable for "
-                "'on' criteria, got type '%s'." % type(on).__name__)
+                "'on' criteria, got type '%s'." % type(on).__name__
+            )
 
     def bind(self):
         if self._bind:
@@ -259,6 +266,7 @@ class DDLElement(Executable, _DDLCompiles):
 
     def _set_bind(self, bind):
         self._bind = bind
+
     bind = property(bind, _set_bind)
 
     def _generate(self):
@@ -375,8 +383,9 @@ class DDL(DDLElement):
 
         if not isinstance(statement, util.string_types):
             raise exc.ArgumentError(
-                "Expected a string or unicode SQL statement, got '%r'" %
-                statement)
+                "Expected a string or unicode SQL statement, got '%r'"
+                % statement
+            )
 
         self.statement = statement
         self.context = context or {}
@@ -386,12 +395,18 @@ class DDL(DDLElement):
         self._bind = bind
 
     def __repr__(self):
-        return '<%s@%s; %s>' % (
-            type(self).__name__, id(self),
-            ', '.join([repr(self.statement)] +
-                      ['%s=%r' % (key, getattr(self, key))
-                       for key in ('on', 'context')
-                       if getattr(self, key)]))
+        return "<%s@%s; %s>" % (
+            type(self).__name__,
+            id(self),
+            ", ".join(
+                [repr(self.statement)]
+                + [
+                    "%s=%r" % (key, getattr(self, key))
+                    for key in ("on", "context")
+                    if getattr(self, key)
+                ]
+            ),
+        )
 
 
 class _CreateDropBase(DDLElement):
@@ -464,8 +479,8 @@ class CreateTable(_CreateDropBase):
     __visit_name__ = "create_table"
 
     def __init__(
-            self, element, on=None, bind=None,
-            include_foreign_key_constraints=None):
+        self, element, on=None, bind=None, include_foreign_key_constraints=None
+    ):
         """Create a :class:`.CreateTable` construct.
 
         :param element: a :class:`.Table` that's the subject
@@ -481,9 +496,7 @@ class CreateTable(_CreateDropBase):
         """
 
         super(CreateTable, self).__init__(element, on=on, bind=bind)
-        self.columns = [CreateColumn(column)
-                        for column in element.columns
-                        ]
+        self.columns = [CreateColumn(column) for column in element.columns]
 
         self.include_foreign_key_constraints = include_foreign_key_constraints
 
@@ -494,6 +507,7 @@ class _DropView(_CreateDropBase):
 
     This object will eventually be part of a public "view" API.
     """
+
    __visit_name__ = "drop_view"
 
 
@@ -602,7 +616,8 @@ class CreateColumn(_DDLCompiles):
     to support custom column creation styles.
 
     """
-    __visit_name__ = 'create_column'
+
+    __visit_name__ = "create_column"
 
     def __init__(self, element):
         self.element = element
@@ -646,7 +661,8 @@ class AddConstraint(_CreateDropBase):
     def __init__(self, element, *args, **kw):
         super(AddConstraint, self).__init__(element, *args, **kw)
         element._create_rule = util.portable_instancemethod(
-            self._create_rule_disable)
+            self._create_rule_disable
+        )
 
 
 class DropConstraint(_CreateDropBase):
@@ -658,7 +674,8 @@ class DropConstraint(_CreateDropBase):
         self.cascade = cascade
         super(DropConstraint, self).__init__(element, **kw)
         element._create_rule = util.portable_instancemethod(
-            self._create_rule_disable)
+            self._create_rule_disable
+        )
 
 
 class SetTableComment(_CreateDropBase):
@@ -691,9 +708,9 @@ class DDLBase(SchemaVisitor):
 
 
 class SchemaGenerator(DDLBase):
-
-    def __init__(self, dialect, connection, checkfirst=False,
-                 tables=None, **kwargs):
+    def __init__(
+        self, dialect, connection, checkfirst=False, tables=None, **kwargs
+    ):
         super(SchemaGenerator, self).__init__(connection, **kwargs)
         self.checkfirst = checkfirst
         self.tables = tables
@@ -706,25 +723,22 @@ class SchemaGenerator(DDLBase):
         effective_schema = self.connection.schema_for_object(table)
         if effective_schema:
             self.dialect.validate_identifier(effective_schema)
-        return not self.checkfirst or \
-            not self.dialect.has_table(self.connection,
-                                       table.name, schema=effective_schema)
+        return not self.checkfirst or not self.dialect.has_table(
+            self.connection, table.name, schema=effective_schema
+        )
 
     def _can_create_sequence(self, sequence):
         effective_schema = self.connection.schema_for_object(sequence)
-        return self.dialect.supports_sequences and \
-            (
-                (not self.dialect.sequences_optional or
-                 not sequence.optional) and
-                (
-                    not self.checkfirst or
-                    not self.dialect.has_sequence(
-                        self.connection,
-                        sequence.name,
-                        schema=effective_schema)
+        return self.dialect.supports_sequences and (
+            (not self.dialect.sequences_optional or not sequence.optional)
+            and (
+                not self.checkfirst
+                or not self.dialect.has_sequence(
+                    self.connection, sequence.name, schema=effective_schema
                 )
             )
+        )
 
     def visit_metadata(self, metadata):
         if self.tables is not None:
@@ -733,18 +747,23 @@ class SchemaGenerator(DDLBase):
             tables = list(metadata.tables.values())
 
         collection = sort_tables_and_constraints(
-            [t for t in tables if self._can_create_table(t)])
-
-        seq_coll = [s for s in metadata._sequences.values()
-                    if s.column is None and self._can_create_sequence(s)]
+            [t for t in tables if self._can_create_table(t)]
+        )
 
-        event_collection = [
-            t for (t, fks) in collection if t is not None
+        seq_coll = [
+            s
+            for s in metadata._sequences.values()
+            if s.column is None and self._can_create_sequence(s)
         ]
-        metadata.dispatch.before_create(metadata, self.connection,
-                                        tables=event_collection,
-                                        checkfirst=self.checkfirst,
-                                        _ddl_runner=self)
+
+        event_collection = [t for (t, fks) in collection if t is not None]
+        metadata.dispatch.before_create(
+            metadata,
+            self.connection,
+            tables=event_collection,
+            checkfirst=self.checkfirst,
+            _ddl_runner=self,
+        )
 
         for seq in seq_coll:
             self.traverse_single(seq, create_ok=True)
@@ -752,30 +771,40 @@ class SchemaGenerator(DDLBase):
         for table, fkcs in collection:
             if table is not None:
                 self.traverse_single(
-                    table, create_ok=True,
+                    table,
+                    create_ok=True,
                     include_foreign_key_constraints=fkcs,
-                    _is_metadata_operation=True)
+                    _is_metadata_operation=True,
+                )
             else:
                 for fkc in fkcs:
                     self.traverse_single(fkc)
 
-        metadata.dispatch.after_create(metadata, self.connection,
-                                       tables=event_collection,
-                                       checkfirst=self.checkfirst,
-                                       _ddl_runner=self)
+        metadata.dispatch.after_create(
+            metadata,
+            self.connection,
+            tables=event_collection,
+            checkfirst=self.checkfirst,
+            _ddl_runner=self,
+        )
 
     def visit_table(
-            self, table, create_ok=False,
-            include_foreign_key_constraints=None,
-            _is_metadata_operation=False):
+        self,
+        table,
+        create_ok=False,
+        include_foreign_key_constraints=None,
+        _is_metadata_operation=False,
+    ):
 
         if not create_ok and not self._can_create_table(table):
             return
 
         table.dispatch.before_create(
-            table, self.connection,
+            table,
+            self.connection,
             checkfirst=self.checkfirst,
             _ddl_runner=self,
-            _is_metadata_operation=_is_metadata_operation)
+            _is_metadata_operation=_is_metadata_operation,
+        )
 
         for column in table.columns:
             if column.default is not None:
@@ -788,10 +817,11 @@ class SchemaGenerator(DDLBase):
         self.connection.execute(
             CreateTable(
                 table,
-                include_foreign_key_constraints=include_foreign_key_constraints
-            ))
+                include_foreign_key_constraints=include_foreign_key_constraints,
+            )
+        )
 
-        if hasattr(table, 'indexes'):
+        if hasattr(table, "indexes"):
             for index in table.indexes:
                 self.traverse_single(index)
 
@@ -804,10 +834,12 @@ class SchemaGenerator(DDLBase):
                     self.connection.execute(SetColumnComment(column))
 
         table.dispatch.after_create(
-            table, self.connection,
+            table,
+            self.connection,
             checkfirst=self.checkfirst,
             _ddl_runner=self,
-            _is_metadata_operation=_is_metadata_operation)
+            _is_metadata_operation=_is_metadata_operation,
+        )
 
     def visit_foreign_key_constraint(self, constraint):
         if not self.dialect.supports_alter:
@@ -824,9 +856,9 @@ class SchemaGenerator(DDLBase):
 
 
 class SchemaDropper(DDLBase):
-
-    def __init__(self, dialect, connection, checkfirst=False,
-                 tables=None, **kwargs):
+    def __init__(
+        self, dialect, connection, checkfirst=False, tables=None, **kwargs
+    ):
         super(SchemaDropper, self).__init__(connection, **kwargs)
         self.checkfirst = checkfirst
         self.tables = tables
@@ -842,15 +874,17 @@ class SchemaDropper(DDLBase):
         try:
             unsorted_tables = [t for t in tables if self._can_drop_table(t)]
 
-            collection = list(reversed(
-                sort_tables_and_constraints(
-                    unsorted_tables,
-                    filter_fn=lambda constraint: False
-                    if not self.dialect.supports_alter
-                    or constraint.name is None
-                    else None
+            collection = list(
+                reversed(
+                    sort_tables_and_constraints(
+                        unsorted_tables,
+                        filter_fn=lambda constraint: False
+                        if not self.dialect.supports_alter
+                        or constraint.name is None
+                        else None,
+                    )
                 )
-            ))
+            )
         except exc.CircularDependencyError as err2:
             if not self.dialect.supports_alter:
                 util.warn(
@@ -862,16 +896,15 @@ class SchemaDropper(DDLBase):
                     "ForeignKeyConstraint "
                     "objects involved in the cycle to mark these as known "
                     "cycles that will be ignored."
-                    % (
-                        ", ".join(sorted([t.fullname for t in err2.cycles]))
-                    )
+                    % (", ".join(sorted([t.fullname for t in err2.cycles])))
                 )
                 collection = [(t, ()) for t in unsorted_tables]
             else:
                 util.raise_from_cause(
                     exc.CircularDependencyError(
                         err2.args[0],
-                        err2.cycles, err2.edges,
+                        err2.cycles,
+                        err2.edges,
                         msg="Can't sort tables for DROP; an "
                         "unresolvable foreign key "
                         "dependency exists between tables: %s. Please ensure "
@@ -880,9 +913,10 @@ class SchemaDropper(DDLBase):
                         "names so that they can be dropped using "
                         "DROP CONSTRAINT."
                         % (
-                            ", ".join(sorted([t.fullname for t in err2.cycles]))
-                        )
-
+                            ", ".join(
+                                sorted([t.fullname for t in err2.cycles])
+                            )
+                        ),
                     )
                 )
 
@@ -892,18 +926,21 @@ class SchemaDropper(DDLBase):
             if s.column is None and self._can_drop_sequence(s)
         ]
 
-        event_collection = [
-            t for (t, fks) in collection if t is not None
-        ]
+        event_collection = [t for (t, fks) in collection if t is not None]
 
         metadata.dispatch.before_drop(
-            metadata, self.connection, tables=event_collection,
-            checkfirst=self.checkfirst, _ddl_runner=self)
+            metadata,
+            self.connection,
+            tables=event_collection,
+            checkfirst=self.checkfirst,
+            _ddl_runner=self,
+        )
 
         for table, fkcs in collection:
             if table is not None:
                 self.traverse_single(
-                    table, drop_ok=True, _is_metadata_operation=True)
+                    table, drop_ok=True, _is_metadata_operation=True
+                )
             else:
                 for fkc in fkcs:
                     self.traverse_single(fkc)
@@ -912,8 +949,12 @@ class SchemaDropper(DDLBase):
             self.traverse_single(seq, drop_ok=True)
 
         metadata.dispatch.after_drop(
-            metadata, self.connection, tables=event_collection,
-            checkfirst=self.checkfirst, _ddl_runner=self)
+            metadata,
+            self.connection,
+            tables=event_collection,
+            checkfirst=self.checkfirst,
+            _ddl_runner=self,
+        )
 
     def _can_drop_table(self, table):
         self.dialect.validate_identifier(table.name)
@@ -921,19 +962,20 @@ class SchemaDropper(DDLBase):
         if effective_schema:
             self.dialect.validate_identifier(effective_schema)
         return not self.checkfirst or self.dialect.has_table(
-            self.connection, table.name, schema=effective_schema)
+            self.connection, table.name, schema=effective_schema
+        )
 
     def _can_drop_sequence(self, sequence):
         effective_schema = self.connection.schema_for_object(sequence)
-        return self.dialect.supports_sequences and \
-            ((not self.dialect.sequences_optional or
-              not sequence.optional) and
-             (not self.checkfirst or
-              self.dialect.has_sequence(
-                  self.connection,
-                  sequence.name,
-                  schema=effective_schema))
-             )
+        return self.dialect.supports_sequences and (
+            (not self.dialect.sequences_optional or not sequence.optional)
+            and (
+                not self.checkfirst
+                or self.dialect.has_sequence(
+                    self.connection, sequence.name, schema=effective_schema
+                )
+            )
+        )
 
     def visit_index(self, index):
         self.connection.execute(DropIndex(index))
@@ -943,10 +985,12 @@ class SchemaDropper(DDLBase):
             return
 
         table.dispatch.before_drop(
-            table, self.connection,
+            table,
+            self.connection,
             checkfirst=self.checkfirst,
             _ddl_runner=self,
-            _is_metadata_operation=_is_metadata_operation)
+            _is_metadata_operation=_is_metadata_operation,
+        )
 
         self.connection.execute(DropTable(table))
 
@@ -960,10 +1004,12 @@ class SchemaDropper(DDLBase):
                     self.traverse_single(column.default)
 
         table.dispatch.after_drop(
-            table, self.connection,
+            table,
+            self.connection,
            checkfirst=self.checkfirst,
             _ddl_runner=self,
-            _is_metadata_operation=_is_metadata_operation)
+            _is_metadata_operation=_is_metadata_operation,
+        )
 
     def visit_foreign_key_constraint(self, constraint):
         if not self.dialect.supports_alter:
@@ -1019,25 +1065,29 @@ def sort_tables(tables, skip_fn=None, extra_dependencies=None):
 
     """
     if skip_fn is not None:
+
         def _skip_fn(fkc):
             for fk in fkc.elements:
                 if skip_fn(fk):
                     return True
             else:
                 return None
+
     else:
         _skip_fn = None
 
     return [
-        t for (t, fkcs) in
-        sort_tables_and_constraints(
-            tables, filter_fn=_skip_fn, extra_dependencies=extra_dependencies)
+        t
+        for (t, fkcs) in sort_tables_and_constraints(
+            tables, filter_fn=_skip_fn, extra_dependencies=extra_dependencies
+        )
         if t is not None
     ]
 
 
 def sort_tables_and_constraints(
-        tables, filter_fn=None, extra_dependencies=None):
+    tables, filter_fn=None, extra_dependencies=None
+):
     """sort a collection of :class:`.Table` / :class:`.ForeignKeyConstraint`
     objects.
 
@@ -1109,8 +1159,9 @@ def sort_tables_and_constraints(
     try:
         candidate_sort = list(
             topological.sort(
-                fixed_dependencies.union(mutable_dependencies), tables,
-                deterministic_order=True
+                fixed_dependencies.union(mutable_dependencies),
+                tables,
+                deterministic_order=True,
             )
         )
     except exc.CircularDependencyError as err:
@@ -1118,8 +1169,10 @@ def sort_tables_and_constraints(
             if edge in mutable_dependencies:
                 table = edge[1]
                 can_remove = [
-                    fkc for fkc in table.foreign_key_constraints
-                    if filter_fn is None or filter_fn(fkc) is not False]
+                    fkc
+                    for fkc in table.foreign_key_constraints
+                    if filter_fn is None or filter_fn(fkc) is not False
+                ]
                 remaining_fkcs.update(can_remove)
                 for fkc in can_remove:
                     dependent_on = fkc.referred_table
@@ -1127,8 +1180,9 @@ def sort_tables_and_constraints(
                         mutable_dependencies.discard((dependent_on, table))
 
         candidate_sort = list(
            topological.sort(
-                fixed_dependencies.union(mutable_dependencies), tables,
-                deterministic_order=True
+                fixed_dependencies.union(mutable_dependencies),
+                tables,
+                deterministic_order=True,
            )
        )