-rw-r--r--  .zuul.yaml  13
-rw-r--r--  doc/source/contributor/ptl-guide.rst  73
-rw-r--r--  doc/source/reference/database-migrations.rst  12
-rw-r--r--  nova/compute/manager.py  14
-rw-r--r--  nova/compute/resource_tracker.py  38
-rw-r--r--  nova/db/api/legacy_migrations/README  4
-rw-r--r--  nova/db/api/legacy_migrations/__init__.py  0
-rw-r--r--  nova/db/api/legacy_migrations/manage.py  20
-rw-r--r--  nova/db/api/legacy_migrations/migrate.cfg  20
-rw-r--r--  nova/db/api/legacy_migrations/versions/067_train.py  602
-rw-r--r--  nova/db/api/legacy_migrations/versions/068_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/069_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/070_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/071_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/072_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/073_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/074_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/075_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/076_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/077_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/078_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/079_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/080_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/081_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/082_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/083_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/084_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/085_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/086_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/087_placeholder.py  22
-rw-r--r--  nova/db/api/legacy_migrations/versions/__init__.py  0
-rw-r--r--  nova/db/main/legacy_migrations/README  4
-rw-r--r--  nova/db/main/legacy_migrations/__init__.py  0
-rw-r--r--  nova/db/main/legacy_migrations/manage.py  20
-rw-r--r--  nova/db/main/legacy_migrations/migrate.cfg  20
-rw-r--r--  nova/db/main/legacy_migrations/versions/402_train.py  1619
-rw-r--r--  nova/db/main/legacy_migrations/versions/403_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/404_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/405_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/406_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/407_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/408_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/409_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/410_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/411_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/412_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/413_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/414_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/415_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/416_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/417_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/418_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/419_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/420_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/421_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/422_placeholder.py  22
-rw-r--r--  nova/db/main/legacy_migrations/versions/__init__.py  0
-rw-r--r--  nova/db/migration.py  77
-rw-r--r--  nova/objects/service.py  2
-rw-r--r--  nova/pci/request.py  4
-rw-r--r--  nova/pci/stats.py  2
-rw-r--r--  nova/pci/whitelist.py  2
-rw-r--r--  nova/scheduler/client/report.py  2
-rw-r--r--  nova/scheduler/manager.py  35
-rw-r--r--  nova/tests/functional/libvirt/test_pci_sriov_servers.py  16
-rw-r--r--  nova/tests/functional/regressions/test_bug_1995153.py  109
-rw-r--r--  nova/tests/unit/compute/test_compute.py  19
-rw-r--r--  nova/tests/unit/compute/test_compute_mgr.py  43
-rw-r--r--  nova/tests/unit/compute/test_resource_tracker.py  19
-rw-r--r--  nova/tests/unit/compute/test_shelve.py  4
-rw-r--r--  nova/tests/unit/db/api/test_migrations.py  44
-rw-r--r--  nova/tests/unit/db/main/test_migrations.py  44
-rw-r--r--  nova/tests/unit/db/test_migration.py  189
-rw-r--r--  nova/tests/unit/scheduler/test_manager.py  36
-rw-r--r--  nova/virt/hyperv/driver.py  8
-rw-r--r--  nova/virt/libvirt/driver.py  4
-rw-r--r--  nova/virt/libvirt/event.py  7
-rw-r--r--  nova/virt/libvirt/utils.py  6
-rw-r--r--  releasenotes/notes/hyperv-experimental-antelope-372e18a05cafc295.yaml  6
-rw-r--r--  releasenotes/notes/remove-sqlalchemy-migrate-907c200314884d81.yaml  5
-rw-r--r--  requirements.txt  1
-rw-r--r--  tox.ini  2
82 files changed, 387 insertions, 3638 deletions
diff --git a/.zuul.yaml b/.zuul.yaml
index abe4d2fa4a..9c41476e68 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -658,15 +658,6 @@
image_conversion:
output_format: raw
-# TODO(gmann): As per the 2023.1 testing runtime, we need to run at least
-# one job on Focal. This job can be removed as per the future testing
-# runtime (whenever we drop the Ubuntu Focal testing).
-- job:
- name: tempest-integrated-compute-ubuntu-focal
- description: This is integrated compute job testing on Ubuntu Focal(20.04)
- parent: tempest-integrated-compute
- nodeset: openstack-single-node-focal
-
# TODO(gmann): Remove this jobs once all the required services for intergrate
# compute gate (Cinder, Glance, Neutron) by default enable scope and new
# defaults which means all the nova jobs will be tested with new RBAC in
@@ -753,8 +744,6 @@
- ^setup.cfg$
- ^tools/.*$
- ^tox.ini$
- - tempest-integrated-compute-ubuntu-focal:
- irrelevant-files: *policies-irrelevant-files
- tempest-integrated-compute-enforce-scope-new-defaults:
irrelevant-files: *policies-irrelevant-files
- grenade-skip-level-always:
@@ -790,8 +779,6 @@
- ^(?!nova/network/.*)(?!nova/virt/libvirt/vif.py).*$
- tempest-integrated-compute:
irrelevant-files: *policies-irrelevant-files
- - tempest-integrated-compute-ubuntu-focal:
- irrelevant-files: *policies-irrelevant-files
- tempest-integrated-compute-enforce-scope-new-defaults:
irrelevant-files: *policies-irrelevant-files
- grenade-skip-level-always:
diff --git a/doc/source/contributor/ptl-guide.rst b/doc/source/contributor/ptl-guide.rst
index 813f1bc83e..b530b100bc 100644
--- a/doc/source/contributor/ptl-guide.rst
+++ b/doc/source/contributor/ptl-guide.rst
@@ -29,7 +29,11 @@ New PTL
* Get acquainted with the release schedule
- * Example: https://wiki.openstack.org/wiki/Nova/Stein_Release_Schedule
+ * Example: https://releases.openstack.org/antelope/schedule.html
+
+ * Also, note that we usually create a specific wiki page for each cycle like
+ https://wiki.openstack.org/wiki/Nova/2023.1_Release_Schedule but it's
+ preferred to use the main release schedule above.
Project Team Gathering
----------------------
@@ -37,30 +41,34 @@ Project Team Gathering
* Create PTG planning etherpad, retrospective etherpad and alert about it in
nova meeting and dev mailing list
- * Example: https://etherpad.openstack.org/p/nova-ptg-stein
+ * Example: https://etherpad.opendev.org/p/nova-antelope-ptg
* Run sessions at the PTG
-* Have a priorities discussion at the PTG
+* Do a retro of the previous cycle
- * Example: https://etherpad.openstack.org/p/nova-ptg-stein-priorities
+* Agree on the agenda for this release, including but not limited to:
-* Sign up for group photo at the PTG (if applicable)
+ * Number of review days, for either specs or implementation
+ * Define the Spec approval and Feature freeze dates
+ * Modify the release schedule if needed by adding the new dates.
+ As an example: https://review.opendev.org/c/openstack/releases/+/877094
+
+* Discuss the implications of the current release being a `SLURP or non-SLURP`__ one
-* Open review runways for the cycle
+.. __: https://governance.openstack.org/tc/resolutions/20220210-release-cadence-adjustment.html
+
+* Sign up for group photo at the PTG (if applicable)
- * Example: https://etherpad.openstack.org/p/nova-runways-stein
After PTG
---------
* Send PTG session summaries to the dev mailing list
-* Make sure the cycle priorities spec gets reviewed and merged
-
- * Example: https://specs.openstack.org/openstack/nova-specs/priorities/stein-priorities.html
+* Add `RFE bugs`__ if you have action items that are simple to do but don't have an owner yet.
-* Run the count-blueprints script daily to gather data for the cycle burndown chart
+.. __: https://bugs.launchpad.net/nova/+bugs?field.tag=rfe
A few weeks before milestone 1
------------------------------
@@ -70,12 +78,13 @@ A few weeks before milestone 1
* Periodically check the series goals others have proposed in the “Set series
goals” link:
- * Example: https://blueprints.launchpad.net/nova/stein/+setgoals
+ * Example: https://blueprints.launchpad.net/nova/antelope/+setgoals
Milestone 1
-----------
-* Do milestone release of nova and python-novaclient (in launchpad only)
+* Do milestone release of nova and python-novaclient (in launchpad only, can be
+ optional)
* This is launchpad bookkeeping only. With the latest release team changes,
projects no longer do milestone releases. See: https://releases.openstack.org/reference/release_models.html#cycle-with-milestones-legacy
@@ -87,6 +96,8 @@ Milestone 1
the minor version to leave room for future stable branch releases
* os-vif
+ * placement
+ * os-traits / os-resource-classes
* Release stable branches of nova
@@ -117,28 +128,26 @@ Summit
* Prepare the on-boarding session materials. Enlist help of others
+* Prepare the operator meet-and-greet session. Enlist help of others
+
A few weeks before milestone 2
------------------------------
* Plan a spec review day (optional)
-* Periodically check the series goals others have proposed in the “Set series
- goals” link:
-
- * Example: https://blueprints.launchpad.net/nova/stein/+setgoals
-
Milestone 2
-----------
-* Spec freeze
+* Spec freeze (if agreed)
-* Release nova and python-novaclient
+* Release nova and python-novaclient (if new features were merged)
* Release other libraries as needed
* Stable branch releases of nova
* For nova, set the launchpad milestone release as “released” with the date
+ (can be optional)
Shortly after spec freeze
-------------------------
@@ -146,7 +155,7 @@ Shortly after spec freeze
* Create a blueprint status etherpad to help track, especially non-priority
blueprint work, to help things get done by Feature Freeze (FF). Example:
- * https://etherpad.openstack.org/p/nova-stein-blueprint-status
+ * https://etherpad.opendev.org/p/nova-antelope-blueprint-status
* Create or review a patch to add the next release’s specs directory so people
can propose specs for next release after spec freeze for current release
@@ -155,13 +164,15 @@ Non-client library release freeze
---------------------------------
* Final release for os-vif
+* Final release for os-traits
+* Final release for os-resource-classes
Milestone 3
-----------
* Feature freeze day
-* Client library freeze, release python-novaclient
+* Client library freeze, release python-novaclient and osc-placement
* Close out all blueprints, including “catch all” blueprints like mox,
versioned notifications
@@ -170,7 +181,7 @@ Milestone 3
* For nova, set the launchpad milestone release as “released” with the date
-* Write the `cycle highlights
+* Start writing the `cycle highlights
<https://docs.openstack.org/project-team-guide/release-management.html#cycle-highlights>`__
Week following milestone 3
@@ -199,7 +210,7 @@ A few weeks before RC
* Make a RC1 todos etherpad and tag bugs as ``<release>-rc-potential`` and keep
track of them, example:
- * https://etherpad.openstack.org/p/nova-stein-rc-potential
+ * https://etherpad.opendev.org/p/nova-antelope-rc-potential
* Go through the bug list and identify any rc-potential bugs and tag them
@@ -242,7 +253,7 @@ RC
* Example: https://review.opendev.org/644412
-* Write the cycle-highlights in marketing-friendly sentences and propose to the
+* Push the cycle-highlights in marketing-friendly sentences and propose to the
openstack/releases repo. Usually based on reno prelude but made more readable
and friendly
@@ -257,11 +268,13 @@ Immediately after RC
* https://wiki.openstack.org/wiki/Nova/ReleaseChecklist
- * Drop old RPC compat code (if there was a RPC major version bump)
+ * Drop old RPC compat code (if there was a RPC major version bump and if
+ agreed on at the PTG)
* Example: https://review.opendev.org/543580
- * Bump the oldest supported compute service version
+ * Bump the oldest supported compute service version (if master branch is now
+ on a non-SLURP version)
* https://review.opendev.org/#/c/738482/
@@ -275,7 +288,9 @@ Immediately after RC
* Set the previous to last series status to “supported”
-* Repeat launchpad steps ^ for python-novaclient
+* Repeat launchpad steps ^ for python-novaclient (optional)
+
+* Repeat launchpad steps ^ for placement
* Register milestones in launchpad for the new cycle based on the new cycle
release schedule
@@ -293,7 +308,7 @@ Immediately after RC
* Create new release wiki:
- * Example: https://wiki.openstack.org/wiki/Nova/Train_Release_Schedule
+ * Example: https://wiki.openstack.org/wiki/Nova/2023.1_Release_Schedule
* Update the contributor guide for the new cycle
diff --git a/doc/source/reference/database-migrations.rst b/doc/source/reference/database-migrations.rst
index add7597e93..ea2b9050d9 100644
--- a/doc/source/reference/database-migrations.rst
+++ b/doc/source/reference/database-migrations.rst
@@ -24,6 +24,10 @@ Schema migrations
The database migration engine was changed from ``sqlalchemy-migrate`` to
``alembic``.
+.. versionchanged:: 27.0.0 (Antelope)
+
+ The legacy ``sqlalchemy-migrate``-based database migrations were removed.
+
The `alembic`__ database migration tool is used to manage schema migrations in
nova. The migration files and related metadata can be found in
``nova/db/api/migrations`` (for the API database) and
@@ -36,10 +40,10 @@ respectively.
.. note::
- There are also legacy migrations provided in the ``legacy_migrations``
- subdirectory for both the API and main databases. These are provided to
- facilitate upgrades from pre-Xena (24.0.0) deployments and will be removed
- in a future release. They should not be modified or extended.
+ There were also legacy migrations provided in the ``legacy_migrations``
+ subdirectory for both the API and main databases. These were provided to
+ facilitate upgrades from pre-Xena (24.0.0) deployments. They were removed
+ in the 27.0.0 (Antelope) release.
The best reference for alembic is the `alembic documentation`__, but a small
example is provided here. You can create the migration either manually or
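
For context, a minimal sketch of an alembic-based migration of the kind this document now describes; the revision identifiers and the added column below are hypothetical, not taken from nova's tree:

    """Add example column

    Revision ID: abcdef123456
    Revises: 0123456789ab
    """
    from alembic import op
    import sqlalchemy as sa

    # Hypothetical identifiers, for illustration only.
    revision = 'abcdef123456'
    down_revision = '0123456789ab'


    def upgrade():
        # Adding a nullable column keeps the migration additive and safe to
        # apply against a live schema.
        op.add_column(
            'instances',
            sa.Column('example_flag', sa.Boolean(), nullable=True))
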
diff --git a/nova/compute/manager.py b/nova/compute/manager.py
index 5ea71827fc..5c42aa4d89 100644
--- a/nova/compute/manager.py
+++ b/nova/compute/manager.py
@@ -3791,9 +3791,21 @@ class ComputeManager(manager.Manager):
try:
compute_node = self._get_compute_info(context, self.host)
scheduled_node = compute_node.hypervisor_hostname
- except exception.ComputeHostNotFound:
+ except exception.ComputeHostNotFound as e:
+ # This means we were asked to rebuild one of our own
+ # instances, or another instance as a target of an
+ # evacuation, but we are unable to find a matching compute
+ # node.
LOG.exception('Failed to get compute_info for %s',
self.host)
+ self._set_migration_status(migration, 'failed')
+ self._notify_instance_rebuild_error(context, instance, e,
+ bdms)
+ raise exception.InstanceFaultRollback(
+ inner_exception=exception.BuildAbortException(
+ instance_uuid=instance.uuid,
+ reason=e.format_message()))
+
else:
scheduled_node = instance.node
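
The rollback wrapping added above follows a pattern along these lines; this is a simplified, self-contained sketch with hypothetical names, not nova's actual call path. The inner BuildAbortException becomes the recorded instance fault, while InstanceFaultRollback tells the caller to revert the instance's state instead of leaving it half-rebuilt.

    class BuildAbortException(Exception):
        pass


    class InstanceFaultRollback(Exception):
        def __init__(self, inner_exception):
            super().__init__(str(inner_exception))
            self.inner_exception = inner_exception


    def _do_rebuild(find_compute_node):
        try:
            find_compute_node()
        except LookupError as e:
            # Wrap the abort so the caller can both record the fault and roll
            # the instance back to its previous state.
            raise InstanceFaultRollback(
                inner_exception=BuildAbortException(str(e)))


    def rebuild_instance(find_compute_node):
        try:
            _do_rebuild(find_compute_node)
        except InstanceFaultRollback as e:
            print('recording fault and reverting state: %s' % e.inner_exception)


    def _missing_node():
        raise LookupError('no matching compute node')


    rebuild_instance(_missing_node)
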
diff --git a/nova/compute/resource_tracker.py b/nova/compute/resource_tracker.py
index 3f911f3708..9ee6670c17 100644
--- a/nova/compute/resource_tracker.py
+++ b/nova/compute/resource_tracker.py
@@ -146,16 +146,20 @@ class ResourceTracker(object):
during the instance build.
"""
if self.disabled(nodename):
- # instance_claim() was called before update_available_resource()
- # (which ensures that a compute node exists for nodename). We
- # shouldn't get here but in case we do, just set the instance's
- # host and nodename attribute (probably incorrect) and return a
- # NoopClaim.
- # TODO(jaypipes): Remove all the disabled junk from the resource
- # tracker. Servicegroup API-level active-checking belongs in the
- # nova-compute manager.
- self._set_instance_host_and_node(instance, nodename)
- return claims.NopClaim()
+ # If we get here, it means we are trying to claim for an instance
+ # that was scheduled to a node that we do not have in our list,
+ # or is in some other way unmanageable by this node. This would
+ # mean that we are unable to account for resources, create
+ # allocations in placement, or do any of the other accounting
+ # necessary for this to work. In the past, this situation was
+ # effectively ignored silently, but in a world where we track
+ # resources with placement and instance assignment to compute nodes
+ # by service, we can no longer be leaky.
+ raise exception.ComputeResourcesUnavailable(
+ ('Attempt to claim resources for instance %(inst)s '
+ 'on unknown node %(node)s failed') % {
+ 'inst': instance.uuid,
+ 'node': nodename})
# sanity checks:
if instance.host:
@@ -280,9 +284,17 @@ class ResourceTracker(object):
context, instance, new_flavor, nodename, move_type)
if self.disabled(nodename):
- # compute_driver doesn't support resource tracking, just
- # generate the migration record and continue the resize:
- return claims.NopClaim(migration=migration)
+ # This means we were asked to accept an incoming migration to a
+ # node that we do not own or track. We really should not get here,
+ # but if we do, we must refuse to continue with the migration
+ # process, since we cannot account for those resources, create
+ # allocations in placement, etc. This has been a silent resource
+ # leak in the past, but it must be a hard failure now.
+ raise exception.ComputeResourcesUnavailable(
+ ('Attempt to claim move resources for instance %(inst)s on '
+ 'unknown node %(node)s failed') % {
+ 'inst': instance.uuid,
+ 'node': nodename})
cn = self.compute_nodes[nodename]
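
The behavioral change above, refusing claims against untracked nodes instead of silently returning a no-op claim, can be illustrated with a small self-contained sketch; the names below are simplified stand-ins, not nova's actual ResourceTracker:

    class ComputeResourcesUnavailable(Exception):
        pass


    class SimplifiedResourceTracker:
        def __init__(self, tracked_nodes):
            self.compute_nodes = set(tracked_nodes)

        def disabled(self, nodename):
            return nodename not in self.compute_nodes

        def instance_claim(self, instance_uuid, nodename):
            if self.disabled(nodename):
                # Previously this path silently returned a no-op claim and
                # leaked resources; now it is a hard failure.
                raise ComputeResourcesUnavailable(
                    'Attempt to claim resources for instance %s on unknown '
                    'node %s failed' % (instance_uuid, nodename))
            return 'claim-on-%s' % nodename


    rt = SimplifiedResourceTracker(tracked_nodes={'node1'})
    print(rt.instance_claim('11111111-2222-3333-4444-555555555555', 'node1'))
    try:
        rt.instance_claim('66666666-7777-8888-9999-000000000000', 'node2')
    except ComputeResourcesUnavailable as exc:
        print(exc)
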
diff --git a/nova/db/api/legacy_migrations/README b/nova/db/api/legacy_migrations/README
deleted file mode 100644
index 6218f8cac4..0000000000
--- a/nova/db/api/legacy_migrations/README
+++ /dev/null
@@ -1,4 +0,0 @@
-This is a database migration repository.
-
-More information at
-http://code.google.com/p/sqlalchemy-migrate/
diff --git a/nova/db/api/legacy_migrations/__init__.py b/nova/db/api/legacy_migrations/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/nova/db/api/legacy_migrations/__init__.py
+++ /dev/null
diff --git a/nova/db/api/legacy_migrations/manage.py b/nova/db/api/legacy_migrations/manage.py
deleted file mode 100644
index 6c2b3842ba..0000000000
--- a/nova/db/api/legacy_migrations/manage.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from migrate.versioning.shell import main
-
-
-if __name__ == '__main__':
- main(debug='False', repository='.')
diff --git a/nova/db/api/legacy_migrations/migrate.cfg b/nova/db/api/legacy_migrations/migrate.cfg
deleted file mode 100644
index 3e2ccef016..0000000000
--- a/nova/db/api/legacy_migrations/migrate.cfg
+++ /dev/null
@@ -1,20 +0,0 @@
-[db_settings]
-# Used to identify which repository this database is versioned under.
-# You can use the name of your project.
-repository_id=nova_api
-
-# The name of the database table used to track the schema version.
-# This name shouldn't already be used by your project.
-# If this is changed once a database is under version control, you'll need to
-# change the table name in each database too.
-version_table=migrate_version
-
-# When committing a change script, Migrate will attempt to generate the
-# sql for all supported databases; normally, if one of them fails - probably
-# because you don't have that database installed - it is ignored and the
-# commit continues, perhaps ending successfully.
-# Databases in this list MUST compile successfully during a commit, or the
-# entire commit will fail. List the databases your application will actually
-# be using to ensure your updates to that database work properly.
-# This must be a list; example: ['postgres','sqlite']
-required_dbs=[]
diff --git a/nova/db/api/legacy_migrations/versions/067_train.py b/nova/db/api/legacy_migrations/versions/067_train.py
deleted file mode 100644
index 6b82b17e4b..0000000000
--- a/nova/db/api/legacy_migrations/versions/067_train.py
+++ /dev/null
@@ -1,602 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from migrate.changeset.constraint import ForeignKeyConstraint
-from migrate import UniqueConstraint
-import sqlalchemy as sa
-from sqlalchemy import dialects
-
-from nova.db import types
-from nova.objects import keypair
-
-
-def InetSmall():
- return sa.String(length=39).with_variant(
- dialects.postgresql.INET(), 'postgresql'
- )
-
-
-def upgrade(migrate_engine):
- meta = sa.MetaData()
- # NOTE(stephenfin): This is not compatible with SQLAlchemy 2.0 but neither
- # is sqlalchemy-migrate which requires this. We'll remove these migrations
- # when dropping SQLAlchemy < 2.x support
- meta.bind = migrate_engine
-
- cell_mappings = sa.Table('cell_mappings', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column('name', sa.String(length=255)),
- sa.Column('transport_url', sa.Text()),
- sa.Column('database_connection', sa.Text()),
- # NOTE(stephenfin): These were originally added by sqlalchemy-migrate
- # which did not generate the constraints
- sa.Column(
- 'disabled', sa.Boolean(create_constraint=False), default=False),
- UniqueConstraint('uuid', name='uniq_cell_mappings0uuid'),
- sa.Index('uuid_idx', 'uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- host_mappings = sa.Table('host_mappings', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('cell_id', sa.Integer, nullable=False),
- sa.Column('host', sa.String(length=255), nullable=False),
- UniqueConstraint(
- 'host', name='uniq_host_mappings0host'),
- sa.Index('host_idx', 'host'),
- ForeignKeyConstraint(
- columns=['cell_id'], refcolumns=[cell_mappings.c.id]),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- instance_mappings = sa.Table('instance_mappings', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('instance_uuid', sa.String(length=36), nullable=False),
- sa.Column('cell_id', sa.Integer, nullable=True),
- sa.Column('project_id', sa.String(length=255), nullable=False),
- # NOTE(stephenfin): These were originally added by sqlalchemy-migrate
- # which did not generate the constraints
- sa.Column(
- 'queued_for_delete', sa.Boolean(create_constraint=False),
- default=False),
- sa.Column('user_id', sa.String(length=255), nullable=True),
- UniqueConstraint(
- 'instance_uuid', name='uniq_instance_mappings0instance_uuid'),
- sa.Index('instance_uuid_idx', 'instance_uuid'),
- sa.Index('project_id_idx', 'project_id'),
- sa.Index(
- 'instance_mappings_user_id_project_id_idx', 'user_id',
- 'project_id'),
- ForeignKeyConstraint(
- columns=['cell_id'], refcolumns=[cell_mappings.c.id]),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- flavors = sa.Table('flavors', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('name', sa.String(length=255), nullable=False),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('memory_mb', sa.Integer, nullable=False),
- sa.Column('vcpus', sa.Integer, nullable=False),
- sa.Column('swap', sa.Integer, nullable=False),
- sa.Column('vcpu_weight', sa.Integer),
- sa.Column('flavorid', sa.String(length=255), nullable=False),
- sa.Column('rxtx_factor', sa.Float),
- sa.Column('root_gb', sa.Integer),
- sa.Column('ephemeral_gb', sa.Integer),
- sa.Column('disabled', sa.Boolean),
- sa.Column('is_public', sa.Boolean),
- sa.Column('description', sa.Text()),
- UniqueConstraint('flavorid', name='uniq_flavors0flavorid'),
- UniqueConstraint('name', name='uniq_flavors0name'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- flavor_extra_specs = sa.Table('flavor_extra_specs', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('flavor_id', sa.Integer, nullable=False),
- sa.Column('key', sa.String(length=255), nullable=False),
- sa.Column('value', sa.String(length=255)),
- UniqueConstraint(
- 'flavor_id', 'key', name='uniq_flavor_extra_specs0flavor_id0key'),
- sa.Index('flavor_extra_specs_flavor_id_key_idx', 'flavor_id', 'key'),
- ForeignKeyConstraint(columns=['flavor_id'], refcolumns=[flavors.c.id]),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- flavor_projects = sa.Table('flavor_projects', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('flavor_id', sa.Integer, nullable=False),
- sa.Column('project_id', sa.String(length=255), nullable=False),
- UniqueConstraint(
- 'flavor_id', 'project_id',
- name='uniq_flavor_projects0flavor_id0project_id'),
- ForeignKeyConstraint(
- columns=['flavor_id'], refcolumns=[flavors.c.id]),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- request_specs = sa.Table('request_specs', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('instance_uuid', sa.String(36), nullable=False),
- sa.Column('spec', types.MediumText(), nullable=False),
- UniqueConstraint(
- 'instance_uuid', name='uniq_request_specs0instance_uuid'),
- sa.Index('request_spec_instance_uuid_idx', 'instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- build_requests = sa.Table('build_requests', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('request_spec_id', sa.Integer, nullable=True),
- sa.Column('project_id', sa.String(length=255), nullable=False),
- sa.Column('user_id', sa.String(length=255), nullable=True),
- sa.Column('display_name', sa.String(length=255)),
- sa.Column('instance_metadata', sa.Text),
- sa.Column('progress', sa.Integer),
- sa.Column('vm_state', sa.String(length=255)),
- sa.Column('task_state', sa.String(length=255)),
- sa.Column('image_ref', sa.String(length=255)),
- sa.Column('access_ip_v4', InetSmall()),
- sa.Column('access_ip_v6', InetSmall()),
- sa.Column('info_cache', sa.Text),
- sa.Column('security_groups', sa.Text, nullable=True),
- sa.Column('config_drive', sa.Boolean, default=False, nullable=True),
- sa.Column('key_name', sa.String(length=255)),
- sa.Column(
- 'locked_by',
- sa.Enum('owner', 'admin', name='build_requests0locked_by')),
- sa.Column('instance_uuid', sa.String(length=36)),
- sa.Column('instance', types.MediumText()),
- sa.Column('block_device_mappings', types.MediumText()),
- sa.Column('tags', sa.Text()),
- UniqueConstraint(
- 'instance_uuid', name='uniq_build_requests0instance_uuid'),
- sa.Index('build_requests_project_id_idx', 'project_id'),
- sa.Index('build_requests_instance_uuid_idx', 'instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- keypairs = sa.Table('key_pairs', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('name', sa.String(255), nullable=False),
- sa.Column('user_id', sa.String(255), nullable=False),
- sa.Column('fingerprint', sa.String(255)),
- sa.Column('public_key', sa.Text()),
- sa.Column(
- 'type',
- sa.Enum('ssh', 'x509', metadata=meta, name='keypair_types'),
- nullable=False, server_default=keypair.KEYPAIR_TYPE_SSH),
- UniqueConstraint(
- 'user_id', 'name', name='uniq_key_pairs0user_id0name'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- projects = sa.Table('projects', meta,
- sa.Column(
- 'id', sa.Integer, primary_key=True, nullable=False,
- autoincrement=True),
- sa.Column('external_id', sa.String(length=255), nullable=False),
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- UniqueConstraint('external_id', name='uniq_projects0external_id'),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- users = sa.Table('users', meta,
- sa.Column(
- 'id', sa.Integer, primary_key=True, nullable=False,
- autoincrement=True),
- sa.Column('external_id', sa.String(length=255), nullable=False),
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- UniqueConstraint('external_id', name='uniq_users0external_id'),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- resource_classes = sa.Table('resource_classes', meta,
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('name', sa.String(length=255), nullable=False),
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- UniqueConstraint('name', name='uniq_resource_classes0name'),
- mysql_engine='InnoDB',
- mysql_charset='latin1'
- )
-
- nameargs = {}
- if migrate_engine.name == 'mysql':
- nameargs['collation'] = 'utf8_bin'
-
- resource_providers = sa.Table(
- 'resource_providers', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(36), nullable=False),
- sa.Column('name', sa.Unicode(200, **nameargs), nullable=True),
- sa.Column('generation', sa.Integer, default=0),
- sa.Column('can_host', sa.Integer, default=0),
- sa.Column(
- 'root_provider_id', sa.Integer,
- sa.ForeignKey('resource_providers.id')),
- sa.Column(
- 'parent_provider_id', sa.Integer,
- sa.ForeignKey('resource_providers.id')),
- UniqueConstraint('uuid', name='uniq_resource_providers0uuid'),
- UniqueConstraint('name', name='uniq_resource_providers0name'),
- sa.Index('resource_providers_name_idx', 'name'),
- sa.Index('resource_providers_uuid_idx', 'uuid'),
- sa.Index(
- 'resource_providers_root_provider_id_idx', 'root_provider_id'),
- sa.Index(
- 'resource_providers_parent_provider_id_idx', 'parent_provider_id'),
- mysql_engine='InnoDB',
- mysql_charset='latin1'
- )
-
- inventories = sa.Table(
- 'inventories', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('resource_provider_id', sa.Integer, nullable=False),
- sa.Column('resource_class_id', sa.Integer, nullable=False),
- sa.Column('total', sa.Integer, nullable=False),
- sa.Column('reserved', sa.Integer, nullable=False),
- sa.Column('min_unit', sa.Integer, nullable=False),
- sa.Column('max_unit', sa.Integer, nullable=False),
- sa.Column('step_size', sa.Integer, nullable=False),
- sa.Column('allocation_ratio', sa.Float, nullable=False),
- sa.Index(
- 'inventories_resource_provider_id_idx', 'resource_provider_id'),
- sa.Index(
- 'inventories_resource_provider_resource_class_idx',
- 'resource_provider_id', 'resource_class_id'),
- sa.Index(
- 'inventories_resource_class_id_idx', 'resource_class_id'),
- UniqueConstraint(
- 'resource_provider_id', 'resource_class_id',
- name='uniq_inventories0resource_provider_resource_class'),
- mysql_engine='InnoDB',
- mysql_charset='latin1'
- )
-
- traits = sa.Table(
- 'traits', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column(
- 'id', sa.Integer, primary_key=True, nullable=False,
- autoincrement=True),
- sa.Column('name', sa.Unicode(255, **nameargs), nullable=False),
- UniqueConstraint('name', name='uniq_traits0name'),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- allocations = sa.Table(
- 'allocations', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('resource_provider_id', sa.Integer, nullable=False),
- sa.Column('consumer_id', sa.String(36), nullable=False),
- sa.Column('resource_class_id', sa.Integer, nullable=False),
- sa.Column('used', sa.Integer, nullable=False),
- sa.Index(
- 'allocations_resource_provider_class_used_idx',
- 'resource_provider_id', 'resource_class_id', 'used'),
- sa.Index(
- 'allocations_resource_class_id_idx', 'resource_class_id'),
- sa.Index('allocations_consumer_id_idx', 'consumer_id'),
- mysql_engine='InnoDB',
- mysql_charset='latin1'
- )
-
- consumers = sa.Table(
- 'consumers', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column(
- 'id', sa.Integer, primary_key=True, nullable=False,
- autoincrement=True),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column('project_id', sa.Integer, nullable=False),
- sa.Column('user_id', sa.Integer, nullable=False),
- sa.Column(
- 'generation', sa.Integer, default=0, server_default=sa.text('0'),
- nullable=False),
- sa.Index('consumers_project_id_uuid_idx', 'project_id', 'uuid'),
- sa.Index(
- 'consumers_project_id_user_id_uuid_idx', 'project_id', 'user_id',
- 'uuid'),
- UniqueConstraint('uuid', name='uniq_consumers0uuid'),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- resource_provider_aggregates = sa.Table(
- 'resource_provider_aggregates', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column(
- 'resource_provider_id', sa.Integer, primary_key=True,
- nullable=False),
- sa.Column(
- 'aggregate_id', sa.Integer, primary_key=True, nullable=False),
- sa.Index(
- 'resource_provider_aggregates_aggregate_id_idx', 'aggregate_id'),
- mysql_engine='InnoDB',
- mysql_charset='latin1'
- )
-
- resource_provider_traits = sa.Table(
- 'resource_provider_traits', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column(
- 'trait_id', sa.Integer, sa.ForeignKey('traits.id'),
- primary_key=True, nullable=False),
- sa.Column(
- 'resource_provider_id', sa.Integer, primary_key=True,
- nullable=False),
- sa.Index(
- 'resource_provider_traits_resource_provider_trait_idx',
- 'resource_provider_id', 'trait_id'),
- ForeignKeyConstraint(
- columns=['resource_provider_id'],
- refcolumns=[resource_providers.c.id]),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- placement_aggregates = sa.Table('placement_aggregates', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(length=36), index=True),
- UniqueConstraint('uuid', name='uniq_placement_aggregates0uuid'),
- mysql_engine='InnoDB',
- mysql_charset='latin1'
- )
-
- aggregates = sa.Table('aggregates', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(length=36)),
- sa.Column('name', sa.String(length=255)),
- sa.Index('aggregate_uuid_idx', 'uuid'),
- UniqueConstraint('name', name='uniq_aggregate0name'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- aggregate_hosts = sa.Table('aggregate_hosts', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('host', sa.String(length=255)),
- sa.Column(
- 'aggregate_id', sa.Integer, sa.ForeignKey('aggregates.id'),
- nullable=False),
- UniqueConstraint(
- 'host', 'aggregate_id',
- name='uniq_aggregate_hosts0host0aggregate_id'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- aggregate_metadata = sa.Table('aggregate_metadata', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'aggregate_id', sa.Integer, sa.ForeignKey('aggregates.id'),
- nullable=False),
- sa.Column('key', sa.String(length=255), nullable=False),
- sa.Column('value', sa.String(length=255), nullable=False),
- UniqueConstraint(
- 'aggregate_id', 'key',
- name='uniq_aggregate_metadata0aggregate_id0key'),
- sa.Index('aggregate_metadata_key_idx', 'key'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- groups = sa.Table('instance_groups', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('user_id', sa.String(length=255)),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column('name', sa.String(length=255)),
- UniqueConstraint(
- 'uuid', name='uniq_instance_groups0uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- group_policy = sa.Table('instance_group_policy', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('policy', sa.String(length=255)),
- sa.Column(
- 'group_id', sa.Integer, sa.ForeignKey('instance_groups.id'),
- nullable=False),
- sa.Column('rules', sa.Text),
- sa.Index('instance_group_policy_policy_idx', 'policy'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- group_member = sa.Table('instance_group_member', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('instance_uuid', sa.String(length=255)),
- sa.Column(
- 'group_id', sa.Integer, sa.ForeignKey('instance_groups.id'),
- nullable=False),
- sa.Index('instance_group_member_instance_idx', 'instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- quota_classes = sa.Table('quota_classes', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('class_name', sa.String(length=255)),
- sa.Column('resource', sa.String(length=255)),
- sa.Column('hard_limit', sa.Integer),
- sa.Index('quota_classes_class_name_idx', 'class_name'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- quota_usages = sa.Table('quota_usages', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('resource', sa.String(length=255), nullable=False),
- sa.Column('in_use', sa.Integer, nullable=False),
- sa.Column('reserved', sa.Integer, nullable=False),
- sa.Column('until_refresh', sa.Integer),
- sa.Column('user_id', sa.String(length=255)),
- sa.Index('quota_usages_project_id_idx', 'project_id'),
- sa.Index('quota_usages_user_id_idx', 'user_id'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- quotas = sa.Table('quotas', meta,
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('resource', sa.String(length=255), nullable=False),
- sa.Column('hard_limit', sa.Integer),
- UniqueConstraint(
- 'project_id', 'resource', name='uniq_quotas0project_id0resource'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- project_user_quotas = sa.Table('project_user_quotas', meta,
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('user_id', sa.String(length=255), nullable=False),
- sa.Column('project_id', sa.String(length=255), nullable=False),
- sa.Column('resource', sa.String(length=255), nullable=False),
- sa.Column('hard_limit', sa.Integer, nullable=True),
- UniqueConstraint(
- 'user_id', 'project_id', 'resource',
- name='uniq_project_user_quotas0user_id0project_id0resource'),
- sa.Index(
- 'project_user_quotas_project_id_idx', 'project_id'),
- sa.Index(
- 'project_user_quotas_user_id_idx', 'user_id'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- reservations = sa.Table('reservations', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column(
- 'usage_id', sa.Integer, sa.ForeignKey('quota_usages.id'),
- nullable=False),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('resource', sa.String(length=255)),
- sa.Column('delta', sa.Integer, nullable=False),
- sa.Column('expire', sa.DateTime),
- sa.Column('user_id', sa.String(length=255)),
- sa.Index('reservations_project_id_idx', 'project_id'),
- sa.Index('reservations_uuid_idx', 'uuid'),
- sa.Index('reservations_expire_idx', 'expire'),
- sa.Index('reservations_user_id_idx', 'user_id'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- tables = [
- cell_mappings,
- host_mappings,
- instance_mappings,
- flavors,
- flavor_extra_specs,
- flavor_projects,
- request_specs,
- build_requests,
- keypairs,
- projects,
- users,
- resource_classes,
- resource_providers,
- inventories,
- traits,
- allocations,
- consumers,
- resource_provider_aggregates,
- resource_provider_traits,
- placement_aggregates,
- aggregates,
- aggregate_hosts,
- aggregate_metadata,
- groups,
- group_policy,
- group_member,
- quota_classes,
- quota_usages,
- quotas,
- project_user_quotas,
- reservations,
- ]
- for table in tables:
- table.create(checkfirst=True)
diff --git a/nova/db/api/legacy_migrations/versions/068_placeholder.py b/nova/db/api/legacy_migrations/versions/068_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/068_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/069_placeholder.py b/nova/db/api/legacy_migrations/versions/069_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/069_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/070_placeholder.py b/nova/db/api/legacy_migrations/versions/070_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/070_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/071_placeholder.py b/nova/db/api/legacy_migrations/versions/071_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/071_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/072_placeholder.py b/nova/db/api/legacy_migrations/versions/072_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/072_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/073_placeholder.py b/nova/db/api/legacy_migrations/versions/073_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/073_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/074_placeholder.py b/nova/db/api/legacy_migrations/versions/074_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/074_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/075_placeholder.py b/nova/db/api/legacy_migrations/versions/075_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/075_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/076_placeholder.py b/nova/db/api/legacy_migrations/versions/076_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/076_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/077_placeholder.py b/nova/db/api/legacy_migrations/versions/077_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/077_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/078_placeholder.py b/nova/db/api/legacy_migrations/versions/078_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/078_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/079_placeholder.py b/nova/db/api/legacy_migrations/versions/079_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/079_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/080_placeholder.py b/nova/db/api/legacy_migrations/versions/080_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/080_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/081_placeholder.py b/nova/db/api/legacy_migrations/versions/081_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/081_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/082_placeholder.py b/nova/db/api/legacy_migrations/versions/082_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/082_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/083_placeholder.py b/nova/db/api/legacy_migrations/versions/083_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/083_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/084_placeholder.py b/nova/db/api/legacy_migrations/versions/084_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/084_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/085_placeholder.py b/nova/db/api/legacy_migrations/versions/085_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/085_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/086_placeholder.py b/nova/db/api/legacy_migrations/versions/086_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/086_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/087_placeholder.py b/nova/db/api/legacy_migrations/versions/087_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/api/legacy_migrations/versions/087_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/api/legacy_migrations/versions/__init__.py b/nova/db/api/legacy_migrations/versions/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/nova/db/api/legacy_migrations/versions/__init__.py
+++ /dev/null
diff --git a/nova/db/main/legacy_migrations/README b/nova/db/main/legacy_migrations/README
deleted file mode 100644
index c5f51f2280..0000000000
--- a/nova/db/main/legacy_migrations/README
+++ /dev/null
@@ -1,4 +0,0 @@
-This is a database migration repository.
-
-More information at
-https://sqlalchemy-migrate.readthedocs.io/en/latest/
diff --git a/nova/db/main/legacy_migrations/__init__.py b/nova/db/main/legacy_migrations/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/nova/db/main/legacy_migrations/__init__.py
+++ /dev/null
diff --git a/nova/db/main/legacy_migrations/manage.py b/nova/db/main/legacy_migrations/manage.py
deleted file mode 100644
index 6c2b3842ba..0000000000
--- a/nova/db/main/legacy_migrations/manage.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from migrate.versioning.shell import main
-
-
-if __name__ == '__main__':
- main(debug='False', repository='.')
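The manage.py removed above was only a thin wrapper around sqlalchemy-migrate's command shell. As a minimal sketch, assuming sqlalchemy-migrate's migrate.versioning.api module is still installed (it is not part of this change) and using a placeholder database URL, the equivalent programmatic calls would look like:

from migrate.versioning import api

DB_URL = 'sqlite:///nova.db'  # hypothetical URL; substitute the real connection string
REPO = '.'                    # path to the legacy_migrations repository directory

api.version_control(DB_URL, REPO)    # place the database under version control (errors if already controlled)
api.upgrade(DB_URL, REPO)            # apply all migration scripts up to the latest version
print(api.db_version(DB_URL, REPO))  # report the schema version currently recorded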
diff --git a/nova/db/main/legacy_migrations/migrate.cfg b/nova/db/main/legacy_migrations/migrate.cfg
deleted file mode 100644
index 006e01e406..0000000000
--- a/nova/db/main/legacy_migrations/migrate.cfg
+++ /dev/null
@@ -1,20 +0,0 @@
-[db_settings]
-# Used to identify which repository this database is versioned under.
-# You can use the name of your project.
-repository_id=nova
-
-# The name of the database table used to track the schema version.
-# This name shouldn't already be used by your project.
-# If this is changed once a database is under version control, you'll need to
-# change the table name in each database too.
-version_table=migrate_version
-
-# When committing a change script, Migrate will attempt to generate the
-# sql for all supported databases; normally, if one of them fails - probably
-# because you don't have that database installed - it is ignored and the
-# commit continues, perhaps ending successfully.
-# Databases in this list MUST compile successfully during a commit, or the
-# entire commit will fail. List the databases your application will actually
-# be using to ensure your updates to that database work properly.
-# This must be a list; example: ['postgres','sqlite']
-required_dbs=[]
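The version_table named in the configuration removed above is where sqlalchemy-migrate recorded the current schema version. As a minimal sketch, assuming the conventional migrate_version layout (repository_id, repository_path, version) and a placeholder database URL, that record can be inspected directly with plain SQLAlchemy:

import sqlalchemy as sa

engine = sa.create_engine('sqlite:///nova.db')  # hypothetical URL
with engine.connect() as conn:
    # sqlalchemy-migrate keeps a single row per repository in this table
    row = conn.execute(
        sa.text('SELECT repository_id, version FROM migrate_version')
    ).fetchone()
    print(row)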
diff --git a/nova/db/main/legacy_migrations/versions/402_train.py b/nova/db/main/legacy_migrations/versions/402_train.py
deleted file mode 100644
index 5a39d87f8c..0000000000
--- a/nova/db/main/legacy_migrations/versions/402_train.py
+++ /dev/null
@@ -1,1619 +0,0 @@
-# Copyright 2012 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from migrate.changeset import UniqueConstraint
-from oslo_log import log as logging
-import sqlalchemy as sa
-from sqlalchemy import dialects
-from sqlalchemy.ext import compiler
-from sqlalchemy import types as sqla_types
-
-from nova.db import types
-from nova.objects import keypair
-
-LOG = logging.getLogger(__name__)
-
-
-def Inet():
- return sa.String(length=43).with_variant(
- dialects.postgresql.INET(), 'postgresql',
- )
-
-
-def InetSmall():
- return sa.String(length=39).with_variant(
- dialects.postgresql.INET(), 'postgresql',
- )
-
-
-# We explicitly name many of our foreignkeys for MySQL so they match Havana
-@compiler.compiles(sa.ForeignKeyConstraint, 'postgresql')
-def process(element, compiler, **kw):
- element.name = None
- return compiler.visit_foreign_key_constraint(element, **kw)
-
-
-def _create_shadow_tables(migrate_engine):
- meta = sa.MetaData()
- meta.reflect(migrate_engine)
- table_names = list(meta.tables.keys())
-
- # NOTE(stephenfin): This is not compatible with SQLAlchemy 2.0 but neither
- # is sqlalchemy-migrate which requires this. We'll remove these migrations
- # when dropping SQLAlchemy < 2.x support
- meta.bind = migrate_engine
-
- for table_name in table_names:
- # Skip tables that are not soft-deletable
- if table_name in (
- 'tags',
- 'resource_providers',
- 'inventories',
- 'allocations',
- 'resource_provider_aggregates',
- 'console_auth_tokens',
- ):
- continue
-
- table = sa.Table(table_name, meta, autoload_with=migrate_engine)
-
- columns = []
- for column in table.columns:
- column_copy = None
-
- # NOTE(boris-42): BigInteger is not supported by sqlite, so after
- # copy it will have NullType. The other types that are used in Nova
- # are supported by sqlite
- if isinstance(column.type, sqla_types.NullType):
- column_copy = sa.Column(
- column.name, sa.BigInteger(), default=0,
- )
-
- if table_name == 'instances' and column.name == 'locked_by':
- enum = sa.Enum(
- 'owner', 'admin', name='shadow_instances0locked_by',
- )
- column_copy = sa.Column(column.name, enum)
-
- # TODO(stephenfin): Fix these various bugs in a follow-up
-
- # 244_increase_user_id_length_volume_usage_cache; this
- # alteration should apply to shadow tables also
-
- if table_name == 'volume_usage_cache' and column.name == 'user_id':
- # nullable should be True
- column_copy = sa.Column('user_id', sa.String(36))
-
- # 247_nullable_mismatch; these alterations should apply to shadow
- # tables also
-
- if table_name == 'quota_usages' and column.name == 'resources':
- # nullable should be False
- column_copy = sa.Column('resource', sa.String(length=255))
-
- if table_name == 'pci_devices':
- if column.name == 'deleted':
- # nullable should be True
- column_copy = sa.Column(
- 'deleted', sa.Integer, default=0, nullable=False,
- )
-
- if column.name == 'product_id':
- # nullable should be False
- column_copy = sa.Column('product_id', sa.String(4))
-
- if column.name == 'vendor_id':
- # nullable should be False
- column_copy = sa.Column('vendor_id', sa.String(4))
-
- if column.name == 'dev_type':
- # nullable should be False
- column_copy = sa.Column('dev_type', sa.String(8))
-
- # 280_add_nullable_false_to_keypairs_name; this should apply to the
- # shadow table also
-
- if table_name == 'key_pairs' and column.name == 'name':
- # nullable should be False
- column_copy = sa.Column('name', sa.String(length=255))
-
- # NOTE(stephenfin): By default, 'sqlalchemy.Enum' will issue a
- # 'CREATE TYPE' command on PostgreSQL, even if the type already
- # exists. We work around this by using the PostgreSQL-specific
- # 'sqlalchemy.dialects.postgresql.ENUM' type and setting
- # 'create_type' to 'False'. See [1] for more information.
- #
- # [1] https://stackoverflow.com/a/28894354/613428
- if migrate_engine.name == 'postgresql':
- if table_name == 'key_pairs' and column.name == 'type':
- enum = dialects.postgresql.ENUM(
- 'ssh', 'x509', name='keypair_types', create_type=False)
- column_copy = sa.Column(
- column.name, enum, nullable=False,
- server_default=keypair.KEYPAIR_TYPE_SSH)
- elif (
- table_name == 'migrations' and
- column.name == 'migration_type'
- ):
- enum = dialects.postgresql.ENUM(
- 'migration', 'resize', 'live-migration', 'evacuation',
- name='migration_type', create_type=False)
- column_copy = sa.Column(column.name, enum, nullable=True)
-
- if column_copy is None:
- # NOTE(stephenfin): Yes, this is private. Yes, this is what we
- # were told to use. Blame zzzeek!
- column_copy = column._copy()
-
- columns.append(column_copy)
-
- shadow_table = sa.Table(
- 'shadow_' + table_name, meta, *columns, mysql_engine='InnoDB',
- )
-
- try:
- shadow_table.create()
- except Exception:
- LOG.info(repr(shadow_table))
- LOG.exception('Exception while creating table.')
- raise
-
- # TODO(stephenfin): Fix these various bugs in a follow-up
-
- # 252_add_instance_extra_table; we don't create indexes for shadow tables
- # in general and these should be removed
-
- table = sa.Table(
- 'shadow_instance_extra', meta, autoload_with=migrate_engine,
- )
- idx = sa.Index('shadow_instance_extra_idx', table.c.instance_uuid)
- idx.create(migrate_engine)
-
- # 373_migration_uuid; we shouldn't create indexes for shadow tables
-
- table = sa.Table('shadow_migrations', meta, autoload_with=migrate_engine)
- idx = sa.Index('shadow_migrations_uuid', table.c.uuid, unique=True)
- idx.create(migrate_engine)
-
-
-def upgrade(migrate_engine):
- meta = sa.MetaData()
- # NOTE(stephenfin): This is not compatible with SQLAlchemy 2.0 but neither
- # is sqlalchemy-migrate which requires this. We'll remove these migrations
- # when dropping SQLAlchemy < 2.x support
- meta.bind = migrate_engine
-
- agent_builds = sa.Table('agent_builds', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('hypervisor', sa.String(length=255)),
- sa.Column('os', sa.String(length=255)),
- sa.Column('architecture', sa.String(length=255)),
- sa.Column('version', sa.String(length=255)),
- sa.Column('url', sa.String(length=255)),
- sa.Column('md5hash', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Index(
- 'agent_builds_hypervisor_os_arch_idx',
- 'hypervisor', 'os', 'architecture'),
- UniqueConstraint(
- 'hypervisor', 'os', 'architecture', 'deleted',
- name='uniq_agent_builds0hypervisor0os0architecture0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- aggregate_hosts = sa.Table('aggregate_hosts', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('host', sa.String(length=255)),
- sa.Column(
- 'aggregate_id', sa.Integer, sa.ForeignKey('aggregates.id'),
- nullable=False),
- sa.Column('deleted', sa.Integer),
- UniqueConstraint(
- 'host', 'aggregate_id', 'deleted',
- name='uniq_aggregate_hosts0host0aggregate_id0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- aggregate_metadata = sa.Table('aggregate_metadata', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'aggregate_id', sa.Integer, sa.ForeignKey('aggregates.id'),
- nullable=False),
- sa.Column('key', sa.String(length=255), nullable=False),
- sa.Column('value', sa.String(length=255), nullable=False),
- sa.Column('deleted', sa.Integer),
- sa.Index('aggregate_metadata_key_idx', 'key'),
- sa.Index('aggregate_metadata_value_idx', 'value'),
- UniqueConstraint(
- 'aggregate_id', 'key', 'deleted',
- name='uniq_aggregate_metadata0aggregate_id0key0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- aggregates = sa.Table('aggregates', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('name', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Column('uuid', sa.String(36)),
- sa.Index('aggregate_uuid_idx', 'uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- allocations = sa.Table('allocations', meta,
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('resource_provider_id', sa.Integer, nullable=False),
- sa.Column('consumer_id', sa.String(36), nullable=False),
- sa.Column('resource_class_id', sa.Integer, nullable=False),
- sa.Column('used', sa.Integer, nullable=False),
- sa.Index(
- 'allocations_resource_provider_class_used_idx',
- 'resource_provider_id', 'resource_class_id', 'used'),
- sa.Index('allocations_consumer_id_idx', 'consumer_id'),
- sa.Index('allocations_resource_class_id_idx', 'resource_class_id'),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- block_device_mapping = sa.Table('block_device_mapping', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('device_name', sa.String(length=255), nullable=True),
- sa.Column('delete_on_termination', sa.Boolean),
- sa.Column('snapshot_id', sa.String(length=36), nullable=True),
- sa.Column('volume_id', sa.String(length=36), nullable=True),
- sa.Column('volume_size', sa.Integer),
- sa.Column('no_device', sa.Boolean),
- sa.Column('connection_info', types.MediumText()),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid',
- name='block_device_mapping_instance_uuid_fkey')),
- sa.Column('deleted', sa.Integer),
- sa.Column('source_type', sa.String(length=255), nullable=True),
- sa.Column('destination_type', sa.String(length=255), nullable=True),
- sa.Column('guest_format', sa.String(length=255), nullable=True),
- sa.Column('device_type', sa.String(length=255), nullable=True),
- sa.Column('disk_bus', sa.String(length=255), nullable=True),
- sa.Column('boot_index', sa.Integer),
- sa.Column('image_id', sa.String(length=36), nullable=True),
- sa.Column('tag', sa.String(255)),
- sa.Column('attachment_id', sa.String(36), nullable=True),
- sa.Column('uuid', sa.String(36), nullable=True),
- sa.Column('volume_type', sa.String(255), nullable=True),
- sa.Index('snapshot_id', 'snapshot_id'),
- sa.Index('volume_id', 'volume_id'),
- sa.Index('block_device_mapping_instance_uuid_idx', 'instance_uuid'),
- sa.Index(
- 'block_device_mapping_instance_uuid_device_name_idx',
- 'instance_uuid', 'device_name'),
- sa.Index(
- 'block_device_mapping_instance_uuid_volume_id_idx',
- 'instance_uuid', 'volume_id'),
- UniqueConstraint('uuid', name='uniq_block_device_mapping0uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- bw_usage_cache = sa.Table('bw_usage_cache', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('start_period', sa.DateTime, nullable=False),
- sa.Column('last_refreshed', sa.DateTime),
- sa.Column('bw_in', sa.BigInteger),
- sa.Column('bw_out', sa.BigInteger),
- sa.Column('mac', sa.String(length=255)),
- sa.Column('uuid', sa.String(length=36)),
- sa.Column('last_ctr_in', sa.BigInteger()),
- sa.Column('last_ctr_out', sa.BigInteger()),
- sa.Column('deleted', sa.Integer),
- sa.Index(
- 'bw_usage_cache_uuid_start_period_idx',
- 'uuid', 'start_period'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- cells = sa.Table('cells', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('api_url', sa.String(length=255)),
- sa.Column('weight_offset', sa.Float),
- sa.Column('weight_scale', sa.Float),
- sa.Column('name', sa.String(length=255)),
- sa.Column('is_parent', sa.Boolean),
- sa.Column('deleted', sa.Integer),
- sa.Column('transport_url', sa.String(length=255), nullable=False),
- UniqueConstraint(
- 'name', 'deleted',
- name='uniq_cells0name0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- certificates = sa.Table('certificates', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('user_id', sa.String(length=255)),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('file_name', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Index(
- 'certificates_project_id_deleted_idx',
- 'project_id', 'deleted'),
- sa.Index('certificates_user_id_deleted_idx', 'user_id', 'deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- compute_nodes = sa.Table('compute_nodes', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('service_id', sa.Integer, nullable=True),
- sa.Column('vcpus', sa.Integer, nullable=False),
- sa.Column('memory_mb', sa.Integer, nullable=False),
- sa.Column('local_gb', sa.Integer, nullable=False),
- sa.Column('vcpus_used', sa.Integer, nullable=False),
- sa.Column('memory_mb_used', sa.Integer, nullable=False),
- sa.Column('local_gb_used', sa.Integer, nullable=False),
- sa.Column('hypervisor_type', types.MediumText(), nullable=False),
- sa.Column('hypervisor_version', sa.Integer, nullable=False),
- sa.Column('cpu_info', types.MediumText(), nullable=False),
- sa.Column('disk_available_least', sa.Integer),
- sa.Column('free_ram_mb', sa.Integer),
- sa.Column('free_disk_gb', sa.Integer),
- sa.Column('current_workload', sa.Integer),
- sa.Column('running_vms', sa.Integer),
- sa.Column('hypervisor_hostname', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Column('host_ip', InetSmall()),
- sa.Column('supported_instances', sa.Text),
- sa.Column('pci_stats', sa.Text, nullable=True),
- sa.Column('metrics', sa.Text, nullable=True),
- sa.Column('extra_resources', sa.Text, nullable=True),
- sa.Column('stats', sa.Text, default='{}'),
- sa.Column('numa_topology', sa.Text, nullable=True),
- sa.Column('host', sa.String(255), nullable=True),
- sa.Column('ram_allocation_ratio', sa.Float, nullable=True),
- sa.Column('cpu_allocation_ratio', sa.Float, nullable=True),
- sa.Column('uuid', sa.String(36), nullable=True),
- sa.Column('disk_allocation_ratio', sa.Float, nullable=True),
- sa.Column('mapped', sa.Integer, default=0, nullable=True),
- sa.Index('compute_nodes_uuid_idx', 'uuid', unique=True),
- UniqueConstraint(
- 'host', 'hypervisor_hostname', 'deleted',
- name='uniq_compute_nodes0host0hypervisor_hostname0deleted',
- ),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- console_auth_tokens = sa.Table('console_auth_tokens', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('token_hash', sa.String(255), nullable=False),
- sa.Column('console_type', sa.String(255), nullable=False),
- sa.Column('host', sa.String(255), nullable=False),
- sa.Column('port', sa.Integer, nullable=False),
- sa.Column('internal_access_path', sa.String(255)),
- sa.Column('instance_uuid', sa.String(36), nullable=False),
- sa.Column('expires', sa.Integer, nullable=False),
- sa.Column('access_url_base', sa.String(255), nullable=True),
- sa.Index('console_auth_tokens_instance_uuid_idx', 'instance_uuid'),
- sa.Index('console_auth_tokens_host_expires_idx', 'host', 'expires'),
- sa.Index('console_auth_tokens_token_hash_idx', 'token_hash'),
- sa.Index(
- 'console_auth_tokens_token_hash_instance_uuid_idx',
- 'token_hash', 'instance_uuid'),
- UniqueConstraint(
- 'token_hash', name='uniq_console_auth_tokens0token_hash'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- console_pools = sa.Table('console_pools', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('address', InetSmall()),
- sa.Column('username', sa.String(length=255)),
- sa.Column('password', sa.String(length=255)),
- sa.Column('console_type', sa.String(length=255)),
- sa.Column('public_hostname', sa.String(length=255)),
- sa.Column('host', sa.String(length=255)),
- sa.Column('compute_host', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- UniqueConstraint(
- 'host', 'console_type', 'compute_host', 'deleted',
- name='uniq_console_pools0host0console_type0compute_host0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- consoles = sa.Table('consoles', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('instance_name', sa.String(length=255)),
- sa.Column('password', sa.String(length=255)),
- sa.Column('port', sa.Integer),
- sa.Column('pool_id', sa.Integer, sa.ForeignKey('console_pools.id')),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid', name='consoles_instance_uuid_fkey')),
- sa.Column('deleted', sa.Integer),
- sa.Index('consoles_instance_uuid_idx', 'instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- dns_domains = sa.Table('dns_domains', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('deleted', sa.Boolean),
- sa.Column(
- 'domain', sa.String(length=255), primary_key=True, nullable=False),
- sa.Column('scope', sa.String(length=255)),
- sa.Column('availability_zone', sa.String(length=255)),
- sa.Column('project_id', sa.String(length=255)),
- sa.Index('dns_domains_domain_deleted_idx', 'domain', 'deleted'),
- sa.Index('dns_domains_project_id_idx', 'project_id'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- fixed_ips = sa.Table('fixed_ips', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('address', InetSmall()),
- sa.Column('network_id', sa.Integer),
- sa.Column('allocated', sa.Boolean),
- sa.Column('leased', sa.Boolean),
- sa.Column('reserved', sa.Boolean),
- sa.Column('virtual_interface_id', sa.Integer),
- sa.Column('host', sa.String(length=255)),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid', name='fixed_ips_instance_uuid_fkey'),
- ),
- sa.Column('deleted', sa.Integer),
- sa.Index('network_id', 'network_id'),
- sa.Index('address', 'address'),
- sa.Index('fixed_ips_instance_uuid_fkey', 'instance_uuid'),
- sa.Index(
- 'fixed_ips_virtual_interface_id_fkey',
- 'virtual_interface_id'),
- sa.Index('fixed_ips_host_idx', 'host'),
- sa.Index(
- 'fixed_ips_network_id_host_deleted_idx', 'network_id',
- 'host', 'deleted'),
- sa.Index(
- 'fixed_ips_address_reserved_network_id_deleted_idx',
- 'address', 'reserved',
- 'network_id', 'deleted'),
- sa.Index(
- 'fixed_ips_deleted_allocated_idx',
- 'address', 'deleted', 'allocated'),
- sa.Index(
- 'fixed_ips_deleted_allocated_updated_at_idx',
- 'deleted', 'allocated', 'updated_at'),
- UniqueConstraint(
- 'address', 'deleted',
- name='uniq_fixed_ips0address0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- floating_ips = sa.Table('floating_ips', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('address', InetSmall()),
- sa.Column('fixed_ip_id', sa.Integer),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('host', sa.String(length=255)),
- sa.Column('auto_assigned', sa.Boolean),
- sa.Column('pool', sa.String(length=255)),
- sa.Column('interface', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Index('fixed_ip_id', 'fixed_ip_id'),
- sa.Index('floating_ips_host_idx', 'host'),
- sa.Index('floating_ips_project_id_idx', 'project_id'),
- sa.Index(
- 'floating_ips_pool_deleted_fixed_ip_id_project_id_idx',
- 'pool', 'deleted', 'fixed_ip_id', 'project_id'),
- UniqueConstraint(
- 'address', 'deleted',
- name='uniq_floating_ips0address0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- instance_faults = sa.Table('instance_faults', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid', name='fk_instance_faults_instance_uuid')),
- sa.Column('code', sa.Integer, nullable=False),
- sa.Column('message', sa.String(length=255)),
- sa.Column('details', types.MediumText()),
- sa.Column('host', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Index('instance_faults_host_idx', 'host'),
- sa.Index(
- 'instance_faults_instance_uuid_deleted_created_at_idx',
- 'instance_uuid', 'deleted', 'created_at'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- instance_id_mappings = sa.Table('instance_id_mappings', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(36), nullable=False),
- sa.Column('deleted', sa.Integer),
- sa.Index('ix_instance_id_mappings_uuid', 'uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- instance_info_caches = sa.Table('instance_info_caches', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('network_info', types.MediumText()),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid',
- name='instance_info_caches_instance_uuid_fkey'),
- nullable=False),
- sa.Column('deleted', sa.Integer),
- UniqueConstraint(
- 'instance_uuid',
- name='uniq_instance_info_caches0instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- groups = sa.Table('instance_groups', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('deleted', sa.Integer),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('user_id', sa.String(length=255)),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column('name', sa.String(length=255)),
- UniqueConstraint(
- 'uuid', 'deleted',
- name='uniq_instance_groups0uuid0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- group_policy = sa.Table('instance_group_policy', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('deleted', sa.Integer),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('policy', sa.String(length=255)),
- sa.Column(
- 'group_id', sa.Integer, sa.ForeignKey('instance_groups.id'),
- nullable=False),
- sa.Index('instance_group_policy_policy_idx', 'policy'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- group_member = sa.Table('instance_group_member', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('deleted', sa.Integer),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('instance_id', sa.String(length=255)),
- sa.Column(
- 'group_id', sa.Integer, sa.ForeignKey('instance_groups.id'),
- nullable=False),
- sa.Index(
- 'instance_group_member_instance_idx',
- 'instance_id'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- instance_metadata = sa.Table('instance_metadata', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('key', sa.String(length=255)),
- sa.Column('value', sa.String(length=255)),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid', name='instance_metadata_instance_uuid_fkey'),
- nullable=True),
- sa.Column('deleted', sa.Integer),
- sa.Index('instance_metadata_instance_uuid_idx', 'instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- instance_system_metadata = sa.Table('instance_system_metadata', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid', name='instance_system_metadata_ibfk_1'),
- nullable=False),
- sa.Column('key', sa.String(length=255), nullable=False),
- sa.Column('value', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Index('instance_uuid', 'instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- # TODO(stephenfin): Remove this table since it has been moved to the API DB
- instance_type_extra_specs = sa.Table('instance_type_extra_specs', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'instance_type_id', sa.Integer, sa.ForeignKey('instance_types.id'),
- nullable=False),
- sa.Column('key', sa.String(length=255)),
- sa.Column('value', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Index(
- 'instance_type_extra_specs_instance_type_id_key_idx',
- 'instance_type_id', 'key'),
- UniqueConstraint(
- 'instance_type_id', 'key', 'deleted',
- name='uniq_instance_type_extra_specs0instance_type_id0key0deleted'
- ),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- # TODO(stephenfin): Remove this table since it has been moved to the API DB
- instance_type_projects = sa.Table('instance_type_projects', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'instance_type_id', sa.Integer,
- sa.ForeignKey(
- 'instance_types.id', name='instance_type_projects_ibfk_1'),
- nullable=False),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- UniqueConstraint(
- 'instance_type_id', 'project_id', 'deleted',
- name='uniq_instance_type_projects0instance_type_id0project_id'
- '0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- # TODO(stephenfin): Remove this table since it has been moved to the API DB
- instance_types = sa.Table('instance_types', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('name', sa.String(length=255)),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('memory_mb', sa.Integer, nullable=False),
- sa.Column('vcpus', sa.Integer, nullable=False),
- sa.Column('swap', sa.Integer, nullable=False),
- sa.Column('vcpu_weight', sa.Integer),
- sa.Column('flavorid', sa.String(length=255)),
- sa.Column('rxtx_factor', sa.Float),
- sa.Column('root_gb', sa.Integer),
- sa.Column('ephemeral_gb', sa.Integer),
- sa.Column('disabled', sa.Boolean),
- sa.Column('is_public', sa.Boolean),
- sa.Column('deleted', sa.Integer),
- UniqueConstraint(
- 'name', 'deleted',
- name='uniq_instance_types0name0deleted'),
- UniqueConstraint(
- 'flavorid', 'deleted',
- name='uniq_instance_types0flavorid0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- instances = sa.Table('instances', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('internal_id', sa.Integer),
- sa.Column('user_id', sa.String(length=255)),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('image_ref', sa.String(length=255)),
- sa.Column('kernel_id', sa.String(length=255)),
- sa.Column('ramdisk_id', sa.String(length=255)),
- sa.Column('launch_index', sa.Integer),
- sa.Column('key_name', sa.String(length=255)),
- sa.Column('key_data', types.MediumText()),
- sa.Column('power_state', sa.Integer),
- sa.Column('vm_state', sa.String(length=255)),
- sa.Column('memory_mb', sa.Integer),
- sa.Column('vcpus', sa.Integer),
- sa.Column('hostname', sa.String(length=255)),
- sa.Column('host', sa.String(length=255)),
- sa.Column('user_data', types.MediumText()),
- sa.Column('reservation_id', sa.String(length=255)),
- sa.Column('launched_at', sa.DateTime),
- sa.Column('terminated_at', sa.DateTime),
- sa.Column('display_name', sa.String(length=255)),
- sa.Column('display_description', sa.String(length=255)),
- sa.Column('availability_zone', sa.String(length=255)),
- sa.Column('locked', sa.Boolean),
- sa.Column('os_type', sa.String(length=255)),
- sa.Column('launched_on', types.MediumText()),
- sa.Column('instance_type_id', sa.Integer),
- sa.Column('vm_mode', sa.String(length=255)),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column('architecture', sa.String(length=255)),
- sa.Column('root_device_name', sa.String(length=255)),
- sa.Column('access_ip_v4', InetSmall()),
- sa.Column('access_ip_v6', InetSmall()),
- sa.Column('config_drive', sa.String(length=255)),
- sa.Column('task_state', sa.String(length=255)),
- sa.Column('default_ephemeral_device', sa.String(length=255)),
- sa.Column('default_swap_device', sa.String(length=255)),
- sa.Column('progress', sa.Integer),
- sa.Column('auto_disk_config', sa.Boolean),
- sa.Column('shutdown_terminate', sa.Boolean),
- sa.Column('disable_terminate', sa.Boolean),
- sa.Column('root_gb', sa.Integer),
- sa.Column('ephemeral_gb', sa.Integer),
- sa.Column('cell_name', sa.String(length=255)),
- sa.Column('node', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Column(
- 'locked_by',
- sa.Enum('owner', 'admin', name='instances0locked_by')),
- sa.Column('cleaned', sa.Integer, default=0),
- sa.Column('ephemeral_key_uuid', sa.String(36)),
- # NOTE(danms): This column originally included default=False. We
- # discovered in bug #1862205 that this will attempt to rewrite
- # the entire instances table with that value, which can time out
- # for large data sets (and does not even abort).
- # NOTE(stephenfin): This was originally added by sqlalchemy-migrate
- # which did not generate the constraints
- sa.Column('hidden', sa.Boolean(create_constraint=False)),
- sa.Index('uuid', 'uuid', unique=True),
- sa.Index('instances_reservation_id_idx', 'reservation_id'),
- sa.Index(
- 'instances_terminated_at_launched_at_idx',
- 'terminated_at', 'launched_at'),
- sa.Index(
- 'instances_task_state_updated_at_idx',
- 'task_state', 'updated_at'),
- sa.Index('instances_uuid_deleted_idx', 'uuid', 'deleted'),
- sa.Index('instances_host_node_deleted_idx', 'host', 'node', 'deleted'),
- sa.Index(
- 'instances_host_deleted_cleaned_idx',
- 'host', 'deleted', 'cleaned'),
- sa.Index('instances_project_id_deleted_idx', 'project_id', 'deleted'),
- sa.Index('instances_deleted_created_at_idx', 'deleted', 'created_at'),
- sa.Index('instances_project_id_idx', 'project_id'),
- sa.Index(
- 'instances_updated_at_project_id_idx',
- 'updated_at', 'project_id'),
- UniqueConstraint('uuid', name='uniq_instances0uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- instance_actions = sa.Table('instance_actions', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('action', sa.String(length=255)),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid', name='fk_instance_actions_instance_uuid')),
- sa.Column('request_id', sa.String(length=255)),
- sa.Column('user_id', sa.String(length=255)),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('start_time', sa.DateTime),
- sa.Column('finish_time', sa.DateTime),
- sa.Column('message', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Index('instance_uuid_idx', 'instance_uuid'),
- sa.Index('request_id_idx', 'request_id'),
- sa.Index(
- 'instance_actions_instance_uuid_updated_at_idx',
- 'instance_uuid', 'updated_at'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- instance_actions_events = sa.Table('instance_actions_events', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('event', sa.String(length=255)),
- sa.Column(
- 'action_id', sa.Integer, sa.ForeignKey('instance_actions.id')),
- sa.Column('start_time', sa.DateTime),
- sa.Column('finish_time', sa.DateTime),
- sa.Column('result', sa.String(length=255)),
- sa.Column('traceback', sa.Text),
- sa.Column('deleted', sa.Integer),
- sa.Column('host', sa.String(255)),
- sa.Column('details', sa.Text),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- instance_extra = sa.Table('instance_extra', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('deleted', sa.Integer),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid', name='instance_extra_instance_uuid_fkey'),
- nullable=False),
- sa.Column('numa_topology', sa.Text, nullable=True),
- sa.Column('pci_requests', sa.Text, nullable=True),
- sa.Column('flavor', sa.Text, nullable=True),
- sa.Column('vcpu_model', sa.Text, nullable=True),
- sa.Column('migration_context', sa.Text, nullable=True),
- sa.Column('keypairs', sa.Text, nullable=True),
- sa.Column('device_metadata', sa.Text, nullable=True),
- sa.Column('trusted_certs', sa.Text, nullable=True),
- sa.Column('vpmems', sa.Text, nullable=True),
- sa.Column('resources', sa.Text, nullable=True),
- sa.Index('instance_extra_idx', 'instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- inventories = sa.Table('inventories', meta,
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('resource_provider_id', sa.Integer, nullable=False),
- sa.Column('resource_class_id', sa.Integer, nullable=False),
- sa.Column('total', sa.Integer, nullable=False),
- sa.Column('reserved', sa.Integer, nullable=False),
- sa.Column('min_unit', sa.Integer, nullable=False),
- sa.Column('max_unit', sa.Integer, nullable=False),
- sa.Column('step_size', sa.Integer, nullable=False),
- sa.Column('allocation_ratio', sa.Float, nullable=False),
- sa.Index(
- 'inventories_resource_provider_id_idx', 'resource_provider_id'),
- sa.Index(
- 'inventories_resource_class_id_idx', 'resource_class_id'),
- sa.Index(
- 'inventories_resource_provider_resource_class_idx',
- 'resource_provider_id', 'resource_class_id'),
- UniqueConstraint(
- 'resource_provider_id', 'resource_class_id',
- name='uniq_inventories0resource_provider_resource_class'),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- key_pairs = sa.Table('key_pairs', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('name', sa.String(length=255), nullable=False),
- sa.Column('user_id', sa.String(length=255)),
- sa.Column('fingerprint', sa.String(length=255)),
- sa.Column('public_key', types.MediumText()),
- sa.Column('deleted', sa.Integer),
- sa.Column(
- 'type', sa.Enum('ssh', 'x509', name='keypair_types'),
- nullable=False, server_default=keypair.KEYPAIR_TYPE_SSH),
- UniqueConstraint(
- 'user_id', 'name', 'deleted',
- name='uniq_key_pairs0user_id0name0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- migrations = sa.Table('migrations', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('source_compute', sa.String(length=255)),
- sa.Column('dest_compute', sa.String(length=255)),
- sa.Column('dest_host', sa.String(length=255)),
- sa.Column('status', sa.String(length=255)),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid', name='fk_migrations_instance_uuid')),
- sa.Column('old_instance_type_id', sa.Integer),
- sa.Column('new_instance_type_id', sa.Integer),
- sa.Column('source_node', sa.String(length=255)),
- sa.Column('dest_node', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- sa.Column(
- 'migration_type',
- sa.Enum(
- 'migration', 'resize', 'live-migration', 'evacuation',
- name='migration_type'),
- nullable=True),
- # NOTE(stephenfin): This was originally added by sqlalchemy-migrate
- # which did not generate the constraints
- sa.Column(
- 'hidden', sa.Boolean(create_constraint=False), default=False),
- sa.Column('memory_total', sa.BigInteger, nullable=True),
- sa.Column('memory_processed', sa.BigInteger, nullable=True),
- sa.Column('memory_remaining', sa.BigInteger, nullable=True),
- sa.Column('disk_total', sa.BigInteger, nullable=True),
- sa.Column('disk_processed', sa.BigInteger, nullable=True),
- sa.Column('disk_remaining', sa.BigInteger, nullable=True),
- sa.Column('uuid', sa.String(36)),
- # NOTE(stephenfin): This was originally added by sqlalchemy-migrate
- # which did not generate the constraints
- sa.Column(
- 'cross_cell_move', sa.Boolean(create_constraint=False),
- default=False),
- sa.Column('user_id', sa.String(255), nullable=True),
- sa.Column('project_id', sa.String(255), nullable=True),
- sa.Index('migrations_uuid', 'uuid', unique=True),
- sa.Index(
- 'migrations_instance_uuid_and_status_idx',
- 'deleted', 'instance_uuid', 'status'),
- sa.Index('migrations_updated_at_idx', 'updated_at'),
- # mysql-specific index by leftmost 100 chars. (mysql gets angry if the
- # index key length is too long.)
- sa.Index(
- 'migrations_by_host_nodes_and_status_idx',
- 'deleted', 'source_compute', 'dest_compute', 'source_node',
- 'dest_node', 'status',
- mysql_length={
- 'source_compute': 100,
- 'dest_compute': 100,
- 'source_node': 100,
- 'dest_node': 100,
- }),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- networks = sa.Table('networks', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('injected', sa.Boolean),
- sa.Column('cidr', Inet()),
- sa.Column('netmask', InetSmall()),
- sa.Column('bridge', sa.String(length=255)),
- sa.Column('gateway', InetSmall()),
- sa.Column('broadcast', InetSmall()),
- sa.Column('dns1', InetSmall()),
- sa.Column('vlan', sa.Integer),
- sa.Column('vpn_public_address', InetSmall()),
- sa.Column('vpn_public_port', sa.Integer),
- sa.Column('vpn_private_address', InetSmall()),
- sa.Column('dhcp_start', InetSmall()),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('host', sa.String(length=255)),
- sa.Column('cidr_v6', Inet()),
- sa.Column('gateway_v6', InetSmall()),
- sa.Column('label', sa.String(length=255)),
- sa.Column('netmask_v6', InetSmall()),
- sa.Column('bridge_interface', sa.String(length=255)),
- sa.Column('multi_host', sa.Boolean),
- sa.Column('dns2', InetSmall()),
- sa.Column('uuid', sa.String(length=36)),
- sa.Column('priority', sa.Integer),
- sa.Column('rxtx_base', sa.Integer),
- sa.Column('deleted', sa.Integer),
- sa.Column('mtu', sa.Integer),
- sa.Column('dhcp_server', types.IPAddress),
- # NOTE(stephenfin): These were originally added by sqlalchemy-migrate
- # which did not generate the constraints
- sa.Column(
- 'enable_dhcp', sa.Boolean(create_constraint=False), default=True),
- sa.Column(
- 'share_address', sa.Boolean(create_constraint=False),
- default=False),
- sa.Index('networks_host_idx', 'host'),
- sa.Index('networks_cidr_v6_idx', 'cidr_v6'),
- sa.Index('networks_bridge_deleted_idx', 'bridge', 'deleted'),
- sa.Index('networks_project_id_deleted_idx', 'project_id', 'deleted'),
- sa.Index(
- 'networks_uuid_project_id_deleted_idx',
- 'uuid', 'project_id', 'deleted'),
- sa.Index('networks_vlan_deleted_idx', 'vlan', 'deleted'),
- UniqueConstraint('vlan', 'deleted', name='uniq_networks0vlan0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- pci_devices = sa.Table('pci_devices', meta,
- sa.Column('created_at', sa.DateTime(timezone=False)),
- sa.Column('updated_at', sa.DateTime(timezone=False)),
- sa.Column('deleted_at', sa.DateTime(timezone=False)),
- sa.Column('deleted', sa.Integer, default=0, nullable=True),
- sa.Column('id', sa.Integer, primary_key=True),
- sa.Column(
- 'compute_node_id', sa.Integer,
- sa.ForeignKey(
- 'compute_nodes.id', name='pci_devices_compute_node_id_fkey'),
- nullable=False),
- sa.Column('address', sa.String(12), nullable=False),
- sa.Column('product_id', sa.String(4), nullable=False),
- sa.Column('vendor_id', sa.String(4), nullable=False),
- sa.Column('dev_type', sa.String(8), nullable=False),
- sa.Column('dev_id', sa.String(255)),
- sa.Column('label', sa.String(255), nullable=False),
- sa.Column('status', sa.String(36), nullable=False),
- sa.Column('extra_info', sa.Text, nullable=True),
- sa.Column('instance_uuid', sa.String(36), nullable=True),
- sa.Column('request_id', sa.String(36), nullable=True),
- sa.Column('numa_node', sa.Integer, default=None),
- sa.Column('parent_addr', sa.String(12), nullable=True),
- sa.Column('uuid', sa.String(36)),
- sa.Index(
- 'ix_pci_devices_instance_uuid_deleted',
- 'instance_uuid', 'deleted'),
- sa.Index(
- 'ix_pci_devices_compute_node_id_deleted',
- 'compute_node_id', 'deleted'),
- sa.Index(
- 'ix_pci_devices_compute_node_id_parent_addr_deleted',
- 'compute_node_id', 'parent_addr', 'deleted'),
- UniqueConstraint(
- 'compute_node_id', 'address', 'deleted',
- name='uniq_pci_devices0compute_node_id0address0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8')
-
- provider_fw_rules = sa.Table('provider_fw_rules', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('protocol', sa.String(length=5)),
- sa.Column('from_port', sa.Integer),
- sa.Column('to_port', sa.Integer),
- sa.Column('cidr', Inet()),
- sa.Column('deleted', sa.Integer),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- quota_classes = sa.Table('quota_classes', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('class_name', sa.String(length=255)),
- sa.Column('resource', sa.String(length=255)),
- sa.Column('hard_limit', sa.Integer),
- sa.Column('deleted', sa.Integer),
- sa.Index('ix_quota_classes_class_name', 'class_name'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- quota_usages = sa.Table('quota_usages', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('resource', sa.String(length=255), nullable=False),
- sa.Column('in_use', sa.Integer, nullable=False),
- sa.Column('reserved', sa.Integer, nullable=False),
- sa.Column('until_refresh', sa.Integer),
- sa.Column('deleted', sa.Integer),
- sa.Column('user_id', sa.String(length=255)),
- sa.Index('ix_quota_usages_project_id', 'project_id'),
- sa.Index('ix_quota_usages_user_id_deleted', 'user_id', 'deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- quotas = sa.Table('quotas', meta,
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('resource', sa.String(length=255), nullable=False),
- sa.Column('hard_limit', sa.Integer),
- sa.Column('deleted', sa.Integer),
- UniqueConstraint(
- 'project_id', 'resource', 'deleted',
- name='uniq_quotas0project_id0resource0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- project_user_quotas = sa.Table('project_user_quotas', meta,
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('deleted', sa.Integer),
- sa.Column('user_id', sa.String(length=255), nullable=False),
- sa.Column('project_id', sa.String(length=255), nullable=False),
- sa.Column('resource', sa.String(length=255), nullable=False),
- sa.Column('hard_limit', sa.Integer, nullable=True),
- sa.Index(
- 'project_user_quotas_project_id_deleted_idx',
- 'project_id', 'deleted'),
- sa.Index(
- 'project_user_quotas_user_id_deleted_idx',
- 'user_id', 'deleted'),
- UniqueConstraint(
- 'user_id', 'project_id', 'resource', 'deleted',
- name='uniq_project_user_quotas0user_id0project_id0resource0'
- 'deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- reservations = sa.Table('reservations', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column(
- 'usage_id', sa.Integer,
- sa.ForeignKey('quota_usages.id', name='reservations_ibfk_1'),
- nullable=False),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('resource', sa.String(length=255)),
- sa.Column('delta', sa.Integer, nullable=False),
- sa.Column('expire', sa.DateTime),
- sa.Column('deleted', sa.Integer),
- sa.Column('user_id', sa.String(length=255)),
- sa.Index('ix_reservations_project_id', 'project_id'),
- sa.Index('ix_reservations_user_id_deleted', 'user_id', 'deleted'),
- sa.Index('reservations_uuid_idx', 'uuid'),
- sa.Index('reservations_deleted_expire_idx', 'deleted', 'expire'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- resource_providers = sa.Table('resource_providers', meta,
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(36), nullable=False),
- sa.Column('name', sa.Unicode(200), nullable=True),
- sa.Column('generation', sa.Integer, default=0),
- sa.Column('can_host', sa.Integer, default=0),
- UniqueConstraint('uuid', name='uniq_resource_providers0uuid'),
- UniqueConstraint('name', name='uniq_resource_providers0name'),
- sa.Index('resource_providers_name_idx', 'name'),
- sa.Index('resource_providers_uuid_idx', 'uuid'),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- resource_provider_aggregates = sa.Table(
- 'resource_provider_aggregates', meta,
- sa.Column(
- 'resource_provider_id', sa.Integer, primary_key=True,
- nullable=False),
- sa.Column(
- 'aggregate_id', sa.Integer, primary_key=True, nullable=False),
- sa.Index(
- 'resource_provider_aggregates_aggregate_id_idx', 'aggregate_id'),
- mysql_engine='InnoDB',
- mysql_charset='latin1',
- )
-
- s3_images = sa.Table('s3_images', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column('deleted', sa.Integer),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- security_group_instance_association = sa.Table(
- 'security_group_instance_association', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'security_group_id', sa.Integer,
- sa.ForeignKey(
- 'security_groups.id',
- name='security_group_instance_association_ibfk_1'),
- ),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid',
- name='security_group_instance_association_instance_uuid_fkey'),
- ),
- sa.Column('deleted', sa.Integer),
- sa.Index(
- 'security_group_instance_association_instance_uuid_idx',
- 'instance_uuid'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- security_group_rules = sa.Table('security_group_rules', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column(
- 'parent_group_id', sa.Integer,
- sa.ForeignKey('security_groups.id')),
- sa.Column('protocol', sa.String(length=255)),
- sa.Column('from_port', sa.Integer),
- sa.Column('to_port', sa.Integer),
- sa.Column('cidr', Inet()),
- sa.Column('group_id', sa.Integer, sa.ForeignKey('security_groups.id')),
- sa.Column('deleted', sa.Integer),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- security_groups = sa.Table('security_groups', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('name', sa.String(length=255)),
- sa.Column('description', sa.String(length=255)),
- sa.Column('user_id', sa.String(length=255)),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('deleted', sa.Integer),
- UniqueConstraint(
- 'project_id', 'name', 'deleted',
- name='uniq_security_groups0project_id0name0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- security_group_default_rules = sa.Table(
- 'security_group_default_rules', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('deleted', sa.Integer, default=0),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('protocol', sa.String(length=5)),
- sa.Column('from_port', sa.Integer),
- sa.Column('to_port', sa.Integer),
- sa.Column('cidr', Inet()),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- services = sa.Table('services', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('host', sa.String(length=255)),
- sa.Column('binary', sa.String(length=255)),
- sa.Column('topic', sa.String(length=255)),
- sa.Column('report_count', sa.Integer, nullable=False),
- sa.Column('disabled', sa.Boolean),
- sa.Column('deleted', sa.Integer),
- sa.Column('disabled_reason', sa.String(length=255)),
- sa.Column('last_seen_up', sa.DateTime, nullable=True),
- # NOTE(stephenfin): This was originally added by sqlalchemy-migrate
- # which did not generate the constraints
- sa.Column(
- 'forced_down', sa.Boolean(create_constraint=False), default=False),
- sa.Column('version', sa.Integer, default=0),
- sa.Column('uuid', sa.String(36), nullable=True),
- sa.Index('services_uuid_idx', 'uuid', unique=True),
- UniqueConstraint(
- 'host', 'topic', 'deleted',
- name='uniq_services0host0topic0deleted'),
- UniqueConstraint(
- 'host', 'binary', 'deleted',
- name='uniq_services0host0binary0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- snapshot_id_mappings = sa.Table('snapshot_id_mappings', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column('deleted', sa.Integer),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- snapshots = sa.Table('snapshots', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column(
- 'id', sa.String(length=36), primary_key=True, nullable=False),
- sa.Column('volume_id', sa.String(length=36), nullable=False),
- sa.Column('user_id', sa.String(length=255)),
- sa.Column('project_id', sa.String(length=255)),
- sa.Column('status', sa.String(length=255)),
- sa.Column('progress', sa.String(length=255)),
- sa.Column('volume_size', sa.Integer),
- sa.Column('scheduled_at', sa.DateTime),
- sa.Column('display_name', sa.String(length=255)),
- sa.Column('display_description', sa.String(length=255)),
- sa.Column('deleted', sa.String(length=36)),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- tags = sa.Table('tags', meta,
- sa.Column(
- 'resource_id', sa.String(36), primary_key=True, nullable=False),
- sa.Column('tag', sa.Unicode(80), primary_key=True, nullable=False),
- sa.Index('tags_tag_idx', 'tag'),
- mysql_engine='InnoDB',
- mysql_charset='utf8',
- )
-
- task_log = sa.Table('task_log', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('task_name', sa.String(length=255), nullable=False),
- sa.Column('state', sa.String(length=255), nullable=False),
- sa.Column('host', sa.String(length=255), nullable=False),
- sa.Column('period_beginning', sa.DateTime, nullable=False),
- sa.Column('period_ending', sa.DateTime, nullable=False),
- sa.Column('message', sa.String(length=255), nullable=False),
- sa.Column('task_items', sa.Integer),
- sa.Column('errors', sa.Integer),
- sa.Column('deleted', sa.Integer),
- sa.Index('ix_task_log_period_beginning', 'period_beginning'),
- sa.Index('ix_task_log_host', 'host'),
- sa.Index('ix_task_log_period_ending', 'period_ending'),
- UniqueConstraint(
- 'task_name', 'host', 'period_beginning', 'period_ending',
- name='uniq_task_log0task_name0host0period_beginning0period_ending',
- ),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- virtual_interfaces = sa.Table('virtual_interfaces', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('address', sa.String(length=255)),
- sa.Column('network_id', sa.Integer),
- sa.Column('uuid', sa.String(length=36)),
- sa.Column(
- 'instance_uuid', sa.String(length=36),
- sa.ForeignKey(
- 'instances.uuid',
- name='virtual_interfaces_instance_uuid_fkey'),
- nullable=True),
- sa.Column('deleted', sa.Integer),
- sa.Column('tag', sa.String(255)),
- sa.Index('virtual_interfaces_instance_uuid_fkey', 'instance_uuid'),
- sa.Index('virtual_interfaces_network_id_idx', 'network_id'),
- sa.Index('virtual_interfaces_uuid_idx', 'uuid'),
- UniqueConstraint(
- 'address', 'deleted',
- name='uniq_virtual_interfaces0address0deleted'),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- volume_id_mappings = sa.Table('volume_id_mappings', meta,
- sa.Column('created_at', sa.DateTime),
- sa.Column('updated_at', sa.DateTime),
- sa.Column('deleted_at', sa.DateTime),
- sa.Column('id', sa.Integer, primary_key=True, nullable=False),
- sa.Column('uuid', sa.String(length=36), nullable=False),
- sa.Column('deleted', sa.Integer),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- volume_usage_cache = sa.Table('volume_usage_cache', meta,
- sa.Column('created_at', sa.DateTime(timezone=False)),
- sa.Column('updated_at', sa.DateTime(timezone=False)),
- sa.Column('deleted_at', sa.DateTime(timezone=False)),
- sa.Column('id', sa.Integer(), primary_key=True, nullable=False),
- sa.Column('volume_id', sa.String(36), nullable=False),
- sa.Column('tot_last_refreshed', sa.DateTime(timezone=False)),
- sa.Column('tot_reads', sa.BigInteger(), default=0),
- sa.Column('tot_read_bytes', sa.BigInteger(), default=0),
- sa.Column('tot_writes', sa.BigInteger(), default=0),
- sa.Column('tot_write_bytes', sa.BigInteger(), default=0),
- sa.Column('curr_last_refreshed', sa.DateTime(timezone=False)),
- sa.Column('curr_reads', sa.BigInteger(), default=0),
- sa.Column('curr_read_bytes', sa.BigInteger(), default=0),
- sa.Column('curr_writes', sa.BigInteger(), default=0),
- sa.Column('curr_write_bytes', sa.BigInteger(), default=0),
- sa.Column('deleted', sa.Integer),
- sa.Column('instance_uuid', sa.String(length=36)),
- sa.Column('project_id', sa.String(length=36)),
- sa.Column('user_id', sa.String(length=64)),
- sa.Column('availability_zone', sa.String(length=255)),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
- # create all tables
- tables = [instances, aggregates, console_auth_tokens,
- console_pools, instance_types,
- security_groups, snapshots,
- # those that are children and others later
- agent_builds, aggregate_hosts, aggregate_metadata,
- block_device_mapping, bw_usage_cache, cells,
- certificates, compute_nodes, consoles,
- dns_domains, fixed_ips, floating_ips,
- instance_faults, instance_id_mappings, instance_info_caches,
- instance_metadata, instance_system_metadata,
- instance_type_extra_specs, instance_type_projects,
- instance_actions, instance_actions_events, instance_extra,
- groups, group_policy, group_member,
- key_pairs, migrations, networks,
- pci_devices, provider_fw_rules, quota_classes, quota_usages,
- quotas, project_user_quotas,
- reservations, s3_images, security_group_instance_association,
- security_group_rules, security_group_default_rules,
- services, snapshot_id_mappings, tags, task_log,
- virtual_interfaces,
- volume_id_mappings,
- volume_usage_cache,
- resource_providers, inventories, allocations,
- resource_provider_aggregates]
-
- for table in tables:
- try:
- table.create()
- except Exception:
- LOG.info(repr(table))
- LOG.exception('Exception while creating table.')
- raise
-
- # MySQL specific indexes
- if migrate_engine.name == 'mysql':
- # NOTE(stephenfin): For some reason, we have to put this within the if
- # statement to avoid it being evaluated for the sqlite case. Even
- # though we don't call create except in the MySQL case... Failure to do
- # this will result in the following ugly error message:
- #
- # sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) no such
- # index: instance_type_id
- #
- # Yeah, I don't get it either...
- mysql_specific_indexes = [
- sa.Index(
- 'instance_type_id',
- instance_type_projects.c.instance_type_id),
- sa.Index('usage_id', reservations.c.usage_id),
- sa.Index(
- 'security_group_id',
- security_group_instance_association.c.security_group_id),
- ]
-
- for index in mysql_specific_indexes:
- index.create(migrate_engine)
-
- if migrate_engine.name == 'mysql':
- # In Folsom we explicitly converted migrate_version to UTF8.
- with migrate_engine.connect() as conn:
- conn.exec_driver_sql(
- 'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8'
- )
- # Set default DB charset to UTF8.
- conn.exec_driver_sql(
- 'ALTER DATABASE `%s` DEFAULT CHARACTER SET utf8' % (
- migrate_engine.url.database,
- )
- )
-
- # NOTE(cdent): The resource_providers table is defined as latin1 to
- # be more efficient. Now we need the name column to be UTF8. We
- # modify it here otherwise the declarative handling in sqlalchemy
- # gets confused.
- conn.exec_driver_sql(
- 'ALTER TABLE resource_providers MODIFY name '
- 'VARCHAR(200) CHARACTER SET utf8'
- )
-
- _create_shadow_tables(migrate_engine)
-
- # TODO(stephenfin): Fix these various bugs in a follow-up
-
- # 298_mysql_extra_specs_binary_collation; we should update the shadow table
- # also
-
- if migrate_engine.name == 'mysql':
- with migrate_engine.connect() as conn:
- # Use binary collation for extra specs table
- conn.exec_driver_sql(
- 'ALTER TABLE instance_type_extra_specs '
- 'CONVERT TO CHARACTER SET utf8 '
- 'COLLATE utf8_bin'
- )
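
The 402_train.py module removed above built the entire legacy schema as sqlalchemy-migrate Table objects; after this change the same tables come from the initial alembic revision instead. As a rough, hypothetical sketch (not the real 8f2f1571d55b revision), one of the simpler tables above would be expressed in alembic roughly as:

import sqlalchemy as sa
from alembic import op


def upgrade():
    # Illustrative restatement of the legacy 'quotas' definition as an
    # alembic operation; names mirror the deleted code above.
    op.create_table(
        'quotas',
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('created_at', sa.DateTime),
        sa.Column('updated_at', sa.DateTime),
        sa.Column('deleted_at', sa.DateTime),
        sa.Column('project_id', sa.String(length=255)),
        sa.Column('resource', sa.String(length=255), nullable=False),
        sa.Column('hard_limit', sa.Integer),
        sa.Column('deleted', sa.Integer),
        sa.UniqueConstraint(
            'project_id', 'resource', 'deleted',
            name='uniq_quotas0project_id0resource0deleted'),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )
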
diff --git a/nova/db/main/legacy_migrations/versions/403_placeholder.py b/nova/db/main/legacy_migrations/versions/403_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/403_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/404_placeholder.py b/nova/db/main/legacy_migrations/versions/404_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/404_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/405_placeholder.py b/nova/db/main/legacy_migrations/versions/405_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/405_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/406_placeholder.py b/nova/db/main/legacy_migrations/versions/406_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/406_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/407_placeholder.py b/nova/db/main/legacy_migrations/versions/407_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/407_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/408_placeholder.py b/nova/db/main/legacy_migrations/versions/408_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/408_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/409_placeholder.py b/nova/db/main/legacy_migrations/versions/409_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/409_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/410_placeholder.py b/nova/db/main/legacy_migrations/versions/410_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/410_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/411_placeholder.py b/nova/db/main/legacy_migrations/versions/411_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/411_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/412_placeholder.py b/nova/db/main/legacy_migrations/versions/412_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/412_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/413_placeholder.py b/nova/db/main/legacy_migrations/versions/413_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/413_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/414_placeholder.py b/nova/db/main/legacy_migrations/versions/414_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/414_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/415_placeholder.py b/nova/db/main/legacy_migrations/versions/415_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/415_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/416_placeholder.py b/nova/db/main/legacy_migrations/versions/416_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/416_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/417_placeholder.py b/nova/db/main/legacy_migrations/versions/417_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/417_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/418_placeholder.py b/nova/db/main/legacy_migrations/versions/418_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/418_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/419_placeholder.py b/nova/db/main/legacy_migrations/versions/419_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/419_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/420_placeholder.py b/nova/db/main/legacy_migrations/versions/420_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/420_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/421_placeholder.py b/nova/db/main/legacy_migrations/versions/421_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/421_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/422_placeholder.py b/nova/db/main/legacy_migrations/versions/422_placeholder.py
deleted file mode 100644
index 7a93224504..0000000000
--- a/nova/db/main/legacy_migrations/versions/422_placeholder.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# This is a placeholder for backports.
-# Do not use this number for new work. New work starts after
-# all the placeholders.
-#
-# See this for more information:
-# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
-
-
-def upgrade(migrate_engine):
- pass
diff --git a/nova/db/main/legacy_migrations/versions/__init__.py b/nova/db/main/legacy_migrations/versions/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/nova/db/main/legacy_migrations/versions/__init__.py
+++ /dev/null
diff --git a/nova/db/migration.py b/nova/db/migration.py
index 80410c3192..2b185af1a6 100644
--- a/nova/db/migration.py
+++ b/nova/db/migration.py
@@ -19,24 +19,12 @@ import os
from alembic import command as alembic_api
from alembic import config as alembic_config
from alembic.runtime import migration as alembic_migration
-from migrate import exceptions as migrate_exceptions
-from migrate.versioning import api as migrate_api
-from migrate.versioning import repository as migrate_repository
from oslo_log import log as logging
from nova.db.api import api as api_db_api
from nova.db.main import api as main_db_api
from nova import exception
-MIGRATE_INIT_VERSION = {
- 'main': 401,
- 'api': 66,
-}
-ALEMBIC_INIT_VERSION = {
- 'main': '8f2f1571d55b',
- 'api': 'd67eeaabee36',
-}
-
LOG = logging.getLogger(__name__)
@@ -48,16 +36,6 @@ def _get_engine(database='main', context=None):
return api_db_api.get_engine()
-def _find_migrate_repo(database='main'):
- """Get the path for the migrate repository."""
-
- path = os.path.join(
- os.path.abspath(os.path.dirname(__file__)),
- database, 'legacy_migrations')
-
- return migrate_repository.Repository(path)
-
-
def _find_alembic_conf(database='main'):
"""Get the path for the alembic repository."""
@@ -73,35 +51,6 @@ def _find_alembic_conf(database='main'):
return config
-def _is_database_under_migrate_control(engine, repository):
- try:
- migrate_api.db_version(engine, repository)
- return True
- except migrate_exceptions.DatabaseNotControlledError:
- return False
-
-
-def _is_database_under_alembic_control(engine):
- with engine.connect() as conn:
- context = alembic_migration.MigrationContext.configure(conn)
- return bool(context.get_current_revision())
-
-
-def _init_alembic_on_legacy_database(engine, database, repository, config):
- """Init alembic in an existing environment with sqlalchemy-migrate."""
- LOG.info(
- 'The database is still under sqlalchemy-migrate control; '
- 'applying any remaining sqlalchemy-migrate-based migrations '
- 'and fake applying the initial alembic migration'
- )
- migrate_api.upgrade(engine, repository)
-
- # re-use the connection rather than creating a new one
- with engine.begin() as connection:
- config.attributes['connection'] = connection
- alembic_api.stamp(config, ALEMBIC_INIT_VERSION[database])
-
-
def _upgrade_alembic(engine, config, version):
# re-use the connection rather than creating a new one
with engine.begin() as connection:
@@ -126,7 +75,6 @@ def db_sync(version=None, database='main', context=None):
engine = _get_engine(database, context=context)
- repository = _find_migrate_repo(database)
config = _find_alembic_conf(database)
# discard the URL stored in alembic.ini in favour of the URL configured
# for the engine, casting from 'sqlalchemy.engine.url.URL' to str in the
@@ -138,16 +86,6 @@ def db_sync(version=None, database='main', context=None):
url = str(engine.url).replace('%', '%%')
config.set_main_option('sqlalchemy.url', url)
- # if we're in a deployment where sqlalchemy-migrate is already present,
- # then apply all the updates for that and fake apply the initial alembic
- # migration; if we're not then 'upgrade' will take care of everything
- # this should be a one-time operation
- if (
- _is_database_under_migrate_control(engine, repository) and
- not _is_database_under_alembic_control(engine)
- ):
- _init_alembic_on_legacy_database(engine, database, repository, config)
-
# apply anything later
LOG.info('Applying migration(s)')
@@ -161,17 +99,10 @@ def db_version(database='main', context=None):
if database not in ('main', 'api'):
raise exception.Invalid('%s is not a valid database' % database)
- repository = _find_migrate_repo(database)
engine = _get_engine(database, context=context)
- migrate_version = None
- if _is_database_under_migrate_control(engine, repository):
- migrate_version = migrate_api.db_version(engine, repository)
-
- alembic_version = None
- if _is_database_under_alembic_control(engine):
- with engine.connect() as conn:
- m_context = alembic_migration.MigrationContext.configure(conn)
- alembic_version = m_context.get_current_revision()
+ with engine.connect() as conn:
+ m_context = alembic_migration.MigrationContext.configure(conn)
+ version = m_context.get_current_revision()
- return alembic_version or migrate_version
+ return version
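
With the sqlalchemy-migrate fallback removed, db_version() reduces to asking alembic for the current revision. A self-contained sketch of that lookup, assuming only a database URL, looks like:

from alembic.runtime import migration as alembic_migration
from sqlalchemy import create_engine


def current_revision(url):
    # Returns the alembic revision the database is stamped at, or None if
    # the database has never been stamped or migrated.
    engine = create_engine(url)
    with engine.connect() as conn:
        ctx = alembic_migration.MigrationContext.configure(conn)
        return ctx.get_current_revision()
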
diff --git a/nova/objects/service.py b/nova/objects/service.py
index b17b5c2050..1a4629cc84 100644
--- a/nova/objects/service.py
+++ b/nova/objects/service.py
@@ -253,7 +253,7 @@ NODE_IDENTITY_VERSION = 65
# and value be the latest service version that the release supports (for
 # example, before Bobcat RC1, please add 'Bobcat': XX where XX is the latest
 # service version that was added)
-OLDEST_SUPPORTED_SERVICE_VERSION = 'Yoga'
+OLDEST_SUPPORTED_SERVICE_VERSION = 'Antelope'
SERVICE_VERSION_ALIASES = {
'Victoria': 52,
'Wallaby': 54,
diff --git a/nova/pci/request.py b/nova/pci/request.py
index 27ada6c045..8ae2385549 100644
--- a/nova/pci/request.py
+++ b/nova/pci/request.py
@@ -168,7 +168,7 @@ def _get_alias_from_config() -> Alias:
def _translate_alias_to_requests(
- alias_spec: str, affinity_policy: str = None,
+ alias_spec: str, affinity_policy: ty.Optional[str] = None,
) -> ty.List['objects.InstancePCIRequest']:
"""Generate complete pci requests from pci aliases in extra_spec."""
pci_aliases = _get_alias_from_config()
@@ -255,7 +255,7 @@ def get_instance_pci_request_from_vif(
def get_pci_requests_from_flavor(
- flavor: 'objects.Flavor', affinity_policy: str = None,
+ flavor: 'objects.Flavor', affinity_policy: ty.Optional[str] = None,
) -> 'objects.InstancePCIRequests':
"""Validate and return PCI requests.
diff --git a/nova/pci/stats.py b/nova/pci/stats.py
index 5c5f7c669c..c6e4844b34 100644
--- a/nova/pci/stats.py
+++ b/nova/pci/stats.py
@@ -82,7 +82,7 @@ class PciDeviceStats(object):
self,
numa_topology: 'objects.NUMATopology',
stats: 'objects.PCIDevicePoolList' = None,
- dev_filter: whitelist.Whitelist = None,
+ dev_filter: ty.Optional[whitelist.Whitelist] = None,
) -> None:
self.numa_topology = numa_topology
self.pools = (
diff --git a/nova/pci/whitelist.py b/nova/pci/whitelist.py
index 8862a0ef4f..152cc29ca6 100644
--- a/nova/pci/whitelist.py
+++ b/nova/pci/whitelist.py
@@ -33,7 +33,7 @@ class Whitelist(object):
assignable.
"""
- def __init__(self, whitelist_spec: str = None) -> None:
+ def __init__(self, whitelist_spec: ty.Optional[str] = None) -> None:
"""White list constructor
For example, the following json string specifies that devices whose
diff --git a/nova/scheduler/client/report.py b/nova/scheduler/client/report.py
index 1242752be1..7c14f3d7ef 100644
--- a/nova/scheduler/client/report.py
+++ b/nova/scheduler/client/report.py
@@ -1047,7 +1047,7 @@ class SchedulerReportClient(object):
context: nova_context.RequestContext,
rp_uuid: str,
traits: ty.Iterable[str],
- generation: int = None
+ generation: ty.Optional[int] = None
):
"""Replace a provider's traits with those specified.
diff --git a/nova/scheduler/manager.py b/nova/scheduler/manager.py
index 11581c4f2d..620519d403 100644
--- a/nova/scheduler/manager.py
+++ b/nova/scheduler/manager.py
@@ -23,6 +23,7 @@ import collections
import copy
import random
+from keystoneauth1 import exceptions as ks_exc
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_serialization import jsonutils
@@ -67,10 +68,42 @@ class SchedulerManager(manager.Manager):
self.host_manager = host_manager.HostManager()
self.servicegroup_api = servicegroup.API()
self.notifier = rpc.get_notifier('scheduler')
- self.placement_client = report.report_client_singleton()
+ self._placement_client = None
+
+ try:
+ # Test our placement client during initialization
+ self.placement_client
+ except (ks_exc.EndpointNotFound,
+ ks_exc.DiscoveryFailure,
+ ks_exc.RequestTimeout,
+ ks_exc.GatewayTimeout,
+ ks_exc.ConnectFailure) as e:
+ # Non-fatal, likely transient (although not definitely);
+ # continue startup but log the warning so that when things
+ # fail later, it will be clear why we can not do certain
+ # things.
+ LOG.warning('Unable to initialize placement client (%s); '
+ 'Continuing with startup, but scheduling '
+ 'will not be possible.', e)
+ except (ks_exc.MissingAuthPlugin,
+ ks_exc.Unauthorized) as e:
+ # This is almost definitely fatal mis-configuration. The
+ # Unauthorized error might be transient, but it is
+ # probably reasonable to consider it fatal.
+ LOG.error('Fatal error initializing placement client; '
+ 'config is incorrect or incomplete: %s', e)
+ raise
+ except Exception as e:
+ # Unknown/unexpected errors here are fatal
+ LOG.error('Fatal error initializing placement client: %s', e)
+ raise
super().__init__(service_name='scheduler', *args, **kwargs)
+ @property
+ def placement_client(self):
+ return report.report_client_singleton()
+
@periodic_task.periodic_task(
spacing=CONF.scheduler.discover_hosts_in_cells_interval,
run_immediately=True)
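
The scheduler now resolves its placement client lazily through a property, so a transient placement outage at startup is logged rather than crashing the service, while clearly fatal misconfiguration still raises. A stripped-down sketch of the pattern, with the error handling omitted:

from nova.scheduler.client import report


class LazyPlacementClient:
    # Illustrative only; the real handling lives in SchedulerManager above.

    @property
    def placement_client(self):
        # report_client_singleton() reuses one client per process, so the
        # property stays cheap and picks up a working client as soon as
        # placement becomes reachable.
        return report.report_client_singleton()
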
diff --git a/nova/tests/functional/libvirt/test_pci_sriov_servers.py b/nova/tests/functional/libvirt/test_pci_sriov_servers.py
index 135a457154..098a0e857b 100644
--- a/nova/tests/functional/libvirt/test_pci_sriov_servers.py
+++ b/nova/tests/functional/libvirt/test_pci_sriov_servers.py
@@ -1549,7 +1549,11 @@ class VDPAServersTest(_PCIServersWithMigrationTestBase):
'not supported for instance with vDPA ports',
ex.response.text)
+    # NOTE(sbauza): Now that we're past the Antelope release, we no longer
+    # need to support this test
def test_attach_interface_service_version_61(self):
+ self.flags(disable_compute_service_check_for_ffu=True,
+ group='workarounds')
with mock.patch(
"nova.objects.service.get_minimum_version_all_cells",
return_value=61
@@ -1578,7 +1582,11 @@ class VDPAServersTest(_PCIServersWithMigrationTestBase):
self.assertEqual(hostname, port['binding:host_id'])
self.assertEqual(server['id'], port['device_id'])
+    # NOTE(sbauza): Now that we're past the Antelope release, we no longer
+    # need to support this test
def test_detach_interface_service_version_61(self):
+ self.flags(disable_compute_service_check_for_ffu=True,
+ group='workarounds')
with mock.patch(
"nova.objects.service.get_minimum_version_all_cells",
return_value=61
@@ -1864,7 +1872,11 @@ class VDPAServersTest(_PCIServersWithMigrationTestBase):
self.assertEqual(
dest, server['OS-EXT-SRV-ATTR:hypervisor_hostname'])
+    # NOTE(sbauza): Now that we're past the Antelope release, we no longer
+    # need to support this test
def test_suspend_and_resume_service_version_62(self):
+ self.flags(disable_compute_service_check_for_ffu=True,
+ group='workarounds')
with mock.patch(
"nova.objects.service.get_minimum_version_all_cells",
return_value=62
@@ -1883,7 +1895,11 @@ class VDPAServersTest(_PCIServersWithMigrationTestBase):
self.assertPCIDeviceCounts(source, total=num_pci, free=num_pci - 2)
self.assertEqual('ACTIVE', server['status'])
+    # NOTE(sbauza): Now that we're past the Antelope release, we no longer
+    # need to support this test
def test_live_migrate_service_version_62(self):
+ self.flags(disable_compute_service_check_for_ffu=True,
+ group='workarounds')
with mock.patch(
"nova.objects.service.get_minimum_version_all_cells",
return_value=62
diff --git a/nova/tests/functional/regressions/test_bug_1995153.py b/nova/tests/functional/regressions/test_bug_1995153.py
new file mode 100644
index 0000000000..c897156d99
--- /dev/null
+++ b/nova/tests/functional/regressions/test_bug_1995153.py
@@ -0,0 +1,109 @@
+# Copyright (C) 2023 Red Hat, Inc
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+import fixtures
+from unittest import mock
+
+from oslo_serialization import jsonutils
+from oslo_utils import units
+
+from nova.objects import fields
+from nova.tests.fixtures import libvirt as fakelibvirt
+from nova.tests.functional import integrated_helpers
+from nova.tests.functional.libvirt import base
+
+
+class Bug1995153RegressionTest(
+ base.ServersTestBase,
+ integrated_helpers.InstanceHelperMixin
+):
+
+ ADDITIONAL_FILTERS = ['NUMATopologyFilter', 'PciPassthroughFilter']
+
+ ALIAS_NAME = 'a1'
+ PCI_DEVICE_SPEC = [jsonutils.dumps(
+ {
+ 'vendor_id': fakelibvirt.PCI_VEND_ID,
+ 'product_id': fakelibvirt.PCI_PROD_ID,
+ }
+ )]
+ # we set the numa_affinity policy to required to ensure strict affinity
+ # between pci devices and the guest cpu and memory will be enforced.
+ PCI_ALIAS = [jsonutils.dumps(
+ {
+ 'vendor_id': fakelibvirt.PCI_VEND_ID,
+ 'product_id': fakelibvirt.PCI_PROD_ID,
+ 'name': ALIAS_NAME,
+ 'device_type': fields.PciDeviceType.STANDARD,
+ 'numa_policy': fields.PCINUMAAffinityPolicy.REQUIRED,
+ }
+ )]
+
+ def setUp(self):
+ super(Bug1995153RegressionTest, self).setUp()
+ self.flags(
+ device_spec=self.PCI_DEVICE_SPEC,
+ alias=self.PCI_ALIAS,
+ group='pci'
+ )
+ host_manager = self.scheduler.manager.host_manager
+ pci_filter_class = host_manager.filter_cls_map['PciPassthroughFilter']
+ host_pass_mock = mock.Mock(wraps=pci_filter_class().host_passes)
+ self.mock_filter = self.useFixture(fixtures.MockPatch(
+ 'nova.scheduler.filters.pci_passthrough_filter'
+ '.PciPassthroughFilter.host_passes',
+ side_effect=host_pass_mock)).mock
+
+ def test_socket_policy_bug_1995153(self):
+ """The numa_usage_from_instance_numa() method in hardware.py saves the
+        host NUMATopology object with NUMACells that have no `socket` set. This
+ was an omission in the original implementation of the `socket` PCI NUMA
+ affinity policy. The consequence is that any code path that calls into
+ numa_usage_from_instance_numa() will clobber the host NUMA topology in
+ the database with a socket-less version. Booting an instance with NUMA
+        topology will do that, for example. If a second instance is then booted
+ with the `socket` PCI NUMA affinity policy, it will read the
+ socket-less host NUMATopology from the database, and error out with a
+ NotImplementedError. This is bug 1995153.
+ """
+ host_info = fakelibvirt.HostInfo(
+ cpu_nodes=2, cpu_sockets=1, cpu_cores=2, cpu_threads=2,
+ kB_mem=(16 * units.Gi) // units.Ki)
+ self.flags(cpu_dedicated_set='0-3', group='compute')
+ pci_info = fakelibvirt.HostPCIDevicesInfo(num_pci=1, numa_node=1)
+
+ self.start_compute(host_info=host_info, pci_info=pci_info)
+
+ extra_spec = {
+ 'hw:cpu_policy': 'dedicated',
+ 'pci_passthrough:alias': '%s:1' % self.ALIAS_NAME,
+ 'hw:pci_numa_affinity_policy': 'socket'
+ }
+ # Boot a first instance with a guest NUMA topology to run the buggy
+ # code in numa_usage_from_instance_numa() and save the socket-less host
+ # NUMATopology to the database.
+ self._create_server(
+ flavor_id=self._create_flavor(
+ extra_spec={'hw:cpu_policy': 'dedicated'}))
+
+ # FIXME(artom) Attempt to boot an instance with the `socket` PCI NUMA
+ # affinity policy and observe the fireworks.
+ flavor_id = self._create_flavor(extra_spec=extra_spec)
+ server = self._create_server(flavor_id=flavor_id,
+ expected_state='ERROR')
+ self.assertIn('fault', server)
+ self.assertIn('NotImplementedError', server['fault']['message'])
+ self.assertTrue(self.mock_filter.called)
diff --git a/nova/tests/unit/compute/test_compute.py b/nova/tests/unit/compute/test_compute.py
index 49cf15ec17..bb9b726912 100644
--- a/nova/tests/unit/compute/test_compute.py
+++ b/nova/tests/unit/compute/test_compute.py
@@ -13513,7 +13513,8 @@ class EvacuateHostTestCase(BaseTestCase):
super(EvacuateHostTestCase, self).tearDown()
def _rebuild(self, on_shared_storage=True, migration=None,
- send_node=False, vm_states_is_stopped=False):
+ send_node=False, vm_states_is_stopped=False,
+ expect_error=False):
network_api = self.compute.network_api
ctxt = context.get_admin_context()
@@ -13560,6 +13561,11 @@ class EvacuateHostTestCase(BaseTestCase):
action='power_off', phase='start'),
mock.call(ctxt, self.inst, self.inst.host,
action='power_off', phase='end')])
+ elif expect_error:
+ mock_notify_rebuild.assert_has_calls([
+ mock.call(ctxt, self.inst, self.compute.host,
+ phase='error', exception=mock.ANY, bdms=bdms)])
+ return
else:
mock_notify_rebuild.assert_has_calls([
mock.call(ctxt, self.inst, self.inst.host, phase='start',
@@ -13614,14 +13620,15 @@ class EvacuateHostTestCase(BaseTestCase):
mock.patch.object(self.compute, '_get_compute_info',
side_effect=fake_get_compute_info)
) as (mock_inst, mock_get):
- self._rebuild()
+ self.assertRaises(exception.InstanceFaultRollback,
+ self._rebuild, expect_error=True)
# Should be on destination host
instance = db.instance_get(self.context, self.inst.id)
- self.assertEqual(instance['host'], self.compute.host)
- self.assertIsNone(instance['node'])
- self.assertTrue(mock_inst.called)
- self.assertTrue(mock_get.called)
+ self.assertEqual('fake_host_2', instance['host'])
+ self.assertEqual('fakenode2', instance['node'])
+ mock_inst.assert_not_called()
+ mock_get.assert_called_once_with(mock.ANY, self.compute.host)
def test_rebuild_on_host_node_passed(self):
patch_get_info = mock.patch.object(self.compute, '_get_compute_info')
diff --git a/nova/tests/unit/compute/test_compute_mgr.py b/nova/tests/unit/compute/test_compute_mgr.py
index 1c69cd8f1c..73c9d32197 100644
--- a/nova/tests/unit/compute/test_compute_mgr.py
+++ b/nova/tests/unit/compute/test_compute_mgr.py
@@ -2560,10 +2560,11 @@ class ComputeManagerUnitTestCase(test.NoDBTestCase,
self.assertFalse(mock_get_info.called)
self.assertFalse(mock_sync_power_state.called)
+ @mock.patch('nova.compute.resource_tracker.ResourceTracker.instance_claim')
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_not_found_driver(
- self, mock_sync_power_state):
+ self, mock_sync_power_state, mock_claim):
error = exception.InstanceNotFound(instance_id=1)
with mock.patch.object(self.compute.driver,
'get_info', side_effect=error) as mock_get_info:
@@ -6568,6 +6569,8 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
fake_rt = fake_resource_tracker.FakeResourceTracker(self.compute.host,
self.compute.driver)
self.compute.rt = fake_rt
+ self.compute.driver._set_nodes([self.node])
+ self.compute.rt.compute_nodes = {self.node: objects.ComputeNode()}
self.allocations = {
uuids.provider1: {
@@ -6857,6 +6860,7 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
mock_get_arqs.assert_called_once_with(
self.instance.uuid, only_resolved=True)
+ @mock.patch('nova.compute.resource_tracker.ResourceTracker.instance_claim')
@mock.patch.object(fake_driver.FakeDriver, 'spawn')
@mock.patch('nova.objects.Instance.save')
@mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
@@ -6868,7 +6872,7 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
@mock.patch.object(manager.ComputeManager, '_notify_about_instance_usage')
def test_spawn_called_with_accel_info(self, mock_ins_usage,
mock_ins_create, mock_dev_tag, mock_certs, mock_req_group_map,
- mock_get_allocations, mock_ins_save, mock_spawn):
+ mock_get_allocations, mock_ins_save, mock_spawn, mock_claim):
accel_info = [{'k1': 'v1', 'k2': 'v2'}]
@@ -7142,13 +7146,15 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
self.security_groups, self.block_device_mapping,
request_spec={}, host_lists=[fake_host_list])
+ @mock.patch('nova.compute.resource_tracker.ResourceTracker.instance_claim')
@mock.patch.object(manager.ComputeManager, '_shutdown_instance')
@mock.patch.object(manager.ComputeManager, '_build_networks_for_instance')
@mock.patch.object(fake_driver.FakeDriver, 'spawn')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(manager.ComputeManager, '_notify_about_instance_usage')
def test_rescheduled_exception_with_non_ascii_exception(self,
- mock_notify, mock_save, mock_spawn, mock_build, mock_shutdown):
+ mock_notify, mock_save, mock_spawn, mock_build, mock_shutdown,
+ mock_claim):
exc = exception.NovaException(u's\xe9quence')
mock_build.return_value = self.network_info
@@ -7164,7 +7170,6 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
self.accel_uuids)
mock_save.assert_has_calls([
mock.call(),
- mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
mock_notify.assert_has_calls([
@@ -7670,6 +7675,7 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
self.assertEqual(10, mock_failed.call_count)
mock_succeeded.assert_not_called()
+ @mock.patch('nova.compute.resource_tracker.ResourceTracker.instance_claim')
@mock.patch.object(manager.ComputeManager, '_shutdown_instance')
@mock.patch.object(manager.ComputeManager, '_build_networks_for_instance')
@mock.patch.object(fake_driver.FakeDriver, 'spawn')
@@ -7677,7 +7683,7 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
@mock.patch.object(manager.ComputeManager, '_notify_about_instance_usage')
def _test_instance_exception(self, exc, raised_exc,
mock_notify, mock_save, mock_spawn,
- mock_build, mock_shutdown):
+ mock_build, mock_shutdown, mock_claim):
"""This method test the instance related InstanceNotFound
and reschedule on exception errors. The test cases get from
arguments.
@@ -7700,7 +7706,6 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
mock_save.assert_has_calls([
mock.call(),
- mock.call(),
mock.call(expected_task_state='block_device_mapping')])
mock_notify.assert_has_calls([
mock.call(self.context, self.instance, 'create.start',
@@ -7811,11 +7816,12 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
'_shutdown_instance'),
mock.patch.object(self.compute,
'_validate_instance_group_policy'),
+ mock.patch.object(self.compute.rt, 'instance_claim'),
mock.patch('nova.compute.utils.notify_about_instance_create')
) as (spawn, save,
_build_networks_for_instance, _notify_about_instance_usage,
_shutdown_instance, _validate_instance_group_policy,
- mock_notify):
+ mock_claim, mock_notify):
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance, self.context,
@@ -7846,7 +7852,6 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
save.assert_has_calls([
mock.call(),
- mock.call(),
mock.call(
expected_task_state=task_states.BLOCK_DEVICE_MAPPING)])
@@ -7908,11 +7913,12 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
request_spec={}, host_lists=[fake_host_list])
mock_nil.assert_called_once_with(self.instance)
+ @mock.patch('nova.compute.resource_tracker.ResourceTracker.instance_claim')
@mock.patch.object(manager.ComputeManager, '_build_resources')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(manager.ComputeManager, '_notify_about_instance_usage')
def test_build_resources_buildabort_reraise(self, mock_notify, mock_save,
- mock_build):
+ mock_build, mock_claim):
exc = exception.BuildAbortException(
instance_uuid=self.instance.uuid, reason='')
mock_build.side_effect = exc
@@ -7926,7 +7932,6 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
self.node, self.limits, self.filter_properties,
request_spec=[], accel_uuids=self.accel_uuids)
- mock_save.assert_called_once_with()
mock_notify.assert_has_calls([
mock.call(self.context, self.instance, 'create.start',
extra_usage_info={'image_name': self.image.get('name')}),
@@ -8581,10 +8586,11 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
ctxt, instance, req_networks)
warning_mock.assert_not_called()
+ @mock.patch('nova.compute.resource_tracker.ResourceTracker.instance_claim')
@mock.patch('nova.compute.utils.notify_about_instance_create')
@mock.patch.object(manager.ComputeManager, '_instance_update')
def test_launched_at_in_create_end_notification(self,
- mock_instance_update, mock_notify_instance_create):
+ mock_instance_update, mock_notify_instance_create, mock_claim):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
@@ -8624,6 +8630,7 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
self.flags(default_access_ip_network_name='test1')
instance = fake_instance.fake_db_instance()
+ @mock.patch.object(self.compute.rt, 'instance_claim')
@mock.patch.object(db, 'instance_update_and_get_original',
return_value=({}, instance))
@mock.patch.object(self.compute.driver, 'spawn')
@@ -8632,7 +8639,7 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
@mock.patch.object(db, 'instance_extra_update_by_uuid')
@mock.patch.object(self.compute, '_notify_about_instance_usage')
def _check_access_ip(mock_notify, mock_extra, mock_networks,
- mock_spawn, mock_db_update):
+ mock_spawn, mock_db_update, mock_claim):
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
@@ -8653,8 +8660,10 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
_check_access_ip()
+ @mock.patch('nova.compute.resource_tracker.ResourceTracker.instance_claim')
@mock.patch.object(manager.ComputeManager, '_instance_update')
- def test_create_error_on_instance_delete(self, mock_instance_update):
+ def test_create_error_on_instance_delete(self, mock_instance_update,
+ mock_claim):
def fake_notify(*args, **kwargs):
if args[2] == 'create.error':
@@ -8668,7 +8677,7 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save',
- side_effect=[None, None, None, exc]),
+ side_effect=[None, None, exc]),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_spawn, mock_networks, mock_save, mock_notify):
@@ -8697,7 +8706,8 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
mock.patch.object(
self.compute, '_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save'),
- ) as (mock_spawn, mock_networks, mock_save):
+ mock.patch.object(self.compute.rt, 'instance_claim'),
+ ) as (mock_spawn, mock_networks, mock_save, mock_claim):
self.compute._build_and_run_instance(
self.context,
self.instance, self.image, self.injected_files,
@@ -8747,7 +8757,8 @@ class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
mock.patch.object(self.instance, 'save'),
mock.patch('nova.scheduler.client.report.'
'SchedulerReportClient._get_resource_provider'),
- ) as (mock_spawn, mock_networks, mock_save, mock_get_rp):
+ mock.patch.object(self.compute.rt, 'instance_claim'),
+ ) as (mock_spawn, mock_networks, mock_save, mock_get_rp, mock_claim):
mock_get_rp.return_value = {
'uuid': uuids.rp1,
'name': 'compute1:sriov-agent:ens3'
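Editorial aside: the recurring signature changes above (each test gaining a trailing mock_claim argument) all follow the same unittest.mock rule. A minimal, self-contained sketch with stdlib targets only, not Nova code:

import unittest
from unittest import mock


class MockOrderExample(unittest.TestCase):
    # Decorators apply bottom-up: the patch closest to the function supplies
    # the first mock argument, the outermost one the last. That is why adding
    # the instance_claim patch on top appends a trailing ``mock_claim``.
    @mock.patch('os.remove')          # outermost -> last argument
    @mock.patch('os.path.exists')     # innermost -> first argument
    def test_argument_order(self, mock_exists, mock_remove):
        self.assertIsInstance(mock_exists, mock.MagicMock)
        self.assertIsInstance(mock_remove, mock.MagicMock)


if __name__ == '__main__':
    unittest.main()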
diff --git a/nova/tests/unit/compute/test_resource_tracker.py b/nova/tests/unit/compute/test_resource_tracker.py
index cd36b8987f..b6770cb5b8 100644
--- a/nova/tests/unit/compute/test_resource_tracker.py
+++ b/nova/tests/unit/compute/test_resource_tracker.py
@@ -2231,14 +2231,19 @@ class TestInstanceClaim(BaseTestCase):
self.rt.compute_nodes = {}
self.assertTrue(self.rt.disabled(_NODENAME))
- with mock.patch.object(self.instance, 'save'):
- claim = self.rt.instance_claim(mock.sentinel.ctx, self.instance,
- _NODENAME, self.allocations, None)
+ # Reset all changes to the instance to make sure that we can detect
+ # any manipulation after the failure.
+ self.instance.obj_reset_changes(recursive=True)
- self.assertEqual(self.rt.host, self.instance.host)
- self.assertEqual(self.rt.host, self.instance.launched_on)
- self.assertEqual(_NODENAME, self.instance.node)
- self.assertIsInstance(claim, claims.NopClaim)
+ with mock.patch.object(self.instance, 'save') as mock_save:
+ self.assertRaises(exc.ComputeResourcesUnavailable,
+ self.rt.instance_claim,
+ mock.sentinel.ctx, self.instance,
+ _NODENAME, self.allocations, None)
+ mock_save.assert_not_called()
+
+ # Make sure the instance was not touched by the failed claim process
+ self.assertEqual(set(), self.instance.obj_what_changed())
@mock.patch('nova.compute.utils.is_volume_backed_instance')
@mock.patch('nova.objects.MigrationList.get_in_progress_and_error')
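For readers unfamiliar with the obj_reset_changes()/obj_what_changed() pair used in the rewritten test above, a rough stand-alone sketch, assuming oslo.versionedobjects behaves here as it does under Nova's objects; FakeInstance and its single field are invented for illustration only.

from oslo_versionedobjects import base as ovo_base
from oslo_versionedobjects import fields as ovo_fields


class FakeInstance(ovo_base.VersionedObject):
    # One illustrative field; nothing to do with nova.objects.Instance.
    fields = {'host': ovo_fields.StringField(nullable=True)}


inst = FakeInstance(host='compute1')
inst.obj_reset_changes(recursive=True)       # forget the constructor writes
assert inst.obj_what_changed() == set()      # clean slate
inst.host = 'compute2'                       # any assignment marks the field
assert inst.obj_what_changed() == {'host'}   # what the test asserts against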
diff --git a/nova/tests/unit/compute/test_shelve.py b/nova/tests/unit/compute/test_shelve.py
index 0a1e3f54fc..62321bddec 100644
--- a/nova/tests/unit/compute/test_shelve.py
+++ b/nova/tests/unit/compute/test_shelve.py
@@ -646,7 +646,7 @@ class ShelveComputeManagerTestCase(test_compute.BaseTestCase):
self.compute.unshelve_instance(
self.context, instance, image=None,
- filter_properties={}, node='fake-node', request_spec=request_spec,
+ filter_properties={}, node='fakenode2', request_spec=request_spec,
accel_uuids=[])
mock_update_pci.assert_called_once_with(
@@ -700,7 +700,7 @@ class ShelveComputeManagerTestCase(test_compute.BaseTestCase):
self.assertRaises(test.TestingException,
self.compute.unshelve_instance, self.context, instance,
image=shelved_image, filter_properties={},
- node='fake-node', request_spec=fake_spec, accel_uuids=[])
+ node='fakenode2', request_spec=fake_spec, accel_uuids=[])
self.assertEqual(instance.image_ref, initial_image_ref)
@mock.patch.object(objects.InstanceList, 'get_by_filters')
diff --git a/nova/tests/unit/db/api/test_migrations.py b/nova/tests/unit/db/api/test_migrations.py
index 3b9b17aab2..7c99f2f44a 100644
--- a/nova/tests/unit/db/api/test_migrations.py
+++ b/nova/tests/unit/db/api/test_migrations.py
@@ -25,7 +25,6 @@ from unittest import mock
from alembic import command as alembic_api
from alembic import script as alembic_script
-from migrate.versioning import api as migrate_api
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy import test_fixtures
from oslo_db.sqlalchemy import test_migrations
@@ -127,47 +126,6 @@ class TestModelsSyncPostgreSQL(
FIXTURE = test_fixtures.PostgresqlOpportunisticFixture
-class NovaModelsMigrationsLegacySync(NovaModelsMigrationsSync):
- """Test that the models match the database after old migrations are run."""
-
- def db_sync(self, engine):
- # the 'nova.db.migration.db_sync' method will not use the legacy
- # sqlalchemy-migrate-based migration flow unless the database is
- # already controlled with sqlalchemy-migrate, so we need to manually
- # enable version controlling with this tool to test this code path
- repository = migration._find_migrate_repo(database='api')
- migrate_api.version_control(
- engine, repository, migration.MIGRATE_INIT_VERSION['api'])
-
- # now we can apply migrations as expected and the legacy path will be
- # followed
- super().db_sync(engine)
-
-
-class TestModelsLegacySyncSQLite(
- NovaModelsMigrationsLegacySync,
- test_fixtures.OpportunisticDBTestMixin,
- testtools.TestCase,
-):
- pass
-
-
-class TestModelsLegacySyncMySQL(
- NovaModelsMigrationsLegacySync,
- test_fixtures.OpportunisticDBTestMixin,
- testtools.TestCase,
-):
- FIXTURE = test_fixtures.MySQLOpportunisticFixture
-
-
-class TestModelsLegacySyncPostgreSQL(
- NovaModelsMigrationsLegacySync,
- test_fixtures.OpportunisticDBTestMixin,
- testtools.TestCase,
-):
- FIXTURE = test_fixtures.PostgresqlOpportunisticFixture
-
-
class NovaMigrationsWalk(
test_fixtures.OpportunisticDBTestMixin, test.NoDBTestCase,
):
@@ -180,7 +138,7 @@ class NovaMigrationsWalk(
super().setUp()
self.engine = enginefacade.writer.get_engine()
self.config = migration._find_alembic_conf('api')
- self.init_version = migration.ALEMBIC_INIT_VERSION['api']
+ self.init_version = 'd67eeaabee36'
def _migrate_up(self, connection, revision):
if revision == self.init_version: # no tests for the initial revision
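A hedged sketch of the revision-walk pattern the NovaMigrationsWalk classes rely on (names simplified, not the actual test body); only the alembic APIs shown are assumed.

from alembic import command as alembic_api
from alembic import script as alembic_script


def walk_versions(config, init_version):
    script = alembic_script.ScriptDirectory.from_config(config)
    # walk_revisions() yields newest-first, so reverse to upgrade in order.
    for revision in reversed(list(script.walk_revisions())):
        if revision.revision == init_version:
            continue  # no per-revision checks for the initial revision
        alembic_api.upgrade(config, revision.revision)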
diff --git a/nova/tests/unit/db/main/test_migrations.py b/nova/tests/unit/db/main/test_migrations.py
index e52deb262a..579888cfd2 100644
--- a/nova/tests/unit/db/main/test_migrations.py
+++ b/nova/tests/unit/db/main/test_migrations.py
@@ -30,7 +30,6 @@ from unittest import mock
from alembic import command as alembic_api
from alembic import script as alembic_script
import fixtures
-from migrate.versioning import api as migrate_api
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy import test_fixtures
from oslo_db.sqlalchemy import test_migrations
@@ -174,47 +173,6 @@ class TestModelsSyncPostgreSQL(
FIXTURE = test_fixtures.PostgresqlOpportunisticFixture
-class NovaModelsMigrationsLegacySync(NovaModelsMigrationsSync):
- """Test that the models match the database after old migrations are run."""
-
- def db_sync(self, engine):
- # the 'nova.db.migration.db_sync' method will not use the legacy
- # sqlalchemy-migrate-based migration flow unless the database is
- # already controlled with sqlalchemy-migrate, so we need to manually
- # enable version controlling with this tool to test this code path
- repository = migration._find_migrate_repo(database='main')
- migrate_api.version_control(
- engine, repository, migration.MIGRATE_INIT_VERSION['main'])
-
- # now we can apply migrations as expected and the legacy path will be
- # followed
- super().db_sync(engine)
-
-
-class TestModelsLegacySyncSQLite(
- NovaModelsMigrationsLegacySync,
- test_fixtures.OpportunisticDBTestMixin,
- base.BaseTestCase,
-):
- pass
-
-
-class TestModelsLegacySyncMySQL(
- NovaModelsMigrationsLegacySync,
- test_fixtures.OpportunisticDBTestMixin,
- base.BaseTestCase,
-):
- FIXTURE = test_fixtures.MySQLOpportunisticFixture
-
-
-class TestModelsLegacySyncPostgreSQL(
- NovaModelsMigrationsLegacySync,
- test_fixtures.OpportunisticDBTestMixin,
- base.BaseTestCase,
-):
- FIXTURE = test_fixtures.PostgresqlOpportunisticFixture
-
-
class NovaMigrationsWalk(
test_fixtures.OpportunisticDBTestMixin, test.NoDBTestCase,
):
@@ -227,7 +185,7 @@ class NovaMigrationsWalk(
super().setUp()
self.engine = enginefacade.writer.get_engine()
self.config = migration._find_alembic_conf('main')
- self.init_version = migration.ALEMBIC_INIT_VERSION['main']
+ self.init_version = '8f2f1571d55b'
def assertIndexExists(self, connection, table_name, index):
self.assertTrue(
diff --git a/nova/tests/unit/db/test_migration.py b/nova/tests/unit/db/test_migration.py
index ca86f6347c..17a099a8cc 100644
--- a/nova/tests/unit/db/test_migration.py
+++ b/nova/tests/unit/db/test_migration.py
@@ -12,14 +12,10 @@
# License for the specific language governing permissions and limitations
# under the License.
-import glob
-import os
from unittest import mock
import urllib
from alembic.runtime import migration as alembic_migration
-from migrate import exceptions as migrate_exceptions
-from migrate.versioning import api as migrate_api
from nova.db.api import api as api_db_api
from nova.db.main import api as main_db_api
@@ -68,17 +64,9 @@ class TestDBSync(test.NoDBTestCase):
migration.db_sync, '402')
@mock.patch.object(migration, '_upgrade_alembic')
- @mock.patch.object(migration, '_init_alembic_on_legacy_database')
- @mock.patch.object(migration, '_is_database_under_alembic_control')
- @mock.patch.object(migration, '_is_database_under_migrate_control')
@mock.patch.object(migration, '_find_alembic_conf')
- @mock.patch.object(migration, '_find_migrate_repo')
@mock.patch.object(migration, '_get_engine')
- def _test_db_sync(
- self, has_migrate, has_alembic, mock_get_engine, mock_find_repo,
- mock_find_conf, mock_is_migrate, mock_is_alembic, mock_init,
- mock_upgrade,
- ):
+ def test_db_sync(self, mock_get_engine, mock_find_conf, mock_upgrade):
# return an encoded URL to mimic sqlalchemy
mock_get_engine.return_value.url = (
@@ -86,13 +74,10 @@ class TestDBSync(test.NoDBTestCase):
'read_default_file=%2Fetc%2Fmy.cnf.d%2Fnova.cnf'
'&read_default_group=nova'
)
- mock_is_migrate.return_value = has_migrate
- mock_is_alembic.return_value = has_alembic
migration.db_sync()
mock_get_engine.assert_called_once_with('main', context=None)
- mock_find_repo.assert_called_once_with('main')
mock_find_conf.assert_called_once_with('main')
mock_find_conf.return_value.set_main_option.assert_called_once_with(
'sqlalchemy.url',
@@ -100,93 +85,25 @@ class TestDBSync(test.NoDBTestCase):
'read_default_file=%%2Fetc%%2Fmy.cnf.d%%2Fnova.cnf' # ...
'&read_default_group=nova'
)
- mock_is_migrate.assert_called_once_with(
- mock_get_engine.return_value, mock_find_repo.return_value)
-
- if has_migrate:
- mock_is_alembic.assert_called_once_with(
- mock_get_engine.return_value)
- else:
- mock_is_alembic.assert_not_called()
-
- # we should only attempt the upgrade of the remaining
- # sqlalchemy-migrate-based migrations and fake apply of the initial
- # alembic migrations if sqlalchemy-migrate is in place but alembic
- # hasn't been used yet
- if has_migrate and not has_alembic:
- mock_init.assert_called_once_with(
- mock_get_engine.return_value, 'main',
- mock_find_repo.return_value, mock_find_conf.return_value)
- else:
- mock_init.assert_not_called()
- # however, we should always attempt to upgrade the requested migration
- # to alembic
mock_upgrade.assert_called_once_with(
- mock_get_engine.return_value, mock_find_conf.return_value, None)
-
- def test_db_sync_new_deployment(self):
- """Mimic a new deployment without existing sqlalchemy-migrate cruft."""
- has_migrate = False
- has_alembic = False
- self._test_db_sync(has_migrate, has_alembic)
-
- def test_db_sync_with_existing_migrate_database(self):
- """Mimic a deployment currently managed by sqlalchemy-migrate."""
- has_migrate = True
- has_alembic = False
- self._test_db_sync(has_migrate, has_alembic)
-
- def test_db_sync_with_existing_alembic_database(self):
- """Mimic a deployment that's already switched to alembic."""
- has_migrate = True
- has_alembic = True
- self._test_db_sync(has_migrate, has_alembic)
+ mock_get_engine.return_value, mock_find_conf.return_value, None,
+ )
@mock.patch.object(alembic_migration.MigrationContext, 'configure')
-@mock.patch.object(migrate_api, 'db_version')
-@mock.patch.object(migration, '_is_database_under_alembic_control')
-@mock.patch.object(migration, '_is_database_under_migrate_control')
@mock.patch.object(migration, '_get_engine')
-@mock.patch.object(migration, '_find_migrate_repo')
class TestDBVersion(test.NoDBTestCase):
def test_db_version_invalid_database(
- self, mock_find_repo, mock_get_engine, mock_is_migrate,
- mock_is_alembic, mock_migrate_version, mock_m_context_configure,
+ self, mock_get_engine, mock_m_context_configure,
):
"""We only have two databases."""
self.assertRaises(
exception.Invalid, migration.db_version, database='invalid')
- def test_db_version_migrate(
- self, mock_find_repo, mock_get_engine, mock_is_migrate,
- mock_is_alembic, mock_migrate_version, mock_m_context_configure,
- ):
- """Database is controlled by sqlalchemy-migrate."""
- mock_is_migrate.return_value = True
- mock_is_alembic.return_value = False
-
- ret = migration.db_version('main')
- self.assertEqual(mock_migrate_version.return_value, ret)
-
- mock_find_repo.assert_called_once_with('main')
- mock_get_engine.assert_called_once_with('main', context=None)
- mock_is_migrate.assert_called_once()
- mock_is_alembic.assert_called_once()
- mock_migrate_version.assert_called_once_with(
- mock_get_engine.return_value, mock_find_repo.return_value)
- mock_m_context_configure.assert_not_called()
-
- def test_db_version_alembic(
- self, mock_find_repo, mock_get_engine, mock_is_migrate,
- mock_is_alembic, mock_migrate_version, mock_m_context_configure,
- ):
+ def test_db_version(self, mock_get_engine, mock_m_context_configure):
"""Database is controlled by alembic."""
- mock_is_migrate.return_value = False
- mock_is_alembic.return_value = True
-
ret = migration.db_version('main')
mock_m_context = mock_m_context_configure.return_value
self.assertEqual(
@@ -194,31 +111,9 @@ class TestDBVersion(test.NoDBTestCase):
ret
)
- mock_find_repo.assert_called_once_with('main')
mock_get_engine.assert_called_once_with('main', context=None)
- mock_is_migrate.assert_called_once()
- mock_is_alembic.assert_called_once()
- mock_migrate_version.assert_not_called()
mock_m_context_configure.assert_called_once()
- def test_db_version_not_controlled(
- self, mock_find_repo, mock_get_engine, mock_is_migrate,
- mock_is_alembic, mock_migrate_version, mock_m_context_configure,
- ):
- """Database is not controlled."""
- mock_is_migrate.return_value = False
- mock_is_alembic.return_value = False
-
- ret = migration.db_version()
- self.assertIsNone(ret)
-
- mock_find_repo.assert_called_once_with('main')
- mock_get_engine.assert_called_once_with('main', context=None)
- mock_is_migrate.assert_called_once()
- mock_is_alembic.assert_called_once()
- mock_migrate_version.assert_not_called()
- mock_m_context_configure.assert_not_called()
-
class TestGetEngine(test.NoDBTestCase):
@@ -237,77 +132,3 @@ class TestGetEngine(test.NoDBTestCase):
engine = migration._get_engine('api')
self.assertEqual('engine', engine)
mock_get_engine.assert_called_once_with()
-
-
-class TestDatabaseUnderVersionControl(test.NoDBTestCase):
-
- @mock.patch.object(migrate_api, 'db_version')
- def test__is_database_under_migrate_control__true(self, mock_db_version):
- ret = migration._is_database_under_migrate_control('engine', 'repo')
- self.assertTrue(ret)
-
- mock_db_version.assert_called_once_with('engine', 'repo')
-
- @mock.patch.object(migrate_api, 'db_version')
- def test__is_database_under_migrate_control__false(self, mock_db_version):
- mock_db_version.side_effect = \
- migrate_exceptions.DatabaseNotControlledError()
-
- ret = migration._is_database_under_migrate_control('engine', 'repo')
- self.assertFalse(ret)
-
- mock_db_version.assert_called_once_with('engine', 'repo')
-
- @mock.patch.object(alembic_migration.MigrationContext, 'configure')
- def test__is_database_under_alembic_control__true(self, mock_configure):
- context = mock_configure.return_value
- context.get_current_revision.return_value = 'foo'
- engine = mock.MagicMock()
-
- ret = migration._is_database_under_alembic_control(engine)
- self.assertTrue(ret)
-
- context.get_current_revision.assert_called_once_with()
-
- @mock.patch.object(alembic_migration.MigrationContext, 'configure')
- def test__is_database_under_alembic_control__false(self, mock_configure):
- context = mock_configure.return_value
- context.get_current_revision.return_value = None
- engine = mock.MagicMock()
-
- ret = migration._is_database_under_alembic_control(engine)
- self.assertFalse(ret)
-
- context.get_current_revision.assert_called_once_with()
-
-
-class ProjectTestCase(test.NoDBTestCase):
-
- def test_no_migrations_have_downgrade(self):
- topdir = os.path.normpath(os.path.dirname(__file__) + '/../../../')
- # Walk both the nova_api and nova (cell) database migrations.
- includes_downgrade = []
- for directory in (
- os.path.join(topdir, 'db', 'main', 'legacy_migrations'),
- os.path.join(topdir, 'db', 'api', 'legacy_migrations'),
- ):
- py_glob = os.path.join(directory, 'versions', '*.py')
- for path in glob.iglob(py_glob):
- has_upgrade = False
- has_downgrade = False
- with open(path, "r") as f:
- for line in f:
- if 'def upgrade(' in line:
- has_upgrade = True
- if 'def downgrade(' in line:
- has_downgrade = True
-
- if has_upgrade and has_downgrade:
- fname = os.path.basename(path)
- includes_downgrade.append(fname)
-
- helpful_msg = (
- "The following migrations have a downgrade "
- "which is not supported:"
- "\n\t%s" % '\n\t'.join(sorted(includes_downgrade)))
- self.assertFalse(includes_downgrade, helpful_msg)
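For reference, the alembic-only lookup that db_version() now reduces to can be sketched as follows; the in-memory SQLite engine is just for illustration.

from alembic.runtime import migration as alembic_migration
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
with engine.connect() as connection:
    context = alembic_migration.MigrationContext.configure(connection)
    # None until the database has been stamped or migrated by alembic.
    print(context.get_current_revision())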
diff --git a/nova/tests/unit/scheduler/test_manager.py b/nova/tests/unit/scheduler/test_manager.py
index e7866069b3..e992fe6034 100644
--- a/nova/tests/unit/scheduler/test_manager.py
+++ b/nova/tests/unit/scheduler/test_manager.py
@@ -19,6 +19,7 @@ Tests For Scheduler
from unittest import mock
+from keystoneauth1 import exceptions as ks_exc
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from oslo_utils.fixture import uuidsentinel as uuids
@@ -1688,6 +1689,41 @@ class SchedulerManagerTestCase(test.NoDBTestCase):
mock_log_warning.assert_not_called()
mock_log_debug.assert_called_once_with(msg)
+ @mock.patch('nova.scheduler.client.report.report_client_singleton')
+ @mock.patch.object(manager, 'LOG')
+ @mock.patch('nova.scheduler.host_manager.HostManager')
+ @mock.patch('nova.servicegroup.API')
+ @mock.patch('nova.rpc.get_notifier')
+ def test_init_lazy_placement_client(self, mock_rpc, mock_sg, mock_hm,
+ mock_log, mock_report):
+ # Simulate keystone or placement being offline at startup
+ mock_report.side_effect = ks_exc.RequestTimeout
+ mgr = manager.SchedulerManager()
+ mock_report.assert_called_once_with()
+ self.assertTrue(mock_log.warning.called)
+
+ # Make sure we're raising the actual error to subsequent callers
+ self.assertRaises(ks_exc.RequestTimeout, lambda: mgr.placement_client)
+
+ # Simulate recovery of the keystone or placement service
+ mock_report.reset_mock(side_effect=True)
+ mgr.placement_client
+ mock_report.assert_called_once_with()
+
+ @mock.patch('nova.scheduler.client.report.report_client_singleton')
+ @mock.patch('nova.scheduler.host_manager.HostManager')
+ @mock.patch('nova.servicegroup.API')
+ @mock.patch('nova.rpc.get_notifier')
+ def test_init_lazy_placement_client_failures(self, mock_rpc, mock_sg,
+ mock_hm, mock_report):
+ # Certain keystoneclient exceptions are fatal
+ mock_report.side_effect = ks_exc.Unauthorized
+ self.assertRaises(ks_exc.Unauthorized, manager.SchedulerManager)
+
+ # Anything else is fatal
+ mock_report.side_effect = test.TestingException
+ self.assertRaises(test.TestingException, manager.SchedulerManager)
+
class SchedulerManagerAllocationCandidateTestCase(test.NoDBTestCase):
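The two new tests above exercise a lazy-initialization pattern; a rough sketch of the idea (not the actual SchedulerManager code), under the assumption that only RequestTimeout is treated as retryable at startup:

from keystoneauth1 import exceptions as ks_exc


class LazyPlacementMixin:
    """Illustrative only: defer placement client creation when keystone or
    placement is temporarily unreachable at service startup."""

    def __init__(self, client_factory):
        self._client_factory = client_factory   # e.g. a singleton constructor
        self._placement_client = None
        try:
            self._placement_client = client_factory()
        except ks_exc.RequestTimeout:
            # Retryable: the service may come up later; keep going.
            pass
        # Unauthorized (and anything unexpected) propagates and stays fatal.

    @property
    def placement_client(self):
        if self._placement_client is None:
            # Retry, re-raising the real error if placement is still broken.
            self._placement_client = self._client_factory()
        return self._placement_client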
diff --git a/nova/virt/hyperv/driver.py b/nova/virt/hyperv/driver.py
index 1291f975ad..ba18c85cf7 100644
--- a/nova/virt/hyperv/driver.py
+++ b/nova/virt/hyperv/driver.py
@@ -146,6 +146,14 @@ class HyperVDriver(driver.ComputeDriver):
'in Rocky.')
def init_host(self, host):
+ LOG.warning(
+ 'The hyperv driver is not tested by the OpenStack project nor '
'does it have clear maintainer(s) and thus its quality cannot be '
+ 'ensured. It should be considered experimental and may be removed '
+ 'in a future release. If you are using the driver in production '
+ 'please let us know via the openstack-discuss mailing list.'
+ )
+
self._serialconsoleops.start_console_handlers()
event_handler = eventhandler.InstanceEventHandler(
state_change_callback=self.emit_event)
diff --git a/nova/virt/libvirt/driver.py b/nova/virt/libvirt/driver.py
index 73134d8391..fe48960296 100644
--- a/nova/virt/libvirt/driver.py
+++ b/nova/virt/libvirt/driver.py
@@ -7636,7 +7636,7 @@ class LibvirtDriver(driver.ComputeDriver):
instance: 'objects.Instance',
power_on: bool = True,
pause: bool = False,
- post_xml_callback: ty.Callable = None,
+ post_xml_callback: ty.Optional[ty.Callable] = None,
) -> libvirt_guest.Guest:
"""Create a Guest from XML.
@@ -7697,7 +7697,7 @@ class LibvirtDriver(driver.ComputeDriver):
block_device_info: ty.Optional[ty.Dict[str, ty.Any]],
power_on: bool = True,
vifs_already_plugged: bool = False,
- post_xml_callback: ty.Callable = None,
+ post_xml_callback: ty.Optional[ty.Callable] = None,
external_events: ty.Optional[ty.List[ty.Tuple[str, str]]] = None,
cleanup_instance_dir: bool = False,
cleanup_instance_disks: bool = False,
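The annotation changes in this file and the two below follow from the same rule: with implicit Optional disabled (now the default in mypy), a bare None default no longer implies Optional. A minimal before/after sketch:

import typing as ty


def before(cb: ty.Callable = None) -> None:      # flagged by strict checkers
    if cb is not None:
        cb()


def after(cb: ty.Optional[ty.Callable] = None) -> None:  # accepted
    if cb is not None:
        cb()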
diff --git a/nova/virt/libvirt/event.py b/nova/virt/libvirt/event.py
index a7d2a3624f..56951dc11c 100644
--- a/nova/virt/libvirt/event.py
+++ b/nova/virt/libvirt/event.py
@@ -9,6 +9,8 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+import typing as ty
+
from nova.virt import event
@@ -22,7 +24,10 @@ class LibvirtEvent(event.InstanceEvent):
class DeviceEvent(LibvirtEvent):
"""Base class for device related libvirt events"""
- def __init__(self, uuid: str, dev: str, timestamp: float = None):
+ def __init__(self,
+ uuid: str,
+ dev: str,
+ timestamp: ty.Optional[float] = None):
super().__init__(uuid, timestamp)
self.dev = dev
diff --git a/nova/virt/libvirt/utils.py b/nova/virt/libvirt/utils.py
index adb2ec45a1..e1298ee5c8 100644
--- a/nova/virt/libvirt/utils.py
+++ b/nova/virt/libvirt/utils.py
@@ -261,8 +261,8 @@ def copy_image(
dest: str,
host: ty.Optional[str] = None,
receive: bool = False,
- on_execute: ty.Callable = None,
- on_completion: ty.Callable = None,
+ on_execute: ty.Optional[ty.Callable] = None,
+ on_completion: ty.Optional[ty.Callable] = None,
compression: bool = True,
) -> None:
"""Copy a disk image to an existing directory
@@ -639,7 +639,7 @@ def mdev_name2uuid(mdev_name: str) -> str:
return str(uuid.UUID(mdev_uuid))
-def mdev_uuid2name(mdev_uuid: str, parent: str = None) -> str:
+def mdev_uuid2name(mdev_uuid: str, parent: ty.Optional[str] = None) -> str:
"""Convert an mdev uuid (of the form 8-4-4-4-12) and optionally its parent
device to a name (of the form mdev_<uuid_with_underscores>[_<pciid>]).
diff --git a/releasenotes/notes/hyperv-experimental-antelope-372e18a05cafc295.yaml b/releasenotes/notes/hyperv-experimental-antelope-372e18a05cafc295.yaml
new file mode 100644
index 0000000000..85b874fb69
--- /dev/null
+++ b/releasenotes/notes/hyperv-experimental-antelope-372e18a05cafc295.yaml
@@ -0,0 +1,6 @@
+---
+deprecations:
+ - |
+ The hyperv driver is marked as experimental and may be removed in a
+ future release. The driver is not tested by the OpenStack project and
+ does not have a clear maintainer.
diff --git a/releasenotes/notes/remove-sqlalchemy-migrate-907c200314884d81.yaml b/releasenotes/notes/remove-sqlalchemy-migrate-907c200314884d81.yaml
new file mode 100644
index 0000000000..c08080a806
--- /dev/null
+++ b/releasenotes/notes/remove-sqlalchemy-migrate-907c200314884d81.yaml
@@ -0,0 +1,5 @@
+---
+upgrade:
+ - |
+ The legacy ``sqlalchemy-migrate`` migrations, which have been deprecated
+ since Wallaby, have been removed. There should be no end-user impact.
diff --git a/requirements.txt b/requirements.txt
index 9954d06bc9..e885a4a66f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -16,7 +16,6 @@ greenlet>=0.4.15 # MIT
PasteDeploy>=1.5.0 # MIT
Paste>=2.0.2 # MIT
PrettyTable>=0.7.1 # BSD
-sqlalchemy-migrate>=0.13.0 # Apache-2.0
alembic>=1.5.0 # MIT
netaddr>=0.7.18 # BSD
netifaces>=0.10.4 # MIT
diff --git a/tox.ini b/tox.ini
index 097edbe827..77c0d9b9d3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -42,7 +42,7 @@ commands =
env TEST_OSPROFILER=1 stestr run --combine --no-discover 'nova.tests.unit.test_profiler'
stestr slowest
-[testenv:functional{,-py38,-py39,-py310}]
+[testenv:functional{,-py38,-py39,-py310,-py311}]
description =
Run functional tests.
# As nova functional tests import the PlacementFixture from the placement