Diffstat (limited to 'ironic/tests/unit')
-rw-r--r--  ironic/tests/unit/__init__.py  34
-rw-r--r--  ironic/tests/unit/api/__init__.py  0
-rw-r--r--  ironic/tests/unit/api/base.py  240
-rw-r--r--  ironic/tests/unit/api/test_acl.py  99
-rw-r--r--  ironic/tests/unit/api/test_base.py  119
-rw-r--r--  ironic/tests/unit/api/test_hooks.py  303
-rw-r--r--  ironic/tests/unit/api/test_root.py  43
-rw-r--r--  ironic/tests/unit/api/utils.py  122
-rw-r--r--  ironic/tests/unit/api/v1/__init__.py  0
-rw-r--r--  ironic/tests/unit/api/v1/test_chassis.py  481
-rw-r--r--  ironic/tests/unit/api/v1/test_drivers.py  338
-rw-r--r--  ironic/tests/unit/api/v1/test_nodes.py  2260
-rw-r--r--  ironic/tests/unit/api/v1/test_ports.py  806
-rw-r--r--  ironic/tests/unit/api/v1/test_root.py  72
-rw-r--r--  ironic/tests/unit/api/v1/test_types.py  279
-rw-r--r--  ironic/tests/unit/api/v1/test_utils.py  263
-rw-r--r--  ironic/tests/unit/api/v1/test_versions.py  69
-rw-r--r--  ironic/tests/unit/base.py  147
-rw-r--r--  ironic/tests/unit/cmd/__init__.py  0
-rw-r--r--  ironic/tests/unit/cmd/test_dbsync.py  27
-rw-r--r--  ironic/tests/unit/common/__init__.py  0
-rw-r--r--  ironic/tests/unit/common/test_disk_partitioner.py  198
-rw-r--r--  ironic/tests/unit/common/test_driver_factory.py  64
-rw-r--r--  ironic/tests/unit/common/test_exception.py  30
-rw-r--r--  ironic/tests/unit/common/test_fsm.py  29
-rw-r--r--  ironic/tests/unit/common/test_glance_service.py  860
-rw-r--r--  ironic/tests/unit/common/test_hash_ring.py  251
-rw-r--r--  ironic/tests/unit/common/test_image_service.py  332
-rw-r--r--  ironic/tests/unit/common/test_images.py  872
-rw-r--r--  ironic/tests/unit/common/test_keystone.py  185
-rw-r--r--  ironic/tests/unit/common/test_network.py  64
-rw-r--r--  ironic/tests/unit/common/test_policy.py  74
-rw-r--r--  ironic/tests/unit/common/test_pxe_utils.py  498
-rw-r--r--  ironic/tests/unit/common/test_raid.py  231
-rw-r--r--  ironic/tests/unit/common/test_states.py  40
-rw-r--r--  ironic/tests/unit/common/test_swift.py  155
-rw-r--r--  ironic/tests/unit/common/test_utils.py  687
-rw-r--r--  ironic/tests/unit/conductor/__init__.py  0
-rw-r--r--  ironic/tests/unit/conductor/test_conductor_utils.py  335
-rw-r--r--  ironic/tests/unit/conductor/test_manager.py  4573
-rw-r--r--  ironic/tests/unit/conductor/test_rpcapi.py  344
-rw-r--r--  ironic/tests/unit/conductor/test_task_manager.py  653
-rw-r--r--  ironic/tests/unit/conductor/test_utils.py  50
-rw-r--r--  ironic/tests/unit/conductor/utils.py  57
-rw-r--r--  ironic/tests/unit/conf_fixture.py  40
-rw-r--r--  ironic/tests/unit/db/__init__.py  16
-rw-r--r--  ironic/tests/unit/db/base.py  102
-rw-r--r--  ironic/tests/unit/db/sqlalchemy/__init__.py  0
-rw-r--r--  ironic/tests/unit/db/sqlalchemy/test_migrations.py  455
-rw-r--r--  ironic/tests/unit/db/sqlalchemy/test_types.py  79
-rw-r--r--  ironic/tests/unit/db/test_chassis.py  89
-rw-r--r--  ironic/tests/unit/db/test_conductor.py  219
-rw-r--r--  ironic/tests/unit/db/test_nodes.py  536
-rw-r--r--  ironic/tests/unit/db/test_ports.py  122
-rw-r--r--  ironic/tests/unit/db/utils.py  328
-rw-r--r--  ironic/tests/unit/dhcp/__init__.py  0
-rw-r--r--  ironic/tests/unit/dhcp/test_factory.py  113
-rw-r--r--  ironic/tests/unit/dhcp/test_neutron.py  484
-rw-r--r--  ironic/tests/unit/drivers/__init__.py  21
-rw-r--r--  ironic/tests/unit/drivers/agent_pxe_config.template  5
-rw-r--r--  ironic/tests/unit/drivers/amt/__init__.py  0
-rw-r--r--  ironic/tests/unit/drivers/amt/test_common.py  173
-rw-r--r--  ironic/tests/unit/drivers/amt/test_management.py  233
-rw-r--r--  ironic/tests/unit/drivers/amt/test_power.py  282
-rw-r--r--  ironic/tests/unit/drivers/amt/test_vendor.py  132
-rw-r--r--  ironic/tests/unit/drivers/cimc/__init__.py  0
-rw-r--r--  ironic/tests/unit/drivers/cimc/test_common.py  125
-rw-r--r--  ironic/tests/unit/drivers/cimc/test_management.py  126
-rw-r--r--  ironic/tests/unit/drivers/cimc/test_power.py  302
-rw-r--r--  ironic/tests/unit/drivers/drac/__init__.py  0
-rw-r--r--  ironic/tests/unit/drivers/drac/bios_wsman_mock.py  273
-rw-r--r--  ironic/tests/unit/drivers/drac/test_bios.py  199
-rw-r--r--  ironic/tests/unit/drivers/drac/test_client.py  256
-rw-r--r--  ironic/tests/unit/drivers/drac/test_common.py  135
-rw-r--r--  ironic/tests/unit/drivers/drac/test_management.py  461
-rw-r--r--  ironic/tests/unit/drivers/drac/test_power.py  175
-rw-r--r--  ironic/tests/unit/drivers/drac/utils.py  72
-rw-r--r--  ironic/tests/unit/drivers/elilo_efi_pxe_config.template  16
-rw-r--r--  ironic/tests/unit/drivers/ilo/__init__.py  0
-rw-r--r--  ironic/tests/unit/drivers/ilo/test_common.py  675
-rw-r--r--  ironic/tests/unit/drivers/ilo/test_deploy.py  1860
-rw-r--r--  ironic/tests/unit/drivers/ilo/test_inspect.py  365
-rw-r--r--  ironic/tests/unit/drivers/ilo/test_management.py  298
-rw-r--r--  ironic/tests/unit/drivers/ilo/test_power.py  231
-rw-r--r--  ironic/tests/unit/drivers/ipxe_config.template  21
-rw-r--r--  ironic/tests/unit/drivers/irmc/__init__.py  0
-rw-r--r--  ironic/tests/unit/drivers/irmc/fake_sensors_data_ng.xml  156
-rw-r--r--  ironic/tests/unit/drivers/irmc/fake_sensors_data_ok.xml  156
-rw-r--r--  ironic/tests/unit/drivers/irmc/test_common.py  168
-rw-r--r--  ironic/tests/unit/drivers/irmc/test_deploy.py  1536
-rw-r--r--  ironic/tests/unit/drivers/irmc/test_management.py  302
-rw-r--r--  ironic/tests/unit/drivers/irmc/test_power.py  224
-rw-r--r--  ironic/tests/unit/drivers/msftocs/__init__.py  0
-rw-r--r--  ironic/tests/unit/drivers/msftocs/test_common.py  110
-rw-r--r--  ironic/tests/unit/drivers/msftocs/test_management.py  133
-rw-r--r--  ironic/tests/unit/drivers/msftocs/test_msftocsclient.py  182
-rw-r--r--  ironic/tests/unit/drivers/msftocs/test_power.py  164
-rw-r--r--  ironic/tests/unit/drivers/pxe_config.template  20
-rw-r--r--  ironic/tests/unit/drivers/pxe_grub_config.template  18
-rw-r--r--  ironic/tests/unit/drivers/test_agent.py  805
-rw-r--r--  ironic/tests/unit/drivers/test_agent_base_vendor.py  948
-rw-r--r--  ironic/tests/unit/drivers/test_agent_client.py  220
-rw-r--r--  ironic/tests/unit/drivers/test_base.py  247
-rw-r--r--  ironic/tests/unit/drivers/test_console_utils.py  348
-rw-r--r--  ironic/tests/unit/drivers/test_deploy_utils.py  2284
-rw-r--r--  ironic/tests/unit/drivers/test_fake.py  116
-rw-r--r--  ironic/tests/unit/drivers/test_iboot.py  384
-rw-r--r--  ironic/tests/unit/drivers/test_image_cache.py  697
-rw-r--r--  ironic/tests/unit/drivers/test_inspector.py  239
-rw-r--r--  ironic/tests/unit/drivers/test_ipminative.py  610
-rw-r--r--  ironic/tests/unit/drivers/test_ipmitool.py  1899
-rw-r--r--  ironic/tests/unit/drivers/test_irmc.py  104
-rw-r--r--  ironic/tests/unit/drivers/test_iscsi_deploy.py  1402
-rw-r--r--  ironic/tests/unit/drivers/test_pxe.py  918
-rw-r--r--  ironic/tests/unit/drivers/test_seamicro.py  676
-rw-r--r--  ironic/tests/unit/drivers/test_snmp.py  1263
-rw-r--r--  ironic/tests/unit/drivers/test_ssh.py  975
-rw-r--r--  ironic/tests/unit/drivers/test_utils.py  165
-rw-r--r--  ironic/tests/unit/drivers/test_virtualbox.py  374
-rw-r--r--  ironic/tests/unit/drivers/test_wol.py  194
-rw-r--r--  ironic/tests/unit/drivers/third_party_driver_mock_specs.py  118
-rw-r--r--  ironic/tests/unit/drivers/third_party_driver_mocks.py  244
-rw-r--r--  ironic/tests/unit/drivers/ucs/__init__.py  0
-rw-r--r--  ironic/tests/unit/drivers/ucs/test_helper.py  161
-rw-r--r--  ironic/tests/unit/drivers/ucs/test_management.py  139
-rw-r--r--  ironic/tests/unit/drivers/ucs/test_power.py  302
-rw-r--r--  ironic/tests/unit/fake_policy.py  42
-rw-r--r--  ironic/tests/unit/objects/__init__.py  0
-rw-r--r--  ironic/tests/unit/objects/test_chassis.py  104
-rw-r--r--  ironic/tests/unit/objects/test_conductor.py  87
-rw-r--r--  ironic/tests/unit/objects/test_fields.py  63
-rw-r--r--  ironic/tests/unit/objects/test_node.py  151
-rw-r--r--  ironic/tests/unit/objects/test_objects.py  487
-rw-r--r--  ironic/tests/unit/objects/test_port.py  114
-rw-r--r--  ironic/tests/unit/objects/utils.py  99
-rw-r--r--  ironic/tests/unit/policy_fixture.py  41
-rw-r--r--  ironic/tests/unit/raid_constants.py  298
-rw-r--r--  ironic/tests/unit/stubs.py  116
138 files changed, 45906 insertions, 0 deletions
diff --git a/ironic/tests/unit/__init__.py b/ironic/tests/unit/__init__.py
new file mode 100644
index 000000000..c3b5d1359
--- /dev/null
+++ b/ironic/tests/unit/__init__.py
@@ -0,0 +1,34 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+:mod:`ironic.tests.unit` -- ironic unit tests
+=============================================
+
+.. automodule:: ironic.tests.unit
+ :platform: Unix
+"""
+
+# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
+
+import eventlet
+
+eventlet.monkey_patch(os=False)
+
+# See http://code.google.com/p/python-nose/issues/detail?id=373
+# The code below enables nosetests to work with i18n _() blocks
+import six.moves.builtins as __builtin__
+setattr(__builtin__, '_', lambda x: x)
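
Aside: a minimal sketch of what the i18n stub above buys the test runner (standalone snippet, not part of this commit; the message text is made up). With `_` installed as a builtin identity function, modules that wrap strings in `_()` import and run without any translation setup:

    import six.moves.builtins as __builtin__
    setattr(__builtin__, '_', lambda x: x)

    # translation is now the identity, so messages pass through untouched
    assert _('deploy failed') == 'deploy failed'
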
diff --git a/ironic/tests/unit/api/__init__.py b/ironic/tests/unit/api/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/api/__init__.py
diff --git a/ironic/tests/unit/api/base.py b/ironic/tests/unit/api/base.py
new file mode 100644
index 000000000..80d2ebb5a
--- /dev/null
+++ b/ironic/tests/unit/api/base.py
@@ -0,0 +1,240 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""Base classes for API tests."""
+
+# NOTE: Ported from ceilometer/tests/api.py (subsequently moved to
+# ceilometer/tests/api/__init__.py). This should be oslo'ified:
+# https://bugs.launchpad.net/ironic/+bug/1255115.
+
+import mock
+from oslo_config import cfg
+import pecan
+import pecan.testing
+from six.moves.urllib import parse as urlparse
+
+from ironic.tests.unit.db import base
+
+PATH_PREFIX = '/v1'
+
+cfg.CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token')
+
+
+class FunctionalTest(base.DbTestCase):
+ """Pecan controller functional testing class.
+
+ Used for functional tests of Pecan controllers where you need to
+ test your literal application and its integration with the
+ framework.
+ """
+
+ SOURCE_DATA = {'test_source': {'somekey': '666'}}
+
+ def setUp(self):
+ super(FunctionalTest, self).setUp()
+ cfg.CONF.set_override("auth_version", "v2.0",
+ group='keystone_authtoken')
+ cfg.CONF.set_override("admin_user", "admin",
+ group='keystone_authtoken')
+ self.app = self._make_app()
+
+ def reset_pecan():
+ pecan.set_config({}, overwrite=True)
+
+ self.addCleanup(reset_pecan)
+
+ p = mock.patch('ironic.api.controllers.v1.Controller._check_version')
+ self._check_version = p.start()
+ self.addCleanup(p.stop)
+
+ def _make_app(self, enable_acl=False):
+ # Determine where we are so we can set up paths in the config
+ root_dir = self.path_get()
+
+ self.config = {
+ 'app': {
+ 'root': 'ironic.api.controllers.root.RootController',
+ 'modules': ['ironic.api'],
+ 'static_root': '%s/public' % root_dir,
+ 'template_path': '%s/api/templates' % root_dir,
+ 'enable_acl': enable_acl,
+ 'acl_public_routes': ['/', '/v1'],
+ },
+ }
+
+ return pecan.testing.load_test_app(self.config)
+
+ def _request_json(self, path, params, expect_errors=False, headers=None,
+ method="post", extra_environ=None, status=None,
+ path_prefix=PATH_PREFIX):
+ """Sends simulated HTTP request to Pecan test app.
+
+ :param path: url path of target service
+ :param params: content for wsgi.input of request
+ :param expect_errors: Boolean value; whether an error is expected based
+ on request
+ :param headers: a dictionary of headers to send along with the request
+ :param method: HTTP request method. Callers should use the specific
+ helper (put_json, post_json, patch_json) rather than passing this in.
+ :param extra_environ: a dictionary of environ variables to send along
+ with the request
+ :param status: expected status code of response
+ :param path_prefix: prefix of the url path
+ """
+ full_path = path_prefix + path
+ print('%s: %s %s' % (method.upper(), full_path, params))
+ response = getattr(self.app, "%s_json" % method)(
+ str(full_path),
+ params=params,
+ headers=headers,
+ status=status,
+ extra_environ=extra_environ,
+ expect_errors=expect_errors
+ )
+ print('GOT:%s' % response)
+ return response
+
+ def put_json(self, path, params, expect_errors=False, headers=None,
+ extra_environ=None, status=None):
+ """Sends simulated HTTP PUT request to Pecan test app.
+
+ :param path: url path of target service
+ :param params: content for wsgi.input of request
+ :param expect_errors: Boolean value; whether an error is expected based
+ on request
+ :param headers: a dictionary of headers to send along with the request
+ :param extra_environ: a dictionary of environ variables to send along
+ with the request
+ :param status: expected status code of response
+ """
+ return self._request_json(path=path, params=params,
+ expect_errors=expect_errors,
+ headers=headers, extra_environ=extra_environ,
+ status=status, method="put")
+
+ def post_json(self, path, params, expect_errors=False, headers=None,
+ extra_environ=None, status=None):
+ """Sends simulated HTTP POST request to Pecan test app.
+
+ :param path: url path of target service
+ :param params: content for wsgi.input of request
+ :param expect_errors: Boolean value; whether an error is expected based
+ on request
+ :param headers: a dictionary of headers to send along with the request
+ :param extra_environ: a dictionary of environ variables to send along
+ with the request
+ :param status: expected status code of response
+ """
+ return self._request_json(path=path, params=params,
+ expect_errors=expect_errors,
+ headers=headers, extra_environ=extra_environ,
+ status=status, method="post")
+
+ def patch_json(self, path, params, expect_errors=False, headers=None,
+ extra_environ=None, status=None):
+ """Sends simulated HTTP PATCH request to Pecan test app.
+
+ :param path: url path of target service
+ :param params: content for wsgi.input of request
+ :param expect_errors: Boolean value; whether an error is expected based
+ on request
+ :param headers: a dictionary of headers to send along with the request
+ :param extra_environ: a dictionary of environ variables to send along
+ with the request
+ :param status: expected status code of response
+ """
+ return self._request_json(path=path, params=params,
+ expect_errors=expect_errors,
+ headers=headers, extra_environ=extra_environ,
+ status=status, method="patch")
+
+ def delete(self, path, expect_errors=False, headers=None,
+ extra_environ=None, status=None, path_prefix=PATH_PREFIX):
+ """Sends simulated HTTP DELETE request to Pecan test app.
+
+ :param path: url path of target service
+ :param expect_errors: Boolean value; whether an error is expected based
+ on request
+ :param headers: a dictionary of headers to send along with the request
+ :param extra_environ: a dictionary of environ variables to send along
+ with the request
+ :param status: expected status code of response
+ :param path_prefix: prefix of the url path
+ """
+ full_path = path_prefix + path
+ print('DELETE: %s' % (full_path))
+ response = self.app.delete(str(full_path),
+ headers=headers,
+ status=status,
+ extra_environ=extra_environ,
+ expect_errors=expect_errors)
+ print('GOT:%s' % response)
+ return response
+
+ def get_json(self, path, expect_errors=False, headers=None,
+ extra_environ=None, q=[], path_prefix=PATH_PREFIX, **params):
+ """Sends simulated HTTP GET request to Pecan test app.
+
+ :param path: url path of target service
+ :param expect_errors: Boolean value; whether an error is expected based
+ on request
+ :param headers: a dictionary of headers to send along with the request
+ :param extra_environ: a dictionary of environ variables to send along
+ with the request
+ :param q: list of queries consisting of: field, value, op, and type
+ keys
+ :param path_prefix: prefix of the url path
+ :param params: content for wsgi.input of request
+ """
+ full_path = path_prefix + path
+ query_params = {'q.field': [],
+ 'q.value': [],
+ 'q.op': [],
+ }
+ for query in q:
+ for name in ['field', 'op', 'value']:
+ query_params['q.%s' % name].append(query.get(name, ''))
+ all_params = {}
+ all_params.update(params)
+ if q:
+ all_params.update(query_params)
+ print('GET: %s %r' % (full_path, all_params))
+ response = self.app.get(full_path,
+ params=all_params,
+ headers=headers,
+ extra_environ=extra_environ,
+ expect_errors=expect_errors)
+ if not expect_errors:
+ response = response.json
+ print('GOT:%s' % response)
+ return response
+
+ def validate_link(self, link, bookmark=False):
+ """Checks if the given link can get correct data."""
+ # removes the scheme and net location parts of the link
+ url_parts = list(urlparse.urlparse(link))
+ url_parts[0] = url_parts[1] = ''
+
+ # bookmark link should not have the version in the URL
+ if bookmark and url_parts[2].startswith(PATH_PREFIX):
+ return False
+
+ full_path = urlparse.urlunparse(url_parts)
+ try:
+ self.get_json(full_path, path_prefix='')
+ return True
+ except Exception:
+ return False
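
Aside: a usage sketch of the `q` handling in FunctionalTest.get_json() above (standalone illustration with made-up values, not part of this commit). Each dict in `q` is flattened into parallel q.field/q.op/q.value lists, which webtest then encodes as repeated query parameters:

    def build_q_params(q):
        # mirrors the loop in get_json()
        query_params = {'q.field': [], 'q.value': [], 'q.op': []}
        for query in q:
            for name in ['field', 'op', 'value']:
                query_params['q.%s' % name].append(query.get(name, ''))
        return query_params

    # roughly what ?q.field=driver&q.op=eq&q.value=fake carries on the wire
    assert build_q_params([{'field': 'driver', 'op': 'eq', 'value': 'fake'}]) == {
        'q.field': ['driver'], 'q.op': ['eq'], 'q.value': ['fake']}
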
diff --git a/ironic/tests/unit/api/test_acl.py b/ironic/tests/unit/api/test_acl.py
new file mode 100644
index 000000000..a05337932
--- /dev/null
+++ b/ironic/tests/unit/api/test_acl.py
@@ -0,0 +1,99 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Tests for ACL. Checks whether certain kinds of requests
+are blocked or allowed to be processed.
+"""
+
+import mock
+from oslo_config import cfg
+from six.moves import http_client
+
+from ironic.tests.unit.api import base
+from ironic.tests.unit.api import utils
+from ironic.tests.unit.db import utils as db_utils
+
+cfg.CONF.import_opt('cache', 'keystonemiddleware.auth_token',
+ group='keystone_authtoken')
+
+
+class TestACL(base.FunctionalTest):
+
+ def setUp(self):
+ super(TestACL, self).setUp()
+
+ self.environ = {'fake.cache': utils.FakeMemcache()}
+ self.fake_db_node = db_utils.get_test_node(chassis_id=None)
+ self.node_path = '/nodes/%s' % self.fake_db_node['uuid']
+
+ def get_json(self, path, expect_errors=False, headers=None, q=[], **param):
+ return super(TestACL, self).get_json(path,
+ expect_errors=expect_errors,
+ headers=headers,
+ q=q,
+ extra_environ=self.environ,
+ **param)
+
+ def _make_app(self):
+ cfg.CONF.set_override('cache', 'fake.cache',
+ group='keystone_authtoken')
+ return super(TestACL, self)._make_app(enable_acl=True)
+
+ def test_non_authenticated(self):
+ response = self.get_json(self.node_path, expect_errors=True)
+ self.assertEqual(http_client.UNAUTHORIZED, response.status_int)
+
+ def test_authenticated(self):
+ with mock.patch.object(self.dbapi, 'get_node_by_uuid',
+ autospec=True) as mock_get_node:
+ mock_get_node.return_value = self.fake_db_node
+
+ response = self.get_json(
+ self.node_path, headers={'X-Auth-Token': utils.ADMIN_TOKEN})
+
+ self.assertEqual(self.fake_db_node['uuid'], response['uuid'])
+ mock_get_node.assert_called_once_with(self.fake_db_node['uuid'])
+
+ def test_non_admin(self):
+ response = self.get_json(self.node_path,
+ headers={'X-Auth-Token': utils.MEMBER_TOKEN},
+ expect_errors=True)
+
+ self.assertEqual(http_client.FORBIDDEN, response.status_int)
+
+ def test_non_admin_with_admin_header(self):
+ response = self.get_json(self.node_path,
+ headers={'X-Auth-Token': utils.MEMBER_TOKEN,
+ 'X-Roles': 'admin'},
+ expect_errors=True)
+
+ self.assertEqual(http_client.FORBIDDEN, response.status_int)
+
+ def test_public_api(self):
+ # expect_errors must be set to True: if it were False, the response
+ # would be converted to JSON and we could not read the response status
+ # code as easily.
+ for route in ('/', '/v1'):
+ response = self.get_json(route,
+ path_prefix='', expect_errors=True)
+ self.assertEqual(http_client.OK, response.status_int)
+
+ def test_public_api_with_path_extensions(self):
+ routes = {'/v1/': http_client.OK,
+ '/v1.json': http_client.OK,
+ '/v1.xml': http_client.NOT_FOUND}
+ for url in routes:
+ response = self.get_json(url,
+ path_prefix='', expect_errors=True)
+ self.assertEqual(routes[url], response.status_int)
diff --git a/ironic/tests/unit/api/test_base.py b/ironic/tests/unit/api/test_base.py
new file mode 100644
index 000000000..6df501d58
--- /dev/null
+++ b/ironic/tests/unit/api/test_base.py
@@ -0,0 +1,119 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+from six.moves import http_client
+from webob import exc
+
+from ironic.api.controllers import base as cbase
+from ironic.tests.unit.api import base
+
+
+class TestBase(base.FunctionalTest):
+
+ def test_api_setup(self):
+ pass
+
+ def test_bad_uri(self):
+ response = self.get_json('/bad/path',
+ expect_errors=True,
+ headers={"Accept": "application/json"})
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+ self.assertEqual("application/json", response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+
+class TestVersion(base.FunctionalTest):
+
+ @mock.patch('ironic.api.controllers.base.Version.parse_headers')
+ def test_init(self, mock_parse):
+ a = mock.Mock()
+ b = mock.Mock()
+ mock_parse.return_value = (a, b)
+ v = cbase.Version('test', 'foo', 'bar')
+
+ mock_parse.assert_called_with('test', 'foo', 'bar')
+ self.assertEqual(a, v.major)
+ self.assertEqual(b, v.minor)
+
+ @mock.patch('ironic.api.controllers.base.Version.parse_headers')
+ def test_repr(self, mock_parse):
+ mock_parse.return_value = (123, 456)
+ v = cbase.Version('test', mock.ANY, mock.ANY)
+ result = "%s" % v
+ self.assertEqual('123.456', result)
+
+ @mock.patch('ironic.api.controllers.base.Version.parse_headers')
+ def test_repr_with_strings(self, mock_parse):
+ mock_parse.return_value = ('abc', 'def')
+ v = cbase.Version('test', mock.ANY, mock.ANY)
+ result = "%s" % v
+ self.assertEqual('abc.def', result)
+
+ def test_parse_headers_ok(self):
+ version = cbase.Version.parse_headers(
+ {cbase.Version.string: '123.456'}, mock.ANY, mock.ANY)
+ self.assertEqual((123, 456), version)
+
+ def test_parse_headers_latest(self):
+ for s in ['latest', 'LATEST']:
+ version = cbase.Version.parse_headers(
+ {cbase.Version.string: s}, mock.ANY, '1.9')
+ self.assertEqual((1, 9), version)
+
+ def test_parse_headers_bad_length(self):
+ self.assertRaises(
+ exc.HTTPNotAcceptable,
+ cbase.Version.parse_headers,
+ {cbase.Version.string: '1'},
+ mock.ANY,
+ mock.ANY)
+ self.assertRaises(
+ exc.HTTPNotAcceptable,
+ cbase.Version.parse_headers,
+ {cbase.Version.string: '1.2.3'},
+ mock.ANY,
+ mock.ANY)
+
+ def test_parse_no_header(self):
+ # this asserts that the minimum version string of "1.1" is applied
+ version = cbase.Version.parse_headers({}, '1.1', '1.5')
+ self.assertEqual((1, 1), version)
+
+ def test_equals(self):
+ ver_1 = cbase.Version(
+ {cbase.Version.string: '123.456'}, mock.ANY, mock.ANY)
+ ver_2 = cbase.Version(
+ {cbase.Version.string: '123.456'}, mock.ANY, mock.ANY)
+ self.assertTrue(hasattr(ver_1, '__eq__'))
+ self.assertTrue(ver_1 == ver_2)
+
+ def test_greaterthan(self):
+ ver_1 = cbase.Version(
+ {cbase.Version.string: '123.457'}, mock.ANY, mock.ANY)
+ ver_2 = cbase.Version(
+ {cbase.Version.string: '123.456'}, mock.ANY, mock.ANY)
+ self.assertTrue(hasattr(ver_1, '__gt__'))
+ self.assertTrue(ver_1 > ver_2)
+
+ def test_lessthan(self):
+ # __lt__ is created by @functools.total_ordering; make sure it exists
+ # and works
+ ver_1 = cbase.Version(
+ {cbase.Version.string: '123.456'}, mock.ANY, mock.ANY)
+ ver_2 = cbase.Version(
+ {cbase.Version.string: '123.457'}, mock.ANY, mock.ANY)
+ self.assertTrue(hasattr(ver_1, '__lt__'))
+ self.assertTrue(ver_1 < ver_2)
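
Aside: the ordering tests above get `__lt__` for free; the comment in test_lessthan credits @functools.total_ordering. A minimal sketch of that mechanism, assuming Version defines `__eq__` and `__gt__` as the tests suggest (hypothetical class V, not ironic's actual Version):

    import functools

    @functools.total_ordering
    class V(object):
        def __init__(self, major, minor):
            self.major, self.minor = major, minor

        def __eq__(self, other):
            return (self.major, self.minor) == (other.major, other.minor)

        def __gt__(self, other):
            return (self.major, self.minor) > (other.major, other.minor)

    # total_ordering synthesizes __lt__, __le__ and __ge__ from __eq__/__gt__
    assert V(123, 456) < V(123, 457)
    assert V(1, 9) >= V(1, 1)
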
diff --git a/ironic/tests/unit/api/test_hooks.py b/ironic/tests/unit/api/test_hooks.py
new file mode 100644
index 000000000..198efb810
--- /dev/null
+++ b/ironic/tests/unit/api/test_hooks.py
@@ -0,0 +1,303 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for the Pecan API hooks."""
+
+import json
+
+import mock
+from oslo_config import cfg
+import oslo_messaging as messaging
+import six
+from six.moves import http_client
+from webob import exc as webob_exc
+
+from ironic.api.controllers import root
+from ironic.api import hooks
+from ironic.common import context
+from ironic.tests.unit.api import base
+from ironic.tests.unit import policy_fixture
+
+
+class FakeRequest(object):
+ def __init__(self, headers, context, environ):
+ self.headers = headers
+ self.context = context
+ self.environ = environ or {}
+ self.version = (1, 0)
+ self.host_url = 'http://127.0.0.1:6385'
+
+
+class FakeRequestState(object):
+ def __init__(self, headers=None, context=None, environ=None):
+ self.request = FakeRequest(headers, context, environ)
+ self.response = FakeRequest(headers, context, environ)
+
+ def set_context(self):
+ headers = self.request.headers
+ creds = {
+ 'user': headers.get('X-User') or headers.get('X-User-Id'),
+ 'tenant': headers.get('X-Tenant') or headers.get('X-Tenant-Id'),
+ 'domain_id': headers.get('X-User-Domain-Id'),
+ 'domain_name': headers.get('X-User-Domain-Name'),
+ 'auth_token': headers.get('X-Auth-Token'),
+ 'roles': headers.get('X-Roles', '').split(','),
+ }
+ is_admin = ('admin' in creds['roles'] or
+ 'administrator' in creds['roles'])
+ is_public_api = self.request.environ.get('is_public_api', False)
+ show_password = ('admin' in creds['tenant'])
+
+ self.request.context = context.RequestContext(
+ is_admin=is_admin, is_public_api=is_public_api,
+ show_password=show_password, **creds)
+
+
+def fake_headers(admin=False):
+ headers = {
+ 'X-Auth-Token': '8d9f235ca7464dd7ba46f81515797ea0',
+ 'X-Domain-Id': 'None',
+ 'X-Domain-Name': 'None',
+ 'X-Project-Domain-Id': 'default',
+ 'X-Project-Domain-Name': 'Default',
+ 'X-Project-Id': 'b4efa69d4ffa4973863f2eefc094f7f8',
+ 'X-Project-Name': 'admin',
+ 'X-Role': '_member_,admin',
+ 'X-Roles': '_member_,admin',
+ 'X-Tenant': 'foo',
+ 'X-Tenant-Id': 'b4efa69d4ffa4973863f2eefc094f7f8',
+ 'X-Tenant-Name': 'foo',
+ 'X-User': 'foo',
+ 'X-User-Domain-Id': 'default',
+ 'X-User-Domain-Name': 'Default',
+ 'X-User-Id': '604ab2a197c442c2a84aba66708a9e1e',
+ 'X-User-Name': 'foo',
+ 'X-OpenStack-Ironic-API-Version': '1.0'
+ }
+ if admin:
+ headers.update({
+ 'X-Project-Name': 'admin',
+ 'X-Role': '_member_,admin',
+ 'X-Roles': '_member_,admin',
+ 'X-Tenant': 'admin',
+ 'X-Tenant-Name': 'admin',
+ })
+ else:
+ headers.update({
+ 'X-Project-Name': 'foo',
+ 'X-Role': '_member_',
+ 'X-Roles': '_member_',
+ })
+ return headers
+
+
+class TestNoExceptionTracebackHook(base.FunctionalTest):
+
+ TRACE = [u'Traceback (most recent call last):',
+ u' File "/opt/stack/ironic/ironic/openstack/common/rpc/amqp.py",'
+ ' line 434, in _process_data\\n **args)',
+ u' File "/opt/stack/ironic/ironic/openstack/common/rpc/'
+ 'dispatcher.py", line 172, in dispatch\\n result ='
+ ' getattr(proxyobj, method)(ctxt, **kwargs)']
+ MSG_WITHOUT_TRACE = "Test exception message."
+ MSG_WITH_TRACE = MSG_WITHOUT_TRACE + "\n" + "\n".join(TRACE)
+
+ def setUp(self):
+ super(TestNoExceptionTracebackHook, self).setUp()
+ p = mock.patch.object(root.Root, 'convert')
+ self.root_convert_mock = p.start()
+ self.addCleanup(p.stop)
+
+ def test_hook_exception_success(self):
+ self.root_convert_mock.side_effect = Exception(self.MSG_WITH_TRACE)
+
+ response = self.get_json('/', path_prefix='', expect_errors=True)
+
+ actual_msg = json.loads(response.json['error_message'])['faultstring']
+ self.assertEqual(self.MSG_WITHOUT_TRACE, actual_msg)
+
+ def test_hook_remote_error_success(self):
+ test_exc_type = 'TestException'
+ self.root_convert_mock.side_effect = messaging.rpc.RemoteError(
+ test_exc_type, self.MSG_WITHOUT_TRACE, self.TRACE)
+
+ response = self.get_json('/', path_prefix='', expect_errors=True)
+
+ # NOTE(max_lobur): For RemoteError the client message will still have
+ # some garbage because the RemoteError traceback is serialized as a
+ # list instead of '\n'.join(trace). But since RemoteError is a fairly
+ # rare thing (it happens due to wrong deserialization settings, etc.),
+ # we don't care about this garbage.
+ expected_msg = ("Remote error: %s %s"
+ % (test_exc_type, self.MSG_WITHOUT_TRACE)
+ + ("\n[u'" if six.PY2 else "\n['"))
+ actual_msg = json.loads(response.json['error_message'])['faultstring']
+ self.assertEqual(expected_msg, actual_msg)
+
+ def test_hook_without_traceback(self):
+ msg = "Error message without traceback \n but \n multiline"
+ self.root_convert_mock.side_effect = Exception(msg)
+
+ response = self.get_json('/', path_prefix='', expect_errors=True)
+
+ actual_msg = json.loads(response.json['error_message'])['faultstring']
+ self.assertEqual(msg, actual_msg)
+
+ def test_hook_server_debug_on_serverfault(self):
+ cfg.CONF.set_override('debug', True)
+ self.root_convert_mock.side_effect = Exception(self.MSG_WITH_TRACE)
+
+ response = self.get_json('/', path_prefix='', expect_errors=True)
+
+ actual_msg = json.loads(
+ response.json['error_message'])['faultstring']
+ self.assertEqual(self.MSG_WITHOUT_TRACE, actual_msg)
+
+ def test_hook_server_debug_on_clientfault(self):
+ cfg.CONF.set_override('debug', True)
+ client_error = Exception(self.MSG_WITH_TRACE)
+ client_error.code = http_client.BAD_REQUEST
+ self.root_convert_mock.side_effect = client_error
+
+ response = self.get_json('/', path_prefix='', expect_errors=True)
+
+ actual_msg = json.loads(
+ response.json['error_message'])['faultstring']
+ self.assertEqual(self.MSG_WITH_TRACE, actual_msg)
+
+
+class TestContextHook(base.FunctionalTest):
+ @mock.patch.object(context, 'RequestContext')
+ def test_context_hook_not_admin(self, mock_ctx):
+ headers = fake_headers(admin=False)
+ reqstate = FakeRequestState(headers=headers)
+ context_hook = hooks.ContextHook(None)
+ context_hook.before(reqstate)
+ mock_ctx.assert_called_with(
+ auth_token=headers['X-Auth-Token'],
+ user=headers['X-User'],
+ tenant=headers['X-Tenant'],
+ domain_id=headers['X-User-Domain-Id'],
+ domain_name=headers['X-User-Domain-Name'],
+ is_public_api=False,
+ show_password=False,
+ is_admin=False,
+ roles=headers['X-Roles'].split(','))
+
+ @mock.patch.object(context, 'RequestContext')
+ def test_context_hook_admin(self, mock_ctx):
+ headers = fake_headers(admin=True)
+ reqstate = FakeRequestState(headers=headers)
+ context_hook = hooks.ContextHook(None)
+ context_hook.before(reqstate)
+ mock_ctx.assert_called_with(
+ auth_token=headers['X-Auth-Token'],
+ user=headers['X-User'],
+ tenant=headers['X-Tenant'],
+ domain_id=headers['X-User-Domain-Id'],
+ domain_name=headers['X-User-Domain-Name'],
+ is_public_api=False,
+ show_password=True,
+ is_admin=True,
+ roles=headers['X-Roles'].split(','))
+
+ @mock.patch.object(context, 'RequestContext')
+ def test_context_hook_public_api(self, mock_ctx):
+ headers = fake_headers(admin=True)
+ env = {'is_public_api': True}
+ reqstate = FakeRequestState(headers=headers, environ=env)
+ context_hook = hooks.ContextHook(None)
+ context_hook.before(reqstate)
+ mock_ctx.assert_called_with(
+ auth_token=headers['X-Auth-Token'],
+ user=headers['X-User'],
+ tenant=headers['X-Tenant'],
+ domain_id=headers['X-User-Domain-Id'],
+ domain_name=headers['X-User-Domain-Name'],
+ is_public_api=True,
+ show_password=True,
+ is_admin=True,
+ roles=headers['X-Roles'].split(','))
+
+ @mock.patch.object(context, 'RequestContext')
+ def test_context_hook_noauth_token_removed(self, mock_ctx):
+ cfg.CONF.set_override('auth_strategy', 'noauth')
+ headers = fake_headers(admin=False)
+ reqstate = FakeRequestState(headers=headers)
+ context_hook = hooks.ContextHook(None)
+ context_hook.before(reqstate)
+ mock_ctx.assert_called_with(
+ auth_token=None,
+ user=headers['X-User'],
+ tenant=headers['X-Tenant'],
+ domain_id=headers['X-User-Domain-Id'],
+ domain_name=headers['X-User-Domain-Name'],
+ is_public_api=False,
+ show_password=False,
+ is_admin=False,
+ roles=headers['X-Roles'].split(','))
+
+
+class TestTrustedCallHook(base.FunctionalTest):
+ def test_trusted_call_hook_not_admin(self):
+ headers = fake_headers(admin=False)
+ reqstate = FakeRequestState(headers=headers)
+ reqstate.set_context()
+ trusted_call_hook = hooks.TrustedCallHook()
+ self.assertRaises(webob_exc.HTTPForbidden,
+ trusted_call_hook.before, reqstate)
+
+ def test_trusted_call_hook_admin(self):
+ headers = fake_headers(admin=True)
+ reqstate = FakeRequestState(headers=headers)
+ reqstate.set_context()
+ trusted_call_hook = hooks.TrustedCallHook()
+ trusted_call_hook.before(reqstate)
+
+ def test_trusted_call_hook_public_api(self):
+ headers = fake_headers(admin=False)
+ env = {'is_public_api': True}
+ reqstate = FakeRequestState(headers=headers, environ=env)
+ reqstate.set_context()
+ trusted_call_hook = hooks.TrustedCallHook()
+ trusted_call_hook.before(reqstate)
+
+
+class TestTrustedCallHookCompatJuno(TestTrustedCallHook):
+ def setUp(self):
+ super(TestTrustedCallHookCompatJuno, self).setUp()
+ self.policy = self.useFixture(
+ policy_fixture.PolicyFixture(compat='juno'))
+
+ def test_trusted_call_hook_public_api(self):
+ self.skipTest('no public_api trusted call policy in juno')
+
+
+class TestPublicUrlHook(base.FunctionalTest):
+
+ def test_before_host_url(self):
+ headers = fake_headers()
+ reqstate = FakeRequestState(headers=headers)
+ trusted_call_hook = hooks.PublicUrlHook()
+ trusted_call_hook.before(reqstate)
+ self.assertEqual(reqstate.request.host_url,
+ reqstate.request.public_url)
+
+ def test_before_public_endpoint(self):
+ cfg.CONF.set_override('public_endpoint', 'http://foo', 'api')
+ headers = fake_headers()
+ reqstate = FakeRequestState(headers=headers)
+ trusted_call_hook = hooks.PublicUrlHook()
+ trusted_call_hook.before(reqstate)
+ self.assertEqual('http://foo', reqstate.request.public_url)
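
Aside: a standalone illustration of how the context flags checked in the hook tests above fall out of the fake headers (mirrors FakeRequestState.set_context(); not part of this commit):

    headers = fake_headers(admin=True)
    roles = headers['X-Roles'].split(',')
    assert 'admin' in roles                   # -> is_admin=True
    assert 'admin' in headers['X-Tenant']     # -> show_password=True
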
diff --git a/ironic/tests/unit/api/test_root.py b/ironic/tests/unit/api/test_root.py
new file mode 100644
index 000000000..649aff69c
--- /dev/null
+++ b/ironic/tests/unit/api/test_root.py
@@ -0,0 +1,43 @@
+# Copyright 2013 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from ironic.tests.unit.api import base
+
+
+class TestRoot(base.FunctionalTest):
+
+ def test_get_root(self):
+ data = self.get_json('/', path_prefix='')
+ self.assertEqual('v1', data['default_version']['id'])
+ # Check fields are not empty
+ [self.assertNotIn(f, ['', []]) for f in data.keys()]
+
+
+class TestV1Root(base.FunctionalTest):
+
+ def test_get_v1_root(self):
+ data = self.get_json('/')
+ self.assertEqual('v1', data['id'])
+ # Check fields are not empty
+ for f in data.keys():
+ self.assertNotIn(f, ['', []])
+ # Check if all known resources are present and there are no extra ones.
+ not_resources = ('id', 'links', 'media_types')
+ actual_resources = tuple(set(data.keys()) - set(not_resources))
+ expected_resources = ('chassis', 'drivers', 'nodes', 'ports')
+ self.assertEqual(sorted(expected_resources), sorted(actual_resources))
+
+ self.assertIn({'type': 'application/vnd.openstack.ironic.v1+json',
+ 'base': 'application/json'}, data['media_types'])
diff --git a/ironic/tests/unit/api/utils.py b/ironic/tests/unit/api/utils.py
new file mode 100644
index 000000000..163cc7978
--- /dev/null
+++ b/ironic/tests/unit/api/utils.py
@@ -0,0 +1,122 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Utils for testing the API service.
+"""
+
+import datetime
+import hashlib
+import json
+
+from ironic.api.controllers.v1 import chassis as chassis_controller
+from ironic.api.controllers.v1 import node as node_controller
+from ironic.api.controllers.v1 import port as port_controller
+from ironic.tests.unit.db import utils
+
+ADMIN_TOKEN = '4562138218392831'
+MEMBER_TOKEN = '4562138218392832'
+
+ADMIN_TOKEN_HASH = hashlib.sha256(ADMIN_TOKEN.encode()).hexdigest()
+MEMBER_TOKEN_HASH = hashlib.sha256(MEMBER_TOKEN.encode()).hexdigest()
+
+ADMIN_BODY = {
+ 'access': {
+ 'token': {'id': ADMIN_TOKEN,
+ 'expires': '2100-09-11T00:00:00'},
+ 'user': {'id': 'user_id1',
+ 'name': 'user_name1',
+ 'tenantId': '123i2910',
+ 'tenantName': 'mytenant',
+ 'roles': [{'name': 'admin'}]},
+ }
+}
+
+MEMBER_BODY = {
+ 'access': {
+ 'token': {'id': MEMBER_TOKEN,
+ 'expires': '2100-09-11T00:00:00'},
+ 'user': {'id': 'user_id2',
+ 'name': 'user-good',
+ 'tenantId': 'project-good',
+ 'tenantName': 'goodies',
+ 'roles': [{'name': 'Member'}]},
+ }
+}
+
+
+class FakeMemcache(object):
+ """Fake cache that is used for keystone tokens lookup."""
+
+ # NOTE(lucasagomes): In keystonemiddleware >= 2.0.0 the token cache
+ # keys are sha256 hashes of the token. This was introduced in
+ # https://review.openstack.org/#/c/186971
+ _cache = {
+ 'tokens/%s' % ADMIN_TOKEN: ADMIN_BODY,
+ 'tokens/%s' % ADMIN_TOKEN_HASH: ADMIN_BODY,
+ 'tokens/%s' % MEMBER_TOKEN: MEMBER_BODY,
+ 'tokens/%s' % MEMBER_TOKEN_HASH: MEMBER_BODY,
+ }
+
+ def __init__(self):
+ self.set_key = None
+ self.set_value = None
+ self.token_expiration = None
+
+ def get(self, key):
+ dt = datetime.datetime.utcnow() + datetime.timedelta(minutes=5)
+ return json.dumps((self._cache.get(key), dt.isoformat()))
+
+ def set(self, key, value, time=0, min_compress_len=0):
+ self.set_value = value
+ self.set_key = key
+
+
+def remove_internal(values, internal):
+ # NOTE(yuriyz): internal attributes should not be posted, except uuid
+ int_attr = [attr.lstrip('/') for attr in internal if attr != '/uuid']
+ return {k: v for (k, v) in values.items() if k not in int_attr}
+
+
+def node_post_data(**kw):
+ node = utils.get_test_node(**kw)
+ # These values are not part of the API object
+ node.pop('conductor_affinity')
+ node.pop('chassis_id')
+ node.pop('target_raid_config')
+ node.pop('raid_config')
+ internal = node_controller.NodePatchType.internal_attrs()
+ return remove_internal(node, internal)
+
+
+def port_post_data(**kw):
+ port = utils.get_test_port(**kw)
+ # node_id is not part of the API object
+ port.pop('node_id')
+ internal = port_controller.PortPatchType.internal_attrs()
+ return remove_internal(port, internal)
+
+
+def chassis_post_data(**kw):
+ chassis = utils.get_test_chassis(**kw)
+ internal = chassis_controller.ChassisPatchType.internal_attrs()
+ return remove_internal(chassis, internal)
+
+
+def post_get_test_node(**kw):
+ # NOTE(lucasagomes): When creating a node via API (POST)
+ # we have to use chassis_uuid
+ node = node_post_data(**kw)
+ chassis = utils.get_test_chassis()
+ node['chassis_uuid'] = kw.get('chassis_uuid', chassis['uuid'])
+ return node
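
Aside: a brief usage sketch of FakeMemcache above (standalone illustration, assuming the names from this module are in scope; not part of this commit). get() hands keystonemiddleware a JSON-encoded (token body, expiry) pair, keyed either by the raw token or by its sha256 digest:

    import json

    cache = FakeMemcache()
    body, expires = json.loads(cache.get('tokens/%s' % ADMIN_TOKEN_HASH))
    assert body['access']['token']['id'] == ADMIN_TOKEN
    assert body['access']['user']['roles'] == [{'name': 'admin'}]
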
diff --git a/ironic/tests/unit/api/v1/__init__.py b/ironic/tests/unit/api/v1/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/api/v1/__init__.py
diff --git a/ironic/tests/unit/api/v1/test_chassis.py b/ironic/tests/unit/api/v1/test_chassis.py
new file mode 100644
index 000000000..66c162250
--- /dev/null
+++ b/ironic/tests/unit/api/v1/test_chassis.py
@@ -0,0 +1,481 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Tests for the API /chassis/ methods.
+"""
+
+import datetime
+
+import mock
+from oslo_config import cfg
+from oslo_utils import timeutils
+from oslo_utils import uuidutils
+import six
+from six.moves import http_client
+from six.moves.urllib import parse as urlparse
+from wsme import types as wtypes
+
+from ironic.api.controllers import base as api_base
+from ironic.api.controllers import v1 as api_v1
+from ironic.api.controllers.v1 import chassis as api_chassis
+from ironic.tests.unit.api import base as test_api_base
+from ironic.tests.unit.api import utils as apiutils
+from ironic.tests.unit import base
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+class TestChassisObject(base.TestCase):
+
+ def test_chassis_init(self):
+ chassis_dict = apiutils.chassis_post_data()
+ del chassis_dict['description']
+ chassis = api_chassis.Chassis(**chassis_dict)
+ self.assertEqual(wtypes.Unset, chassis.description)
+
+
+class TestListChassis(test_api_base.FunctionalTest):
+
+ def test_empty(self):
+ data = self.get_json('/chassis')
+ self.assertEqual([], data['chassis'])
+
+ def test_one(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ data = self.get_json('/chassis')
+ self.assertEqual(chassis.uuid, data['chassis'][0]["uuid"])
+ self.assertNotIn('extra', data['chassis'][0])
+ self.assertNotIn('nodes', data['chassis'][0])
+
+ def test_get_one(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ data = self.get_json('/chassis/%s' % chassis['uuid'])
+ self.assertEqual(chassis.uuid, data['uuid'])
+ self.assertIn('extra', data)
+ self.assertIn('nodes', data)
+
+ def test_get_one_custom_fields(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ fields = 'extra,description'
+ data = self.get_json(
+ '/chassis/%s?fields=%s' % (chassis.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+ # We always append "links"
+ self.assertItemsEqual(['description', 'extra', 'links'], data)
+
+ def test_get_collection_custom_fields(self):
+ fields = 'uuid,extra'
+ for i in range(3):
+ obj_utils.create_test_chassis(
+ self.context, uuid=uuidutils.generate_uuid())
+
+ data = self.get_json(
+ '/chassis?fields=%s' % fields,
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+
+ self.assertEqual(3, len(data['chassis']))
+ for ch in data['chassis']:
+ # We always append "links"
+ self.assertItemsEqual(['uuid', 'extra', 'links'], ch)
+
+ def test_get_custom_fields_invalid_fields(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ fields = 'uuid,spongebob'
+ response = self.get_json(
+ '/chassis/%s?fields=%s' % (chassis.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MAX_VER)},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertIn('spongebob', response.json['error_message'])
+
+ def test_get_custom_fields_invalid_api_version(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ fields = 'uuid,extra'
+ response = self.get_json(
+ '/chassis/%s?fields=%s' % (chassis.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MIN_VER)},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, response.status_int)
+
+ def test_detail(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ data = self.get_json('/chassis/detail')
+ self.assertEqual(chassis.uuid, data['chassis'][0]["uuid"])
+ self.assertIn('extra', data['chassis'][0])
+ self.assertIn('nodes', data['chassis'][0])
+
+ def test_detail_against_single(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ response = self.get_json('/chassis/%s/detail' % chassis['uuid'],
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+
+ def test_many(self):
+ ch_list = []
+ for id_ in range(5):
+ chassis = obj_utils.create_test_chassis(
+ self.context, uuid=uuidutils.generate_uuid())
+ ch_list.append(chassis.uuid)
+ data = self.get_json('/chassis')
+ self.assertEqual(len(ch_list), len(data['chassis']))
+ uuids = [n['uuid'] for n in data['chassis']]
+ six.assertCountEqual(self, ch_list, uuids)
+
+ def _test_links(self, public_url=None):
+ cfg.CONF.set_override('public_endpoint', public_url, 'api')
+ uuid = uuidutils.generate_uuid()
+ obj_utils.create_test_chassis(self.context, uuid=uuid)
+ data = self.get_json('/chassis/%s' % uuid)
+ self.assertIn('links', data.keys())
+ self.assertEqual(2, len(data['links']))
+ self.assertIn(uuid, data['links'][0]['href'])
+ for l in data['links']:
+ bookmark = l['rel'] == 'bookmark'
+ self.assertTrue(self.validate_link(l['href'], bookmark=bookmark))
+
+ if public_url is not None:
+ expected = [{'href': '%s/v1/chassis/%s' % (public_url, uuid),
+ 'rel': 'self'},
+ {'href': '%s/chassis/%s' % (public_url, uuid),
+ 'rel': 'bookmark'}]
+ for i in expected:
+ self.assertIn(i, data['links'])
+
+ def test_links(self):
+ self._test_links()
+
+ def test_links_public_url(self):
+ self._test_links(public_url='http://foo')
+
+ def test_collection_links(self):
+ for id in range(5):
+ obj_utils.create_test_chassis(self.context,
+ uuid=uuidutils.generate_uuid())
+ data = self.get_json('/chassis/?limit=3')
+ self.assertEqual(3, len(data['chassis']))
+
+ next_marker = data['chassis'][-1]['uuid']
+ self.assertIn(next_marker, data['next'])
+
+ def test_collection_links_default_limit(self):
+ cfg.CONF.set_override('max_limit', 3, 'api')
+ for id_ in range(5):
+ obj_utils.create_test_chassis(self.context,
+ uuid=uuidutils.generate_uuid())
+ data = self.get_json('/chassis')
+ self.assertEqual(3, len(data['chassis']))
+
+ next_marker = data['chassis'][-1]['uuid']
+ self.assertIn(next_marker, data['next'])
+
+ def test_sort_key(self):
+ ch_list = []
+ for id_ in range(3):
+ chassis = obj_utils.create_test_chassis(
+ self.context, uuid=uuidutils.generate_uuid())
+ ch_list.append(chassis.uuid)
+ data = self.get_json('/chassis?sort_key=uuid')
+ uuids = [n['uuid'] for n in data['chassis']]
+ self.assertEqual(sorted(ch_list), uuids)
+
+ def test_sort_key_invalid(self):
+ invalid_keys_list = ['foo', 'extra']
+ for invalid_key in invalid_keys_list:
+ response = self.get_json('/chassis?sort_key=%s' % invalid_key,
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertIn(invalid_key, response.json['error_message'])
+
+ def test_nodes_subresource_link(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ data = self.get_json('/chassis/%s' % chassis.uuid)
+ self.assertIn('nodes', data.keys())
+
+ def test_nodes_subresource(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+
+ for id_ in range(2):
+ obj_utils.create_test_node(self.context,
+ chassis_id=chassis.id,
+ uuid=uuidutils.generate_uuid())
+
+ data = self.get_json('/chassis/%s/nodes' % chassis.uuid)
+ self.assertEqual(2, len(data['nodes']))
+ self.assertNotIn('next', data.keys())
+
+ # Test collection pagination
+ data = self.get_json('/chassis/%s/nodes?limit=1' % chassis.uuid)
+ self.assertEqual(1, len(data['nodes']))
+ self.assertIn('next', data.keys())
+
+ def test_nodes_subresource_no_uuid(self):
+ response = self.get_json('/chassis/nodes', expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+
+ def test_nodes_subresource_chassis_not_found(self):
+ non_existent_uuid = 'eeeeeeee-cccc-aaaa-bbbb-cccccccccccc'
+ response = self.get_json('/chassis/%s/nodes' % non_existent_uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+
+
+class TestPatch(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestPatch, self).setUp()
+ obj_utils.create_test_chassis(self.context)
+
+ def test_update_not_found(self):
+ uuid = uuidutils.generate_uuid()
+ response = self.patch_json('/chassis/%s' % uuid,
+ [{'path': '/extra/a', 'value': 'b',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def test_replace_singular(self, mock_utcnow):
+ chassis = obj_utils.get_test_chassis(self.context)
+ description = 'chassis-new-description'
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+
+ mock_utcnow.return_value = test_time
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/description',
+ 'value': description, 'op': 'replace'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ result = self.get_json('/chassis/%s' % chassis.uuid)
+ self.assertEqual(description, result['description'])
+ return_updated_at = timeutils.parse_isotime(
+ result['updated_at']).replace(tzinfo=None)
+ self.assertEqual(test_time, return_updated_at)
+
+ def test_replace_multi(self):
+ extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
+ chassis = obj_utils.create_test_chassis(self.context, extra=extra,
+ uuid=uuidutils.generate_uuid())
+ new_value = 'new value'
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/extra/foo2',
+ 'value': new_value, 'op': 'replace'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ result = self.get_json('/chassis/%s' % chassis.uuid)
+
+ extra["foo2"] = new_value
+ self.assertEqual(extra, result['extra'])
+
+ def test_remove_singular(self):
+ chassis = obj_utils.create_test_chassis(self.context, extra={'a': 'b'},
+ uuid=uuidutils.generate_uuid())
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/description', 'op': 'remove'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ result = self.get_json('/chassis/%s' % chassis.uuid)
+ self.assertIsNone(result['description'])
+
+ # Assert nothing else was changed
+ self.assertEqual(chassis.uuid, result['uuid'])
+ self.assertEqual(chassis.extra, result['extra'])
+
+ def test_remove_multi(self):
+ extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
+ chassis = obj_utils.create_test_chassis(self.context, extra=extra,
+ description="foobar",
+ uuid=uuidutils.generate_uuid())
+
+ # Removing one item from the collection
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/extra/foo2', 'op': 'remove'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ result = self.get_json('/chassis/%s' % chassis.uuid)
+ extra.pop("foo2")
+ self.assertEqual(extra, result['extra'])
+
+ # Removing the collection
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/extra', 'op': 'remove'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ result = self.get_json('/chassis/%s' % chassis.uuid)
+ self.assertEqual({}, result['extra'])
+
+ # Assert nothing else was changed
+ self.assertEqual(chassis.uuid, result['uuid'])
+ self.assertEqual(chassis.description, result['description'])
+
+ def test_remove_non_existent_property_fail(self):
+ chassis = obj_utils.get_test_chassis(self.context)
+ response = self.patch_json(
+ '/chassis/%s' % chassis.uuid,
+ [{'path': '/extra/non-existent', 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_add_root(self):
+ chassis = obj_utils.get_test_chassis(self.context)
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/description', 'value': 'test',
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_int)
+
+ def test_add_root_non_existent(self):
+ chassis = obj_utils.get_test_chassis(self.context)
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/foo', 'value': 'bar',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertTrue(response.json['error_message'])
+
+ def test_add_multi(self):
+ chassis = obj_utils.get_test_chassis(self.context)
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/extra/foo1', 'value': 'bar1',
+ 'op': 'add'},
+ {'path': '/extra/foo2', 'value': 'bar2',
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ result = self.get_json('/chassis/%s' % chassis.uuid)
+ expected = {"foo1": "bar1", "foo2": "bar2"}
+ self.assertEqual(expected, result['extra'])
+
+ def test_patch_nodes_subresource(self):
+ chassis = obj_utils.get_test_chassis(self.context)
+ response = self.patch_json('/chassis/%s/nodes' % chassis.uuid,
+ [{'path': '/extra/foo', 'value': 'bar',
+ 'op': 'add'}], expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, response.status_int)
+
+ def test_remove_uuid(self):
+ chassis = obj_utils.get_test_chassis(self.context)
+ response = self.patch_json('/chassis/%s' % chassis.uuid,
+ [{'path': '/uuid', 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+
+class TestPost(test_api_base.FunctionalTest):
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def test_create_chassis(self, mock_utcnow):
+ cdict = apiutils.chassis_post_data()
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = test_time
+
+ response = self.post_json('/chassis', cdict)
+ self.assertEqual(http_client.CREATED, response.status_int)
+ result = self.get_json('/chassis/%s' % cdict['uuid'])
+ self.assertEqual(cdict['uuid'], result['uuid'])
+ self.assertFalse(result['updated_at'])
+ return_created_at = timeutils.parse_isotime(
+ result['created_at']).replace(tzinfo=None)
+ self.assertEqual(test_time, return_created_at)
+ # Check location header
+ self.assertIsNotNone(response.location)
+ expected_location = '/v1/chassis/%s' % cdict['uuid']
+ self.assertEqual(expected_location,
+ urlparse.urlparse(response.location).path)
+
+ def test_create_chassis_doesnt_contain_id(self):
+ with mock.patch.object(self.dbapi, 'create_chassis',
+ wraps=self.dbapi.create_chassis) as cc_mock:
+ cdict = apiutils.chassis_post_data(extra={'foo': 123})
+ self.post_json('/chassis', cdict)
+ result = self.get_json('/chassis/%s' % cdict['uuid'])
+ self.assertEqual(cdict['extra'], result['extra'])
+ cc_mock.assert_called_once_with(mock.ANY)
+ # Check that 'id' is not in the first positional argument
+ self.assertNotIn('id', cc_mock.call_args[0][0])
+
+ def test_create_chassis_generate_uuid(self):
+ cdict = apiutils.chassis_post_data()
+ del cdict['uuid']
+ self.post_json('/chassis', cdict)
+ result = self.get_json('/chassis')
+ self.assertEqual(cdict['description'],
+ result['chassis'][0]['description'])
+ self.assertTrue(uuidutils.is_uuid_like(result['chassis'][0]['uuid']))
+
+ def test_post_nodes_subresource(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ ndict = apiutils.node_post_data()
+ ndict['chassis_uuid'] = chassis.uuid
+ response = self.post_json('/chassis/nodes', ndict,
+ expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, response.status_int)
+
+ def test_create_chassis_valid_extra(self):
+ cdict = apiutils.chassis_post_data(extra={'str': 'foo', 'int': 123,
+ 'float': 0.1, 'bool': True,
+ 'list': [1, 2], 'none': None,
+ 'dict': {'cat': 'meow'}})
+ self.post_json('/chassis', cdict)
+ result = self.get_json('/chassis/%s' % cdict['uuid'])
+ self.assertEqual(cdict['extra'], result['extra'])
+
+ def test_create_chassis_unicode_description(self):
+ descr = u'\u0430\u043c\u043e'
+ cdict = apiutils.chassis_post_data(description=descr)
+ self.post_json('/chassis', cdict)
+ result = self.get_json('/chassis/%s' % cdict['uuid'])
+ self.assertEqual(descr, result['description'])
+
+
+class TestDelete(test_api_base.FunctionalTest):
+
+ def test_delete_chassis(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ self.delete('/chassis/%s' % chassis.uuid)
+ response = self.get_json('/chassis/%s' % chassis.uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ def test_delete_chassis_with_node(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ obj_utils.create_test_node(self.context, chassis_id=chassis.id)
+ response = self.delete('/chassis/%s' % chassis.uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+ self.assertIn(chassis.uuid, response.json['error_message'])
+
+ def test_delete_chassis_not_found(self):
+ uuid = uuidutils.generate_uuid()
+ response = self.delete('/chassis/%s' % uuid, expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ def test_delete_nodes_subresource(self):
+ chassis = obj_utils.create_test_chassis(self.context)
+ response = self.delete('/chassis/%s/nodes' % chassis.uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, response.status_int)
diff --git a/ironic/tests/unit/api/v1/test_drivers.py b/ironic/tests/unit/api/v1/test_drivers.py
new file mode 100644
index 000000000..abe0325fd
--- /dev/null
+++ b/ironic/tests/unit/api/v1/test_drivers.py
@@ -0,0 +1,338 @@
+# Copyright 2013 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+
+import mock
+from oslo_config import cfg
+from six.moves import http_client
+from testtools.matchers import HasLength
+
+from ironic.api.controllers import base as api_base
+from ironic.api.controllers.v1 import driver
+from ironic.common import exception
+from ironic.conductor import rpcapi
+from ironic.tests.unit.api import base
+
+
+class TestListDrivers(base.FunctionalTest):
+ d1 = 'fake-driver1'
+ d2 = 'fake-driver2'
+ h1 = 'fake-host1'
+ h2 = 'fake-host2'
+
+ def register_fake_conductors(self):
+ self.dbapi.register_conductor({
+ 'hostname': self.h1,
+ 'drivers': [self.d1, self.d2],
+ })
+ self.dbapi.register_conductor({
+ 'hostname': self.h2,
+ 'drivers': [self.d2],
+ })
+
+ def test_drivers(self):
+ self.register_fake_conductors()
+ expected = sorted([
+ {'name': self.d1, 'hosts': [self.h1]},
+ {'name': self.d2, 'hosts': [self.h1, self.h2]},
+ ], key=lambda d: d['name'])
+ data = self.get_json('/drivers')
+ self.assertThat(data['drivers'], HasLength(2))
+ drivers = sorted(data['drivers'], key=lambda d: d['name'])
+ for i in range(len(expected)):
+ d = drivers[i]
+ self.assertEqual(expected[i]['name'], d['name'])
+ self.assertEqual(sorted(expected[i]['hosts']), sorted(d['hosts']))
+ self.validate_link(d['links'][0]['href'])
+ self.validate_link(d['links'][1]['href'])
+
+ def test_drivers_no_active_conductor(self):
+ data = self.get_json('/drivers')
+ self.assertThat(data['drivers'], HasLength(0))
+ self.assertEqual([], data['drivers'])
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_driver_properties')
+ def test_drivers_get_one_ok(self, mock_driver_properties):
+ # get_driver_properties mock is required by validate_link()
+ self.register_fake_conductors()
+ data = self.get_json('/drivers/%s' % self.d1,
+ headers={api_base.Version.string: '1.14'})
+ self.assertEqual(self.d1, data['name'])
+ self.assertEqual([self.h1], data['hosts'])
+ self.assertIn('properties', data.keys())
+ self.validate_link(data['links'][0]['href'])
+ self.validate_link(data['links'][1]['href'])
+ self.validate_link(data['properties'][0]['href'])
+ self.validate_link(data['properties'][1]['href'])
+
+ def test_driver_properties_hidden_in_lower_version(self):
+ self.register_fake_conductors()
+ data = self.get_json('/drivers/%s' % self.d1,
+ headers={api_base.Version.string: '1.8'})
+ self.assertNotIn('properties', data.keys())
+
+ def test_drivers_get_one_not_found(self):
+ response = self.get_json('/drivers/%s' % self.d1, expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+
+ def _test_links(self, public_url=None):
+ cfg.CONF.set_override('public_endpoint', public_url, 'api')
+ self.register_fake_conductors()
+ data = self.get_json('/drivers/%s' % self.d1)
+ self.assertIn('links', data.keys())
+ self.assertEqual(2, len(data['links']))
+ self.assertIn(self.d1, data['links'][0]['href'])
+ for l in data['links']:
+ bookmark = l['rel'] == 'bookmark'
+ self.assertTrue(self.validate_link(l['href'], bookmark=bookmark))
+
+ if public_url is not None:
+ expected = [{'href': '%s/v1/drivers/%s' % (public_url, self.d1),
+ 'rel': 'self'},
+ {'href': '%s/drivers/%s' % (public_url, self.d1),
+ 'rel': 'bookmark'}]
+ for i in expected:
+ self.assertIn(i, data['links'])
+
+ def test_links(self):
+ self._test_links()
+
+ def test_links_public_url(self):
+ self._test_links(public_url='http://foo')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'driver_vendor_passthru')
+ def test_driver_vendor_passthru_sync(self, mocked_driver_vendor_passthru):
+ self.register_fake_conductors()
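+ # 'async': False marks a synchronous passthru call, so the
+ # returned payload is relayed back in the 200 response body.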
+ mocked_driver_vendor_passthru.return_value = {
+ 'return': {'return_key': 'return_value'},
+ 'async': False,
+ 'attach': False}
+ response = self.post_json(
+ '/drivers/%s/vendor_passthru/do_test' % self.d1,
+ {'test_key': 'test_value'})
+ self.assertEqual(http_client.OK, response.status_int)
+ self.assertEqual(mocked_driver_vendor_passthru.return_value['return'],
+ response.json)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'driver_vendor_passthru')
+ def test_driver_vendor_passthru_async(self, mocked_driver_vendor_passthru):
+ self.register_fake_conductors()
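+ # 'async': True marks an asynchronous passthru call, which is
+ # acknowledged with HTTP 202 ACCEPTED.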
+ mocked_driver_vendor_passthru.return_value = {'return': None,
+ 'async': True,
+ 'attach': False}
+ response = self.post_json(
+ '/drivers/%s/vendor_passthru/do_test' % self.d1,
+ {'test_key': 'test_value'})
+ self.assertEqual(http_client.ACCEPTED, response.status_int)
+ self.assertIsNone(mocked_driver_vendor_passthru.return_value['return'])
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'driver_vendor_passthru')
+ def test_driver_vendor_passthru_put(self, mocked_driver_vendor_passthru):
+ self.register_fake_conductors()
+ return_value = {'return': None, 'async': True, 'attach': False}
+ mocked_driver_vendor_passthru.return_value = return_value
+ response = self.put_json(
+ '/drivers/%s/vendor_passthru/do_test' % self.d1,
+ {'test_key': 'test_value'})
+ self.assertEqual(http_client.ACCEPTED, response.status_int)
+ self.assertEqual(return_value['return'], response.json)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'driver_vendor_passthru')
+ def test_driver_vendor_passthru_get(self, mocked_driver_vendor_passthru):
+ self.register_fake_conductors()
+ return_value = {'return': 'foo', 'async': False, 'attach': False}
+ mocked_driver_vendor_passthru.return_value = return_value
+ response = self.get_json(
+ '/drivers/%s/vendor_passthru/do_test' % self.d1)
+ self.assertEqual(return_value['return'], response)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'driver_vendor_passthru')
+ def test_driver_vendor_passthru_delete(self, mock_driver_vendor_passthru):
+ self.register_fake_conductors()
+ return_value = {'return': None, 'async': True, 'attach': False}
+ mock_driver_vendor_passthru.return_value = return_value
+ response = self.delete(
+ '/drivers/%s/vendor_passthru/do_test' % self.d1)
+ self.assertEqual(http_client.ACCEPTED, response.status_int)
+ self.assertEqual(return_value['return'], response.json)
+
+ def test_driver_vendor_passthru_driver_not_found(self):
+ # Test the case where the given driver is not found,
+ # e.g. get_topic_for_driver fails to find the driver.
+ response = self.post_json(
+ '/drivers/%s/vendor_passthru/do_test' % self.d1,
+ {'test_key': 'test_value'},
+ expect_errors=True)
+
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+
+ def test_driver_vendor_passthru_method_not_found(self):
+ response = self.post_json(
+ '/drivers/%s/vendor_passthru' % self.d1,
+ {'test_key': 'test_value'},
+ expect_errors=True)
+
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ error = json.loads(response.json['error_message'])
+ self.assertEqual('Missing argument: "method"',
+ error['faultstring'])
+
+ @mock.patch.object(rpcapi.ConductorAPI,
+ 'get_driver_vendor_passthru_methods')
+ def test_driver_vendor_passthru_methods(self, get_methods_mock):
+ self.register_fake_conductors()
+ return_value = {'foo': 'bar'}
+ get_methods_mock.return_value = return_value
+ path = '/drivers/%s/vendor_passthru/methods' % self.d1
+
+ data = self.get_json(path)
+ self.assertEqual(return_value, data)
+ get_methods_mock.assert_called_once_with(mock.ANY, self.d1,
+ topic=mock.ANY)
+
+ # Now let's test the cache: Reset the mock
+ get_methods_mock.reset_mock()
+
+ # Call it again
+ data = self.get_json(path)
+ self.assertEqual(return_value, data)
+ # Assert RPC method wasn't called this time
+ self.assertFalse(get_methods_mock.called)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_raid_logical_disk_properties')
+ def test_raid_logical_disk_properties(self, disk_prop_mock):
+ driver._RAID_PROPERTIES = {}
+ self.register_fake_conductors()
+ properties = {'foo': 'description of foo'}
+ disk_prop_mock.return_value = properties
+ path = '/drivers/%s/raid/logical_disk_properties' % self.d1
+ data = self.get_json(path,
+ headers={api_base.Version.string: "1.12"})
+ self.assertEqual(properties, data)
+ disk_prop_mock.assert_called_once_with(mock.ANY, self.d1,
+ topic=mock.ANY)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_raid_logical_disk_properties')
+ def test_raid_logical_disk_properties_older_version(self, disk_prop_mock):
+ driver._RAID_PROPERTIES = {}
+ self.register_fake_conductors()
+ properties = {'foo': 'description of foo'}
+ disk_prop_mock.return_value = properties
+ path = '/drivers/%s/raid/logical_disk_properties' % self.d1
+ ret = self.get_json(path,
+ headers={api_base.Version.string: "1.4"},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, ret.status_code)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_raid_logical_disk_properties')
+ def test_raid_logical_disk_properties_cached(self, disk_prop_mock):
+ # only one RPC-conductor call will be made and the info cached
+ # for subsequent requests
+ driver._RAID_PROPERTIES = {}
+ self.register_fake_conductors()
+ properties = {'foo': 'description of foo'}
+ disk_prop_mock.return_value = properties
+ path = '/drivers/%s/raid/logical_disk_properties' % self.d1
+ for i in range(3):
+ data = self.get_json(path,
+ headers={api_base.Version.string: "1.12"})
+ self.assertEqual(properties, data)
+ disk_prop_mock.assert_called_once_with(mock.ANY, self.d1,
+ topic=mock.ANY)
+ self.assertEqual(properties, driver._RAID_PROPERTIES[self.d1])
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_raid_logical_disk_properties')
+ def test_raid_logical_disk_properties_iface_not_supported(
+ self, disk_prop_mock):
+ driver._RAID_PROPERTIES = {}
+ self.register_fake_conductors()
+ disk_prop_mock.side_effect = iter(
+ [exception.UnsupportedDriverExtension(
+ extension='raid', driver='fake')])
+ path = '/drivers/%s/raid/logical_disk_properties' % self.d1
+ ret = self.get_json(path,
+ headers={api_base.Version.string: "1.12"},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, ret.status_code)
+ self.assertTrue(ret.json['error_message'])
+ disk_prop_mock.assert_called_once_with(mock.ANY, self.d1,
+ topic=mock.ANY)
+
+
+@mock.patch.object(rpcapi.ConductorAPI, 'get_driver_properties')
+@mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for_driver')
+class TestDriverProperties(base.FunctionalTest):
+
+ def test_driver_properties_fake(self, mock_topic, mock_properties):
+ # Can get driver properties for fake driver.
+ driver._DRIVER_PROPERTIES = {}
+ driver_name = 'fake'
+ mock_topic.return_value = 'fake_topic'
+ mock_properties.return_value = {'prop1': 'Property 1. Required.'}
+ data = self.get_json('/drivers/%s/properties' % driver_name)
+ self.assertEqual(mock_properties.return_value, data)
+ mock_topic.assert_called_once_with(driver_name)
+ mock_properties.assert_called_once_with(mock.ANY, driver_name,
+ topic=mock_topic.return_value)
+ self.assertEqual(mock_properties.return_value,
+ driver._DRIVER_PROPERTIES[driver_name])
+
+ def test_driver_properties_cached(self, mock_topic, mock_properties):
+ # only one RPC-conductor call will be made and the info cached
+ # for subsequent requests
+ driver._DRIVER_PROPERTIES = {}
+ driver_name = 'fake'
+ mock_topic.return_value = 'fake_topic'
+ mock_properties.return_value = {'prop1': 'Property 1. Required.'}
+ data = self.get_json('/drivers/%s/properties' % driver_name)
+ data = self.get_json('/drivers/%s/properties' % driver_name)
+ data = self.get_json('/drivers/%s/properties' % driver_name)
+ self.assertEqual(mock_properties.return_value, data)
+ mock_topic.assert_called_once_with(driver_name)
+ mock_properties.assert_called_once_with(mock.ANY, driver_name,
+ topic=mock_topic.return_value)
+ self.assertEqual(mock_properties.return_value,
+ driver._DRIVER_PROPERTIES[driver_name])
+
+ def test_driver_properties_invalid_driver_name(self, mock_topic,
+ mock_properties):
+ # Cannot get driver properties for an invalid driver; no RPC topic
+ # exists for it.
+ driver._DRIVER_PROPERTIES = {}
+ driver_name = 'bad_driver'
+ mock_topic.side_effect = exception.DriverNotFound(
+ driver_name=driver_name)
+ mock_properties.return_value = {'prop1': 'Property 1. Required.'}
+ ret = self.get_json('/drivers/%s/properties' % driver_name,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, ret.status_int)
+ mock_topic.assert_called_once_with(driver_name)
+ self.assertFalse(mock_properties.called)
+
+ def test_driver_properties_cannot_load(self, mock_topic, mock_properties):
+ # Cannot get driver properties for the driver. Although an RPC topic
+ # exists for it, the conductor wasn't able to load it.
+ driver._DRIVER_PROPERTIES = {}
+ driver_name = 'driver'
+ mock_topic.return_value = 'driver_topic'
+ mock_properties.side_effect = exception.DriverNotFound(
+ driver_name=driver_name)
+ ret = self.get_json('/drivers/%s/properties' % driver_name,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, ret.status_int)
+ mock_topic.assert_called_once_with(driver_name)
+ mock_properties.assert_called_once_with(mock.ANY, driver_name,
+ topic=mock_topic.return_value)
diff --git a/ironic/tests/unit/api/v1/test_nodes.py b/ironic/tests/unit/api/v1/test_nodes.py
new file mode 100644
index 000000000..0965d4732
--- /dev/null
+++ b/ironic/tests/unit/api/v1/test_nodes.py
@@ -0,0 +1,2260 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Tests for the API /nodes/ methods.
+"""
+
+import datetime
+import json
+
+import mock
+from oslo_config import cfg
+from oslo_utils import timeutils
+from oslo_utils import uuidutils
+import six
+from six.moves import http_client
+from six.moves.urllib import parse as urlparse
+from testtools.matchers import HasLength
+from wsme import types as wtypes
+
+from ironic.api.controllers import base as api_base
+from ironic.api.controllers import v1 as api_v1
+from ironic.api.controllers.v1 import node as api_node
+from ironic.api.controllers.v1 import utils as api_utils
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import rpcapi
+from ironic import objects
+from ironic.tests.unit.api import base as test_api_base
+from ironic.tests.unit.api import utils as test_api_utils
+from ironic.tests.unit import base
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+class TestNodeObject(base.TestCase):
+
+ def test_node_init(self):
+ node_dict = test_api_utils.node_post_data()
+ del node_dict['instance_uuid']
+ node = api_node.Node(**node_dict)
+ self.assertEqual(wtypes.Unset, node.instance_uuid)
+
+
+class TestListNodes(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestListNodes, self).setUp()
+ self.chassis = obj_utils.create_test_chassis(self.context)
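+ # get_topic_for is mocked out so these API tests do not depend
+ # on a registered conductor.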
+ p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
+ self.mock_gtf = p.start()
+ self.mock_gtf.return_value = 'test-topic'
+ self.addCleanup(p.stop)
+
+ def _create_association_test_nodes(self):
+ # create some unassociated nodes
+ unassociated_nodes = []
+ for id in range(3):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid())
+ unassociated_nodes.append(node.uuid)
+
+ # create some associated nodes
+ associated_nodes = []
+ for id in range(4):
+ node = obj_utils.create_test_node(
+ self.context, uuid=uuidutils.generate_uuid(),
+ instance_uuid=uuidutils.generate_uuid())
+ associated_nodes.append(node.uuid)
+ return {'associated': associated_nodes,
+ 'unassociated': unassociated_nodes}
+
+ def test_empty(self):
+ data = self.get_json('/nodes')
+ self.assertEqual([], data['nodes'])
+
+ def test_one(self):
+ node = obj_utils.create_test_node(self.context,
+ chassis_id=self.chassis.id)
+ data = self.get_json(
+ '/nodes', headers={api_base.Version.string: str(api_v1.MAX_VER)})
+ self.assertIn('instance_uuid', data['nodes'][0])
+ self.assertIn('maintenance', data['nodes'][0])
+ self.assertIn('power_state', data['nodes'][0])
+ self.assertIn('provision_state', data['nodes'][0])
+ self.assertIn('uuid', data['nodes'][0])
+ self.assertEqual(node.uuid, data['nodes'][0]["uuid"])
+ self.assertNotIn('driver', data['nodes'][0])
+ self.assertNotIn('driver_info', data['nodes'][0])
+ self.assertNotIn('driver_internal_info', data['nodes'][0])
+ self.assertNotIn('extra', data['nodes'][0])
+ self.assertNotIn('properties', data['nodes'][0])
+ self.assertNotIn('chassis_uuid', data['nodes'][0])
+ self.assertNotIn('reservation', data['nodes'][0])
+ self.assertNotIn('console_enabled', data['nodes'][0])
+ self.assertNotIn('target_power_state', data['nodes'][0])
+ self.assertNotIn('target_provision_state', data['nodes'][0])
+ self.assertNotIn('provision_updated_at', data['nodes'][0])
+ self.assertNotIn('maintenance_reason', data['nodes'][0])
+ self.assertNotIn('clean_step', data['nodes'][0])
+ self.assertNotIn('raid_config', data['nodes'][0])
+ self.assertNotIn('target_raid_config', data['nodes'][0])
+ # never expose the chassis_id
+ self.assertNotIn('chassis_id', data['nodes'][0])
+
+ def test_get_one(self):
+ node = obj_utils.create_test_node(self.context,
+ chassis_id=self.chassis.id)
+ data = self.get_json(
+ '/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+ self.assertEqual(node.uuid, data['uuid'])
+ self.assertIn('driver', data)
+ self.assertIn('driver_info', data)
+ self.assertEqual('******', data['driver_info']['fake_password'])
+ self.assertEqual('bar', data['driver_info']['foo'])
+ self.assertIn('driver_internal_info', data)
+ self.assertIn('extra', data)
+ self.assertIn('properties', data)
+ self.assertIn('chassis_uuid', data)
+ self.assertIn('reservation', data)
+ self.assertIn('maintenance_reason', data)
+ self.assertIn('name', data)
+ self.assertIn('inspection_finished_at', data)
+ self.assertIn('inspection_started_at', data)
+ self.assertIn('clean_step', data)
+ self.assertIn('states', data)
+ # never expose the chassis_id
+ self.assertNotIn('chassis_id', data)
+
+ def test_node_states_field_hidden_in_lower_version(self):
+ node = obj_utils.create_test_node(self.context,
+ chassis_id=self.chassis.id)
+ data = self.get_json(
+ '/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: '1.8'})
+ self.assertNotIn('states', data)
+
+ def test_get_one_custom_fields(self):
+ node = obj_utils.create_test_node(self.context,
+ chassis_id=self.chassis.id)
+ fields = 'extra,instance_info'
+ data = self.get_json(
+ '/nodes/%s?fields=%s' % (node.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+ # We always append "links"
+ self.assertItemsEqual(['extra', 'instance_info', 'links'], data)
+
+ def test_get_collection_custom_fields(self):
+ fields = 'uuid,instance_info'
+ for i in range(3):
+ obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ instance_uuid=uuidutils.generate_uuid())
+
+ data = self.get_json(
+ '/nodes?fields=%s' % fields,
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+
+ self.assertEqual(3, len(data['nodes']))
+ for node in data['nodes']:
+ # We always append "links"
+ self.assertItemsEqual(['uuid', 'instance_info', 'links'], node)
+
+ def test_get_custom_fields_invalid_fields(self):
+ node = obj_utils.create_test_node(self.context,
+ chassis_id=self.chassis.id)
+ fields = 'uuid,spongebob'
+ response = self.get_json(
+ '/nodes/%s?fields=%s' % (node.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MAX_VER)},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertIn('spongebob', response.json['error_message'])
+
+ def test_get_custom_fields_invalid_api_version(self):
+ node = obj_utils.create_test_node(self.context,
+ chassis_id=self.chassis.id)
+ fields = 'uuid,extra'
+ response = self.get_json(
+ '/nodes/%s?fields=%s' % (node.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MIN_VER)},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, response.status_int)
+
+ def test_get_one_custom_fields_show_password(self):
+ node = obj_utils.create_test_node(self.context,
+ chassis_id=self.chassis.id,
+ driver_info={'fake_password': 'bar'})
+ fields = 'driver_info'
+ data = self.get_json(
+ '/nodes/%s?fields=%s' % (node.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+ # We always append "links"
+ self.assertItemsEqual(['driver_info', 'links'], data)
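+ # Passwords in driver_info are obfuscated even when driver_info
+ # is requested explicitly via ?fields.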
+ self.assertEqual('******', data['driver_info']['fake_password'])
+
+ def test_detail(self):
+ node = obj_utils.create_test_node(self.context,
+ chassis_id=self.chassis.id)
+ data = self.get_json(
+ '/nodes/detail',
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+ self.assertEqual(node.uuid, data['nodes'][0]["uuid"])
+ self.assertIn('name', data['nodes'][0])
+ self.assertIn('driver', data['nodes'][0])
+ self.assertIn('driver_info', data['nodes'][0])
+ self.assertIn('extra', data['nodes'][0])
+ self.assertIn('properties', data['nodes'][0])
+ self.assertIn('chassis_uuid', data['nodes'][0])
+ self.assertIn('reservation', data['nodes'][0])
+ self.assertIn('maintenance', data['nodes'][0])
+ self.assertIn('console_enabled', data['nodes'][0])
+ self.assertIn('target_power_state', data['nodes'][0])
+ self.assertIn('target_provision_state', data['nodes'][0])
+ self.assertIn('provision_updated_at', data['nodes'][0])
+ self.assertIn('inspection_finished_at', data['nodes'][0])
+ self.assertIn('inspection_started_at', data['nodes'][0])
+ self.assertIn('raid_config', data['nodes'][0])
+ self.assertIn('target_raid_config', data['nodes'][0])
+ # never expose the chassis_id
+ self.assertNotIn('chassis_id', data['nodes'][0])
+
+ def test_detail_against_single(self):
+ node = obj_utils.create_test_node(self.context)
+ response = self.get_json('/nodes/%s/detail' % node.uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+
+ def test_mask_available_state(self):
+ node = obj_utils.create_test_node(self.context,
+ provision_state=states.AVAILABLE)
+
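+ # Clients using an API version older than 1.2 see AVAILABLE nodes
+ # reported as NOSTATE for backwards compatibility.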
+ data = self.get_json(
+ '/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: str(api_v1.MIN_VER)})
+ self.assertEqual(states.NOSTATE, data['provision_state'])
+
+ data = self.get_json('/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: "1.2"})
+ self.assertEqual(states.AVAILABLE, data['provision_state'])
+
+ def test_hide_fields_in_newer_versions_driver_internal(self):
+ node = obj_utils.create_test_node(self.context,
+ driver_internal_info={"foo": "bar"})
+ data = self.get_json(
+ '/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: str(api_v1.MIN_VER)})
+ self.assertNotIn('driver_internal_info', data)
+
+ data = self.get_json('/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: "1.3"})
+ self.assertEqual({"foo": "bar"}, data['driver_internal_info'])
+
+ def test_hide_fields_in_newer_versions_name(self):
+ node = obj_utils.create_test_node(self.context,
+ name="fish")
+ data = self.get_json('/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: "1.4"})
+ self.assertNotIn('name', data)
+
+ data = self.get_json('/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual('fish', data['name'])
+
+ def test_hide_fields_in_newer_versions_inspection(self):
+ some_time = datetime.datetime(2015, 3, 18, 19, 20)
+ node = obj_utils.create_test_node(self.context,
+ inspection_started_at=some_time)
+ data = self.get_json(
+ '/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: str(api_v1.MIN_VER)})
+ self.assertNotIn('inspection_finished_at', data)
+ self.assertNotIn('inspection_started_at', data)
+
+ data = self.get_json('/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: "1.6"})
+ started = timeutils.parse_isotime(
+ data['inspection_started_at']).replace(tzinfo=None)
+ self.assertEqual(some_time, started)
+ self.assertIsNone(data['inspection_finished_at'])
+
+ def test_hide_fields_in_newer_versions_clean_step(self):
+ node = obj_utils.create_test_node(self.context,
+ clean_step={"foo": "bar"})
+ data = self.get_json(
+ '/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: str(api_v1.MIN_VER)})
+ self.assertNotIn('clean_step', data)
+
+ data = self.get_json('/nodes/%s' % node.uuid,
+ headers={api_base.Version.string: "1.7"})
+ self.assertEqual({"foo": "bar"}, data['clean_step'])
+
+ def test_many(self):
+ nodes = []
+ for id in range(5):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid())
+ nodes.append(node.uuid)
+ data = self.get_json('/nodes')
+ self.assertEqual(len(nodes), len(data['nodes']))
+
+ uuids = [n['uuid'] for n in data['nodes']]
+ self.assertEqual(sorted(nodes), sorted(uuids))
+
+ def test_many_have_names(self):
+ nodes = []
+ node_names = []
+ for id in range(5):
+ name = 'node-%s' % id
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ name=name)
+ nodes.append(node.uuid)
+ node_names.append(name)
+ data = self.get_json('/nodes',
+ headers={api_base.Version.string: "1.5"})
+ names = [n['name'] for n in data['nodes']]
+ self.assertEqual(len(nodes), len(data['nodes']))
+ self.assertEqual(sorted(node_names), sorted(names))
+
+ def _test_links(self, public_url=None):
+ cfg.CONF.set_override('public_endpoint', public_url, 'api')
+ uuid = uuidutils.generate_uuid()
+ obj_utils.create_test_node(self.context, uuid=uuid)
+ data = self.get_json('/nodes/%s' % uuid)
+ self.assertIn('links', data.keys())
+ self.assertEqual(2, len(data['links']))
+ self.assertIn(uuid, data['links'][0]['href'])
+ for l in data['links']:
+ bookmark = l['rel'] == 'bookmark'
+ self.assertTrue(self.validate_link(l['href'], bookmark=bookmark))
+
+ if public_url is not None:
+ expected = [{'href': '%s/v1/nodes/%s' % (public_url, uuid),
+ 'rel': 'self'},
+ {'href': '%s/nodes/%s' % (public_url, uuid),
+ 'rel': 'bookmark'}]
+ for i in expected:
+ self.assertIn(i, data['links'])
+
+ def test_links(self):
+ self._test_links()
+
+ def test_links_public_url(self):
+ self._test_links(public_url='http://foo')
+
+ def test_collection_links(self):
+ nodes = []
+ for id in range(5):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid())
+ nodes.append(node.uuid)
+ data = self.get_json('/nodes/?limit=3')
+ self.assertEqual(3, len(data['nodes']))
+
+ next_marker = data['nodes'][-1]['uuid']
+ self.assertIn(next_marker, data['next'])
+
+ def test_collection_links_default_limit(self):
+ cfg.CONF.set_override('max_limit', 3, 'api')
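+ # With no explicit ?limit in the request, the api.max_limit option
+ # caps the page size and a 'next' link is returned.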
+ nodes = []
+ for id in range(5):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid())
+ nodes.append(node.uuid)
+ data = self.get_json('/nodes')
+ self.assertEqual(3, len(data['nodes']))
+
+ next_marker = data['nodes'][-1]['uuid']
+ self.assertIn(next_marker, data['next'])
+
+ def test_sort_key(self):
+ nodes = []
+ for id in range(3):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid())
+ nodes.append(node.uuid)
+ data = self.get_json('/nodes?sort_key=uuid')
+ uuids = [n['uuid'] for n in data['nodes']]
+ self.assertEqual(sorted(nodes), uuids)
+
+ def test_sort_key_invalid(self):
+ invalid_keys_list = ['foo', 'properties', 'driver_info', 'extra',
+ 'instance_info', 'driver_internal_info',
+ 'clean_step']
+ for invalid_key in invalid_keys_list:
+ response = self.get_json('/nodes?sort_key=%s' % invalid_key,
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertIn(invalid_key, response.json['error_message'])
+
+ def test_ports_subresource_link(self):
+ node = obj_utils.create_test_node(self.context)
+ data = self.get_json('/nodes/%s' % node.uuid)
+ self.assertIn('ports', data.keys())
+
+ def test_ports_subresource(self):
+ node = obj_utils.create_test_node(self.context)
+
+ for id_ in range(2):
+ obj_utils.create_test_port(self.context, node_id=node.id,
+ uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:3%s' % id_)
+
+ data = self.get_json('/nodes/%s/ports' % node.uuid)
+ self.assertEqual(2, len(data['ports']))
+ self.assertNotIn('next', data.keys())
+
+ # Test collection pagination
+ data = self.get_json('/nodes/%s/ports?limit=1' % node.uuid)
+ self.assertEqual(1, len(data['ports']))
+ self.assertIn('next', data.keys())
+
+ def test_ports_subresource_noid(self):
+ node = obj_utils.create_test_node(self.context)
+ obj_utils.create_test_port(self.context, node_id=node.id)
+ # No node id specified
+ response = self.get_json('/nodes/ports', expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+
+ def test_ports_subresource_node_not_found(self):
+ non_existent_uuid = 'eeeeeeee-cccc-aaaa-bbbb-cccccccccccc'
+ response = self.get_json('/nodes/%s/ports' % non_existent_uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def _test_node_states(self, mock_utcnow, api_version=None):
+ fake_state = 'fake-state'
+ fake_error = 'fake-error'
+ fake_config = '{"foo": "bar"}'
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = test_time
+ node = obj_utils.create_test_node(self.context,
+ power_state=fake_state,
+ target_power_state=fake_state,
+ provision_state=fake_state,
+ target_provision_state=fake_state,
+ provision_updated_at=test_time,
+ raid_config=fake_config,
+ target_raid_config=fake_config,
+ last_error=fake_error)
+ headers = {}
+ if api_version:
+ headers = {api_base.Version.string: api_version}
+ data = self.get_json('/nodes/%s/states' % node.uuid, headers=headers)
+ self.assertEqual(fake_state, data['power_state'])
+ self.assertEqual(fake_state, data['target_power_state'])
+ self.assertEqual(fake_state, data['provision_state'])
+ self.assertEqual(fake_state, data['target_provision_state'])
+ prov_up_at = timeutils.parse_isotime(
+ data['provision_updated_at']).replace(tzinfo=None)
+ self.assertEqual(test_time, prov_up_at)
+ self.assertEqual(fake_error, data['last_error'])
+ self.assertFalse(data['console_enabled'])
+ return data
+
+ def test_node_states(self):
+ self._test_node_states()
+
+ def test_node_states_raid(self):
+ data = self._test_node_states(api_version="1.12")
+ self.assertEqual({'foo': 'bar'}, data['raid_config'])
+ self.assertEqual({'foo': 'bar'}, data['target_raid_config'])
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def test_node_states_by_name(self, mock_utcnow):
+ fake_state = 'fake-state'
+ fake_error = 'fake-error'
+ test_time = datetime.datetime(1971, 3, 9, 0, 0)
+ mock_utcnow.return_value = test_time
+ node = obj_utils.create_test_node(self.context,
+ name='eggs',
+ power_state=fake_state,
+ target_power_state=fake_state,
+ provision_state=fake_state,
+ target_provision_state=fake_state,
+ provision_updated_at=test_time,
+ last_error=fake_error)
+ data = self.get_json('/nodes/%s/states' % node.name,
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual(fake_state, data['power_state'])
+ self.assertEqual(fake_state, data['target_power_state'])
+ self.assertEqual(fake_state, data['provision_state'])
+ self.assertEqual(fake_state, data['target_provision_state'])
+ prov_up_at = timeutils.parse_isotime(
+ data['provision_updated_at']).replace(tzinfo=None)
+ self.assertEqual(test_time, prov_up_at)
+ self.assertEqual(fake_error, data['last_error'])
+ self.assertFalse(data['console_enabled'])
+
+ def test_node_by_instance_uuid(self):
+ node = obj_utils.create_test_node(
+ self.context,
+ uuid=uuidutils.generate_uuid(),
+ instance_uuid=uuidutils.generate_uuid())
+ instance_uuid = node.instance_uuid
+
+ data = self.get_json('/nodes?instance_uuid=%s' % instance_uuid,
+ headers={api_base.Version.string: "1.5"})
+
+ self.assertThat(data['nodes'], HasLength(1))
+ self.assertEqual(node['instance_uuid'],
+ data['nodes'][0]["instance_uuid"])
+
+ def test_node_by_instance_uuid_wrong_uuid(self):
+ obj_utils.create_test_node(
+ self.context, uuid=uuidutils.generate_uuid(),
+ instance_uuid=uuidutils.generate_uuid())
+ wrong_uuid = uuidutils.generate_uuid()
+
+ data = self.get_json('/nodes?instance_uuid=%s' % wrong_uuid)
+
+ self.assertThat(data['nodes'], HasLength(0))
+
+ def test_node_by_instance_uuid_invalid_uuid(self):
+ response = self.get_json('/nodes?instance_uuid=fake',
+ expect_errors=True)
+
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+
+ def test_associated_nodes_insensitive(self):
+ associated_nodes = (self
+ ._create_association_test_nodes()
+ .get('associated'))
+
+ data = self.get_json('/nodes?associated=true')
+ data1 = self.get_json('/nodes?associated=True')
+
+ uuids = [n['uuid'] for n in data['nodes']]
+ uuids1 = [n['uuid'] for n in data1['nodes']]
+ self.assertEqual(sorted(associated_nodes), sorted(uuids1))
+ self.assertEqual(sorted(associated_nodes), sorted(uuids))
+
+ def test_associated_nodes_error(self):
+ self._create_association_test_nodes()
+ response = self.get_json('/nodes?associated=blah', expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_unassociated_nodes_insensitive(self):
+ unassociated_nodes = (self
+ ._create_association_test_nodes()
+ .get('unassociated'))
+
+ data = self.get_json('/nodes?associated=false')
+ data1 = self.get_json('/nodes?associated=FALSE')
+
+ uuids = [n['uuid'] for n in data['nodes']]
+ uuids1 = [n['uuid'] for n in data1['nodes']]
+ self.assertEqual(sorted(unassociated_nodes), sorted(uuids1))
+ self.assertEqual(sorted(unassociated_nodes), sorted(uuids))
+
+ def test_unassociated_nodes_with_limit(self):
+ unassociated_nodes = (self
+ ._create_association_test_nodes()
+ .get('unassociated'))
+
+ data = self.get_json('/nodes?associated=False&limit=2')
+
+ self.assertThat(data['nodes'], HasLength(2))
+ self.assertTrue(data['nodes'][0]['uuid'] in unassociated_nodes)
+
+ def test_next_link_with_association(self):
+ self._create_association_test_nodes()
+ data = self.get_json('/nodes/?limit=3&associated=True')
+ self.assertThat(data['nodes'], HasLength(3))
+ self.assertIn('associated=True', data['next'])
+
+ def test_detail_with_association_filter(self):
+ associated_nodes = (self
+ ._create_association_test_nodes()
+ .get('associated'))
+ data = self.get_json('/nodes/detail?associated=true')
+ self.assertIn('driver', data['nodes'][0])
+ self.assertEqual(len(associated_nodes), len(data['nodes']))
+
+ def test_next_link_with_association_with_detail(self):
+ self._create_association_test_nodes()
+ data = self.get_json('/nodes/detail?limit=3&associated=true')
+ self.assertThat(data['nodes'], HasLength(3))
+ self.assertIn('driver', data['nodes'][0])
+ self.assertIn('associated=True', data['next'])
+
+ def test_detail_with_instance_uuid(self):
+ node = obj_utils.create_test_node(
+ self.context,
+ uuid=uuidutils.generate_uuid(),
+ instance_uuid=uuidutils.generate_uuid(),
+ chassis_id=self.chassis.id)
+ instance_uuid = node.instance_uuid
+
+ data = self.get_json('/nodes/detail?instance_uuid=%s' % instance_uuid)
+
+ self.assertEqual(node['instance_uuid'],
+ data['nodes'][0]["instance_uuid"])
+ self.assertIn('driver', data['nodes'][0])
+ self.assertIn('driver_info', data['nodes'][0])
+ self.assertIn('extra', data['nodes'][0])
+ self.assertIn('properties', data['nodes'][0])
+ self.assertIn('chassis_uuid', data['nodes'][0])
+ # never expose the chassis_id
+ self.assertNotIn('chassis_id', data['nodes'][0])
+
+ def test_maintenance_nodes(self):
+ nodes = []
+ for id in range(5):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ maintenance=id % 2)
+ nodes.append(node)
+
+ data = self.get_json('/nodes?maintenance=true')
+ uuids = [n['uuid'] for n in data['nodes']]
+ test_uuids_1 = [n.uuid for n in nodes if n.maintenance]
+ self.assertEqual(sorted(test_uuids_1), sorted(uuids))
+
+ data = self.get_json('/nodes?maintenance=false')
+ uuids = [n['uuid'] for n in data['nodes']]
+ test_uuids_0 = [n.uuid for n in nodes if not n.maintenance]
+ self.assertEqual(sorted(test_uuids_0), sorted(uuids))
+
+ def test_maintenance_nodes_error(self):
+ response = self.get_json('/nodes?associated=true&maintenance=blah',
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_maintenance_nodes_associated(self):
+ self._create_association_test_nodes()
+ node = obj_utils.create_test_node(
+ self.context,
+ instance_uuid=uuidutils.generate_uuid(),
+ maintenance=True)
+
+ data = self.get_json('/nodes?associated=true&maintenance=false')
+ uuids = [n['uuid'] for n in data['nodes']]
+ self.assertNotIn(node.uuid, uuids)
+ data = self.get_json('/nodes?associated=true&maintenance=true')
+ uuids = [n['uuid'] for n in data['nodes']]
+ self.assertIn(node.uuid, uuids)
+ data = self.get_json('/nodes?associated=true&maintenance=TruE')
+ uuids = [n['uuid'] for n in data['nodes']]
+ self.assertIn(node.uuid, uuids)
+
+ def test_get_nodes_by_provision_state(self):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ provision_state=states.AVAILABLE)
+ node1 = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ provision_state=states.DEPLOYING)
+
+ data = self.get_json('/nodes?provision_state=available',
+ headers={api_base.Version.string: "1.9"})
+ uuids = [n['uuid'] for n in data['nodes']]
+ self.assertIn(node.uuid, uuids)
+ self.assertNotIn(node1.uuid, uuids)
+ data = self.get_json('/nodes?provision_state=deploying',
+ headers={api_base.Version.string: "1.9"})
+ uuids = [n['uuid'] for n in data['nodes']]
+ self.assertIn(node1.uuid, uuids)
+ self.assertNotIn(node.uuid, uuids)
+
+ def test_get_nodes_by_invalid_provision_state(self):
+ response = self.get_json('/nodes?provision_state=test',
+ headers={api_base.Version.string: "1.9"},
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_get_nodes_by_provision_state_not_allowed(self):
+ response = self.get_json('/nodes?provision_state=test',
+ headers={api_base.Version.string: "1.8"},
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_get_console_information(self):
+ node = obj_utils.create_test_node(self.context)
+ expected_console_info = {'test': 'test-data'}
+ expected_data = {'console_enabled': True,
+ 'console_info': expected_console_info}
+ with mock.patch.object(rpcapi.ConductorAPI,
+ 'get_console_information') as mock_gci:
+ mock_gci.return_value = expected_console_info
+ data = self.get_json('/nodes/%s/states/console' % node.uuid)
+ self.assertEqual(expected_data, data)
+ mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_console_information')
+ def test_get_console_information_by_name(self, mock_gci):
+ node = obj_utils.create_test_node(self.context, name='spam')
+ expected_console_info = {'test': 'test-data'}
+ expected_data = {'console_enabled': True,
+ 'console_info': expected_console_info}
+ mock_gci.return_value = expected_console_info
+ data = self.get_json('/nodes/%s/states/console' % node.name,
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual(expected_data, data)
+ mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ def test_get_console_information_console_disabled(self):
+ node = obj_utils.create_test_node(self.context)
+ expected_data = {'console_enabled': False,
+ 'console_info': None}
+ with mock.patch.object(rpcapi.ConductorAPI,
+ 'get_console_information') as mock_gci:
+ mock_gci.side_effect = (
+ exception.NodeConsoleNotEnabled(node=node.uuid))
+ data = self.get_json('/nodes/%s/states/console' % node.uuid)
+ self.assertEqual(expected_data, data)
+ mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ def test_get_console_information_not_supported(self):
+ node = obj_utils.create_test_node(self.context)
+ with mock.patch.object(rpcapi.ConductorAPI,
+ 'get_console_information') as mock_gci:
+ mock_gci.side_effect = exception.UnsupportedDriverExtension(
+ extension='console', driver='test-driver')
+ ret = self.get_json('/nodes/%s/states/console' % node.uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_boot_device')
+ def test_get_boot_device(self, mock_gbd):
+ node = obj_utils.create_test_node(self.context)
+ expected_data = {'boot_device': boot_devices.PXE, 'persistent': True}
+ mock_gbd.return_value = expected_data
+ data = self.get_json('/nodes/%s/management/boot_device' % node.uuid)
+ self.assertEqual(expected_data, data)
+ mock_gbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_boot_device')
+ def test_get_boot_device_by_name(self, mock_gbd):
+ node = obj_utils.create_test_node(self.context, name='spam')
+ expected_data = {'boot_device': boot_devices.PXE, 'persistent': True}
+ mock_gbd.return_value = expected_data
+ data = self.get_json('/nodes/%s/management/boot_device' % node.name,
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual(expected_data, data)
+ mock_gbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_boot_device')
+ def test_get_boot_device_iface_not_supported(self, mock_gbd):
+ node = obj_utils.create_test_node(self.context)
+ mock_gbd.side_effect = exception.UnsupportedDriverExtension(
+ extension='management', driver='test-driver')
+ ret = self.get_json('/nodes/%s/management/boot_device' % node.uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ self.assertTrue(ret.json['error_message'])
+ mock_gbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_supported_boot_devices')
+ def test_get_supported_boot_devices(self, mock_gsbd):
+ mock_gsbd.return_value = [boot_devices.PXE]
+ node = obj_utils.create_test_node(self.context)
+ data = self.get_json('/nodes/%s/management/boot_device/supported'
+ % node.uuid)
+ expected_data = {'supported_boot_devices': [boot_devices.PXE]}
+ self.assertEqual(expected_data, data)
+ mock_gsbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_supported_boot_devices')
+ def test_get_supported_boot_devices_by_name(self, mock_gsbd):
+ mock_gsbd.return_value = [boot_devices.PXE]
+ node = obj_utils.create_test_node(self.context, name='spam')
+ data = self.get_json(
+ '/nodes/%s/management/boot_device/supported' % node.name,
+ headers={api_base.Version.string: "1.5"})
+ expected_data = {'supported_boot_devices': [boot_devices.PXE]}
+ self.assertEqual(expected_data, data)
+ mock_gsbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_supported_boot_devices')
+ def test_get_supported_boot_devices_iface_not_supported(self, mock_gsbd):
+ node = obj_utils.create_test_node(self.context)
+ mock_gsbd.side_effect = exception.UnsupportedDriverExtension(
+ extension='management', driver='test-driver')
+ ret = self.get_json('/nodes/%s/management/boot_device/supported' %
+ node.uuid, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ self.assertTrue(ret.json['error_message'])
+ mock_gsbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'validate_driver_interfaces')
+ def test_validate_by_uuid_using_deprecated_interface(self, mock_vdi):
+ # NOTE(mrda): The 'node_uuid' interface is deprecated in favour
+ # of the 'node' interface.
+ node = obj_utils.create_test_node(self.context)
+ self.get_json('/nodes/validate?node_uuid=%s' % node.uuid)
+ mock_vdi.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'validate_driver_interfaces')
+ def test_validate_by_uuid(self, mock_vdi):
+ node = obj_utils.create_test_node(self.context)
+ self.get_json('/nodes/validate?node=%s' % node.uuid,
+ headers={api_base.Version.string: "1.5"})
+ mock_vdi.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'validate_driver_interfaces')
+ def test_validate_by_name_unsupported(self, mock_vdi):
+ node = obj_utils.create_test_node(self.context, name='spam')
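+ # Looking a node up by name requires API version 1.5 or later,
+ # so the request is rejected at the default version.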
+ ret = self.get_json('/nodes/validate?node=%s' % node.name,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, ret.status_code)
+ self.assertFalse(mock_vdi.called)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'validate_driver_interfaces')
+ def test_validate_by_name(self, mock_vdi):
+ node = obj_utils.create_test_node(self.context, name='spam')
+ self.get_json('/nodes/validate?node=%s' % node.name,
+ headers={api_base.Version.string: "1.5"})
+ # Note that this should be node.uuid here, as we get that from the
+ # rpc_node lookup and pass it downwards.
+ mock_vdi.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+
+class TestPatch(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestPatch, self).setUp()
+ self.chassis = obj_utils.create_test_chassis(self.context)
+ self.node = obj_utils.create_test_node(self.context, name='node-57',
+ chassis_id=self.chassis.id)
+ self.node_no_name = obj_utils.create_test_node(
+ self.context, uuid='deadbeef-0000-1111-2222-333333333333',
+ chassis_id=self.chassis.id)
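+ # The conductor RPC calls below are mocked out so that only the
+ # API-side patch handling is exercised here.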
+ p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
+ self.mock_gtf = p.start()
+ self.mock_gtf.return_value = 'test-topic'
+ self.addCleanup(p.stop)
+ p = mock.patch.object(rpcapi.ConductorAPI, 'update_node')
+ self.mock_update_node = p.start()
+ self.addCleanup(p.stop)
+ p = mock.patch.object(rpcapi.ConductorAPI, 'change_node_power_state')
+ self.mock_cnps = p.start()
+ self.addCleanup(p.stop)
+
+ def test_update_ok(self):
+ self.mock_update_node.return_value = self.node
+ (self
+ .mock_update_node
+ .return_value
+ .updated_at) = "2013-12-03T06:20:41.184720+00:00"
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/instance_uuid',
+ 'value':
+ 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
+ 'op': 'replace'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(self.mock_update_node.return_value.updated_at,
+ timeutils.parse_isotime(response.json['updated_at']))
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+ def test_update_by_name_unsupported(self):
+ self.mock_update_node.return_value = self.node
+ (self
+ .mock_update_node
+ .return_value
+ .updated_at) = "2013-12-03T06:20:41.184720+00:00"
+ response = self.patch_json(
+ '/nodes/%s' % self.node.name,
+ [{'path': '/instance_uuid',
+ 'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
+ 'op': 'replace'}],
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_code)
+ self.assertFalse(self.mock_update_node.called)
+
+ def test_update_ok_by_name(self):
+ self.mock_update_node.return_value = self.node
+ (self
+ .mock_update_node
+ .return_value
+ .updated_at) = "2013-12-03T06:20:41.184720+00:00"
+ response = self.patch_json(
+ '/nodes/%s' % self.node.name,
+ [{'path': '/instance_uuid',
+ 'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
+ 'op': 'replace'}],
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(self.mock_update_node.return_value.updated_at,
+ timeutils.parse_isotime(response.json['updated_at']))
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+ def test_update_state(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'power_state': 'new state'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_update_fails_bad_driver_info(self):
+ fake_err = 'Fake Error Message'
+ self.mock_update_node.side_effect = (
+ exception.InvalidParameterValue(fake_err))
+
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/driver_info/this',
+ 'value': 'foo',
+ 'op': 'add'},
+ {'path': '/driver_info/that',
+ 'value': 'bar',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+ def test_update_fails_bad_driver(self):
+ self.mock_gtf.side_effect = exception.NoValidHost('Fake Error')
+
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/driver',
+ 'value': 'bad-driver',
+ 'op': 'replace'}],
+ expect_errors=True)
+
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+
+ def test_add_ok(self):
+ self.mock_update_node.return_value = self.node
+
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/extra/foo',
+ 'value': 'bar',
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+ def test_add_root(self):
+ self.mock_update_node.return_value = self.node
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/instance_uuid',
+ 'value':
+ 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+ def test_add_root_non_existent(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/foo', 'value': 'bar',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_remove_ok(self):
+ self.mock_update_node.return_value = self.node
+
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/extra',
+ 'op': 'remove'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+ def test_remove_non_existent_property_fail(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/extra/non-existent',
+ 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_update_allowed_in_power_transition(self):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ target_power_state=states.POWER_OFF)
+ self.mock_update_node.return_value = node
+ response = self.patch_json('/nodes/%s' % node.uuid,
+ [{'path': '/extra/foo',
+ 'value': 'bar',
+ 'op': 'add'}])
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_update_allowed_in_maintenance(self):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ target_power_state=states.POWER_OFF,
+ maintenance=True)
+ self.mock_update_node.return_value = node
+ response = self.patch_json('/nodes/%s' % node.uuid,
+ [{'path': '/instance_uuid',
+ 'op': 'remove'}])
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_add_state_in_deployfail(self):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ provision_state=states.DEPLOYFAIL,
+ target_provision_state=states.ACTIVE)
+ self.mock_update_node.return_value = node
+ response = self.patch_json('/nodes/%s' % node.uuid,
+ [{'path': '/extra/foo', 'value': 'bar',
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+ def test_patch_ports_subresource(self):
+ response = self.patch_json('/nodes/%s/ports' % self.node.uuid,
+ [{'path': '/extra/foo', 'value': 'bar',
+ 'op': 'add'}], expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, response.status_int)
+
+ def test_remove_uuid(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/uuid', 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_remove_instance_uuid_clean_backward_compat(self):
+ for state in (states.CLEANING, states.CLEANWAIT):
+ node = obj_utils.create_test_node(
+ self.context,
+ uuid=uuidutils.generate_uuid(),
+ provision_state=state,
+ target_provision_state=states.AVAILABLE)
+ self.mock_update_node.return_value = node
+ response = self.patch_json('/nodes/%s' % node.uuid,
+ [{'op': 'remove',
+ 'path': '/instance_uuid'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ # NOTE(lucasagomes): instance_uuid is already removed as part of
+ # node's tear down, assert update has not been called. This test
+ # should be removed in the next cycle (Mitaka).
+ self.assertFalse(self.mock_update_node.called)
+
+ def test_add_state_in_cleaning(self):
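+        # Node updates are rejected with 409 (Conflict) while the node is
+        # being cleaned.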
+ node = obj_utils.create_test_node(
+ self.context,
+ uuid=uuidutils.generate_uuid(),
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE)
+ self.mock_update_node.return_value = node
+ response = self.patch_json('/nodes/%s' % node.uuid,
+ [{'path': '/extra/foo', 'value': 'bar',
+ 'op': 'add'}], expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.CONFLICT, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_remove_mandatory_field(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/driver', 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_replace_chassis_uuid(self):
+ self.mock_update_node.return_value = self.node
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/chassis_uuid',
+ 'value': self.chassis.uuid,
+ 'op': 'replace'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_add_chassis_uuid(self):
+ self.mock_update_node.return_value = self.node
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/chassis_uuid',
+ 'value': self.chassis.uuid,
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_add_chassis_id(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/chassis_id',
+ 'value': '1',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_replace_chassis_id(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/chassis_id',
+ 'value': '1',
+ 'op': 'replace'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_remove_chassis_id(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/chassis_id',
+ 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_replace_non_existent_chassis_uuid(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/chassis_uuid',
+ 'value':
+ 'eeeeeeee-dddd-cccc-bbbb-aaaaaaaaaaaa',
+ 'op': 'replace'}], expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_remove_internal_field(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/last_error', 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_replace_internal_field(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/power_state', 'op': 'replace',
+ 'value': 'fake-state'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_replace_maintenance(self):
+ self.mock_update_node.return_value = self.node
+
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/maintenance', 'op': 'replace',
+ 'value': 'true'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+ def test_replace_maintenance_by_name(self):
+ self.mock_update_node.return_value = self.node
+
+ response = self.patch_json(
+ '/nodes/%s' % self.node.name,
+ [{'path': '/maintenance', 'op': 'replace',
+ 'value': 'true'}],
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ self.mock_update_node.assert_called_once_with(
+ mock.ANY, mock.ANY, 'test-topic')
+
+    def test_replace_console_enabled(self):
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/console_enabled',
+ 'op': 'replace', 'value': True}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_replace_provision_updated_at(self):
+ test_time = '2000-01-01 00:00:00'
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/provision_updated_at',
+ 'op': 'replace', 'value': test_time}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_patch_add_name_ok(self):
+ self.mock_update_node.return_value = self.node_no_name
+ test_name = 'guido-van-rossum'
+ response = self.patch_json('/nodes/%s' % self.node_no_name.uuid,
+ [{'path': '/name',
+ 'op': 'add',
+ 'value': test_name}],
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_patch_add_name_invalid(self):
+ self.mock_update_node.return_value = self.node_no_name
+ test_name = 'i am invalid'
+ response = self.patch_json('/nodes/%s' % self.node_no_name.uuid,
+ [{'path': '/name',
+ 'op': 'add',
+ 'value': test_name}],
+ headers={api_base.Version.string: "1.10"},
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_patch_name_replace_ok(self):
+ self.mock_update_node.return_value = self.node
+ test_name = 'guido-van-rossum'
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/name',
+ 'op': 'replace',
+ 'value': test_name}],
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_patch_add_replace_invalid(self):
+ self.mock_update_node.return_value = self.node_no_name
+ test_name = 'Guido Van Error'
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/name',
+ 'op': 'replace',
+ 'value': test_name}],
+ headers={api_base.Version.string: "1.5"},
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_patch_duplicate_name(self):
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid())
+ test_name = "this-is-my-node"
+ self.mock_update_node.side_effect = exception.DuplicateName(test_name)
+ response = self.patch_json('/nodes/%s' % node.uuid,
+ [{'path': '/name',
+ 'op': 'replace',
+ 'value': test_name}],
+ headers={api_base.Version.string: "1.5"},
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.CONFLICT, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ @mock.patch.object(api_utils, 'get_rpc_node')
+    def test_patch_update_driver_console_enabled(self, mock_rpc_node):
+ self.node.console_enabled = True
+ mock_rpc_node.return_value = self.node
+
+ response = self.patch_json('/nodes/%s' % self.node.uuid,
+ [{'path': '/driver',
+ 'value': 'foo',
+ 'op': 'add'}],
+ expect_errors=True)
+ mock_rpc_node.assert_called_once_with(self.node.uuid)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.CONFLICT, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_update_in_UPDATE_ALLOWED_STATES(self):
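+        # Updating 'extra' should succeed in every provision state listed
+        # in states.UPDATE_ALLOWED_STATES.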
+ for state in states.UPDATE_ALLOWED_STATES:
+ node = obj_utils.create_test_node(
+ self.context,
+ uuid=uuidutils.generate_uuid(),
+ provision_state=state,
+ target_provision_state=states.AVAILABLE)
+
+ self.mock_update_node.return_value = node
+ response = self.patch_json('/nodes/%s' % node.uuid,
+ [{'path': '/extra/foo', 'value': 'bar',
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+
+class TestPost(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestPost, self).setUp()
+ self.chassis = obj_utils.create_test_chassis(self.context)
+ p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
+ self.mock_gtf = p.start()
+ self.mock_gtf.return_value = 'test-topic'
+ self.addCleanup(p.stop)
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def test_create_node(self, mock_utcnow):
+ ndict = test_api_utils.post_get_test_node()
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = test_time
+ response = self.post_json('/nodes', ndict)
+ self.assertEqual(http_client.CREATED, response.status_int)
+ result = self.get_json('/nodes/%s' % ndict['uuid'])
+ self.assertEqual(ndict['uuid'], result['uuid'])
+ self.assertFalse(result['updated_at'])
+ return_created_at = timeutils.parse_isotime(
+ result['created_at']).replace(tzinfo=None)
+ self.assertEqual(test_time, return_created_at)
+ # Check location header
+ self.assertIsNotNone(response.location)
+ expected_location = '/v1/nodes/%s' % ndict['uuid']
+ self.assertEqual(urlparse.urlparse(response.location).path,
+ expected_location)
+
+ def test_create_node_default_state_none(self):
+ ndict = test_api_utils.post_get_test_node()
+ response = self.post_json('/nodes', ndict,
+ headers={api_base.Version.string: "1.10"})
+ self.assertEqual(http_client.CREATED, response.status_int)
+
+ # default state remains NONE/AVAILABLE
+ result = self.get_json('/nodes/%s' % ndict['uuid'])
+ self.assertEqual(states.NOSTATE, result['provision_state'])
+ result = self.get_json('/nodes/%s' % ndict['uuid'],
+ headers={api_base.Version.string: "1.10"})
+ self.assertEqual(ndict['uuid'], result['uuid'])
+ self.assertEqual(states.AVAILABLE, result['provision_state'])
+
+ def test_create_node_default_state_enroll(self):
+ ndict = test_api_utils.post_get_test_node()
+ response = self.post_json('/nodes', ndict,
+ headers={api_base.Version.string: "1.11"})
+ self.assertEqual(http_client.CREATED, response.status_int)
+
+ # default state is ENROLL
+ result = self.get_json('/nodes/%s' % ndict['uuid'])
+ self.assertEqual(ndict['uuid'], result['uuid'])
+ self.assertEqual(states.ENROLL, result['provision_state'])
+
+ def test_create_node_doesnt_contain_id(self):
+        # FIXME(comstud): I'd like to make this test not use the dbapi;
+        # however, no matter what I do when trying to mock Node.create(),
+        # the API fails to convert the objects.Node into the API Node
+        # object correctly (it leaves all fields as Unset).
+ with mock.patch.object(self.dbapi, 'create_node',
+ wraps=self.dbapi.create_node) as cn_mock:
+ ndict = test_api_utils.post_get_test_node(extra={'foo': 123})
+ self.post_json('/nodes', ndict)
+ result = self.get_json('/nodes/%s' % ndict['uuid'])
+ self.assertEqual(ndict['extra'], result['extra'])
+ cn_mock.assert_called_once_with(mock.ANY)
+ # Check that 'id' is not in first arg of positional args
+ self.assertNotIn('id', cn_mock.call_args[0][0])
+
+ def _test_jsontype_attributes(self, attr_name):
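+        # Round-trip a value of every JSON type through the given node
+        # attribute and verify it is stored and returned unchanged.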
+ kwargs = {attr_name: {'str': 'foo', 'int': 123, 'float': 0.1,
+ 'bool': True, 'list': [1, 2], 'none': None,
+ 'dict': {'cat': 'meow'}}}
+ ndict = test_api_utils.post_get_test_node(**kwargs)
+ self.post_json('/nodes', ndict)
+ result = self.get_json('/nodes/%s' % ndict['uuid'])
+ self.assertEqual(ndict[attr_name], result[attr_name])
+
+ def test_create_node_valid_extra(self):
+ self._test_jsontype_attributes('extra')
+
+ def test_create_node_valid_properties(self):
+ self._test_jsontype_attributes('properties')
+
+ def test_create_node_valid_driver_info(self):
+ self._test_jsontype_attributes('driver_info')
+
+ def test_create_node_valid_instance_info(self):
+ self._test_jsontype_attributes('instance_info')
+
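+    # Helper: asynchronous passthru calls should return 202 (Accepted),
+    # while synchronous ones return 200 (OK) with the JSON-encoded return
+    # value in the response body.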
+ def _test_vendor_passthru_ok(self, mock_vendor, return_value=None,
+ is_async=True):
+ expected_status = http_client.ACCEPTED if is_async else http_client.OK
+ expected_return_value = json.dumps(return_value)
+ if six.PY3:
+ expected_return_value = expected_return_value.encode('utf-8')
+
+ node = obj_utils.create_test_node(self.context)
+ info = {'foo': 'bar'}
+ mock_vendor.return_value = {'return': return_value,
+ 'async': is_async,
+ 'attach': False}
+ response = self.post_json('/nodes/%s/vendor_passthru/test' % node.uuid,
+ info)
+ mock_vendor.assert_called_once_with(
+ mock.ANY, node.uuid, 'test', 'POST', info, 'test-topic')
+ self.assertEqual(expected_return_value, response.body)
+ self.assertEqual(expected_status, response.status_code)
+
+ def _test_vendor_passthru_ok_by_name(self, mock_vendor, return_value=None,
+ is_async=True):
+ expected_status = http_client.ACCEPTED if is_async else http_client.OK
+ expected_return_value = json.dumps(return_value)
+ if six.PY3:
+ expected_return_value = expected_return_value.encode('utf-8')
+
+ node = obj_utils.create_test_node(self.context, name='node-109')
+ info = {'foo': 'bar'}
+ mock_vendor.return_value = {'return': return_value,
+ 'async': is_async,
+ 'attach': False}
+ response = self.post_json('/nodes/%s/vendor_passthru/test' % node.name,
+ info,
+ headers={api_base.Version.string: "1.5"})
+ mock_vendor.assert_called_once_with(
+ mock.ANY, node.uuid, 'test', 'POST', info, 'test-topic')
+ self.assertEqual(expected_return_value, response.body)
+ self.assertEqual(expected_status, response.status_code)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
+ def test_vendor_passthru_async(self, mock_vendor):
+ self._test_vendor_passthru_ok(mock_vendor)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
+ def test_vendor_passthru_sync(self, mock_vendor):
+ return_value = {'cat': 'meow'}
+ self._test_vendor_passthru_ok(mock_vendor, return_value=return_value,
+ is_async=False)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
+ def test_vendor_passthru_put(self, mocked_vendor_passthru):
+ node = obj_utils.create_test_node(self.context)
+ return_value = {'return': None, 'async': True, 'attach': False}
+ mocked_vendor_passthru.return_value = return_value
+ response = self.put_json(
+ '/nodes/%s/vendor_passthru/do_test' % node.uuid,
+ {'test_key': 'test_value'})
+ self.assertEqual(http_client.ACCEPTED, response.status_int)
+ self.assertEqual(return_value['return'], response.json)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
+ def test_vendor_passthru_by_name(self, mock_vendor):
+ self._test_vendor_passthru_ok_by_name(mock_vendor)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
+ def test_vendor_passthru_get(self, mocked_vendor_passthru):
+ node = obj_utils.create_test_node(self.context)
+ return_value = {'return': 'foo', 'async': False, 'attach': False}
+ mocked_vendor_passthru.return_value = return_value
+ response = self.get_json(
+ '/nodes/%s/vendor_passthru/do_test' % node.uuid)
+ self.assertEqual(return_value['return'], response)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
+ def test_vendor_passthru_delete(self, mock_vendor_passthru):
+ node = obj_utils.create_test_node(self.context)
+ return_value = {'return': None, 'async': True, 'attach': False}
+ mock_vendor_passthru.return_value = return_value
+ response = self.delete(
+ '/nodes/%s/vendor_passthru/do_test' % node.uuid)
+ self.assertEqual(http_client.ACCEPTED, response.status_int)
+ self.assertEqual(return_value['return'], response.json)
+
+ def test_vendor_passthru_no_such_method(self):
+ node = obj_utils.create_test_node(self.context)
+ uuid = node.uuid
+ info = {'foo': 'bar'}
+
+ with mock.patch.object(
+ rpcapi.ConductorAPI, 'vendor_passthru') as mock_vendor:
+ mock_vendor.side_effect = exception.UnsupportedDriverExtension(
+ **{'driver': node.driver, 'node': uuid, 'extension': 'test'})
+ response = self.post_json('/nodes/%s/vendor_passthru/test' % uuid,
+ info, expect_errors=True)
+ mock_vendor.assert_called_once_with(
+ mock.ANY, uuid, 'test', 'POST', info, 'test-topic')
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+
+ def test_vendor_passthru_without_method(self):
+ node = obj_utils.create_test_node(self.context)
+ response = self.post_json('/nodes/%s/vendor_passthru' % node.uuid,
+ {'foo': 'bar'}, expect_errors=True)
+        self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+
+ def test_post_ports_subresource(self):
+ node = obj_utils.create_test_node(self.context)
+ pdict = test_api_utils.port_post_data(node_id=None)
+ pdict['node_uuid'] = node.uuid
+ response = self.post_json('/nodes/ports', pdict,
+ expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, response.status_int)
+
+ def test_create_node_no_mandatory_field_driver(self):
+ ndict = test_api_utils.post_get_test_node()
+ del ndict['driver']
+ response = self.post_json('/nodes', ndict, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ def test_create_node_invalid_driver(self):
+ ndict = test_api_utils.post_get_test_node()
+ self.mock_gtf.side_effect = exception.NoValidHost('Fake Error')
+ response = self.post_json('/nodes', ndict, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ def test_create_node_no_chassis_uuid(self):
+ ndict = test_api_utils.post_get_test_node()
+ del ndict['chassis_uuid']
+ response = self.post_json('/nodes', ndict)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.CREATED, response.status_int)
+ # Check location header
+ self.assertIsNotNone(response.location)
+ expected_location = '/v1/nodes/%s' % ndict['uuid']
+ self.assertEqual(urlparse.urlparse(response.location).path,
+ expected_location)
+
+ def test_create_node_with_chassis_uuid(self):
+ ndict = test_api_utils.post_get_test_node(
+ chassis_uuid=self.chassis.uuid)
+ response = self.post_json('/nodes', ndict)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.CREATED, response.status_int)
+ result = self.get_json('/nodes/%s' % ndict['uuid'])
+ self.assertEqual(ndict['chassis_uuid'], result['chassis_uuid'])
+ # Check location header
+ self.assertIsNotNone(response.location)
+ expected_location = '/v1/nodes/%s' % ndict['uuid']
+ self.assertEqual(urlparse.urlparse(response.location).path,
+ expected_location)
+
+ def test_create_node_chassis_uuid_not_found(self):
+ ndict = test_api_utils.post_get_test_node(
+ chassis_uuid='1a1a1a1a-2b2b-3c3c-4d4d-5e5e5e5e5e5e')
+ response = self.post_json('/nodes', ndict, expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertTrue(response.json['error_message'])
+
+ def test_create_node_with_internal_field(self):
+ ndict = test_api_utils.post_get_test_node()
+ ndict['reservation'] = 'fake'
+ response = self.post_json('/nodes', ndict, expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertTrue(response.json['error_message'])
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'get_node_vendor_passthru_methods')
+ def test_vendor_passthru_methods(self, get_methods_mock):
+ return_value = {'foo': 'bar'}
+ get_methods_mock.return_value = return_value
+ node = obj_utils.create_test_node(self.context)
+ path = '/nodes/%s/vendor_passthru/methods' % node.uuid
+
+ data = self.get_json(path)
+ self.assertEqual(return_value, data)
+ get_methods_mock.assert_called_once_with(mock.ANY, node.uuid,
+ topic=mock.ANY)
+
+ # Now let's test the cache: Reset the mock
+ get_methods_mock.reset_mock()
+
+ # Call it again
+ data = self.get_json(path)
+ self.assertEqual(return_value, data)
+ # Assert RPC method wasn't called this time
+ self.assertFalse(get_methods_mock.called)
+
+
+class TestDelete(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestDelete, self).setUp()
+ p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
+ self.mock_gtf = p.start()
+ self.mock_gtf.return_value = 'test-topic'
+ self.addCleanup(p.stop)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
+ def test_delete_node(self, mock_dn):
+ node = obj_utils.create_test_node(self.context)
+ self.delete('/nodes/%s' % node.uuid)
+ mock_dn.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
+ def test_delete_node_by_name_unsupported(self, mock_dn):
+ node = obj_utils.create_test_node(self.context, name='foo')
+ self.delete('/nodes/%s' % node.name,
+ expect_errors=True)
+ self.assertFalse(mock_dn.called)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
+ def test_delete_node_by_name(self, mock_dn):
+ node = obj_utils.create_test_node(self.context, name='foo')
+ self.delete('/nodes/%s' % node.name,
+ headers={api_base.Version.string: "1.5"})
+ mock_dn.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ def test_delete_node_not_found(self, mock_gbu):
+ node = obj_utils.get_test_node(self.context)
+ mock_gbu.side_effect = exception.NodeNotFound(node=node.uuid)
+
+ response = self.delete('/nodes/%s' % node.uuid, expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+ mock_gbu.assert_called_once_with(mock.ANY, node.uuid)
+
+ @mock.patch.object(objects.Node, 'get_by_name')
+ def test_delete_node_not_found_by_name_unsupported(self, mock_gbn):
+ node = obj_utils.get_test_node(self.context, name='foo')
+ mock_gbn.side_effect = exception.NodeNotFound(node=node.name)
+
+ response = self.delete('/nodes/%s' % node.name,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+ self.assertFalse(mock_gbn.called)
+
+ @mock.patch.object(objects.Node, 'get_by_name')
+ def test_delete_node_not_found_by_name(self, mock_gbn):
+ node = obj_utils.get_test_node(self.context, name='foo')
+ mock_gbn.side_effect = exception.NodeNotFound(node=node.name)
+
+ response = self.delete('/nodes/%s' % node.name,
+ headers={api_base.Version.string: "1.5"},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+ mock_gbn.assert_called_once_with(mock.ANY, node.name)
+
+ def test_delete_ports_subresource(self):
+ node = obj_utils.create_test_node(self.context)
+ response = self.delete('/nodes/%s/ports' % node.uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.FORBIDDEN, response.status_int)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
+ def test_delete_associated(self, mock_dn):
+ node = obj_utils.create_test_node(
+ self.context,
+ instance_uuid='aaaaaaaa-1111-bbbb-2222-cccccccccccc')
+ mock_dn.side_effect = exception.NodeAssociated(
+ node=node.uuid, instance=node.instance_uuid)
+
+ response = self.delete('/nodes/%s' % node.uuid, expect_errors=True)
+ self.assertEqual(http_client.CONFLICT, response.status_int)
+ mock_dn.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
+
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ @mock.patch.object(rpcapi.ConductorAPI, 'update_node')
+ def test_delete_node_maintenance_mode(self, mock_update, mock_get):
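+        # DELETE on the maintenance sub-resource clears both the
+        # maintenance flag and the reason, and persists the change via the
+        # update_node RPC.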
+ node = obj_utils.create_test_node(self.context, maintenance=True,
+ maintenance_reason='blah')
+ mock_get.return_value = node
+ response = self.delete('/nodes/%s/maintenance' % node.uuid)
+ self.assertEqual(http_client.ACCEPTED, response.status_int)
+ self.assertEqual(b'', response.body)
+ self.assertEqual(False, node.maintenance)
+ self.assertIsNone(node.maintenance_reason)
+ mock_get.assert_called_once_with(mock.ANY, node.uuid)
+ mock_update.assert_called_once_with(mock.ANY, mock.ANY,
+ topic='test-topic')
+
+ @mock.patch.object(objects.Node, 'get_by_name')
+ @mock.patch.object(rpcapi.ConductorAPI, 'update_node')
+ def test_delete_node_maintenance_mode_by_name(self, mock_update,
+ mock_get):
+ node = obj_utils.create_test_node(self.context, maintenance=True,
+ maintenance_reason='blah',
+ name='foo')
+ mock_get.return_value = node
+ response = self.delete('/nodes/%s/maintenance' % node.name,
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual(http_client.ACCEPTED, response.status_int)
+ self.assertEqual(b'', response.body)
+ self.assertEqual(False, node.maintenance)
+ self.assertIsNone(node.maintenance_reason)
+ mock_get.assert_called_once_with(mock.ANY, node.name)
+ mock_update.assert_called_once_with(mock.ANY, mock.ANY,
+ topic='test-topic')
+
+
+class TestPut(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestPut, self).setUp()
+ self.node = obj_utils.create_test_node(
+ self.context,
+ provision_state=states.AVAILABLE, name='node-39')
+ p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
+ self.mock_gtf = p.start()
+ self.mock_gtf.return_value = 'test-topic'
+ self.addCleanup(p.stop)
+ p = mock.patch.object(rpcapi.ConductorAPI, 'change_node_power_state')
+ self.mock_cnps = p.start()
+ self.addCleanup(p.stop)
+ p = mock.patch.object(rpcapi.ConductorAPI, 'do_node_deploy')
+ self.mock_dnd = p.start()
+ self.addCleanup(p.stop)
+ p = mock.patch.object(rpcapi.ConductorAPI, 'do_node_tear_down')
+ self.mock_dntd = p.start()
+ self.addCleanup(p.stop)
+ p = mock.patch.object(rpcapi.ConductorAPI, 'inspect_hardware')
+ self.mock_dnih = p.start()
+ self.addCleanup(p.stop)
+
+ def test_power_state(self):
+ response = self.put_json('/nodes/%s/states/power' % self.node.uuid,
+ {'target': states.POWER_ON})
+ self.assertEqual(http_client.ACCEPTED, response.status_code)
+ self.assertEqual(b'', response.body)
+ self.mock_cnps.assert_called_once_with(mock.ANY,
+ self.node.uuid,
+ states.POWER_ON,
+ 'test-topic')
+ # Check location header
+ self.assertIsNotNone(response.location)
+ expected_location = '/v1/nodes/%s/states' % self.node.uuid
+ self.assertEqual(urlparse.urlparse(response.location).path,
+ expected_location)
+
+ def test_power_state_by_name_unsupported(self):
+ response = self.put_json('/nodes/%s/states/power' % self.node.name,
+ {'target': states.POWER_ON},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_code)
+
+ def test_power_state_by_name(self):
+ response = self.put_json('/nodes/%s/states/power' % self.node.name,
+ {'target': states.POWER_ON},
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual(http_client.ACCEPTED, response.status_code)
+ self.assertEqual(b'', response.body)
+ self.mock_cnps.assert_called_once_with(mock.ANY,
+ self.node.uuid,
+ states.POWER_ON,
+ 'test-topic')
+ # Check location header
+ self.assertIsNotNone(response.location)
+ expected_location = '/v1/nodes/%s/states' % self.node.name
+ self.assertEqual(urlparse.urlparse(response.location).path,
+ expected_location)
+
+ def test_power_invalid_state_request(self):
+ ret = self.put_json('/nodes/%s/states/power' % self.node.uuid,
+ {'target': 'not-supported'}, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+
+ def test_power_change_when_being_cleaned(self):
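+        # Power state changes are rejected while the node is being cleaned.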
+ for state in (states.CLEANING, states.CLEANWAIT):
+ self.node.provision_state = state
+ self.node.save()
+ ret = self.put_json('/nodes/%s/states/power' % self.node.uuid,
+ {'target': states.POWER_OFF},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+
+ def test_provision_invalid_state_request(self):
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': 'not-supported'}, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+
+ def test_provision_with_deploy(self):
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.ACTIVE})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ self.mock_dnd.assert_called_once_with(
+ mock.ANY, self.node.uuid, False, None, 'test-topic')
+ # Check location header
+ self.assertIsNotNone(ret.location)
+ expected_location = '/v1/nodes/%s/states' % self.node.uuid
+ self.assertEqual(urlparse.urlparse(ret.location).path,
+ expected_location)
+
+ def test_provision_by_name_unsupported(self):
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.name,
+ {'target': states.ACTIVE},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, ret.status_code)
+
+ def test_provision_by_name(self):
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.name,
+ {'target': states.ACTIVE},
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ self.mock_dnd.assert_called_once_with(
+ mock.ANY, self.node.uuid, False, None, 'test-topic')
+
+ def test_provision_with_deploy_configdrive(self):
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.ACTIVE, 'configdrive': 'foo'})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ self.mock_dnd.assert_called_once_with(
+ mock.ANY, self.node.uuid, False, 'foo', 'test-topic')
+ # Check location header
+ self.assertIsNotNone(ret.location)
+ expected_location = '/v1/nodes/%s/states' % self.node.uuid
+ self.assertEqual(urlparse.urlparse(ret.location).path,
+ expected_location)
+
+ def test_provision_with_configdrive_not_active(self):
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.DELETED, 'configdrive': 'foo'},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+
+ def test_provision_with_tear_down(self):
+ node = self.node
+ node.provision_state = states.ACTIVE
+ node.target_provision_state = states.NOSTATE
+ node.save()
+ ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
+ {'target': states.DELETED})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ self.mock_dntd.assert_called_once_with(
+ mock.ANY, node.uuid, 'test-topic')
+ # Check location header
+ self.assertIsNotNone(ret.location)
+ expected_location = '/v1/nodes/%s/states' % node.uuid
+ self.assertEqual(urlparse.urlparse(ret.location).path,
+ expected_location)
+
+ def test_provision_already_in_progress(self):
+ node = self.node
+ node.provision_state = states.DEPLOYING
+ node.target_provision_state = states.ACTIVE
+ node.reservation = 'fake-host'
+ node.save()
+ ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
+ {'target': states.ACTIVE},
+ expect_errors=True)
+ self.assertEqual(http_client.CONFLICT, ret.status_code) # Conflict
+ self.assertFalse(self.mock_dnd.called)
+
+ def test_provision_locked_with_correct_state(self):
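+        # A NodeLocked error raised by the conductor is translated into
+        # 409 (Conflict) by the API.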
+ node = self.node
+ node.provision_state = states.AVAILABLE
+ node.target_provision_state = states.NOSTATE
+ node.reservation = 'fake-host'
+ node.save()
+ self.mock_dnd.side_effect = iter([exception.NodeLocked(node='',
+ host='')])
+ ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
+ {'target': states.ACTIVE},
+ expect_errors=True)
+ self.assertEqual(http_client.CONFLICT, ret.status_code) # Conflict
+ self.assertTrue(self.mock_dnd.called)
+
+ def test_provision_with_tear_down_in_progress_deploywait(self):
+ node = self.node
+ node.provision_state = states.DEPLOYWAIT
+ node.target_provision_state = states.ACTIVE
+ node.save()
+ ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
+ {'target': states.DELETED})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ self.mock_dntd.assert_called_once_with(
+ mock.ANY, node.uuid, 'test-topic')
+ # Check location header
+ self.assertIsNotNone(ret.location)
+ expected_location = '/v1/nodes/%s/states' % node.uuid
+ self.assertEqual(urlparse.urlparse(ret.location).path,
+ expected_location)
+
+    # NOTE(deva): this test asserts API functionality which is not part of
+ # the new-ironic-state-machine in Kilo. It is retained for backwards
+ # compatibility with Juno.
+ # TODO(deva): add a deprecation-warning to the REST result
+ # and check for it here.
+ def test_provision_with_deploy_after_deployfail(self):
+ node = self.node
+ node.provision_state = states.DEPLOYFAIL
+ node.target_provision_state = states.ACTIVE
+ node.save()
+ ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
+ {'target': states.ACTIVE})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ self.mock_dnd.assert_called_once_with(
+ mock.ANY, node.uuid, False, None, 'test-topic')
+ # Check location header
+ self.assertIsNotNone(ret.location)
+ expected_location = '/v1/nodes/%s/states' % node.uuid
+ self.assertEqual(expected_location,
+ urlparse.urlparse(ret.location).path)
+
+ def test_provision_already_in_state(self):
+ self.node.provision_state = states.ACTIVE
+ self.node.save()
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.ACTIVE},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+
+ def test_manage_raises_error_before_1_2(self):
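+        # The 'manage' verb is not available before API version 1.2;
+        # without a version header the request is rejected with 406
+        # (Not Acceptable).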
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.VERBS['manage']},
+ headers={},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, ret.status_code)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'do_provisioning_action')
+ def test_provide_from_manage(self, mock_dpa):
+ self.node.provision_state = states.MANAGEABLE
+ self.node.save()
+
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.VERBS['provide']},
+ headers={api_base.Version.string: "1.4"})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_dpa.assert_called_once_with(mock.ANY, self.node.uuid,
+ states.VERBS['provide'],
+ 'test-topic')
+
+ def test_inspect_already_in_progress(self):
+ node = self.node
+ node.provision_state = states.INSPECTING
+ node.target_provision_state = states.MANAGEABLE
+ node.reservation = 'fake-host'
+ node.save()
+ ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
+ {'target': states.MANAGEABLE},
+ expect_errors=True)
+ self.assertEqual(http_client.CONFLICT, ret.status_code) # Conflict
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'do_provisioning_action')
+ def test_manage_from_available(self, mock_dpa):
+ self.node.provision_state = states.AVAILABLE
+ self.node.save()
+
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.VERBS['manage']},
+ headers={api_base.Version.string: "1.4"})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_dpa.assert_called_once_with(mock.ANY, self.node.uuid,
+ states.VERBS['manage'],
+ 'test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'do_provisioning_action')
+ def test_bad_requests_in_managed_state(self, mock_dpa):
+ self.node.provision_state = states.MANAGEABLE
+ self.node.save()
+
+ for state in [states.ACTIVE, states.REBUILD, states.DELETED]:
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+                                {'target': state},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ self.assertEqual(0, mock_dpa.call_count)
+
+ def test_abort_unsupported(self):
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.VERBS['abort']},
+ headers={api_base.Version.string: "1.12"},
+ expect_errors=True)
+        self.assertEqual(http_client.NOT_ACCEPTABLE, ret.status_code)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'do_provisioning_action')
+ def test_abort_cleanwait(self, mock_dpa):
+ self.node.provision_state = states.CLEANWAIT
+ self.node.save()
+
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.VERBS['abort']},
+ headers={api_base.Version.string: "1.13"})
+        self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_dpa.assert_called_once_with(mock.ANY, self.node.uuid,
+ states.VERBS['abort'],
+ 'test-topic')
+
+ def test_abort_invalid_state(self):
+ # "abort" is only valid for nodes in CLEANWAIT
+ self.node.provision_state = states.CLEANING
+ self.node.save()
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.VERBS['abort']},
+ headers={api_base.Version.string: "1.13"},
+ expect_errors=True)
+        self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+
+ def test_set_console_mode_enabled(self):
+ with mock.patch.object(rpcapi.ConductorAPI,
+ 'set_console_mode') as mock_scm:
+ ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
+ {'enabled': "true"})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
+ True, 'test-topic')
+ # Check location header
+ self.assertIsNotNone(ret.location)
+ expected_location = '/v1/nodes/%s/states/console' % self.node.uuid
+ self.assertEqual(urlparse.urlparse(ret.location).path,
+ expected_location)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode')
+ def test_set_console_by_name_unsupported(self, mock_scm):
+ ret = self.put_json('/nodes/%s/states/console' % self.node.name,
+ {'enabled': "true"},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, ret.status_code)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode')
+ def test_set_console_by_name(self, mock_scm):
+ ret = self.put_json('/nodes/%s/states/console' % self.node.name,
+ {'enabled': "true"},
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
+ True, 'test-topic')
+
+ def test_set_console_mode_disabled(self):
+ with mock.patch.object(rpcapi.ConductorAPI,
+ 'set_console_mode') as mock_scm:
+ ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
+ {'enabled': "false"})
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
+ False, 'test-topic')
+ # Check location header
+ self.assertIsNotNone(ret.location)
+ expected_location = '/v1/nodes/%s/states/console' % self.node.uuid
+ self.assertEqual(urlparse.urlparse(ret.location).path,
+ expected_location)
+
+ def test_set_console_mode_bad_request(self):
+ with mock.patch.object(rpcapi.ConductorAPI,
+ 'set_console_mode') as mock_scm:
+ ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
+ {'enabled': "invalid-value"},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ # assert set_console_mode wasn't called
+            self.assertFalse(mock_scm.called)
+
+ def test_set_console_mode_bad_request_missing_parameter(self):
+ with mock.patch.object(rpcapi.ConductorAPI,
+ 'set_console_mode') as mock_scm:
+ ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
+ {}, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ # assert set_console_mode wasn't called
+            self.assertFalse(mock_scm.called)
+
+ def test_set_console_mode_console_not_supported(self):
+ with mock.patch.object(rpcapi.ConductorAPI,
+ 'set_console_mode') as mock_scm:
+ mock_scm.side_effect = exception.UnsupportedDriverExtension(
+ extension='console', driver='test-driver')
+ ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
+ {'enabled': "true"}, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
+ True, 'test-topic')
+
+ def test_provision_node_in_maintenance_fail(self):
+ self.node.maintenance = True
+ self.node.save()
+
+ ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
+ {'target': states.ACTIVE},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ self.assertTrue(ret.json['error_message'])
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_target_raid_config',
+ autospec=True)
+ def test_put_raid(self, set_raid_config_mock):
+ raid_config = {'logical_disks': [{'size_gb': 100, 'raid_level': 1}]}
+ ret = self.put_json(
+ '/nodes/%s/states/raid' % self.node.uuid, raid_config,
+ headers={api_base.Version.string: "1.12"})
+        self.assertEqual(http_client.NO_CONTENT, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ set_raid_config_mock.assert_called_once_with(
+ mock.ANY, mock.ANY, self.node.uuid, raid_config, topic=mock.ANY)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_target_raid_config',
+ autospec=True)
+ def test_put_raid_older_version(self, set_raid_config_mock):
+ raid_config = {'logical_disks': [{'size_gb': 100, 'raid_level': 1}]}
+ ret = self.put_json(
+ '/nodes/%s/states/raid' % self.node.uuid, raid_config,
+ headers={api_base.Version.string: "1.5"},
+ expect_errors=True)
+        self.assertEqual(http_client.NOT_ACCEPTABLE, ret.status_code)
+ self.assertFalse(set_raid_config_mock.called)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_target_raid_config',
+ autospec=True)
+ def test_put_raid_iface_not_supported(self, set_raid_config_mock):
+ raid_config = {'logical_disks': [{'size_gb': 100, 'raid_level': 1}]}
+ set_raid_config_mock.side_effect = iter([
+ exception.UnsupportedDriverExtension(extension='raid',
+ driver='fake')])
+ ret = self.put_json(
+ '/nodes/%s/states/raid' % self.node.uuid, raid_config,
+ headers={api_base.Version.string: "1.12"},
+ expect_errors=True)
+        self.assertEqual(http_client.NOT_FOUND, ret.status_code)
+ self.assertTrue(ret.json['error_message'])
+ set_raid_config_mock.assert_called_once_with(
+ mock.ANY, mock.ANY, self.node.uuid, raid_config, topic=mock.ANY)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_target_raid_config',
+ autospec=True)
+ def test_put_raid_invalid_parameter_value(self, set_raid_config_mock):
+ raid_config = {'logical_disks': [{'size_gb': 100, 'raid_level': 1}]}
+ set_raid_config_mock.side_effect = iter([
+ exception.InvalidParameterValue('foo')])
+ ret = self.put_json(
+ '/nodes/%s/states/raid' % self.node.uuid, raid_config,
+ headers={api_base.Version.string: "1.12"},
+ expect_errors=True)
+        self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ self.assertTrue(ret.json['error_message'])
+ set_raid_config_mock.assert_called_once_with(
+ mock.ANY, mock.ANY, self.node.uuid, raid_config, topic=mock.ANY)
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
+ def test_set_boot_device(self, mock_sbd):
+ device = boot_devices.PXE
+ ret = self.put_json('/nodes/%s/management/boot_device'
+ % self.node.uuid, {'boot_device': device})
+ self.assertEqual(http_client.NO_CONTENT, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
+ device, persistent=False,
+ topic='test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
+ def test_set_boot_device_by_name(self, mock_sbd):
+ device = boot_devices.PXE
+ ret = self.put_json('/nodes/%s/management/boot_device'
+ % self.node.name, {'boot_device': device},
+ headers={api_base.Version.string: "1.5"})
+ self.assertEqual(http_client.NO_CONTENT, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
+ device, persistent=False,
+ topic='test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
+ def test_set_boot_device_not_supported(self, mock_sbd):
+ mock_sbd.side_effect = exception.UnsupportedDriverExtension(
+ extension='management', driver='test-driver')
+ device = boot_devices.PXE
+ ret = self.put_json('/nodes/%s/management/boot_device'
+ % self.node.uuid, {'boot_device': device},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+ self.assertTrue(ret.json['error_message'])
+ mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
+ device, persistent=False,
+ topic='test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
+ def test_set_boot_device_persistent(self, mock_sbd):
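+        # The 'persistent' query parameter is forwarded to the conductor
+        # RPC call as persistent=True.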
+ device = boot_devices.PXE
+ ret = self.put_json('/nodes/%s/management/boot_device?persistent=True'
+ % self.node.uuid, {'boot_device': device})
+ self.assertEqual(http_client.NO_CONTENT, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
+ device, persistent=True,
+ topic='test-topic')
+
+ @mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
+ def test_set_boot_device_persistent_invalid_value(self, mock_sbd):
+ device = boot_devices.PXE
+ ret = self.put_json('/nodes/%s/management/boot_device?persistent=blah'
+ % self.node.uuid, {'boot_device': device},
+ expect_errors=True)
+ self.assertEqual('application/json', ret.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_code)
+
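+    # Helper: PUT /nodes/<ident>/maintenance with an optional reason and
+    # verify that the maintenance flag and reason are set and the node is
+    # updated via the update_node RPC.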
+ def _test_set_node_maintenance_mode(self, mock_update, mock_get, reason,
+ node_ident, is_by_name=False):
+ request_body = {}
+ if reason:
+ request_body['reason'] = reason
+
+ self.node.maintenance = False
+ mock_get.return_value = self.node
+ if is_by_name:
+ headers = {api_base.Version.string: "1.5"}
+ else:
+ headers = {}
+ ret = self.put_json('/nodes/%s/maintenance' % node_ident,
+ request_body, headers=headers)
+ self.assertEqual(http_client.ACCEPTED, ret.status_code)
+ self.assertEqual(b'', ret.body)
+ self.assertEqual(True, self.node.maintenance)
+ self.assertEqual(reason, self.node.maintenance_reason)
+ mock_get.assert_called_once_with(mock.ANY, node_ident)
+ mock_update.assert_called_once_with(mock.ANY, mock.ANY,
+ topic='test-topic')
+
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ @mock.patch.object(rpcapi.ConductorAPI, 'update_node')
+ def test_set_node_maintenance_mode(self, mock_update, mock_get):
+ self._test_set_node_maintenance_mode(mock_update, mock_get,
+ 'fake_reason', self.node.uuid)
+
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ @mock.patch.object(rpcapi.ConductorAPI, 'update_node')
+ def test_set_node_maintenance_mode_no_reason(self, mock_update, mock_get):
+ self._test_set_node_maintenance_mode(mock_update, mock_get, None,
+ self.node.uuid)
+
+ @mock.patch.object(objects.Node, 'get_by_name')
+ @mock.patch.object(rpcapi.ConductorAPI, 'update_node')
+ def test_set_node_maintenance_mode_by_name(self, mock_update, mock_get):
+ self._test_set_node_maintenance_mode(mock_update, mock_get,
+ 'fake_reason', self.node.name,
+ is_by_name=True)
+
+ @mock.patch.object(objects.Node, 'get_by_name')
+ @mock.patch.object(rpcapi.ConductorAPI, 'update_node')
+ def test_set_node_maintenance_mode_no_reason_by_name(self, mock_update,
+ mock_get):
+ self._test_set_node_maintenance_mode(mock_update, mock_get, None,
+ self.node.name, is_by_name=True)
diff --git a/ironic/tests/unit/api/v1/test_ports.py b/ironic/tests/unit/api/v1/test_ports.py
new file mode 100644
index 000000000..75e4889bb
--- /dev/null
+++ b/ironic/tests/unit/api/v1/test_ports.py
@@ -0,0 +1,806 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Tests for the API /ports/ methods.
+"""
+
+import datetime
+
+import mock
+from oslo_config import cfg
+from oslo_utils import timeutils
+from oslo_utils import uuidutils
+import six
+from six.moves import http_client
+from six.moves.urllib import parse as urlparse
+from testtools.matchers import HasLength
+from wsme import types as wtypes
+
+from ironic.api.controllers import base as api_base
+from ironic.api.controllers import v1 as api_v1
+from ironic.api.controllers.v1 import port as api_port
+from ironic.api.controllers.v1 import utils as api_utils
+from ironic.common import exception
+from ironic.conductor import rpcapi
+from ironic.tests.unit.api import base as test_api_base
+from ironic.tests.unit.api import utils as apiutils
+from ironic.tests.unit import base
+from ironic.tests.unit.db import utils as dbutils
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+# NOTE(lucasagomes): When creating a port via API (POST)
+# we have to use node_uuid
+def post_get_test_port(**kw):
+ port = apiutils.port_post_data(**kw)
+ node = dbutils.get_test_node()
+ port['node_uuid'] = kw.get('node_uuid', node['uuid'])
+ return port
+
+
+class TestPortObject(base.TestCase):
+
+ def test_port_init(self):
+ port_dict = apiutils.port_post_data(node_id=None)
+ del port_dict['extra']
+ port = api_port.Port(**port_dict)
+ self.assertEqual(wtypes.Unset, port.extra)
+
+
+class TestListPorts(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestListPorts, self).setUp()
+ self.node = obj_utils.create_test_node(self.context)
+
+ def test_empty(self):
+ data = self.get_json('/ports')
+ self.assertEqual([], data['ports'])
+
+ def test_one(self):
+ port = obj_utils.create_test_port(self.context, node_id=self.node.id)
+ data = self.get_json('/ports')
+ self.assertEqual(port.uuid, data['ports'][0]["uuid"])
+ self.assertNotIn('extra', data['ports'][0])
+ self.assertNotIn('node_uuid', data['ports'][0])
+ # never expose the node_id
+ self.assertNotIn('node_id', data['ports'][0])
+
+ def test_get_one(self):
+ port = obj_utils.create_test_port(self.context, node_id=self.node.id)
+ data = self.get_json('/ports/%s' % port.uuid)
+ self.assertEqual(port.uuid, data['uuid'])
+ self.assertIn('extra', data)
+ self.assertIn('node_uuid', data)
+ # never expose the node_id
+ self.assertNotIn('node_id', data)
+
+ def test_get_one_custom_fields(self):
+ port = obj_utils.create_test_port(self.context, node_id=self.node.id)
+ fields = 'address,extra'
+ data = self.get_json(
+ '/ports/%s?fields=%s' % (port.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+ # We always append "links"
+ self.assertItemsEqual(['address', 'extra', 'links'], data)
+
+ def test_get_collection_custom_fields(self):
+ fields = 'uuid,extra'
+ for i in range(3):
+ obj_utils.create_test_port(self.context,
+ node_id=self.node.id,
+ uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:3%s' % i)
+
+ data = self.get_json(
+ '/ports?fields=%s' % fields,
+ headers={api_base.Version.string: str(api_v1.MAX_VER)})
+
+ self.assertEqual(3, len(data['ports']))
+ for port in data['ports']:
+ # We always append "links"
+ self.assertItemsEqual(['uuid', 'extra', 'links'], port)
+
+ def test_get_custom_fields_invalid_fields(self):
+ port = obj_utils.create_test_port(self.context, node_id=self.node.id)
+ fields = 'uuid,spongebob'
+ response = self.get_json(
+ '/ports/%s?fields=%s' % (port.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MAX_VER)},
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertIn('spongebob', response.json['error_message'])
+
+ def test_get_custom_fields_invalid_api_version(self):
+ port = obj_utils.create_test_port(self.context, node_id=self.node.id)
+ fields = 'uuid,extra'
+ response = self.get_json(
+ '/ports/%s?fields=%s' % (port.uuid, fields),
+ headers={api_base.Version.string: str(api_v1.MIN_VER)},
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, response.status_int)
+
+ def test_detail(self):
+ port = obj_utils.create_test_port(self.context, node_id=self.node.id)
+ data = self.get_json('/ports/detail')
+ self.assertEqual(port.uuid, data['ports'][0]["uuid"])
+ self.assertIn('extra', data['ports'][0])
+ self.assertIn('node_uuid', data['ports'][0])
+ # never expose the node_id
+ self.assertNotIn('node_id', data['ports'][0])
+
+ def test_detail_against_single(self):
+ port = obj_utils.create_test_port(self.context, node_id=self.node.id)
+ response = self.get_json('/ports/%s/detail' % port.uuid,
+ expect_errors=True)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+
+ def test_many(self):
+ ports = []
+ for id_ in range(5):
+ port = obj_utils.create_test_port(
+ self.context, node_id=self.node.id,
+ uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:3%s' % id_)
+ ports.append(port.uuid)
+ data = self.get_json('/ports')
+ self.assertEqual(len(ports), len(data['ports']))
+
+ uuids = [n['uuid'] for n in data['ports']]
+ six.assertCountEqual(self, ports, uuids)
+
+ def _test_links(self, public_url=None):
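+        # When [api]public_endpoint is set, both the 'self' and 'bookmark'
+        # links should be built from that URL.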
+ cfg.CONF.set_override('public_endpoint', public_url, 'api')
+ uuid = uuidutils.generate_uuid()
+ obj_utils.create_test_port(self.context,
+ uuid=uuid,
+ node_id=self.node.id)
+ data = self.get_json('/ports/%s' % uuid)
+ self.assertIn('links', data.keys())
+ self.assertEqual(2, len(data['links']))
+ self.assertIn(uuid, data['links'][0]['href'])
+        for link in data['links']:
+            bookmark = link['rel'] == 'bookmark'
+            self.assertTrue(
+                self.validate_link(link['href'], bookmark=bookmark))
+
+ if public_url is not None:
+ expected = [{'href': '%s/v1/ports/%s' % (public_url, uuid),
+ 'rel': 'self'},
+ {'href': '%s/ports/%s' % (public_url, uuid),
+ 'rel': 'bookmark'}]
+ for i in expected:
+ self.assertIn(i, data['links'])
+
+ def test_links(self):
+ self._test_links()
+
+ def test_links_public_url(self):
+ self._test_links(public_url='http://foo')
+
+ def test_collection_links(self):
+ ports = []
+ for id_ in range(5):
+ port = obj_utils.create_test_port(
+ self.context,
+ node_id=self.node.id,
+ uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:3%s' % id_)
+ ports.append(port.uuid)
+ data = self.get_json('/ports/?limit=3')
+ self.assertEqual(3, len(data['ports']))
+
+ next_marker = data['ports'][-1]['uuid']
+ self.assertIn(next_marker, data['next'])
+
+ def test_collection_links_default_limit(self):
+ cfg.CONF.set_override('max_limit', 3, 'api')
+ ports = []
+ for id_ in range(5):
+ port = obj_utils.create_test_port(
+ self.context,
+ node_id=self.node.id,
+ uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:3%s' % id_)
+ ports.append(port.uuid)
+ data = self.get_json('/ports')
+ self.assertEqual(3, len(data['ports']))
+
+ next_marker = data['ports'][-1]['uuid']
+ self.assertIn(next_marker, data['next'])
+
+ def test_port_by_address(self):
+ address_template = "aa:bb:cc:dd:ee:f%d"
+ for id_ in range(3):
+ obj_utils.create_test_port(self.context,
+ node_id=self.node.id,
+ uuid=uuidutils.generate_uuid(),
+ address=address_template % id_)
+
+ target_address = address_template % 1
+ data = self.get_json('/ports?address=%s' % target_address)
+ self.assertThat(data['ports'], HasLength(1))
+ self.assertEqual(target_address, data['ports'][0]['address'])
+
+ def test_port_by_address_non_existent_address(self):
+ # non-existent address
+ data = self.get_json('/ports?address=%s' % 'aa:bb:cc:dd:ee:ff')
+ self.assertThat(data['ports'], HasLength(0))
+
+ def test_port_by_address_invalid_address_format(self):
+ obj_utils.create_test_port(self.context, node_id=self.node.id)
+ invalid_address = 'invalid-mac-format'
+ response = self.get_json('/ports?address=%s' % invalid_address,
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertIn(invalid_address, response.json['error_message'])
+
+ def test_sort_key(self):
+ ports = []
+ for id_ in range(3):
+ port = obj_utils.create_test_port(
+ self.context,
+ node_id=self.node.id,
+ uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:3%s' % id_)
+ ports.append(port.uuid)
+ data = self.get_json('/ports?sort_key=uuid')
+ uuids = [n['uuid'] for n in data['ports']]
+ self.assertEqual(sorted(ports), uuids)
+
+ def test_sort_key_invalid(self):
+ invalid_keys_list = ['foo', 'extra']
+ for invalid_key in invalid_keys_list:
+ response = self.get_json('/ports?sort_key=%s' % invalid_key,
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertIn(invalid_key, response.json['error_message'])
+
+ @mock.patch.object(api_utils, 'get_rpc_node')
+ def test_get_all_by_node_name_ok(self, mock_get_rpc_node):
+ # GET /v1/ports specifying node_name - success
+ mock_get_rpc_node.return_value = self.node
+ for i in range(5):
+ if i < 3:
+ node_id = self.node.id
+ else:
+ node_id = 100000 + i
+ obj_utils.create_test_port(self.context,
+ node_id=node_id,
+ uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:3%s' % i)
+ data = self.get_json("/ports?node=%s" % 'test-node',
+ headers={api_base.Version.string: '1.5'})
+ self.assertEqual(3, len(data['ports']))
+
+ @mock.patch.object(api_utils, 'get_rpc_node')
+ def test_get_all_by_node_uuid_and_name(self, mock_get_rpc_node):
+ # GET /v1/ports specifying node and uuid - should only use node_uuid
+ mock_get_rpc_node.return_value = self.node
+ obj_utils.create_test_port(self.context, node_id=self.node.id)
+ self.get_json('/ports/detail?node_uuid=%s&node=%s' %
+ (self.node.uuid, 'node-name'))
+ mock_get_rpc_node.assert_called_once_with(self.node.uuid)
+
+ @mock.patch.object(api_utils, 'get_rpc_node')
+ def test_get_all_by_node_name_not_supported(self, mock_get_rpc_node):
+ # GET /v1/ports specifying node_name - name not supported
+ mock_get_rpc_node.side_effect = (
+ exception.InvalidUuidOrName(name=self.node.uuid))
+ for i in range(3):
+ obj_utils.create_test_port(self.context,
+ node_id=self.node.id,
+ uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:3%s' % i)
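+ # Without an explicit API version header the request is presumably
+ # treated as pre-1.5, where filtering by node name is not supported,
+ # so it is rejected before get_rpc_node is ever called.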
+ data = self.get_json("/ports?node=%s" % 'test-node',
+ expect_errors=True)
+ self.assertEqual(0, mock_get_rpc_node.call_count)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, data.status_int)
+
+ @mock.patch.object(api_utils, 'get_rpc_node')
+ def test_detail_by_node_name_ok(self, mock_get_rpc_node):
+ # GET /v1/ports/detail specifying node_name - success
+ mock_get_rpc_node.return_value = self.node
+ port = obj_utils.create_test_port(self.context, node_id=self.node.id)
+ data = self.get_json('/ports/detail?node=%s' % 'test-node',
+ headers={api_base.Version.string: '1.5'})
+ self.assertEqual(port.uuid, data['ports'][0]['uuid'])
+ self.assertEqual(self.node.uuid, data['ports'][0]['node_uuid'])
+
+ @mock.patch.object(api_utils, 'get_rpc_node')
+ def test_detail_by_node_name_not_supported(self, mock_get_rpc_node):
+ # GET /v1/ports/detail specifying node_name - name not supported
+ mock_get_rpc_node.side_effect = (
+ exception.InvalidUuidOrName(name=self.node.uuid))
+ obj_utils.create_test_port(self.context, node_id=self.node.id)
+ data = self.get_json('/ports/detail?node=%s' % 'test-node',
+ expect_errors=True)
+ self.assertEqual(0, mock_get_rpc_node.call_count)
+ self.assertEqual(http_client.NOT_ACCEPTABLE, data.status_int)
+
+ @mock.patch.object(api_port.PortsController, '_get_ports_collection')
+ def test_detail_with_incorrect_api_usage(self, mock_gpc):
+ # GET /v1/ports/detail specifying node and node_uuid. In this case
+ # we expect the node_uuid interface to be used.
+ self.get_json('/ports/detail?node=%s&node_uuid=%s' %
+ ('test-node', self.node.uuid))
+ mock_gpc.assert_called_once_with(self.node.uuid, mock.ANY, mock.ANY,
+ mock.ANY, mock.ANY, mock.ANY,
+ mock.ANY)
+
+
+@mock.patch.object(rpcapi.ConductorAPI, 'update_port')
+class TestPatch(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestPatch, self).setUp()
+ self.node = obj_utils.create_test_node(self.context)
+ self.port = obj_utils.create_test_port(self.context,
+ node_id=self.node.id)
+
+ p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
+ self.mock_gtf = p.start()
+ self.mock_gtf.return_value = 'test-topic'
+ self.addCleanup(p.stop)
+
+ def test_update_byid(self, mock_upd):
+ extra = {'foo': 'bar'}
+ mock_upd.return_value = self.port
+ mock_upd.return_value.extra = extra
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/extra/foo',
+ 'value': 'bar',
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(extra, response.json['extra'])
+
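+ # call_args[0] holds the positional arguments passed to update_port;
+ # index 1 is presumably the Port object sent to the conductor, so its
+ # fields reflect the requested patch.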
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(extra, kargs.extra)
+
+ def test_update_byaddress_not_allowed(self, mock_upd):
+ extra = {'foo': 'bar'}
+ mock_upd.return_value = self.port
+ mock_upd.return_value.extra = extra
+ response = self.patch_json('/ports/%s' % self.port.address,
+ [{'path': '/extra/foo',
+ 'value': 'bar',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertIn(self.port.address, response.json['error_message'])
+ self.assertFalse(mock_upd.called)
+
+ def test_update_not_found(self, mock_upd):
+ uuid = uuidutils.generate_uuid()
+ response = self.patch_json('/ports/%s' % uuid,
+ [{'path': '/extra/foo',
+ 'value': 'bar',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.NOT_FOUND, response.status_int)
+ self.assertTrue(response.json['error_message'])
+ self.assertFalse(mock_upd.called)
+
+ def test_replace_singular(self, mock_upd):
+ address = 'aa:bb:cc:dd:ee:ff'
+ mock_upd.return_value = self.port
+ mock_upd.return_value.address = address
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/address',
+ 'value': address,
+ 'op': 'replace'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(address, response.json['address'])
+ self.assertTrue(mock_upd.called)
+
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(address, kargs.address)
+
+ def test_replace_address_already_exist(self, mock_upd):
+ address = 'aa:aa:aa:aa:aa:aa'
+ mock_upd.side_effect = exception.MACAlreadyExists(mac=address)
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/address',
+ 'value': address,
+ 'op': 'replace'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.CONFLICT, response.status_code)
+ self.assertTrue(response.json['error_message'])
+ self.assertTrue(mock_upd.called)
+
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(address, kargs.address)
+
+ def test_replace_node_uuid(self, mock_upd):
+ mock_upd.return_value = self.port
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/node_uuid',
+ 'value': self.node.uuid,
+ 'op': 'replace'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_add_node_uuid(self, mock_upd):
+ mock_upd.return_value = self.port
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/node_uuid',
+ 'value': self.node.uuid,
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_add_node_id(self, mock_upd):
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/node_id',
+ 'value': '1',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertFalse(mock_upd.called)
+
+ def test_replace_node_id(self, mock_upd):
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/node_id',
+ 'value': '1',
+ 'op': 'replace'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertFalse(mock_upd.called)
+
+ def test_remove_node_id(self, mock_upd):
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/node_id',
+ 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertFalse(mock_upd.called)
+
+ def test_replace_non_existent_node_uuid(self, mock_upd):
+ node_uuid = '12506333-a81c-4d59-9987-889ed5f8687b'
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/node_uuid',
+ 'value': node_uuid,
+ 'op': 'replace'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertIn(node_uuid, response.json['error_message'])
+ self.assertFalse(mock_upd.called)
+
+ def test_replace_multi(self, mock_upd):
+ extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
+ self.port.extra = extra
+ self.port.save()
+
+ # mutate extra so we replace all of them
+ extra = dict((k, extra[k] + 'x') for k in extra.keys())
+
+ patch = []
+ for k in extra.keys():
+ patch.append({'path': '/extra/%s' % k,
+ 'value': extra[k],
+ 'op': 'replace'})
+ mock_upd.return_value = self.port
+ mock_upd.return_value.extra = extra
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ patch)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(extra, response.json['extra'])
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(extra, kargs.extra)
+
+ def test_remove_multi(self, mock_upd):
+ extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
+ self.port.extra = extra
+ self.port.save()
+
+ # Removing one item from the collection
+ extra.pop('foo1')
+ mock_upd.return_value = self.port
+ mock_upd.return_value.extra = extra
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/extra/foo1',
+ 'op': 'remove'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(extra, response.json['extra'])
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(extra, kargs.extra)
+
+ # Removing the collection
+ extra = {}
+ mock_upd.return_value.extra = extra
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/extra', 'op': 'remove'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual({}, response.json['extra'])
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(extra, kargs.extra)
+
+ # Assert nothing else was changed
+ self.assertEqual(self.port.uuid, response.json['uuid'])
+ self.assertEqual(self.port.address, response.json['address'])
+
+ def test_remove_non_existent_property_fail(self, mock_upd):
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/extra/non-existent',
+ 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+ self.assertFalse(mock_upd.called)
+
+ def test_remove_mandatory_field(self, mock_upd):
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/address',
+ 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_code)
+ self.assertTrue(response.json['error_message'])
+ self.assertFalse(mock_upd.called)
+
+ def test_add_root(self, mock_upd):
+ address = 'aa:bb:cc:dd:ee:ff'
+ mock_upd.return_value = self.port
+ mock_upd.return_value.address = address
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/address',
+ 'value': address,
+ 'op': 'add'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(address, response.json['address'])
+ self.assertTrue(mock_upd.called)
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(address, kargs.address)
+
+ def test_add_root_non_existent(self, mock_upd):
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/foo',
+ 'value': 'bar',
+ 'op': 'add'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertTrue(response.json['error_message'])
+ self.assertFalse(mock_upd.called)
+
+ def test_add_multi(self, mock_upd):
+ extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
+ patch = []
+ for k in extra.keys():
+ patch.append({'path': '/extra/%s' % k,
+ 'value': extra[k],
+ 'op': 'add'})
+ mock_upd.return_value = self.port
+ mock_upd.return_value.extra = extra
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ patch)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(extra, response.json['extra'])
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(extra, kargs.extra)
+
+ def test_remove_uuid(self, mock_upd):
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/uuid',
+ 'op': 'remove'}],
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+ self.assertFalse(mock_upd.called)
+
+ def test_update_address_invalid_format(self, mock_upd):
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/address',
+ 'value': 'invalid-format',
+ 'op': 'replace'}],
+ expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertTrue(response.json['error_message'])
+ self.assertFalse(mock_upd.called)
+
+ def test_update_port_address_normalized(self, mock_upd):
+ address = 'AA:BB:CC:DD:EE:FF'
+ mock_upd.return_value = self.port
+ mock_upd.return_value.address = address.lower()
+ response = self.patch_json('/ports/%s' % self.port.uuid,
+ [{'path': '/address',
+ 'value': address,
+ 'op': 'replace'}])
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.OK, response.status_code)
+ self.assertEqual(address.lower(), response.json['address'])
+ kargs = mock_upd.call_args[0][1]
+ self.assertEqual(address.lower(), kargs.address)
+
+
+class TestPost(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestPost, self).setUp()
+ self.node = obj_utils.create_test_node(self.context)
+
+ @mock.patch.object(timeutils, 'utcnow')
+ def test_create_port(self, mock_utcnow):
+ pdict = post_get_test_port()
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = test_time
+ response = self.post_json('/ports', pdict)
+ self.assertEqual(http_client.CREATED, response.status_int)
+ result = self.get_json('/ports/%s' % pdict['uuid'])
+ self.assertEqual(pdict['uuid'], result['uuid'])
+ self.assertFalse(result['updated_at'])
+ return_created_at = timeutils.parse_isotime(
+ result['created_at']).replace(tzinfo=None)
+ self.assertEqual(test_time, return_created_at)
+ # Check location header
+ self.assertIsNotNone(response.location)
+ expected_location = '/v1/ports/%s' % pdict['uuid']
+ self.assertEqual(urlparse.urlparse(response.location).path,
+ expected_location)
+
+ def test_create_port_doesnt_contain_id(self):
+ with mock.patch.object(self.dbapi, 'create_port',
+ wraps=self.dbapi.create_port) as cp_mock:
+ pdict = post_get_test_port(extra={'foo': 123})
+ self.post_json('/ports', pdict)
+ result = self.get_json('/ports/%s' % pdict['uuid'])
+ self.assertEqual(pdict['extra'], result['extra'])
+ cp_mock.assert_called_once_with(mock.ANY)
+ # Check that 'id' is not in first arg of positional args
+ self.assertNotIn('id', cp_mock.call_args[0][0])
+
+ def test_create_port_generate_uuid(self):
+ pdict = post_get_test_port()
+ del pdict['uuid']
+ response = self.post_json('/ports', pdict)
+ result = self.get_json('/ports/%s' % response.json['uuid'])
+ self.assertEqual(pdict['address'], result['address'])
+ self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
+
+ def test_create_port_valid_extra(self):
+ pdict = post_get_test_port(extra={'str': 'foo', 'int': 123,
+ 'float': 0.1, 'bool': True,
+ 'list': [1, 2], 'none': None,
+ 'dict': {'cat': 'meow'}})
+ self.post_json('/ports', pdict)
+ result = self.get_json('/ports/%s' % pdict['uuid'])
+ self.assertEqual(pdict['extra'], result['extra'])
+
+ def test_create_port_no_mandatory_field_address(self):
+ pdict = post_get_test_port()
+ del pdict['address']
+ response = self.post_json('/ports', pdict, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ def test_create_port_no_mandatory_field_node_uuid(self):
+ pdict = post_get_test_port()
+ del pdict['node_uuid']
+ response = self.post_json('/ports', pdict, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ def test_create_port_invalid_addr_format(self):
+ pdict = post_get_test_port(address='invalid-format')
+ response = self.post_json('/ports', pdict, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ def test_create_port_address_normalized(self):
+ address = 'AA:BB:CC:DD:EE:FF'
+ pdict = post_get_test_port(address=address)
+ self.post_json('/ports', pdict)
+ result = self.get_json('/ports/%s' % pdict['uuid'])
+ self.assertEqual(address.lower(), result['address'])
+
+ def test_create_port_with_hyphens_delimiter(self):
+ pdict = post_get_test_port()
+ colonsMAC = pdict['address']
+ hyphensMAC = colonsMAC.replace(':', '-')
+ pdict['address'] = hyphensMAC
+ response = self.post_json('/ports', pdict, expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertTrue(response.json['error_message'])
+
+ def test_create_port_invalid_node_uuid_format(self):
+ pdict = post_get_test_port(node_uuid='invalid-format')
+ response = self.post_json('/ports', pdict, expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertTrue(response.json['error_message'])
+
+ def test_node_uuid_to_node_id_mapping(self):
+ pdict = post_get_test_port(node_uuid=self.node['uuid'])
+ self.post_json('/ports', pdict)
+ # GET doesn't return the node_id; it's an internal value
+ port = self.dbapi.get_port_by_uuid(pdict['uuid'])
+ self.assertEqual(self.node['id'], port.node_id)
+
+ def test_create_port_node_uuid_not_found(self):
+ pdict = post_get_test_port(
+ node_uuid='1a1a1a1a-2b2b-3c3c-4d4d-5e5e5e5e5e5e')
+ response = self.post_json('/ports', pdict, expect_errors=True)
+ self.assertEqual('application/json', response.content_type)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertTrue(response.json['error_message'])
+
+ def test_create_port_address_already_exist(self):
+ address = 'AA:AA:AA:11:22:33'
+ pdict = post_get_test_port(address=address)
+ self.post_json('/ports', pdict)
+ pdict['uuid'] = uuidutils.generate_uuid()
+ response = self.post_json('/ports', pdict, expect_errors=True)
+ self.assertEqual(http_client.CONFLICT, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ error_msg = response.json['error_message']
+ self.assertTrue(error_msg)
+ self.assertIn(address, error_msg.upper())
+
+
+@mock.patch.object(rpcapi.ConductorAPI, 'destroy_port')
+class TestDelete(test_api_base.FunctionalTest):
+
+ def setUp(self):
+ super(TestDelete, self).setUp()
+ self.node = obj_utils.create_test_node(self.context)
+ self.port = obj_utils.create_test_port(self.context,
+ node_id=self.node.id)
+
+ gtf = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
+ self.mock_gtf = gtf.start()
+ self.mock_gtf.return_value = 'test-topic'
+ self.addCleanup(gtf.stop)
+
+ def test_delete_port_byaddress(self, mock_dpt):
+ response = self.delete('/ports/%s' % self.port.address,
+ expect_errors=True)
+ self.assertEqual(http_client.BAD_REQUEST, response.status_int)
+ self.assertEqual('application/json', response.content_type)
+ self.assertIn(self.port.address, response.json['error_message'])
+
+ def test_delete_port_byid(self, mock_dpt):
+ self.delete('/ports/%s' % self.port.uuid, expect_errors=True)
+ self.assertTrue(mock_dpt.called)
+
+ def test_delete_port_node_locked(self, mock_dpt):
+ self.node.reserve(self.context, 'fake', self.node.uuid)
+ mock_dpt.side_effect = exception.NodeLocked(node='fake-node',
+ host='fake-host')
+ ret = self.delete('/ports/%s' % self.port.uuid, expect_errors=True)
+ self.assertEqual(http_client.CONFLICT, ret.status_code)
+ self.assertTrue(ret.json['error_message'])
+ self.assertTrue(mock_dpt.called)
diff --git a/ironic/tests/unit/api/v1/test_root.py b/ironic/tests/unit/api/v1/test_root.py
new file mode 100644
index 000000000..24384bee5
--- /dev/null
+++ b/ironic/tests/unit/api/v1/test_root.py
@@ -0,0 +1,72 @@
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+from webob import exc as webob_exc
+
+from ironic.api.controllers import v1 as v1_api
+from ironic.tests.unit.api import base as api_base
+from ironic.tests.unit import base as test_base
+
+
+class TestV1Routing(api_base.FunctionalTest):
+ def setUp(self):
+ super(TestV1Routing, self).setUp()
+
+ def test_route_checks_version(self):
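+ # self._check_version is presumably the mock of Controller._check_version
+ # installed by the api_base.FunctionalTest base class.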
+ self.get_json('/')
+ self._check_version.assert_called_once_with(mock.ANY,
+ mock.ANY)
+
+
+class TestCheckVersions(test_base.TestCase):
+
+ def setUp(self):
+ super(TestCheckVersions, self).setUp()
+
+ class ver(object):
+ major = None
+ minor = None
+
+ self.version = ver()
+
+ def test_check_version_invalid_major_version(self):
+ self.version.major = v1_api.BASE_VERSION + 1
+ self.version.minor = v1_api.MIN_VER.minor
+ self.assertRaises(
+ webob_exc.HTTPNotAcceptable,
+ v1_api.Controller()._check_version,
+ self.version)
+
+ def test_check_version_too_low(self):
+ self.version.major = v1_api.BASE_VERSION
+ self.version.minor = v1_api.MIN_VER.minor - 1
+ self.assertRaises(
+ webob_exc.HTTPNotAcceptable,
+ v1_api.Controller()._check_version,
+ self.version)
+
+ def test_check_version_too_high(self):
+ self.version.major = v1_api.BASE_VERSION
+ self.version.minor = v1_api.MAX_VER.minor + 1
+ e = self.assertRaises(
+ webob_exc.HTTPNotAcceptable,
+ v1_api.Controller()._check_version,
+ self.version, {'fake-headers': v1_api.MAX_VER.minor})
+ self.assertEqual(v1_api.MAX_VER.minor, e.headers['fake-headers'])
+
+ def test_check_version_ok(self):
+ self.version.major = v1_api.BASE_VERSION
+ self.version.minor = v1_api.MIN_VER.minor
+ v1_api.Controller()._check_version(self.version)
diff --git a/ironic/tests/unit/api/v1/test_types.py b/ironic/tests/unit/api/v1/test_types.py
new file mode 100644
index 000000000..9fb7747c5
--- /dev/null
+++ b/ironic/tests/unit/api/v1/test_types.py
@@ -0,0 +1,279 @@
+# coding: utf-8
+#
+# Copyright 2013 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+import six
+from six.moves import http_client
+import webtest
+import wsme
+from wsme import types as wtypes
+
+from ironic.api.controllers.v1 import types
+from ironic.common import exception
+from ironic.common import utils
+from ironic.tests.unit import base
+
+
+class TestMacAddressType(base.TestCase):
+
+ def test_valid_mac_addr(self):
+ test_mac = 'aa:bb:cc:11:22:33'
+ with mock.patch.object(utils, 'validate_and_normalize_mac') as m_mock:
+ types.MacAddressType.validate(test_mac)
+ m_mock.assert_called_once_with(test_mac)
+
+ def test_invalid_mac_addr(self):
+ self.assertRaises(exception.InvalidMAC,
+ types.MacAddressType.validate, 'invalid-mac')
+
+
+class TestUuidType(base.TestCase):
+
+ def test_valid_uuid(self):
+ test_uuid = '1a1a1a1a-2b2b-3c3c-4d4d-5e5e5e5e5e5e'
+ self.assertEqual(test_uuid, types.UuidType.validate(test_uuid))
+
+ def test_invalid_uuid(self):
+ self.assertRaises(exception.InvalidUUID,
+ types.UuidType.validate, 'invalid-uuid')
+
+
+class TestNameType(base.TestCase):
+
+ @mock.patch("pecan.request")
+ def test_valid_name(self, mock_pecan_req):
+ mock_pecan_req.version.minor = 10
+ test_name = 'hal-9000'
+ self.assertEqual(test_name, types.NameType.validate(test_name))
+
+ @mock.patch("pecan.request")
+ def test_invalid_name(self, mock_pecan_req):
+ mock_pecan_req.version.minor = 10
+ self.assertRaises(exception.InvalidName,
+ types.NameType.validate, '-this is not valid-')
+
+
+class TestUuidOrNameType(base.TestCase):
+
+ @mock.patch("pecan.request")
+ def test_valid_uuid(self, mock_pecan_req):
+ mock_pecan_req.version.minor = 10
+ test_uuid = '1a1a1a1a-2b2b-3c3c-4d4d-5e5e5e5e5e5e'
+ self.assertTrue(types.UuidOrNameType.validate(test_uuid))
+
+ @mock.patch("pecan.request")
+ def test_valid_name(self, mock_pecan_req):
+ mock_pecan_req.version.minor = 10
+ test_name = 'dc16-database5'
+ self.assertTrue(types.UuidOrNameType.validate(test_name))
+
+ @mock.patch("pecan.request")
+ def test_invalid_uuid_or_name(self, mock_pecan_req):
+ mock_pecan_req.version.minor = 10
+ self.assertRaises(exception.InvalidUuidOrName,
+ types.UuidOrNameType.validate, 'inval#uuid%or*name')
+
+
+class MyPatchType(types.JsonPatchType):
+ """Helper class for TestJsonPatchType tests."""
+
+ @staticmethod
+ def mandatory_attrs():
+ return ['/mandatory']
+
+ @staticmethod
+ def internal_attrs():
+ return ['/internal']
+
+
+class MyRoot(wsme.WSRoot):
+ """Helper class for TestJsonPatchType tests."""
+
+ @wsme.expose([wsme.types.text], body=[MyPatchType])
+ @wsme.validate([MyPatchType])
+ def test(self, patch):
+ return patch
+
+
+class TestJsonPatchType(base.TestCase):
+
+ def setUp(self):
+ super(TestJsonPatchType, self).setUp()
+ self.app = webtest.TestApp(MyRoot(['restjson']).wsgiapp())
+
+ def _patch_json(self, params, expect_errors=False):
+ return self.app.patch_json('/test', params=params,
+ headers={'Accept': 'application/json'},
+ expect_errors=expect_errors)
+
+ def test_valid_patches(self):
+ valid_patches = [{'path': '/extra/foo', 'op': 'remove'},
+ {'path': '/extra/foo', 'op': 'add', 'value': 'bar'},
+ {'path': '/str', 'op': 'replace', 'value': 'bar'},
+ {'path': '/bool', 'op': 'add', 'value': True},
+ {'path': '/int', 'op': 'add', 'value': 1},
+ {'path': '/float', 'op': 'add', 'value': 0.123},
+ {'path': '/list', 'op': 'add', 'value': [1, 2]},
+ {'path': '/none', 'op': 'add', 'value': None},
+ {'path': '/empty_dict', 'op': 'add', 'value': {}},
+ {'path': '/empty_list', 'op': 'add', 'value': []},
+ {'path': '/dict', 'op': 'add',
+ 'value': {'cat': 'meow'}}]
+ ret = self._patch_json(valid_patches, False)
+ self.assertEqual(http_client.OK, ret.status_int)
+ self.assertItemsEqual(valid_patches, ret.json)
+
+ def test_cannot_update_internal_attr(self):
+ patch = [{'path': '/internal', 'op': 'replace', 'value': 'foo'}]
+ ret = self._patch_json(patch, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+ def test_cannot_update_internal_dict_attr(self):
+ patch = [{'path': '/internal/test', 'op': 'replace',
+ 'value': 'foo'}]
+ ret = self._patch_json(patch, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+ def test_mandatory_attr(self):
+ patch = [{'op': 'replace', 'path': '/mandatory', 'value': 'foo'}]
+ ret = self._patch_json(patch, False)
+ self.assertEqual(http_client.OK, ret.status_int)
+ self.assertEqual(patch, ret.json)
+
+ def test_cannot_remove_mandatory_attr(self):
+ patch = [{'op': 'remove', 'path': '/mandatory'}]
+ ret = self._patch_json(patch, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+ def test_missing_required_fields_path(self):
+ missing_path = [{'op': 'remove'}]
+ ret = self._patch_json(missing_path, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+ def test_missing_required_fields_op(self):
+ missing_op = [{'path': '/foo'}]
+ ret = self._patch_json(missing_op, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+ def test_invalid_op(self):
+ patch = [{'path': '/foo', 'op': 'invalid'}]
+ ret = self._patch_json(patch, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+ def test_invalid_path(self):
+ patch = [{'path': 'invalid-path', 'op': 'remove'}]
+ ret = self._patch_json(patch, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+ def test_cannot_add_with_no_value(self):
+ patch = [{'path': '/extra/foo', 'op': 'add'}]
+ ret = self._patch_json(patch, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+ def test_cannot_replace_with_no_value(self):
+ patch = [{'path': '/foo', 'op': 'replace'}]
+ ret = self._patch_json(patch, True)
+ self.assertEqual(http_client.BAD_REQUEST, ret.status_int)
+ self.assertTrue(ret.json['faultstring'])
+
+
+class TestBooleanType(base.TestCase):
+
+ def test_valid_true_values(self):
+ v = types.BooleanType()
+ self.assertTrue(v.validate("true"))
+ self.assertTrue(v.validate("TRUE"))
+ self.assertTrue(v.validate("True"))
+ self.assertTrue(v.validate("t"))
+ self.assertTrue(v.validate("1"))
+ self.assertTrue(v.validate("y"))
+ self.assertTrue(v.validate("yes"))
+ self.assertTrue(v.validate("on"))
+
+ def test_valid_false_values(self):
+ v = types.BooleanType()
+ self.assertFalse(v.validate("false"))
+ self.assertFalse(v.validate("FALSE"))
+ self.assertFalse(v.validate("False"))
+ self.assertFalse(v.validate("f"))
+ self.assertFalse(v.validate("0"))
+ self.assertFalse(v.validate("n"))
+ self.assertFalse(v.validate("no"))
+ self.assertFalse(v.validate("off"))
+
+ def test_invalid_value(self):
+ v = types.BooleanType()
+ self.assertRaises(exception.Invalid, v.validate, "invalid-value")
+ self.assertRaises(exception.Invalid, v.validate, "01")
+
+
+class TestJsonType(base.TestCase):
+
+ def test_valid_values(self):
+ vt = types.jsontype
+ value = vt.validate("hello")
+ self.assertEqual("hello", value)
+ value = vt.validate(10)
+ self.assertEqual(10, value)
+ value = vt.validate(0.123)
+ self.assertEqual(0.123, value)
+ value = vt.validate(True)
+ self.assertEqual(True, value)
+ value = vt.validate([1, 2, 3])
+ self.assertEqual([1, 2, 3], value)
+ value = vt.validate({'foo': 'bar'})
+ self.assertEqual({'foo': 'bar'}, value)
+ value = vt.validate(None)
+ self.assertIsNone(value)
+
+ def test_invalid_values(self):
+ vt = types.jsontype
+ self.assertRaises(exception.Invalid, vt.validate, object())
+
+ def test_apimultitype_tostring(self):
+ vts = str(types.jsontype)
+ self.assertIn(str(wtypes.text), vts)
+ self.assertIn(str(int), vts)
+ if six.PY2:
+ self.assertIn(str(long), vts)
+ self.assertIn(str(float), vts)
+ self.assertIn(str(types.BooleanType), vts)
+ self.assertIn(str(list), vts)
+ self.assertIn(str(dict), vts)
+ self.assertIn(str(None), vts)
+
+
+class TestListType(base.TestCase):
+
+ def test_list_type(self):
+ v = types.ListType()
+ self.assertItemsEqual(['foo', 'bar'], v.validate('foo,bar'))
+ self.assertItemsEqual(['cat', 'meow'], v.validate("cat , meow"))
+ self.assertItemsEqual(['spongebob', 'squarepants'],
+ v.validate("SpongeBob,SquarePants"))
+ self.assertItemsEqual(['foo', 'bar'],
+ v.validate("foo, ,,bar"))
+ self.assertItemsEqual(['foo', 'bar'],
+ v.validate("foo,foo,foo,bar"))
diff --git a/ironic/tests/unit/api/v1/test_utils.py b/ironic/tests/unit/api/v1/test_utils.py
new file mode 100644
index 000000000..03d351fcc
--- /dev/null
+++ b/ironic/tests/unit/api/v1/test_utils.py
@@ -0,0 +1,263 @@
+# -*- encoding: utf-8 -*-
+# Copyright 2013 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+from oslo_config import cfg
+from oslo_utils import uuidutils
+import pecan
+from six.moves import http_client
+from webob.static import FileIter
+import wsme
+
+from ironic.api.controllers.v1 import utils
+from ironic.common import exception
+from ironic import objects
+from ironic.tests.unit.api import utils as test_api_utils
+from ironic.tests.unit import base
+
+CONF = cfg.CONF
+
+
+class TestApiUtils(base.TestCase):
+
+ def test_validate_limit(self):
+ limit = utils.validate_limit(10)
+ self.assertEqual(10, limit)
+
+ # max limit
+ limit = utils.validate_limit(999999999)
+ self.assertEqual(CONF.api.max_limit, limit)
+
+ # negative
+ self.assertRaises(wsme.exc.ClientSideError, utils.validate_limit, -1)
+
+ # zero
+ self.assertRaises(wsme.exc.ClientSideError, utils.validate_limit, 0)
+
+ def test_validate_sort_dir(self):
+ sort_dir = utils.validate_sort_dir('asc')
+ self.assertEqual('asc', sort_dir)
+
+ # invalid sort_dir parameter
+ self.assertRaises(wsme.exc.ClientSideError,
+ utils.validate_sort_dir,
+ 'fake-sort')
+
+ def test_check_for_invalid_fields(self):
+ requested = ['field_1', 'field_3']
+ supported = ['field_1', 'field_2', 'field_3']
+ utils.check_for_invalid_fields(requested, supported)
+
+ def test_check_for_invalid_fields_fail(self):
+ requested = ['field_1', 'field_4']
+ supported = ['field_1', 'field_2', 'field_3']
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.check_for_invalid_fields,
+ requested, supported)
+
+ @mock.patch.object(pecan, 'request', spec_set=['version'])
+ def test_check_allow_specify_fields(self, mock_request):
+ mock_request.version.minor = 8
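+ # Microversion 1.8 is assumed to be the first version accepting the
+ # 'fields' query parameter, so this call should not raise.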
+ self.assertIsNone(utils.check_allow_specify_fields(['foo']))
+
+ @mock.patch.object(pecan, 'request', spec_set=['version'])
+ def test_check_allow_specify_fields_fail(self, mock_request):
+ mock_request.version.minor = 7
+ self.assertRaises(exception.NotAcceptable,
+ utils.check_allow_specify_fields, ['foo'])
+
+ @mock.patch.object(pecan, 'request', spec_set=['version'])
+ def test_allow_links_node_states_and_driver_properties(self, mock_request):
+ mock_request.version.minor = 14
+ self.assertTrue(utils.allow_links_node_states_and_driver_properties())
+ mock_request.version.minor = 10
+ self.assertFalse(utils.allow_links_node_states_and_driver_properties())
+
+
+class TestNodeIdent(base.TestCase):
+
+ def setUp(self):
+ super(TestNodeIdent, self).setUp()
+ self.valid_name = 'my-host'
+ self.valid_uuid = uuidutils.generate_uuid()
+ self.invalid_name = 'Mr Plow'
+ self.node = test_api_utils.post_get_test_node()
+
+ @mock.patch.object(pecan, 'request')
+ def test_allow_node_logical_names_pre_name(self, mock_pecan_req):
+ mock_pecan_req.version.minor = 1
+ self.assertFalse(utils.allow_node_logical_names())
+
+ @mock.patch.object(pecan, 'request')
+ def test_allow_node_logical_names_post_name(self, mock_pecan_req):
+ mock_pecan_req.version.minor = 5
+ self.assertTrue(utils.allow_node_logical_names())
+
+ @mock.patch("pecan.request")
+ def test_is_valid_node_name(self, mock_pecan_req):
+ mock_pecan_req.version.minor = 10
+ self.assertTrue(utils.is_valid_node_name(self.valid_name))
+ self.assertFalse(utils.is_valid_node_name(self.invalid_name))
+ self.assertFalse(utils.is_valid_node_name(self.valid_uuid))
+
+ @mock.patch.object(pecan, 'request')
+ @mock.patch.object(utils, 'allow_node_logical_names')
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ @mock.patch.object(objects.Node, 'get_by_name')
+ def test_get_rpc_node_expect_uuid(self, mock_gbn, mock_gbu, mock_anln,
+ mock_pr):
+ mock_anln.return_value = True
+ self.node['uuid'] = self.valid_uuid
+ mock_gbu.return_value = self.node
+ self.assertEqual(self.node, utils.get_rpc_node(self.valid_uuid))
+ self.assertEqual(1, mock_gbu.call_count)
+ self.assertEqual(0, mock_gbn.call_count)
+
+ @mock.patch.object(pecan, 'request')
+ @mock.patch.object(utils, 'allow_node_logical_names')
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ @mock.patch.object(objects.Node, 'get_by_name')
+ def test_get_rpc_node_expect_name(self, mock_gbn, mock_gbu, mock_anln,
+ mock_pr):
+ mock_pr.version.minor = 10
+ mock_anln.return_value = True
+ self.node['name'] = self.valid_name
+ mock_gbn.return_value = self.node
+ self.assertEqual(self.node, utils.get_rpc_node(self.valid_name))
+ self.assertEqual(0, mock_gbu.call_count)
+ self.assertEqual(1, mock_gbn.call_count)
+
+ @mock.patch.object(pecan, 'request')
+ @mock.patch.object(utils, 'allow_node_logical_names')
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ @mock.patch.object(objects.Node, 'get_by_name')
+ def test_get_rpc_node_invalid_name(self, mock_gbn, mock_gbu,
+ mock_anln, mock_pr):
+ mock_pr.version.minor = 10
+ mock_anln.return_value = True
+ self.assertRaises(exception.InvalidUuidOrName,
+ utils.get_rpc_node,
+ self.invalid_name)
+
+ @mock.patch.object(pecan, 'request')
+ @mock.patch.object(utils, 'allow_node_logical_names')
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ @mock.patch.object(objects.Node, 'get_by_name')
+ def test_get_rpc_node_by_uuid_no_logical_name(self, mock_gbn, mock_gbu,
+ mock_anln, mock_pr):
+ # allow_node_logical_names() should have no effect
+ mock_anln.return_value = False
+ self.node['uuid'] = self.valid_uuid
+ mock_gbu.return_value = self.node
+ self.assertEqual(self.node, utils.get_rpc_node(self.valid_uuid))
+ self.assertEqual(1, mock_gbu.call_count)
+ self.assertEqual(0, mock_gbn.call_count)
+
+ @mock.patch.object(pecan, 'request')
+ @mock.patch.object(utils, 'allow_node_logical_names')
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ @mock.patch.object(objects.Node, 'get_by_name')
+ def test_get_rpc_node_by_name_no_logical_name(self, mock_gbn, mock_gbu,
+ mock_anln, mock_pr):
+ mock_anln.return_value = False
+ self.node['name'] = self.valid_name
+ mock_gbn.return_value = self.node
+ self.assertRaises(exception.NodeNotFound,
+ utils.get_rpc_node,
+ self.valid_name)
+
+
+class TestVendorPassthru(base.TestCase):
+
+ def test_method_not_specified(self):
+ self.assertRaises(wsme.exc.ClientSideError,
+ utils.vendor_passthru, 'fake-ident',
+ None, 'fake-topic', data='fake-data')
+
+ @mock.patch.object(pecan, 'request',
+ spec_set=['method', 'context', 'rpcapi'])
+ def _vendor_passthru(self, mock_request, async=True,
+ driver_passthru=False):
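+ # NOTE: 'async' is an ordinary keyword argument here; on Python >= 3.7
+ # it becomes a reserved keyword and this parameter would need renaming.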
+ return_value = {'return': 'SpongeBob', 'async': async, 'attach': False}
+ mock_request.method = 'post'
+ mock_request.context = 'fake-context'
+
+ passthru_mock = None
+ if driver_passthru:
+ passthru_mock = mock_request.rpcapi.driver_vendor_passthru
+ else:
+ passthru_mock = mock_request.rpcapi.vendor_passthru
+ passthru_mock.return_value = return_value
+
+ response = utils.vendor_passthru('fake-ident', 'squarepants',
+ 'fake-topic', data='fake-data',
+ driver_passthru=driver_passthru)
+
+ passthru_mock.assert_called_once_with(
+ 'fake-context', 'fake-ident', 'squarepants', 'POST',
+ 'fake-data', 'fake-topic')
+ self.assertIsInstance(response, wsme.api.Response)
+ self.assertEqual('SpongeBob', response.obj)
+ self.assertEqual(wsme.types.Unset, response.return_type)
+ sc = http_client.ACCEPTED if async else http_client.OK
+ self.assertEqual(sc, response.status_code)
+
+ def test_vendor_passthru_async(self):
+ self._vendor_passthru()
+
+ def test_vendor_passthru_sync(self):
+ self._vendor_passthru(async=False)
+
+ def test_driver_vendor_passthru_async(self):
+ self._vendor_passthru(driver_passthru=True)
+
+ def test_driver_vendor_passthru_sync(self):
+ self._vendor_passthru(async=False, driver_passthru=True)
+
+ @mock.patch.object(pecan, 'response', spec_set=['app_iter'])
+ @mock.patch.object(pecan, 'request',
+ spec_set=['method', 'context', 'rpcapi'])
+ def _test_vendor_passthru_attach(self, return_value, expct_return_value,
+ mock_request, mock_response):
+ return_ = {'return': return_value, 'async': False, 'attach': True}
+ mock_request.method = 'get'
+ mock_request.context = 'fake-context'
+ mock_request.rpcapi.driver_vendor_passthru.return_value = return_
+ response = utils.vendor_passthru('fake-ident', 'bar',
+ 'fake-topic', data='fake-data',
+ driver_passthru=True)
+ mock_request.rpcapi.driver_vendor_passthru.assert_called_once_with(
+ 'fake-context', 'fake-ident', 'bar', 'GET',
+ 'fake-data', 'fake-topic')
+
+ # Assert file was attached to the response object
+ self.assertIsInstance(mock_response.app_iter, FileIter)
+ self.assertEqual(expct_return_value,
+ mock_response.app_iter.file.read())
+ # Assert the response message is None
+ self.assertIsInstance(response, wsme.api.Response)
+ self.assertIsNone(response.obj)
+ self.assertIsNone(response.return_type)
+ self.assertEqual(http_client.OK, response.status_code)
+
+ def test_vendor_passthru_attach(self):
+ self._test_vendor_passthru_attach('foo', b'foo')
+
+ def test_vendor_passthru_attach_unicode_to_byte(self):
+ self._test_vendor_passthru_attach(u'não', b'n\xc3\xa3o')
+
+ def test_vendor_passthru_attach_byte_to_byte(self):
+ self._test_vendor_passthru_attach(b'\x00\x01', b'\x00\x01')
diff --git a/ironic/tests/unit/api/v1/test_versions.py b/ironic/tests/unit/api/v1/test_versions.py
new file mode 100644
index 000000000..39ff05e73
--- /dev/null
+++ b/ironic/tests/unit/api/v1/test_versions.py
@@ -0,0 +1,69 @@
+# Copyright (c) 2015 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Tests for the versions constants and methods.
+"""
+
+import re
+
+from ironic.api.controllers.v1 import versions
+from ironic.tests.unit import base
+
+
+class TestVersionConstants(base.TestCase):
+
+ def setUp(self):
+ super(TestVersionConstants, self).setUp()
+
+ # Get all of our named constants. They all begin with r'MINOR_[0-9]'
+ self.minor_consts = [x for x in dir(versions)
+ if re.search(r'^MINOR_[0-9]', x)]
+
+ # Sort key needs to be an integer
+ def minor_key(x):
+ return int(x.split('_', 2)[1])
+ self.minor_consts.sort(key=minor_key)
+
+ def test_max_ver_str(self):
+ # Test to make sure MAX_VERSION_STRING corresponds with the largest
+ # MINOR_ constant
+
+ max_ver = '1.{}'.format(getattr(versions, self.minor_consts[-1]))
+ self.assertEqual(max_ver, versions.MAX_VERSION_STRING)
+
+ def test_min_ver_str(self):
+ # Make sure MIN_VERSION_STRING exists and is not changed by accident
+ self.assertEqual('1.1', versions.MIN_VERSION_STRING)
+
+ def test_name_value_match(self):
+ # Test to make sure variable name matches the value. For example
+ # MINOR_99_FOO should equal 99
+
+ for var_name in self.minor_consts:
+ version = int(var_name.split('_', 2)[1])
+ self.assertEqual(
+ version, getattr(versions, var_name),
+ 'Constant "{}" does not equal {}'.format(var_name, version))
+
+ def test_duplicates(self):
+ # Test to make sure there are no duplicate values
+
+ seen_values = set()
+ for var_name in self.minor_consts:
+ value = getattr(versions, var_name)
+ self.assertNotIn(
+ value, seen_values,
+ 'The value {} has been used more than once'.format(value))
+ seen_values.add(value)
diff --git a/ironic/tests/unit/base.py b/ironic/tests/unit/base.py
new file mode 100644
index 000000000..37adbc0d4
--- /dev/null
+++ b/ironic/tests/unit/base.py
@@ -0,0 +1,147 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Base classes for our unit tests.
+
+Allows overriding of config for use of fakes, and some black magic for
+inline callbacks.
+
+"""
+
+import copy
+import os
+import sys
+import tempfile
+
+import eventlet
+eventlet.monkey_patch(os=False)
+import fixtures
+from oslo_config import cfg
+from oslo_context import context as ironic_context
+from oslo_log import log as logging
+import testtools
+
+from ironic.common import hash_ring
+from ironic.objects import base as objects_base
+from ironic.tests.unit import conf_fixture
+from ironic.tests.unit import policy_fixture
+
+
+CONF = cfg.CONF
+logging.register_options(CONF)
+CONF.set_override('use_stderr', False)
+
+logging.setup(CONF, 'ironic')
+
+
+class ReplaceModule(fixtures.Fixture):
+ """Replace a module with a fake module."""
+
+ def __init__(self, name, new_value):
+ self.name = name
+ self.new_value = new_value
+
+ def _restore(self, old_value):
+ sys.modules[self.name] = old_value
+
+ def setUp(self):
+ super(ReplaceModule, self).setUp()
+ old_value = sys.modules.get(self.name)
+ sys.modules[self.name] = self.new_value
+ self.addCleanup(self._restore, old_value)
+
+
+class TestingException(Exception):
+ pass
+
+
+class TestCase(testtools.TestCase):
+ """Test case base class for all unit tests."""
+
+ def setUp(self):
+ """Run before each test method to initialize test environment."""
+ super(TestCase, self).setUp()
+ self.context = ironic_context.get_admin_context()
+ test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
+ try:
+ test_timeout = int(test_timeout)
+ except ValueError:
+ # If timeout value is invalid do not set a timeout.
+ test_timeout = 0
+ if test_timeout > 0:
+ self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
+ self.useFixture(fixtures.NestedTempfile())
+ self.useFixture(fixtures.TempHomeDir())
+ self.config(tempdir=tempfile.tempdir)
+
+ if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
+ os.environ.get('OS_STDOUT_CAPTURE') == '1'):
+ stdout = self.useFixture(fixtures.StringStream('stdout')).stream
+ self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
+ if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
+ os.environ.get('OS_STDERR_CAPTURE') == '1'):
+ stderr = self.useFixture(fixtures.StringStream('stderr')).stream
+ self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
+
+ self.log_fixture = self.useFixture(fixtures.FakeLogger())
+ self.useFixture(conf_fixture.ConfFixture(CONF))
+
+ # NOTE(danms): Make sure to reset us back to non-remote objects
+ # for each test to avoid interactions. Also, backup the object
+ # registry
+ objects_base.IronicObject.indirection_api = None
+ self._base_test_obj_backup = copy.copy(
+ objects_base.IronicObjectRegistry.obj_classes())
+ self.addCleanup(self._restore_obj_registry)
+
+ self.addCleanup(self._clear_attrs)
+ self.addCleanup(hash_ring.HashRingManager().reset)
+ self.useFixture(fixtures.EnvironmentVariable('http_proxy'))
+ self.policy = self.useFixture(policy_fixture.PolicyFixture())
+ CONF.set_override('fatal_exception_format_errors', True)
+
+ def _restore_obj_registry(self):
+ objects_base.IronicObjectRegistry._registry._obj_classes = (
+ self._base_test_obj_backup)
+
+ def _clear_attrs(self):
+ # Delete attributes that don't start with _ so they don't pin
+ # memory around unnecessarily for the duration of the test
+ # suite
+ for key in [k for k in self.__dict__.keys() if k[0] != '_']:
+ del self.__dict__[key]
+
+ def config(self, **kw):
+ """Override config options for a test."""
+ group = kw.pop('group', None)
+ for k, v in kw.items():
+ CONF.set_override(k, v, group)
+
+ def path_get(self, project_file=None):
+ """Get the absolute path to a file. Used for testing the API.
+
+ :param project_file: File whose path to return. Default: None.
+ :returns: path to the specified file, or path to project root.
+ """
+ root = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ '..',
+ '..',
+ )
+ )
+ if project_file:
+ return os.path.join(root, project_file)
+ else:
+ return root
diff --git a/ironic/tests/unit/cmd/__init__.py b/ironic/tests/unit/cmd/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/cmd/__init__.py
diff --git a/ironic/tests/unit/cmd/test_dbsync.py b/ironic/tests/unit/cmd/test_dbsync.py
new file mode 100644
index 000000000..31e85996e
--- /dev/null
+++ b/ironic/tests/unit/cmd/test_dbsync.py
@@ -0,0 +1,27 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from ironic.db import migration
+from ironic.tests.unit.db import base
+
+
+class DbSyncTestCase(base.DbTestCase):
+
+ def test_upgrade_and_version(self):
+ migration.upgrade('head')
+ v = migration.version()
+ self.assertTrue(v)
diff --git a/ironic/tests/unit/common/__init__.py b/ironic/tests/unit/common/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/common/__init__.py
diff --git a/ironic/tests/unit/common/test_disk_partitioner.py b/ironic/tests/unit/common/test_disk_partitioner.py
new file mode 100644
index 000000000..dc87e206f
--- /dev/null
+++ b/ironic/tests/unit/common/test_disk_partitioner.py
@@ -0,0 +1,198 @@
+# Copyright 2014 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import eventlet
+import mock
+from testtools.matchers import HasLength
+
+from ironic.common import disk_partitioner
+from ironic.common import exception
+from ironic.common import utils
+from ironic.tests.unit import base
+
+
+@mock.patch.object(eventlet.greenthread, 'sleep', lambda seconds: None)
+class DiskPartitionerTestCase(base.TestCase):
+
+ def test_add_partition(self):
+ dp = disk_partitioner.DiskPartitioner('/dev/fake')
+ dp.add_partition(1024)
+ dp.add_partition(512, fs_type='linux-swap')
+ dp.add_partition(2048, bootable=True)
+ expected = [(1, {'bootable': False,
+ 'fs_type': '',
+ 'type': 'primary',
+ 'size': 1024}),
+ (2, {'bootable': False,
+ 'fs_type': 'linux-swap',
+ 'type': 'primary',
+ 'size': 512}),
+ (3, {'bootable': True,
+ 'fs_type': '',
+ 'type': 'primary',
+ 'size': 2048})]
+ partitions = [(n, p) for n, p in dp.get_partitions()]
+ self.assertThat(partitions, HasLength(3))
+ self.assertEqual(expected, partitions)
+
+ @mock.patch.object(disk_partitioner.DiskPartitioner, '_exec',
+ autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_commit(self, mock_utils_exc, mock_disk_partitioner_exec):
+ dp = disk_partitioner.DiskPartitioner('/dev/fake')
+ fake_parts = [(1, {'bootable': False,
+ 'fs_type': 'fake-fs-type',
+ 'type': 'fake-type',
+ 'size': 1}),
+ (2, {'bootable': True,
+ 'fs_type': 'fake-fs-type',
+ 'type': 'fake-type',
+ 'size': 1})]
+ with mock.patch.object(dp, 'get_partitions', autospec=True) as mock_gp:
+ mock_gp.return_value = fake_parts
+ mock_utils_exc.return_value = (None, None)
+ dp.commit()
+
+ mock_disk_partitioner_exec.assert_called_once_with(
+ mock.ANY, 'mklabel', 'msdos',
+ 'mkpart', 'fake-type', 'fake-fs-type', '1', '2',
+ 'mkpart', 'fake-type', 'fake-fs-type', '2', '3',
+ 'set', '2', 'boot', 'on')
+ mock_utils_exc.assert_called_once_with(
+ 'fuser', '/dev/fake', run_as_root=True, check_exit_code=[0, 1])
+
+ @mock.patch.object(disk_partitioner.DiskPartitioner, '_exec',
+ autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_commit_with_device_is_busy_once(self, mock_utils_exc,
+ mock_disk_partitioner_exec):
+ dp = disk_partitioner.DiskPartitioner('/dev/fake')
+ fake_parts = [(1, {'bootable': False,
+ 'fs_type': 'fake-fs-type',
+ 'type': 'fake-type',
+ 'size': 1}),
+ (2, {'bootable': True,
+ 'fs_type': 'fake-fs-type',
+ 'type': 'fake-type',
+ 'size': 1})]
+ fuser_outputs = iter([("/dev/fake: 10000 10001", None), (None, None)])
+
+ with mock.patch.object(dp, 'get_partitions', autospec=True) as mock_gp:
+ mock_gp.return_value = fake_parts
+ mock_utils_exc.side_effect = fuser_outputs
+ dp.commit()
+
+ mock_disk_partitioner_exec.assert_called_once_with(
+ mock.ANY, 'mklabel', 'msdos',
+ 'mkpart', 'fake-type', 'fake-fs-type', '1', '2',
+ 'mkpart', 'fake-type', 'fake-fs-type', '2', '3',
+ 'set', '2', 'boot', 'on')
+ mock_utils_exc.assert_called_with(
+ 'fuser', '/dev/fake', run_as_root=True, check_exit_code=[0, 1])
+ self.assertEqual(2, mock_utils_exc.call_count)
+
+ @mock.patch.object(disk_partitioner.DiskPartitioner, '_exec',
+ autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_commit_with_device_is_always_busy(self, mock_utils_exc,
+ mock_disk_partitioner_exec):
+ dp = disk_partitioner.DiskPartitioner('/dev/fake')
+ fake_parts = [(1, {'bootable': False,
+ 'fs_type': 'fake-fs-type',
+ 'type': 'fake-type',
+ 'size': 1}),
+ (2, {'bootable': True,
+ 'fs_type': 'fake-fs-type',
+ 'type': 'fake-type',
+ 'size': 1})]
+
+ with mock.patch.object(dp, 'get_partitions', autospec=True) as mock_gp:
+ mock_gp.return_value = fake_parts
+ mock_utils_exc.return_value = ("/dev/fake: 10000 10001", None)
+ self.assertRaises(exception.InstanceDeployFailure, dp.commit)
+
+ mock_disk_partitioner_exec.assert_called_once_with(
+ mock.ANY, 'mklabel', 'msdos',
+ 'mkpart', 'fake-type', 'fake-fs-type', '1', '2',
+ 'mkpart', 'fake-type', 'fake-fs-type', '2', '3',
+ 'set', '2', 'boot', 'on')
+ mock_utils_exc.assert_called_with(
+ 'fuser', '/dev/fake', run_as_root=True, check_exit_code=[0, 1])
+ self.assertEqual(20, mock_utils_exc.call_count)
+
+ @mock.patch.object(disk_partitioner.DiskPartitioner, '_exec',
+ autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_commit_with_device_disconnected(self, mock_utils_exc,
+ mock_disk_partitioner_exec):
+ dp = disk_partitioner.DiskPartitioner('/dev/fake')
+ fake_parts = [(1, {'bootable': False,
+ 'fs_type': 'fake-fs-type',
+ 'type': 'fake-type',
+ 'size': 1}),
+ (2, {'bootable': True,
+ 'fs_type': 'fake-fs-type',
+ 'type': 'fake-type',
+ 'size': 1})]
+
+ with mock.patch.object(dp, 'get_partitions', autospec=True) as mock_gp:
+ mock_gp.return_value = fake_parts
+ mock_utils_exc.return_value = (None, "Specified filename /dev/fake"
+ " does not exist.")
+ self.assertRaises(exception.InstanceDeployFailure, dp.commit)
+
+ mock_disk_partitioner_exec.assert_called_once_with(
+ mock.ANY, 'mklabel', 'msdos',
+ 'mkpart', 'fake-type', 'fake-fs-type', '1', '2',
+ 'mkpart', 'fake-type', 'fake-fs-type', '2', '3',
+ 'set', '2', 'boot', 'on')
+ mock_utils_exc.assert_called_with(
+ 'fuser', '/dev/fake', run_as_root=True, check_exit_code=[0, 1])
+ self.assertEqual(20, mock_utils_exc.call_count)
+
+
+@mock.patch.object(utils, 'execute', autospec=True)
+class ListPartitionsTestCase(base.TestCase):
+
+ def test_correct(self, execute_mock):
+ output = """
+BYT;
+/dev/sda:500107862016B:scsi:512:4096:msdos:ATA HGST HTS725050A7:;
+1:1.00MiB:501MiB:500MiB:ext4::boot;
+2:501MiB:476940MiB:476439MiB:::;
+"""
+ expected = [
+ {'number': 1, 'start': 1, 'end': 501, 'size': 500,
+ 'filesystem': 'ext4', 'flags': 'boot'},
+ {'number': 2, 'start': 501, 'end': 476940, 'size': 476439,
+ 'filesystem': '', 'flags': ''},
+ ]
+ execute_mock.return_value = (output, '')
+ result = disk_partitioner.list_partitions('/dev/fake')
+ self.assertEqual(expected, result)
+ execute_mock.assert_called_once_with(
+ 'parted', '-s', '-m', '/dev/fake', 'unit', 'MiB', 'print',
+ use_standard_locale=True, run_as_root=True)
+
+ @mock.patch.object(disk_partitioner.LOG, 'warn', autospec=True)
+ def test_incorrect(self, log_mock, execute_mock):
+ output = """
+BYT;
+/dev/sda:500107862016B:scsi:512:4096:msdos:ATA HGST HTS725050A7:;
+1:XX1076MiB:---:524MiB:ext4::boot;
+"""
+ execute_mock.return_value = (output, '')
+ self.assertEqual([], disk_partitioner.list_partitions('/dev/fake'))
+ self.assertEqual(1, log_mock.call_count)
diff --git a/ironic/tests/unit/common/test_driver_factory.py b/ironic/tests/unit/common/test_driver_factory.py
new file mode 100644
index 000000000..9035a0386
--- /dev/null
+++ b/ironic/tests/unit/common/test_driver_factory.py
@@ -0,0 +1,64 @@
+# coding=utf-8
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+from stevedore import dispatch
+
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.tests.unit import base
+
+
+class FakeEp(object):
+ name = 'fake'
+
+
+class DriverLoadTestCase(base.TestCase):
+
+ def setUp(self):
+ super(DriverLoadTestCase, self).setUp()
+ driver_factory.DriverFactory._extension_manager = None
+
+ def _fake_init_name_err(self, *args, **kwargs):
+ kwargs['on_load_failure_callback'](None, FakeEp, NameError('aaa'))
+
+ def _fake_init_driver_err(self, *args, **kwargs):
+ kwargs['on_load_failure_callback'](None, FakeEp,
+ exception.DriverLoadError(
+ driver='aaa', reason='bbb'))
+
+ def test_driver_load_error_if_driver_enabled(self):
+ self.config(enabled_drivers=['fake'])
+ with mock.patch.object(dispatch.NameDispatchExtensionManager,
+ '__init__', self._fake_init_driver_err):
+ self.assertRaises(
+ exception.DriverLoadError,
+ driver_factory.DriverFactory._init_extension_manager)
+
+ def test_wrap_in_driver_load_error_if_driver_enabled(self):
+ self.config(enabled_drivers=['fake'])
+ with mock.patch.object(dispatch.NameDispatchExtensionManager,
+ '__init__', self._fake_init_name_err):
+ self.assertRaises(
+ exception.DriverLoadError,
+ driver_factory.DriverFactory._init_extension_manager)
+
+ @mock.patch.object(dispatch.NameDispatchExtensionManager, 'names',
+ autospec=True)
+ def test_no_driver_load_error_if_driver_disabled(self, mock_em):
+ self.config(enabled_drivers=[])
+ with mock.patch.object(dispatch.NameDispatchExtensionManager,
+ '__init__', self._fake_init_driver_err):
+ driver_factory.DriverFactory._init_extension_manager()
+ self.assertEqual(2, mock_em.call_count)
diff --git a/ironic/tests/unit/common/test_exception.py b/ironic/tests/unit/common/test_exception.py
new file mode 100644
index 000000000..bf52f59d7
--- /dev/null
+++ b/ironic/tests/unit/common/test_exception.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2015 IBM, Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+from ironic.common import exception
+from ironic.tests.unit import base
+
+
+class TestIronicException(base.TestCase):
+ def test____init__(self):
+ expected = b'\xc3\xa9\xe0\xaf\xb2\xe0\xbe\x84'
+ if six.PY3:
+ expected = expected.decode('utf-8')
+ message = chr(233) + chr(0x0bf2) + chr(3972)
+ else:
+ message = unichr(233) + unichr(0x0bf2) + unichr(3972)
+ exc = exception.IronicException(message)
+ self.assertEqual(expected, exc.__str__())
diff --git a/ironic/tests/unit/common/test_fsm.py b/ironic/tests/unit/common/test_fsm.py
new file mode 100644
index 000000000..78a274fbc
--- /dev/null
+++ b/ironic/tests/unit/common/test_fsm.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from ironic.common import fsm
+from ironic.tests.unit import base
+
+
+class FSMTest(base.TestCase):
+ def test_target_state_stable(self):
+ # Test to verify that adding a new state with a 'target' state pointing
+ # to a 'stable' state does not raise an exception
+ m = fsm.FSM()
+ m.add_state('working', stable=True)
+ m.add_state('foo', target='working')
+ m.default_start_state = 'working'
+ m.initialize()
diff --git a/ironic/tests/unit/common/test_glance_service.py b/ironic/tests/unit/common/test_glance_service.py
new file mode 100644
index 000000000..881b86eaa
--- /dev/null
+++ b/ironic/tests/unit/common/test_glance_service.py
@@ -0,0 +1,860 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+import datetime
+import filecmp
+import os
+import tempfile
+import time
+
+from glanceclient import exc as glance_exc
+import mock
+from oslo_config import cfg
+from oslo_context import context
+from oslo_serialization import jsonutils
+import testtools
+
+from ironic.common import exception
+from ironic.common.glance_service import base_image_service
+from ironic.common.glance_service import service_utils
+from ironic.common import image_service as service
+from ironic.tests.unit import base
+from ironic.tests.unit import stubs
+
+
+CONF = cfg.CONF
+
+
+class NullWriter(object):
+ """Used to test ImageService.get which takes a writer object."""
+
+ def write(self, *arg, **kwargs):
+ pass
+
+
+class TestGlanceSerializer(testtools.TestCase):
+ def test_serialize(self):
+ metadata = {'name': 'image1',
+ 'is_public': True,
+ 'foo': 'bar',
+ 'properties': {
+ 'prop1': 'propvalue1',
+ 'mappings': [
+ {'virtual': 'aaa',
+ 'device': 'bbb'},
+ {'virtual': 'xxx',
+ 'device': 'yyy'}],
+ 'block_device_mapping': [
+ {'virtual_device': 'fake',
+ 'device_name': '/dev/fake'},
+ {'virtual_device': 'ephemeral0',
+ 'device_name': '/dev/fake0'}]}}
+
+ converted_expected = {
+ 'name': 'image1',
+ 'is_public': True,
+ 'foo': 'bar',
+ 'properties': {'prop1': 'propvalue1'}
+ }
+ converted = service_utils._convert(metadata, 'to')
+ self.assertEqual(metadata,
+ service_utils._convert(converted, 'from'))
+ # Fields that rely on dict ordering can't be compared as text
+ mappings = jsonutils.loads(converted['properties']
+ .pop('mappings'))
+ self.assertEqual([{"device": "bbb", "virtual": "aaa"},
+ {"device": "yyy", "virtual": "xxx"}],
+ mappings)
+ bd_mapping = jsonutils.loads(converted['properties']
+ .pop('block_device_mapping'))
+ self.assertEqual([{"virtual_device": "fake",
+ "device_name": "/dev/fake"},
+ {"virtual_device": "ephemeral0",
+ "device_name": "/dev/fake0"}],
+ bd_mapping)
+ # Compare the remaining
+ self.assertEqual(converted_expected, converted)
+
+
+class TestGlanceImageService(base.TestCase):
+ NOW_GLANCE_OLD_FORMAT = "2010-10-11T10:30:22"
+ NOW_GLANCE_FORMAT = "2010-10-11T10:30:22.000000"
+
+ NOW_DATETIME = datetime.datetime(2010, 10, 11, 10, 30, 22)
+
+ def setUp(self):
+ super(TestGlanceImageService, self).setUp()
+ client = stubs.StubGlanceClient()
+ self.context = context.RequestContext(auth_token=True)
+ self.context.user_id = 'fake'
+ self.context.project_id = 'fake'
+ self.service = service.GlanceImageService(client, 1, self.context)
+
+ self.config(glance_host='localhost', group='glance')
+ try:
+ self.config(auth_strategy='keystone', group='glance')
+ except Exception:
+ opts = [
+ cfg.StrOpt('auth_strategy', default='keystone'),
+ ]
+ CONF.register_opts(opts)
+
+ return
+
+ @staticmethod
+ def _make_fixture(**kwargs):
+ fixture = {'name': None,
+ 'properties': {},
+ 'status': None,
+ 'is_public': None}
+ fixture.update(kwargs)
+ return fixture
+
+ @property
+ def endpoint(self):
+ # For glanceclient versions >= 0.13, the endpoint is located
+ # under http_client (blueprint common-client-library-2)
+ # I5addc38eb2e2dd0be91b566fda7c0d81787ffa75
+ # Test both options to keep backward compatibility
+ if getattr(self.service.client, 'endpoint', None):
+ endpoint = self.service.client.endpoint
+ else:
+ endpoint = self.service.client.http_client.endpoint
+ return endpoint
+
+ def _make_datetime_fixture(self):
+ return self._make_fixture(created_at=self.NOW_GLANCE_FORMAT,
+ updated_at=self.NOW_GLANCE_FORMAT,
+ deleted_at=self.NOW_GLANCE_FORMAT)
+
+ def test_create_with_instance_id(self):
+ # Ensure instance_id is persisted as an image-property.
+ fixture = {'name': 'test image',
+ 'is_public': False,
+ 'properties': {'instance_id': '42', 'user_id': 'fake'}}
+ image_id = self.service.create(fixture)['id']
+ image_meta = self.service.show(image_id)
+ expected = {
+ 'id': image_id,
+ 'name': 'test image',
+ 'is_public': False,
+ 'size': None,
+ 'min_disk': None,
+ 'min_ram': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'status': None,
+ 'properties': {'instance_id': '42', 'user_id': 'fake'},
+ 'owner': None,
+ }
+
+ self.assertDictEqual(expected, image_meta)
+
+ image_metas = self.service.detail()
+ self.assertDictEqual(expected, image_metas[0])
+
+ def test_create_without_instance_id(self):
+ """Test creating an image without an instance ID.
+
+ Ensure we can create an image without having to specify an
+ instance_id. Public images are an example of an image not tied to an
+ instance.
+ """
+ fixture = {'name': 'test image', 'is_public': False}
+ image_id = self.service.create(fixture)['id']
+
+ expected = {
+ 'id': image_id,
+ 'name': 'test image',
+ 'is_public': False,
+ 'size': None,
+ 'min_disk': None,
+ 'min_ram': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'status': None,
+ 'properties': {},
+ 'owner': None,
+ }
+ actual = self.service.show(image_id)
+ self.assertDictEqual(expected, actual)
+
+ def test_create(self):
+ fixture = self._make_fixture(name='test image')
+ num_images = len(self.service.detail())
+ image_id = self.service.create(fixture)['id']
+
+ self.assertIsNotNone(image_id)
+ self.assertEqual(
+ num_images + 1, len(self.service.detail()))
+
+ def test_create_and_show_non_existing_image(self):
+ fixture = self._make_fixture(name='test image')
+ image_id = self.service.create(fixture)['id']
+
+ self.assertIsNotNone(image_id)
+ self.assertRaises(exception.ImageNotFound,
+ self.service.show,
+ 'bad image id')
+
+ def test_detail_private_image(self):
+ fixture = self._make_fixture(name='test image')
+ fixture['is_public'] = False
+ properties = {'owner_id': 'proj1'}
+ fixture['properties'] = properties
+
+ self.service.create(fixture)['id']
+
+ proj = self.context.project_id
+ self.context.project_id = 'proj1'
+
+ image_metas = self.service.detail()
+
+ self.context.project_id = proj
+
+ self.assertEqual(1, len(image_metas))
+ self.assertEqual('test image', image_metas[0]['name'])
+ self.assertEqual(False, image_metas[0]['is_public'])
+
+ def test_detail_marker(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(fixture)['id'])
+
+ image_metas = self.service.detail(marker=ids[1])
+ self.assertEqual(8, len(image_metas))
+ i = 2
+ for meta in image_metas:
+ expected = {
+ 'id': ids[i],
+ 'status': None,
+ 'is_public': None,
+ 'name': 'TestImage %d' % (i),
+ 'properties': {},
+ 'size': None,
+ 'min_disk': None,
+ 'min_ram': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'owner': None,
+ }
+
+ self.assertDictEqual(expected, meta)
+ i = i + 1
+
+ def test_detail_limit(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(fixture)['id'])
+
+ image_metas = self.service.detail(limit=5)
+ self.assertEqual(5, len(image_metas))
+
+ def test_detail_default_limit(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(fixture)['id'])
+
+ image_metas = self.service.detail()
+ for i, meta in enumerate(image_metas):
+ self.assertEqual('TestImage %d' % (i), meta['name'])
+
+ def test_detail_marker_and_limit(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(fixture)['id'])
+
+ image_metas = self.service.detail(marker=ids[3], limit=5)
+ self.assertEqual(5, len(image_metas))
+ i = 4
+ for meta in image_metas:
+ expected = {
+ 'id': ids[i],
+ 'status': None,
+ 'is_public': None,
+ 'name': 'TestImage %d' % (i),
+ 'properties': {},
+ 'size': None,
+ 'min_disk': None,
+ 'min_ram': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'owner': None,
+ }
+ self.assertDictEqual(expected, meta)
+ i = i + 1
+
+ def test_detail_invalid_marker(self):
+ fixtures = []
+ ids = []
+ for i in range(10):
+ fixture = self._make_fixture(name='TestImage %d' % (i))
+ fixtures.append(fixture)
+ ids.append(self.service.create(fixture)['id'])
+
+ self.assertRaises(exception.Invalid, self.service.detail,
+ marker='invalidmarker')
+
+ def test_update(self):
+ fixture = self._make_fixture(name='test image')
+ image = self.service.create(fixture)
+ image_id = image['id']
+ fixture['name'] = 'new image name'
+ self.service.update(image_id, fixture)
+
+ new_image_data = self.service.show(image_id)
+ self.assertEqual('new image name', new_image_data['name'])
+
+ def test_delete(self):
+ fixture1 = self._make_fixture(name='test image 1')
+ fixture2 = self._make_fixture(name='test image 2')
+ fixtures = [fixture1, fixture2]
+
+ num_images = len(self.service.detail())
+ self.assertEqual(0, num_images)
+
+ ids = []
+ for fixture in fixtures:
+ new_id = self.service.create(fixture)['id']
+ ids.append(new_id)
+
+ num_images = len(self.service.detail())
+ self.assertEqual(2, num_images)
+
+ self.service.delete(ids[0])
+ # When you delete an image from glance, it sets the status to DELETED
+ # and doesn't actually remove the image.
+
+ # Check the image is still there.
+ num_images = len(self.service.detail())
+ self.assertEqual(2, num_images)
+
+ # Check the image is marked as deleted.
+ num_images = len([x for x in self.service.detail()
+ if not x['deleted']])
+ self.assertEqual(1, num_images)
+
+ def test_show_passes_through_to_client(self):
+ fixture = self._make_fixture(name='image1', is_public=True)
+ image_id = self.service.create(fixture)['id']
+
+ image_meta = self.service.show(image_id)
+ expected = {
+ 'id': image_id,
+ 'name': 'image1',
+ 'is_public': True,
+ 'size': None,
+ 'min_disk': None,
+ 'min_ram': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'status': None,
+ 'properties': {},
+ 'owner': None,
+ }
+ self.assertEqual(expected, image_meta)
+
+ def test_show_raises_when_no_authtoken_in_the_context(self):
+ fixture = self._make_fixture(name='image1',
+ is_public=False,
+ properties={'one': 'two'})
+ image_id = self.service.create(fixture)['id']
+ self.context.auth_token = False
+ self.assertRaises(exception.ImageNotFound,
+ self.service.show,
+ image_id)
+
+ def test_detail_passes_through_to_client(self):
+ fixture = self._make_fixture(name='image10', is_public=True)
+ image_id = self.service.create(fixture)['id']
+ image_metas = self.service.detail()
+ expected = [
+ {
+ 'id': image_id,
+ 'name': 'image10',
+ 'is_public': True,
+ 'size': None,
+ 'min_disk': None,
+ 'min_ram': None,
+ 'disk_format': None,
+ 'container_format': None,
+ 'checksum': None,
+ 'created_at': self.NOW_DATETIME,
+ 'updated_at': self.NOW_DATETIME,
+ 'deleted_at': None,
+ 'deleted': None,
+ 'status': None,
+ 'properties': {},
+ 'owner': None,
+ },
+ ]
+ self.assertEqual(expected, image_metas)
+
+ def test_show_makes_datetimes(self):
+ fixture = self._make_datetime_fixture()
+ image_id = self.service.create(fixture)['id']
+ image_meta = self.service.show(image_id)
+ self.assertEqual(self.NOW_DATETIME, image_meta['created_at'])
+ self.assertEqual(self.NOW_DATETIME, image_meta['updated_at'])
+
+ def test_detail_makes_datetimes(self):
+ fixture = self._make_datetime_fixture()
+ self.service.create(fixture)
+ image_meta = self.service.detail()[0]
+ self.assertEqual(self.NOW_DATETIME, image_meta['created_at'])
+ self.assertEqual(self.NOW_DATETIME, image_meta['updated_at'])
+
+ @mock.patch.object(time, 'sleep', autospec=True)
+ def test_download_with_retries(self, mock_sleep):
+ tries = [0]
+
+ class MyGlanceStubClient(stubs.StubGlanceClient):
+ """A client that fails the first time, then succeeds."""
+ def get(self, image_id):
+ if tries[0] == 0:
+ tries[0] = 1
+ raise glance_exc.ServiceUnavailable('')
+ else:
+ return {}
+
+ stub_client = MyGlanceStubClient()
+ stub_context = context.RequestContext(auth_token=True)
+ stub_context.user_id = 'fake'
+ stub_context.project_id = 'fake'
+ stub_service = service.GlanceImageService(stub_client, 1, stub_context)
+ image_id = 1 # doesn't matter
+ writer = NullWriter()
+
+ # When retries are disabled, we should get an exception
+ self.config(glance_num_retries=0, group='glance')
+ self.assertRaises(exception.GlanceConnectionFailed,
+ stub_service.download, image_id, writer)
+
+ # Now let's enable retries. No exception should happen now.
+ tries = [0]
+ self.config(glance_num_retries=1, group='glance')
+ stub_service.download(image_id, writer)
+ self.assertTrue(mock_sleep.called)
+
+ def test_download_file_url(self):
+ # NOTE: only in v2 API
+ class MyGlanceStubClient(stubs.StubGlanceClient):
+
+ """A client that returns a file url."""
+
+ (outfd, s_tmpfname) = tempfile.mkstemp(prefix='directURLsrc')
+ outf = os.fdopen(outfd, 'wb')
+ inf = open('/dev/urandom', 'rb')
+ for i in range(10):
+ _data = inf.read(1024)
+ outf.write(_data)
+ outf.close()
+
+ def get(self, image_id):
+ return type('GlanceTestDirectUrlMeta', (object,),
+ {'direct_url': 'file://' + self.s_tmpfname})
+
+ stub_context = context.RequestContext(auth_token=True)
+ stub_context.user_id = 'fake'
+ stub_context.project_id = 'fake'
+ stub_client = MyGlanceStubClient()
+ (outfd, tmpfname) = tempfile.mkstemp(prefix='directURLdst')
+ writer = os.fdopen(outfd, 'w')
+
+ stub_service = service.GlanceImageService(stub_client,
+ context=stub_context,
+ version=2)
+ image_id = 1 # doesn't matter
+
+ self.config(allowed_direct_url_schemes=['file'], group='glance')
+ stub_service.download(image_id, writer)
+ writer.close()
+
+ # compare the two files
+ rc = filecmp.cmp(tmpfname, stub_client.s_tmpfname)
+ self.assertTrue(rc, "The file %s and %s should be the same" %
+ (tmpfname, stub_client.s_tmpfname))
+ os.remove(stub_client.s_tmpfname)
+ os.remove(tmpfname)
+
+ def test_client_forbidden_converts_to_imagenotauthed(self):
+ class MyGlanceStubClient(stubs.StubGlanceClient):
+ """A client that raises a Forbidden exception."""
+ def get(self, image_id):
+ raise glance_exc.Forbidden(image_id)
+
+ stub_client = MyGlanceStubClient()
+ stub_context = context.RequestContext(auth_token=True)
+ stub_context.user_id = 'fake'
+ stub_context.project_id = 'fake'
+ stub_service = service.GlanceImageService(stub_client, 1, stub_context)
+ image_id = 1 # doesn't matter
+ writer = NullWriter()
+ self.assertRaises(exception.ImageNotAuthorized, stub_service.download,
+ image_id, writer)
+
+ def test_client_httpforbidden_converts_to_imagenotauthed(self):
+ class MyGlanceStubClient(stubs.StubGlanceClient):
+ """A client that raises a HTTPForbidden exception."""
+ def get(self, image_id):
+ raise glance_exc.HTTPForbidden(image_id)
+
+ stub_client = MyGlanceStubClient()
+ stub_context = context.RequestContext(auth_token=True)
+ stub_context.user_id = 'fake'
+ stub_context.project_id = 'fake'
+ stub_service = service.GlanceImageService(stub_client, 1, stub_context)
+ image_id = 1 # doesn't matter
+ writer = NullWriter()
+ self.assertRaises(exception.ImageNotAuthorized, stub_service.download,
+ image_id, writer)
+
+ def test_client_notfound_converts_to_imagenotfound(self):
+ class MyGlanceStubClient(stubs.StubGlanceClient):
+ """A client that raises a NotFound exception."""
+ def get(self, image_id):
+ raise glance_exc.NotFound(image_id)
+
+ stub_client = MyGlanceStubClient()
+ stub_context = context.RequestContext(auth_token=True)
+ stub_context.user_id = 'fake'
+ stub_context.project_id = 'fake'
+ stub_service = service.GlanceImageService(stub_client, 1, stub_context)
+ image_id = 1 # doesn't matter
+ writer = NullWriter()
+ self.assertRaises(exception.ImageNotFound, stub_service.download,
+ image_id, writer)
+
+ def test_client_httpnotfound_converts_to_imagenotfound(self):
+ class MyGlanceStubClient(stubs.StubGlanceClient):
+ """A client that raises a HTTPNotFound exception."""
+ def get(self, image_id):
+ raise glance_exc.HTTPNotFound(image_id)
+
+ stub_client = MyGlanceStubClient()
+ stub_context = context.RequestContext(auth_token=True)
+ stub_context.user_id = 'fake'
+ stub_context.project_id = 'fake'
+ stub_service = service.GlanceImageService(stub_client, 1, stub_context)
+ image_id = 1 # doesn't matter
+ writer = NullWriter()
+ self.assertRaises(exception.ImageNotFound, stub_service.download,
+ image_id, writer)
+
+ def test_check_image_service_client_set(self):
+ def func(self):
+ return True
+
+ self.service.client = True
+
+ wrapped_func = base_image_service.check_image_service(func)
+ self.assertTrue(wrapped_func(self.service))
+
+ def test_check_image_service__no_client_set_http(self):
+ def func(service, *args, **kwargs):
+ return (self.endpoint, args, kwargs)
+
+ self.service.client = None
+ params = {'image_href': 'http://123.123.123.123:9292/image_uuid'}
+ self.config(auth_strategy='keystone', group='glance')
+ wrapped_func = base_image_service.check_image_service(func)
+ self.assertEqual(('http://123.123.123.123:9292', (), params),
+ wrapped_func(self.service, **params))
+
+ def test_get_image_service__no_client_set_https(self):
+ def func(service, *args, **kwargs):
+ return (self.endpoint, args, kwargs)
+
+ self.service.client = None
+ params = {'image_href': 'https://123.123.123.123:9292/image_uuid'}
+ self.config(auth_strategy='keystone', group='glance')
+ wrapped_func = base_image_service.check_image_service(func)
+
+ self.assertEqual(('https://123.123.123.123:9292', (), params),
+ wrapped_func(self.service, **params))
+
+
+def _create_failing_glance_client(info):
+ class MyGlanceStubClient(stubs.StubGlanceClient):
+ """A client that fails the first time, then succeeds."""
+ def get(self, image_id):
+ info['num_calls'] += 1
+ if info['num_calls'] == 1:
+ raise glance_exc.ServiceUnavailable('')
+ return {}
+
+ return MyGlanceStubClient()
+
+
+class TestGlanceSwiftTempURL(base.TestCase):
+ def setUp(self):
+ super(TestGlanceSwiftTempURL, self).setUp()
+ client = stubs.StubGlanceClient()
+ self.context = context.RequestContext()
+ self.context.auth_token = 'fake'
+ self.service = service.GlanceImageService(client, 2, self.context)
+ self.config(swift_temp_url_key='correcthorsebatterystaple',
+ group='glance')
+ self.config(swift_endpoint_url='https://swift.example.com',
+ group='glance')
+ self.config(swift_account='AUTH_a422b2-91f3-2f46-74b7-d7c9e8958f5d30',
+ group='glance')
+ self.config(swift_api_version='v1',
+ group='glance')
+ self.config(swift_container='glance',
+ group='glance')
+ self.config(swift_temp_url_duration=1200,
+ group='glance')
+ self.config(swift_store_multiple_containers_seed=0,
+ group='glance')
+ self.config()
+ self.fake_image = {
+ 'id': '757274c4-2856-4bd2-bb20-9a4a231e187b'
+ }
+
+ @mock.patch('swiftclient.utils.generate_temp_url', autospec=True)
+ def test_swift_temp_url(self, tempurl_mock):
+
+ path = ('/v1/AUTH_a422b2-91f3-2f46-74b7-d7c9e8958f5d30'
+ '/glance'
+ '/757274c4-2856-4bd2-bb20-9a4a231e187b')
+ tempurl_mock.return_value = (
+ path + '?temp_url_sig=hmacsig&temp_url_expires=1400001200')
+
+ self.service._validate_temp_url_config = mock.Mock()
+
+ temp_url = self.service.swift_temp_url(image_info=self.fake_image)
+
+ self.assertEqual(CONF.glance.swift_endpoint_url
+ + tempurl_mock.return_value,
+ temp_url)
+ tempurl_mock.assert_called_with(
+ path=path,
+ seconds=CONF.glance.swift_temp_url_duration,
+ key=CONF.glance.swift_temp_url_key,
+ method='GET')
+
+ @mock.patch('swiftclient.utils.generate_temp_url', autospec=True)
+ def test_swift_temp_url_multiple_containers(self, tempurl_mock):
+
+ self.config(swift_store_multiple_containers_seed=8,
+ group='glance')
+
+ path = ('/v1/AUTH_a422b2-91f3-2f46-74b7-d7c9e8958f5d30'
+ '/glance_757274c4'
+ '/757274c4-2856-4bd2-bb20-9a4a231e187b')
+ tempurl_mock.return_value = (
+ path + '?temp_url_sig=hmacsig&temp_url_expires=1400001200')
+
+ self.service._validate_temp_url_config = mock.Mock()
+
+ temp_url = self.service.swift_temp_url(image_info=self.fake_image)
+
+ self.assertEqual(CONF.glance.swift_endpoint_url
+ + tempurl_mock.return_value,
+ temp_url)
+ tempurl_mock.assert_called_with(
+ path=path,
+ seconds=CONF.glance.swift_temp_url_duration,
+ key=CONF.glance.swift_temp_url_key,
+ method='GET')
+
+ def test_swift_temp_url_url_bad_no_info(self):
+ self.assertRaises(exception.ImageUnacceptable,
+ self.service.swift_temp_url,
+ image_info={})
+
+ def test__validate_temp_url_config(self):
+ self.service._validate_temp_url_config()
+
+ def test__validate_temp_url_key_exception(self):
+ self.config(swift_temp_url_key=None, group='glance')
+ self.assertRaises(exception.MissingParameterValue,
+ self.service._validate_temp_url_config)
+
+ def test__validate_temp_url_endpoint_config_exception(self):
+ self.config(swift_endpoint_url=None, group='glance')
+ self.assertRaises(exception.MissingParameterValue,
+ self.service._validate_temp_url_config)
+
+ def test__validate_temp_url_account_exception(self):
+ self.config(swift_account=None, group='glance')
+ self.assertRaises(exception.MissingParameterValue,
+ self.service._validate_temp_url_config)
+
+ def test__validate_temp_url_endpoint_negative_duration(self):
+ self.config(swift_temp_url_duration=-1,
+ group='glance')
+ self.assertRaises(exception.InvalidParameterValue,
+ self.service._validate_temp_url_config)
+
+ def test__validate_temp_url_multiple_containers(self):
+ self.config(swift_store_multiple_containers_seed=-1,
+ group='glance')
+ self.assertRaises(exception.InvalidParameterValue,
+ self.service._validate_temp_url_config)
+ self.config(swift_store_multiple_containers_seed=None,
+ group='glance')
+ self.assertRaises(exception.InvalidParameterValue,
+ self.service._validate_temp_url_config)
+ self.config(swift_store_multiple_containers_seed=33,
+ group='glance')
+ self.assertRaises(exception.InvalidParameterValue,
+ self.service._validate_temp_url_config)
+
+
+class TestGlanceUrl(base.TestCase):
+
+ def test_generate_glance_http_url(self):
+ self.config(glance_host="127.0.0.1", group='glance')
+ generated_url = service_utils.generate_glance_url()
+ http_url = "http://%s:%d" % (CONF.glance.glance_host,
+ CONF.glance.glance_port)
+ self.assertEqual(http_url, generated_url)
+
+ def test_generate_glance_https_url(self):
+ self.config(glance_protocol="https", group='glance')
+ self.config(glance_host="127.0.0.1", group='glance')
+ generated_url = service_utils.generate_glance_url()
+ https_url = "https://%s:%d" % (CONF.glance.glance_host,
+ CONF.glance.glance_port)
+ self.assertEqual(https_url, generated_url)
+
+
+class TestServiceUtils(base.TestCase):
+
+ def test_parse_image_ref_no_ssl(self):
+ image_href = u'http://127.0.0.1:9292/image_path/'\
+ u'image_\u00F9\u00FA\u00EE\u0111'
+ parsed_href = service_utils.parse_image_ref(image_href)
+ self.assertEqual((u'image_\u00F9\u00FA\u00EE\u0111',
+ '127.0.0.1', 9292, False), parsed_href)
+
+ def test_parse_image_ref_ssl(self):
+ image_href = 'https://127.0.0.1:9292/image_path/'\
+ u'image_\u00F9\u00FA\u00EE\u0111'
+ parsed_href = service_utils.parse_image_ref(image_href)
+ self.assertEqual((u'image_\u00F9\u00FA\u00EE\u0111',
+ '127.0.0.1', 9292, True), parsed_href)
+
+ def test_generate_image_url(self):
+ image_href = u'image_\u00F9\u00FA\u00EE\u0111'
+ self.config(glance_host='123.123.123.123', group='glance')
+ self.config(glance_port=1234, group='glance')
+ self.config(glance_protocol='https', group='glance')
+ generated_url = service_utils.generate_image_url(image_href)
+ self.assertEqual('https://123.123.123.123:1234/images/'
+ u'image_\u00F9\u00FA\u00EE\u0111',
+ generated_url)
+
+ def test_is_glance_image(self):
+ image_href = u'uui\u0111'
+ self.assertFalse(service_utils.is_glance_image(image_href))
+ image_href = u'733d1c44-a2ea-414b-aca7-69decf20d810'
+ self.assertTrue(service_utils.is_glance_image(image_href))
+ image_href = u'glance://uui\u0111'
+ self.assertTrue(service_utils.is_glance_image(image_href))
+ image_href = 'http://aaa/bbb'
+ self.assertFalse(service_utils.is_glance_image(image_href))
+ image_href = None
+ self.assertFalse(service_utils.is_glance_image(image_href))
+
+ def test_is_image_href_ordinary_file_name_true(self):
+ image = u"\u0111eploy.iso"
+ result = service_utils.is_image_href_ordinary_file_name(image)
+ self.assertTrue(result)
+
+ def test_is_image_href_ordinary_file_name_false(self):
+ for image in ('733d1c44-a2ea-414b-aca7-69decf20d810',
+ u'glance://\u0111eploy_iso',
+ u'http://\u0111eploy_iso',
+ u'https://\u0111eploy_iso',
+ u'file://\u0111eploy_iso',):
+ result = service_utils.is_image_href_ordinary_file_name(image)
+ self.assertFalse(result)
+
+
+class TestGlanceAPIServers(base.TestCase):
+
+ def setUp(self):
+ super(TestGlanceAPIServers, self).setUp()
+ service_utils._GLANCE_API_SERVER = None
+
+ def test__get_api_servers_default(self):
+ host, port, use_ssl = service_utils._get_api_server()
+ self.assertEqual(CONF.glance.glance_host, host)
+ self.assertEqual(CONF.glance.glance_port, port)
+ self.assertEqual(CONF.glance.glance_protocol == 'https', use_ssl)
+
+ def test__get_api_servers_one(self):
+ CONF.set_override('glance_api_servers', ['https://10.0.0.1:9293'],
+ 'glance')
+ s1 = service_utils._get_api_server()
+ s2 = service_utils._get_api_server()
+ self.assertEqual(('10.0.0.1', 9293, True), s1)
+
+ # Only one server, should always get the same one
+ self.assertEqual(s1, s2)
+
+ def test__get_api_servers_two(self):
+ CONF.set_override('glance_api_servers',
+ ['http://10.0.0.1:9293', 'http://10.0.0.2:9294'],
+ 'glance')
+ s1 = service_utils._get_api_server()
+ s2 = service_utils._get_api_server()
+ s3 = service_utils._get_api_server()
+
+ self.assertNotEqual(s1, s2)
+
+ # 2 servers, so cycles to the first again
+ self.assertEqual(s1, s3)
diff --git a/ironic/tests/unit/common/test_hash_ring.py b/ironic/tests/unit/common/test_hash_ring.py
new file mode 100644
index 000000000..832c975b2
--- /dev/null
+++ b/ironic/tests/unit/common/test_hash_ring.py
@@ -0,0 +1,251 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import hashlib
+
+import mock
+from oslo_config import cfg
+from testtools import matchers
+
+from ironic.common import exception
+from ironic.common import hash_ring
+from ironic.tests.unit import base
+from ironic.tests.unit.db import base as db_base
+
+CONF = cfg.CONF
+
+
+class HashRingTestCase(base.TestCase):
+
+ # NOTE(deva): the mapping used in these tests is as follows:
+ # if hosts = [foo, bar]:
+ # fake -> foo, bar
+ # if hosts = [foo, bar, baz]:
+ # fake -> foo, bar, baz
+ # fake-again -> bar, baz, foo
+
+ @mock.patch.object(hashlib, 'md5', autospec=True)
+ def test__hash2int_returns_int(self, mock_md5):
+ CONF.set_override('hash_partition_exponent', 0)
+ r1 = 32 * 'a'
+ r2 = 32 * 'b'
+ mock_md5.return_value.hexdigest.side_effect = [r1, r2]
+
+ hosts = ['foo', 'bar']
+ replicas = 1
+ ring = hash_ring.HashRing(hosts, replicas=replicas)
+
+ self.assertIn(int(r1, 16), ring._host_hashes)
+ self.assertIn(int(r2, 16), ring._host_hashes)
+
+ def test_create_ring(self):
+ hosts = ['foo', 'bar']
+ replicas = 2
+ ring = hash_ring.HashRing(hosts, replicas=replicas)
+ self.assertEqual(set(hosts), ring.hosts)
+ self.assertEqual(replicas, ring.replicas)
+
+ def test_create_with_different_partition_counts(self):
+ hosts = ['foo', 'bar']
+ CONF.set_override('hash_partition_exponent', 2)
+ ring = hash_ring.HashRing(hosts)
+ self.assertEqual(2 ** 2 * 2, len(ring._partitions))
+
+ CONF.set_override('hash_partition_exponent', 8)
+ ring = hash_ring.HashRing(hosts)
+ self.assertEqual(2 ** 8 * 2, len(ring._partitions))
+
+ CONF.set_override('hash_partition_exponent', 16)
+ ring = hash_ring.HashRing(hosts)
+ self.assertEqual(2 ** 16 * 2, len(ring._partitions))
+
+ def test_distribution_one_replica(self):
+ hosts = ['foo', 'bar', 'baz']
+ ring = hash_ring.HashRing(hosts, replicas=1)
+ fake_1_hosts = ring.get_hosts('fake')
+ fake_2_hosts = ring.get_hosts('fake-again')
+ # We should have one host for each thing
+ self.assertThat(fake_1_hosts, matchers.HasLength(1))
+ self.assertThat(fake_2_hosts, matchers.HasLength(1))
+ # And they must not be the same answers even on this simple data.
+ self.assertNotEqual(fake_1_hosts, fake_2_hosts)
+
+ def test_distribution_two_replicas(self):
+ hosts = ['foo', 'bar', 'baz']
+ ring = hash_ring.HashRing(hosts, replicas=2)
+ fake_1_hosts = ring.get_hosts('fake')
+ fake_2_hosts = ring.get_hosts('fake-again')
+ # We should have two hosts for each thing
+ self.assertThat(fake_1_hosts, matchers.HasLength(2))
+ self.assertThat(fake_2_hosts, matchers.HasLength(2))
+ # And they must not be the same answers even on this simple data
+ # because if they were we'd be making the active replica a hot spot.
+ self.assertNotEqual(fake_1_hosts, fake_2_hosts)
+
+ def test_distribution_three_replicas(self):
+ hosts = ['foo', 'bar', 'baz']
+ ring = hash_ring.HashRing(hosts, replicas=3)
+ fake_1_hosts = ring.get_hosts('fake')
+ fake_2_hosts = ring.get_hosts('fake-again')
+ # We should have three hosts for each thing
+ self.assertThat(fake_1_hosts, matchers.HasLength(3))
+ self.assertThat(fake_2_hosts, matchers.HasLength(3))
+ # And they must not be the same answers even on this simple data
+ # because if they were we'd be making the active replica a hot spot.
+ self.assertNotEqual(fake_1_hosts, fake_2_hosts)
+ self.assertNotEqual(fake_1_hosts[0], fake_2_hosts[0])
+
+ def test_ignore_hosts(self):
+ hosts = ['foo', 'bar', 'baz']
+ ring = hash_ring.HashRing(hosts, replicas=1)
+ equals_bar_or_baz = matchers.MatchesAny(
+ matchers.Equals(['bar']),
+ matchers.Equals(['baz']))
+ self.assertThat(
+ ring.get_hosts('fake', ignore_hosts=['foo']),
+ equals_bar_or_baz)
+ self.assertThat(
+ ring.get_hosts('fake', ignore_hosts=['foo', 'bar']),
+ equals_bar_or_baz)
+ self.assertEqual([], ring.get_hosts('fake', ignore_hosts=hosts))
+
+ def test_ignore_hosts_with_replicas(self):
+ hosts = ['foo', 'bar', 'baz']
+ ring = hash_ring.HashRing(hosts, replicas=2)
+ self.assertEqual(
+ set(['bar', 'baz']),
+ set(ring.get_hosts('fake', ignore_hosts=['foo'])))
+ self.assertEqual(
+ set(['baz']),
+ set(ring.get_hosts('fake', ignore_hosts=['foo', 'bar'])))
+ self.assertEqual(
+ set(['baz', 'foo']),
+ set(ring.get_hosts('fake-again', ignore_hosts=['bar'])))
+ self.assertEqual(
+ set(['foo']),
+ set(ring.get_hosts('fake-again', ignore_hosts=['bar', 'baz'])))
+ self.assertEqual([], ring.get_hosts('fake', ignore_hosts=hosts))
+
+ def _compare_rings(self, nodes, conductors, ring,
+ new_conductors, new_ring):
+ delta = {}
+ mapping = dict((node, ring.get_hosts(node)[0]) for node in nodes)
+ new_mapping = dict(
+ (node, new_ring.get_hosts(node)[0]) for node in nodes)
+
+ for key, old in mapping.items():
+ new = new_mapping.get(key, None)
+ if new != old:
+ delta[key] = (old, new)
+ return delta
+
+ def test_rebalance_stability_join(self):
+ num_conductors = 10
+ num_nodes = 10000
+ # Adding 1 conductor to a set of N should move 1/(N+1) of all nodes.
+ # E.g. for a cluster of 10 conductors, adding one should move about 1/11,
+ # or 9%, of the nodes. We allow 1/N to account for rounding in these tests.
+ redistribution_factor = 1.0 / num_conductors
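+ # Illustrative arithmetic for the constants above (not asserted directly):
+ # with 10 conductors and 10000 nodes, adding one conductor is expected to
+ # move roughly 10000 / 11, i.e. about 909 nodes, while the tolerance used
+ # below is 10000 * (1 / 10) = 1000 moved nodes.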
+
+ nodes = [str(x) for x in range(num_nodes)]
+ conductors = [str(x) for x in range(num_conductors)]
+ new_conductors = conductors + ['new']
+ delta = self._compare_rings(
+ nodes, conductors, hash_ring.HashRing(conductors),
+ new_conductors, hash_ring.HashRing(new_conductors))
+
+ self.assertTrue(len(delta) < num_nodes * redistribution_factor)
+
+ def test_rebalance_stability_leave(self):
+ num_conductors = 10
+ num_nodes = 10000
+ # Removing 1 conductor from a set of N should move 1/N of all nodes.
+ # E.g. for a cluster of 10 conductors, removing one should move about 1/10,
+ # or 10%, of the nodes. We allow 1/(N-1) to account for rounding in these
+ # tests.
+ redistribution_factor = 1.0 / (num_conductors - 1)
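+ # Illustrative arithmetic for the constants above (not asserted directly):
+ # with 10 conductors and 10000 nodes, removing one conductor is expected
+ # to move roughly 10000 / 10 = 1000 nodes, while the tolerance used below
+ # is 10000 * (1 / 9), i.e. about 1111 moved nodes.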
+
+ nodes = [str(x) for x in range(num_nodes)]
+ conductors = [str(x) for x in range(num_conductors)]
+ new_conductors = conductors[:]
+ new_conductors.pop()
+ delta = self._compare_rings(
+ nodes, conductors, hash_ring.HashRing(conductors),
+ new_conductors, hash_ring.HashRing(new_conductors))
+
+ self.assertTrue(len(delta) < num_nodes * redistribution_factor)
+
+ def test_more_replicas_than_hosts(self):
+ hosts = ['foo', 'bar']
+ ring = hash_ring.HashRing(hosts, replicas=10)
+ self.assertEqual(set(hosts), set(ring.get_hosts('fake')))
+
+ def test_ignore_non_existent_host(self):
+ hosts = ['foo', 'bar']
+ ring = hash_ring.HashRing(hosts, replicas=1)
+ self.assertEqual(['foo'], ring.get_hosts('fake',
+ ignore_hosts=['baz']))
+
+ def test_create_ring_invalid_data(self):
+ hosts = None
+ self.assertRaises(exception.Invalid,
+ hash_ring.HashRing,
+ hosts)
+
+ def test_get_hosts_invalid_data(self):
+ hosts = ['foo', 'bar']
+ ring = hash_ring.HashRing(hosts)
+ self.assertRaises(exception.Invalid,
+ ring.get_hosts,
+ None)
+
+
+class HashRingManagerTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(HashRingManagerTestCase, self).setUp()
+ self.ring_manager = hash_ring.HashRingManager()
+
+ def register_conductors(self):
+ self.dbapi.register_conductor({
+ 'hostname': 'host1',
+ 'drivers': ['driver1', 'driver2'],
+ })
+ self.dbapi.register_conductor({
+ 'hostname': 'host2',
+ 'drivers': ['driver1'],
+ })
+
+ def test_hash_ring_manager_get_ring_success(self):
+ self.register_conductors()
+ ring = self.ring_manager['driver1']
+ self.assertEqual(sorted(['host1', 'host2']), sorted(ring.hosts))
+
+ def test_hash_ring_manager_driver_not_found(self):
+ self.register_conductors()
+ self.assertRaises(exception.DriverNotFound,
+ self.ring_manager.__getitem__,
+ 'driver3')
+
+ def test_hash_ring_manager_no_refresh(self):
+ # If a new conductor is registered after the ring manager is
+ # initialized, it won't be seen. Long term this is probably
+ # undesirable, but today is the intended behavior.
+ self.assertRaises(exception.DriverNotFound,
+ self.ring_manager.__getitem__,
+ 'driver1')
+ self.register_conductors()
+ self.assertRaises(exception.DriverNotFound,
+ self.ring_manager.__getitem__,
+ 'driver1')
diff --git a/ironic/tests/unit/common/test_image_service.py b/ironic/tests/unit/common/test_image_service.py
new file mode 100644
index 000000000..ed39f1bef
--- /dev/null
+++ b/ironic/tests/unit/common/test_image_service.py
@@ -0,0 +1,332 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import os
+import shutil
+
+import mock
+import requests
+import sendfile
+import six
+import six.moves.builtins as __builtin__
+from six.moves import http_client
+
+from ironic.common import exception
+from ironic.common.glance_service.v1 import image_service as glance_v1_service
+from ironic.common import image_service
+from ironic.common import keystone
+from ironic.tests.unit import base
+
+if six.PY3:
+ import io
+ file = io.BytesIO
+
+
+class HttpImageServiceTestCase(base.TestCase):
+ def setUp(self):
+ super(HttpImageServiceTestCase, self).setUp()
+ self.service = image_service.HttpImageService()
+ self.href = 'http://127.0.0.1:12345/fedora.qcow2'
+
+ @mock.patch.object(requests, 'head', autospec=True)
+ def test_validate_href(self, head_mock):
+ response = head_mock.return_value
+ response.status_code = http_client.OK
+ self.service.validate_href(self.href)
+ head_mock.assert_called_once_with(self.href)
+ response.status_code = http_client.NO_CONTENT
+ self.assertRaises(exception.ImageRefValidationFailed,
+ self.service.validate_href,
+ self.href)
+ response.status_code = http_client.BAD_REQUEST
+ self.assertRaises(exception.ImageRefValidationFailed,
+ self.service.validate_href,
+ self.href)
+
+ @mock.patch.object(requests, 'head', autospec=True)
+ def test_validate_href_error_code(self, head_mock):
+ head_mock.return_value.status_code = http_client.BAD_REQUEST
+ self.assertRaises(exception.ImageRefValidationFailed,
+ self.service.validate_href, self.href)
+ head_mock.assert_called_once_with(self.href)
+
+ @mock.patch.object(requests, 'head', autospec=True)
+ def test_validate_href_error(self, head_mock):
+ head_mock.side_effect = iter([requests.ConnectionError()])
+ self.assertRaises(exception.ImageRefValidationFailed,
+ self.service.validate_href, self.href)
+ head_mock.assert_called_once_with(self.href)
+
+ @mock.patch.object(requests, 'head', autospec=True)
+ def _test_show(self, head_mock, mtime, mtime_date):
+ head_mock.return_value.status_code = http_client.OK
+ head_mock.return_value.headers = {
+ 'Content-Length': 100,
+ 'Last-Modified': mtime
+ }
+ result = self.service.show(self.href)
+ head_mock.assert_called_once_with(self.href)
+ self.assertEqual({'size': 100, 'updated_at': mtime_date,
+ 'properties': {}}, result)
+
+ def test_show_rfc_822(self):
+ self._test_show(mtime='Tue, 15 Nov 2014 08:12:31 GMT',
+ mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
+
+ def test_show_rfc_850(self):
+ self._test_show(mtime='Tuesday, 15-Nov-14 08:12:31 GMT',
+ mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
+
+ def test_show_ansi_c(self):
+ self._test_show(mtime='Tue Nov 15 08:12:31 2014',
+ mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))
+
+ @mock.patch.object(requests, 'head', autospec=True)
+ def test_show_no_content_length(self, head_mock):
+ head_mock.return_value.status_code = http_client.OK
+ head_mock.return_value.headers = {}
+ self.assertRaises(exception.ImageRefValidationFailed,
+ self.service.show, self.href)
+ head_mock.assert_called_with(self.href)
+
+ @mock.patch.object(shutil, 'copyfileobj', autospec=True)
+ @mock.patch.object(requests, 'get', autospec=True)
+ def test_download_success(self, req_get_mock, shutil_mock):
+ response_mock = req_get_mock.return_value
+ response_mock.status_code = http_client.OK
+ response_mock.raw = mock.MagicMock(spec=file)
+ file_mock = mock.Mock(spec=file)
+ self.service.download(self.href, file_mock)
+ shutil_mock.assert_called_once_with(
+ response_mock.raw.__enter__(), file_mock,
+ image_service.IMAGE_CHUNK_SIZE
+ )
+ req_get_mock.assert_called_once_with(self.href, stream=True)
+
+ @mock.patch.object(requests, 'get', autospec=True)
+ def test_download_fail_connerror(self, req_get_mock):
+ req_get_mock.side_effect = iter([requests.ConnectionError()])
+ file_mock = mock.Mock(spec=file)
+ self.assertRaises(exception.ImageDownloadFailed,
+ self.service.download, self.href, file_mock)
+
+ @mock.patch.object(shutil, 'copyfileobj', autospec=True)
+ @mock.patch.object(requests, 'get', autospec=True)
+ def test_download_fail_ioerror(self, req_get_mock, shutil_mock):
+ response_mock = req_get_mock.return_value
+ response_mock.status_code = http_client.OK
+ response_mock.raw = mock.MagicMock(spec=file)
+ file_mock = mock.Mock(spec=file)
+ shutil_mock.side_effect = IOError
+ self.assertRaises(exception.ImageDownloadFailed,
+ self.service.download, self.href, file_mock)
+ req_get_mock.assert_called_once_with(self.href, stream=True)
+
+
+class FileImageServiceTestCase(base.TestCase):
+ def setUp(self):
+ super(FileImageServiceTestCase, self).setUp()
+ self.service = image_service.FileImageService()
+ self.href = 'file:///home/user/image.qcow2'
+ self.href_path = '/home/user/image.qcow2'
+
+ @mock.patch.object(os.path, 'isfile', return_value=True, autospec=True)
+ def test_validate_href(self, path_exists_mock):
+ self.service.validate_href(self.href)
+ path_exists_mock.assert_called_once_with(self.href_path)
+
+ @mock.patch.object(os.path, 'isfile', return_value=False, autospec=True)
+ def test_validate_href_path_not_found_or_not_file(self, path_exists_mock):
+ self.assertRaises(exception.ImageRefValidationFailed,
+ self.service.validate_href, self.href)
+ path_exists_mock.assert_called_once_with(self.href_path)
+
+ @mock.patch.object(os.path, 'getmtime', return_value=1431087909.1641912,
+ autospec=True)
+ @mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
+ @mock.patch.object(image_service.FileImageService, 'validate_href',
+ autospec=True)
+ def test_show(self, _validate_mock, getsize_mock, getmtime_mock):
+ _validate_mock.return_value = self.href_path
+ result = self.service.show(self.href)
+ getsize_mock.assert_called_once_with(self.href_path)
+ getmtime_mock.assert_called_once_with(self.href_path)
+ _validate_mock.assert_called_once_with(mock.ANY, self.href)
+ self.assertEqual({'size': 42,
+ 'updated_at': datetime.datetime(2015, 5, 8,
+ 12, 25, 9, 164191),
+ 'properties': {}}, result)
+
+ @mock.patch.object(os, 'link', autospec=True)
+ @mock.patch.object(os, 'remove', autospec=True)
+ @mock.patch.object(os, 'access', return_value=True, autospec=True)
+ @mock.patch.object(os, 'stat', autospec=True)
+ @mock.patch.object(image_service.FileImageService, 'validate_href',
+ autospec=True)
+ def test_download_hard_link(self, _validate_mock, stat_mock, access_mock,
+ remove_mock, link_mock):
+ _validate_mock.return_value = self.href_path
+ stat_mock.return_value.st_dev = 'dev1'
+ file_mock = mock.Mock(spec=file)
+ file_mock.name = 'file'
+ self.service.download(self.href, file_mock)
+ _validate_mock.assert_called_once_with(mock.ANY, self.href)
+ self.assertEqual(2, stat_mock.call_count)
+ access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
+ remove_mock.assert_called_once_with('file')
+ link_mock.assert_called_once_with(self.href_path, 'file')
+
+ @mock.patch.object(sendfile, 'sendfile', autospec=True)
+ @mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
+ @mock.patch.object(__builtin__, 'open', autospec=True)
+ @mock.patch.object(os, 'access', return_value=False, autospec=True)
+ @mock.patch.object(os, 'stat', autospec=True)
+ @mock.patch.object(image_service.FileImageService, 'validate_href',
+ autospec=True)
+ def test_download_copy(self, _validate_mock, stat_mock, access_mock,
+ open_mock, size_mock, copy_mock):
+ _validate_mock.return_value = self.href_path
+ stat_mock.return_value.st_dev = 'dev1'
+ file_mock = mock.MagicMock(spec=file)
+ file_mock.name = 'file'
+ input_mock = mock.MagicMock(spec=file)
+ open_mock.return_value = input_mock
+ self.service.download(self.href, file_mock)
+ _validate_mock.assert_called_once_with(mock.ANY, self.href)
+ self.assertEqual(2, stat_mock.call_count)
+ access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
+ copy_mock.assert_called_once_with(file_mock.fileno(),
+ input_mock.__enter__().fileno(),
+ 0, 42)
+ size_mock.assert_called_once_with(self.href_path)
+
+ @mock.patch.object(os, 'remove', side_effect=OSError, autospec=True)
+ @mock.patch.object(os, 'access', return_value=True, autospec=True)
+ @mock.patch.object(os, 'stat', autospec=True)
+ @mock.patch.object(image_service.FileImageService, 'validate_href',
+ autospec=True)
+ def test_download_hard_link_fail(self, _validate_mock, stat_mock,
+ access_mock, remove_mock):
+ _validate_mock.return_value = self.href_path
+ stat_mock.return_value.st_dev = 'dev1'
+ file_mock = mock.MagicMock(spec=file)
+ file_mock.name = 'file'
+ self.assertRaises(exception.ImageDownloadFailed,
+ self.service.download, self.href, file_mock)
+ _validate_mock.assert_called_once_with(mock.ANY, self.href)
+ self.assertEqual(2, stat_mock.call_count)
+ access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
+
+ @mock.patch.object(sendfile, 'sendfile', side_effect=OSError,
+ autospec=True)
+ @mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
+ @mock.patch.object(__builtin__, 'open', autospec=True)
+ @mock.patch.object(os, 'access', return_value=False, autospec=True)
+ @mock.patch.object(os, 'stat', autospec=True)
+ @mock.patch.object(image_service.FileImageService, 'validate_href',
+ autospec=True)
+ def test_download_copy_fail(self, _validate_mock, stat_mock, access_mock,
+ open_mock, size_mock, copy_mock):
+ _validate_mock.return_value = self.href_path
+ stat_mock.return_value.st_dev = 'dev1'
+ file_mock = mock.MagicMock(spec=file)
+ file_mock.name = 'file'
+ input_mock = mock.MagicMock(spec=file)
+ open_mock.return_value = input_mock
+ self.assertRaises(exception.ImageDownloadFailed,
+ self.service.download, self.href, file_mock)
+ _validate_mock.assert_called_once_with(mock.ANY, self.href)
+ self.assertEqual(2, stat_mock.call_count)
+ access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
+ size_mock.assert_called_once_with(self.href_path)
+
+
+class ServiceGetterTestCase(base.TestCase):
+
+ @mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
+ @mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
+ return_value=None, autospec=True)
+ def test_get_glance_image_service(self, glance_service_mock, token_mock):
+ image_href = 'image-uuid'
+ self.context.auth_token = 'fake'
+ image_service.get_image_service(image_href, context=self.context)
+ glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
+ self.context)
+ self.assertFalse(token_mock.called)
+
+ @mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
+ @mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
+ return_value=None, autospec=True)
+ def test_get_glance_image_service_url(self, glance_service_mock,
+ token_mock):
+ image_href = 'glance://image-uuid'
+ self.context.auth_token = 'fake'
+ image_service.get_image_service(image_href, context=self.context)
+ glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
+ self.context)
+ self.assertFalse(token_mock.called)
+
+ @mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
+ @mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
+ return_value=None, autospec=True)
+ def test_get_glance_image_service_no_token(self, glance_service_mock,
+ token_mock):
+ image_href = 'image-uuid'
+ self.context.auth_token = None
+ token_mock.return_value = 'admin-token'
+ image_service.get_image_service(image_href, context=self.context)
+ glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
+ self.context)
+ token_mock.assert_called_once_with()
+ self.assertEqual('admin-token', self.context.auth_token)
+
+ @mock.patch.object(keystone, 'get_admin_auth_token', autospec=True)
+ @mock.patch.object(glance_v1_service.GlanceImageService, '__init__',
+ return_value=None, autospec=True)
+ def test_get_glance_image_service_token_not_needed(self,
+ glance_service_mock,
+ token_mock):
+ image_href = 'image-uuid'
+ self.context.auth_token = None
+ self.config(auth_strategy='noauth', group='glance')
+ image_service.get_image_service(image_href, context=self.context)
+ glance_service_mock.assert_called_once_with(mock.ANY, None, 1,
+ self.context)
+ self.assertFalse(token_mock.called)
+ self.assertIsNone(self.context.auth_token)
+
+ @mock.patch.object(image_service.HttpImageService, '__init__',
+ return_value=None, autospec=True)
+ def test_get_http_image_service(self, http_service_mock):
+ image_href = 'http://127.0.0.1/image.qcow2'
+ image_service.get_image_service(image_href)
+ http_service_mock.assert_called_once_with()
+
+ @mock.patch.object(image_service.HttpImageService, '__init__',
+ return_value=None, autospec=True)
+ def test_get_https_image_service(self, http_service_mock):
+ image_href = 'https://127.0.0.1/image.qcow2'
+ image_service.get_image_service(image_href)
+ http_service_mock.assert_called_once_with()
+
+ @mock.patch.object(image_service.FileImageService, '__init__',
+ return_value=None, autospec=True)
+ def test_get_file_image_service(self, local_service_mock):
+ image_href = 'file:///home/user/image.qcow2'
+ image_service.get_image_service(image_href)
+ local_service_mock.assert_called_once_with()
+
+ def test_get_image_service_unknown_protocol(self):
+ image_href = 'usenet://alt.binaries.dvd/image.qcow2'
+ self.assertRaises(exception.ImageRefValidationFailed,
+ image_service.get_image_service, image_href)
diff --git a/ironic/tests/unit/common/test_images.py b/ironic/tests/unit/common/test_images.py
new file mode 100644
index 000000000..f6493df4e
--- /dev/null
+++ b/ironic/tests/unit/common/test_images.py
@@ -0,0 +1,872 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4
+# coding=utf-8
+
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import shutil
+
+import mock
+from oslo_concurrency import processutils
+from oslo_config import cfg
+import six
+import six.moves.builtins as __builtin__
+
+from ironic.common import exception
+from ironic.common.glance_service import service_utils as glance_utils
+from ironic.common import image_service
+from ironic.common import images
+from ironic.common import utils
+from ironic.openstack.common import imageutils
+from ironic.tests.unit import base
+
+if six.PY3:
+ import io
+ file = io.BytesIO
+
+CONF = cfg.CONF
+
+
+class IronicImagesTestCase(base.TestCase):
+
+ class FakeImgInfo(object):
+ pass
+
+ @mock.patch.object(imageutils, 'QemuImgInfo', autospec=True)
+ @mock.patch.object(os.path, 'exists', return_value=False, autospec=True)
+ def test_qemu_img_info_path_doesnt_exist(self, path_exists_mock,
+ qemu_img_info_mock):
+ images.qemu_img_info('noimg')
+ path_exists_mock.assert_called_once_with('noimg')
+ qemu_img_info_mock.assert_called_once_with()
+
+ @mock.patch.object(utils, 'execute', return_value=('out', 'err'),
+ autospec=True)
+ @mock.patch.object(imageutils, 'QemuImgInfo', autospec=True)
+ @mock.patch.object(os.path, 'exists', return_value=True, autospec=True)
+ def test_qemu_img_info_path_exists(self, path_exists_mock,
+ qemu_img_info_mock, execute_mock):
+ images.qemu_img_info('img')
+ path_exists_mock.assert_called_once_with('img')
+ execute_mock.assert_called_once_with('env', 'LC_ALL=C', 'LANG=C',
+ 'qemu-img', 'info', 'img')
+ qemu_img_info_mock.assert_called_once_with('out')
+
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_convert_image(self, execute_mock):
+ images.convert_image('source', 'dest', 'out_format')
+ execute_mock.assert_called_once_with('qemu-img', 'convert', '-O',
+ 'out_format', 'source', 'dest',
+ run_as_root=False)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ @mock.patch.object(__builtin__, 'open', autospec=True)
+ def test_fetch_image_service(self, open_mock, image_service_mock):
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'file'
+ open_mock.return_value = mock_file_handle
+
+ images.fetch('context', 'image_href', 'path')
+
+ open_mock.assert_called_once_with('path', 'wb')
+ image_service_mock.assert_called_once_with('image_href',
+ context='context')
+ image_service_mock.return_value.download.assert_called_once_with(
+ 'image_href', 'file')
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ @mock.patch.object(images, 'image_to_raw', autospec=True)
+ @mock.patch.object(__builtin__, 'open', autospec=True)
+ def test_fetch_image_service_force_raw(self, open_mock, image_to_raw_mock,
+ image_service_mock):
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'file'
+ open_mock.return_value = mock_file_handle
+
+ images.fetch('context', 'image_href', 'path', force_raw=True)
+
+ open_mock.assert_called_once_with('path', 'wb')
+ image_service_mock.return_value.download.assert_called_once_with(
+ 'image_href', 'file')
+ image_to_raw_mock.assert_called_once_with(
+ 'image_href', 'path', 'path.part')
+
+ @mock.patch.object(images, 'qemu_img_info', autospec=True)
+ def test_image_to_raw_no_file_format(self, qemu_img_info_mock):
+ info = self.FakeImgInfo()
+ info.file_format = None
+ qemu_img_info_mock.return_value = info
+
+ e = self.assertRaises(exception.ImageUnacceptable, images.image_to_raw,
+ 'image_href', 'path', 'path_tmp')
+ qemu_img_info_mock.assert_called_once_with('path_tmp')
+ self.assertIn("'qemu-img info' parsing failed.", str(e))
+
+ @mock.patch.object(images, 'qemu_img_info', autospec=True)
+ def test_image_to_raw_backing_file_present(self, qemu_img_info_mock):
+ info = self.FakeImgInfo()
+ info.file_format = 'raw'
+ info.backing_file = 'backing_file'
+ qemu_img_info_mock.return_value = info
+
+ e = self.assertRaises(exception.ImageUnacceptable, images.image_to_raw,
+ 'image_href', 'path', 'path_tmp')
+ qemu_img_info_mock.assert_called_once_with('path_tmp')
+ self.assertIn("fmt=raw backed by: backing_file", str(e))
+
+ @mock.patch.object(os, 'rename', autospec=True)
+ @mock.patch.object(os, 'unlink', autospec=True)
+ @mock.patch.object(images, 'convert_image', autospec=True)
+ @mock.patch.object(images, 'qemu_img_info', autospec=True)
+ def test_image_to_raw(self, qemu_img_info_mock, convert_image_mock,
+ unlink_mock, rename_mock):
+ CONF.set_override('force_raw_images', True)
+ info = self.FakeImgInfo()
+ info.file_format = 'fmt'
+ info.backing_file = None
+ qemu_img_info_mock.return_value = info
+
+ def convert_side_effect(source, dest, out_format):
+ info.file_format = 'raw'
+ convert_image_mock.side_effect = convert_side_effect
+
+ images.image_to_raw('image_href', 'path', 'path_tmp')
+
+ qemu_img_info_mock.assert_has_calls([mock.call('path_tmp'),
+ mock.call('path.converted')])
+ convert_image_mock.assert_called_once_with('path_tmp',
+ 'path.converted', 'raw')
+ unlink_mock.assert_called_once_with('path_tmp')
+ rename_mock.assert_called_once_with('path.converted', 'path')
+
+ @mock.patch.object(os, 'unlink', autospec=True)
+ @mock.patch.object(images, 'convert_image', autospec=True)
+ @mock.patch.object(images, 'qemu_img_info', autospec=True)
+ def test_image_to_raw_not_raw_after_conversion(self, qemu_img_info_mock,
+ convert_image_mock,
+ unlink_mock):
+ CONF.set_override('force_raw_images', True)
+ info = self.FakeImgInfo()
+ info.file_format = 'fmt'
+ info.backing_file = None
+ qemu_img_info_mock.return_value = info
+
+ self.assertRaises(exception.ImageConvertFailed, images.image_to_raw,
+ 'image_href', 'path', 'path_tmp')
+ qemu_img_info_mock.assert_has_calls([mock.call('path_tmp'),
+ mock.call('path.converted')])
+ convert_image_mock.assert_called_once_with('path_tmp',
+ 'path.converted', 'raw')
+ unlink_mock.assert_called_once_with('path_tmp')
+
+ @mock.patch.object(os, 'rename', autospec=True)
+ @mock.patch.object(images, 'qemu_img_info', autospec=True)
+ def test_image_to_raw_already_raw_format(self, qemu_img_info_mock,
+ rename_mock):
+ info = self.FakeImgInfo()
+ info.file_format = 'raw'
+ info.backing_file = None
+ qemu_img_info_mock.return_value = info
+
+ images.image_to_raw('image_href', 'path', 'path_tmp')
+
+ qemu_img_info_mock.assert_called_once_with('path_tmp')
+ rename_mock.assert_called_once_with('path_tmp', 'path')
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test_download_size_no_image_service(self, image_service_mock):
+ images.download_size('context', 'image_href')
+ image_service_mock.assert_called_once_with('image_href',
+ context='context')
+ image_service_mock.return_value.show.assert_called_once_with(
+ 'image_href')
+
+ def test_download_size_image_service(self):
+ image_service_mock = mock.MagicMock()
+ images.download_size('context', 'image_href', image_service_mock)
+ image_service_mock.show.assert_called_once_with('image_href')
+
+ @mock.patch.object(images, 'qemu_img_info', autospec=True)
+ def test_converted_size(self, qemu_img_info_mock):
+ info = self.FakeImgInfo()
+ info.virtual_size = 1
+ qemu_img_info_mock.return_value = info
+ size = images.converted_size('path')
+ qemu_img_info_mock.assert_called_once_with('path')
+ self.assertEqual(1, size)
+
+ @mock.patch.object(images, 'get_image_properties', autospec=True)
+ @mock.patch.object(glance_utils, 'is_glance_image', autospec=True)
+ def test_is_whole_disk_image_no_img_src(self, mock_igi, mock_gip):
+ instance_info = {'image_source': ''}
+ iwdi = images.is_whole_disk_image('context', instance_info)
+ self.assertIsNone(iwdi)
+ self.assertFalse(mock_igi.called)
+ self.assertFalse(mock_gip.called)
+
+ @mock.patch.object(images, 'get_image_properties', autospec=True)
+ @mock.patch.object(glance_utils, 'is_glance_image', autospec=True)
+ def test_is_whole_disk_image_partition_image(self, mock_igi, mock_gip):
+ mock_igi.return_value = True
+ mock_gip.return_value = {'kernel_id': 'kernel',
+ 'ramdisk_id': 'ramdisk'}
+ instance_info = {'image_source': 'glance://partition_image'}
+ image_source = instance_info['image_source']
+ is_whole_disk_image = images.is_whole_disk_image('context',
+ instance_info)
+ self.assertFalse(is_whole_disk_image)
+ mock_igi.assert_called_once_with(image_source)
+ mock_gip.assert_called_once_with('context', image_source)
+
+ @mock.patch.object(images, 'get_image_properties', autospec=True)
+ @mock.patch.object(glance_utils, 'is_glance_image', autospec=True)
+ def test_is_whole_disk_image_whole_disk_image(self, mock_igi, mock_gip):
+ mock_igi.return_value = True
+ mock_gip.return_value = {}
+ instance_info = {'image_source': 'glance://whole_disk_image'}
+ image_source = instance_info['image_source']
+ is_whole_disk_image = images.is_whole_disk_image('context',
+ instance_info)
+ self.assertTrue(is_whole_disk_image)
+ mock_igi.assert_called_once_with(image_source)
+ mock_gip.assert_called_once_with('context', image_source)
+
+ @mock.patch.object(images, 'get_image_properties', autospec=True)
+ @mock.patch.object(glance_utils, 'is_glance_image', autospec=True)
+ def test_is_whole_disk_image_partition_non_glance(self, mock_igi,
+ mock_gip):
+ mock_igi.return_value = False
+ instance_info = {'image_source': 'partition_image',
+ 'kernel': 'kernel',
+ 'ramdisk': 'ramdisk'}
+ is_whole_disk_image = images.is_whole_disk_image('context',
+ instance_info)
+ self.assertFalse(is_whole_disk_image)
+ self.assertFalse(mock_gip.called)
+ mock_igi.assert_called_once_with(instance_info['image_source'])
+
+ @mock.patch.object(images, 'get_image_properties', autospec=True)
+ @mock.patch.object(glance_utils, 'is_glance_image', autospec=True)
+ def test_is_whole_disk_image_whole_disk_non_glance(self, mock_igi,
+ mock_gip):
+ mock_igi.return_value = False
+ instance_info = {'image_source': 'whole_disk_image'}
+ is_whole_disk_image = images.is_whole_disk_image('context',
+ instance_info)
+ self.assertTrue(is_whole_disk_image)
+ self.assertFalse(mock_gip.called)
+ mock_igi.assert_called_once_with(instance_info['image_source'])
+
+
+class FsImageTestCase(base.TestCase):
+
+ @mock.patch.object(shutil, 'copyfile', autospec=True)
+ @mock.patch.object(os, 'makedirs', autospec=True)
+ @mock.patch.object(os.path, 'dirname', autospec=True)
+ @mock.patch.object(os.path, 'exists', autospec=True)
+ def test__create_root_fs(self, path_exists_mock,
+ dirname_mock, mkdir_mock, cp_mock):
+
+ path_exists_mock_func = lambda path: path == 'root_dir'
+
+ files_info = {
+ 'a1': 'b1',
+ 'a2': 'b2',
+ 'a3': 'sub_dir/b3'}
+
+ path_exists_mock.side_effect = path_exists_mock_func
+ dirname_mock.side_effect = iter(
+ ['root_dir', 'root_dir', 'root_dir/sub_dir', 'root_dir/sub_dir'])
+ images._create_root_fs('root_dir', files_info)
+ cp_mock.assert_any_call('a1', 'root_dir/b1')
+ cp_mock.assert_any_call('a2', 'root_dir/b2')
+ cp_mock.assert_any_call('a3', 'root_dir/sub_dir/b3')
+
+ path_exists_mock.assert_any_call('root_dir/sub_dir')
+ dirname_mock.assert_any_call('root_dir/b1')
+ dirname_mock.assert_any_call('root_dir/b2')
+ dirname_mock.assert_any_call('root_dir/sub_dir/b3')
+ mkdir_mock.assert_called_once_with('root_dir/sub_dir')
+
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'write_to_file', autospec=True)
+ @mock.patch.object(utils, 'dd', autospec=True)
+ @mock.patch.object(utils, 'umount', autospec=True)
+ @mock.patch.object(utils, 'mount', autospec=True)
+ @mock.patch.object(utils, 'mkfs', autospec=True)
+ def test_create_vfat_image(
+ self, mkfs_mock, mount_mock, umount_mock, dd_mock, write_mock,
+ tempdir_mock, create_root_fs_mock):
+
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tempdir'
+ tempdir_mock.return_value = mock_file_handle
+
+ parameters = {'p1': 'v1'}
+ files_info = {'a': 'b'}
+ images.create_vfat_image('tgt_file', parameters=parameters,
+ files_info=files_info, parameters_file='qwe',
+ fs_size_kib=1000)
+
+ dd_mock.assert_called_once_with('/dev/zero',
+ 'tgt_file',
+ 'count=1',
+ 'bs=1000KiB')
+
+ mkfs_mock.assert_called_once_with('vfat', 'tgt_file',
+ label="ir-vfd-dev")
+ mount_mock.assert_called_once_with('tgt_file', 'tempdir',
+ '-o', 'umask=0')
+
+ parameters_file_path = os.path.join('tempdir', 'qwe')
+ write_mock.assert_called_once_with(parameters_file_path, 'p1=v1')
+ create_root_fs_mock.assert_called_once_with('tempdir', files_info)
+ umount_mock.assert_called_once_with('tempdir')
+
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'dd', autospec=True)
+ @mock.patch.object(utils, 'umount', autospec=True)
+ @mock.patch.object(utils, 'mount', autospec=True)
+ @mock.patch.object(utils, 'mkfs', autospec=True)
+ def test_create_vfat_image_always_umount(
+ self, mkfs_mock, mount_mock, umount_mock, dd_mock,
+ tempdir_mock, create_root_fs_mock):
+
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tempdir'
+ tempdir_mock.return_value = mock_file_handle
+ files_info = {'a': 'b'}
+ create_root_fs_mock.side_effect = OSError()
+ self.assertRaises(exception.ImageCreationFailed,
+ images.create_vfat_image, 'tgt_file',
+ files_info=files_info)
+
+ umount_mock.assert_called_once_with('tempdir')
+
+ @mock.patch.object(utils, 'dd', autospec=True)
+ def test_create_vfat_image_dd_fails(self, dd_mock):
+
+ dd_mock.side_effect = processutils.ProcessExecutionError
+ self.assertRaises(exception.ImageCreationFailed,
+ images.create_vfat_image, 'tgt_file')
+
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'dd', autospec=True)
+ @mock.patch.object(utils, 'mkfs', autospec=True)
+ def test_create_vfat_image_mkfs_fails(self, mkfs_mock, dd_mock,
+ tempdir_mock):
+
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tempdir'
+ tempdir_mock.return_value = mock_file_handle
+
+ mkfs_mock.side_effect = processutils.ProcessExecutionError
+ self.assertRaises(exception.ImageCreationFailed,
+ images.create_vfat_image, 'tgt_file')
+
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'dd', autospec=True)
+ @mock.patch.object(utils, 'umount', autospec=True)
+ @mock.patch.object(utils, 'mount', autospec=True)
+ @mock.patch.object(utils, 'mkfs', autospec=True)
+ def test_create_vfat_image_umount_fails(
+ self, mkfs_mock, mount_mock, umount_mock, dd_mock,
+ tempdir_mock, create_root_fs_mock):
+
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tempdir'
+ tempdir_mock.return_value = mock_file_handle
+ umount_mock.side_effect = processutils.ProcessExecutionError
+
+ self.assertRaises(exception.ImageCreationFailed,
+ images.create_vfat_image, 'tgt_file')
+
+ @mock.patch.object(utils, 'umount', autospec=True)
+ def test__umount_without_raise(self, umount_mock):
+
+ umount_mock.side_effect = processutils.ProcessExecutionError
+ images._umount_without_raise('mountdir')
+ umount_mock.assert_called_once_with('mountdir')
+
+ def test__generate_isolinux_cfg(self):
+
+ kernel_params = ['key1=value1', 'key2']
+ options = {'kernel': '/vmlinuz', 'ramdisk': '/initrd'}
+ expected_cfg = ("default boot\n"
+ "\n"
+ "label boot\n"
+ "kernel /vmlinuz\n"
+ "append initrd=/initrd text key1=value1 key2 --")
+ cfg = images._generate_cfg(kernel_params,
+ CONF.isolinux_config_template,
+ options)
+ self.assertEqual(expected_cfg, cfg)
+
+ def test__generate_grub_cfg(self):
+ kernel_params = ['key1=value1', 'key2']
+ options = {'linux': '/vmlinuz', 'initrd': '/initrd'}
+ expected_cfg = ("set default=0\n"
+ "set timeout=5\n"
+ "set hidden_timeout_quiet=false\n"
+ "\n"
+ "menuentry \"boot_partition\" {\n"
+ "linuxefi /vmlinuz key1=value1 key2 --\n"
+ "initrdefi /initrd\n"
+ "}")
+
+ cfg = images._generate_cfg(kernel_params,
+ CONF.grub_config_template,
+ options)
+ self.assertEqual(expected_cfg, cfg)
+
+ @mock.patch.object(os.path, 'relpath', autospec=True)
+ @mock.patch.object(os, 'walk', autospec=True)
+ @mock.patch.object(utils, 'mount', autospec=True)
+ def test__mount_deploy_iso(self, mount_mock,
+ walk_mock, relpath_mock):
+ walk_mock.return_value = [('/tmpdir1/EFI/ubuntu', [], ['grub.cfg']),
+ ('/tmpdir1/isolinux', [],
+ ['efiboot.img', 'isolinux.bin',
+ 'isolinux.cfg'])]
+ relpath_mock.side_effect = iter(
+ ['EFI/ubuntu/grub.cfg', 'isolinux/efiboot.img'])
+
+ images._mount_deploy_iso('path/to/deployiso', 'tmpdir1')
+ mount_mock.assert_called_once_with('path/to/deployiso',
+ 'tmpdir1', '-o', 'loop')
+ walk_mock.assert_called_once_with('tmpdir1')
+
+ @mock.patch.object(images, '_umount_without_raise', autospec=True)
+ @mock.patch.object(os.path, 'relpath', autospec=True)
+ @mock.patch.object(os, 'walk', autospec=True)
+ @mock.patch.object(utils, 'mount', autospec=True)
+ def test__mount_deploy_iso_fail_no_efibootimg(self, mount_mock,
+ walk_mock, relpath_mock,
+ umount_mock):
+ walk_mock.return_value = [('/tmpdir1/EFI/ubuntu', [], ['grub.cfg']),
+ ('/tmpdir1/isolinux', [],
+ ['isolinux.bin', 'isolinux.cfg'])]
+ relpath_mock.side_effect = iter(['EFI/ubuntu/grub.cfg'])
+
+ self.assertRaises(exception.ImageCreationFailed,
+ images._mount_deploy_iso,
+ 'path/to/deployiso', 'tmpdir1')
+ mount_mock.assert_called_once_with('path/to/deployiso',
+ 'tmpdir1', '-o', 'loop')
+ walk_mock.assert_called_once_with('tmpdir1')
+ umount_mock.assert_called_once_with('tmpdir1')
+
+ @mock.patch.object(images, '_umount_without_raise', autospec=True)
+ @mock.patch.object(os.path, 'relpath', autospec=True)
+ @mock.patch.object(os, 'walk', autospec=True)
+ @mock.patch.object(utils, 'mount', autospec=True)
+ def test__mount_deploy_iso_fails_no_grub_cfg(self, mount_mock,
+ walk_mock, relpath_mock,
+ umount_mock):
+ walk_mock.return_value = [('/tmpdir1/EFI/ubuntu', '', []),
+ ('/tmpdir1/isolinux', '',
+ ['efiboot.img', 'isolinux.bin',
+ 'isolinux.cfg'])]
+ relpath_mock.side_effect = iter(['isolinux/efiboot.img'])
+
+ self.assertRaises(exception.ImageCreationFailed,
+ images._mount_deploy_iso,
+ 'path/to/deployiso', 'tmpdir1')
+ mount_mock.assert_called_once_with('path/to/deployiso',
+ 'tmpdir1', '-o', 'loop')
+ walk_mock.assert_called_once_with('tmpdir1')
+ umount_mock.assert_called_once_with('tmpdir1')
+
+ @mock.patch.object(utils, 'mount', autospec=True)
+ def test__mount_deploy_iso_fail_with_ExecutionError(self, mount_mock):
+ mount_mock.side_effect = processutils.ProcessExecutionError
+ self.assertRaises(exception.ImageCreationFailed,
+ images._mount_deploy_iso,
+ 'path/to/deployiso', 'tmpdir1')
+
+ @mock.patch.object(images, '_umount_without_raise', autospec=True)
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'write_to_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(images, '_mount_deploy_iso', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(images, '_generate_cfg', autospec=True)
+ def test_create_isolinux_image_for_uefi(
+ self, gen_cfg_mock, tempdir_mock, mount_mock, execute_mock,
+ write_to_file_mock, create_root_fs_mock, umount_mock):
+
+ files_info = {
+ 'path/to/kernel': 'vmlinuz',
+ 'path/to/ramdisk': 'initrd',
+ CONF.isolinux_bin: 'isolinux/isolinux.bin',
+ 'path/to/grub': 'relpath/to/grub.cfg',
+ 'sourceabspath/to/efiboot.img': 'path/to/efiboot.img'
+ }
+ cfg = "cfg"
+ cfg_file = 'tmpdir/isolinux/isolinux.cfg'
+ grubcfg = "grubcfg"
+ grub_file = 'tmpdir/relpath/to/grub.cfg'
+ gen_cfg_mock.side_effect = iter([cfg, grubcfg])
+
+ params = ['a=b', 'c']
+ isolinux_options = {'kernel': '/vmlinuz',
+ 'ramdisk': '/initrd'}
+ grub_options = {'linux': '/vmlinuz',
+ 'initrd': '/initrd'}
+
+ uefi_path_info = {
+ 'sourceabspath/to/efiboot.img': 'path/to/efiboot.img',
+ 'path/to/grub': 'relpath/to/grub.cfg'}
+ grub_rel_path = 'relpath/to/grub.cfg'
+ e_img_rel_path = 'path/to/efiboot.img'
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ mock_file_handle1 = mock.MagicMock(spec=file)
+ mock_file_handle1.__enter__.return_value = 'mountdir'
+ tempdir_mock.side_effect = iter(
+ [mock_file_handle, mock_file_handle1])
+ mount_mock.return_value = (uefi_path_info,
+ e_img_rel_path, grub_rel_path)
+
+ images.create_isolinux_image_for_uefi('tgt_file', 'path/to/deploy_iso',
+ 'path/to/kernel',
+ 'path/to/ramdisk',
+ kernel_params=params)
+ mount_mock.assert_called_once_with('path/to/deploy_iso', 'mountdir')
+ create_root_fs_mock.assert_called_once_with('tmpdir', files_info)
+ gen_cfg_mock.assert_any_call(params, CONF.isolinux_config_template,
+ isolinux_options)
+ write_to_file_mock.assert_any_call(cfg_file, cfg)
+ gen_cfg_mock.assert_any_call(params, CONF.grub_config_template,
+ grub_options)
+ write_to_file_mock.assert_any_call(grub_file, grubcfg)
+ execute_mock.assert_called_once_with(
+ 'mkisofs', '-r', '-V', "VMEDIA_BOOT_ISO", '-cache-inodes', '-J',
+ '-l', '-no-emul-boot', '-boot-load-size', '4', '-boot-info-table',
+ '-b', 'isolinux/isolinux.bin', '-eltorito-alt-boot',
+ '-e', 'path/to/efiboot.img', '-no-emul-boot',
+ '-o', 'tgt_file', 'tmpdir')
+ umount_mock.assert_called_once_with('mountdir')
+
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'write_to_file', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(images, '_generate_cfg', autospec=True)
+ def test_create_isolinux_image_for_bios(
+ self, gen_cfg_mock, execute_mock, tempdir_mock,
+ write_to_file_mock, create_root_fs_mock):
+
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ tempdir_mock.return_value = mock_file_handle
+
+ cfg = "cfg"
+ cfg_file = 'tmpdir/isolinux/isolinux.cfg'
+ gen_cfg_mock.return_value = cfg
+
+ params = ['a=b', 'c']
+ isolinux_options = {'kernel': '/vmlinuz',
+ 'ramdisk': '/initrd'}
+
+ images.create_isolinux_image_for_bios('tgt_file',
+ 'path/to/kernel',
+ 'path/to/ramdisk',
+ kernel_params=params)
+
+ files_info = {
+ 'path/to/kernel': 'vmlinuz',
+ 'path/to/ramdisk': 'initrd',
+ CONF.isolinux_bin: 'isolinux/isolinux.bin'
+ }
+ create_root_fs_mock.assert_called_once_with('tmpdir', files_info)
+ gen_cfg_mock.assert_called_once_with(params,
+ CONF.isolinux_config_template,
+ isolinux_options)
+ write_to_file_mock.assert_called_once_with(cfg_file, cfg)
+ execute_mock.assert_called_once_with(
+ 'mkisofs', '-r', '-V',
+ "VMEDIA_BOOT_ISO", '-cache-inodes', '-J', '-l',
+ '-no-emul-boot', '-boot-load-size',
+ '4', '-boot-info-table', '-b', 'isolinux/isolinux.bin',
+ '-o', 'tgt_file', 'tmpdir')
+
+ @mock.patch.object(images, '_umount_without_raise', autospec=True)
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(os, 'walk', autospec=True)
+ def test_create_isolinux_image_uefi_rootfs_fails(self, walk_mock,
+ utils_mock,
+ tempdir_mock,
+ create_root_fs_mock,
+ umount_mock):
+
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ mock_file_handle1 = mock.MagicMock(spec=file)
+ mock_file_handle1.__enter__.return_value = 'mountdir'
+ tempdir_mock.side_effect = iter(
+ [mock_file_handle, mock_file_handle1])
+ create_root_fs_mock.side_effect = IOError
+
+ self.assertRaises(exception.ImageCreationFailed,
+ images.create_isolinux_image_for_uefi,
+ 'tgt_file', 'path/to/deployiso',
+ 'path/to/kernel',
+ 'path/to/ramdisk')
+ umount_mock.assert_called_once_with('mountdir')
+
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(os, 'walk', autospec=True)
+ def test_create_isolinux_image_bios_rootfs_fails(self, walk_mock,
+ utils_mock,
+ tempdir_mock,
+ create_root_fs_mock):
+ create_root_fs_mock.side_effect = IOError
+
+ self.assertRaises(exception.ImageCreationFailed,
+ images.create_isolinux_image_for_bios,
+ 'tgt_file', 'path/to/kernel',
+ 'path/to/ramdisk')
+
+ @mock.patch.object(images, '_umount_without_raise', autospec=True)
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'write_to_file', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(images, '_mount_deploy_iso', autospec=True)
+ @mock.patch.object(images, '_generate_cfg', autospec=True)
+ def test_create_isolinux_image_mkisofs_fails(self,
+ gen_cfg_mock,
+ mount_mock,
+ utils_mock,
+ tempdir_mock,
+ write_to_file_mock,
+ create_root_fs_mock,
+ umount_mock):
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ mock_file_handle1 = mock.MagicMock(spec=file)
+ mock_file_handle1.__enter__.return_value = 'mountdir'
+ tempdir_mock.side_effect = iter(
+ [mock_file_handle, mock_file_handle1])
+ mount_mock.return_value = ({'a': 'a'}, 'b', 'c')
+ utils_mock.side_effect = processutils.ProcessExecutionError
+
+ self.assertRaises(exception.ImageCreationFailed,
+ images.create_isolinux_image_for_uefi,
+ 'tgt_file', 'path/to/deployiso',
+ 'path/to/kernel',
+ 'path/to/ramdisk')
+ umount_mock.assert_called_once_with('mountdir')
+
+ @mock.patch.object(images, '_create_root_fs', autospec=True)
+ @mock.patch.object(utils, 'write_to_file', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(images, '_generate_cfg', autospec=True)
+ def test_create_isolinux_image_bios_mkisofs_fails(self,
+ gen_cfg_mock,
+ utils_mock,
+ tempdir_mock,
+ write_to_file_mock,
+ create_root_fs_mock):
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ tempdir_mock.return_value = mock_file_handle
+ utils_mock.side_effect = processutils.ProcessExecutionError
+
+ self.assertRaises(exception.ImageCreationFailed,
+ images.create_isolinux_image_for_bios,
+ 'tgt_file', 'path/to/kernel',
+ 'path/to/ramdisk')
+
+ @mock.patch.object(images, 'create_isolinux_image_for_uefi', autospec=True)
+ @mock.patch.object(images, 'fetch', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ def test_create_boot_iso_for_uefi(
+ self, tempdir_mock, fetch_images_mock, create_isolinux_mock):
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ tempdir_mock.return_value = mock_file_handle
+
+ images.create_boot_iso('ctx', 'output_file', 'kernel-uuid',
+ 'ramdisk-uuid', 'deploy_iso-uuid',
+ 'root-uuid', 'kernel-params', 'uefi')
+
+ fetch_images_mock.assert_any_call(
+ 'ctx', 'kernel-uuid', 'tmpdir/kernel-uuid')
+ fetch_images_mock.assert_any_call(
+ 'ctx', 'ramdisk-uuid', 'tmpdir/ramdisk-uuid')
+ fetch_images_mock.assert_any_call(
+ 'ctx', 'deploy_iso-uuid', 'tmpdir/deploy_iso-uuid')
+
+ params = ['root=UUID=root-uuid', 'kernel-params']
+ create_isolinux_mock.assert_called_once_with(
+ 'output_file', 'tmpdir/deploy_iso-uuid', 'tmpdir/kernel-uuid',
+ 'tmpdir/ramdisk-uuid', params)
+
+ @mock.patch.object(images, 'create_isolinux_image_for_uefi', autospec=True)
+ @mock.patch.object(images, 'fetch', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ def test_create_boot_iso_for_uefi_for_hrefs(
+ self, tempdir_mock, fetch_images_mock, create_isolinux_mock):
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ tempdir_mock.return_value = mock_file_handle
+
+ images.create_boot_iso('ctx', 'output_file', 'http://kernel-href',
+ 'http://ramdisk-href', 'http://deploy_iso-href',
+ 'root-uuid', 'kernel-params', 'uefi')
+ expected_calls = [mock.call('ctx', 'http://kernel-href',
+ 'tmpdir/kernel-href'),
+ mock.call('ctx', 'http://ramdisk-href',
+ 'tmpdir/ramdisk-href'),
+ mock.call('ctx', 'http://deploy_iso-href',
+ 'tmpdir/deploy_iso-href')]
+ fetch_images_mock.assert_has_calls(expected_calls)
+ params = ['root=UUID=root-uuid', 'kernel-params']
+ create_isolinux_mock.assert_called_once_with(
+ 'output_file', 'tmpdir/deploy_iso-href', 'tmpdir/kernel-href',
+ 'tmpdir/ramdisk-href', params)
+
+ @mock.patch.object(images, 'create_isolinux_image_for_bios', autospec=True)
+ @mock.patch.object(images, 'fetch', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ def test_create_boot_iso_for_bios(
+ self, tempdir_mock, fetch_images_mock, create_isolinux_mock):
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ tempdir_mock.return_value = mock_file_handle
+
+ images.create_boot_iso('ctx', 'output_file', 'kernel-uuid',
+ 'ramdisk-uuid', 'deploy_iso-uuid',
+ 'root-uuid', 'kernel-params', 'bios')
+
+ fetch_images_mock.assert_any_call(
+ 'ctx', 'kernel-uuid', 'tmpdir/kernel-uuid')
+ fetch_images_mock.assert_any_call(
+ 'ctx', 'ramdisk-uuid', 'tmpdir/ramdisk-uuid')
+        # NOTE(NobodyCam): the original assert checked that fetch_images
+        #                  was not called with certain parameters, but that
+        #                  check did not work, so instead assert that the
+        #                  mock was called exactly twice, which validates
+        #                  the two asserts above.
+ self.assertEqual(2, fetch_images_mock.call_count)
+
+ params = ['root=UUID=root-uuid', 'kernel-params']
+ create_isolinux_mock.assert_called_once_with('output_file',
+ 'tmpdir/kernel-uuid',
+ 'tmpdir/ramdisk-uuid',
+ params)
+
+ @mock.patch.object(images, 'create_isolinux_image_for_bios', autospec=True)
+ @mock.patch.object(images, 'fetch', autospec=True)
+ @mock.patch.object(utils, 'tempdir', autospec=True)
+ def test_create_boot_iso_for_bios_with_no_boot_mode(self, tempdir_mock,
+ fetch_images_mock,
+ create_isolinux_mock):
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'tmpdir'
+ tempdir_mock.return_value = mock_file_handle
+
+ images.create_boot_iso('ctx', 'output_file', 'kernel-uuid',
+ 'ramdisk-uuid', 'deploy_iso-uuid',
+ 'root-uuid', 'kernel-params', None)
+
+ fetch_images_mock.assert_any_call(
+ 'ctx', 'kernel-uuid', 'tmpdir/kernel-uuid')
+ fetch_images_mock.assert_any_call(
+ 'ctx', 'ramdisk-uuid', 'tmpdir/ramdisk-uuid')
+
+ params = ['root=UUID=root-uuid', 'kernel-params']
+ create_isolinux_mock.assert_called_once_with('output_file',
+ 'tmpdir/kernel-uuid',
+ 'tmpdir/ramdisk-uuid',
+ params)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test_get_glance_image_properties_no_such_prop(self,
+ image_service_mock):
+
+ prop_dict = {'properties': {'p1': 'v1',
+ 'p2': 'v2'}}
+
+ image_service_obj_mock = image_service_mock.return_value
+ image_service_obj_mock.show.return_value = prop_dict
+
+ ret_val = images.get_image_properties('con', 'uuid',
+ ['p1', 'p2', 'p3'])
+ image_service_mock.assert_called_once_with('uuid', context='con')
+ image_service_obj_mock.show.assert_called_once_with('uuid')
+ self.assertEqual({'p1': 'v1',
+ 'p2': 'v2',
+ 'p3': None}, ret_val)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test_get_glance_image_properties_default_all(
+ self, image_service_mock):
+
+ prop_dict = {'properties': {'p1': 'v1',
+ 'p2': 'v2'}}
+
+ image_service_obj_mock = image_service_mock.return_value
+ image_service_obj_mock.show.return_value = prop_dict
+
+ ret_val = images.get_image_properties('con', 'uuid')
+ image_service_mock.assert_called_once_with('uuid', context='con')
+ image_service_obj_mock.show.assert_called_once_with('uuid')
+ self.assertEqual({'p1': 'v1',
+ 'p2': 'v2'}, ret_val)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test_get_glance_image_properties_with_prop_subset(
+ self, image_service_mock):
+
+ prop_dict = {'properties': {'p1': 'v1',
+ 'p2': 'v2',
+ 'p3': 'v3'}}
+
+ image_service_obj_mock = image_service_mock.return_value
+ image_service_obj_mock.show.return_value = prop_dict
+
+ ret_val = images.get_image_properties('con', 'uuid',
+ ['p1', 'p3'])
+ image_service_mock.assert_called_once_with('uuid', context='con')
+ image_service_obj_mock.show.assert_called_once_with('uuid')
+ self.assertEqual({'p1': 'v1',
+ 'p3': 'v3'}, ret_val)
+
+ @mock.patch.object(image_service, 'GlanceImageService', autospec=True)
+ def test_get_temp_url_for_glance_image(self, image_service_mock):
+
+ direct_url = 'swift+http://host/v1/AUTH_xx/con/obj'
+ image_info = {'id': 'qwe', 'properties': {'direct_url': direct_url}}
+ glance_service_mock = image_service_mock.return_value
+ glance_service_mock.swift_temp_url.return_value = 'temp-url'
+ glance_service_mock.show.return_value = image_info
+
+ temp_url = images.get_temp_url_for_glance_image('context',
+ 'glance_uuid')
+
+ glance_service_mock.show.assert_called_once_with('glance_uuid')
+ self.assertEqual('temp-url', temp_url)
diff --git a/ironic/tests/unit/common/test_keystone.py b/ironic/tests/unit/common/test_keystone.py
new file mode 100644
index 000000000..913b7b4c4
--- /dev/null
+++ b/ironic/tests/unit/common/test_keystone.py
@@ -0,0 +1,185 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from keystoneclient import exceptions as ksexception
+import mock
+
+from ironic.common import exception
+from ironic.common import keystone
+from ironic.tests.unit import base
+
+
+class FakeCatalog(object):
+ def url_for(self, **kwargs):
+ return 'fake-url'
+
+
+class FakeAccessInfo(object):
+ def will_expire_soon(self):
+ pass
+
+
+class FakeClient(object):
+ def __init__(self, **kwargs):
+ self.service_catalog = FakeCatalog()
+ self.auth_ref = FakeAccessInfo()
+
+ def has_service_catalog(self):
+ return True
+
+
+class KeystoneTestCase(base.TestCase):
+
+ def setUp(self):
+ super(KeystoneTestCase, self).setUp()
+ self.config(group='keystone_authtoken',
+ auth_uri='http://127.0.0.1:9898/',
+ admin_user='fake', admin_password='fake',
+ admin_tenant_name='fake')
+ self.config(group='keystone', region_name='fake')
+ keystone._KS_CLIENT = None
+
+ def test_failure_authorization(self):
+ self.assertRaises(exception.KeystoneFailure, keystone.get_service_url)
+
+ @mock.patch.object(FakeCatalog, 'url_for', autospec=True)
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_get_url(self, mock_ks, mock_uf):
+ fake_url = 'http://127.0.0.1:6385'
+ mock_uf.return_value = fake_url
+ mock_ks.return_value = FakeClient()
+ res = keystone.get_service_url()
+ self.assertEqual(fake_url, res)
+
+ @mock.patch.object(FakeCatalog, 'url_for', autospec=True)
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_url_not_found(self, mock_ks, mock_uf):
+ mock_uf.side_effect = ksexception.EndpointNotFound
+ mock_ks.return_value = FakeClient()
+ self.assertRaises(exception.CatalogNotFound, keystone.get_service_url)
+
+ @mock.patch.object(FakeClient, 'has_service_catalog', autospec=True)
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_no_catalog(self, mock_ks, mock_hsc):
+ mock_hsc.return_value = False
+ mock_ks.return_value = FakeClient()
+ self.assertRaises(exception.KeystoneFailure, keystone.get_service_url)
+
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_unauthorized(self, mock_ks):
+ mock_ks.side_effect = ksexception.Unauthorized
+ self.assertRaises(exception.KeystoneUnauthorized,
+ keystone.get_service_url)
+
+ def test_get_service_url_fail_missing_auth_uri(self):
+ self.config(group='keystone_authtoken', auth_uri=None)
+ self.assertRaises(exception.KeystoneFailure,
+ keystone.get_service_url)
+
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_get_service_url_versionless_v2(self, mock_ks):
+ mock_ks.return_value = FakeClient()
+ self.config(group='keystone_authtoken', auth_uri='http://127.0.0.1')
+ expected_url = 'http://127.0.0.1/v2.0'
+ keystone.get_service_url()
+ mock_ks.assert_called_once_with(username='fake', password='fake',
+ tenant_name='fake',
+ region_name='fake',
+ auth_url=expected_url)
+
+ @mock.patch('keystoneclient.v3.client.Client', autospec=True)
+ def test_get_service_url_versionless_v3(self, mock_ks):
+ mock_ks.return_value = FakeClient()
+ self.config(group='keystone_authtoken', auth_version='v3.0',
+ auth_uri='http://127.0.0.1')
+ expected_url = 'http://127.0.0.1/v3'
+ keystone.get_service_url()
+ mock_ks.assert_called_once_with(username='fake', password='fake',
+ tenant_name='fake',
+ region_name='fake',
+ auth_url=expected_url)
+
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_get_service_url_version_override(self, mock_ks):
+ mock_ks.return_value = FakeClient()
+ self.config(group='keystone_authtoken',
+ auth_uri='http://127.0.0.1/v2.0/')
+ expected_url = 'http://127.0.0.1/v2.0'
+ keystone.get_service_url()
+ mock_ks.assert_called_once_with(username='fake', password='fake',
+ tenant_name='fake',
+ region_name='fake',
+ auth_url=expected_url)
+
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_get_admin_auth_token(self, mock_ks):
+ fake_client = FakeClient()
+ fake_client.auth_token = '123456'
+ mock_ks.return_value = fake_client
+ self.assertEqual('123456', keystone.get_admin_auth_token())
+
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_get_region_name_v2(self, mock_ks):
+ mock_ks.return_value = FakeClient()
+ self.config(group='keystone', region_name='fake_region')
+ expected_url = 'http://127.0.0.1:9898/v2.0'
+ expected_region = 'fake_region'
+ keystone.get_service_url()
+ mock_ks.assert_called_once_with(username='fake', password='fake',
+ tenant_name='fake',
+ region_name=expected_region,
+ auth_url=expected_url)
+
+ @mock.patch('keystoneclient.v3.client.Client', autospec=True)
+ def test_get_region_name_v3(self, mock_ks):
+ mock_ks.return_value = FakeClient()
+ self.config(group='keystone', region_name='fake_region')
+ self.config(group='keystone_authtoken', auth_version='v3.0')
+ expected_url = 'http://127.0.0.1:9898/v3'
+ expected_region = 'fake_region'
+ keystone.get_service_url()
+ mock_ks.assert_called_once_with(username='fake', password='fake',
+ tenant_name='fake',
+ region_name=expected_region,
+ auth_url=expected_url)
+
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_cache_client_init(self, mock_ks):
+ fake_client = FakeClient()
+ mock_ks.return_value = fake_client
+ self.assertEqual(fake_client, keystone._get_ksclient())
+ self.assertEqual(fake_client, keystone._KS_CLIENT)
+ self.assertEqual(1, mock_ks.call_count)
+
+ @mock.patch.object(FakeAccessInfo, 'will_expire_soon', autospec=True)
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_cache_client_cached(self, mock_ks, mock_expire):
+ mock_expire.return_value = False
+ fake_client = FakeClient()
+ keystone._KS_CLIENT = fake_client
+ self.assertEqual(fake_client, keystone._get_ksclient())
+ self.assertEqual(fake_client, keystone._KS_CLIENT)
+ self.assertFalse(mock_ks.called)
+
+ @mock.patch.object(FakeAccessInfo, 'will_expire_soon', autospec=True)
+ @mock.patch('keystoneclient.v2_0.client.Client', autospec=True)
+ def test_cache_client_expired(self, mock_ks, mock_expire):
+ mock_expire.return_value = True
+ fake_client = FakeClient()
+ keystone._KS_CLIENT = fake_client
+ new_client = FakeClient()
+ mock_ks.return_value = new_client
+ self.assertEqual(new_client, keystone._get_ksclient())
+ self.assertEqual(new_client, keystone._KS_CLIENT)
+ self.assertEqual(1, mock_ks.call_count)
diff --git a/ironic/tests/unit/common/test_network.py b/ironic/tests/unit/common/test_network.py
new file mode 100644
index 000000000..34990c253
--- /dev/null
+++ b/ironic/tests/unit/common/test_network.py
@@ -0,0 +1,64 @@
+# Copyright 2014 Rackspace Inc.
+# All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_utils import uuidutils
+
+from ironic.common import network
+from ironic.conductor import task_manager
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as object_utils
+
+
+class TestNetwork(db_base.DbTestCase):
+
+ def setUp(self):
+ super(TestNetwork, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake')
+ self.node = object_utils.create_test_node(self.context)
+
+ def test_get_node_vif_ids_no_ports(self):
+ expected = {}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ result = network.get_node_vif_ids(task)
+ self.assertEqual(expected, result)
+
+ def test_get_node_vif_ids_one_port(self):
+ port1 = db_utils.create_test_port(node_id=self.node.id,
+ address='aa:bb:cc:dd:ee:ff',
+ uuid=uuidutils.generate_uuid(),
+ extra={'vif_port_id': 'test-vif-A'},
+ driver='fake')
+ expected = {port1.uuid: 'test-vif-A'}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ result = network.get_node_vif_ids(task)
+ self.assertEqual(expected, result)
+
+ def test_get_node_vif_ids_two_ports(self):
+ port1 = db_utils.create_test_port(node_id=self.node.id,
+ address='aa:bb:cc:dd:ee:ff',
+ uuid=uuidutils.generate_uuid(),
+ extra={'vif_port_id': 'test-vif-A'},
+ driver='fake')
+ port2 = db_utils.create_test_port(node_id=self.node.id,
+ address='dd:ee:ff:aa:bb:cc',
+ uuid=uuidutils.generate_uuid(),
+ extra={'vif_port_id': 'test-vif-B'},
+ driver='fake')
+ expected = {port1.uuid: 'test-vif-A', port2.uuid: 'test-vif-B'}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ result = network.get_node_vif_ids(task)
+ self.assertEqual(expected, result)
diff --git a/ironic/tests/unit/common/test_policy.py b/ironic/tests/unit/common/test_policy.py
new file mode 100644
index 000000000..fda4a5aaa
--- /dev/null
+++ b/ironic/tests/unit/common/test_policy.py
@@ -0,0 +1,74 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright 2013 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from ironic.common import policy
+from ironic.tests.unit import base
+
+
+class PolicyTestCase(base.TestCase):
+ """Tests whether the configuration of the policy engine is corect."""
+
+ def test_admin_api(self):
+ creds = ({'roles': [u'admin']},
+ {'roles': ['administrator']},
+ {'roles': ['admin', 'administrator']})
+
+ for c in creds:
+ self.assertTrue(policy.enforce('admin_api', c, c))
+
+ def test_public_api(self):
+ creds = {'is_public_api': 'True'}
+ self.assertTrue(policy.enforce('public_api', creds, creds))
+
+ def test_trusted_call(self):
+ creds = ({'roles': ['admin']},
+ {'is_public_api': 'True'},
+ {'roles': ['admin'], 'is_public_api': 'True'},
+ {'roles': ['Member'], 'is_public_api': 'True'})
+
+ for c in creds:
+ self.assertTrue(policy.enforce('trusted_call', c, c))
+
+ def test_show_password(self):
+ creds = {'roles': [u'admin'], 'tenant': 'admin'}
+ self.assertTrue(policy.enforce('show_password', creds, creds))
+
+
+class PolicyTestCaseNegative(base.TestCase):
+ """Tests whether the configuration of the policy engine is corect."""
+
+ def test_admin_api(self):
+ creds = {'roles': ['Member']}
+ self.assertFalse(policy.enforce('admin_api', creds, creds))
+
+ def test_public_api(self):
+ creds = ({'is_public_api': 'False'}, {})
+
+ for c in creds:
+ self.assertFalse(policy.enforce('public_api', c, c))
+
+ def test_trusted_call(self):
+ creds = ({'roles': ['Member']},
+ {'is_public_api': 'False'},
+ {'roles': ['Member'], 'is_public_api': 'False'})
+
+ for c in creds:
+ self.assertFalse(policy.enforce('trusted_call', c, c))
+
+ def test_show_password(self):
+ creds = {'roles': [u'admin'], 'tenant': 'demo'}
+ self.assertFalse(policy.enforce('show_password', creds, creds))
diff --git a/ironic/tests/unit/common/test_pxe_utils.py b/ironic/tests/unit/common/test_pxe_utils.py
new file mode 100644
index 000000000..0e086c1dc
--- /dev/null
+++ b/ironic/tests/unit/common/test_pxe_utils.py
@@ -0,0 +1,498 @@
+#
+# Copyright 2014 Rackspace, Inc
+# All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import mock
+from oslo_config import cfg
+import six
+
+from ironic.common import pxe_utils
+from ironic.conductor import task_manager
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.objects import utils as object_utils
+
+CONF = cfg.CONF
+
+
+class TestPXEUtils(db_base.DbTestCase):
+
+ def setUp(self):
+ super(TestPXEUtils, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake")
+
+ common_pxe_options = {
+ 'deployment_aki_path': u'/tftpboot/1be26c0b-03f2-4d2e-ae87-'
+ u'c02d7f33c123/deploy_kernel',
+ 'aki_path': u'/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/'
+ u'kernel',
+ 'pxe_append_params': 'test_param',
+ 'deployment_ari_path': u'/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7'
+ u'f33c123/deploy_ramdisk',
+ 'root_device': 'vendor=fake,size=123',
+ 'ipa-api-url': 'http://192.168.122.184:6385',
+ }
+
+ self.pxe_options = {
+ 'deployment_key': '0123456789ABCDEFGHIJKLMNOPQRSTUV',
+ 'ari_path': u'/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/'
+ u'ramdisk',
+ 'iscsi_target_iqn': u'iqn-1be26c0b-03f2-4d2e-ae87-c02d7f33'
+ u'c123',
+ 'deployment_id': u'1be26c0b-03f2-4d2e-ae87-c02d7f33c123',
+ 'ironic_api_url': 'http://192.168.122.184:6385',
+ 'disk': 'cciss/c0d0,sda,hda,vda',
+ 'boot_option': 'netboot',
+ 'ipa-driver-name': 'pxe_ssh',
+ }
+ self.pxe_options.update(common_pxe_options)
+
+ self.pxe_options_bios = {
+ 'boot_mode': 'bios',
+ }
+ self.pxe_options_bios.update(self.pxe_options)
+
+ self.pxe_options_uefi = {
+ 'boot_mode': 'uefi',
+ }
+ self.pxe_options_uefi.update(self.pxe_options)
+
+ self.agent_pxe_options = {
+ 'ipa-driver-name': 'agent_ipmitool',
+ }
+ self.agent_pxe_options.update(common_pxe_options)
+
+ self.ipxe_options = self.pxe_options.copy()
+ self.ipxe_options.update({
+ 'deployment_aki_path': 'http://1.2.3.4:1234/deploy_kernel',
+ 'deployment_ari_path': 'http://1.2.3.4:1234/deploy_ramdisk',
+ 'aki_path': 'http://1.2.3.4:1234/kernel',
+ 'ari_path': 'http://1.2.3.4:1234/ramdisk',
+ })
+
+ self.node = object_utils.create_test_node(self.context)
+
+ def test__build_pxe_config(self):
+
+ rendered_template = pxe_utils._build_pxe_config(
+ self.pxe_options_bios, CONF.pxe.pxe_config_template,
+ '{{ ROOT }}', '{{ DISK_IDENTIFIER }}')
+
+ expected_template = open(
+ 'ironic/tests/unit/drivers/pxe_config.template').read().rstrip()
+
+ self.assertEqual(six.text_type(expected_template), rendered_template)
+
+ def test__build_pxe_config_with_agent(self):
+
+ rendered_template = pxe_utils._build_pxe_config(
+ self.agent_pxe_options, CONF.agent.agent_pxe_config_template,
+ '{{ ROOT }}', '{{ DISK_IDENTIFIER }}')
+
+ template_file = 'ironic/tests/unit/drivers/agent_pxe_config.template'
+ expected_template = open(template_file).read().rstrip()
+
+ self.assertEqual(six.text_type(expected_template), rendered_template)
+
+ def test__build_ipxe_config(self):
+        # NOTE(lucasagomes): iPXE is just an extension of the PXE driver;
+        # it doesn't have its own configuration option for the template.
+ # More info:
+ # http://docs.openstack.org/developer/ironic/deploy/install-guide.html
+ self.config(
+ pxe_config_template='ironic/drivers/modules/ipxe_config.template',
+ group='pxe'
+ )
+ self.config(http_url='http://1.2.3.4:1234', group='deploy')
+ rendered_template = pxe_utils._build_pxe_config(
+ self.ipxe_options, CONF.pxe.pxe_config_template,
+ '{{ ROOT }}', '{{ DISK_IDENTIFIER }}')
+
+ expected_template = open(
+ 'ironic/tests/unit/drivers/ipxe_config.template').read().rstrip()
+
+ self.assertEqual(six.text_type(expected_template), rendered_template)
+
+ def test__build_elilo_config(self):
+ pxe_opts = self.pxe_options
+ pxe_opts['boot_mode'] = 'uefi'
+ rendered_template = pxe_utils._build_pxe_config(
+ pxe_opts, CONF.pxe.uefi_pxe_config_template,
+ '{{ ROOT }}', '{{ DISK_IDENTIFIER }}')
+
+ expected_template = open(
+ 'ironic/tests/unit/drivers/elilo_efi_pxe_config.template'
+ ).read().rstrip()
+
+ self.assertEqual(six.text_type(expected_template), rendered_template)
+
+ def test__build_grub_config(self):
+ pxe_opts = self.pxe_options
+ pxe_opts['boot_mode'] = 'uefi'
+ pxe_opts['tftp_server'] = '192.0.2.1'
+ grub_tmplte = "ironic/drivers/modules/pxe_grub_config.template"
+ rendered_template = pxe_utils._build_pxe_config(
+ pxe_opts, grub_tmplte, '(( ROOT ))', '(( DISK_IDENTIFIER ))')
+
+ template_file = 'ironic/tests/unit/drivers/pxe_grub_config.template'
+ expected_template = open(template_file).read().rstrip()
+
+ self.assertEqual(six.text_type(expected_template), rendered_template)
+
+ @mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
+ @mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
+ @mock.patch('ironic.drivers.utils.get_node_mac_addresses', autospec=True)
+ def test__write_mac_pxe_configs(self, get_macs_mock, unlink_mock,
+ create_link_mock):
+ macs = [
+ '00:11:22:33:44:55:66',
+ '00:11:22:33:44:55:67'
+ ]
+ get_macs_mock.return_value = macs
+ create_link_calls = [
+ mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
+ '/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-66'),
+ mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
+ '/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-67')
+ ]
+ unlink_calls = [
+ mock.call('/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-66'),
+ mock.call('/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-67'),
+ ]
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pxe_utils._link_mac_pxe_configs(task)
+
+ unlink_mock.assert_has_calls(unlink_calls)
+ create_link_mock.assert_has_calls(create_link_calls)
+
+ @mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
+ @mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
+ @mock.patch('ironic.drivers.utils.get_node_mac_addresses', autospec=True)
+ def test__write_mac_ipxe_configs(self, get_macs_mock, unlink_mock,
+ create_link_mock):
+ self.config(ipxe_enabled=True, group='pxe')
+ macs = [
+ '00:11:22:33:44:55:66',
+ '00:11:22:33:44:55:67'
+ ]
+ get_macs_mock.return_value = macs
+ create_link_calls = [
+ mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
+ '/httpboot/pxelinux.cfg/00-11-22-33-44-55-66'),
+ mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
+ '/httpboot/pxelinux.cfg/00112233445566'),
+ mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
+ '/httpboot/pxelinux.cfg/00-11-22-33-44-55-67'),
+ mock.call(u'../1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
+ '/httpboot/pxelinux.cfg/00112233445567'),
+ ]
+ unlink_calls = [
+ mock.call('/httpboot/pxelinux.cfg/00-11-22-33-44-55-66'),
+ mock.call('/httpboot/pxelinux.cfg/00112233445566'),
+ mock.call('/httpboot/pxelinux.cfg/00-11-22-33-44-55-67'),
+ mock.call('/httpboot/pxelinux.cfg/00112233445567'),
+ ]
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pxe_utils._link_mac_pxe_configs(task)
+
+ unlink_mock.assert_has_calls(unlink_calls)
+ create_link_mock.assert_has_calls(create_link_calls)
+
+ @mock.patch('ironic.common.utils.create_link_without_raise', autospec=True)
+ @mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
+ @mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider',
+ autospec=True)
+ def test__link_ip_address_pxe_configs(self, provider_mock, unlink_mock,
+ create_link_mock):
+ ip_address = '10.10.0.1'
+ address = "aa:aa:aa:aa:aa:aa"
+ object_utils.create_test_port(self.context, node_id=self.node.id,
+ address=address)
+
+ provider_mock.get_ip_addresses.return_value = [ip_address]
+ create_link_calls = [
+ mock.call(u'1be26c0b-03f2-4d2e-ae87-c02d7f33c123/config',
+ u'/tftpboot/10.10.0.1.conf'),
+ ]
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pxe_utils._link_ip_address_pxe_configs(task, False)
+
+ unlink_mock.assert_called_once_with('/tftpboot/10.10.0.1.conf')
+ create_link_mock.assert_has_calls(create_link_calls)
+
+ @mock.patch('ironic.common.utils.write_to_file', autospec=True)
+ @mock.patch.object(pxe_utils, '_build_pxe_config', autospec=True)
+ @mock.patch('oslo_utils.fileutils.ensure_tree', autospec=True)
+ def test_create_pxe_config(self, ensure_tree_mock, build_mock,
+ write_mock):
+ build_mock.return_value = self.pxe_options_bios
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pxe_utils.create_pxe_config(task, self.pxe_options_bios,
+ CONF.pxe.pxe_config_template)
+ build_mock.assert_called_with(self.pxe_options_bios,
+ CONF.pxe.pxe_config_template,
+ '{{ ROOT }}',
+ '{{ DISK_IDENTIFIER }}')
+ ensure_calls = [
+ mock.call(os.path.join(CONF.pxe.tftp_root, self.node.uuid)),
+ mock.call(os.path.join(CONF.pxe.tftp_root, 'pxelinux.cfg'))
+ ]
+ ensure_tree_mock.assert_has_calls(ensure_calls)
+
+ pxe_cfg_file_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
+ write_mock.assert_called_with(pxe_cfg_file_path, self.pxe_options_bios)
+
+ @mock.patch('ironic.common.pxe_utils._link_ip_address_pxe_configs',
+ autospec=True)
+ @mock.patch('ironic.common.utils.write_to_file', autospec=True)
+ @mock.patch('ironic.common.pxe_utils._build_pxe_config', autospec=True)
+ @mock.patch('oslo_utils.fileutils.ensure_tree', autospec=True)
+ def test_create_pxe_config_uefi_elilo(self, ensure_tree_mock, build_mock,
+ write_mock, link_ip_configs_mock):
+ build_mock.return_value = self.pxe_options_uefi
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.properties['capabilities'] = 'boot_mode:uefi'
+ pxe_utils.create_pxe_config(task, self.pxe_options_uefi,
+ CONF.pxe.uefi_pxe_config_template)
+
+ ensure_calls = [
+ mock.call(os.path.join(CONF.pxe.tftp_root, self.node.uuid)),
+ mock.call(os.path.join(CONF.pxe.tftp_root, 'pxelinux.cfg'))
+ ]
+ ensure_tree_mock.assert_has_calls(ensure_calls)
+ build_mock.assert_called_with(self.pxe_options_uefi,
+ CONF.pxe.uefi_pxe_config_template,
+ '{{ ROOT }}',
+ '{{ DISK_IDENTIFIER }}')
+ link_ip_configs_mock.assert_called_once_with(task, True)
+
+ pxe_cfg_file_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
+ write_mock.assert_called_with(pxe_cfg_file_path, self.pxe_options_uefi)
+
+ @mock.patch('ironic.common.pxe_utils._link_ip_address_pxe_configs',
+ autospec=True)
+ @mock.patch('ironic.common.utils.write_to_file', autospec=True)
+ @mock.patch('ironic.common.pxe_utils._build_pxe_config', autospec=True)
+ @mock.patch('oslo_utils.fileutils.ensure_tree', autospec=True)
+ def test_create_pxe_config_uefi_grub(self, ensure_tree_mock, build_mock,
+ write_mock, link_ip_configs_mock):
+ build_mock.return_value = self.pxe_options_uefi
+ grub_tmplte = "ironic/drivers/modules/pxe_grub_config.template"
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.properties['capabilities'] = 'boot_mode:uefi'
+ pxe_utils.create_pxe_config(task, self.pxe_options_uefi,
+ grub_tmplte)
+
+ ensure_calls = [
+ mock.call(os.path.join(CONF.pxe.tftp_root, self.node.uuid)),
+ mock.call(os.path.join(CONF.pxe.tftp_root, 'pxelinux.cfg'))
+ ]
+ ensure_tree_mock.assert_has_calls(ensure_calls)
+ build_mock.assert_called_with(self.pxe_options_uefi,
+ grub_tmplte,
+ '(( ROOT ))',
+ '(( DISK_IDENTIFIER ))')
+ link_ip_configs_mock.assert_called_once_with(task, False)
+
+ pxe_cfg_file_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
+ write_mock.assert_called_with(pxe_cfg_file_path, self.pxe_options_uefi)
+
+ @mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
+ @mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
+ def test_clean_up_pxe_config(self, unlink_mock, rmtree_mock):
+ address = "aa:aa:aa:aa:aa:aa"
+ object_utils.create_test_port(self.context, node_id=self.node.id,
+ address=address)
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pxe_utils.clean_up_pxe_config(task)
+
+ unlink_mock.assert_called_once_with("/tftpboot/pxelinux.cfg/01-%s"
+ % address.replace(':', '-'))
+ rmtree_mock.assert_called_once_with(
+ os.path.join(CONF.pxe.tftp_root, self.node.uuid))
+
+ def test__get_pxe_mac_path(self):
+ mac = '00:11:22:33:44:55:66'
+ self.assertEqual('/tftpboot/pxelinux.cfg/01-00-11-22-33-44-55-66',
+ pxe_utils._get_pxe_mac_path(mac))
+
+ def test__get_pxe_mac_path_ipxe(self):
+ self.config(ipxe_enabled=True, group='pxe')
+ self.config(http_root='/httpboot', group='deploy')
+ mac = '00:11:22:33:AA:BB:CC'
+ self.assertEqual('/httpboot/pxelinux.cfg/00-11-22-33-aa-bb-cc',
+ pxe_utils._get_pxe_mac_path(mac))
+
+ def test__get_pxe_ip_address_path(self):
+ ipaddress = '10.10.0.1'
+ self.assertEqual('/tftpboot/10.10.0.1.conf',
+ pxe_utils._get_pxe_ip_address_path(ipaddress, False))
+
+ def test_get_root_dir(self):
+ expected_dir = '/tftproot'
+ self.config(ipxe_enabled=False, group='pxe')
+ self.config(tftp_root=expected_dir, group='pxe')
+ self.assertEqual(expected_dir, pxe_utils.get_root_dir())
+
+ def test_get_root_dir_ipxe(self):
+ expected_dir = '/httpboot'
+ self.config(ipxe_enabled=True, group='pxe')
+ self.config(http_root=expected_dir, group='deploy')
+ self.assertEqual(expected_dir, pxe_utils.get_root_dir())
+
+ def test_get_pxe_config_file_path(self):
+ self.assertEqual(os.path.join(CONF.pxe.tftp_root,
+ self.node.uuid,
+ 'config'),
+ pxe_utils.get_pxe_config_file_path(self.node.uuid))
+
+ def test_dhcp_options_for_instance(self):
+ self.config(tftp_server='192.0.2.1', group='pxe')
+ self.config(pxe_bootfile_name='fake-bootfile', group='pxe')
+ expected_info = [{'opt_name': 'bootfile-name',
+ 'opt_value': 'fake-bootfile'},
+ {'opt_name': 'server-ip-address',
+ 'opt_value': '192.0.2.1'},
+ {'opt_name': 'tftp-server',
+ 'opt_value': '192.0.2.1'}
+ ]
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertEqual(expected_info,
+ pxe_utils.dhcp_options_for_instance(task))
+
+ def _test_get_deploy_kr_info(self, expected_dir):
+ node_uuid = 'fake-node'
+ driver_info = {
+ 'deploy_kernel': 'glance://deploy-kernel',
+ 'deploy_ramdisk': 'glance://deploy-ramdisk',
+ }
+
+ expected = {
+ 'deploy_kernel': ('glance://deploy-kernel',
+ expected_dir + '/fake-node/deploy_kernel'),
+ 'deploy_ramdisk': ('glance://deploy-ramdisk',
+ expected_dir + '/fake-node/deploy_ramdisk'),
+ }
+
+ kr_info = pxe_utils.get_deploy_kr_info(node_uuid, driver_info)
+ self.assertEqual(expected, kr_info)
+
+ def test_get_deploy_kr_info(self):
+ expected_dir = '/tftp'
+ self.config(tftp_root=expected_dir, group='pxe')
+ self._test_get_deploy_kr_info(expected_dir)
+
+ def test_get_deploy_kr_info_ipxe(self):
+ expected_dir = '/http'
+ self.config(ipxe_enabled=True, group='pxe')
+ self.config(http_root=expected_dir, group='deploy')
+ self._test_get_deploy_kr_info(expected_dir)
+
+ def test_get_deploy_kr_info_bad_driver_info(self):
+ self.config(tftp_root='/tftp', group='pxe')
+ node_uuid = 'fake-node'
+ driver_info = {}
+ self.assertRaises(KeyError,
+ pxe_utils.get_deploy_kr_info,
+ node_uuid,
+ driver_info)
+
+ def test_dhcp_options_for_instance_ipxe(self):
+ self.config(tftp_server='192.0.2.1', group='pxe')
+ self.config(pxe_bootfile_name='fake-bootfile', group='pxe')
+ self.config(ipxe_enabled=True, group='pxe')
+ self.config(http_url='http://192.0.3.2:1234', group='deploy')
+ self.config(ipxe_boot_script='/test/boot.ipxe', group='pxe')
+
+ self.config(dhcp_provider='isc', group='dhcp')
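+        # Non-Neutron providers (isc here) get raw option-number syntax
+        # ('!175,bootfile-name'), while the Neutron case below uses
+        # dnsmasq-style 'tag:' prefixes to distinguish iPXE clients.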
+ expected_boot_script_url = 'http://192.0.3.2:1234/boot.ipxe'
+ expected_info = [{'opt_name': '!175,bootfile-name',
+ 'opt_value': 'fake-bootfile'},
+ {'opt_name': 'server-ip-address',
+ 'opt_value': '192.0.2.1'},
+ {'opt_name': 'tftp-server',
+ 'opt_value': '192.0.2.1'},
+ {'opt_name': 'bootfile-name',
+ 'opt_value': expected_boot_script_url}]
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+            six.assertCountEqual(self, expected_info,
+                                 pxe_utils.dhcp_options_for_instance(task))
+
+ self.config(dhcp_provider='neutron', group='dhcp')
+ expected_boot_script_url = 'http://192.0.3.2:1234/boot.ipxe'
+ expected_info = [{'opt_name': 'tag:!ipxe,bootfile-name',
+ 'opt_value': 'fake-bootfile'},
+ {'opt_name': 'server-ip-address',
+ 'opt_value': '192.0.2.1'},
+ {'opt_name': 'tftp-server',
+ 'opt_value': '192.0.2.1'},
+ {'opt_name': 'tag:ipxe,bootfile-name',
+ 'opt_value': expected_boot_script_url}]
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+            six.assertCountEqual(self, expected_info,
+                                 pxe_utils.dhcp_options_for_instance(task))
+
+ @mock.patch('ironic.common.utils.rmtree_without_raise', autospec=True)
+ @mock.patch('ironic.common.utils.unlink_without_raise', autospec=True)
+ @mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider')
+ def test_clean_up_pxe_config_uefi(self, provider_mock, unlink_mock,
+ rmtree_mock):
+ ip_address = '10.10.0.1'
+ address = "aa:aa:aa:aa:aa:aa"
+ properties = {'capabilities': 'boot_mode:uefi'}
+ object_utils.create_test_port(self.context, node_id=self.node.id,
+ address=address)
+
+ provider_mock.get_ip_addresses.return_value = [ip_address]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.properties = properties
+ pxe_utils.clean_up_pxe_config(task)
+
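+        # UEFI cleanup removes both the dotted-quad config and the
+        # hex-encoded variant (0A0A0001 == 10.10.0.1).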
+ unlink_calls = [
+ mock.call('/tftpboot/10.10.0.1.conf'),
+ mock.call('/tftpboot/0A0A0001.conf')
+ ]
+ unlink_mock.assert_has_calls(unlink_calls)
+ rmtree_mock.assert_called_once_with(
+ os.path.join(CONF.pxe.tftp_root, self.node.uuid))
+
+ @mock.patch('ironic.common.utils.rmtree_without_raise')
+ @mock.patch('ironic.common.utils.unlink_without_raise')
+ @mock.patch('ironic.common.dhcp_factory.DHCPFactory.provider')
+ def test_clean_up_pxe_config_uefi_instance_info(self,
+ provider_mock, unlink_mock,
+ rmtree_mock):
+ ip_address = '10.10.0.1'
+ address = "aa:aa:aa:aa:aa:aa"
+ object_utils.create_test_port(self.context, node_id=self.node.id,
+ address=address)
+
+ provider_mock.get_ip_addresses.return_value = [ip_address]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.instance_info['deploy_boot_mode'] = 'uefi'
+ pxe_utils.clean_up_pxe_config(task)
+
+ unlink_calls = [
+ mock.call('/tftpboot/10.10.0.1.conf'),
+ mock.call('/tftpboot/0A0A0001.conf')
+ ]
+ unlink_mock.assert_has_calls(unlink_calls)
+ rmtree_mock.assert_called_once_with(
+ os.path.join(CONF.pxe.tftp_root, self.node.uuid))
diff --git a/ironic/tests/unit/common/test_raid.py b/ironic/tests/unit/common/test_raid.py
new file mode 100644
index 000000000..88c3b68dc
--- /dev/null
+++ b/ironic/tests/unit/common/test_raid.py
@@ -0,0 +1,231 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+
+from ironic.common import exception
+from ironic.common import raid
+from ironic.drivers import base as drivers_base
+from ironic.tests.unit import base
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.objects import utils as obj_utils
+from ironic.tests.unit import raid_constants
+
+
+class ValidateRaidConfigurationTestCase(base.TestCase):
+
+ def setUp(self):
+ with open(drivers_base.RAID_CONFIG_SCHEMA, 'r') as raid_schema_fobj:
+ self.schema = json.load(raid_schema_fobj)
+ super(ValidateRaidConfigurationTestCase, self).setUp()
+
+ def test_validate_configuration_okay(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_OKAY)
+ raid.validate_configuration(
+ raid_config, raid_config_schema=self.schema)
+
+ def test_validate_configuration_no_logical_disk(self):
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ {},
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_zero_logical_disks(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_NO_LOGICAL_DISKS)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_no_raid_level(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_NO_RAID_LEVEL)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_raid_level(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_INVALID_RAID_LEVEL)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_no_size_gb(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_NO_SIZE_GB)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_max_size_gb(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_MAX_SIZE_GB)
+ raid.validate_configuration(raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_size_gb(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_INVALID_SIZE_GB)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_is_root_volume(self):
+ raid_config_str = raid_constants.RAID_CONFIG_INVALID_IS_ROOT_VOL
+ raid_config = json.loads(raid_config_str)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_multiple_is_root_volume(self):
+ raid_config_str = raid_constants.RAID_CONFIG_MULTIPLE_IS_ROOT_VOL
+ raid_config = json.loads(raid_config_str)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_share_physical_disks(self):
+ raid_config_str = raid_constants.RAID_CONFIG_INVALID_SHARE_PHY_DISKS
+ raid_config = json.loads(raid_config_str)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_disk_type(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_INVALID_DISK_TYPE)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_int_type(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_INVALID_INT_TYPE)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_number_of_phy_disks(self):
+ raid_config_str = raid_constants.RAID_CONFIG_INVALID_NUM_PHY_DISKS
+ raid_config = json.loads(raid_config_str)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_invalid_physical_disks(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_INVALID_PHY_DISKS)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_additional_property(self):
+ raid_config = json.loads(raid_constants.RAID_CONFIG_ADDITIONAL_PROP)
+ self.assertRaises(exception.InvalidParameterValue,
+ raid.validate_configuration,
+ raid_config,
+ raid_config_schema=self.schema)
+
+ def test_validate_configuration_custom_schema(self):
+ raid_config = json.loads(raid_constants.CUSTOM_SCHEMA_RAID_CONFIG)
+ schema = json.loads(raid_constants.CUSTOM_RAID_SCHEMA)
+ raid.validate_configuration(raid_config,
+ raid_config_schema=schema)
+
+
+class RaidPublicMethodsTestCase(db_base.DbTestCase):
+
+ def test_get_logical_disk_properties(self):
+ with open(drivers_base.RAID_CONFIG_SCHEMA, 'r') as raid_schema_fobj:
+ schema = json.load(raid_schema_fobj)
+ logical_disk_properties = raid.get_logical_disk_properties(schema)
+ self.assertIn('raid_level', logical_disk_properties)
+ self.assertIn('size_gb', logical_disk_properties)
+ self.assertIn('volume_name', logical_disk_properties)
+ self.assertIn('is_root_volume', logical_disk_properties)
+ self.assertIn('share_physical_disks', logical_disk_properties)
+ self.assertIn('disk_type', logical_disk_properties)
+ self.assertIn('interface_type', logical_disk_properties)
+ self.assertIn('number_of_physical_disks', logical_disk_properties)
+ self.assertIn('controller', logical_disk_properties)
+ self.assertIn('physical_disks', logical_disk_properties)
+
+ def test_get_logical_disk_properties_custom_schema(self):
+ raid_schema = json.loads(raid_constants.CUSTOM_RAID_SCHEMA)
+ logical_disk_properties = raid.get_logical_disk_properties(
+ raid_config_schema=raid_schema)
+ self.assertIn('raid_level', logical_disk_properties)
+ self.assertIn('size_gb', logical_disk_properties)
+ self.assertIn('foo', logical_disk_properties)
+
+ def _test_update_raid_info(self, current_config,
+ capabilities=None):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake')
+ if capabilities:
+ properties = node.properties
+ properties['capabilities'] = capabilities
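+            # Drop local_gb so the test can verify whether update_raid_info
+            # repopulates it from the root volume (see the assertions below).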
+ del properties['local_gb']
+ node.properties = properties
+ target_raid_config = json.loads(raid_constants.RAID_CONFIG_OKAY)
+ node.target_raid_config = target_raid_config
+ node.save()
+ raid.update_raid_info(node, current_config)
+ properties = node.properties
+ current = node.raid_config
+ target = node.target_raid_config
+ self.assertIsNotNone(current['last_updated'])
+ self.assertIsInstance(current['logical_disks'][0], dict)
+ if current_config['logical_disks'][0].get('is_root_volume'):
+ self.assertEqual({'wwn': '600508B100'},
+ properties['root_device'])
+ self.assertEqual(100, properties['local_gb'])
+ self.assertIn('raid_level:1', properties['capabilities'])
+ if capabilities:
+ self.assertIn(capabilities, properties['capabilities'])
+ else:
+ self.assertNotIn('local_gb', properties)
+ self.assertNotIn('root_device', properties)
+ if capabilities:
+ self.assertNotIn('raid_level:1', properties['capabilities'])
+
+ # Verify node.target_raid_config is preserved.
+ self.assertEqual(target_raid_config, target)
+
+ def test_update_raid_info_okay(self):
+ current_config = json.loads(raid_constants.CURRENT_RAID_CONFIG)
+ self._test_update_raid_info(current_config,
+ capabilities='boot_mode:bios')
+
+ def test_update_raid_info_okay_no_root_volumes(self):
+ current_config = json.loads(raid_constants.CURRENT_RAID_CONFIG)
+ del current_config['logical_disks'][0]['is_root_volume']
+ del current_config['logical_disks'][0]['root_device_hint']
+ self._test_update_raid_info(current_config,
+ capabilities='boot_mode:bios')
+
+ def test_update_raid_info_okay_current_capabilities_empty(self):
+ current_config = json.loads(raid_constants.CURRENT_RAID_CONFIG)
+ self._test_update_raid_info(current_config,
+ capabilities=None)
+
+ def test_update_raid_info_multiple_root_volumes(self):
+ current_config = json.loads(raid_constants.RAID_CONFIG_MULTIPLE_ROOT)
+ self.assertRaises(exception.InvalidParameterValue,
+ self._test_update_raid_info,
+ current_config)
diff --git a/ironic/tests/unit/common/test_states.py b/ironic/tests/unit/common/test_states.py
new file mode 100644
index 000000000..206ded177
--- /dev/null
+++ b/ironic/tests/unit/common/test_states.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2015 Intel Corporation. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+from ironic.common import states
+from ironic.tests.unit import base
+
+
+class StatesTest(base.TestCase):
+
+ def test_state_values_length(self):
+ """test_state_values_length
+
+        State values can be a maximum of 15 characters because the
+        corresponding database column is limited to 15 characters, as
+        specified in db/sqlalchemy/models.py.
+
+ """
+ for key, value in states.__dict__.items():
+ # Assumption: A state variable name is all UPPERCASE and contents
+ # are a string.
+ if key.upper() == key and isinstance(value, six.string_types):
+ self.assertTrue(
+ (len(value) <= 15),
+ "Value for state: {} is greater than 15 characters".format(
+ key))
diff --git a/ironic/tests/unit/common/test_swift.py b/ironic/tests/unit/common/test_swift.py
new file mode 100644
index 000000000..55ef68848
--- /dev/null
+++ b/ironic/tests/unit/common/test_swift.py
@@ -0,0 +1,155 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import sys
+
+import mock
+from oslo_config import cfg
+import six
+from six.moves import builtins as __builtin__
+from swiftclient import client as swift_client
+from swiftclient import exceptions as swift_exception
+from swiftclient import utils as swift_utils
+
+from ironic.common import exception
+from ironic.common import swift
+from ironic.tests.unit import base
+
+CONF = cfg.CONF
+
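+# The 'file' builtin is gone in Python 3; alias it so the mock specs below
+# keep working on both interpreters.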
+if six.PY3:
+ import io
+ file = io.BytesIO
+
+
+@mock.patch.object(swift_client, 'Connection', autospec=True)
+class SwiftTestCase(base.TestCase):
+
+ def setUp(self):
+ super(SwiftTestCase, self).setUp()
+ self.swift_exception = swift_exception.ClientException('', '')
+
+ self.config(admin_user='admin', group='keystone_authtoken')
+ self.config(admin_tenant_name='tenant', group='keystone_authtoken')
+ self.config(admin_password='password', group='keystone_authtoken')
+ self.config(auth_uri='http://authurl', group='keystone_authtoken')
+ self.config(auth_version='2', group='keystone_authtoken')
+ self.config(swift_max_retries=2, group='swift')
+ self.config(insecure=0, group='keystone_authtoken')
+ self.config(cafile='/path/to/ca/file', group='keystone_authtoken')
+
+        # The SwiftAPI constructor takes its default argument values from the
+        # config options set above, so reload the module to make sure those
+        # values are picked up.
+ six.moves.reload_module(sys.modules['ironic.common.swift'])
+
+ def test___init__(self, connection_mock):
+ swift.SwiftAPI()
+ params = {'retries': 2,
+ 'insecure': 0,
+ 'user': 'admin',
+ 'tenant_name': 'tenant',
+ 'key': 'password',
+ 'authurl': 'http://authurl/v2.0',
+ 'cacert': '/path/to/ca/file',
+ 'auth_version': '2'}
+ connection_mock.assert_called_once_with(**params)
+
+ @mock.patch.object(__builtin__, 'open', autospec=True)
+ def test_create_object(self, open_mock, connection_mock):
+ swiftapi = swift.SwiftAPI()
+ connection_obj_mock = connection_mock.return_value
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'file-object'
+ open_mock.return_value = mock_file_handle
+
+ connection_obj_mock.put_object.return_value = 'object-uuid'
+
+ object_uuid = swiftapi.create_object('container', 'object',
+ 'some-file-location')
+
+ connection_obj_mock.put_container.assert_called_once_with('container')
+ connection_obj_mock.put_object.assert_called_once_with(
+ 'container', 'object', 'file-object', headers=None)
+ self.assertEqual('object-uuid', object_uuid)
+
+ @mock.patch.object(__builtin__, 'open', autospec=True)
+ def test_create_object_create_container_fails(self, open_mock,
+ connection_mock):
+ swiftapi = swift.SwiftAPI()
+ connection_obj_mock = connection_mock.return_value
+ connection_obj_mock.put_container.side_effect = self.swift_exception
+ self.assertRaises(exception.SwiftOperationError,
+ swiftapi.create_object, 'container',
+ 'object', 'some-file-location')
+ connection_obj_mock.put_container.assert_called_once_with('container')
+ self.assertFalse(connection_obj_mock.put_object.called)
+
+ @mock.patch.object(__builtin__, 'open', autospec=True)
+ def test_create_object_put_object_fails(self, open_mock, connection_mock):
+ swiftapi = swift.SwiftAPI()
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = 'file-object'
+ open_mock.return_value = mock_file_handle
+ connection_obj_mock = connection_mock.return_value
+ connection_obj_mock.head_account.side_effect = None
+ connection_obj_mock.put_object.side_effect = self.swift_exception
+ self.assertRaises(exception.SwiftOperationError,
+ swiftapi.create_object, 'container',
+ 'object', 'some-file-location')
+ connection_obj_mock.put_container.assert_called_once_with('container')
+ connection_obj_mock.put_object.assert_called_once_with(
+ 'container', 'object', 'file-object', headers=None)
+
+ @mock.patch.object(swift_utils, 'generate_temp_url', autospec=True)
+ def test_get_temp_url(self, gen_temp_url_mock, connection_mock):
+ swiftapi = swift.SwiftAPI()
+ connection_obj_mock = connection_mock.return_value
+ auth = ['http://host/v1/AUTH_tenant_id', 'token']
+ connection_obj_mock.get_auth.return_value = auth
+ head_ret_val = {'x-account-meta-temp-url-key': 'secretkey'}
+ connection_obj_mock.head_account.return_value = head_ret_val
+ gen_temp_url_mock.return_value = 'temp-url-path'
+ temp_url_returned = swiftapi.get_temp_url('container', 'object', 10)
+ connection_obj_mock.get_auth.assert_called_once_with()
+ connection_obj_mock.head_account.assert_called_once_with()
+ object_path_expected = '/v1/AUTH_tenant_id/container/object'
+ gen_temp_url_mock.assert_called_once_with(object_path_expected, 10,
+ 'secretkey', 'GET')
+ self.assertEqual('http://host/temp-url-path', temp_url_returned)
+
+ def test_delete_object(self, connection_mock):
+ swiftapi = swift.SwiftAPI()
+ connection_obj_mock = connection_mock.return_value
+ swiftapi.delete_object('container', 'object')
+ connection_obj_mock.delete_object.assert_called_once_with('container',
+ 'object')
+
+ def test_head_object(self, connection_mock):
+ swiftapi = swift.SwiftAPI()
+ connection_obj_mock = connection_mock.return_value
+ expected_head_result = {'a': 'b'}
+ connection_obj_mock.head_object.return_value = expected_head_result
+ actual_head_result = swiftapi.head_object('container', 'object')
+ connection_obj_mock.head_object.assert_called_once_with('container',
+ 'object')
+ self.assertEqual(expected_head_result, actual_head_result)
+
+ def test_update_object_meta(self, connection_mock):
+ swiftapi = swift.SwiftAPI()
+ connection_obj_mock = connection_mock.return_value
+ headers = {'a': 'b'}
+ swiftapi.update_object_meta('container', 'object', headers)
+ connection_obj_mock.post_object.assert_called_once_with(
+ 'container', 'object', headers)
diff --git a/ironic/tests/unit/common/test_utils.py b/ironic/tests/unit/common/test_utils.py
new file mode 100644
index 000000000..4898efc5d
--- /dev/null
+++ b/ironic/tests/unit/common/test_utils.py
@@ -0,0 +1,687 @@
+# Copyright 2011 Justin Santa Barbara
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import errno
+import hashlib
+import os
+import os.path
+import shutil
+import tempfile
+
+import mock
+import netaddr
+from oslo_concurrency import processutils
+from oslo_config import cfg
+import six
+import six.moves.builtins as __builtin__
+
+from ironic.common import exception
+from ironic.common import utils
+from ironic.tests.unit import base
+
+CONF = cfg.CONF
+
+
+class BareMetalUtilsTestCase(base.TestCase):
+
+ def test_random_alnum(self):
+ s = utils.random_alnum(10)
+ self.assertEqual(10, len(s))
+ s = utils.random_alnum(100)
+ self.assertEqual(100, len(s))
+
+ def test_unlink(self):
+ with mock.patch.object(os, "unlink", autospec=True) as unlink_mock:
+ unlink_mock.return_value = None
+ utils.unlink_without_raise("/fake/path")
+ unlink_mock.assert_called_once_with("/fake/path")
+
+ def test_unlink_ENOENT(self):
+ with mock.patch.object(os, "unlink", autospec=True) as unlink_mock:
+ unlink_mock.side_effect = OSError(errno.ENOENT)
+ utils.unlink_without_raise("/fake/path")
+ unlink_mock.assert_called_once_with("/fake/path")
+
+ def test_create_link(self):
+ with mock.patch.object(os, "symlink", autospec=True) as symlink_mock:
+ symlink_mock.return_value = None
+ utils.create_link_without_raise("/fake/source", "/fake/link")
+ symlink_mock.assert_called_once_with("/fake/source", "/fake/link")
+
+ def test_create_link_EEXIST(self):
+ with mock.patch.object(os, "symlink", autospec=True) as symlink_mock:
+ symlink_mock.side_effect = OSError(errno.EEXIST)
+ utils.create_link_without_raise("/fake/source", "/fake/link")
+ symlink_mock.assert_called_once_with("/fake/source", "/fake/link")
+
+
+class ExecuteTestCase(base.TestCase):
+
+ def test_retry_on_failure(self):
+ fd, tmpfilename = tempfile.mkstemp()
+ _, tmpfilename2 = tempfile.mkstemp()
+ try:
+ fp = os.fdopen(fd, 'w+')
+ fp.write('''#!/bin/sh
+# If stdin fails to get passed during one of the runs, make a note.
+if ! grep -q foo
+then
+ echo 'failure' > "$1"
+fi
+# If stdin has failed to get passed during this or a previous run, exit early.
+if grep failure "$1"
+then
+ exit 1
+fi
+runs="$(cat $1)"
+if [ -z "$runs" ]
+then
+ runs=0
+fi
+runs=$(($runs + 1))
+echo $runs > "$1"
+exit 1
+''')
+ fp.close()
+ os.chmod(tmpfilename, 0o755)
+ try:
+ self.assertRaises(processutils.ProcessExecutionError,
+ utils.execute,
+ tmpfilename, tmpfilename2, attempts=10,
+ process_input=b'foo',
+ delay_on_retry=False)
+ except OSError as e:
+ if e.errno == errno.EACCES:
+ self.skipTest("Permissions error detected. "
+ "Are you running with a noexec /tmp?")
+ else:
+ raise
+ fp = open(tmpfilename2, 'r')
+ runs = fp.read()
+ fp.close()
+ self.assertNotEqual(runs.strip(), 'failure', 'stdin did not '
+ 'always get passed '
+ 'correctly')
+ runs = int(runs.strip())
+ self.assertEqual(10, runs,
+ 'Ran %d times instead of 10.' % (runs,))
+ finally:
+ os.unlink(tmpfilename)
+ os.unlink(tmpfilename2)
+
+ def test_unknown_kwargs_raises_error(self):
+ self.assertRaises(processutils.UnknownArgumentError,
+ utils.execute,
+ '/usr/bin/env', 'true',
+ this_is_not_a_valid_kwarg=True)
+
+ def test_check_exit_code_boolean(self):
+ utils.execute('/usr/bin/env', 'false', check_exit_code=False)
+ self.assertRaises(processutils.ProcessExecutionError,
+ utils.execute,
+ '/usr/bin/env', 'false', check_exit_code=True)
+
+ def test_no_retry_on_success(self):
+ fd, tmpfilename = tempfile.mkstemp()
+ _, tmpfilename2 = tempfile.mkstemp()
+ try:
+ fp = os.fdopen(fd, 'w+')
+ fp.write('''#!/bin/sh
+# If we've already run, bail out.
+grep -q foo "$1" && exit 1
+# Mark that we've run before.
+echo foo > "$1"
+# Check that stdin gets passed correctly.
+grep foo
+''')
+ fp.close()
+ os.chmod(tmpfilename, 0o755)
+ try:
+ utils.execute(tmpfilename,
+ tmpfilename2,
+ process_input=b'foo',
+ attempts=2)
+ except OSError as e:
+ if e.errno == errno.EACCES:
+ self.skipTest("Permissions error detected. "
+ "Are you running with a noexec /tmp?")
+ else:
+ raise
+ finally:
+ os.unlink(tmpfilename)
+ os.unlink(tmpfilename2)
+
+ @mock.patch.object(processutils, 'execute', autospec=True)
+ @mock.patch.object(os.environ, 'copy', return_value={}, autospec=True)
+ def test_execute_use_standard_locale_no_env_variables(self, env_mock,
+ execute_mock):
+ utils.execute('foo', use_standard_locale=True)
+ execute_mock.assert_called_once_with('foo',
+ env_variables={'LC_ALL': 'C'})
+
+ @mock.patch.object(processutils, 'execute', autospec=True)
+ def test_execute_use_standard_locale_with_env_variables(self,
+ execute_mock):
+ utils.execute('foo', use_standard_locale=True,
+ env_variables={'foo': 'bar'})
+ execute_mock.assert_called_once_with('foo',
+ env_variables={'LC_ALL': 'C',
+ 'foo': 'bar'})
+
+ @mock.patch.object(processutils, 'execute', autospec=True)
+ def test_execute_not_use_standard_locale(self, execute_mock):
+ utils.execute('foo', use_standard_locale=False,
+ env_variables={'foo': 'bar'})
+ execute_mock.assert_called_once_with('foo',
+ env_variables={'foo': 'bar'})
+
+ def test_execute_get_root_helper(self):
+ with mock.patch.object(
+ processutils, 'execute', autospec=True) as execute_mock:
+ helper = utils._get_root_helper()
+ utils.execute('foo', run_as_root=True)
+ execute_mock.assert_called_once_with('foo', run_as_root=True,
+ root_helper=helper)
+
+ def test_execute_without_root_helper(self):
+ with mock.patch.object(
+ processutils, 'execute', autospec=True) as execute_mock:
+ utils.execute('foo', run_as_root=False)
+ execute_mock.assert_called_once_with('foo', run_as_root=False)
+
+
+class GenericUtilsTestCase(base.TestCase):
+ def test_hostname_unicode_sanitization(self):
+ hostname = u"\u7684.test.example.com"
+ self.assertEqual(b"test.example.com",
+ utils.sanitize_hostname(hostname))
+
+ def test_hostname_sanitize_periods(self):
+ hostname = "....test.example.com..."
+ self.assertEqual(b"test.example.com",
+ utils.sanitize_hostname(hostname))
+
+ def test_hostname_sanitize_dashes(self):
+ hostname = "----test.example.com---"
+ self.assertEqual(b"test.example.com",
+ utils.sanitize_hostname(hostname))
+
+ def test_hostname_sanitize_characters(self):
+ hostname = "(#@&$!(@*--#&91)(__=+--test-host.example!!.com-0+"
+ self.assertEqual(b"91----test-host.example.com-0",
+ utils.sanitize_hostname(hostname))
+
+ def test_hostname_translate(self):
+ hostname = "<}\x1fh\x10e\x08l\x02l\x05o\x12!{>"
+ self.assertEqual(b"hello", utils.sanitize_hostname(hostname))
+
+ def test_read_cached_file(self):
+ with mock.patch.object(
+ os.path, "getmtime", autospec=True) as getmtime_mock:
+ getmtime_mock.return_value = 1
+
+ cache_data = {"data": 1123, "mtime": 1}
+ data = utils.read_cached_file("/this/is/a/fake", cache_data)
+ self.assertEqual(cache_data["data"], data)
+ getmtime_mock.assert_called_once_with(mock.ANY)
+
+ def test_read_modified_cached_file(self):
+ with mock.patch.object(
+ os.path, "getmtime", autospec=True) as getmtime_mock:
+ with mock.patch.object(
+ __builtin__, 'open', autospec=True) as open_mock:
+ getmtime_mock.return_value = 2
+ fake_contents = "lorem ipsum"
+ fake_file = mock.Mock()
+ fake_file.read.return_value = fake_contents
+ fake_context_manager = mock.MagicMock()
+ fake_context_manager.__enter__.return_value = fake_file
+ fake_context_manager.__exit__.return_value = None
+ open_mock.return_value = fake_context_manager
+
+ cache_data = {"data": 1123, "mtime": 1}
+ self.reload_called = False
+
+ def test_reload(reloaded_data):
+ self.assertEqual(fake_contents, reloaded_data)
+ self.reload_called = True
+
+ data = utils.read_cached_file("/this/is/a/fake",
+ cache_data,
+ reload_func=test_reload)
+
+ self.assertEqual(fake_contents, data)
+ self.assertTrue(self.reload_called)
+ getmtime_mock.assert_called_once_with(mock.ANY)
+ open_mock.assert_called_once_with(mock.ANY)
+ fake_file.read.assert_called_once_with()
+ fake_context_manager.__exit__.assert_called_once_with(mock.ANY,
+ mock.ANY,
+ mock.ANY)
+ fake_context_manager.__enter__.assert_called_once_with()
+
+ def test_hash_file(self):
+ data = b'Mary had a little lamb, its fleece as white as snow'
+ flo = six.BytesIO(data)
+ h1 = utils.hash_file(flo)
+ h2 = hashlib.sha1(data).hexdigest()
+ self.assertEqual(h1, h2)
+
+ def test_is_valid_boolstr(self):
+ self.assertTrue(utils.is_valid_boolstr('true'))
+ self.assertTrue(utils.is_valid_boolstr('false'))
+ self.assertTrue(utils.is_valid_boolstr('yes'))
+ self.assertTrue(utils.is_valid_boolstr('no'))
+ self.assertTrue(utils.is_valid_boolstr('y'))
+ self.assertTrue(utils.is_valid_boolstr('n'))
+ self.assertTrue(utils.is_valid_boolstr('1'))
+ self.assertTrue(utils.is_valid_boolstr('0'))
+
+ self.assertFalse(utils.is_valid_boolstr('maybe'))
+ self.assertFalse(utils.is_valid_boolstr('only on tuesdays'))
+
+ def test_is_valid_ipv6_cidr(self):
+ self.assertTrue(utils.is_valid_ipv6_cidr("2600::/64"))
+ self.assertTrue(utils.is_valid_ipv6_cidr(
+ "abcd:ef01:2345:6789:abcd:ef01:192.168.254.254/48"))
+ self.assertTrue(utils.is_valid_ipv6_cidr(
+ "0000:0000:0000:0000:0000:0000:0000:0001/32"))
+ self.assertTrue(utils.is_valid_ipv6_cidr(
+ "0000:0000:0000:0000:0000:0000:0000:0001"))
+ self.assertFalse(utils.is_valid_ipv6_cidr("foo"))
+ self.assertFalse(utils.is_valid_ipv6_cidr("127.0.0.1"))
+
+ def test_get_shortened_ipv6(self):
+ self.assertEqual("abcd:ef01:2345:6789:abcd:ef01:c0a8:fefe",
+ utils.get_shortened_ipv6(
+ "abcd:ef01:2345:6789:abcd:ef01:192.168.254.254"))
+ self.assertEqual("::1", utils.get_shortened_ipv6(
+ "0000:0000:0000:0000:0000:0000:0000:0001"))
+ self.assertEqual("caca::caca:0:babe:201:102",
+ utils.get_shortened_ipv6(
+ "caca:0000:0000:caca:0000:babe:0201:0102"))
+ self.assertRaises(netaddr.AddrFormatError, utils.get_shortened_ipv6,
+ "127.0.0.1")
+ self.assertRaises(netaddr.AddrFormatError, utils.get_shortened_ipv6,
+ "failure")
+
+ def test_get_shortened_ipv6_cidr(self):
+ self.assertEqual("2600::/64", utils.get_shortened_ipv6_cidr(
+ "2600:0000:0000:0000:0000:0000:0000:0000/64"))
+ self.assertEqual("2600::/64", utils.get_shortened_ipv6_cidr(
+ "2600::1/64"))
+ self.assertRaises(netaddr.AddrFormatError,
+ utils.get_shortened_ipv6_cidr,
+ "127.0.0.1")
+ self.assertRaises(netaddr.AddrFormatError,
+ utils.get_shortened_ipv6_cidr,
+ "failure")
+
+ def test_is_valid_mac(self):
+ self.assertTrue(utils.is_valid_mac("52:54:00:cf:2d:31"))
+ self.assertTrue(utils.is_valid_mac(u"52:54:00:cf:2d:31"))
+ self.assertFalse(utils.is_valid_mac("127.0.0.1"))
+ self.assertFalse(utils.is_valid_mac("not:a:mac:address"))
+ self.assertFalse(utils.is_valid_mac("52-54-00-cf-2d-31"))
+ self.assertFalse(utils.is_valid_mac("aa bb cc dd ee ff"))
+ self.assertTrue(utils.is_valid_mac("AA:BB:CC:DD:EE:FF"))
+ self.assertFalse(utils.is_valid_mac("AA BB CC DD EE FF"))
+ self.assertFalse(utils.is_valid_mac("AA-BB-CC-DD-EE-FF"))
+
+ def test_is_hostname_safe(self):
+ self.assertTrue(utils.is_hostname_safe('spam'))
+ self.assertFalse(utils.is_hostname_safe('spAm'))
+ self.assertFalse(utils.is_hostname_safe('SPAM'))
+ self.assertFalse(utils.is_hostname_safe('-spam'))
+ self.assertFalse(utils.is_hostname_safe('spam-'))
+ self.assertTrue(utils.is_hostname_safe('spam-eggs'))
+ self.assertFalse(utils.is_hostname_safe('spam_eggs'))
+ self.assertFalse(utils.is_hostname_safe('spam eggs'))
+ self.assertTrue(utils.is_hostname_safe('spam.eggs'))
+ self.assertTrue(utils.is_hostname_safe('9spam'))
+ self.assertTrue(utils.is_hostname_safe('spam7'))
+ self.assertTrue(utils.is_hostname_safe('br34kf4st'))
+ self.assertFalse(utils.is_hostname_safe('$pam'))
+ self.assertFalse(utils.is_hostname_safe('egg$'))
+ self.assertFalse(utils.is_hostname_safe('spam#eggs'))
+ self.assertFalse(utils.is_hostname_safe(' eggs'))
+ self.assertFalse(utils.is_hostname_safe('spam '))
+ self.assertTrue(utils.is_hostname_safe('s'))
+ self.assertTrue(utils.is_hostname_safe('s' * 63))
+ self.assertFalse(utils.is_hostname_safe('s' * 64))
+ self.assertFalse(utils.is_hostname_safe(''))
+ self.assertFalse(utils.is_hostname_safe(None))
+        # Need to ensure a binary response for success or failure
+ self.assertIsNotNone(utils.is_hostname_safe('spam'))
+ self.assertIsNotNone(utils.is_hostname_safe('-spam'))
+ self.assertTrue(utils.is_hostname_safe('www.rackspace.com'))
+ self.assertTrue(utils.is_hostname_safe('www.rackspace.com.'))
+ self.assertTrue(utils.is_hostname_safe('http._sctp.www.example.com'))
+ self.assertTrue(utils.is_hostname_safe('mail.pets_r_us.net'))
+ self.assertTrue(utils.is_hostname_safe('mail-server-15.my_host.org'))
+ self.assertFalse(utils.is_hostname_safe('www.nothere.com_'))
+ self.assertFalse(utils.is_hostname_safe('www.nothere_.com'))
+ self.assertFalse(utils.is_hostname_safe('www..nothere.com'))
+ long_str = 'a' * 63 + '.' + 'b' * 63 + '.' + 'c' * 63 + '.' + 'd' * 63
+ self.assertTrue(utils.is_hostname_safe(long_str))
+ self.assertFalse(utils.is_hostname_safe(long_str + '.'))
+ self.assertFalse(utils.is_hostname_safe('a' * 255))
+
+ def test_is_valid_logical_name(self):
+ valid = (
+ 'spam', 'spAm', 'SPAM', 'spam-eggs', 'spam.eggs', 'spam_eggs',
+ 'spam~eggs', '9spam', 'spam7', '~spam', '.spam', '.~-_', '~',
+ 'br34kf4st', 's', 's' * 63, 's' * 255)
+ invalid = (
+ ' ', 'spam eggs', '$pam', 'egg$', 'spam#eggs',
+ ' eggs', 'spam ', '', None, 'spam%20')
+
+ for hostname in valid:
+ result = utils.is_valid_logical_name(hostname)
+ # Need to ensure a binary response for success. assertTrue
+ # is too generous, and would pass this test if, for
+ # instance, a regex Match object were returned.
+ self.assertIs(result, True,
+ "%s is unexpectedly invalid" % hostname)
+
+ for hostname in invalid:
+ result = utils.is_valid_logical_name(hostname)
+ # Need to ensure a binary response for
+ # success. assertFalse is too generous and would pass this
+ # test if None were returned.
+ self.assertIs(result, False,
+ "%s is unexpectedly valid" % hostname)
+
+ def test_validate_and_normalize_mac(self):
+ mac = 'AA:BB:CC:DD:EE:FF'
+ with mock.patch.object(utils, 'is_valid_mac', autospec=True) as m_mock:
+ m_mock.return_value = True
+ self.assertEqual(mac.lower(),
+ utils.validate_and_normalize_mac(mac))
+
+ def test_validate_and_normalize_mac_invalid_format(self):
+ with mock.patch.object(utils, 'is_valid_mac', autospec=True) as m_mock:
+ m_mock.return_value = False
+ self.assertRaises(exception.InvalidMAC,
+ utils.validate_and_normalize_mac, 'invalid-mac')
+
+ def test_safe_rstrip(self):
+ value = '/test/'
+ rstripped_value = '/test'
+ not_rstripped = '/'
+
+ self.assertEqual(rstripped_value, utils.safe_rstrip(value, '/'))
+ self.assertEqual(not_rstripped, utils.safe_rstrip(not_rstripped, '/'))
+
+ def test_safe_rstrip_not_raises_exceptions(self):
+        # Supplying an integer should normally raise an exception because it
+        # does not have the rstrip() method.
+ value = 10
+
+ # In the case of raising an exception safe_rstrip() should return the
+ # original value.
+ self.assertEqual(value, utils.safe_rstrip(value))
+
+ @mock.patch.object(os.path, 'getmtime', return_value=1439465889.4964755,
+ autospec=True)
+ def test_unix_file_modification_datetime(self, mtime_mock):
+ expected = datetime.datetime(2015, 8, 13, 11, 38, 9, 496475)
+ self.assertEqual(expected,
+ utils.unix_file_modification_datetime('foo'))
+ mtime_mock.assert_called_once_with('foo')
+
+
+class MkfsTestCase(base.TestCase):
+
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_mkfs(self, execute_mock):
+ utils.mkfs('ext4', '/my/block/dev')
+ utils.mkfs('msdos', '/my/msdos/block/dev')
+ utils.mkfs('swap', '/my/swap/block/dev')
+
+ expected = [mock.call('mkfs', '-t', 'ext4', '-F', '/my/block/dev',
+ run_as_root=True,
+ use_standard_locale=True),
+ mock.call('mkfs', '-t', 'msdos', '/my/msdos/block/dev',
+ run_as_root=True,
+ use_standard_locale=True),
+ mock.call('mkswap', '/my/swap/block/dev',
+ run_as_root=True,
+ use_standard_locale=True)]
+ self.assertEqual(expected, execute_mock.call_args_list)
+
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_mkfs_with_label(self, execute_mock):
+ utils.mkfs('ext4', '/my/block/dev', 'ext4-vol')
+ utils.mkfs('msdos', '/my/msdos/block/dev', 'msdos-vol')
+ utils.mkfs('swap', '/my/swap/block/dev', 'swap-vol')
+
+ expected = [mock.call('mkfs', '-t', 'ext4', '-F', '-L', 'ext4-vol',
+ '/my/block/dev', run_as_root=True,
+ use_standard_locale=True),
+ mock.call('mkfs', '-t', 'msdos', '-n', 'msdos-vol',
+ '/my/msdos/block/dev', run_as_root=True,
+ use_standard_locale=True),
+ mock.call('mkswap', '-L', 'swap-vol',
+ '/my/swap/block/dev', run_as_root=True,
+ use_standard_locale=True)]
+ self.assertEqual(expected, execute_mock.call_args_list)
+
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_mkfs_with_unsupported_fs(self, execute_mock):
+ execute_mock.side_effect = iter([processutils.ProcessExecutionError(
+ stderr=os.strerror(errno.ENOENT))])
+ self.assertRaises(exception.FileSystemNotSupported,
+ utils.mkfs, 'foo', '/my/block/dev')
+
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test_mkfs_with_unexpected_error(self, execute_mock):
+ execute_mock.side_effect = iter([processutils.ProcessExecutionError(
+ stderr='fake')])
+ self.assertRaises(processutils.ProcessExecutionError, utils.mkfs,
+ 'ext4', '/my/block/dev', 'ext4-vol')
+
+
+class TempFilesTestCase(base.TestCase):
+
+ def test_tempdir(self):
+
+ dirname = None
+ with utils.tempdir() as tempdir:
+ self.assertTrue(os.path.isdir(tempdir))
+ dirname = tempdir
+ self.assertFalse(os.path.exists(dirname))
+
+ @mock.patch.object(shutil, 'rmtree', autospec=True)
+ @mock.patch.object(tempfile, 'mkdtemp', autospec=True)
+ def test_tempdir_mocked(self, mkdtemp_mock, rmtree_mock):
+
+ self.config(tempdir='abc')
+ mkdtemp_mock.return_value = 'temp-dir'
+ kwargs = {'dir': 'b'}
+
+ with utils.tempdir(**kwargs) as tempdir:
+ self.assertEqual('temp-dir', tempdir)
+ tempdir_created = tempdir
+
+ mkdtemp_mock.assert_called_once_with(**kwargs)
+ rmtree_mock.assert_called_once_with(tempdir_created)
+
+ @mock.patch.object(utils, 'LOG', autospec=True)
+ @mock.patch.object(shutil, 'rmtree', autospec=True)
+ @mock.patch.object(tempfile, 'mkdtemp', autospec=True)
+ def test_tempdir_mocked_error_on_rmtree(self, mkdtemp_mock, rmtree_mock,
+ log_mock):
+
+ self.config(tempdir='abc')
+ mkdtemp_mock.return_value = 'temp-dir'
+ rmtree_mock.side_effect = OSError
+
+ with utils.tempdir() as tempdir:
+ self.assertEqual('temp-dir', tempdir)
+ tempdir_created = tempdir
+
+ rmtree_mock.assert_called_once_with(tempdir_created)
+ self.assertTrue(log_mock.error.called)
+
+ @mock.patch.object(os.path, 'exists', autospec=True)
+ @mock.patch.object(utils, '_check_dir_writable', autospec=True)
+ @mock.patch.object(utils, '_check_dir_free_space', autospec=True)
+ def test_check_dir_with_pass_in(self, mock_free_space, mock_dir_writable,
+ mock_exists):
+ mock_exists.return_value = True
+ # test passing in a directory and size
+ utils.check_dir(directory_to_check='/fake/path', required_space=5)
+ mock_exists.assert_called_once_with('/fake/path')
+ mock_dir_writable.assert_called_once_with('/fake/path')
+ mock_free_space.assert_called_once_with('/fake/path', 5)
+
+ @mock.patch.object(os.path, 'exists', autospec=True)
+ @mock.patch.object(utils, '_check_dir_writable', autospec=True)
+ @mock.patch.object(utils, '_check_dir_free_space', autospec=True)
+ def test_check_dir_no_dir(self, mock_free_space, mock_dir_writable,
+ mock_exists):
+ mock_exists.return_value = False
+ self.config(tempdir='/fake/path')
+ self.assertRaises(exception.PathNotFound, utils.check_dir)
+ mock_exists.assert_called_once_with(CONF.tempdir)
+ self.assertFalse(mock_free_space.called)
+ self.assertFalse(mock_dir_writable.called)
+
+ @mock.patch.object(os.path, 'exists', autospec=True)
+ @mock.patch.object(utils, '_check_dir_writable', autospec=True)
+ @mock.patch.object(utils, '_check_dir_free_space', autospec=True)
+ def test_check_dir_ok(self, mock_free_space, mock_dir_writable,
+ mock_exists):
+ mock_exists.return_value = True
+ self.config(tempdir='/fake/path')
+ utils.check_dir()
+ mock_exists.assert_called_once_with(CONF.tempdir)
+ mock_dir_writable.assert_called_once_with(CONF.tempdir)
+ mock_free_space.assert_called_once_with(CONF.tempdir, 1)
+
+ @mock.patch.object(os, 'access', autospec=True)
+ def test__check_dir_writable_ok(self, mock_access):
+ mock_access.return_value = True
+ self.assertIsNone(utils._check_dir_writable("/fake/path"))
+ mock_access.assert_called_once_with("/fake/path", os.W_OK)
+
+ @mock.patch.object(os, 'access', autospec=True)
+ def test__check_dir_writable_not_writable(self, mock_access):
+ mock_access.return_value = False
+
+ self.assertRaises(exception.DirectoryNotWritable,
+ utils._check_dir_writable, "/fake/path")
+ mock_access.assert_called_once_with("/fake/path", os.W_OK)
+
+ @mock.patch.object(os, 'statvfs', autospec=True)
+ def test__check_dir_free_space_ok(self, mock_stat):
+ statvfs_mock_return = mock.MagicMock()
+ statvfs_mock_return.f_bsize = 5
+ statvfs_mock_return.f_frsize = 0
+ statvfs_mock_return.f_blocks = 0
+ statvfs_mock_return.f_bfree = 0
+ statvfs_mock_return.f_bavail = 1024 * 1024
+ statvfs_mock_return.f_files = 0
+ statvfs_mock_return.f_ffree = 0
+ statvfs_mock_return.f_favail = 0
+ statvfs_mock_return.f_flag = 0
+ statvfs_mock_return.f_namemax = 0
+ mock_stat.return_value = statvfs_mock_return
+ utils._check_dir_free_space("/fake/path")
+ mock_stat.assert_called_once_with("/fake/path")
+
+ @mock.patch.object(os, 'statvfs', autospec=True)
+ def test_check_dir_free_space_raises(self, mock_stat):
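+        # 1-byte blocks with only 1024 blocks available leaves ~1 KiB free,
+        # far below the required space, so InsufficientDiskSpace is expected.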
+ statvfs_mock_return = mock.MagicMock()
+ statvfs_mock_return.f_bsize = 1
+ statvfs_mock_return.f_frsize = 0
+ statvfs_mock_return.f_blocks = 0
+ statvfs_mock_return.f_bfree = 0
+ statvfs_mock_return.f_bavail = 1024
+ statvfs_mock_return.f_files = 0
+ statvfs_mock_return.f_ffree = 0
+ statvfs_mock_return.f_favail = 0
+ statvfs_mock_return.f_flag = 0
+ statvfs_mock_return.f_namemax = 0
+ mock_stat.return_value = statvfs_mock_return
+
+ self.assertRaises(exception.InsufficientDiskSpace,
+ utils._check_dir_free_space, "/fake/path")
+ mock_stat.assert_called_once_with("/fake/path")
+
+
+class IsHttpUrlTestCase(base.TestCase):
+
+ def test_is_http_url(self):
+ self.assertTrue(utils.is_http_url('http://127.0.0.1'))
+ self.assertTrue(utils.is_http_url('https://127.0.0.1'))
+ self.assertTrue(utils.is_http_url('HTTP://127.1.2.3'))
+ self.assertTrue(utils.is_http_url('HTTPS://127.3.2.1'))
+ self.assertFalse(utils.is_http_url('Zm9vYmFy'))
+ self.assertFalse(utils.is_http_url('11111111'))
+
+
+class GetUpdatedCapabilitiesTestCase(base.TestCase):
+
+ def test_get_updated_capabilities(self):
+ capabilities = {'ilo_firmware_version': 'xyz'}
+ cap_string = 'ilo_firmware_version:xyz'
+ cap_returned = utils.get_updated_capabilities(None, capabilities)
+ self.assertEqual(cap_string, cap_returned)
+ self.assertIsInstance(cap_returned, str)
+
+ def test_get_updated_capabilities_multiple_keys(self):
+ capabilities = {'ilo_firmware_version': 'xyz',
+ 'foo': 'bar', 'somekey': 'value'}
+ cap_string = 'ilo_firmware_version:xyz,foo:bar,somekey:value'
+ cap_returned = utils.get_updated_capabilities(None, capabilities)
+ set1 = set(cap_string.split(','))
+ set2 = set(cap_returned.split(','))
+ self.assertEqual(set1, set2)
+ self.assertIsInstance(cap_returned, str)
+
+ def test_get_updated_capabilities_invalid_capabilities(self):
+ capabilities = 'ilo_firmware_version'
+ self.assertRaises(ValueError,
+ utils.get_updated_capabilities,
+ capabilities, {})
+
+ def test_get_updated_capabilities_capabilities_not_dict(self):
+ capabilities = ['ilo_firmware_version:xyz', 'foo:bar']
+ self.assertRaises(ValueError,
+ utils.get_updated_capabilities,
+ None, capabilities)
+
+ def test_get_updated_capabilities_add_to_existing_capabilities(self):
+ new_capabilities = {'BootMode': 'uefi'}
+ expected_capabilities = 'BootMode:uefi,foo:bar'
+ cap_returned = utils.get_updated_capabilities('foo:bar',
+ new_capabilities)
+ set1 = set(expected_capabilities.split(','))
+ set2 = set(cap_returned.split(','))
+ self.assertEqual(set1, set2)
+ self.assertIsInstance(cap_returned, str)
+
+ def test_get_updated_capabilities_replace_to_existing_capabilities(self):
+ new_capabilities = {'BootMode': 'bios'}
+ expected_capabilities = 'BootMode:bios'
+ cap_returned = utils.get_updated_capabilities('BootMode:uefi',
+ new_capabilities)
+ set1 = set(expected_capabilities.split(','))
+ set2 = set(cap_returned.split(','))
+ self.assertEqual(set1, set2)
+ self.assertIsInstance(cap_returned, str)
diff --git a/ironic/tests/unit/conductor/__init__.py b/ironic/tests/unit/conductor/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/conductor/__init__.py
diff --git a/ironic/tests/unit/conductor/test_conductor_utils.py b/ironic/tests/unit/conductor/test_conductor_utils.py
new file mode 100644
index 000000000..72bfba89b
--- /dev/null
+++ b/ironic/tests/unit/conductor/test_conductor_utils.py
@@ -0,0 +1,335 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+from oslo_utils import uuidutils
+
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.conductor import utils as conductor_utils
+from ironic import objects
+from ironic.tests.unit import base as tests_base
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+class NodeSetBootDeviceTestCase(base.DbTestCase):
+
+ def test_node_set_boot_device_non_existent_device(self):
+ mgr_utils.mock_the_extension_manager(driver="fake_ipmitool")
+ self.driver = driver_factory.get_driver("fake_ipmitool")
+ ipmi_info = utils.get_test_ipmi_info()
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake_ipmitool',
+ driver_info=ipmi_info)
+ task = task_manager.TaskManager(self.context, node.uuid)
+ self.assertRaises(exception.InvalidParameterValue,
+ conductor_utils.node_set_boot_device,
+ task,
+ device='fake')
+
+ def test_node_set_boot_device_valid(self):
+ mgr_utils.mock_the_extension_manager(driver="fake_ipmitool")
+ self.driver = driver_factory.get_driver("fake_ipmitool")
+ ipmi_info = utils.get_test_ipmi_info()
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake_ipmitool',
+ driver_info=ipmi_info)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.management,
+ 'set_boot_device') as mock_sbd:
+ conductor_utils.node_set_boot_device(task,
+ device='pxe')
+ mock_sbd.assert_called_once_with(task,
+ device='pxe',
+ persistent=False)
+
+
+class NodePowerActionTestCase(base.DbTestCase):
+
+ def setUp(self):
+ super(NodePowerActionTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager()
+ self.driver = driver_factory.get_driver("fake")
+
+ def test_node_power_action_power_on(self):
+ """Test node_power_action to turn node power on."""
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ power_state=states.POWER_OFF)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_mock:
+ get_power_mock.return_value = states.POWER_OFF
+
+ conductor_utils.node_power_action(task, states.POWER_ON)
+
+ node.refresh()
+ get_power_mock.assert_called_once_with(mock.ANY)
+ self.assertEqual(states.POWER_ON, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNone(node['last_error'])
+
+ def test_node_power_action_power_off(self):
+ """Test node_power_action to turn node power off."""
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ power_state=states.POWER_ON)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_mock:
+ get_power_mock.return_value = states.POWER_ON
+
+ conductor_utils.node_power_action(task, states.POWER_OFF)
+
+ node.refresh()
+ get_power_mock.assert_called_once_with(mock.ANY)
+ self.assertEqual(states.POWER_OFF, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNone(node['last_error'])
+
+ def test_node_power_action_power_reboot(self):
+ """Test for reboot a node."""
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ power_state=states.POWER_ON)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.power, 'reboot') as reboot_mock:
+ conductor_utils.node_power_action(task, states.REBOOT)
+
+ node.refresh()
+ reboot_mock.assert_called_once_with(mock.ANY)
+ self.assertEqual(states.POWER_ON, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNone(node['last_error'])
+
+ def test_node_power_action_invalid_state(self):
+ """Test for exception when changing to an invalid power state."""
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ power_state=states.POWER_ON)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_mock:
+ get_power_mock.return_value = states.POWER_ON
+
+ self.assertRaises(exception.InvalidParameterValue,
+ conductor_utils.node_power_action,
+ task,
+ "INVALID_POWER_STATE")
+
+ node.refresh()
+ get_power_mock.assert_called_once_with(mock.ANY)
+ self.assertEqual(states.POWER_ON, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNotNone(node['last_error'])
+
+ # last_error is cleared when a new transaction happens
+ conductor_utils.node_power_action(task, states.POWER_OFF)
+ node.refresh()
+ self.assertEqual(states.POWER_OFF, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNone(node['last_error'])
+
+ def test_node_power_action_already_being_processed(self):
+ """Test node power action after aborted power action.
+
+ The code does not check target_power_state because it is normally
+ None. This test covers the case where it is not None, e.g. a
+ conductor died during a previous power-off attempt and left
+ target_power_state set to states.POWER_OFF, and the user is
+ attempting to power off again.
+ """
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ power_state=states.POWER_ON,
+ target_power_state=states.POWER_OFF)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ conductor_utils.node_power_action(task, states.POWER_OFF)
+
+ node.refresh()
+ self.assertEqual(states.POWER_OFF, node['power_state'])
+ self.assertEqual(states.NOSTATE, node['target_power_state'])
+ self.assertIsNone(node['last_error'])
+
+ def test_node_power_action_in_same_state(self):
+ """Test setting node state to its present state.
+
+ Test that we don't try to set the power state if the requested
+ state is the same as the current state.
+ """
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ last_error='anything but None',
+ power_state=states.POWER_ON)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_mock:
+ get_power_mock.return_value = states.POWER_ON
+
+ with mock.patch.object(self.driver.power,
+ 'set_power_state') as set_power_mock:
+ conductor_utils.node_power_action(task, states.POWER_ON)
+
+ node.refresh()
+ get_power_mock.assert_called_once_with(mock.ANY)
+ self.assertFalse(set_power_mock.called,
+ "set_power_state unexpectedly called")
+ self.assertEqual(states.POWER_ON, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNone(node['last_error'])
+
+ def test_node_power_action_in_same_state_db_not_in_sync(self):
+ """Test setting node state to its present state if DB is out of sync.
+
+ Under rare conditions (see bug #1403106) the database might contain
+ stale information; make sure we fix it.
+ """
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ last_error='anything but None',
+ power_state=states.POWER_ON)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_mock:
+ get_power_mock.return_value = states.POWER_OFF
+
+ with mock.patch.object(self.driver.power,
+ 'set_power_state') as set_power_mock:
+ conductor_utils.node_power_action(task, states.POWER_OFF)
+
+ node.refresh()
+ get_power_mock.assert_called_once_with(mock.ANY)
+ self.assertFalse(set_power_mock.called,
+ "set_power_state unexpectedly called")
+ self.assertEqual(states.POWER_OFF, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNone(node['last_error'])
+
+ def test_node_power_action_failed_getting_state(self):
+ """Test for exception when we can't get the current power state."""
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ power_state=states.POWER_ON)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_state_mock:
+ get_power_state_mock.side_effect = (
+ exception.InvalidParameterValue('failed getting power state'))
+
+ self.assertRaises(exception.InvalidParameterValue,
+ conductor_utils.node_power_action,
+ task,
+ states.POWER_ON)
+
+ node.refresh()
+ get_power_state_mock.assert_called_once_with(mock.ANY)
+ self.assertEqual(states.POWER_ON, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNotNone(node['last_error'])
+
+ def test_node_power_action_set_power_failure(self):
+ """Test if an exception is thrown when the set_power call fails."""
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake',
+ power_state=states.POWER_OFF)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_mock:
+ with mock.patch.object(self.driver.power,
+ 'set_power_state') as set_power_mock:
+ get_power_mock.return_value = states.POWER_OFF
+ set_power_mock.side_effect = exception.IronicException()
+
+ self.assertRaises(
+ exception.IronicException,
+ conductor_utils.node_power_action,
+ task,
+ states.POWER_ON)
+
+ node.refresh()
+ get_power_mock.assert_called_once_with(mock.ANY)
+ set_power_mock.assert_called_once_with(mock.ANY,
+ states.POWER_ON)
+ self.assertEqual(states.POWER_OFF, node['power_state'])
+ self.assertIsNone(node['target_power_state'])
+ self.assertIsNotNone(node['last_error'])
+
+
+class CleanupAfterTimeoutTestCase(tests_base.TestCase):
+ def setUp(self):
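+ # The task and node are plain mocks here, so these tests exercise
+ # cleanup_after_timeout() without the DB fixture used by the other
+ # test cases.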
+ super(CleanupAfterTimeoutTestCase, self).setUp()
+ self.task = mock.Mock(spec=task_manager.TaskManager)
+ self.task.context = mock.sentinel.context
+ self.task.driver = mock.Mock(spec_set=['deploy'])
+ self.task.shared = False
+ self.task.node = mock.Mock(spec_set=objects.Node)
+ self.node = self.task.node
+
+ def test_cleanup_after_timeout(self):
+ conductor_utils.cleanup_after_timeout(self.task)
+
+ self.node.save.assert_called_once_with()
+ self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
+ self.assertIn('Timeout reached', self.node.last_error)
+
+ def test_cleanup_after_timeout_shared_lock(self):
+ self.task.shared = True
+
+ self.assertRaises(exception.ExclusiveLockRequired,
+ conductor_utils.cleanup_after_timeout,
+ self.task)
+
+ def test_cleanup_after_timeout_cleanup_ironic_exception(self):
+ clean_up_mock = self.task.driver.deploy.clean_up
+ clean_up_mock.side_effect = exception.IronicException('moocow')
+
+ conductor_utils.cleanup_after_timeout(self.task)
+
+ self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
+ self.assertEqual([mock.call()] * 2, self.node.save.call_args_list)
+ self.assertIn('moocow', self.node.last_error)
+
+ def test_cleanup_after_timeout_cleanup_random_exception(self):
+ clean_up_mock = self.task.driver.deploy.clean_up
+ clean_up_mock.side_effect = Exception('moocow')
+
+ conductor_utils.cleanup_after_timeout(self.task)
+
+ self.task.driver.deploy.clean_up.assert_called_once_with(self.task)
+ self.assertEqual([mock.call()] * 2, self.node.save.call_args_list)
+ self.assertIn('Deploy timed out', self.node.last_error)
diff --git a/ironic/tests/unit/conductor/test_manager.py b/ironic/tests/unit/conductor/test_manager.py
new file mode 100644
index 000000000..5ad9dbbe3
--- /dev/null
+++ b/ironic/tests/unit/conductor/test_manager.py
@@ -0,0 +1,4573 @@
+# coding=utf-8
+
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# Copyright 2013 International Business Machines Corporation
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for Ironic ManagerService."""
+
+import datetime
+
+import eventlet
+import mock
+from oslo_config import cfg
+from oslo_db import exception as db_exception
+import oslo_messaging as messaging
+from oslo_utils import strutils
+from oslo_utils import uuidutils
+from oslo_versionedobjects import base as ovo_base
+from oslo_versionedobjects import fields
+
+from ironic.common import boot_devices
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import images
+from ironic.common import states
+from ironic.common import swift
+from ironic.conductor import manager
+from ironic.conductor import task_manager
+from ironic.conductor import utils as conductor_utils
+from ironic.db import api as dbapi
+from ironic.drivers import base as drivers_base
+from ironic.drivers.modules import fake
+from ironic import objects
+from ironic.objects import base as obj_base
+from ironic.tests.unit import base as tests_base
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as tests_db_base
+from ironic.tests.unit.db import utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+CONF = cfg.CONF
+
+
+class _CommonMixIn(object):
+ @staticmethod
+ def _create_node(**kwargs):
+ attrs = {'id': 1,
+ 'uuid': uuidutils.generate_uuid(),
+ 'power_state': states.POWER_OFF,
+ 'target_power_state': None,
+ 'maintenance': False,
+ 'reservation': None}
+ attrs.update(kwargs)
+ node = mock.Mock(spec_set=objects.Node)
+ for attr in attrs:
+ setattr(node, attr, attrs[attr])
+ return node
+
+ def _create_task(self, node=None, node_attrs=None):
+ if node_attrs is None:
+ node_attrs = {}
+ if node is None:
+ node = self._create_node(**node_attrs)
+ task = mock.Mock(spec_set=['node', 'release_resources',
+ 'spawn_after', 'process_event'])
+ task.node = node
+ return task
+
+ def _get_nodeinfo_list_response(self, nodes=None):
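+ # Build rows shaped like the node-info listing the manager iterates
+ # over: one tuple per node, holding the attributes named in
+ # self.columns.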
+ if nodes is None:
+ nodes = [self.node]
+ elif not isinstance(nodes, (list, tuple)):
+ nodes = [nodes]
+ return [tuple(getattr(n, c) for c in self.columns) for n in nodes]
+
+ def _get_acquire_side_effect(self, task_infos):
+ """Helper method to generate a task_manager.acquire() side effect.
+
+ This accepts a list of information about task mocks to return.
+ task_infos can be a single entity or a list.
+
+ Each task_info can be a single entity, the task to return, or it
+ can be a tuple of (task, exception_to_raise_on_exit). 'task' can
+ be an exception to raise on __enter__.
+
+ Examples: _get_acquire_side_effect(self, task): Yield task
+ _get_acquire_side_effect(self, [task, enter_exception(),
+ (task2, exit_exception())])
+ Yield task on first call to acquire()
+ raise enter_exception() in __enter__ on 2nd call to
+ acquire()
+ Yield task2 on 3rd call to acquire(), but raise
+ exit_exception() on __exit__()
+ """
+ tasks = []
+ exit_exceptions = []
+ if not isinstance(task_infos, list):
+ task_infos = [task_infos]
+ for task_info in task_infos:
+ if isinstance(task_info, tuple):
+ task, exc = task_info
+ else:
+ task = task_info
+ exc = None
+ tasks.append(task)
+ exit_exceptions.append(exc)
+
+ class FakeAcquire(object):
+ def __init__(fa_self, context, node_id, *args, **kwargs):
+ # We actually verify these arguments via
+ # acquire_mock.call_args_list(). However, this stores the
+ # node_id so we can assert we're returning the correct node
+ # in __enter__().
+ fa_self.node_id = node_id
+
+ def __enter__(fa_self):
+ task = tasks.pop(0)
+ if isinstance(task, Exception):
+ raise task
+ # NOTE(comstud): Not ideal to throw this into
+ # a helper, however it's the cleanest way
+ # to verify we're dealing with the correct task/node.
+ if strutils.is_int_like(fa_self.node_id):
+ self.assertEqual(fa_self.node_id, task.node.id)
+ else:
+ self.assertEqual(fa_self.node_id, task.node.uuid)
+ return task
+
+ def __exit__(fa_self, exc_typ, exc_val, exc_tb):
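+ # Raise the configured exit exception only if the with-block body
+ # exited cleanly; otherwise let the original exception propagate.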
+ exc = exit_exceptions.pop(0)
+ if exc_typ is None and exc is not None:
+ raise exc
+
+ return FakeAcquire
+
+
+class _ServiceSetUpMixin(object):
+ def setUp(self):
+ super(_ServiceSetUpMixin, self).setUp()
+ self.hostname = 'test-host'
+ self.config(enabled_drivers=['fake'])
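+ # Keep node-lock retries minimal so tests that expect NodeLocked
+ # fail fast instead of waiting on retries.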
+ self.config(node_locked_retry_attempts=1, group='conductor')
+ self.config(node_locked_retry_interval=0, group='conductor')
+ self.service = manager.ConductorManager(self.hostname, 'test-topic')
+ mgr_utils.mock_the_extension_manager()
+ self.driver = driver_factory.get_driver("fake")
+
+ def _stop_service(self):
+ try:
+ objects.Conductor.get_by_hostname(self.context, self.hostname)
+ except exception.ConductorNotFound:
+ return
+ self.service.del_host()
+
+ def _start_service(self):
+ self.service.init_host()
+ self.addCleanup(self._stop_service)
+
+
+def _mock_record_keepalive(func_or_class):
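+ # Replace the conductor's keepalive loop with a no-op so unit tests
+ # do not run the periodic heartbeat in the background.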
+ return mock.patch.object(
+ manager.ConductorManager,
+ '_conductor_service_record_keepalive',
+ lambda: None)(func_or_class)
+
+
+@_mock_record_keepalive
+class StartStopTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+ def test_start_registers_conductor(self):
+ self.assertRaises(exception.ConductorNotFound,
+ objects.Conductor.get_by_hostname,
+ self.context, self.hostname)
+ self._start_service()
+ res = objects.Conductor.get_by_hostname(self.context, self.hostname)
+ self.assertEqual(self.hostname, res['hostname'])
+
+ def test_start_clears_conductor_locks(self):
+ node = obj_utils.create_test_node(self.context,
+ reservation=self.hostname)
+ node.save()
+ self._start_service()
+ node.refresh()
+ self.assertIsNone(node.reservation)
+
+ def test_stop_unregisters_conductor(self):
+ self._start_service()
+ res = objects.Conductor.get_by_hostname(self.context, self.hostname)
+ self.assertEqual(self.hostname, res['hostname'])
+ self.service.del_host()
+ self.assertRaises(exception.ConductorNotFound,
+ objects.Conductor.get_by_hostname,
+ self.context, self.hostname)
+
+ def test_stop_doesnt_unregister_conductor(self):
+ self._start_service()
+ res = objects.Conductor.get_by_hostname(self.context, self.hostname)
+ self.assertEqual(self.hostname, res['hostname'])
+ self.service.del_host(deregister=False)
+ res = objects.Conductor.get_by_hostname(self.context, self.hostname)
+ self.assertEqual(self.hostname, res['hostname'])
+
+ @mock.patch.object(driver_factory.DriverFactory, '__getitem__',
+ lambda *args: mock.MagicMock())
+ def test_start_registers_driver_names(self):
+ init_names = ['fake1', 'fake2']
+ restart_names = ['fake3', 'fake4']
+
+ df = driver_factory.DriverFactory()
+ with mock.patch.object(df._extension_manager, 'names') as mock_names:
+ # verify driver names are registered
+ self.config(enabled_drivers=init_names)
+ mock_names.return_value = init_names
+ self._start_service()
+ res = objects.Conductor.get_by_hostname(self.context,
+ self.hostname)
+ self.assertEqual(init_names, res['drivers'])
+
+ # verify that restart registers new driver names
+ self.config(enabled_drivers=restart_names)
+ mock_names.return_value = restart_names
+ self._start_service()
+ res = objects.Conductor.get_by_hostname(self.context,
+ self.hostname)
+ self.assertEqual(restart_names, res['drivers'])
+
+ @mock.patch.object(driver_factory.DriverFactory, '__getitem__')
+ def test_start_registers_driver_specific_tasks(self, get_mock):
+ init_names = ['fake1']
+ expected_task_name = 'ironic.tests.unit.conductor.test_manager.task'
+ expected_task_name2 = 'ironic.tests.unit.conductor.test_manager.iface'
+ self.config(enabled_drivers=init_names)
+
+ class TestInterface(object):
+ @drivers_base.driver_periodic_task(spacing=100500)
+ def iface(self):
+ pass
+
+ class Driver(object):
+ core_interfaces = []
+ standard_interfaces = ['iface']
+
+ iface = TestInterface()
+
+ @drivers_base.driver_periodic_task(spacing=42)
+ def task(self, context):
+ pass
+
+ obj = Driver()
+ self.assertTrue(obj.task._periodic_enabled)
+ get_mock.return_value = mock.Mock(obj=obj)
+
+ with mock.patch.object(
+ driver_factory.DriverFactory()._extension_manager,
+ 'names') as mock_names:
+ mock_names.return_value = init_names
+ self._start_service()
+ tasks = dict(self.service._periodic_tasks)
+ self.assertEqual(obj.task, tasks[expected_task_name])
+ self.assertEqual(obj.iface.iface, tasks[expected_task_name2])
+ self.assertEqual(42,
+ self.service._periodic_spacing[expected_task_name])
+ self.assertEqual(100500,
+ self.service._periodic_spacing[expected_task_name2])
+ self.assertIn(expected_task_name, self.service._periodic_last_run)
+ self.assertIn(expected_task_name2, self.service._periodic_last_run)
+
+ @mock.patch.object(driver_factory.DriverFactory, '__init__')
+ def test_start_fails_on_missing_driver(self, mock_df):
+ mock_df.side_effect = exception.DriverNotFound('test')
+ with mock.patch.object(self.dbapi, 'register_conductor') as mock_reg:
+ self.assertRaises(exception.DriverNotFound,
+ self.service.init_host)
+ self.assertTrue(mock_df.called)
+ self.assertFalse(mock_reg.called)
+
+ @mock.patch.object(manager, 'LOG')
+ @mock.patch.object(driver_factory, 'DriverFactory')
+ def test_start_fails_on_no_driver(self, df_mock, log_mock):
+ driver_factory_mock = mock.MagicMock(names=[])
+ df_mock.return_value = driver_factory_mock
+ self.assertRaises(exception.NoDriversLoaded,
+ self.service.init_host)
+ self.assertTrue(log_mock.error.called)
+
+ @mock.patch.object(eventlet.greenpool.GreenPool, 'waitall')
+ def test_del_host_waits_on_workerpool(self, wait_mock):
+ self._start_service()
+ self.service.del_host()
+ self.assertTrue(wait_mock.called)
+
+
+class KeepAliveTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+ def test__conductor_service_record_keepalive(self):
+ self._start_service()
+ # avoid wasting time at the event.wait()
+ CONF.set_override('heartbeat_interval', 0, 'conductor')
+ with mock.patch.object(self.dbapi, 'touch_conductor') as mock_touch:
+ with mock.patch.object(self.service._keepalive_evt,
+ 'is_set') as mock_is_set:
+ mock_is_set.side_effect = [False, True]
+ self.service._conductor_service_record_keepalive()
+ mock_touch.assert_called_once_with(self.hostname)
+
+ def test__conductor_service_record_keepalive_failed_db_conn(self):
+ self._start_service()
+ # avoid wasting time at the event.wait()
+ CONF.set_override('heartbeat_interval', 0, 'conductor')
+ with mock.patch.object(self.dbapi, 'touch_conductor') as mock_touch:
+ mock_touch.side_effect = [None, db_exception.DBConnectionError(),
+ None]
+ with mock.patch.object(self.service._keepalive_evt,
+ 'is_set') as mock_is_set:
+ mock_is_set.side_effect = [False, False, False, True]
+ self.service._conductor_service_record_keepalive()
+ self.assertEqual(3, mock_touch.call_count)
+
+
+@_mock_record_keepalive
+class ChangeNodePowerStateTestCase(_ServiceSetUpMixin,
+ tests_db_base.DbTestCase):
+
+ def test_change_node_power_state_power_on(self):
+ # Test change_node_power_state including integration with
+ # conductor.utils.node_power_action and lower.
+ node = obj_utils.create_test_node(self.context,
+ driver='fake',
+ power_state=states.POWER_OFF)
+ self._start_service()
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_mock:
+ get_power_mock.return_value = states.POWER_OFF
+
+ self.service.change_node_power_state(self.context,
+ node.uuid,
+ states.POWER_ON)
+ self.service._worker_pool.waitall()
+
+ get_power_mock.assert_called_once_with(mock.ANY)
+ node.refresh()
+ self.assertEqual(states.POWER_ON, node.power_state)
+ self.assertIsNone(node.target_power_state)
+ self.assertIsNone(node.last_error)
+ # Verify the reservation has been cleared by
+ # background task's link callback.
+ self.assertIsNone(node.reservation)
+
+ @mock.patch.object(conductor_utils, 'node_power_action')
+ def test_change_node_power_state_node_already_locked(self,
+ pwr_act_mock):
+ # Test change_node_power_state with mocked
+ # conductor.utils.node_power_action.
+ fake_reservation = 'fake-reserv'
+ pwr_state = states.POWER_ON
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ power_state=pwr_state,
+ reservation=fake_reservation)
+ self._start_service()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.change_node_power_state,
+ self.context,
+ node.uuid,
+ states.POWER_ON)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeLocked, exc.exc_info[0])
+
+ # In this test no worker should be spawned, but wait anyway to make
+ # sure the pwr_act_mock assertion below is valid.
+ self.service._worker_pool.waitall()
+ self.assertFalse(pwr_act_mock.called, 'node_power_action has been '
+ 'unexpectedly called.')
+ # Verify existing reservation wasn't broken.
+ node.refresh()
+ self.assertEqual(fake_reservation, node.reservation)
+
+ def test_change_node_power_state_worker_pool_full(self):
+ # Test change_node_power_state including integration with
+ # conductor.utils.node_power_action and lower.
+ initial_state = states.POWER_OFF
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ power_state=initial_state)
+ self._start_service()
+
+ with mock.patch.object(self.service,
+ '_spawn_worker') as spawn_mock:
+ spawn_mock.side_effect = exception.NoFreeConductorWorker()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.change_node_power_state,
+ self.context,
+ node.uuid,
+ states.POWER_ON)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NoFreeConductorWorker, exc.exc_info[0])
+
+ spawn_mock.assert_called_once_with(mock.ANY, mock.ANY,
+ mock.ANY)
+ node.refresh()
+ self.assertEqual(initial_state, node.power_state)
+ self.assertIsNone(node.target_power_state)
+ self.assertIsNotNone(node.last_error)
+ # Verify the picked reservation has been cleared due to full pool.
+ self.assertIsNone(node.reservation)
+
+ def test_change_node_power_state_exception_in_background_task(
+ self):
+ # Test change_node_power_state including integration with
+ # conductor.utils.node_power_action and lower.
+ initial_state = states.POWER_OFF
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ power_state=initial_state)
+ self._start_service()
+
+ with mock.patch.object(self.driver.power,
+ 'get_power_state') as get_power_mock:
+ get_power_mock.return_value = states.POWER_OFF
+
+ with mock.patch.object(self.driver.power,
+ 'set_power_state') as set_power_mock:
+ new_state = states.POWER_ON
+ set_power_mock.side_effect = exception.PowerStateFailure(
+ pstate=new_state
+ )
+
+ self.service.change_node_power_state(self.context,
+ node.uuid,
+ new_state)
+ self.service._worker_pool.waitall()
+
+ get_power_mock.assert_called_once_with(mock.ANY)
+ set_power_mock.assert_called_once_with(mock.ANY, new_state)
+ node.refresh()
+ self.assertEqual(initial_state, node.power_state)
+ self.assertIsNone(node.target_power_state)
+ self.assertIsNotNone(node.last_error)
+ # Verify the reservation has been cleared by background task's
+ # link callback despite exception in background task.
+ self.assertIsNone(node.reservation)
+
+ def test_change_node_power_state_validate_fail(self):
+ # Test change_node_power_state where task.driver.power.validate
+ # fails and raises an exception
+ initial_state = states.POWER_ON
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ power_state=initial_state)
+ self._start_service()
+
+ with mock.patch.object(self.driver.power,
+ 'validate') as validate_mock:
+ validate_mock.side_effect = exception.InvalidParameterValue(
+ 'wrong power driver info')
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.change_node_power_state,
+ self.context,
+ node.uuid,
+ states.POWER_ON)
+
+ self.assertEqual(exception.InvalidParameterValue, exc.exc_info[0])
+
+ node.refresh()
+ validate_mock.assert_called_once_with(mock.ANY)
+ self.assertEqual(states.POWER_ON, node.power_state)
+ self.assertIsNone(node.target_power_state)
+ self.assertIsNone(node.last_error)
+
+
+@_mock_record_keepalive
+class UpdateNodeTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+ def test_update_node(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ extra={'test': 'one'})
+
+ # check that ManagerService.update_node actually updates the node
+ node.extra = {'test': 'two'}
+ res = self.service.update_node(self.context, node)
+ self.assertEqual({'test': 'two'}, res['extra'])
+
+ def test_update_node_clears_maintenance_reason(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ maintenance=True,
+ maintenance_reason='reason')
+
+ # check that ManagerService.update_node actually updates the node
+ node.maintenance = False
+ res = self.service.update_node(self.context, node)
+ self.assertFalse(res['maintenance'])
+ self.assertIsNone(res['maintenance_reason'])
+
+ def test_update_node_already_locked(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ extra={'test': 'one'})
+
+ # check that it fails if something else has locked it already
+ with task_manager.acquire(self.context, node['id'], shared=False):
+ node.extra = {'test': 'two'}
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.update_node,
+ self.context,
+ node)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeLocked, exc.exc_info[0])
+
+ # verify change did not happen
+ res = objects.Node.get_by_uuid(self.context, node['uuid'])
+ self.assertEqual({'test': 'one'}, res['extra'])
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.get_power_state')
+ def _test_associate_node(self, power_state, mock_get_power_state):
+ mock_get_power_state.return_value = power_state
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ instance_uuid=None,
+ power_state=states.NOSTATE)
+ node.instance_uuid = 'fake-uuid'
+ self.service.update_node(self.context, node)
+
+ # Check that the update was persisted: overwrite the local value,
+ # then refresh from the DB and expect the value set via update_node.
+ node.instance_uuid = 'meow'
+ node.refresh()
+ self.assertEqual('fake-uuid', node.instance_uuid)
+
+ def test_associate_node_powered_off(self):
+ self._test_associate_node(states.POWER_OFF)
+
+ def test_associate_node_powered_on(self):
+ self._test_associate_node(states.POWER_ON)
+
+ def test_update_node_invalid_driver(self):
+ existing_driver = 'fake'
+ wrong_driver = 'wrong-driver'
+ node = obj_utils.create_test_node(self.context,
+ driver=existing_driver,
+ extra={'test': 'one'},
+ instance_uuid=None,
+ task_state=states.POWER_ON)
+ # check that it fails because driver not found
+ node.driver = wrong_driver
+ node.driver_info = {}
+ self.assertRaises(exception.DriverNotFound,
+ self.service.update_node,
+ self.context,
+ node)
+
+ # verify change did not happen
+ node.refresh()
+ self.assertEqual(existing_driver, node.driver)
+
+
+@_mock_record_keepalive
+class VendorPassthruTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+
+ @mock.patch.object(task_manager.TaskManager, 'spawn_after')
+ def test_vendor_passthru_async(self, mock_spawn):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ info = {'bar': 'baz'}
+ self._start_service()
+
+ response = self.service.vendor_passthru(self.context, node.uuid,
+ 'first_method', 'POST',
+ info)
+ # Waiting to make sure the below assertions are valid.
+ self.service._worker_pool.waitall()
+
+ # Assert spawn_after was called
+ self.assertTrue(mock_spawn.called)
+ self.assertIsNone(response['return'])
+ self.assertTrue(response['async'])
+
+ node.refresh()
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ @mock.patch.object(task_manager.TaskManager, 'spawn_after')
+ def test_vendor_passthru_sync(self, mock_spawn):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ info = {'bar': 'meow'}
+ self._start_service()
+
+ response = self.service.vendor_passthru(self.context, node.uuid,
+ 'third_method_sync',
+ 'POST', info)
+ # Waiting to make sure the below assertions are valid.
+ self.service._worker_pool.waitall()
+
+ # Assert no workers were used
+ self.assertFalse(mock_spawn.called)
+ self.assertTrue(response['return'])
+ self.assertFalse(response['async'])
+
+ node.refresh()
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ def test_vendor_passthru_http_method_not_supported(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ self._start_service()
+
+ # GET not supported by first_method
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.vendor_passthru,
+ self.context, node.uuid,
+ 'first_method', 'GET', {})
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidParameterValue, exc.exc_info[0])
+
+ node.refresh()
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ def test_vendor_passthru_node_already_locked(self):
+ fake_reservation = 'test_reserv'
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ reservation=fake_reservation)
+ info = {'bar': 'baz'}
+ self._start_service()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.vendor_passthru,
+ self.context, node.uuid, 'first_method',
+ 'POST', info)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeLocked, exc.exc_info[0])
+
+ node.refresh()
+ self.assertIsNone(node.last_error)
+ # Verify the existing reservation is not broken.
+ self.assertEqual(fake_reservation, node.reservation)
+
+ def test_vendor_passthru_unsupported_method(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ info = {'bar': 'baz'}
+ self._start_service()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.vendor_passthru,
+ self.context, node.uuid,
+ 'unsupported_method', 'POST', info)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidParameterValue,
+ exc.exc_info[0])
+
+ node.refresh()
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ def test_vendor_passthru_missing_method_parameters(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ info = {'invalid_param': 'whatever'}
+ self._start_service()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.vendor_passthru,
+ self.context, node.uuid,
+ 'first_method', 'POST', info)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.MissingParameterValue, exc.exc_info[0])
+
+ node.refresh()
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ def test_vendor_passthru_vendor_interface_not_supported(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ info = {'bar': 'baz'}
+ self.driver.vendor = None
+ self._start_service()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.vendor_passthru,
+ self.context, node.uuid,
+ 'whatever_method', 'POST', info)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+
+ node.refresh()
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ def test_vendor_passthru_worker_pool_full(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ info = {'bar': 'baz'}
+ self._start_service()
+
+ with mock.patch.object(self.service,
+ '_spawn_worker') as spawn_mock:
+ spawn_mock.side_effect = exception.NoFreeConductorWorker()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.vendor_passthru,
+ self.context, node.uuid,
+ 'first_method', 'POST', info)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NoFreeConductorWorker, exc.exc_info[0])
+
+ # Waiting to make sure the below assertions are valid.
+ self.service._worker_pool.waitall()
+
+ node.refresh()
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ def test_get_node_vendor_passthru_methods(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ fake_routes = {'test_method': {'async': True,
+ 'description': 'foo',
+ 'http_methods': ['POST'],
+ 'func': None}}
+ self.driver.vendor.vendor_routes = fake_routes
+ self._start_service()
+
+ data = self.service.get_node_vendor_passthru_methods(self.context,
+ node.uuid)
+ # The function reference should not be returned
+ del fake_routes['test_method']['func']
+ self.assertEqual(fake_routes, data)
+
+ def test_get_node_vendor_passthru_methods_not_supported(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ self.driver.vendor = None
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_node_vendor_passthru_methods,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+
+ @mock.patch.object(manager.ConductorManager, '_spawn_worker')
+ def test_driver_vendor_passthru_sync(self, mock_spawn):
+ expected = {'foo': 'bar'}
+ self.driver.vendor = mock.Mock(spec=drivers_base.VendorInterface)
+ test_method = mock.MagicMock(return_value=expected)
+ self.driver.vendor.driver_routes = {
+ 'test_method': {'func': test_method,
+ 'async': False,
+ 'attach': False,
+ 'http_methods': ['POST']}}
+ self.service.init_host()
+ # init_host() called _spawn_worker because of the heartbeat
+ mock_spawn.reset_mock()
+
+ vendor_args = {'test': 'arg'}
+ response = self.service.driver_vendor_passthru(
+ self.context, 'fake', 'test_method', 'POST', vendor_args)
+
+ # Assert that the vendor interface has no custom
+ # driver_vendor_passthru()
+ self.assertFalse(hasattr(self.driver.vendor, 'driver_vendor_passthru'))
+ self.assertEqual(expected, response['return'])
+ self.assertFalse(response['async'])
+ test_method.assert_called_once_with(self.context, **vendor_args)
+ # No worker was spawned
+ self.assertFalse(mock_spawn.called)
+
+ @mock.patch.object(manager.ConductorManager, '_spawn_worker')
+ def test_driver_vendor_passthru_async(self, mock_spawn):
+ self.driver.vendor = mock.Mock(spec=drivers_base.VendorInterface)
+ test_method = mock.MagicMock()
+ self.driver.vendor.driver_routes = {
+ 'test_sync_method': {'func': test_method,
+ 'async': True,
+ 'attach': False,
+ 'http_methods': ['POST']}}
+ self.service.init_host()
+ # init_host() called _spawn_worker because of the heartbeat
+ mock_spawn.reset_mock()
+
+ vendor_args = {'test': 'arg'}
+ response = self.service.driver_vendor_passthru(
+ self.context, 'fake', 'test_sync_method', 'POST', vendor_args)
+
+ # Assert that the vendor interface has no custom
+ # driver_vendor_passthru()
+ self.assertFalse(hasattr(self.driver.vendor, 'driver_vendor_passthru'))
+ self.assertIsNone(response['return'])
+ self.assertTrue(response['async'])
+ mock_spawn.assert_called_once_with(test_method, self.context,
+ **vendor_args)
+
+ def test_driver_vendor_passthru_http_method_not_supported(self):
+ self.driver.vendor = mock.Mock(spec=drivers_base.VendorInterface)
+ self.driver.vendor.driver_routes = {
+ 'test_method': {'func': mock.MagicMock(),
+ 'async': True,
+ 'http_methods': ['POST']}}
+ self.service.init_host()
+ # GET not supported by test_method
+ exc = self.assertRaises(messaging.ExpectedException,
+ self.service.driver_vendor_passthru,
+ self.context, 'fake', 'test_method',
+ 'GET', {})
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidParameterValue,
+ exc.exc_info[0])
+
+ def test_driver_vendor_passthru_vendor_interface_not_supported(self):
+ # Test for when no vendor interface is set at all
+ self.driver.vendor = None
+ self.service.init_host()
+ exc = self.assertRaises(messaging.ExpectedException,
+ self.service.driver_vendor_passthru,
+ self.context, 'fake', 'test_method',
+ 'POST', {})
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+
+ def test_driver_vendor_passthru_method_not_supported(self):
+ # Test for when the vendor interface is set, but hasn't passed a
+ # driver_passthru_mapping to MixinVendorInterface
+ self.service.init_host()
+ exc = self.assertRaises(messaging.ExpectedException,
+ self.service.driver_vendor_passthru,
+ self.context, 'fake', 'test_method',
+ 'POST', {})
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidParameterValue,
+ exc.exc_info[0])
+
+ def test_driver_vendor_passthru_driver_not_found(self):
+ self.service.init_host()
+ self.assertRaises(messaging.ExpectedException,
+ self.service.driver_vendor_passthru,
+ self.context, 'does_not_exist', 'test_method',
+ 'POST', {})
+
+ def test_get_driver_vendor_passthru_methods(self):
+ self.driver.vendor = mock.Mock(spec=drivers_base.VendorInterface)
+ fake_routes = {'test_method': {'async': True,
+ 'description': 'foo',
+ 'http_methods': ['POST'],
+ 'func': None}}
+ self.driver.vendor.driver_routes = fake_routes
+ self.service.init_host()
+
+ data = self.service.get_driver_vendor_passthru_methods(self.context,
+ 'fake')
+ # The function reference should not be returned
+ del fake_routes['test_method']['func']
+ self.assertEqual(fake_routes, data)
+
+ def test_get_driver_vendor_passthru_methods_not_supported(self):
+ self.service.init_host()
+ self.driver.vendor = None
+ exc = self.assertRaises(
+ messaging.rpc.ExpectedException,
+ self.service.get_driver_vendor_passthru_methods,
+ self.context, 'fake')
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+
+ @mock.patch.object(drivers_base.VendorInterface, 'driver_validate')
+ def test_driver_vendor_passthru_validation_failed(self, validate_mock):
+ validate_mock.side_effect = exception.MissingParameterValue('error')
+ test_method = mock.Mock()
+ self.driver.vendor.driver_routes = {
+ 'test_method': {'func': test_method,
+ 'async': False,
+ 'http_methods': ['POST']}}
+ self.service.init_host()
+ exc = self.assertRaises(messaging.ExpectedException,
+ self.service.driver_vendor_passthru,
+ self.context, 'fake', 'test_method',
+ 'POST', {})
+ self.assertEqual(exception.MissingParameterValue,
+ exc.exc_info[0])
+ self.assertFalse(test_method.called)
+
+
+@_mock_record_keepalive
+@mock.patch.object(images, 'is_whole_disk_image')
+class ServiceDoNodeDeployTestCase(_ServiceSetUpMixin,
+ tests_db_base.DbTestCase):
+ def test_do_node_deploy_invalid_state(self, mock_iwdi):
+ mock_iwdi.return_value = False
+ self._start_service()
+ # test that node deploy fails if the node is already provisioned
+ node = obj_utils.create_test_node(
+ self.context, driver='fake', provision_state=states.ACTIVE,
+ target_provision_state=states.NOSTATE)
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_node_deploy,
+ self.context, node['uuid'])
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidStateRequested, exc.exc_info[0])
+ # This is a sync operation; last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertNotIn('is_whole_disk_image', node.driver_internal_info)
+
+ def test_do_node_deploy_maintenance(self, mock_iwdi):
+ mock_iwdi.return_value = False
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ maintenance=True)
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_node_deploy,
+ self.context, node['uuid'])
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeInMaintenance, exc.exc_info[0])
+ # This is a sync operation; last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ self.assertFalse(mock_iwdi.called)
+
+ def _test_do_node_deploy_validate_fail(self, mock_validate, mock_iwdi):
+ mock_iwdi.return_value = False
+ # InvalidParameterValue should be re-raised as InstanceDeployFailure
+ mock_validate.side_effect = exception.InvalidParameterValue('error')
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_node_deploy,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InstanceDeployFailure, exc.exc_info[0])
+ # This is a sync operation; last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertNotIn('is_whole_disk_image', node.driver_internal_info)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.validate')
+ def test_do_node_deploy_validate_fail(self, mock_validate, mock_iwdi):
+ self._test_do_node_deploy_validate_fail(mock_validate, mock_iwdi)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test_do_node_deploy_power_validate_fail(self, mock_validate,
+ mock_iwdi):
+ self._test_do_node_deploy_validate_fail(mock_validate, mock_iwdi)
+
+ @mock.patch('ironic.conductor.task_manager.TaskManager.process_event')
+ def test_deploy_with_nostate_converts_to_available(self, mock_pe,
+ mock_iwdi):
+ # expressly create a node using the Juno-era NOSTATE state
+ # and assert that it does not result in an error, and that the state
+ # is converted to the new AVAILABLE state.
+ # Mock the process_event call, because the transitions from
+ # AVAILABLE are tested thoroughly elsewhere
+ # NOTE(deva): This test can be deleted after Kilo is released
+ mock_iwdi.return_value = False
+ self._start_service()
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.NOSTATE)
+ self.assertEqual(states.NOSTATE, node.provision_state)
+ self.service.do_node_deploy(self.context, node.uuid)
+ self.assertTrue(mock_pe.called)
+ node.refresh()
+ self.assertEqual(states.AVAILABLE, node.provision_state)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertFalse(node.driver_internal_info['is_whole_disk_image'])
+
+ def test_do_node_deploy_partial_ok(self, mock_iwdi):
+ mock_iwdi.return_value = False
+ self._start_service()
+ thread = self.service._spawn_worker(lambda: None)
+ with mock.patch.object(self.service, '_spawn_worker') as mock_spawn:
+ mock_spawn.return_value = thread
+
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.AVAILABLE)
+
+ self.service.do_node_deploy(self.context, node.uuid)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.DEPLOYING, node.provision_state)
+ self.assertEqual(states.ACTIVE, node.target_provision_state)
+ # This is a sync operation; last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_spawn.assert_called_once_with(mock.ANY, mock.ANY,
+ mock.ANY, None)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertFalse(node.driver_internal_info['is_whole_disk_image'])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test_do_node_deploy_rebuild_active_state(self, mock_deploy, mock_iwdi):
+ # This tests manager.do_node_deploy(), the 'else' path of
+ # 'if new_state == states.DEPLOYDONE'. The node's states
+ # aren't changed in this case.
+ mock_iwdi.return_value = True
+ self._start_service()
+ mock_deploy.return_value = states.DEPLOYING
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.ACTIVE,
+ target_provision_state=states.NOSTATE,
+ instance_info={'image_source': uuidutils.generate_uuid(),
+ 'kernel': 'aaaa', 'ramdisk': 'bbbb'},
+ driver_internal_info={'is_whole_disk_image': False})
+
+ self.service.do_node_deploy(self.context, node.uuid, rebuild=True)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.DEPLOYING, node.provision_state)
+ self.assertEqual(states.ACTIVE, node.target_provision_state)
+ # last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_deploy.assert_called_once_with(mock.ANY)
+ # Verify instance_info values have been cleared.
+ self.assertNotIn('kernel', node.instance_info)
+ self.assertNotIn('ramdisk', node.instance_info)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ # Verify is_whole_disk_image reflects correct value on rebuild.
+ self.assertTrue(node.driver_internal_info['is_whole_disk_image'])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test_do_node_deploy_rebuild_active_state_waiting(self, mock_deploy,
+ mock_iwdi):
+ mock_iwdi.return_value = False
+ self._start_service()
+ mock_deploy.return_value = states.DEPLOYWAIT
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.ACTIVE,
+ target_provision_state=states.NOSTATE,
+ instance_info={'image_source': uuidutils.generate_uuid()})
+
+ self.service.do_node_deploy(self.context, node.uuid, rebuild=True)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.DEPLOYWAIT, node.provision_state)
+ self.assertEqual(states.ACTIVE, node.target_provision_state)
+ # last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_deploy.assert_called_once_with(mock.ANY)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertFalse(node.driver_internal_info['is_whole_disk_image'])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test_do_node_deploy_rebuild_active_state_done(self, mock_deploy,
+ mock_iwdi):
+ mock_iwdi.return_value = False
+ self._start_service()
+ mock_deploy.return_value = states.DEPLOYDONE
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.ACTIVE,
+ target_provision_state=states.NOSTATE)
+
+ self.service.do_node_deploy(self.context, node.uuid, rebuild=True)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.ACTIVE, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ # last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_deploy.assert_called_once_with(mock.ANY)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertFalse(node.driver_internal_info['is_whole_disk_image'])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test_do_node_deploy_rebuild_deployfail_state(self, mock_deploy,
+ mock_iwdi):
+ mock_iwdi.return_value = False
+ self._start_service()
+ mock_deploy.return_value = states.DEPLOYDONE
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.DEPLOYFAIL,
+ target_provision_state=states.NOSTATE)
+
+ self.service.do_node_deploy(self.context, node.uuid, rebuild=True)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.ACTIVE, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ # last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_deploy.assert_called_once_with(mock.ANY)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertFalse(node.driver_internal_info['is_whole_disk_image'])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test_do_node_deploy_rebuild_error_state(self, mock_deploy, mock_iwdi):
+ mock_iwdi.return_value = False
+ self._start_service()
+ mock_deploy.return_value = states.DEPLOYDONE
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.ERROR,
+ target_provision_state=states.NOSTATE)
+
+ self.service.do_node_deploy(self.context, node.uuid, rebuild=True)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.ACTIVE, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ # last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_deploy.assert_called_once_with(mock.ANY)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertFalse(node.driver_internal_info['is_whole_disk_image'])
+
+ def test_do_node_deploy_rebuild_from_available_state(self, mock_iwdi):
+ mock_iwdi.return_value = False
+ self._start_service()
+ # test node will not rebuild if state is AVAILABLE
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.AVAILABLE)
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_node_deploy,
+ self.context, node['uuid'], rebuild=True)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidStateRequested, exc.exc_info[0])
+ # last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertNotIn('is_whole_disk_image', node.driver_internal_info)
+
+ def test_do_node_deploy_worker_pool_full(self, mock_iwdi):
+ mock_iwdi.return_value = False
+ prv_state = states.AVAILABLE
+ tgt_prv_state = states.NOSTATE
+ node = obj_utils.create_test_node(self.context,
+ provision_state=prv_state,
+ target_provision_state=tgt_prv_state,
+ last_error=None, driver='fake')
+ self._start_service()
+
+ with mock.patch.object(self.service, '_spawn_worker') as mock_spawn:
+ mock_spawn.side_effect = exception.NoFreeConductorWorker()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_node_deploy,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NoFreeConductorWorker, exc.exc_info[0])
+ self.service._worker_pool.waitall()
+ node.refresh()
+ # Make sure things were rolled back
+ self.assertEqual(prv_state, node.provision_state)
+ self.assertEqual(tgt_prv_state, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+ self.assertFalse(node.driver_internal_info['is_whole_disk_image'])
+
+
+@_mock_record_keepalive
+class DoNodeDeployTearDownTestCase(_ServiceSetUpMixin,
+ tests_db_base.DbTestCase):
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.prepare')
+ def test__do_node_deploy_driver_raises_prepare_error(self, mock_prepare,
+ mock_deploy):
+ self._start_service()
+ # test when driver.deploy.prepare raises an exception
+ mock_prepare.side_effect = exception.InstanceDeployFailure('test')
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.DEPLOYING,
+ target_provision_state=states.ACTIVE)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ self.assertRaises(exception.InstanceDeployFailure,
+ manager.do_node_deploy, task,
+ self.service.conductor.id)
+ node.refresh()
+ self.assertEqual(states.DEPLOYFAIL, node.provision_state)
+ # NOTE(deva): failing a deploy does not clear the target state
+ # any longer. Instead, it is cleared when the instance
+ # is deleted.
+ self.assertEqual(states.ACTIVE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ self.assertTrue(mock_prepare.called)
+ self.assertFalse(mock_deploy.called)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test__do_node_deploy_driver_raises_error(self, mock_deploy):
+ self._start_service()
+ # test when driver.deploy.deploy raises an exception
+ mock_deploy.side_effect = exception.InstanceDeployFailure('test')
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.DEPLOYING,
+ target_provision_state=states.ACTIVE)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ self.assertRaises(exception.InstanceDeployFailure,
+ manager.do_node_deploy, task,
+ self.service.conductor.id)
+ node.refresh()
+ self.assertEqual(states.DEPLOYFAIL, node.provision_state)
+ # NOTE(deva): failing a deploy does not clear the target state
+ # any longer. Instead, it is cleared when the instance
+ # is deleted.
+ self.assertEqual(states.ACTIVE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ mock_deploy.assert_called_once_with(mock.ANY)
+
+ @mock.patch.object(manager, '_store_configdrive')
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test__do_node_deploy_ok(self, mock_deploy, mock_store):
+ self._start_service()
+ # test when driver.deploy.deploy returns DEPLOYDONE
+ mock_deploy.return_value = states.DEPLOYDONE
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.DEPLOYING,
+ target_provision_state=states.ACTIVE)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ manager.do_node_deploy(task, self.service.conductor.id)
+ node.refresh()
+ self.assertEqual(states.ACTIVE, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ mock_deploy.assert_called_once_with(mock.ANY)
+ # assert _store_configdrive wasn't invoked
+ self.assertFalse(mock_store.called)
+
+ @mock.patch.object(manager, '_store_configdrive')
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test__do_node_deploy_ok_configdrive(self, mock_deploy, mock_store):
+ self._start_service()
+ # test when driver.deploy.deploy returns DEPLOYDONE
+ mock_deploy.return_value = states.DEPLOYDONE
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.DEPLOYING,
+ target_provision_state=states.ACTIVE)
+ task = task_manager.TaskManager(self.context, node.uuid)
+ configdrive = 'foo'
+
+ manager.do_node_deploy(task, self.service.conductor.id,
+ configdrive=configdrive)
+ node.refresh()
+ self.assertEqual(states.ACTIVE, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ mock_deploy.assert_called_once_with(mock.ANY)
+ mock_store.assert_called_once_with(task.node, configdrive)
+
+ @mock.patch.object(swift, 'SwiftAPI')
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test__do_node_deploy_configdrive_swift_error(self, mock_deploy,
+ mock_swift):
+ CONF.set_override('configdrive_use_swift', True, group='conductor')
+ self._start_service()
+ # test that a failure storing the configdrive in Swift fails the deploy
+ mock_deploy.return_value = states.DEPLOYDONE
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.DEPLOYING,
+ target_provision_state=states.ACTIVE)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ mock_swift.side_effect = exception.SwiftOperationError('error')
+ self.assertRaises(exception.SwiftOperationError,
+ manager.do_node_deploy, task,
+ self.service.conductor.id,
+ configdrive=b'fake config drive')
+ node.refresh()
+ self.assertEqual(states.DEPLOYFAIL, node.provision_state)
+ self.assertEqual(states.ACTIVE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ self.assertFalse(mock_deploy.called)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.deploy')
+ def test__do_node_deploy_ok_2(self, mock_deploy):
+ # NOTE(rloo): a different way of testing for the same thing as in
+ # test__do_node_deploy_ok()
+ self._start_service()
+ # test when driver.deploy.deploy returns DEPLOYDONE
+ mock_deploy.return_value = states.DEPLOYDONE
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ task = task_manager.TaskManager(self.context, node.uuid)
+ task.process_event('deploy')
+
+ manager.do_node_deploy(task, self.service.conductor.id)
+ node.refresh()
+ self.assertEqual(states.ACTIVE, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ mock_deploy.assert_called_once_with(mock.ANY)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.clean_up')
+ def test__check_deploy_timeouts(self, mock_cleanup):
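+ # test that a node in DEPLOYWAIT past deploy_callback_timeout is
+ # moved to DEPLOYFAIL and cleaned up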
+ self._start_service()
+ CONF.set_override('deploy_callback_timeout', 1, group='conductor')
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.DEPLOYWAIT,
+ target_provision_state=states.ACTIVE,
+ provision_updated_at=datetime.datetime(2000, 1, 1, 0, 0))
+
+ self.service._check_deploy_timeouts(self.context)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.DEPLOYFAIL, node.provision_state)
+ self.assertEqual(states.ACTIVE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ mock_cleanup.assert_called_once_with(mock.ANY)
+
+ def test__check_cleanwait_timeouts(self):
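+ # test that a node in CLEANWAIT past clean_callback_timeout is
+ # moved to CLEANFAIL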
+ self._start_service()
+ CONF.set_override('clean_callback_timeout', 1, group='conductor')
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANWAIT,
+ target_provision_state=states.AVAILABLE,
+ provision_updated_at=datetime.datetime(2000, 1, 1, 0, 0))
+
+ self.service._check_cleanwait_timeouts(self.context)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.CLEANFAIL, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+
+ def test_do_node_tear_down_invalid_state(self):
+ self._start_service()
+ # test node.provision_state is incorrect for tear_down
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.AVAILABLE)
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_node_tear_down,
+ self.context, node['uuid'])
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidStateRequested, exc.exc_info[0])
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test_do_node_tear_down_validate_fail(self, mock_validate):
+ # InvalidParameterValue should be re-raised as InstanceDeployFailure
+ mock_validate.side_effect = exception.InvalidParameterValue('error')
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.ACTIVE,
+ target_provision_state=states.NOSTATE)
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_node_tear_down,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InstanceDeployFailure, exc.exc_info[0])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.tear_down')
+ def test_do_node_tear_down_driver_raises_error(self, mock_tear_down):
+ # test when driver.deploy.tear_down raises exception
+ node = obj_utils.create_test_node(
+ self.context, driver='fake', provision_state=states.DELETING,
+ target_provision_state=states.AVAILABLE,
+ instance_info={'foo': 'bar'},
+ driver_internal_info={'is_whole_disk_image': False})
+
+ task = task_manager.TaskManager(self.context, node.uuid)
+ self._start_service()
+ mock_tear_down.side_effect = exception.InstanceDeployFailure('test')
+ self.assertRaises(exception.InstanceDeployFailure,
+ self.service._do_node_tear_down, task)
+ node.refresh()
+ self.assertEqual(states.ERROR, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ # Assert instance_info was erased
+ self.assertEqual({}, node.instance_info)
+ mock_tear_down.assert_called_once_with(mock.ANY)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._do_node_clean')
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.tear_down')
+ def test__do_node_tear_down_ok(self, mock_tear_down, mock_clean):
+ # test when driver.deploy.tear_down succeeds
+ node = obj_utils.create_test_node(
+ self.context, driver='fake', provision_state=states.DELETING,
+ target_provision_state=states.AVAILABLE,
+ instance_uuid=uuidutils.generate_uuid(),
+ instance_info={'foo': 'bar'},
+ driver_internal_info={'is_whole_disk_image': False,
+ 'instance': {'ephemeral_gb': 10}})
+
+ task = task_manager.TaskManager(self.context, node.uuid)
+ self._start_service()
+ self.service._do_node_tear_down(task)
+ node.refresh()
+ # Node will be moved to AVAILABLE after cleaning, not tested here
+ self.assertEqual(states.CLEANING, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ self.assertIsNone(node.instance_uuid)
+ self.assertEqual({}, node.instance_info)
+ self.assertNotIn('instance', node.driver_internal_info)
+ mock_tear_down.assert_called_once_with(mock.ANY)
+ mock_clean.assert_called_once_with(mock.ANY)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._do_node_clean')
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.tear_down')
+ def _test_do_node_tear_down_from_state(self, init_state, mock_tear_down,
+ mock_clean):
+ node = obj_utils.create_test_node(
+ self.context, driver='fake', uuid=uuidutils.generate_uuid(),
+ provision_state=init_state,
+ target_provision_state=states.AVAILABLE,
+ driver_internal_info={'is_whole_disk_image': False})
+
+ self.service.do_node_tear_down(self.context, node.uuid)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ # Node will be moved to AVAILABLE after cleaning, not tested here
+ self.assertEqual(states.CLEANING, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ self.assertEqual({}, node.instance_info)
+ mock_tear_down.assert_called_once_with(mock.ANY)
+ mock_clean.assert_called_once_with(mock.ANY)
+
+ def test__do_node_tear_down_from_valid_states(self):
+ valid_states = [states.ACTIVE, states.DEPLOYWAIT, states.DEPLOYFAIL,
+ states.ERROR]
+ self._start_service()
+ for state in valid_states:
+ self._test_do_node_tear_down_from_state(state)
+
+ # NOTE(deva): partial tear-down was broken. A node left in a state of
+ # DELETING could not have tear_down called on it a second time.
+ # Thus, I have removed the unit test, which faultily asserted
+ # only that a node could be left in a state of incomplete
+ # deletion -- not that such a node's deletion could later be
+ # completed.
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def test_do_node_tear_down_worker_pool_full(self, mock_spawn):
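+ # test that NoFreeConductorWorker is raised and the node state is
+ # rolled back when the worker pool is full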
+ prv_state = states.ACTIVE
+ tgt_prv_state = states.NOSTATE
+ fake_instance_info = {'foo': 'bar'}
+ driver_internal_info = {'is_whole_disk_image': False}
+ node = obj_utils.create_test_node(
+ self.context, driver='fake', provision_state=prv_state,
+ target_provision_state=tgt_prv_state,
+ instance_info=fake_instance_info,
+ driver_internal_info=driver_internal_info, last_error=None)
+ self._start_service()
+
+ mock_spawn.side_effect = exception.NoFreeConductorWorker()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_node_tear_down,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NoFreeConductorWorker, exc.exc_info[0])
+ self.service._worker_pool.waitall()
+ node.refresh()
+ # Assert instance_info/driver_internal_info was not touched
+ self.assertEqual(fake_instance_info, node.instance_info)
+ self.assertEqual(driver_internal_info, node.driver_internal_info)
+ # Make sure things were rolled back
+ self.assertEqual(prv_state, node.provision_state)
+ self.assertEqual(tgt_prv_state, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def test_do_provisioning_action_worker_pool_full(self, mock_spawn):
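+ # test that a provisioning action fails with NoFreeConductorWorker
+ # and rolls back when the worker pool is full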
+ prv_state = states.MANAGEABLE
+ tgt_prv_state = states.CLEANING
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=prv_state,
+ target_provision_state=tgt_prv_state,
+ last_error=None)
+ self._start_service()
+
+ mock_spawn.side_effect = exception.NoFreeConductorWorker()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.do_provisioning_action,
+ self.context, node.uuid, 'provide')
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NoFreeConductorWorker, exc.exc_info[0])
+ self.service._worker_pool.waitall()
+ node.refresh()
+ # Make sure things were rolled back
+ self.assertEqual(prv_state, node.provision_state)
+ self.assertEqual(tgt_prv_state, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def test_do_provision_action_provide(self, mock_spawn):
+ # test when a node is cleaned going from manageable to available
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.MANAGEABLE,
+ target_provision_state=states.AVAILABLE)
+
+ self._start_service()
+ self.service.do_provisioning_action(self.context, node.uuid, 'provide')
+ node.refresh()
+ # Node will be moved to AVAILABLE after cleaning, not tested here
+ self.assertEqual(states.CLEANING, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ mock_spawn.assert_called_with(self.service._do_node_clean, mock.ANY)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def test_do_provision_action_manage(self, mock_spawn):
+ # test when a node is verified going from enroll to manageable
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.ENROLL,
+ target_provision_state=states.MANAGEABLE)
+
+ self._start_service()
+ self.service.do_provisioning_action(self.context, node.uuid, 'manage')
+ node.refresh()
+ # Node will be moved to MANAGEABLE after verification, not tested here
+ self.assertEqual(states.VERIFYING, node.provision_state)
+ self.assertEqual(states.MANAGEABLE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ mock_spawn.assert_called_with(self.service._do_node_verify, mock.ANY)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def test_do_provision_action_abort(self, mock_spawn):
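+ # test that 'abort' on a node in CLEANWAIT moves it to CLEANFAIL
+ # and spawns the clean-abort worker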
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANWAIT,
+ target_provision_state=states.AVAILABLE)
+
+ self._start_service()
+ self.service.do_provisioning_action(self.context, node.uuid, 'abort')
+ node.refresh()
+ # Node will be moved to AVAILABLE after cleaning, not tested here
+ self.assertEqual(states.CLEANFAIL, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ mock_spawn.assert_called_with(self.service._do_node_clean_abort,
+ mock.ANY)
+
+ def test_do_provision_action_abort_clean_step_not_abortable(self):
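+ # test that aborting while a non-abortable clean step is running only
+ # marks the step with 'abort_after' and leaves the node in CLEANWAIT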
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANWAIT,
+ target_provision_state=states.AVAILABLE,
+ clean_step={'step': 'foo', 'abortable': False})
+
+ self._start_service()
+ self.service.do_provisioning_action(self.context, node.uuid, 'abort')
+ node.refresh()
+ # Assert the current clean step was marked to be aborted later
+ self.assertIn('abort_after', node.clean_step)
+ self.assertTrue(node.clean_step['abort_after'])
+ # Make sure things stay as they were before
+ self.assertEqual(states.CLEANWAIT, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+
+ @mock.patch.object(fake.FakeDeploy, 'tear_down_cleaning', autospec=True)
+ def _test__do_node_clean_abort(self, step_name, tear_mock):
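+ # helper: aborting cleaning should tear down the cleaning environment
+ # and record last_error (including the step name when one is given)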
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANFAIL,
+ target_provision_state=states.AVAILABLE,
+ clean_step={'step': 'foo', 'abortable': True})
+
+ with task_manager.acquire(self.context, node.uuid) as task:
+ self.service._do_node_clean_abort(task, step_name=step_name)
+ self.assertIsNotNone(task.node.last_error)
+ tear_mock.assert_called_once_with(task.driver.deploy, task)
+ if step_name:
+ self.assertIn(step_name, task.node.last_error)
+
+ def test__do_node_clean_abort(self):
+ self._test__do_node_clean_abort(None)
+
+ def test__do_node_clean_abort_with_step_name(self):
+ self._test__do_node_clean_abort('foo')
+
+ @mock.patch.object(fake.FakeDeploy, 'tear_down_cleaning', autospec=True)
+ def test__do_node_clean_abort_tear_down_fail(self, tear_mock):
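+ # test that a failure in tear_down_cleaning during abort still records
+ # an error and puts the node into maintenance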
+ tear_mock.side_effect = Exception('Surprise')
+
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANFAIL,
+ target_provision_state=states.AVAILABLE,
+ clean_step={'step': 'foo', 'abortable': True})
+
+ with task_manager.acquire(self.context, node.uuid) as task:
+ self.service._do_node_clean_abort(task)
+ tear_mock.assert_called_once_with(task.driver.deploy, task)
+ self.assertIsNotNone(task.node.last_error)
+ self.assertIsNotNone(task.node.maintenance_reason)
+ self.assertTrue(task.node.maintenance)
+
+
+@_mock_record_keepalive
+class DoNodeCleanTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+ def setUp(self):
+ super(DoNodeCleanTestCase, self).setUp()
+ self.config(clean_nodes=True, group='conductor')
+ self.power_update = {
+ 'step': 'update_firmware', 'priority': 10, 'interface': 'power'}
+ self.deploy_update = {
+ 'step': 'update_firmware', 'priority': 10, 'interface': 'deploy'}
+ self.deploy_erase = {
+ 'step': 'erase_disks', 'priority': 20, 'interface': 'deploy'}
+ # Cleaning should be executed in this order
+ self.clean_steps = [self.deploy_erase, self.power_update,
+ self.deploy_update]
+ self.next_clean_steps = self.clean_steps[1:]
+ # Zap step
+ self.deploy_raid = {
+ 'step': 'build_raid', 'priority': 0, 'interface': 'deploy'}
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.get_clean_steps')
+ @mock.patch('ironic.drivers.modules.fake.FakePower.get_clean_steps')
+ def test__get_cleaning_steps(self, mock_power_steps, mock_deploy_steps):
+ # Test getting cleaning steps, with one driver returning None, two
+ # conflicting priorities, and asserting they are ordered properly.
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE)
+
+ mock_power_steps.return_value = [self.power_update]
+ mock_deploy_steps.return_value = [self.deploy_erase,
+ self.deploy_update]
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ steps = manager._get_cleaning_steps(task, enabled=False)
+
+ self.assertEqual(self.clean_steps, steps)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.get_clean_steps')
+ @mock.patch('ironic.drivers.modules.fake.FakePower.get_clean_steps')
+ def test__get_cleaning_steps_only_enabled(self, mock_power_steps,
+ mock_deploy_steps):
+ # Test getting only the enabled cleaning steps, with one driver
+ # returning None and two conflicting priorities, and assert they are
+ # ordered properly. The zero-priority zap step should be discarded.
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE)
+
+ mock_power_steps.return_value = [self.power_update]
+ mock_deploy_steps.return_value = [self.deploy_erase,
+ self.deploy_update,
+ self.deploy_raid]
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=True) as task:
+ steps = manager._get_cleaning_steps(task, enabled=True)
+
+ self.assertEqual(self.clean_steps, steps)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def test_continue_node_clean_worker_pool_full(self, mock_spawn):
+ # Test the appropriate exception is raised if the worker pool is full
+ prv_state = states.CLEANWAIT
+ tgt_prv_state = states.AVAILABLE
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=prv_state,
+ target_provision_state=tgt_prv_state,
+ last_error=None)
+ self._start_service()
+
+ mock_spawn.side_effect = exception.NoFreeConductorWorker()
+
+ self.assertRaises(exception.NoFreeConductorWorker,
+ self.service.continue_node_clean,
+ self.context, node.uuid)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def test_continue_node_clean_wrong_state(self, mock_spawn):
+ # Test the appropriate exception is raised if node isn't already
+ # in CLEANWAIT state
+ prv_state = states.DELETING
+ tgt_prv_state = states.AVAILABLE
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=prv_state,
+ target_provision_state=tgt_prv_state,
+ last_error=None)
+ self._start_service()
+
+ self.assertRaises(exception.InvalidStateRequested,
+ self.service.continue_node_clean,
+ self.context, node.uuid)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+ # Make sure things were rolled back
+ self.assertEqual(prv_state, node.provision_state)
+ self.assertEqual(tgt_prv_state, node.target_provision_state)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def _continue_node_clean(self, return_state, mock_spawn):
+ # test a node can continue cleaning via RPC
+ prv_state = return_state
+ tgt_prv_state = states.AVAILABLE
+ driver_info = {'clean_steps': self.clean_steps}
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=prv_state,
+ target_provision_state=tgt_prv_state,
+ last_error=None,
+ driver_internal_info=driver_info,
+ clean_step=self.clean_steps[0])
+ self._start_service()
+ self.service.continue_node_clean(self.context, node.uuid)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ mock_spawn.assert_called_with(self.service._do_next_clean_step,
+ mock.ANY, self.next_clean_steps)
+
+ def test_continue_node_clean(self):
+ self._continue_node_clean(states.CLEANWAIT)
+
+ def test_continue_node_clean_backward_compat(self):
+ self._continue_node_clean(states.CLEANING)
+
+ def test_continue_node_clean_abort(self):
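+ # test that a deferred abort (abort_after) takes effect when cleaning
+ # continues, moving the node to CLEANFAIL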
+ last_clean_step = self.clean_steps[0]
+ last_clean_step['abortable'] = False
+ last_clean_step['abort_after'] = True
+ driver_info = {'clean_steps': self.clean_steps}
+ node = obj_utils.create_test_node(
+ self.context, driver='fake', provision_state=states.CLEANWAIT,
+ target_provision_state=states.AVAILABLE, last_error=None,
+ driver_internal_info=driver_info, clean_step=self.clean_steps[0])
+
+ self._start_service()
+ self.service.continue_node_clean(self.context, node.uuid)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.CLEANFAIL, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ # assert the clean step name is in the last error message
+ self.assertIn(self.clean_steps[0]['step'], node.last_error)
+
+ def test_continue_node_clean_abort_last_clean_step(self):
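+ # test that a deferred abort on the last clean step simply finishes
+ # cleaning and makes the node AVAILABLE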
+ last_clean_step = self.clean_steps[0]
+ last_clean_step['abortable'] = False
+ last_clean_step['abort_after'] = True
+ driver_info = {'clean_steps': [self.clean_steps[0]]}
+ node = obj_utils.create_test_node(
+ self.context, driver='fake', provision_state=states.CLEANWAIT,
+ target_provision_state=states.AVAILABLE, last_error=None,
+ driver_internal_info=driver_info, clean_step=self.clean_steps[0])
+
+ self._start_service()
+ self.service.continue_node_clean(self.context, node.uuid)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.AVAILABLE, node.provision_state)
+ self.assertIsNone(node.target_provision_state)
+ self.assertIsNone(node.last_error)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test__do_node_clean_validate_fail(self, mock_validate):
+ # InvalidParameterValue should cause the node to go to CLEANFAIL
+ self.config(clean_nodes=True, group='conductor')
+ mock_validate.side_effect = exception.InvalidParameterValue('error')
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE)
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_node_clean(task)
+ node.refresh()
+ self.assertEqual(states.CLEANFAIL, node.provision_state)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test__do_node_clean_disabled(self, mock_validate):
+ self.config(clean_nodes=False, group='conductor')
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None)
+
+ self._start_service()
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_node_clean(task)
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ # Assert that the node was moved to available without cleaning
+ self.assertFalse(mock_validate.called)
+ self.assertEqual(states.AVAILABLE, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ self.assertEqual({}, node.clean_step)
+ self.assertIsNone(node.driver_internal_info.get('clean_steps'))
+
+ @mock.patch('ironic.conductor.manager.set_node_cleaning_steps')
+ @mock.patch('ironic.conductor.manager.ConductorManager.'
+ '_do_next_clean_step')
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test__do_node_clean(self, mock_validate, mock_next_step, mock_steps):
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ power_state=states.POWER_OFF,
+ driver_internal_info={'clean_steps': []})
+
+ mock_steps.return_value = self.clean_steps
+
+ self._start_service()
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_node_clean(task)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ mock_validate.assert_called_once_with(task)
+ mock_next_step.assert_called_once_with(mock.ANY, [])
+ mock_steps.assert_called_once_with(task)
+
+ # Check that state didn't change
+ self.assertEqual(states.CLEANING, node.provision_state)
+ self.assertEqual(states.AVAILABLE, node.target_provision_state)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.execute_clean_step')
+ def _do_next_clean_step_first_step_async(self, return_state, mock_execute):
+ # Execute the first async clean step on a node
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step={})
+ mock_execute.return_value = return_state
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(task, self.clean_steps)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ self.assertEqual(states.CLEANWAIT, node.provision_state)
+ self.assertEqual(self.clean_steps[0], node.clean_step)
+ mock_execute.assert_called_once_with(mock.ANY, self.clean_steps[0])
+
+ def test_do_next_clean_step_first_step_async(self):
+ self._do_next_clean_step_first_step_async(states.CLEANWAIT)
+
+ def test_do_next_clean_step_first_step_async_backward_compat(self):
+ self._do_next_clean_step_first_step_async(states.CLEANING)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.execute_clean_step')
+ def _do_next_clean_step_continue_from_last_step(self, return_state,
+ mock_execute):
+ # Resume an in-progress cleaning after the first async step
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step=self.clean_steps[0])
+ mock_execute.return_value = return_state
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(task, self.next_clean_steps)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ self.assertEqual(states.CLEANWAIT, node.provision_state)
+ self.assertEqual(self.clean_steps[1], node.clean_step)
+ mock_execute.assert_called_once_with(mock.ANY, self.clean_steps[1])
+
+ def test_do_next_clean_step_continue_from_last_step(self):
+ self._do_next_clean_step_continue_from_last_step(states.CLEANWAIT)
+
+ def test_do_next_clean_step_continue_from_last_step_backward_compat(self):
+ self._do_next_clean_step_continue_from_last_step(states.CLEANING)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.execute_clean_step')
+ def _do_next_clean_step_continue_from_last_cleaning(self, return_state,
+ mock_execute):
+ # Resume an in-progress cleaning after the first async step
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step=self.clean_steps[0])
+ mock_execute.return_value = return_state
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(task, self.next_clean_steps)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ self.assertEqual(states.CLEANWAIT, node.provision_state)
+ self.assertEqual(self.clean_steps[1], node.clean_step)
+ mock_execute.assert_called_once_with(mock.ANY, self.clean_steps[1])
+
+ def test_do_next_clean_step_continue_from_last_cleaning(self):
+ self._do_next_clean_step_continue_from_last_cleaning(states.CLEANWAIT)
+
+ def test_do_next_clean_step_continue_from_last_cleaning_backward_com(self):
+ self._do_next_clean_step_continue_from_last_cleaning(states.CLEANING)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.execute_clean_step')
+ def test__do_next_clean_step_last_step_noop(self, mock_execute):
+ # Resume where last_step is the last cleaning step, should be noop
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step=self.clean_steps[-1])
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(task, [])
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ # Cleaning should be complete without calling additional steps
+ self.assertEqual(states.AVAILABLE, node.provision_state)
+ self.assertEqual({}, node.clean_step)
+ self.assertFalse(mock_execute.called)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.execute_clean_step')
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.execute_clean_step')
+ def test__do_next_clean_step_all(self, mock_deploy_execute,
+ mock_power_execute):
+ # Run all steps from start to finish (all synchronous)
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step={})
+ mock_deploy_execute.return_value = None
+ mock_power_execute.return_value = None
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(task, self.clean_steps)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ # Cleaning should be complete
+ self.assertEqual(states.AVAILABLE, node.provision_state)
+ self.assertEqual({}, node.clean_step)
+ mock_power_execute.assert_called_once_with(mock.ANY,
+ self.clean_steps[1])
+ mock_deploy_execute.assert_has_calls([
+ mock.call(mock.ANY, self.clean_steps[0]),
+ mock.call(mock.ANY, self.clean_steps[2])])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.execute_clean_step')
+ @mock.patch.object(fake.FakeDeploy, 'tear_down_cleaning', autospec=True)
+ def test__do_next_clean_step_fail(self, tear_mock, mock_execute):
+ # When a clean step fails, go to CLEANFAIL
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step={})
+ mock_execute.side_effect = Exception()
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(task, self.clean_steps)
+ tear_mock.assert_called_once_with(task.driver.deploy, task)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ # Make sure we go to CLEANFAIL, clear clean_steps
+ self.assertEqual(states.CLEANFAIL, node.provision_state)
+ self.assertEqual({}, node.clean_step)
+ self.assertIsNotNone(node.last_error)
+ self.assertTrue(node.maintenance)
+ mock_execute.assert_called_once_with(mock.ANY, self.clean_steps[0])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.execute_clean_step')
+ @mock.patch.object(fake.FakeDeploy, 'tear_down_cleaning', autospec=True)
+ def test__do_next_clean_step_fail_in_tear_down_cleaning(self, tear_mock,
+ mock_execute):
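+ # When tear_down_cleaning raises, go to CLEANFAIL and set maintenance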
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step={})
+
+ mock_execute.return_value = None
+ tear_mock.side_effect = Exception()
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(task, self.clean_steps)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ # Make sure we go to CLEANFAIL, clear clean_steps
+ self.assertEqual(states.CLEANFAIL, node.provision_state)
+ self.assertEqual({}, node.clean_step)
+ self.assertIsNotNone(node.last_error)
+ self.assertEqual(1, tear_mock.call_count)
+ self.assertTrue(node.maintenance)
+ mock_execute.assert_called_once_with(mock.ANY, self.clean_steps[0])
+
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.execute_clean_step')
+ def test__do_next_clean_step_no_steps(self, mock_execute):
+ # Resume where there are no steps, should be a noop
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step={})
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(
+ task, [])
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ # Cleaning should be complete without calling additional steps
+ self.assertEqual(states.AVAILABLE, node.provision_state)
+ self.assertEqual({}, node.clean_step)
+ self.assertFalse(mock_execute.called)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.execute_clean_step')
+ @mock.patch('ironic.drivers.modules.fake.FakeDeploy.execute_clean_step')
+ def test__do_next_clean_step_bad_step_return_value(
+ self, deploy_exec_mock, power_exec_mock):
+ # When a clean step fails, go to CLEANFAIL
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step={})
+ deploy_exec_mock.return_value = "foo"
+
+ self._start_service()
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_next_clean_step(task, self.clean_steps)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ # Make sure we go to CLEANFAIL, clear clean_steps
+ self.assertEqual(states.CLEANFAIL, node.provision_state)
+ self.assertEqual({}, node.clean_step)
+ self.assertIsNotNone(node.last_error)
+ self.assertTrue(node.maintenance)
+ deploy_exec_mock.assert_called_once_with(mock.ANY,
+ self.clean_steps[0])
+ # Make sure we don't execute any other step and return
+ self.assertFalse(power_exec_mock.called)
+
+ @mock.patch('ironic.conductor.manager._get_cleaning_steps')
+ def test_set_node_cleaning_steps(self, mock_steps):
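+ # set_node_cleaning_steps should store the steps in
+ # driver_internal_info and reset the current clean_step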
+ mock_steps.return_value = self.clean_steps
+
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANING,
+ target_provision_state=states.AVAILABLE,
+ last_error=None,
+ clean_step=None)
+
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ manager.set_node_cleaning_steps(task)
+ node.refresh()
+ self.assertEqual(self.clean_steps,
+ task.node.driver_internal_info['clean_steps'])
+ self.assertEqual({}, node.clean_step)
+
+ def test__get_node_next_clean_steps(self):
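+ # the steps after the node's current clean_step should be returned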
+ driver_internal_info = {'clean_steps': self.clean_steps}
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANWAIT,
+ target_provision_state=states.AVAILABLE,
+ driver_internal_info=driver_internal_info,
+ last_error=None,
+ clean_step=self.clean_steps[0])
+
+ with task_manager.acquire(self.context, node.uuid) as task:
+ steps = self.service._get_node_next_clean_steps(task)
+ self.assertEqual(self.next_clean_steps, steps)
+
+ def test__get_node_next_clean_steps_bad_clean_step(self):
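+ # a clean_step that is not in the stored clean_steps list should
+ # raise NodeCleaningFailure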
+ driver_internal_info = {'clean_steps': self.clean_steps}
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.CLEANWAIT,
+ target_provision_state=states.AVAILABLE,
+ driver_internal_info=driver_internal_info,
+ last_error=None,
+ clean_step={'interface': 'deploy',
+ 'step': 'not_a_clean_step',
+ 'priority': 100})
+
+ with task_manager.acquire(self.context, node.uuid) as task:
+ self.assertRaises(exception.NodeCleaningFailure,
+ self.service._get_node_next_clean_steps, task)
+
+
+@_mock_record_keepalive
+class DoNodeVerifyTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+ @mock.patch('ironic.drivers.modules.fake.FakePower.get_power_state')
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test__do_node_verify(self, mock_validate, mock_get_power_state):
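+ # test that a successful verify validates the power interface, records
+ # the power state and moves the node to MANAGEABLE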
+ mock_get_power_state.return_value = states.POWER_OFF
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.VERIFYING,
+ target_provision_state=states.MANAGEABLE,
+ last_error=None,
+ power_state=states.NOSTATE)
+
+ self._start_service()
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_node_verify(task)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ mock_validate.assert_called_once_with(task)
+ mock_get_power_state.assert_called_once_with(task)
+
+ self.assertEqual(states.MANAGEABLE, node.provision_state)
+ self.assertIsNone(node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ self.assertEqual(states.POWER_OFF, node.power_state)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.get_power_state')
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test__do_node_verify_validation_fails(self, mock_validate,
+ mock_get_power_state):
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.VERIFYING,
+ target_provision_state=states.MANAGEABLE,
+ last_error=None,
+ power_state=states.NOSTATE)
+
+ mock_validate.side_effect = iter([RuntimeError("boom")])
+
+ self._start_service()
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_node_verify(task)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ mock_validate.assert_called_once_with(task)
+
+ self.assertEqual(states.ENROLL, node.provision_state)
+ self.assertIsNone(node.target_provision_state)
+ self.assertTrue(node.last_error)
+ self.assertFalse(mock_get_power_state.called)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.get_power_state')
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test__do_node_verify_get_state_fails(self, mock_validate,
+ mock_get_power_state):
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.VERIFYING,
+ target_provision_state=states.MANAGEABLE,
+ last_error=None,
+ power_state=states.NOSTATE)
+
+ mock_get_power_state.side_effect = iter([RuntimeError("boom")])
+
+ self._start_service()
+ with task_manager.acquire(
+ self.context, node['id'], shared=False) as task:
+ self.service._do_node_verify(task)
+
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ mock_get_power_state.assert_called_once_with(task)
+
+ self.assertEqual(states.ENROLL, node.provision_state)
+ self.assertIsNone(node.target_provision_state)
+ self.assertTrue(node.last_error)
+
+
+@_mock_record_keepalive
+class MiscTestCase(_ServiceSetUpMixin, _CommonMixIn, tests_db_base.DbTestCase):
+ def test_get_driver_known(self):
+ self._start_service()
+ driver = self.service._get_driver('fake')
+ self.assertIsInstance(driver, drivers_base.BaseDriver)
+
+ def test_get_driver_unknown(self):
+ self._start_service()
+ self.assertRaises(exception.DriverNotFound,
+ self.service._get_driver, 'unknown_driver')
+
+ def test__mapped_to_this_conductor(self):
+ self._start_service()
+ n = utils.get_test_node()
+ self.assertTrue(self.service._mapped_to_this_conductor(n['uuid'],
+ 'fake'))
+ self.assertFalse(self.service._mapped_to_this_conductor(n['uuid'],
+ 'otherdriver'))
+
+ @mock.patch.object(images, 'is_whole_disk_image')
+ def test_validate_driver_interfaces(self, mock_iwdi):
+ mock_iwdi.return_value = False
+ target_raid_config = {'logical_disks': [{'size_gb': 1,
+ 'raid_level': '1'}]}
+ node = obj_utils.create_test_node(
+ self.context, driver='fake', target_raid_config=target_raid_config)
+ ret = self.service.validate_driver_interfaces(self.context,
+ node.uuid)
+ expected = {'console': {'result': True},
+ 'power': {'result': True},
+ 'inspect': {'result': True},
+ 'management': {'result': True},
+ 'boot': {'result': True},
+ 'raid': {'result': True},
+ 'deploy': {'result': True}}
+ self.assertEqual(expected, ret)
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+
+ @mock.patch.object(images, 'is_whole_disk_image')
+ def test_validate_driver_interfaces_validation_fail(self, mock_iwdi):
+ mock_iwdi.return_value = False
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ with mock.patch(
+ 'ironic.drivers.modules.fake.FakeDeploy.validate'
+ ) as deploy:
+ reason = 'fake reason'
+ deploy.side_effect = exception.InvalidParameterValue(reason)
+ ret = self.service.validate_driver_interfaces(self.context,
+ node.uuid)
+ self.assertFalse(ret['deploy']['result'])
+ self.assertEqual(reason, ret['deploy']['reason'])
+ mock_iwdi.assert_called_once_with(self.context, node.instance_info)
+
+ @mock.patch.object(manager.ConductorManager, '_fail_if_in_state',
+ autospec=True)
+ @mock.patch.object(manager.ConductorManager, '_mapped_to_this_conductor')
+ @mock.patch.object(dbapi.IMPL, 'get_nodeinfo_list')
+ def test_iter_nodes(self, mock_nodeinfo_list, mock_mapped,
+ mock_fail_if_state):
+ self._start_service()
+ self.columns = ['uuid', 'driver', 'id']
+ nodes = [self._create_node(id=i, driver='fake') for i in range(2)]
+ mock_nodeinfo_list.return_value = self._get_nodeinfo_list_response(
+ nodes)
+ mock_mapped.side_effect = [True, False]
+
+ result = list(self.service.iter_nodes(fields=['id'],
+ filters=mock.sentinel.filters))
+ self.assertEqual([(nodes[0].uuid, 'fake', 0)], result)
+ mock_nodeinfo_list.assert_called_once_with(
+ columns=self.columns, filters=mock.sentinel.filters)
+ mock_fail_if_state.assert_called_once_with(
+ mock.ANY, mock.ANY,
+ {'provision_state': 'deploying', 'reserved': False},
+ 'deploying', 'provision_updated_at',
+ last_error=mock.ANY)
+
+
+@_mock_record_keepalive
+class ConsoleTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+ def test_set_console_mode_worker_pool_full(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ self._start_service()
+ with mock.patch.object(self.service,
+ '_spawn_worker') as spawn_mock:
+ spawn_mock.side_effect = exception.NoFreeConductorWorker()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.set_console_mode,
+ self.context, node.uuid, True)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NoFreeConductorWorker, exc.exc_info[0])
+ self.service._worker_pool.waitall()
+ spawn_mock.assert_called_once_with(mock.ANY, mock.ANY, mock.ANY)
+
+ def test_set_console_mode_enabled(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ self._start_service()
+ self.service.set_console_mode(self.context, node.uuid, True)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertTrue(node.console_enabled)
+
+ def test_set_console_mode_disabled(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ self._start_service()
+ self.service.set_console_mode(self.context, node.uuid, False)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertFalse(node.console_enabled)
+
+ def test_set_console_mode_not_supported(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ last_error=None)
+ self._start_service()
+ # null the console interface
+ self.driver.console = None
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.set_console_mode, self.context,
+ node.uuid, True)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+ self.service._worker_pool.waitall()
+ node.refresh()
+
+ def test_set_console_mode_validation_fail(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ last_error=None)
+ self._start_service()
+ with mock.patch.object(self.driver.console, 'validate') as mock_val:
+ mock_val.side_effect = exception.InvalidParameterValue('error')
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.set_console_mode,
+ self.context, node.uuid, True)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidParameterValue, exc.exc_info[0])
+
+ def test_set_console_mode_start_fail(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ last_error=None,
+ console_enabled=False)
+ self._start_service()
+ with mock.patch.object(self.driver.console,
+ 'start_console') as mock_sc:
+ mock_sc.side_effect = exception.IronicException('test-error')
+ self.service.set_console_mode(self.context, node.uuid, True)
+ self.service._worker_pool.waitall()
+ mock_sc.assert_called_once_with(mock.ANY)
+ node.refresh()
+ self.assertIsNotNone(node.last_error)
+
+ def test_set_console_mode_stop_fail(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ last_error=None,
+ console_enabled=True)
+ self._start_service()
+ with mock.patch.object(self.driver.console,
+ 'stop_console') as mock_sc:
+ mock_sc.side_effect = exception.IronicException('test-error')
+ self.service.set_console_mode(self.context, node.uuid, False)
+ self.service._worker_pool.waitall()
+ mock_sc.assert_called_once_with(mock.ANY)
+ node.refresh()
+ self.assertIsNotNone(node.last_error)
+
+ def test_enable_console_already_enabled(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ console_enabled=True)
+ self._start_service()
+ with mock.patch.object(self.driver.console,
+ 'start_console') as mock_sc:
+ self.service.set_console_mode(self.context, node.uuid, True)
+ self.service._worker_pool.waitall()
+ self.assertFalse(mock_sc.called)
+
+ def test_disable_console_already_disabled(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ console_enabled=False)
+ self._start_service()
+ with mock.patch.object(self.driver.console,
+ 'stop_console') as mock_sc:
+ self.service.set_console_mode(self.context, node.uuid, False)
+ self.service._worker_pool.waitall()
+ self.assertFalse(mock_sc.called)
+
+ def test_get_console(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ console_enabled=True)
+ console_info = {'test': 'test info'}
+ with mock.patch.object(self.driver.console, 'get_console') as mock_gc:
+ mock_gc.return_value = console_info
+ data = self.service.get_console_information(self.context,
+ node.uuid)
+ self.assertEqual(console_info, data)
+
+ def test_get_console_not_supported(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ console_enabled=True)
+ # null the console interface
+ self.driver.console = None
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_console_information,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+
+ def test_get_console_disabled(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ console_enabled=False)
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_console_information,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeConsoleNotEnabled, exc.exc_info[0])
+
+ def test_get_console_validate_fail(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ console_enabled=True)
+ with mock.patch.object(self.driver.console, 'validate') as mock_gc:
+ mock_gc.side_effect = exception.InvalidParameterValue('error')
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_console_information,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidParameterValue, exc.exc_info[0])
+
+
+@_mock_record_keepalive
+class DestroyNodeTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+
+ def test_destroy_node(self):
+ self._start_service()
+ for state in states.DELETE_ALLOWED_STATES:
+ node = obj_utils.create_test_node(self.context,
+ provision_state=state)
+ self.service.destroy_node(self.context, node.uuid)
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.get_node_by_uuid,
+ node.uuid)
+
+ def test_destroy_node_reserved(self):
+ self._start_service()
+ fake_reservation = 'fake-reserv'
+ node = obj_utils.create_test_node(self.context,
+ reservation=fake_reservation)
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.destroy_node,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeLocked, exc.exc_info[0])
+ # Verify existing reservation wasn't broken.
+ node.refresh()
+ self.assertEqual(fake_reservation, node.reservation)
+
+ def test_destroy_node_associated(self):
+ self._start_service()
+ node = obj_utils.create_test_node(self.context,
+ instance_uuid='fake-uuid')
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.destroy_node,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeAssociated, exc.exc_info[0])
+
+ # Verify reservation was released.
+ node.refresh()
+ self.assertIsNone(node.reservation)
+
+ def test_destroy_node_invalid_provision_state(self):
+ self._start_service()
+ node = obj_utils.create_test_node(self.context,
+ provision_state=states.ACTIVE)
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.destroy_node,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidState, exc.exc_info[0])
+ # Verify reservation was released.
+ node.refresh()
+ self.assertIsNone(node.reservation)
+
+ def test_destroy_node_allowed_in_maintenance(self):
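+ # test that an ACTIVE node in maintenance may still be destroyed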
+ self._start_service()
+ node = obj_utils.create_test_node(self.context,
+ instance_uuid='fake-uuid',
+ provision_state=states.ACTIVE,
+ maintenance=True)
+ self.service.destroy_node(self.context, node.uuid)
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.get_node_by_uuid,
+ node.uuid)
+
+ def test_destroy_node_power_off(self):
+ self._start_service()
+ node = obj_utils.create_test_node(self.context,
+ power_state=states.POWER_OFF)
+ self.service.destroy_node(self.context, node.uuid)
+
+ def test_destroy_node_console_enabled(self):
+ self._start_service()
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ console_enabled=True)
+ with mock.patch.object(self.driver.console,
+ 'stop_console') as mock_sc:
+ self.service.destroy_node(self.context, node.uuid)
+ mock_sc.assert_called_once_with(mock.ANY)
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.get_node_by_uuid,
+ node.uuid)
+
+
+@_mock_record_keepalive
+class UpdatePortTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+ def test_update_port(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+
+ port = obj_utils.create_test_port(self.context,
+ node_id=node.id,
+ extra={'foo': 'bar'})
+ new_extra = {'foo': 'baz'}
+ port.extra = new_extra
+ res = self.service.update_port(self.context, port)
+ self.assertEqual(new_extra, res.extra)
+
+ def test_update_port_node_locked(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ reservation='fake-reserv')
+
+ port = obj_utils.create_test_port(self.context, node_id=node.id)
+ port.extra = {'foo': 'baz'}
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.update_port,
+ self.context, port)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeLocked, exc.exc_info[0])
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.update_port_address')
+ def test_update_port_address(self, mac_update_mock):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ port = obj_utils.create_test_port(self.context,
+ node_id=node.id,
+ extra={'vif_port_id': 'fake-id'})
+ new_address = '11:22:33:44:55:bb'
+ port.address = new_address
+ res = self.service.update_port(self.context, port)
+ self.assertEqual(new_address, res.address)
+ mac_update_mock.assert_called_once_with('fake-id', new_address,
+ token=self.context.auth_token)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.update_port_address')
+ def test_update_port_address_fail(self, mac_update_mock):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ port = obj_utils.create_test_port(self.context,
+ node_id=node.id,
+ extra={'vif_port_id': 'fake-id'})
+ old_address = port.address
+ port.address = '11:22:33:44:55:bb'
+ mac_update_mock.side_effect = (
+ exception.FailedToUpdateMacOnPort(port_id=port.uuid))
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.update_port,
+ self.context, port)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.FailedToUpdateMacOnPort, exc.exc_info[0])
+ port.refresh()
+ self.assertEqual(old_address, port.address)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.update_port_address')
+ def test_update_port_address_no_vif_id(self, mac_update_mock):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ port = obj_utils.create_test_port(self.context, node_id=node.id)
+
+ new_address = '11:22:33:44:55:bb'
+ port.address = new_address
+ res = self.service.update_port(self.context, port)
+ self.assertEqual(new_address, res.address)
+ self.assertFalse(mac_update_mock.called)
+
+ def test__filter_out_unsupported_types_all(self):
+ self._start_service()
+ CONF.set_override('send_sensor_data_types', ['All'], group='conductor')
+ fake_sensors_data = {"t1": {'f1': 'v1'}, "t2": {'f1': 'v1'}}
+ actual_result = (
+ self.service._filter_out_unsupported_types(fake_sensors_data))
+ expected_result = {"t1": {'f1': 'v1'}, "t2": {'f1': 'v1'}}
+ self.assertEqual(expected_result, actual_result)
+
+ def test__filter_out_unsupported_types_part(self):
+ self._start_service()
+ CONF.set_override('send_sensor_data_types', ['t1'], group='conductor')
+ fake_sensors_data = {"t1": {'f1': 'v1'}, "t2": {'f1': 'v1'}}
+ actual_result = (
+ self.service._filter_out_unsupported_types(fake_sensors_data))
+ expected_result = {"t1": {'f1': 'v1'}}
+ self.assertEqual(expected_result, actual_result)
+
+ def test__filter_out_unsupported_types_non(self):
+ self._start_service()
+ CONF.set_override('send_sensor_data_types', ['t3'], group='conductor')
+ fake_sensors_data = {"t1": {'f1': 'v1'}, "t2": {'f1': 'v1'}}
+ actual_result = (
+ self.service._filter_out_unsupported_types(fake_sensors_data))
+ expected_result = {}
+ self.assertEqual(expected_result, actual_result)
+
+ @mock.patch.object(manager.ConductorManager, '_mapped_to_this_conductor')
+ @mock.patch.object(dbapi.IMPL, 'get_nodeinfo_list')
+ @mock.patch.object(task_manager, 'acquire')
+ def test___send_sensor_data(self, acquire_mock, get_nodeinfo_list_mock,
+ _mapped_to_this_conductor_mock):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake')
+ self._start_service()
+ CONF.set_override('send_sensor_data', True, group='conductor')
+ acquire_mock.return_value.__enter__.return_value.driver = self.driver
+ with mock.patch.object(self.driver.management,
+ 'get_sensors_data') as get_sensors_data_mock:
+ with mock.patch.object(self.driver.management,
+ 'validate') as validate_mock:
+ get_sensors_data_mock.return_value = 'fake-sensor-data'
+ _mapped_to_this_conductor_mock.return_value = True
+ get_nodeinfo_list_mock.return_value = [(node.uuid, node.driver,
+ node.instance_uuid)]
+ self.service._send_sensor_data(self.context)
+ self.assertTrue(get_nodeinfo_list_mock.called)
+ self.assertTrue(_mapped_to_this_conductor_mock.called)
+ self.assertTrue(acquire_mock.called)
+ self.assertTrue(get_sensors_data_mock.called)
+ self.assertTrue(validate_mock.called)
+
+ @mock.patch.object(manager.ConductorManager, '_fail_if_in_state',
+ autospec=True)
+ @mock.patch.object(manager.ConductorManager, '_mapped_to_this_conductor')
+ @mock.patch.object(dbapi.IMPL, 'get_nodeinfo_list')
+ @mock.patch.object(task_manager, 'acquire')
+ def test___send_sensor_data_disabled(self, acquire_mock,
+ get_nodeinfo_list_mock,
+ _mapped_to_this_conductor_mock,
+ mock_fail_if_state):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake')
+ self._start_service()
+ acquire_mock.return_value.__enter__.return_value.driver = self.driver
+ with mock.patch.object(self.driver.management,
+ 'get_sensors_data') as get_sensors_data_mock:
+ with mock.patch.object(self.driver.management,
+ 'validate') as validate_mock:
+ get_sensors_data_mock.return_value = 'fake-sensor-data'
+ _mapped_to_this_conductor_mock.return_value = True
+ get_nodeinfo_list_mock.return_value = [(node.uuid, node.driver,
+ node.instance_uuid)]
+ self.service._send_sensor_data(self.context)
+ self.assertFalse(get_nodeinfo_list_mock.called)
+ self.assertFalse(_mapped_to_this_conductor_mock.called)
+ self.assertFalse(acquire_mock.called)
+ self.assertFalse(get_sensors_data_mock.called)
+ self.assertFalse(validate_mock.called)
+ mock_fail_if_state.assert_called_once_with(
+ mock.ANY, mock.ANY,
+ {'provision_state': 'deploying', 'reserved': False},
+ 'deploying', 'provision_updated_at',
+ last_error=mock.ANY)
+
+ @mock.patch.object(manager.ConductorManager, 'iter_nodes', autospec=True)
+ @mock.patch.object(task_manager, 'acquire', autospec=True)
+ def test___send_sensor_data_no_management(self, acquire_mock,
+ iter_nodes_mock):
+ CONF.set_override('send_sensor_data', True, group='conductor')
+ iter_nodes_mock.return_value = [('fake_uuid1', 'fake', 'fake_uuid2')]
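+        # iter_nodes is mocked to yield a single node; the driver's
+        # management interface is nulled below, so no sensor data should be
+        # collected or validated.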
+ self.driver.management = None
+ acquire_mock.return_value.__enter__.return_value.driver = self.driver
+
+ with mock.patch.object(fake.FakeManagement, 'get_sensors_data',
+ autospec=True) as get_sensors_data_mock:
+ with mock.patch.object(fake.FakeManagement, 'validate',
+ autospec=True) as validate_mock:
+ self.service._send_sensor_data(self.context)
+
+ self.assertTrue(iter_nodes_mock.called)
+ self.assertTrue(acquire_mock.called)
+ self.assertFalse(get_sensors_data_mock.called)
+ self.assertFalse(validate_mock.called)
+
+ def test_set_boot_device(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ with mock.patch.object(self.driver.management, 'validate') as mock_val:
+ with mock.patch.object(self.driver.management,
+ 'set_boot_device') as mock_sbd:
+ self.service.set_boot_device(self.context, node.uuid,
+ boot_devices.PXE)
+ mock_val.assert_called_once_with(mock.ANY)
+ mock_sbd.assert_called_once_with(mock.ANY, boot_devices.PXE,
+ persistent=False)
+
+ def test_set_boot_device_node_locked(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ reservation='fake-reserv')
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.set_boot_device,
+ self.context, node.uuid, boot_devices.DISK)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeLocked, exc.exc_info[0])
+
+ def test_set_boot_device_not_supported(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+        # null the management interface
+ self.driver.management = None
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.set_boot_device,
+ self.context, node.uuid, boot_devices.DISK)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+
+ def test_set_boot_device_validate_fail(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ with mock.patch.object(self.driver.management, 'validate') as mock_val:
+ mock_val.side_effect = exception.InvalidParameterValue('error')
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.set_boot_device,
+ self.context, node.uuid, boot_devices.DISK)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidParameterValue, exc.exc_info[0])
+
+ def test_get_boot_device(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ bootdev = self.service.get_boot_device(self.context, node.uuid)
+ expected = {'boot_device': boot_devices.PXE, 'persistent': False}
+ self.assertEqual(expected, bootdev)
+
+ def test_get_boot_device_node_locked(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ reservation='fake-reserv')
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_boot_device,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeLocked, exc.exc_info[0])
+
+ def test_get_boot_device_not_supported(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ # null the management interface
+ self.driver.management = None
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_boot_device,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+
+ def test_get_boot_device_validate_fail(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ with mock.patch.object(self.driver.management, 'validate') as mock_val:
+ mock_val.side_effect = exception.InvalidParameterValue('error')
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_boot_device,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.InvalidParameterValue, exc.exc_info[0])
+
+ def test_get_supported_boot_devices(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ bootdevs = self.service.get_supported_boot_devices(self.context,
+ node.uuid)
+ self.assertEqual([boot_devices.PXE], bootdevs)
+
+ def test_get_supported_boot_devices_no_task(self):
+        # NOTE(MattMan): This test method should be removed in the next
+        # cycle (Mitaka), when the task parameter will be mandatory.
+ node = obj_utils.create_test_node(self.context, driver='fake')
+
+ def no_task_get_supported_boot_devices():
+ return "FAKE_BOOT_DEVICE_NO_TASK"
+
+ # Override driver's get_supported_boot_devices method ensuring
+ # no task parameter
+ saved_get_boot_devices = \
+ self.driver.management.get_supported_boot_devices
+ self.driver.management.get_supported_boot_devices = \
+ no_task_get_supported_boot_devices
+ bootdevs = self.service.get_supported_boot_devices(self.context,
+ node.uuid)
+ self.assertEqual("FAKE_BOOT_DEVICE_NO_TASK", bootdevs)
+
+ # Revert back to original method
+ self.driver.management.get_supported_boot_devices = \
+ saved_get_boot_devices
+
+ def test_get_supported_boot_devices_iface_not_supported(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ # null the management interface
+ self.driver.management = None
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_supported_boot_devices,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.UnsupportedDriverExtension,
+ exc.exc_info[0])
+
+
+@_mock_record_keepalive
+class RaidTestCases(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+
+ def setUp(self):
+ super(RaidTestCases, self).setUp()
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake', provision_state=states.MANAGEABLE)
+
+ def test_get_raid_logical_disk_properties(self):
+ self._start_service()
+ properties = self.service.get_raid_logical_disk_properties(
+ self.context, 'fake')
+ self.assertIn('raid_level', properties)
+ self.assertIn('size_gb', properties)
+
+ def test_get_raid_logical_disk_properties_iface_not_supported(self):
+ self.driver.raid = None
+ self._start_service()
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_raid_logical_disk_properties,
+ self.context, 'fake')
+ self.assertEqual(exception.UnsupportedDriverExtension, exc.exc_info[0])
+
+ def test_set_target_raid_config(self):
+ raid_config = {'logical_disks': [{'size_gb': 100, 'raid_level': '1'}]}
+ self.service.set_target_raid_config(
+ self.context, self.node.uuid, raid_config)
+ self.node.refresh()
+ self.assertEqual(raid_config, self.node.target_raid_config)
+
+ def test_set_target_raid_config_empty(self):
+ self.node.target_raid_config = {'foo': 'bar'}
+ self.node.save()
+ raid_config = {}
+ self.service.set_target_raid_config(
+ self.context, self.node.uuid, raid_config)
+ self.node.refresh()
+ self.assertEqual({}, self.node.target_raid_config)
+
+ def test_set_target_raid_config_iface_not_supported(self):
+ raid_config = {'logical_disks': [{'size_gb': 100, 'raid_level': '1'}]}
+ self.driver.raid = None
+ exc = self.assertRaises(
+ messaging.rpc.ExpectedException,
+ self.service.set_target_raid_config,
+ self.context, self.node.uuid, raid_config)
+ self.node.refresh()
+ self.assertEqual({}, self.node.target_raid_config)
+ self.assertEqual(exception.UnsupportedDriverExtension, exc.exc_info[0])
+
+ def test_set_target_raid_config_invalid_parameter_value(self):
+ # Missing raid_level in the below raid config.
+ raid_config = {'logical_disks': [{'size_gb': 100}]}
+ self.node.target_raid_config = {'foo': 'bar'}
+ self.node.save()
+
+ exc = self.assertRaises(
+ messaging.rpc.ExpectedException,
+ self.service.set_target_raid_config,
+ self.context, self.node.uuid, raid_config)
+
+ self.node.refresh()
+ self.assertEqual({'foo': 'bar'}, self.node.target_raid_config)
+ self.assertEqual(exception.InvalidParameterValue, exc.exc_info[0])
+
+
+class ManagerSpawnWorkerTestCase(tests_base.TestCase):
+ def setUp(self):
+ super(ManagerSpawnWorkerTestCase, self).setUp()
+ self.service = manager.ConductorManager('hostname', 'test-topic')
+
+ def test__spawn_worker(self):
+ worker_pool = mock.Mock(spec_set=['free', 'spawn'])
+ worker_pool.free.return_value = True
+ self.service._worker_pool = worker_pool
+
+ self.service._spawn_worker('fake', 1, 2, foo='bar', cat='meow')
+
+ worker_pool.spawn.assert_called_once_with(
+ 'fake', 1, 2, foo='bar', cat='meow')
+
+ def test__spawn_worker_none_free(self):
+ worker_pool = mock.Mock(spec_set=['free', 'spawn'])
+ worker_pool.free.return_value = False
+ self.service._worker_pool = worker_pool
+
+ self.assertRaises(exception.NoFreeConductorWorker,
+ self.service._spawn_worker, 'fake')
+
+ self.assertFalse(worker_pool.spawn.called)
+
+
+@mock.patch.object(conductor_utils, 'node_power_action')
+class ManagerDoSyncPowerStateTestCase(tests_db_base.DbTestCase):
+ def setUp(self):
+ super(ManagerDoSyncPowerStateTestCase, self).setUp()
+ self.service = manager.ConductorManager('hostname', 'test-topic')
+ self.driver = mock.Mock(spec_set=drivers_base.BaseDriver)
+ self.power = self.driver.power
+ self.node = mock.Mock(spec_set=objects.Node,
+ maintenance=False,
+ provision_state=states.AVAILABLE)
+ self.task = mock.Mock(spec_set=['context', 'driver', 'node',
+ 'upgrade_lock', 'shared'])
+ self.task.context = self.context
+ self.task.driver = self.driver
+ self.task.node = self.node
+ self.task.shared = False
+ self.config(force_power_state_during_sync=False, group='conductor')
+
+ def _do_sync_power_state(self, old_power_state, new_power_states,
+ fail_validate=False):
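+        # Helper: have the driver report each state in new_power_states in
+        # turn, run manager.do_sync_power_state and carry the retry count
+        # between iterations, as the periodic sync task would.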
+ self.node.power_state = old_power_state
+ if not isinstance(new_power_states, (list, tuple)):
+ new_power_states = [new_power_states]
+ if fail_validate:
+ exc = exception.InvalidParameterValue('error')
+ self.power.validate.side_effect = exc
+ for new_power_state in new_power_states:
+ self.node.power_state = old_power_state
+ if isinstance(new_power_state, Exception):
+ self.power.get_power_state.side_effect = new_power_state
+ else:
+ self.power.get_power_state.return_value = new_power_state
+ count = manager.do_sync_power_state(
+ self.task, self.service.power_state_sync_count[self.node.uuid])
+ self.service.power_state_sync_count[self.node.uuid] = count
+
+ def test_state_unchanged(self, node_power_action):
+ self._do_sync_power_state('fake-power', 'fake-power')
+
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.assertEqual('fake-power', self.node.power_state)
+ self.assertFalse(self.node.save.called)
+ self.assertFalse(node_power_action.called)
+ self.assertFalse(self.task.upgrade_lock.called)
+
+ def test_state_not_set(self, node_power_action):
+ self._do_sync_power_state(None, states.POWER_ON)
+
+ self.power.validate.assert_called_once_with(self.task)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.node.save.assert_called_once_with()
+ self.assertFalse(node_power_action.called)
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+ self.task.upgrade_lock.assert_called_once_with()
+
+ def test_validate_fail(self, node_power_action):
+ self._do_sync_power_state(None, states.POWER_ON,
+ fail_validate=True)
+
+ self.power.validate.assert_called_once_with(self.task)
+ self.assertFalse(self.power.get_power_state.called)
+ self.assertFalse(self.node.save.called)
+ self.assertFalse(node_power_action.called)
+ self.assertIsNone(self.node.power_state)
+
+ def test_get_power_state_fail(self, node_power_action):
+ self._do_sync_power_state('fake',
+ exception.IronicException('foo'))
+
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.assertFalse(self.node.save.called)
+ self.assertFalse(node_power_action.called)
+ self.assertEqual('fake', self.node.power_state)
+ self.assertEqual(1,
+ self.service.power_state_sync_count[self.node.uuid])
+
+ def test_get_power_state_error(self, node_power_action):
+ self._do_sync_power_state('fake', states.ERROR)
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.assertFalse(self.node.save.called)
+ self.assertFalse(node_power_action.called)
+ self.assertEqual('fake', self.node.power_state)
+ self.assertEqual(1,
+ self.service.power_state_sync_count[self.node.uuid])
+
+ def test_state_changed_no_sync(self, node_power_action):
+ self._do_sync_power_state(states.POWER_ON, states.POWER_OFF)
+
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.node.save.assert_called_once_with()
+ self.assertFalse(node_power_action.called)
+ self.assertEqual(states.POWER_OFF, self.node.power_state)
+ self.task.upgrade_lock.assert_called_once_with()
+
+ def test_state_changed_sync(self, node_power_action):
+ self.config(force_power_state_during_sync=True, group='conductor')
+ self.config(power_state_sync_max_retries=1, group='conductor')
+
+ self._do_sync_power_state(states.POWER_ON, states.POWER_OFF)
+
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.assertFalse(self.node.save.called)
+ node_power_action.assert_called_once_with(self.task, states.POWER_ON)
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+ self.task.upgrade_lock.assert_called_once_with()
+
+ def test_state_changed_sync_failed(self, node_power_action):
+ self.config(force_power_state_during_sync=True, group='conductor')
+
+ node_power_action.side_effect = exception.IronicException('test')
+ self._do_sync_power_state(states.POWER_ON, states.POWER_OFF)
+
+        # Just ensure that this does not raise.
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.assertFalse(self.node.save.called)
+ node_power_action.assert_called_once_with(self.task, states.POWER_ON)
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+ self.assertEqual(1,
+ self.service.power_state_sync_count[self.node.uuid])
+
+ def test_max_retries_exceeded(self, node_power_action):
+ self.config(force_power_state_during_sync=True, group='conductor')
+ self.config(power_state_sync_max_retries=1, group='conductor')
+
+ self._do_sync_power_state(states.POWER_ON, [states.POWER_OFF,
+ states.POWER_OFF])
+
+ self.assertFalse(self.power.validate.called)
+ power_exp_calls = [mock.call(self.task)] * 2
+ self.assertEqual(power_exp_calls,
+ self.power.get_power_state.call_args_list)
+ self.node.save.assert_called_once_with()
+ node_power_action.assert_called_once_with(self.task, states.POWER_ON)
+ self.assertEqual(states.POWER_OFF, self.node.power_state)
+ self.assertEqual(2,
+ self.service.power_state_sync_count[self.node.uuid])
+ self.assertTrue(self.node.maintenance)
+ self.assertIsNotNone(self.node.maintenance_reason)
+
+ def test_max_retries_exceeded2(self, node_power_action):
+ self.config(force_power_state_during_sync=True, group='conductor')
+ self.config(power_state_sync_max_retries=2, group='conductor')
+
+ self._do_sync_power_state(states.POWER_ON, [states.POWER_OFF,
+ states.POWER_OFF,
+ states.POWER_OFF])
+
+ self.assertFalse(self.power.validate.called)
+ power_exp_calls = [mock.call(self.task)] * 3
+ self.assertEqual(power_exp_calls,
+ self.power.get_power_state.call_args_list)
+ self.node.save.assert_called_once_with()
+ npa_exp_calls = [mock.call(self.task, states.POWER_ON)] * 2
+ self.assertEqual(npa_exp_calls, node_power_action.call_args_list)
+ self.assertEqual(states.POWER_OFF, self.node.power_state)
+ self.assertEqual(3,
+ self.service.power_state_sync_count[self.node.uuid])
+ self.assertTrue(self.node.maintenance)
+
+ def test_retry_then_success(self, node_power_action):
+ self.config(force_power_state_during_sync=True, group='conductor')
+ self.config(power_state_sync_max_retries=2, group='conductor')
+
+ self._do_sync_power_state(states.POWER_ON, [states.POWER_OFF,
+ states.POWER_OFF,
+ states.POWER_ON])
+
+ self.assertFalse(self.power.validate.called)
+ power_exp_calls = [mock.call(self.task)] * 3
+ self.assertEqual(power_exp_calls,
+ self.power.get_power_state.call_args_list)
+ self.assertFalse(self.node.save.called)
+ npa_exp_calls = [mock.call(self.task, states.POWER_ON)] * 2
+ self.assertEqual(npa_exp_calls, node_power_action.call_args_list)
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+ self.assertEqual(0,
+ self.service.power_state_sync_count[self.node.uuid])
+
+ def test_power_state_sync_max_retries_gps_exception(self,
+ node_power_action):
+ self.config(power_state_sync_max_retries=2, group='conductor')
+ self.service.power_state_sync_count[self.node.uuid] = 2
+
+ node_power_action.side_effect = exception.IronicException('test')
+ self._do_sync_power_state('fake',
+ exception.IronicException('foo'))
+
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+
+ self.assertIsNone(self.node.power_state)
+ self.assertTrue(self.node.maintenance)
+ self.assertTrue(self.node.save.called)
+
+ self.assertFalse(node_power_action.called)
+
+ def test_maintenance_on_upgrade_lock(self, node_power_action):
+ self.node.maintenance = True
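+        # A power state mismatch on a node in maintenance should be left
+        # alone: nothing saved, no corrective power action.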
+
+ self._do_sync_power_state(states.POWER_ON, states.POWER_OFF)
+
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+ self.assertFalse(self.node.save.called)
+ self.assertFalse(node_power_action.called)
+ self.task.upgrade_lock.assert_called_once_with()
+
+ def test_wrong_provision_state_on_upgrade_lock(self, node_power_action):
+ self.node.provision_state = states.DEPLOYWAIT
+
+ self._do_sync_power_state(states.POWER_ON, states.POWER_OFF)
+
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+ self.assertFalse(self.node.save.called)
+ self.assertFalse(node_power_action.called)
+ self.task.upgrade_lock.assert_called_once_with()
+
+ def test_correct_power_state_on_upgrade_lock(self, node_power_action):
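+        # Simulate another thread correcting the power state while we wait
+        # for the exclusive lock: after upgrade_lock the states match, so
+        # nothing is saved and no power action is taken.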
+ def _fake_upgrade():
+ self.node.power_state = states.POWER_OFF
+
+ self.task.upgrade_lock.side_effect = _fake_upgrade
+
+ self._do_sync_power_state(states.POWER_ON, states.POWER_OFF)
+
+ self.assertFalse(self.power.validate.called)
+ self.power.get_power_state.assert_called_once_with(self.task)
+ self.assertFalse(self.node.save.called)
+ self.assertFalse(node_power_action.called)
+ self.task.upgrade_lock.assert_called_once_with()
+
+
+@mock.patch.object(manager, 'do_sync_power_state')
+@mock.patch.object(task_manager, 'acquire')
+@mock.patch.object(manager.ConductorManager, '_mapped_to_this_conductor')
+@mock.patch.object(dbapi.IMPL, 'get_nodeinfo_list')
+class ManagerSyncPowerStatesTestCase(_CommonMixIn, tests_db_base.DbTestCase):
+ def setUp(self):
+ super(ManagerSyncPowerStatesTestCase, self).setUp()
+ self.service = manager.ConductorManager('hostname', 'test-topic')
+ self.service.dbapi = self.dbapi
+ self.node = self._create_node()
+ self.filters = {'reserved': False, 'maintenance': False}
+ self.columns = ['uuid', 'driver', 'id']
+
+ def test_node_not_mapped(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock, sync_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = False
+
+ self.service._sync_power_states(self.context)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ self.assertFalse(acquire_mock.called)
+ self.assertFalse(sync_mock.called)
+
+ def test_node_locked_on_acquire(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock, sync_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = exception.NodeLocked(node=self.node.uuid,
+ host='fake')
+
+ self.service._sync_power_states(self.context)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY,
+ shared=True)
+ self.assertFalse(sync_mock.called)
+
+ def test_node_in_deploywait_on_acquire(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock,
+ sync_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ task = self._create_task(
+ node_attrs=dict(provision_state=states.DEPLOYWAIT,
+ target_provision_state=states.ACTIVE,
+ uuid=self.node.uuid))
+ acquire_mock.side_effect = self._get_acquire_side_effect(task)
+
+ self.service._sync_power_states(self.context)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY,
+ shared=True)
+ self.assertFalse(sync_mock.called)
+
+ def test_node_in_enroll_on_acquire(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock, sync_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ task = self._create_task(
+ node_attrs=dict(provision_state=states.ENROLL,
+ target_provision_state=states.NOSTATE,
+ uuid=self.node.uuid))
+ acquire_mock.side_effect = self._get_acquire_side_effect(task)
+
+ self.service._sync_power_states(self.context)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY,
+ shared=True)
+ self.assertFalse(sync_mock.called)
+
+ def test_node_in_power_transition_on_acquire(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock,
+ sync_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ task = self._create_task(
+ node_attrs=dict(target_power_state=states.POWER_ON,
+ uuid=self.node.uuid))
+ acquire_mock.side_effect = self._get_acquire_side_effect(task)
+
+ self.service._sync_power_states(self.context)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY,
+ shared=True)
+ self.assertFalse(sync_mock.called)
+
+ def test_node_in_maintenance_on_acquire(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock,
+ sync_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ task = self._create_task(
+ node_attrs=dict(maintenance=True, uuid=self.node.uuid))
+ acquire_mock.side_effect = self._get_acquire_side_effect(task)
+
+ self.service._sync_power_states(self.context)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY,
+ shared=True)
+ self.assertFalse(sync_mock.called)
+
+ def test_node_disappears_on_acquire(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock, sync_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = exception.NodeNotFound(node=self.node.uuid,
+ host='fake')
+
+ self.service._sync_power_states(self.context)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY,
+ shared=True)
+ self.assertFalse(sync_mock.called)
+
+ def test_single_node(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock, sync_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ task = self._create_task(node_attrs=dict(uuid=self.node.uuid))
+ acquire_mock.side_effect = self._get_acquire_side_effect(task)
+
+ self.service._sync_power_states(self.context)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY,
+ shared=True)
+ sync_mock.assert_called_once_with(task, mock.ANY)
+
+ def test__sync_power_state_multiple_nodes(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock,
+ sync_mock):
+        # Create 7 nodes:
+        # 1st node: Should acquire and try to sync
+        # 2nd node: Not mapped to this conductor
+        # 3rd node: In DEPLOYWAIT provision_state
+        # 4th node: In maintenance mode
+        # 5th node: Is in power transition
+        # 6th node: Disappears after getting nodeinfo list
+        # 7th node: Should acquire and try to sync
+ nodes = []
+ node_attrs = {}
+ mapped_map = {}
+ for i in range(1, 8):
+ attrs = {'id': i,
+ 'uuid': uuidutils.generate_uuid()}
+ if i == 3:
+ attrs['provision_state'] = states.DEPLOYWAIT
+ attrs['target_provision_state'] = states.ACTIVE
+ elif i == 4:
+ attrs['maintenance'] = True
+ elif i == 5:
+ attrs['target_power_state'] = states.POWER_ON
+
+ n = self._create_node(**attrs)
+ nodes.append(n)
+ node_attrs[n.uuid] = attrs
+ mapped_map[n.uuid] = False if i == 2 else True
+
+ tasks = [self._create_task(node_attrs=node_attrs[x.uuid])
+ for x in nodes if x.id != 2]
+ # not found during acquire (4 = index of Node6 after removing Node2)
+ tasks[4] = exception.NodeNotFound(node=6)
+        sync_results = [0] * len(tasks)
+
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response(nodes))
+ mapped_mock.side_effect = lambda x, y: mapped_map[x]
+ acquire_mock.side_effect = self._get_acquire_side_effect(tasks)
+ sync_mock.side_effect = sync_results
+
+ with mock.patch.object(eventlet, 'sleep') as sleep_mock:
+ self.service._sync_power_states(self.context)
+ # Ensure we've yielded on every iteration, except for node
+ # not mapped to this conductor
+ self.assertEqual(len(nodes) - 1, sleep_mock.call_count)
+
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+ mapped_calls = [mock.call(x.uuid, x.driver) for x in nodes]
+ self.assertEqual(mapped_calls, mapped_mock.call_args_list)
+ acquire_calls = [mock.call(self.context, x.uuid,
+ purpose=mock.ANY,
+ shared=True)
+ for x in nodes if x.id != 2]
+ self.assertEqual(acquire_calls, acquire_mock.call_args_list)
+ # Nodes 1 and 7 (5 = index of Node7 after removing Node2)
+ sync_calls = [mock.call(tasks[0], mock.ANY),
+ mock.call(tasks[5], mock.ANY)]
+ self.assertEqual(sync_calls, sync_mock.call_args_list)
+
+
+@mock.patch.object(task_manager, 'acquire')
+@mock.patch.object(manager.ConductorManager, '_mapped_to_this_conductor')
+@mock.patch.object(dbapi.IMPL, 'get_nodeinfo_list')
+class ManagerCheckDeployTimeoutsTestCase(_CommonMixIn,
+ tests_db_base.DbTestCase):
+ def setUp(self):
+ super(ManagerCheckDeployTimeoutsTestCase, self).setUp()
+ self.config(deploy_callback_timeout=300, group='conductor')
+ self.service = manager.ConductorManager('hostname', 'test-topic')
+ self.service.dbapi = self.dbapi
+
+ self.node = self._create_node(provision_state=states.DEPLOYWAIT,
+ target_provision_state=states.ACTIVE)
+ self.task = self._create_task(node=self.node)
+
+ self.node2 = self._create_node(provision_state=states.DEPLOYWAIT,
+ target_provision_state=states.ACTIVE)
+ self.task2 = self._create_task(node=self.node2)
+
+ self.filters = {'reserved': False, 'maintenance': False,
+ 'provisioned_before': 300,
+ 'provision_state': states.DEPLOYWAIT}
+ self.columns = ['uuid', 'driver']
+
+ def _assert_get_nodeinfo_args(self, get_nodeinfo_mock):
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters,
+ sort_key='provision_updated_at', sort_dir='asc')
+
+ def test_disabled(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock):
+ self.config(deploy_callback_timeout=0, group='conductor')
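+        # A deploy_callback_timeout of 0 disables the periodic check
+        # entirely.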
+
+ self.service._check_deploy_timeouts(self.context)
+
+ self.assertFalse(get_nodeinfo_mock.called)
+ self.assertFalse(mapped_mock.called)
+ self.assertFalse(acquire_mock.called)
+
+ def test_not_mapped(self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = False
+
+ self.service._check_deploy_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+ self.assertFalse(acquire_mock.called)
+
+ def test_timeout(self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(self.task)
+
+ self.service._check_deploy_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY)
+ self.task.process_event.assert_called_with(
+ 'fail',
+ callback=self.service._spawn_worker,
+ call_args=(conductor_utils.cleanup_after_timeout, self.task),
+ err_handler=manager.provisioning_error_handler)
+
+ def test_acquire_node_disappears(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = exception.NodeNotFound(node='fake')
+
+ # Exception eaten
+ self.service._check_deploy_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(
+ self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.assertFalse(self.task.spawn_after.called)
+
+ def test_acquire_node_locked(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = exception.NodeLocked(node='fake',
+ host='fake')
+
+ # Exception eaten
+ self.service._check_deploy_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(
+ self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.assertFalse(self.task.spawn_after.called)
+
+ def test_no_deploywait_after_lock(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock):
+ task = self._create_task(
+ node_attrs=dict(provision_state=states.AVAILABLE,
+ uuid=self.node.uuid))
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(task)
+
+ self.service._check_deploy_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(
+ self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.assertFalse(task.spawn_after.called)
+
+ def test_maintenance_after_lock(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock):
+ task = self._create_task(
+ node_attrs=dict(provision_state=states.DEPLOYWAIT,
+ target_provision_state=states.ACTIVE,
+ maintenance=True,
+ uuid=self.node.uuid))
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([task.node, self.node2]))
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = (
+ self._get_acquire_side_effect([task, self.task2]))
+
+ self.service._check_deploy_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ self.assertEqual([mock.call(self.node.uuid, task.node.driver),
+ mock.call(self.node2.uuid, self.node2.driver)],
+ mapped_mock.call_args_list)
+ self.assertEqual([mock.call(self.context, self.node.uuid,
+ purpose=mock.ANY),
+ mock.call(self.context, self.node2.uuid,
+ purpose=mock.ANY)],
+ acquire_mock.call_args_list)
+ # First node skipped
+ self.assertFalse(task.spawn_after.called)
+ # Second node spawned
+ self.task2.process_event.assert_called_with(
+ 'fail',
+ callback=self.service._spawn_worker,
+ call_args=(conductor_utils.cleanup_after_timeout, self.task2),
+ err_handler=manager.provisioning_error_handler)
+
+ def test_exiting_no_worker_avail(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock):
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node, self.node2]))
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(
+ [(self.task, exception.NoFreeConductorWorker()), self.task2])
+
+        # Exception eaten
+ self.service._check_deploy_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ # mapped should be only called for the first node as we should
+ # have exited the loop early due to NoFreeConductorWorker
+ mapped_mock.assert_called_once_with(
+ self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.task.process_event.assert_called_with(
+ 'fail',
+ callback=self.service._spawn_worker,
+ call_args=(conductor_utils.cleanup_after_timeout, self.task),
+ err_handler=manager.provisioning_error_handler)
+
+ def test_exiting_with_other_exception(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node, self.node2]))
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(
+ [(self.task, exception.IronicException('foo')), self.task2])
+
+ # Should re-raise
+ self.assertRaises(exception.IronicException,
+ self.service._check_deploy_timeouts,
+ self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ # mapped should be only called for the first node as we should
+ # have exited the loop early due to unknown exception
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.task.process_event.assert_called_with(
+ 'fail',
+ callback=self.service._spawn_worker,
+ call_args=(conductor_utils.cleanup_after_timeout, self.task),
+ err_handler=manager.provisioning_error_handler)
+
+ def test_worker_limit(self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ self.config(periodic_max_workers=2, group='conductor')
+
+        # Use the same node/task for all three entries to keep this test
+        # simple
+
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node] * 3))
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = (
+ self._get_acquire_side_effect([self.task] * 3))
+
+ self.service._check_deploy_timeouts(self.context)
+
+        # Should only have run 2.
+ self.assertEqual([mock.call(self.node.uuid, self.node.driver)] * 2,
+ mapped_mock.call_args_list)
+ self.assertEqual([mock.call(self.context, self.node.uuid,
+ purpose=mock.ANY)] * 2,
+ acquire_mock.call_args_list)
+ process_event_call = mock.call(
+ 'fail',
+ callback=self.service._spawn_worker,
+ call_args=(conductor_utils.cleanup_after_timeout, self.task),
+ err_handler=manager.provisioning_error_handler)
+ self.assertEqual([process_event_call] * 2,
+ self.task.process_event.call_args_list)
+
+ @mock.patch.object(dbapi.IMPL, 'update_port')
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.update_port_address')
+    def test_update_port_duplicate_mac(self, mac_update_mock, mock_up,
+                                       get_nodeinfo_mock, mapped_mock,
+                                       acquire_mock):
+ node = utils.create_test_node(driver='fake')
+ port = obj_utils.create_test_port(self.context, node_id=node.id)
+ mock_up.side_effect = exception.MACAlreadyExists(mac=port.address)
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.update_port,
+ self.context, port)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.MACAlreadyExists, exc.exc_info[0])
+ # ensure Neutron wasn't updated
+ self.assertFalse(mac_update_mock.called)
+
+
+class ManagerTestProperties(tests_db_base.DbTestCase):
+
+ def setUp(self):
+ super(ManagerTestProperties, self).setUp()
+ self.service = manager.ConductorManager('test-host', 'test-topic')
+
+ def _check_driver_properties(self, driver, expected):
+ mgr_utils.mock_the_extension_manager(driver=driver)
+ self.driver = driver_factory.get_driver(driver)
+ self.service.init_host()
+ properties = self.service.get_driver_properties(self.context, driver)
+ self.assertEqual(sorted(expected), sorted(properties.keys()))
+
+ def test_driver_properties_fake(self):
+ expected = ['A1', 'A2', 'B1', 'B2']
+ self._check_driver_properties("fake", expected)
+
+ def test_driver_properties_fake_ipmitool(self):
+ expected = ['ipmi_address', 'ipmi_terminal_port',
+ 'ipmi_password', 'ipmi_priv_level',
+ 'ipmi_username', 'ipmi_bridging',
+ 'ipmi_transit_channel', 'ipmi_transit_address',
+ 'ipmi_target_channel', 'ipmi_target_address',
+ 'ipmi_local_address', 'ipmi_protocol_version',
+ 'ipmi_force_boot_device'
+ ]
+ self._check_driver_properties("fake_ipmitool", expected)
+
+ def test_driver_properties_fake_ipminative(self):
+ expected = ['ipmi_address', 'ipmi_password', 'ipmi_username',
+ 'ipmi_terminal_port', 'ipmi_force_boot_device']
+ self._check_driver_properties("fake_ipminative", expected)
+
+ def test_driver_properties_fake_ssh(self):
+ expected = ['ssh_address', 'ssh_username', 'ssh_virt_type',
+ 'ssh_key_contents', 'ssh_key_filename',
+ 'ssh_password', 'ssh_port']
+ self._check_driver_properties("fake_ssh", expected)
+
+ def test_driver_properties_fake_pxe(self):
+ expected = ['deploy_kernel', 'deploy_ramdisk']
+ self._check_driver_properties("fake_pxe", expected)
+
+ def test_driver_properties_fake_seamicro(self):
+ expected = ['seamicro_api_endpoint', 'seamicro_password',
+ 'seamicro_server_id', 'seamicro_username',
+ 'seamicro_api_version', 'seamicro_terminal_port']
+ self._check_driver_properties("fake_seamicro", expected)
+
+ def test_driver_properties_fake_snmp(self):
+ expected = ['snmp_driver', 'snmp_address', 'snmp_port', 'snmp_version',
+ 'snmp_community', 'snmp_security', 'snmp_outlet']
+ self._check_driver_properties("fake_snmp", expected)
+
+ def test_driver_properties_pxe_ipmitool(self):
+ expected = ['ipmi_address', 'ipmi_terminal_port',
+ 'ipmi_password', 'ipmi_priv_level',
+ 'ipmi_username', 'ipmi_bridging', 'ipmi_transit_channel',
+ 'ipmi_transit_address', 'ipmi_target_channel',
+ 'ipmi_target_address', 'ipmi_local_address',
+ 'deploy_kernel', 'deploy_ramdisk', 'ipmi_protocol_version',
+ 'ipmi_force_boot_device'
+ ]
+ self._check_driver_properties("pxe_ipmitool", expected)
+
+ def test_driver_properties_pxe_ipminative(self):
+ expected = ['ipmi_address', 'ipmi_password', 'ipmi_username',
+ 'deploy_kernel', 'deploy_ramdisk',
+ 'ipmi_terminal_port', 'ipmi_force_boot_device']
+ self._check_driver_properties("pxe_ipminative", expected)
+
+ def test_driver_properties_pxe_ssh(self):
+ expected = ['deploy_kernel', 'deploy_ramdisk',
+ 'ssh_address', 'ssh_username', 'ssh_virt_type',
+ 'ssh_key_contents', 'ssh_key_filename',
+ 'ssh_password', 'ssh_port']
+ self._check_driver_properties("pxe_ssh", expected)
+
+ def test_driver_properties_pxe_seamicro(self):
+ expected = ['deploy_kernel', 'deploy_ramdisk',
+ 'seamicro_api_endpoint', 'seamicro_password',
+ 'seamicro_server_id', 'seamicro_username',
+ 'seamicro_api_version', 'seamicro_terminal_port']
+ self._check_driver_properties("pxe_seamicro", expected)
+
+ def test_driver_properties_pxe_snmp(self):
+ expected = ['deploy_kernel', 'deploy_ramdisk',
+ 'snmp_driver', 'snmp_address', 'snmp_port', 'snmp_version',
+ 'snmp_community', 'snmp_security', 'snmp_outlet']
+ self._check_driver_properties("pxe_snmp", expected)
+
+ def test_driver_properties_fake_ilo(self):
+ expected = ['ilo_address', 'ilo_username', 'ilo_password',
+ 'client_port', 'client_timeout', 'ilo_change_password']
+ self._check_driver_properties("fake_ilo", expected)
+
+ def test_driver_properties_ilo_iscsi(self):
+ expected = ['ilo_address', 'ilo_username', 'ilo_password',
+ 'client_port', 'client_timeout', 'ilo_deploy_iso',
+ 'console_port', 'ilo_change_password']
+ self._check_driver_properties("iscsi_ilo", expected)
+
+ def test_driver_properties_agent_ilo(self):
+ expected = ['ilo_address', 'ilo_username', 'ilo_password',
+ 'client_port', 'client_timeout', 'ilo_deploy_iso',
+ 'console_port', 'ilo_change_password']
+ self._check_driver_properties("agent_ilo", expected)
+
+ def test_driver_properties_fail(self):
+ mgr_utils.mock_the_extension_manager()
+ self.driver = driver_factory.get_driver("fake")
+ self.service.init_host()
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.get_driver_properties,
+ self.context, "bad-driver")
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.DriverNotFound, exc.exc_info[0])
+
+
+@mock.patch.object(task_manager, 'acquire')
+@mock.patch.object(manager.ConductorManager, '_mapped_to_this_conductor')
+@mock.patch.object(dbapi.IMPL, 'get_nodeinfo_list')
+class ManagerSyncLocalStateTestCase(_CommonMixIn, tests_db_base.DbTestCase):
+
+ def setUp(self):
+ super(ManagerSyncLocalStateTestCase, self).setUp()
+
+ self.service = manager.ConductorManager('hostname', 'test-topic')
+
+ self.service.conductor = mock.Mock()
+ self.service.dbapi = self.dbapi
+ self.service.ring_manager = mock.Mock()
+
+ self.node = self._create_node(provision_state=states.ACTIVE,
+ target_provision_state=states.NOSTATE)
+ self.task = self._create_task(node=self.node)
+
+ self.filters = {'reserved': False,
+ 'maintenance': False,
+ 'provision_state': states.ACTIVE}
+ self.columns = ['uuid', 'driver', 'id', 'conductor_affinity']
+
+ def _assert_get_nodeinfo_args(self, get_nodeinfo_mock):
+ get_nodeinfo_mock.assert_called_once_with(
+ columns=self.columns, filters=self.filters)
+
+ def test_not_mapped(self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = False
+
+ self.service._sync_local_state(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+ self.assertFalse(acquire_mock.called)
+ self.service.ring_manager.reset.assert_called_once_with()
+
+ def test_already_mapped(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock):
+ # Node is already mapped to the conductor running the periodic task
+ self.node.conductor_affinity = 123
+ self.service.conductor.id = 123
+
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+
+ self.service._sync_local_state(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+ self.assertFalse(acquire_mock.called)
+ self.service.ring_manager.reset.assert_called_once_with()
+
+ def test_good(self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(self.task)
+
+ self.service._sync_local_state(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY)
+ # assert spawn_after has been called
+ self.task.spawn_after.assert_called_once_with(
+ self.service._spawn_worker,
+ self.service._do_takeover, self.task)
+
+ def test_no_free_worker(self, get_nodeinfo_mock, mapped_mock,
+ acquire_mock):
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = (
+ self._get_acquire_side_effect([self.task] * 3))
+ self.task.spawn_after.side_effect = [
+ None,
+ exception.NoFreeConductorWorker('error')
+ ]
+
+ # 3 nodes to be checked
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node] * 3))
+
+ self.service._sync_local_state(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+
+ # assert _mapped_to_this_conductor() gets called 2 times only
+ # instead of 3. When NoFreeConductorWorker is raised the loop
+ # should be broken
+ expected = [mock.call(self.node.uuid, self.node.driver)] * 2
+ self.assertEqual(expected, mapped_mock.call_args_list)
+
+ # assert acquire() gets called 2 times only instead of 3. When
+ # NoFreeConductorWorker is raised the loop should be broken
+ expected = [mock.call(self.context, self.node.uuid,
+ purpose=mock.ANY)] * 2
+ self.assertEqual(expected, acquire_mock.call_args_list)
+
+ # assert spawn_after has been called twice
+ expected = [mock.call(self.service._spawn_worker,
+ self.service._do_takeover, self.task)] * 2
+ self.assertEqual(expected, self.task.spawn_after.call_args_list)
+
+    def test_node_locked(self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(
+ [self.task, exception.NodeLocked('error'), self.task])
+ self.task.spawn_after.side_effect = [None, None]
+
+ # 3 nodes to be checked
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node] * 3))
+
+ self.service._sync_local_state(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+
+ # assert _mapped_to_this_conductor() gets called 3 times
+ expected = [mock.call(self.node.uuid, self.node.driver)] * 3
+ self.assertEqual(expected, mapped_mock.call_args_list)
+
+ # assert acquire() gets called 3 times
+ expected = [mock.call(self.context, self.node.uuid,
+ purpose=mock.ANY)] * 3
+ self.assertEqual(expected, acquire_mock.call_args_list)
+
+ # assert spawn_after has been called only 2 times
+ expected = [mock.call(self.service._spawn_worker,
+ self.service._do_takeover, self.task)] * 2
+ self.assertEqual(expected, self.task.spawn_after.call_args_list)
+
+ def test_worker_limit(self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ # Limit to only 1 worker
+ self.config(periodic_max_workers=1, group='conductor')
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = (
+ self._get_acquire_side_effect([self.task] * 3))
+ self.task.spawn_after.side_effect = [None] * 3
+
+ # 3 nodes to be checked
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node] * 3))
+
+ self.service._sync_local_state(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+
+ # assert _mapped_to_this_conductor() gets called only once
+ # because of the worker limit
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+
+ # assert acquire() gets called only once because of the worker limit
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY)
+
+ # assert spawn_after has been called
+ self.task.spawn_after.assert_called_once_with(
+ self.service._spawn_worker,
+ self.service._do_takeover, self.task)
+
+
+@mock.patch.object(swift, 'SwiftAPI')
+class StoreConfigDriveTestCase(tests_base.TestCase):
+
+ def setUp(self):
+ super(StoreConfigDriveTestCase, self).setUp()
+ self.node = obj_utils.get_test_node(self.context, driver='fake',
+ instance_info=None)
+
+ def test_store_configdrive(self, mock_swift):
+ manager._store_configdrive(self.node, 'foo')
+ expected_instance_info = {'configdrive': 'foo'}
+ self.assertEqual(expected_instance_info, self.node.instance_info)
+ self.assertFalse(mock_swift.called)
+
+ def test_store_configdrive_swift(self, mock_swift):
+ container_name = 'foo_container'
+ timeout = 123
+ expected_obj_name = 'configdrive-%s' % self.node.uuid
+ expected_obj_header = {'X-Delete-After': timeout}
+ expected_instance_info = {'configdrive': 'http://1.2.3.4'}
+
+ # set configs and mocks
+ CONF.set_override('configdrive_use_swift', True, group='conductor')
+ CONF.set_override('configdrive_swift_container', container_name,
+ group='conductor')
+ CONF.set_override('deploy_callback_timeout', timeout,
+ group='conductor')
+ mock_swift.return_value.get_temp_url.return_value = 'http://1.2.3.4'
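+        # With configdrive_use_swift enabled, the Swift temp URL (not the
+        # config drive contents) is what should land in instance_info.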
+
+ manager._store_configdrive(self.node, b'foo')
+
+ mock_swift.assert_called_once_with()
+ mock_swift.return_value.create_object.assert_called_once_with(
+ container_name, expected_obj_name, mock.ANY,
+ object_headers=expected_obj_header)
+ mock_swift.return_value.get_temp_url.assert_called_once_with(
+ container_name, expected_obj_name, timeout)
+ self.assertEqual(expected_instance_info, self.node.instance_info)
+
+
+@_mock_record_keepalive
+class NodeInspectHardware(_ServiceSetUpMixin,
+ tests_db_base.DbTestCase):
+
+ @mock.patch('ironic.drivers.modules.fake.FakeInspect.inspect_hardware')
+ def test_inspect_hardware_ok(self, mock_inspect):
+ self._start_service()
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.INSPECTING)
+ task = task_manager.TaskManager(self.context, node.uuid)
+ mock_inspect.return_value = states.MANAGEABLE
+ manager._do_inspect_hardware(task)
+ node.refresh()
+ self.assertEqual(states.MANAGEABLE, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ mock_inspect.assert_called_once_with(mock.ANY)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeInspect.inspect_hardware')
+ def test_inspect_hardware_return_inspecting(self, mock_inspect):
+ self._start_service()
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.INSPECTING)
+ task = task_manager.TaskManager(self.context, node.uuid)
+ mock_inspect.return_value = states.INSPECTING
+ manager._do_inspect_hardware(task)
+ node.refresh()
+ self.assertEqual(states.INSPECTING, node.provision_state)
+ self.assertEqual(states.NOSTATE, node.target_provision_state)
+ self.assertIsNone(node.last_error)
+ mock_inspect.assert_called_once_with(mock.ANY)
+
+ @mock.patch.object(manager, 'LOG')
+ @mock.patch('ironic.drivers.modules.fake.FakeInspect.inspect_hardware')
+ def test_inspect_hardware_return_other_state(self, mock_inspect, log_mock):
+ self._start_service()
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.INSPECTING)
+ task = task_manager.TaskManager(self.context, node.uuid)
+ mock_inspect.return_value = None
+ self.assertRaises(exception.HardwareInspectionFailure,
+ manager._do_inspect_hardware, task)
+ node.refresh()
+ self.assertEqual(states.INSPECTFAIL, node.provision_state)
+ self.assertEqual(states.MANAGEABLE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ mock_inspect.assert_called_once_with(mock.ANY)
+ self.assertTrue(log_mock.error.called)
+
+ def test__check_inspect_timeouts(self):
+ self._start_service()
+ CONF.set_override('inspect_timeout', 1, group='conductor')
+ node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ provision_state=states.INSPECTING,
+ target_provision_state=states.MANAGEABLE,
+ provision_updated_at=datetime.datetime(2000, 1, 1, 0, 0),
+ inspection_started_at=datetime.datetime(2000, 1, 1, 0, 0))
+
+ self.service._check_inspect_timeouts(self.context)
+ self.service._worker_pool.waitall()
+ node.refresh()
+ self.assertEqual(states.INSPECTFAIL, node.provision_state)
+ self.assertEqual(states.MANAGEABLE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+
+ @mock.patch('ironic.conductor.manager.ConductorManager._spawn_worker')
+ def test_inspect_hardware_worker_pool_full(self, mock_spawn):
+ prv_state = states.MANAGEABLE
+ tgt_prv_state = states.NOSTATE
+ node = obj_utils.create_test_node(self.context,
+ provision_state=prv_state,
+ target_provision_state=tgt_prv_state,
+ last_error=None, driver='fake')
+ self._start_service()
+
+ mock_spawn.side_effect = exception.NoFreeConductorWorker()
+
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.inspect_hardware,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NoFreeConductorWorker, exc.exc_info[0])
+ self.service._worker_pool.waitall()
+ node.refresh()
+ # Make sure things were rolled back
+ self.assertEqual(prv_state, node.provision_state)
+ self.assertEqual(tgt_prv_state, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ def _test_inspect_hardware_validate_fail(self, mock_validate):
+ mock_validate.side_effect = exception.InvalidParameterValue('error')
+ node = obj_utils.create_test_node(self.context, driver='fake')
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.inspect_hardware,
+ self.context, node.uuid)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.HardwareInspectionFailure, exc.exc_info[0])
+        # This is a sync operation; last_error should be None.
+ self.assertIsNone(node.last_error)
+ # Verify reservation has been cleared.
+ self.assertIsNone(node.reservation)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeInspect.validate')
+ def test_inspect_hardware_validate_fail(self, mock_validate):
+ self._test_inspect_hardware_validate_fail(mock_validate)
+
+ @mock.patch('ironic.drivers.modules.fake.FakePower.validate')
+ def test_inspect_hardware_power_validate_fail(self, mock_validate):
+ self._test_inspect_hardware_validate_fail(mock_validate)
+
+ @mock.patch('ironic.drivers.modules.fake.FakeInspect.inspect_hardware')
+ def test_inspect_hardware_raises_error(self, mock_inspect):
+ self._start_service()
+ mock_inspect.side_effect = exception.HardwareInspectionFailure('test')
+ state = states.MANAGEABLE
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ provision_state=states.INSPECTING,
+ target_provision_state=state)
+ task = task_manager.TaskManager(self.context, node.uuid)
+
+ self.assertRaises(exception.HardwareInspectionFailure,
+ manager._do_inspect_hardware, task)
+ node.refresh()
+ self.assertEqual(states.INSPECTFAIL, node.provision_state)
+ self.assertEqual(states.MANAGEABLE, node.target_provision_state)
+ self.assertIsNotNone(node.last_error)
+ self.assertTrue(mock_inspect.called)
+
+
+@mock.patch.object(task_manager, 'acquire')
+@mock.patch.object(manager.ConductorManager, '_mapped_to_this_conductor')
+@mock.patch.object(dbapi.IMPL, 'get_nodeinfo_list')
+class ManagerCheckInspectTimeoutsTestCase(_CommonMixIn,
+ tests_db_base.DbTestCase):
+ def setUp(self):
+ super(ManagerCheckInspectTimeoutsTestCase, self).setUp()
+ self.config(inspect_timeout=300, group='conductor')
+ self.service = manager.ConductorManager('hostname', 'test-topic')
+ self.service.dbapi = self.dbapi
+
+ self.node = self._create_node(provision_state=states.INSPECTING,
+ target_provision_state=states.MANAGEABLE)
+ self.task = self._create_task(node=self.node)
+
+ self.node2 = self._create_node(
+ provision_state=states.INSPECTING,
+ target_provision_state=states.MANAGEABLE)
+ self.task2 = self._create_task(node=self.node2)
+
+ self.filters = {'reserved': False,
+ 'inspection_started_before': 300,
+ 'provision_state': states.INSPECTING}
+ self.columns = ['uuid', 'driver']
+
+ def _assert_get_nodeinfo_args(self, get_nodeinfo_mock):
+ get_nodeinfo_mock.assert_called_once_with(
+ sort_dir='asc', columns=self.columns, filters=self.filters,
+ sort_key='inspection_started_at')
+
+ def test__check_inspect_timeouts_disabled(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock):
+ self.config(inspect_timeout=0, group='conductor')
+
+ self.service._check_inspect_timeouts(self.context)
+
+ self.assertFalse(get_nodeinfo_mock.called)
+ self.assertFalse(mapped_mock.called)
+ self.assertFalse(acquire_mock.called)
+
+ def test__check_inspect_timeouts_not_mapped(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = False
+
+ self.service._check_inspect_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+ self.assertFalse(acquire_mock.called)
+
+ def test__check_inspect_timeout(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(self.task)
+
+ self.service._check_inspect_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context, self.node.uuid,
+ purpose=mock.ANY)
+ self.task.process_event.assert_called_with('fail')
+
+ def test__check_inspect_timeouts_acquire_node_disappears(self,
+ get_nodeinfo_mock,
+ mapped_mock,
+ acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = exception.NodeNotFound(node='fake')
+
+ # Exception eaten
+ self.service._check_inspect_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.assertFalse(self.task.process_event.called)
+
+ def test__check_inspect_timeouts_acquire_node_locked(self,
+ get_nodeinfo_mock,
+ mapped_mock,
+ acquire_mock):
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = exception.NodeLocked(node='fake',
+ host='fake')
+
+ # Exception eaten
+ self.service._check_inspect_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(self.node.uuid,
+ self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.assertFalse(self.task.process_event.called)
+
+ def test__check_inspect_timeouts_no_acquire_after_lock(self,
+ get_nodeinfo_mock,
+ mapped_mock,
+ acquire_mock):
+ task = self._create_task(
+ node_attrs=dict(provision_state=states.AVAILABLE,
+ uuid=self.node.uuid))
+ get_nodeinfo_mock.return_value = self._get_nodeinfo_list_response()
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(task)
+
+ self.service._check_inspect_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ mapped_mock.assert_called_once_with(
+ self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.assertFalse(task.process_event.called)
+
+ def test__check_inspect_timeouts_to_maintenance_after_lock(
+ self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ task = self._create_task(
+ node_attrs=dict(provision_state=states.INSPECTING,
+ target_provision_state=states.MANAGEABLE,
+ maintenance=True,
+ uuid=self.node.uuid))
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([task.node, self.node2]))
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = (
+ self._get_acquire_side_effect([task, self.task2]))
+
+ self.service._check_inspect_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+ self.assertEqual([mock.call(self.node.uuid, task.node.driver),
+ mock.call(self.node2.uuid, self.node2.driver)],
+ mapped_mock.call_args_list)
+ self.assertEqual([mock.call(self.context, self.node.uuid,
+ purpose=mock.ANY),
+ mock.call(self.context, self.node2.uuid,
+ purpose=mock.ANY)],
+ acquire_mock.call_args_list)
+ # First node skipped
+ self.assertFalse(task.process_event.called)
+ # Second node spawned
+ self.task2.process_event.assert_called_with('fail')
+
+ def test__check_inspect_timeouts_exiting_no_worker_avail(
+ self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node, self.node2]))
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(
+ [(self.task, exception.NoFreeConductorWorker()), self.task2])
+
+ # Exception should be nuked
+ self.service._check_inspect_timeouts(self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+        # mapped should only be called for the first node, as we should
+        # have exited the loop early due to NoFreeConductorWorker
+ mapped_mock.assert_called_once_with(
+ self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.task.process_event.assert_called_with('fail')
+
+ def test__check_inspect_timeouts_exit_with_other_exception(
+ self, get_nodeinfo_mock, mapped_mock, acquire_mock):
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node, self.node2]))
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = self._get_acquire_side_effect(
+ [(self.task, exception.IronicException('foo')), self.task2])
+
+ # Should re-raise
+ self.assertRaises(exception.IronicException,
+ self.service._check_inspect_timeouts,
+ self.context)
+
+ self._assert_get_nodeinfo_args(get_nodeinfo_mock)
+        # mapped should only be called for the first node, as we should
+        # have exited the loop early due to the unknown exception
+ mapped_mock.assert_called_once_with(
+ self.node.uuid, self.node.driver)
+ acquire_mock.assert_called_once_with(self.context,
+ self.node.uuid,
+ purpose=mock.ANY)
+ self.task.process_event.assert_called_with('fail')
+
+ def test__check_inspect_timeouts_worker_limit(self, get_nodeinfo_mock,
+ mapped_mock, acquire_mock):
+ self.config(periodic_max_workers=2, group='conductor')
+
+        # Use the same nodes/tasks to make life easier in the tests here.
+
+ get_nodeinfo_mock.return_value = (
+ self._get_nodeinfo_list_response([self.node] * 3))
+ mapped_mock.return_value = True
+ acquire_mock.side_effect = (
+ self._get_acquire_side_effect([self.task] * 3))
+
+ self.service._check_inspect_timeouts(self.context)
+
+        # Should only have run 2.
+ self.assertEqual([mock.call(self.node.uuid, self.node.driver)] * 2,
+ mapped_mock.call_args_list)
+ self.assertEqual([mock.call(self.context, self.node.uuid,
+ purpose=mock.ANY)] * 2,
+ acquire_mock.call_args_list)
+ process_event_call = mock.call('fail')
+ self.assertEqual([process_event_call] * 2,
+ self.task.process_event.call_args_list)
+
+
+@_mock_record_keepalive
+class DestroyPortTestCase(_ServiceSetUpMixin, tests_db_base.DbTestCase):
+ def test_destroy_port(self):
+ node = obj_utils.create_test_node(self.context, driver='fake')
+
+ port = obj_utils.create_test_port(self.context,
+ node_id=node.id)
+ self.service.destroy_port(self.context, port)
+ self.assertRaises(exception.PortNotFound, port.refresh)
+
+ def test_destroy_port_node_locked(self):
+ node = obj_utils.create_test_node(self.context, driver='fake',
+ reservation='fake-reserv')
+
+ port = obj_utils.create_test_port(self.context, node_id=node.id)
+ exc = self.assertRaises(messaging.rpc.ExpectedException,
+ self.service.destroy_port,
+ self.context, port)
+ # Compare true exception hidden by @messaging.expected_exceptions
+ self.assertEqual(exception.NodeLocked, exc.exc_info[0])
+
+
+@mock.patch.object(manager.ConductorManager, '_fail_if_in_state')
+@mock.patch.object(manager.ConductorManager, '_mapped_to_this_conductor')
+@mock.patch.object(dbapi.IMPL, 'get_offline_conductors')
+class ManagerCheckDeployingStatusTestCase(_ServiceSetUpMixin,
+ tests_db_base.DbTestCase):
+ def setUp(self):
+ super(ManagerCheckDeployingStatusTestCase, self).setUp()
+ self.service = manager.ConductorManager('hostname', 'test-topic')
+ self.service.dbapi = self.dbapi
+
+ self._start_service()
+
+ self.node = obj_utils.create_test_node(
+ self.context, id=1, uuid=uuidutils.generate_uuid(),
+ driver='fake', provision_state=states.DEPLOYING,
+ target_provision_state=states.DEPLOYDONE,
+ reservation='fake-conductor')
+
+        # Create a second node in a different state to test the filtering of
+        # nodes in the DEPLOYING state.
+ obj_utils.create_test_node(
+ self.context, id=10, uuid=uuidutils.generate_uuid(),
+ driver='fake', provision_state=states.AVAILABLE,
+ target_provision_state=states.NOSTATE)
+
+ self.expected_filter = {
+ 'provision_state': 'deploying', 'reserved': False,
+ 'maintenance': False}
+
+ def test__check_deploying_status(self, mock_off_cond, mock_mapped,
+ mock_fail_if):
+ mock_off_cond.return_value = ['fake-conductor']
+
+ self.service._check_deploying_status(self.context)
+
+ self.node.refresh()
+ mock_off_cond.assert_called_once_with()
+ mock_mapped.assert_called_once_with(self.node.uuid, 'fake')
+ mock_fail_if.assert_called_once_with(
+ mock.ANY, {'id': self.node.id}, states.DEPLOYING,
+ 'provision_updated_at',
+ callback_method=conductor_utils.cleanup_after_timeout,
+ err_handler=manager.provisioning_error_handler)
+ # assert node was released
+ self.assertIsNone(self.node.reservation)
+
+ def test__check_deploying_status_alive(self, mock_off_cond,
+ mock_mapped, mock_fail_if):
+ mock_off_cond.return_value = []
+
+ self.service._check_deploying_status(self.context)
+
+ self.node.refresh()
+ mock_off_cond.assert_called_once_with()
+ self.assertFalse(mock_mapped.called)
+ self.assertFalse(mock_fail_if.called)
+ # assert node still locked
+ self.assertIsNotNone(self.node.reservation)
+
+ @mock.patch.object(objects.Node, 'release')
+ def test__check_deploying_status_release_exceptions_skipping(
+ self, mock_release, mock_off_cond, mock_mapped, mock_fail_if):
+ mock_off_cond.return_value = ['fake-conductor']
+ # Add another node so we can check both exceptions
+ node2 = obj_utils.create_test_node(
+ self.context, id=2, uuid=uuidutils.generate_uuid(),
+ driver='fake', provision_state=states.DEPLOYING,
+ target_provision_state=states.DEPLOYDONE,
+ reservation='fake-conductor')
+
+ mock_mapped.return_value = True
+ mock_release.side_effect = iter([exception.NodeNotFound('not found'),
+ exception.NodeLocked('locked')])
+ self.service._check_deploying_status(self.context)
+
+ self.node.refresh()
+ mock_off_cond.assert_called_once_with()
+ expected_calls = [mock.call(self.node.uuid, 'fake'),
+ mock.call(node2.uuid, 'fake')]
+ mock_mapped.assert_has_calls(expected_calls)
+ # Assert we skipped and didn't try to call _fail_if_in_state
+ self.assertFalse(mock_fail_if.called)
+
+ @mock.patch.object(objects.Node, 'release')
+ def test__check_deploying_status_release_node_not_locked(
+ self, mock_release, mock_off_cond, mock_mapped, mock_fail_if):
+ mock_off_cond.return_value = ['fake-conductor']
+ mock_mapped.return_value = True
+ mock_release.side_effect = iter([
+ exception.NodeNotLocked('not locked')])
+ self.service._check_deploying_status(self.context)
+
+ self.node.refresh()
+ mock_off_cond.assert_called_once_with()
+ mock_mapped.assert_called_once_with(self.node.uuid, 'fake')
+ mock_fail_if.assert_called_once_with(
+ mock.ANY, {'id': self.node.id}, states.DEPLOYING,
+ 'provision_updated_at',
+ callback_method=conductor_utils.cleanup_after_timeout,
+ err_handler=manager.provisioning_error_handler)
+
+
+class TestIndirectionApiConductor(tests_db_base.DbTestCase):
+
+ def setUp(self):
+ super(TestIndirectionApiConductor, self).setUp()
+ self.conductor = manager.ConductorManager('test-host', 'test-topic')
+
+ def _test_object_action(self, is_classmethod, raise_exception,
+ return_object=False):
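+        # Register a throwaway object class and invoke one of its methods via
+        # the conductor's indirection API, checking that plain return values
+        # and whole objects round-trip back to the caller.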
+ @obj_base.IronicObjectRegistry.register
+ class TestObject(obj_base.IronicObject):
+ context = self.context
+
+ def foo(self, context, raise_exception=False, return_object=False):
+ if raise_exception:
+ raise Exception('test')
+ elif return_object:
+ return obj
+ else:
+ return 'test'
+
+ @classmethod
+ def bar(cls, context, raise_exception=False, return_object=False):
+ if raise_exception:
+ raise Exception('test')
+ elif return_object:
+ return obj
+ else:
+ return 'test'
+
+ obj = TestObject(self.context)
+ if is_classmethod:
+ versions = ovo_base.obj_tree_get_versions(TestObject.obj_name())
+ result = self.conductor.object_class_action_versions(
+ self.context, TestObject.obj_name(), 'bar', versions,
+ tuple(), {'raise_exception': raise_exception,
+ 'return_object': return_object})
+ else:
+ updates, result = self.conductor.object_action(
+ self.context, obj, 'foo', tuple(),
+ {'raise_exception': raise_exception,
+ 'return_object': return_object})
+ if return_object:
+ self.assertEqual(obj, result)
+ else:
+ self.assertEqual('test', result)
+
+ def test_object_action(self):
+ self._test_object_action(False, False)
+
+ def test_object_action_on_raise(self):
+ self.assertRaises(messaging.ExpectedException,
+ self._test_object_action, False, True)
+
+ def test_object_action_on_object(self):
+ self._test_object_action(False, False, True)
+
+ def test_object_class_action(self):
+ self._test_object_action(True, False)
+
+ def test_object_class_action_on_raise(self):
+ self.assertRaises(messaging.ExpectedException,
+ self._test_object_action, True, True)
+
+ def test_object_class_action_on_object(self):
+ self._test_object_action(True, False, False)
+
+ def test_object_action_copies_object(self):
+ @obj_base.IronicObjectRegistry.register
+ class TestObject(obj_base.IronicObject):
+ fields = {'dict': fields.DictOfStringsField()}
+
+ def touch_dict(self, context):
+ self.dict['foo'] = 'bar'
+ self.obj_reset_changes()
+
+ obj = TestObject(self.context)
+ obj.dict = {}
+ obj.obj_reset_changes()
+ updates, result = self.conductor.object_action(
+ self.context, obj, 'touch_dict', tuple(), {})
+ # NOTE(danms): If conductor did not properly copy the object, then
+ # the new and reference copies of the nested dict object will be
+ # the same, and thus 'dict' will not be reported as changed
+ self.assertIn('dict', updates)
+ self.assertEqual({'foo': 'bar'}, updates['dict'])
+
+ def test_object_backport_versions(self):
+ fake_backported_obj = 'fake-backported-obj'
+ obj_name = 'fake-obj'
+ test_obj = mock.Mock()
+ test_obj.obj_name.return_value = obj_name
+ test_obj.obj_to_primitive.return_value = fake_backported_obj
+ fake_version_manifest = {obj_name: '1.0'}
+
+ result = self.conductor.object_backport_versions(
+ self.context, test_obj, fake_version_manifest)
+
+ self.assertEqual(result, fake_backported_obj)
+ test_obj.obj_to_primitive.assert_called_once_with(
+ target_version='1.0', version_manifest=fake_version_manifest)
diff --git a/ironic/tests/unit/conductor/test_rpcapi.py b/ironic/tests/unit/conductor/test_rpcapi.py
new file mode 100644
index 000000000..b7bba1814
--- /dev/null
+++ b/ironic/tests/unit/conductor/test_rpcapi.py
@@ -0,0 +1,344 @@
+# coding=utf-8
+
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Unit Tests for :py:class:`ironic.conductor.rpcapi.ConductorAPI`.
+"""
+
+import copy
+
+import mock
+from oslo_config import cfg
+from oslo_messaging import _utils as messaging_utils
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import manager as conductor_manager
+from ironic.conductor import rpcapi as conductor_rpcapi
+from ironic import objects
+from ironic.tests.unit import base as tests_base
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils as dbutils
+
+CONF = cfg.CONF
+
+
+class ConductorRPCAPITestCase(tests_base.TestCase):
+
+ def test_versions_in_sync(self):
+ self.assertEqual(
+ conductor_manager.ConductorManager.RPC_API_VERSION,
+ conductor_rpcapi.ConductorAPI.RPC_API_VERSION)
+
+
+class RPCAPITestCase(base.DbTestCase):
+
+ def setUp(self):
+ super(RPCAPITestCase, self).setUp()
+ self.fake_node = dbutils.get_test_node(driver='fake-driver')
+ self.fake_node_obj = objects.Node._from_db_object(
+ objects.Node(self.context), self.fake_node)
+
+ def test_serialized_instance_has_uuid(self):
+        self.assertIn('uuid', self.fake_node)
+
+ def test_get_topic_for_known_driver(self):
+ CONF.set_override('host', 'fake-host')
+ self.dbapi.register_conductor({'hostname': 'fake-host',
+ 'drivers': ['fake-driver']})
+
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+ expected_topic = 'fake-topic.fake-host'
+ self.assertEqual(expected_topic,
+ rpcapi.get_topic_for(self.fake_node_obj))
+
+ def test_get_topic_for_unknown_driver(self):
+ CONF.set_override('host', 'fake-host')
+ self.dbapi.register_conductor({'hostname': 'fake-host',
+ 'drivers': ['other-driver']})
+
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+ self.assertRaises(exception.NoValidHost,
+ rpcapi.get_topic_for,
+ self.fake_node_obj)
+
+ def test_get_topic_doesnt_cache(self):
+ CONF.set_override('host', 'fake-host')
+
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+ self.assertRaises(exception.NoValidHost,
+ rpcapi.get_topic_for,
+ self.fake_node_obj)
+
+ self.dbapi.register_conductor({'hostname': 'fake-host',
+ 'drivers': ['fake-driver']})
+
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+ expected_topic = 'fake-topic.fake-host'
+ self.assertEqual(expected_topic,
+ rpcapi.get_topic_for(self.fake_node_obj))
+
+ def test_get_topic_for_driver_known_driver(self):
+ CONF.set_override('host', 'fake-host')
+ self.dbapi.register_conductor({
+ 'hostname': 'fake-host',
+ 'drivers': ['fake-driver'],
+ })
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+ self.assertEqual('fake-topic.fake-host',
+ rpcapi.get_topic_for_driver('fake-driver'))
+
+ def test_get_topic_for_driver_unknown_driver(self):
+ CONF.set_override('host', 'fake-host')
+ self.dbapi.register_conductor({
+ 'hostname': 'fake-host',
+ 'drivers': ['other-driver'],
+ })
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+ self.assertRaises(exception.DriverNotFound,
+ rpcapi.get_topic_for_driver,
+ 'fake-driver')
+
+ def test_get_topic_for_driver_doesnt_cache(self):
+ CONF.set_override('host', 'fake-host')
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+ self.assertRaises(exception.DriverNotFound,
+ rpcapi.get_topic_for_driver,
+ 'fake-driver')
+
+ self.dbapi.register_conductor({
+ 'hostname': 'fake-host',
+ 'drivers': ['fake-driver'],
+ })
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+ self.assertEqual('fake-topic.fake-host',
+ rpcapi.get_topic_for_driver('fake-driver'))
+
+ def _test_rpcapi(self, method, rpc_method, **kwargs):
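+        # Helper: patch the RPC client's can_send_version/prepare and the
+        # given rpc_method ('call' or 'cast'), invoke the named ConductorAPI
+        # method, and verify the topic, version and message payload used.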
+ rpcapi = conductor_rpcapi.ConductorAPI(topic='fake-topic')
+
+ expected_retval = 'hello world' if rpc_method == 'call' else None
+
+ expected_topic = 'fake-topic'
+ if 'host' in kwargs:
+ expected_topic += ".%s" % kwargs['host']
+
+ target = {
+ "topic": expected_topic,
+ "version": kwargs.pop('version', rpcapi.RPC_API_VERSION)
+ }
+ expected_msg = copy.deepcopy(kwargs)
+
+ self.fake_args = None
+ self.fake_kwargs = None
+
+ def _fake_can_send_version_method(version):
+ return messaging_utils.version_is_compatible(
+ rpcapi.RPC_API_VERSION, version)
+
+ def _fake_prepare_method(*args, **kwargs):
+ for kwd in kwargs:
+ self.assertEqual(kwargs[kwd], target[kwd])
+ return rpcapi.client
+
+ def _fake_rpc_method(*args, **kwargs):
+ self.fake_args = args
+ self.fake_kwargs = kwargs
+ if expected_retval:
+ return expected_retval
+
+ with mock.patch.object(rpcapi.client,
+ "can_send_version") as mock_can_send_version:
+ mock_can_send_version.side_effect = _fake_can_send_version_method
+ with mock.patch.object(rpcapi.client, "prepare") as mock_prepared:
+ mock_prepared.side_effect = _fake_prepare_method
+
+ with mock.patch.object(rpcapi.client,
+ rpc_method) as mock_method:
+ mock_method.side_effect = _fake_rpc_method
+ retval = getattr(rpcapi, method)(self.context, **kwargs)
+ self.assertEqual(retval, expected_retval)
+ expected_args = [self.context, method, expected_msg]
+ for arg, expected_arg in zip(self.fake_args,
+ expected_args):
+ self.assertEqual(arg, expected_arg)
+
+ def test_update_node(self):
+ self._test_rpcapi('update_node',
+ 'call',
+ version='1.1',
+ node_obj=self.fake_node)
+
+ def test_change_node_power_state(self):
+ self._test_rpcapi('change_node_power_state',
+ 'call',
+ version='1.6',
+ node_id=self.fake_node['uuid'],
+ new_state=states.POWER_ON)
+
+ def test_vendor_passthru(self):
+ self._test_rpcapi('vendor_passthru',
+ 'call',
+ version='1.20',
+ node_id=self.fake_node['uuid'],
+ driver_method='test-driver-method',
+ http_method='test-http-method',
+ info={"test_info": "test_value"})
+
+ def test_driver_vendor_passthru(self):
+ self._test_rpcapi('driver_vendor_passthru',
+ 'call',
+ version='1.20',
+ driver_name='test-driver-name',
+ driver_method='test-driver-method',
+ http_method='test-http-method',
+ info={'test_key': 'test_value'})
+
+ def test_do_node_deploy(self):
+ self._test_rpcapi('do_node_deploy',
+ 'call',
+ version='1.22',
+ node_id=self.fake_node['uuid'],
+ rebuild=False,
+ configdrive=None)
+
+ def test_do_node_tear_down(self):
+ self._test_rpcapi('do_node_tear_down',
+ 'call',
+ version='1.6',
+ node_id=self.fake_node['uuid'])
+
+ def test_validate_driver_interfaces(self):
+ self._test_rpcapi('validate_driver_interfaces',
+ 'call',
+ version='1.5',
+ node_id=self.fake_node['uuid'])
+
+ def test_destroy_node(self):
+ self._test_rpcapi('destroy_node',
+ 'call',
+ version='1.9',
+ node_id=self.fake_node['uuid'])
+
+ def test_get_console_information(self):
+ self._test_rpcapi('get_console_information',
+ 'call',
+ version='1.11',
+ node_id=self.fake_node['uuid'])
+
+ def test_set_console_mode(self):
+ self._test_rpcapi('set_console_mode',
+ 'call',
+ version='1.11',
+ node_id=self.fake_node['uuid'],
+ enabled=True)
+
+ def test_update_port(self):
+ fake_port = dbutils.get_test_port()
+ self._test_rpcapi('update_port',
+ 'call',
+ version='1.13',
+ port_obj=fake_port)
+
+ def test_get_driver_properties(self):
+ self._test_rpcapi('get_driver_properties',
+ 'call',
+ version='1.16',
+ driver_name='fake-driver')
+
+ def test_set_boot_device(self):
+ self._test_rpcapi('set_boot_device',
+ 'call',
+ version='1.17',
+ node_id=self.fake_node['uuid'],
+ device=boot_devices.DISK,
+ persistent=False)
+
+ def test_get_boot_device(self):
+ self._test_rpcapi('get_boot_device',
+ 'call',
+ version='1.17',
+ node_id=self.fake_node['uuid'])
+
+ def test_get_supported_boot_devices(self):
+ self._test_rpcapi('get_supported_boot_devices',
+ 'call',
+ version='1.17',
+ node_id=self.fake_node['uuid'])
+
+ def test_get_node_vendor_passthru_methods(self):
+ self._test_rpcapi('get_node_vendor_passthru_methods',
+ 'call',
+ version='1.21',
+ node_id=self.fake_node['uuid'])
+
+ def test_get_driver_vendor_passthru_methods(self):
+ self._test_rpcapi('get_driver_vendor_passthru_methods',
+ 'call',
+ version='1.21',
+ driver_name='fake-driver')
+
+ def test_inspect_hardware(self):
+ self._test_rpcapi('inspect_hardware',
+ 'call',
+ version='1.24',
+ node_id=self.fake_node['uuid'])
+
+ def test_continue_node_clean(self):
+ self._test_rpcapi('continue_node_clean',
+ 'cast',
+ version='1.27',
+ node_id=self.fake_node['uuid'])
+
+ def test_get_raid_logical_disk_properties(self):
+ self._test_rpcapi('get_raid_logical_disk_properties',
+ 'call',
+ version='1.30',
+ driver_name='fake-driver')
+
+ def test_set_target_raid_config(self):
+ self._test_rpcapi('set_target_raid_config',
+ 'call',
+ version='1.30',
+ node_id=self.fake_node['uuid'],
+ target_raid_config='config')
+
+ def test_object_action(self):
+ self._test_rpcapi('object_action',
+ 'call',
+ version='1.31',
+ objinst='fake-object',
+ objmethod='foo',
+ args=tuple(),
+ kwargs=dict())
+
+ def test_object_class_action_versions(self):
+ self._test_rpcapi('object_class_action_versions',
+ 'call',
+ version='1.31',
+ objname='fake-object',
+ objmethod='foo',
+ object_versions={'fake-object': '1.0'},
+ args=tuple(),
+ kwargs=dict())
+
+ def test_object_backport_versions(self):
+ self._test_rpcapi('object_backport_versions',
+ 'call',
+ version='1.31',
+ objinst='fake-object',
+ object_versions={'fake-object': '1.0'})
diff --git a/ironic/tests/unit/conductor/test_task_manager.py b/ironic/tests/unit/conductor/test_task_manager.py
new file mode 100644
index 000000000..68bae9ed4
--- /dev/null
+++ b/ironic/tests/unit/conductor/test_task_manager.py
@@ -0,0 +1,653 @@
+# coding=utf-8
+
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for :class:`ironic.conductor.task_manager`."""
+
+import eventlet
+from eventlet import greenpool
+import mock
+from oslo_utils import uuidutils
+
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import fsm
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic import objects
+from ironic.tests.unit import base as tests_base
+from ironic.tests.unit.db import base as tests_db_base
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+@mock.patch.object(objects.Node, 'get')
+@mock.patch.object(objects.Node, 'release')
+@mock.patch.object(objects.Node, 'reserve')
+@mock.patch.object(driver_factory, 'get_driver')
+@mock.patch.object(objects.Port, 'list_by_node_id')
+class TaskManagerTestCase(tests_db_base.DbTestCase):
+ def setUp(self):
+ super(TaskManagerTestCase, self).setUp()
+ self.host = 'test-host'
+ self.config(host=self.host)
+ self.config(node_locked_retry_attempts=1, group='conductor')
+ self.config(node_locked_retry_interval=0, group='conductor')
+ self.node = obj_utils.create_test_node(self.context)
+
+ def test_excl_lock(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock, node_get_mock):
+ reserve_mock.return_value = self.node
+ with task_manager.TaskManager(self.context, 'fake-node-id') as task:
+ self.assertEqual(self.context, task.context)
+ self.assertEqual(self.node, task.node)
+ self.assertEqual(get_ports_mock.return_value, task.ports)
+ self.assertEqual(get_driver_mock.return_value, task.driver)
+ self.assertFalse(task.shared)
+
+ reserve_mock.assert_called_once_with(self.context, self.host,
+ 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ get_driver_mock.assert_called_once_with(self.node.driver)
+ release_mock.assert_called_once_with(self.context, self.host,
+ self.node.id)
+ self.assertFalse(node_get_mock.called)
+
+ def test_excl_lock_with_driver(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock,
+ node_get_mock):
+ reserve_mock.return_value = self.node
+ with task_manager.TaskManager(self.context, 'fake-node-id',
+ driver_name='fake-driver') as task:
+ self.assertEqual(self.context, task.context)
+ self.assertEqual(self.node, task.node)
+ self.assertEqual(get_ports_mock.return_value, task.ports)
+ self.assertEqual(get_driver_mock.return_value, task.driver)
+ self.assertFalse(task.shared)
+
+ reserve_mock.assert_called_once_with(self.context, self.host,
+ 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ get_driver_mock.assert_called_once_with('fake-driver')
+ release_mock.assert_called_once_with(self.context, self.host,
+ self.node.id)
+ self.assertFalse(node_get_mock.called)
+
+ def test_excl_nested_acquire(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock,
+ node_get_mock):
+ node2 = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake')
+
+ reserve_mock.return_value = self.node
+ get_ports_mock.return_value = mock.sentinel.ports1
+ get_driver_mock.return_value = mock.sentinel.driver1
+
+ with task_manager.TaskManager(self.context, 'node-id1') as task:
+ reserve_mock.return_value = node2
+ get_ports_mock.return_value = mock.sentinel.ports2
+ get_driver_mock.return_value = mock.sentinel.driver2
+ with task_manager.TaskManager(self.context, 'node-id2') as task2:
+ self.assertEqual(self.context, task.context)
+ self.assertEqual(self.node, task.node)
+ self.assertEqual(mock.sentinel.ports1, task.ports)
+ self.assertEqual(mock.sentinel.driver1, task.driver)
+ self.assertFalse(task.shared)
+ self.assertEqual(self.context, task2.context)
+ self.assertEqual(node2, task2.node)
+ self.assertEqual(mock.sentinel.ports2, task2.ports)
+ self.assertEqual(mock.sentinel.driver2, task2.driver)
+ self.assertFalse(task2.shared)
+
+ self.assertEqual([mock.call(self.context, self.host, 'node-id1'),
+ mock.call(self.context, self.host, 'node-id2')],
+ reserve_mock.call_args_list)
+ self.assertEqual([mock.call(self.context, self.node.id),
+ mock.call(self.context, node2.id)],
+ get_ports_mock.call_args_list)
+ self.assertEqual([mock.call(self.node.driver),
+ mock.call(node2.driver)],
+ get_driver_mock.call_args_list)
+ # release should be in reverse order
+ self.assertEqual([mock.call(self.context, self.host, node2.id),
+ mock.call(self.context, self.host, self.node.id)],
+ release_mock.call_args_list)
+ self.assertFalse(node_get_mock.called)
+
+ def test_excl_lock_exception_then_lock(self, get_ports_mock,
+ get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ retry_attempts = 3
+ self.config(node_locked_retry_attempts=retry_attempts,
+ group='conductor')
+
+ # Fail on the first lock attempt, succeed on the second.
+ reserve_mock.side_effect = [exception.NodeLocked(node='foo',
+ host='foo'),
+ self.node]
+
+ with task_manager.TaskManager(self.context, 'fake-node-id') as task:
+ self.assertFalse(task.shared)
+
+ expected_calls = [mock.call(self.context, self.host,
+ 'fake-node-id')] * 2
+ reserve_mock.assert_has_calls(expected_calls)
+ self.assertEqual(2, reserve_mock.call_count)
+
+ def test_excl_lock_reserve_exception(self, get_ports_mock,
+ get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ retry_attempts = 3
+ self.config(node_locked_retry_attempts=retry_attempts,
+ group='conductor')
+ reserve_mock.side_effect = exception.NodeLocked(node='foo',
+ host='foo')
+
+ self.assertRaises(exception.NodeLocked,
+ task_manager.TaskManager,
+ self.context,
+ 'fake-node-id')
+
+ reserve_mock.assert_called_with(self.context, self.host,
+ 'fake-node-id')
+ self.assertEqual(retry_attempts, reserve_mock.call_count)
+ self.assertFalse(get_ports_mock.called)
+ self.assertFalse(get_driver_mock.called)
+ self.assertFalse(release_mock.called)
+ self.assertFalse(node_get_mock.called)
+
+ def test_excl_lock_get_ports_exception(self, get_ports_mock,
+ get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ reserve_mock.return_value = self.node
+ get_ports_mock.side_effect = exception.IronicException('foo')
+
+ self.assertRaises(exception.IronicException,
+ task_manager.TaskManager,
+ self.context,
+ 'fake-node-id')
+
+ reserve_mock.assert_called_once_with(self.context, self.host,
+ 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ self.assertFalse(get_driver_mock.called)
+ release_mock.assert_called_once_with(self.context, self.host,
+ self.node.id)
+ self.assertFalse(node_get_mock.called)
+
+ def test_excl_lock_get_driver_exception(self, get_ports_mock,
+ get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ reserve_mock.return_value = self.node
+ get_driver_mock.side_effect = (
+ exception.DriverNotFound(driver_name='foo'))
+
+ self.assertRaises(exception.DriverNotFound,
+ task_manager.TaskManager,
+ self.context,
+ 'fake-node-id')
+
+ reserve_mock.assert_called_once_with(self.context, self.host,
+ 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ get_driver_mock.assert_called_once_with(self.node.driver)
+ release_mock.assert_called_once_with(self.context, self.host,
+ self.node.id)
+ self.assertFalse(node_get_mock.called)
+
+ def test_shared_lock(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock, node_get_mock):
+ node_get_mock.return_value = self.node
+ with task_manager.TaskManager(self.context, 'fake-node-id',
+ shared=True) as task:
+ self.assertEqual(self.context, task.context)
+ self.assertEqual(self.node, task.node)
+ self.assertEqual(get_ports_mock.return_value, task.ports)
+ self.assertEqual(get_driver_mock.return_value, task.driver)
+ self.assertTrue(task.shared)
+
+ self.assertFalse(reserve_mock.called)
+ self.assertFalse(release_mock.called)
+ node_get_mock.assert_called_once_with(self.context, 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ get_driver_mock.assert_called_once_with(self.node.driver)
+
+ def test_shared_lock_with_driver(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock,
+ node_get_mock):
+ node_get_mock.return_value = self.node
+ with task_manager.TaskManager(self.context,
+ 'fake-node-id',
+ shared=True,
+ driver_name='fake-driver') as task:
+ self.assertEqual(self.context, task.context)
+ self.assertEqual(self.node, task.node)
+ self.assertEqual(get_ports_mock.return_value, task.ports)
+ self.assertEqual(get_driver_mock.return_value, task.driver)
+ self.assertTrue(task.shared)
+
+ self.assertFalse(reserve_mock.called)
+ self.assertFalse(release_mock.called)
+ node_get_mock.assert_called_once_with(self.context, 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ get_driver_mock.assert_called_once_with('fake-driver')
+
+ def test_shared_lock_node_get_exception(self, get_ports_mock,
+ get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ node_get_mock.side_effect = exception.NodeNotFound(node='foo')
+
+ self.assertRaises(exception.NodeNotFound,
+ task_manager.TaskManager,
+ self.context,
+ 'fake-node-id',
+ shared=True)
+
+ self.assertFalse(reserve_mock.called)
+ self.assertFalse(release_mock.called)
+ node_get_mock.assert_called_once_with(self.context, 'fake-node-id')
+ self.assertFalse(get_ports_mock.called)
+ self.assertFalse(get_driver_mock.called)
+
+ def test_shared_lock_get_ports_exception(self, get_ports_mock,
+ get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ node_get_mock.return_value = self.node
+ get_ports_mock.side_effect = exception.IronicException('foo')
+
+ self.assertRaises(exception.IronicException,
+ task_manager.TaskManager,
+ self.context,
+ 'fake-node-id',
+ shared=True)
+
+ self.assertFalse(reserve_mock.called)
+ self.assertFalse(release_mock.called)
+ node_get_mock.assert_called_once_with(self.context, 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ self.assertFalse(get_driver_mock.called)
+
+ def test_shared_lock_get_driver_exception(self, get_ports_mock,
+ get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ node_get_mock.return_value = self.node
+ get_driver_mock.side_effect = (
+ exception.DriverNotFound(driver_name='foo'))
+
+ self.assertRaises(exception.DriverNotFound,
+ task_manager.TaskManager,
+ self.context,
+ 'fake-node-id',
+ shared=True)
+
+ self.assertFalse(reserve_mock.called)
+ self.assertFalse(release_mock.called)
+ node_get_mock.assert_called_once_with(self.context, 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ get_driver_mock.assert_called_once_with(self.node.driver)
+
+ def test_upgrade_lock(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock, node_get_mock):
+ node_get_mock.return_value = self.node
+ reserve_mock.return_value = self.node
+ with task_manager.TaskManager(self.context, 'fake-node-id',
+ shared=True) as task:
+ self.assertEqual(self.context, task.context)
+ self.assertEqual(self.node, task.node)
+ self.assertEqual(get_ports_mock.return_value, task.ports)
+ self.assertEqual(get_driver_mock.return_value, task.driver)
+ self.assertTrue(task.shared)
+ self.assertFalse(reserve_mock.called)
+
+ task.upgrade_lock()
+ self.assertFalse(task.shared)
+ # second upgrade does nothing
+ task.upgrade_lock()
+ self.assertFalse(task.shared)
+
+ # make sure reserve() was called only once
+ reserve_mock.assert_called_once_with(self.context, self.host,
+ 'fake-node-id')
+ release_mock.assert_called_once_with(self.context, self.host,
+ self.node.id)
+ node_get_mock.assert_called_once_with(self.context, 'fake-node-id')
+ get_ports_mock.assert_called_once_with(self.context, self.node.id)
+ get_driver_mock.assert_called_once_with(self.node.driver)
+
+ def test_spawn_after(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock, node_get_mock):
+ thread_mock = mock.Mock(spec_set=['link', 'cancel'])
+ spawn_mock = mock.Mock(return_value=thread_mock)
+ task_release_mock = mock.Mock()
+ reserve_mock.return_value = self.node
+
+ with task_manager.TaskManager(self.context, 'node-id') as task:
+ task.spawn_after(spawn_mock, 1, 2, foo='bar', cat='meow')
+ task.release_resources = task_release_mock
+
+ spawn_mock.assert_called_once_with(1, 2, foo='bar', cat='meow')
+ thread_mock.link.assert_called_once_with(
+ task._thread_release_resources)
+ self.assertFalse(thread_mock.cancel.called)
+        # Since we mocked link(), we're testing that __exit__ didn't release
+        # resources; that is deferred until the background thread finishes.
+ self.assertFalse(task_release_mock.called)
+
+ def test_spawn_after_exception_while_yielded(self, get_ports_mock,
+ get_driver_mock,
+ reserve_mock,
+ release_mock,
+ node_get_mock):
+ spawn_mock = mock.Mock()
+ task_release_mock = mock.Mock()
+ reserve_mock.return_value = self.node
+
+ def _test_it():
+ with task_manager.TaskManager(self.context, 'node-id') as task:
+ task.spawn_after(spawn_mock, 1, 2, foo='bar', cat='meow')
+ task.release_resources = task_release_mock
+ raise exception.IronicException('foo')
+
+ self.assertRaises(exception.IronicException, _test_it)
+ self.assertFalse(spawn_mock.called)
+ task_release_mock.assert_called_once_with()
+
+ def test_spawn_after_spawn_fails(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock,
+ node_get_mock):
+ spawn_mock = mock.Mock(side_effect=exception.IronicException('foo'))
+ task_release_mock = mock.Mock()
+ reserve_mock.return_value = self.node
+
+ def _test_it():
+ with task_manager.TaskManager(self.context, 'node-id') as task:
+ task.spawn_after(spawn_mock, 1, 2, foo='bar', cat='meow')
+ task.release_resources = task_release_mock
+
+ self.assertRaises(exception.IronicException, _test_it)
+
+ spawn_mock.assert_called_once_with(1, 2, foo='bar', cat='meow')
+ task_release_mock.assert_called_once_with()
+
+ def test_spawn_after_link_fails(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock,
+ node_get_mock):
+ thread_mock = mock.Mock(spec_set=['link', 'cancel'])
+ thread_mock.link.side_effect = exception.IronicException('foo')
+ spawn_mock = mock.Mock(return_value=thread_mock)
+ task_release_mock = mock.Mock()
+ thr_release_mock = mock.Mock(spec_set=[])
+ reserve_mock.return_value = self.node
+
+ def _test_it():
+ with task_manager.TaskManager(self.context, 'node-id') as task:
+ task.spawn_after(spawn_mock, 1, 2, foo='bar', cat='meow')
+ task._thread_release_resources = thr_release_mock
+ task.release_resources = task_release_mock
+ self.assertRaises(exception.IronicException, _test_it)
+
+ spawn_mock.assert_called_once_with(1, 2, foo='bar', cat='meow')
+ thread_mock.link.assert_called_once_with(thr_release_mock)
+ thread_mock.cancel.assert_called_once_with()
+ task_release_mock.assert_called_once_with()
+
+ def test_spawn_after_on_error_hook(self, get_ports_mock, get_driver_mock,
+ reserve_mock, release_mock,
+ node_get_mock):
+ expected_exception = exception.IronicException('foo')
+ spawn_mock = mock.Mock(side_effect=expected_exception)
+ task_release_mock = mock.Mock()
+ on_error_handler = mock.Mock()
+ reserve_mock.return_value = self.node
+
+ def _test_it():
+ with task_manager.TaskManager(self.context, 'node-id') as task:
+ task.set_spawn_error_hook(on_error_handler, 'fake-argument')
+ task.spawn_after(spawn_mock, 1, 2, foo='bar', cat='meow')
+ task.release_resources = task_release_mock
+
+ self.assertRaises(exception.IronicException, _test_it)
+
+ spawn_mock.assert_called_once_with(1, 2, foo='bar', cat='meow')
+ task_release_mock.assert_called_once_with()
+ on_error_handler.assert_called_once_with(expected_exception,
+ 'fake-argument')
+
+ def test_spawn_after_on_error_hook_exception(self, get_ports_mock,
+ get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ expected_exception = exception.IronicException('foo')
+ spawn_mock = mock.Mock(side_effect=expected_exception)
+ task_release_mock = mock.Mock()
+ # Raise an exception within the on_error handler
+ on_error_handler = mock.Mock(side_effect=Exception('unexpected'))
+ on_error_handler.__name__ = 'foo_method'
+ reserve_mock.return_value = self.node
+
+ def _test_it():
+ with task_manager.TaskManager(self.context, 'node-id') as task:
+ task.set_spawn_error_hook(on_error_handler, 'fake-argument')
+ task.spawn_after(spawn_mock, 1, 2, foo='bar', cat='meow')
+ task.release_resources = task_release_mock
+
+ # Make sure the original exception is the one raised
+ self.assertRaises(exception.IronicException, _test_it)
+
+ spawn_mock.assert_called_once_with(1, 2, foo='bar', cat='meow')
+ task_release_mock.assert_called_once_with()
+ on_error_handler.assert_called_once_with(expected_exception,
+ 'fake-argument')
+
+ @mock.patch.object(states.machine, 'copy')
+ def test_init_prepares_fsm(
+ self, copy_mock, get_ports_mock, get_driver_mock, reserve_mock,
+ release_mock, node_get_mock):
+ m = mock.Mock(spec=fsm.FSM)
+ reserve_mock.return_value = self.node
+ copy_mock.return_value = m
+ t = task_manager.TaskManager('fake', 'fake')
+ copy_mock.assert_called_once_with()
+ self.assertIs(m, t.fsm)
+ m.initialize.assert_called_once_with(self.node.provision_state)
+
+
+class TaskManagerStateModelTestCases(tests_base.TestCase):
+ def setUp(self):
+ super(TaskManagerStateModelTestCases, self).setUp()
+ self.fsm = mock.Mock(spec=fsm.FSM)
+ self.node = mock.Mock(spec=objects.Node)
+ self.task = mock.Mock(spec=task_manager.TaskManager)
+ self.task.fsm = self.fsm
+ self.task.node = self.node
+
+ def test_release_clears_resources(self):
+ t = self.task
+ t.release_resources = task_manager.TaskManager.release_resources
+ t.driver = mock.Mock()
+ t.ports = mock.Mock()
+ t.shared = True
+ t._purpose = 'purpose'
+ t._debug_timer = mock.Mock()
+
+ t.release_resources(t)
+ self.assertIsNone(t.node)
+ self.assertIsNone(t.driver)
+ self.assertIsNone(t.ports)
+ self.assertIsNone(t.fsm)
+
+ def test_process_event_fsm_raises(self):
+ self.task.process_event = task_manager.TaskManager.process_event
+ self.fsm.process_event.side_effect = exception.InvalidState('test')
+
+ self.assertRaises(
+ exception.InvalidState,
+ self.task.process_event,
+ self.task, 'fake')
+ self.assertEqual(0, self.task.spawn_after.call_count)
+ self.assertFalse(self.task.node.save.called)
+
+ def test_process_event_sets_callback(self):
+ cb = mock.Mock()
+ arg = mock.Mock()
+ kwarg = mock.Mock()
+ self.task.process_event = task_manager.TaskManager.process_event
+ self.task.process_event(
+ self.task, 'fake', callback=cb, call_args=[arg],
+ call_kwargs={'mock': kwarg})
+ self.fsm.process_event.assert_called_once_with('fake')
+ self.task.spawn_after.assert_called_with(cb, arg, mock=kwarg)
+ self.assertEqual(1, self.task.node.save.call_count)
+ self.assertIsNone(self.node.last_error)
+
+ def test_process_event_sets_callback_and_error_handler(self):
+ arg = mock.Mock()
+ cb = mock.Mock()
+ er = mock.Mock()
+ kwarg = mock.Mock()
+ provision_state = 'provision_state'
+ target_provision_state = 'target'
+ self.node.provision_state = provision_state
+ self.node.target_provision_state = target_provision_state
+ self.task.process_event = task_manager.TaskManager.process_event
+
+ self.task.process_event(
+ self.task, 'fake', callback=cb, call_args=[arg],
+ call_kwargs={'mock': kwarg}, err_handler=er)
+
+ self.task.set_spawn_error_hook.assert_called_once_with(
+ er, self.node, provision_state, target_provision_state)
+ self.fsm.process_event.assert_called_once_with('fake')
+ self.task.spawn_after.assert_called_with(cb, arg, mock=kwarg)
+ self.assertEqual(1, self.task.node.save.call_count)
+ self.assertIsNone(self.node.last_error)
+ self.assertNotEqual(provision_state, self.node.provision_state)
+ self.assertNotEqual(target_provision_state,
+ self.node.target_provision_state)
+
+
+@task_manager.require_exclusive_lock
+def _req_excl_lock_method(*args, **kwargs):
+ return (args, kwargs)
+
+
+class ExclusiveLockDecoratorTestCase(tests_base.TestCase):
+ def setUp(self):
+ super(ExclusiveLockDecoratorTestCase, self).setUp()
+ self.task = mock.Mock(spec=task_manager.TaskManager)
+ self.args_task_first = (self.task, 1, 2)
+ self.args_task_second = (1, self.task, 2)
+ self.kwargs = dict(cat='meow', dog='wuff')
+
+ def test_with_excl_lock_task_first_arg(self):
+ self.task.shared = False
+ (args, kwargs) = _req_excl_lock_method(*self.args_task_first,
+ **self.kwargs)
+ self.assertEqual(self.args_task_first, args)
+ self.assertEqual(self.kwargs, kwargs)
+
+ def test_with_excl_lock_task_second_arg(self):
+ self.task.shared = False
+ (args, kwargs) = _req_excl_lock_method(*self.args_task_second,
+ **self.kwargs)
+ self.assertEqual(self.args_task_second, args)
+ self.assertEqual(self.kwargs, kwargs)
+
+ def test_with_shared_lock_task_first_arg(self):
+ self.task.shared = True
+ self.assertRaises(exception.ExclusiveLockRequired,
+ _req_excl_lock_method,
+ *self.args_task_first,
+ **self.kwargs)
+
+ def test_with_shared_lock_task_second_arg(self):
+ self.task.shared = True
+ self.assertRaises(exception.ExclusiveLockRequired,
+ _req_excl_lock_method,
+ *self.args_task_second,
+ **self.kwargs)
+
+
+class TaskManagerGreenThreadTestCase(tests_base.TestCase):
+ """Class to assert our assumptions about greenthread behavior."""
+ def test_gt_link_callback_added_during_execution(self):
+ pool = greenpool.GreenPool()
+ q1 = eventlet.Queue()
+ q2 = eventlet.Queue()
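+        # q1/q2 synchronize with the worker greenthread so that link() is
+        # attached while func() is still running.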
+
+ def func():
+ q1.put(None)
+ q2.get()
+
+ link_callback = mock.Mock()
+
+ thread = pool.spawn(func)
+ q1.get()
+ thread.link(link_callback)
+ q2.put(None)
+ pool.waitall()
+ link_callback.assert_called_once_with(thread)
+
+ def test_gt_link_callback_added_after_execution(self):
+ pool = greenpool.GreenPool()
+ link_callback = mock.Mock()
+
+ thread = pool.spawn(lambda: None)
+ pool.waitall()
+ thread.link(link_callback)
+ link_callback.assert_called_once_with(thread)
+
+ def test_gt_link_callback_exception_inside_thread(self):
+ pool = greenpool.GreenPool()
+ q1 = eventlet.Queue()
+ q2 = eventlet.Queue()
+
+ def func():
+ q1.put(None)
+ q2.get()
+ raise Exception()
+
+ link_callback = mock.Mock()
+
+ thread = pool.spawn(func)
+ q1.get()
+ thread.link(link_callback)
+ q2.put(None)
+ pool.waitall()
+ link_callback.assert_called_once_with(thread)
+
+ def test_gt_link_callback_added_after_exception_inside_thread(self):
+ pool = greenpool.GreenPool()
+
+ def func():
+ raise Exception()
+
+ link_callback = mock.Mock()
+
+ thread = pool.spawn(func)
+ pool.waitall()
+ thread.link(link_callback)
+
+ link_callback.assert_called_once_with(thread)
+
+ def test_gt_cancel_doesnt_run_thread(self):
+ pool = greenpool.GreenPool()
+ func = mock.Mock()
+ thread = pool.spawn(func)
+ thread.link(lambda t: None)
+ thread.cancel()
+ pool.waitall()
+ self.assertFalse(func.called)
diff --git a/ironic/tests/unit/conductor/test_utils.py b/ironic/tests/unit/conductor/test_utils.py
new file mode 100644
index 000000000..4ab0fb19b
--- /dev/null
+++ b/ironic/tests/unit/conductor/test_utils.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for Ironic Manager test utils."""
+
+from ironic.tests.unit import base
+from ironic.tests.unit.conductor import utils
+
+
+class UtilsTestCase(base.TestCase):
+
+ def test_fails_to_load_extension(self):
+ self.assertRaises(AttributeError,
+ utils.mock_the_extension_manager,
+ 'fake',
+ 'bad.namespace')
+ self.assertRaises(AttributeError,
+ utils.mock_the_extension_manager,
+ 'no-such-driver',
+ 'ironic.drivers')
+
+ def test_get_mockable_ext_mgr(self):
+ (mgr, ext) = utils.mock_the_extension_manager('fake',
+ 'ironic.drivers')
+
+ # confirm that stevedore did not scan the actual entrypoints
+ self.assertNotEqual(mgr._extension_manager.namespace, 'ironic.drivers')
+ # confirm mgr has only one extension
+ self.assertEqual(1, len(mgr._extension_manager.extensions))
+ # confirm that we got a reference to the extension in this manager
+ self.assertEqual(ext, mgr._extension_manager.extensions[0])
+ # confirm that it is the "fake" driver we asked for
+ self.assertEqual("fake = ironic.drivers.fake:FakeDriver",
+ "%s" % ext.entry_point)
+ # Confirm driver is loaded
+ self.assertIn('fake', mgr.names)
diff --git a/ironic/tests/unit/conductor/utils.py b/ironic/tests/unit/conductor/utils.py
new file mode 100644
index 000000000..84b3f68d3
--- /dev/null
+++ b/ironic/tests/unit/conductor/utils.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test utils for Ironic Managers."""
+
+import pkg_resources
+from stevedore import dispatch
+
+from ironic.common import driver_factory
+
+
+def mock_the_extension_manager(driver="fake", namespace="ironic.drivers"):
+ """Get a fake stevedore NameDispatchExtensionManager instance.
+
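+    :param driver: The name of a driver entry point expected to exist in the
+                   given namespace, e.g. "fake".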
+ :param namespace: A string representing the namespace over which to
+ search for entrypoints.
+ :returns mock_ext_mgr: A DriverFactory instance that has been faked.
+ :returns mock_ext: A real plugin loaded by mock_ext_mgr in the specified
+ namespace.
+
+ """
+ entry_point = None
+ for ep in list(pkg_resources.iter_entry_points(namespace)):
+ s = "%s" % ep
+ if driver == s[:s.index(' =')]:
+ entry_point = ep
+ break
+
+    # NOTE(lucasagomes): Initialize the _extension_manager before
+    #                    instantiating a DriverFactory class to avoid
+    #                    a real NameDispatchExtensionManager being created
+    #                    with the real namespace.
+ driver_factory.DriverFactory._extension_manager = (
+ dispatch.NameDispatchExtensionManager('ironic.no-such-namespace',
+ lambda x: True))
+ mock_ext_mgr = driver_factory.DriverFactory()
+ mock_ext = mock_ext_mgr._extension_manager._load_one_plugin(
+ entry_point, True, [], {}, False)
+ mock_ext_mgr._extension_manager.extensions = [mock_ext]
+ mock_ext_mgr._extension_manager.by_name = dict((e.name, e)
+ for e in [mock_ext])
+
+ return (mock_ext_mgr, mock_ext)
diff --git a/ironic/tests/unit/conf_fixture.py b/ironic/tests/unit/conf_fixture.py
new file mode 100644
index 000000000..42132f882
--- /dev/null
+++ b/ironic/tests/unit/conf_fixture.py
@@ -0,0 +1,40 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import fixtures
+from oslo_config import cfg
+
+from ironic.common import config
+
+CONF = cfg.CONF
+CONF.import_opt('host', 'ironic.common.service')
+
+
+class ConfFixture(fixtures.Fixture):
+ """Fixture to manage global conf settings."""
+
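+    # Typical usage (a sketch): self.useFixture(ConfFixture(cfg.CONF)) from a
+    # test's setUp(); setUp() below registers conf.reset as a cleanup so the
+    # overridden defaults do not leak between tests.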
+ def __init__(self, conf):
+ self.conf = conf
+
+ def setUp(self):
+ super(ConfFixture, self).setUp()
+
+ self.conf.set_default('host', 'fake-mini')
+ self.conf.set_default('connection', "sqlite://", group='database')
+ self.conf.set_default('sqlite_synchronous', False, group='database')
+ self.conf.set_default('verbose', True)
+ config.parse_args([], default_config_files=[])
+ self.addCleanup(self.conf.reset)
diff --git a/ironic/tests/unit/db/__init__.py b/ironic/tests/unit/db/__init__.py
new file mode 100644
index 000000000..0f96bcc9c
--- /dev/null
+++ b/ironic/tests/unit/db/__init__.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2012 NTT DOCOMO, INC.
+# All Rights Reserved.
+# flake8: noqa
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from ironic.tests.unit.db import *
diff --git a/ironic/tests/unit/db/base.py b/ironic/tests/unit/db/base.py
new file mode 100644
index 000000000..37d24b3b3
--- /dev/null
+++ b/ironic/tests/unit/db/base.py
@@ -0,0 +1,102 @@
+# Copyright (c) 2012 NTT DOCOMO, INC.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Ironic DB test base class."""
+
+import os
+import shutil
+
+import fixtures
+from oslo_config import cfg
+from oslo_db.sqlalchemy import enginefacade
+
+from ironic.common import paths
+from ironic.db import api as dbapi
+from ironic.db.sqlalchemy import migration
+from ironic.db.sqlalchemy import models
+from ironic.tests.unit import base
+
+
+CONF = cfg.CONF
+
+_DB_CACHE = None
+
+
+class Database(fixtures.Fixture):
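+    """Fixture that prepares the test database schema once.
+
+    For "sqlite://" connections the schema is captured as an in-memory SQL
+    dump and replayed in setUp(); for file-based sqlite a clean copy of the
+    database file is restored instead.
+    """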
+
+ def __init__(self, engine, db_migrate, sql_connection,
+ sqlite_db, sqlite_clean_db):
+ self.sql_connection = sql_connection
+ self.sqlite_db = sqlite_db
+ self.sqlite_clean_db = sqlite_clean_db
+
+ self.engine = engine
+ self.engine.dispose()
+ conn = self.engine.connect()
+ if sql_connection == "sqlite://":
+ self.setup_sqlite(db_migrate)
+ elif sql_connection.startswith('sqlite:///'):
+ testdb = paths.state_path_rel(sqlite_db)
+ if os.path.exists(testdb):
+ return
+ self.setup_sqlite(db_migrate)
+ else:
+ db_migrate.upgrade('head')
+ self.post_migrations()
+ if sql_connection == "sqlite://":
+ conn = self.engine.connect()
+ self._DB = "".join(line for line in conn.connection.iterdump())
+ self.engine.dispose()
+ else:
+ cleandb = paths.state_path_rel(sqlite_clean_db)
+ shutil.copyfile(testdb, cleandb)
+
+ def setup_sqlite(self, db_migrate):
+ if db_migrate.version():
+ return
+ models.Base.metadata.create_all(self.engine)
+ db_migrate.stamp('head')
+
+ def setUp(self):
+ super(Database, self).setUp()
+
+ if self.sql_connection == "sqlite://":
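+            # Restore the cached schema dump into a fresh in-memory
+            # database for this test.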
+ conn = self.engine.connect()
+ conn.connection.executescript(self._DB)
+ self.addCleanup(self.engine.dispose)
+ else:
+ shutil.copyfile(paths.state_path_rel(self.sqlite_clean_db),
+ paths.state_path_rel(self.sqlite_db))
+ self.addCleanup(os.unlink, self.sqlite_db)
+
+ def post_migrations(self):
+ """Any addition steps that are needed outside of the migrations."""
+
+
+class DbTestCase(base.TestCase):
+
+ def setUp(self):
+ super(DbTestCase, self).setUp()
+
+ self.dbapi = dbapi.get_instance()
+
+ global _DB_CACHE
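+        # Create the Database fixture only once and reuse it, so the
+        # migrations only run a single time for the whole test run.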
+ if not _DB_CACHE:
+ engine = enginefacade.get_legacy_facade().get_engine()
+ _DB_CACHE = Database(engine, migration,
+ sql_connection=CONF.database.connection,
+ sqlite_db=CONF.database.sqlite_db,
+ sqlite_clean_db='clean.sqlite')
+ self.useFixture(_DB_CACHE)
diff --git a/ironic/tests/unit/db/sqlalchemy/__init__.py b/ironic/tests/unit/db/sqlalchemy/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/db/sqlalchemy/__init__.py
diff --git a/ironic/tests/unit/db/sqlalchemy/test_migrations.py b/ironic/tests/unit/db/sqlalchemy/test_migrations.py
new file mode 100644
index 000000000..29551476b
--- /dev/null
+++ b/ironic/tests/unit/db/sqlalchemy/test_migrations.py
@@ -0,0 +1,455 @@
+# Copyright 2010-2011 OpenStack Foundation
+# Copyright 2012-2013 IBM Corp.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Tests for database migrations. There are "opportunistic" tests for both mysql
+and postgresql in here, which allow testing against these databases in a
+properly configured unit test environment.
+
+For the opportunistic testing you need to set up a db named 'openstack_citest'
+with user 'openstack_citest' and password 'openstack_citest' on localhost.
+The test will then use that db and u/p combo to run the tests.
+
+For postgres on Ubuntu this can be done with the following commands:
+
+::
+
+ sudo -u postgres psql
+ postgres=# create user openstack_citest with createdb login password
+ 'openstack_citest';
+ postgres=# create database openstack_citest with owner openstack_citest;
+
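+For MySQL a similar setup might look like this (illustrative)::
+
+    mysql -u root -p
+    mysql> CREATE USER 'openstack_citest'@'localhost'
+        ->     IDENTIFIED BY 'openstack_citest';
+    mysql> CREATE DATABASE openstack_citest;
+    mysql> GRANT ALL PRIVILEGES ON openstack_citest.*
+        ->     TO 'openstack_citest'@'localhost';
+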
+"""
+
+import contextlib
+
+from alembic import script
+import mock
+from oslo_db import exception as db_exc
+from oslo_db.sqlalchemy import enginefacade
+from oslo_db.sqlalchemy import test_base
+from oslo_db.sqlalchemy import test_migrations
+from oslo_db.sqlalchemy import utils as db_utils
+from oslo_log import log as logging
+from oslo_utils import uuidutils
+import sqlalchemy
+import sqlalchemy.exc
+
+from ironic.common.i18n import _LE
+from ironic.db.sqlalchemy import migration
+from ironic.db.sqlalchemy import models
+from ironic.tests.unit import base
+
+LOG = logging.getLogger(__name__)
+
+
+def _get_connect_string(backend, user, passwd, database):
+ """Get database connection
+
+ Try to get a connection with a very specific set of values, if we get
+ these then we'll run the tests, otherwise they are skipped
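+
+    For example, _get_connect_string("mysql", "openstack_citest",
+    "openstack_citest", "openstack_citest") returns
+    "mysql+mysqldb://openstack_citest:openstack_citest@localhost/openstack_citest".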
+ """
+ if backend == "postgres":
+ backend = "postgresql+psycopg2"
+ elif backend == "mysql":
+ backend = "mysql+mysqldb"
+ else:
+ raise Exception("Unrecognized backend: '%s'" % backend)
+
+ return ("%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s"
+ % {'backend': backend, 'user': user, 'passwd': passwd,
+ 'database': database})
+
+
+def _is_backend_avail(backend, user, passwd, database):
+ try:
+ connect_uri = _get_connect_string(backend, user, passwd, database)
+ engine = sqlalchemy.create_engine(connect_uri)
+ connection = engine.connect()
+ except Exception:
+ # intentionally catch all to handle exceptions even if we don't
+ # have any backend code loaded.
+ return False
+ else:
+ connection.close()
+ engine.dispose()
+ return True
+
+
+@contextlib.contextmanager
+def patch_with_engine(engine):
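+    # Make anything that asks oslo.db's enginefacade for an engine (such
+    # as the alembic migration helpers) use the engine provided by the
+    # test instead of the globally configured one.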
+ with mock.patch.object(enginefacade.get_legacy_facade(),
+ 'get_engine') as patch_engine:
+ patch_engine.return_value = engine
+ yield
+
+
+class WalkVersionsMixin(object):
+ def _walk_versions(self, engine=None, alembic_cfg=None, downgrade=True):
+ # Determine latest version script from the repo, then
+ # upgrade from 1 through to the latest, with no data
+ # in the databases. This just checks that the schema itself
+ # upgrades successfully.
+
+ # Place the database under version control
+ with patch_with_engine(engine):
+
+ script_directory = script.ScriptDirectory.from_config(alembic_cfg)
+
+ self.assertIsNone(self.migration_api.version(alembic_cfg))
+
+ versions = [ver for ver in script_directory.walk_revisions()]
+
+ for version in reversed(versions):
+ self._migrate_up(engine, alembic_cfg,
+ version.revision, with_data=True)
+
+ if downgrade:
+ for version in versions:
+ self._migrate_down(engine, alembic_cfg, version.revision)
+
+ def _migrate_down(self, engine, config, version, with_data=False):
+ try:
+ self.migration_api.downgrade(version, config=config)
+ except NotImplementedError:
+ # NOTE(sirp): some migrations, namely release-level
+ # migrations, don't support a downgrade.
+ return False
+
+ self.assertEqual(version, self.migration_api.version(config))
+
+ # NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target'
+ # version). So if we have any downgrade checks, they need to be run for
+ # the previous (higher numbered) migration.
+ if with_data:
+ post_downgrade = getattr(
+ self, "_post_downgrade_%s" % (version), None)
+ if post_downgrade:
+ post_downgrade(engine)
+
+ return True
+
+ def _migrate_up(self, engine, config, version, with_data=False):
+ """migrate up to a new version of the db.
+
+ We allow for data insertion and post checks at every
+ migration version with special _pre_upgrade_### and
+ _check_### functions in the main test.
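+
+        For example (illustrative only), a revision '123abc' may define
+        _pre_upgrade_123abc(engine) to insert fixture rows and return
+        them as data, and _check_123abc(engine, data) to assert that
+        those rows survived the upgrade.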
+ """
+ # NOTE(sdague): try block is here because it's impossible to debug
+ # where a failed data migration happens otherwise
+ try:
+ if with_data:
+ data = None
+ pre_upgrade = getattr(
+ self, "_pre_upgrade_%s" % version, None)
+ if pre_upgrade:
+ data = pre_upgrade(engine)
+
+ self.migration_api.upgrade(version, config=config)
+ self.assertEqual(version, self.migration_api.version(config))
+ if with_data:
+ check = getattr(self, "_check_%s" % version, None)
+ if check:
+ check(engine, data)
+ except Exception:
+ LOG.error(_LE("Failed to migrate to version %(version)s on engine "
+ "%(engine)s"),
+ {'version': version, 'engine': engine})
+ raise
+
+
+class TestWalkVersions(base.TestCase, WalkVersionsMixin):
+ def setUp(self):
+ super(TestWalkVersions, self).setUp()
+ self.migration_api = mock.MagicMock()
+ self.engine = mock.MagicMock()
+ self.config = mock.MagicMock()
+ self.versions = [mock.Mock(revision='2b2'), mock.Mock(revision='1a1')]
+
+ def test_migrate_up(self):
+ self.migration_api.version.return_value = 'dsa123'
+
+ self._migrate_up(self.engine, self.config, 'dsa123')
+
+ self.migration_api.upgrade.assert_called_with('dsa123',
+ config=self.config)
+ self.migration_api.version.assert_called_with(self.config)
+
+ def test_migrate_up_with_data(self):
+ test_value = {"a": 1, "b": 2}
+ self.migration_api.version.return_value = '141'
+ self._pre_upgrade_141 = mock.MagicMock()
+ self._pre_upgrade_141.return_value = test_value
+ self._check_141 = mock.MagicMock()
+
+ self._migrate_up(self.engine, self.config, '141', True)
+
+ self._pre_upgrade_141.assert_called_with(self.engine)
+ self._check_141.assert_called_with(self.engine, test_value)
+
+ def test_migrate_down(self):
+ self.migration_api.version.return_value = '42'
+
+ self.assertTrue(self._migrate_down(self.engine, self.config, '42'))
+ self.migration_api.version.assert_called_with(self.config)
+
+ def test_migrate_down_not_implemented(self):
+ self.migration_api.downgrade.side_effect = NotImplementedError
+ self.assertFalse(self._migrate_down(self.engine, self.config, '42'))
+
+ def test_migrate_down_with_data(self):
+ self._post_downgrade_043 = mock.MagicMock()
+ self.migration_api.version.return_value = '043'
+
+ self._migrate_down(self.engine, self.config, '043', True)
+
+ self._post_downgrade_043.assert_called_with(self.engine)
+
+ @mock.patch.object(script, 'ScriptDirectory')
+ @mock.patch.object(WalkVersionsMixin, '_migrate_up')
+ @mock.patch.object(WalkVersionsMixin, '_migrate_down')
+ def test_walk_versions_all_default(self, _migrate_up, _migrate_down,
+ script_directory):
+ fc = script_directory.from_config()
+ fc.walk_revisions.return_value = self.versions
+ self.migration_api.version.return_value = None
+
+ self._walk_versions(self.engine, self.config)
+
+ self.migration_api.version.assert_called_with(self.config)
+
+ upgraded = [mock.call(self.engine, self.config, v.revision,
+ with_data=True) for v in reversed(self.versions)]
+ self.assertEqual(self._migrate_up.call_args_list, upgraded)
+
+ downgraded = [mock.call(self.engine, self.config, v.revision)
+ for v in self.versions]
+ self.assertEqual(self._migrate_down.call_args_list, downgraded)
+
+ @mock.patch.object(script, 'ScriptDirectory')
+ @mock.patch.object(WalkVersionsMixin, '_migrate_up')
+ @mock.patch.object(WalkVersionsMixin, '_migrate_down')
+ def test_walk_versions_all_false(self, _migrate_up, _migrate_down,
+ script_directory):
+ fc = script_directory.from_config()
+ fc.walk_revisions.return_value = self.versions
+ self.migration_api.version.return_value = None
+
+ self._walk_versions(self.engine, self.config, downgrade=False)
+
+ upgraded = [mock.call(self.engine, self.config, v.revision,
+ with_data=True) for v in reversed(self.versions)]
+ self.assertEqual(upgraded, self._migrate_up.call_args_list)
+
+
+class MigrationCheckersMixin(object):
+
+ def setUp(self):
+ super(MigrationCheckersMixin, self).setUp()
+ self.config = migration._alembic_config()
+ self.migration_api = migration
+
+ def test_walk_versions(self):
+ self._walk_versions(self.engine, self.config, downgrade=False)
+
+ def test_connect_fail(self):
+ """Test that we can trigger a database connection failure
+
+ Test that we can fail gracefully to ensure we don't break people
+ without specific database backend
+ """
+ if _is_backend_avail(self.FIXTURE.DRIVER, "openstack_cifail",
+ self.FIXTURE.USERNAME, self.FIXTURE.DBNAME):
+ self.fail("Shouldn't have connected")
+
+ def _check_21b331f883ef(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ col_names = [column.name for column in nodes.c]
+ self.assertIn('provision_updated_at', col_names)
+ self.assertIsInstance(nodes.c.provision_updated_at.type,
+ sqlalchemy.types.DateTime)
+
+ def _check_3cb628139ea4(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ col_names = [column.name for column in nodes.c]
+
+ self.assertIn('console_enabled', col_names)
+ # in some backends bool type is integer
+ self.assertTrue(isinstance(nodes.c.console_enabled.type,
+ sqlalchemy.types.Boolean) or
+ isinstance(nodes.c.console_enabled.type,
+ sqlalchemy.types.Integer))
+
+ def _check_31baaf680d2b(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ col_names = [column.name for column in nodes.c]
+ self.assertIn('instance_info', col_names)
+ self.assertIsInstance(nodes.c.instance_info.type,
+ sqlalchemy.types.TEXT)
+
+ def _check_3bea56f25597(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ instance_uuid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
+ data = {'driver': 'fake',
+ 'uuid': uuidutils.generate_uuid(),
+ 'instance_uuid': instance_uuid}
+ nodes.insert().values(data).execute()
+ data['uuid'] = uuidutils.generate_uuid()
+ self.assertRaises(db_exc.DBDuplicateEntry,
+ nodes.insert().execute, data)
+
+ def _check_242cc6a923b3(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ col_names = [column.name for column in nodes.c]
+ self.assertIn('maintenance_reason', col_names)
+ self.assertIsInstance(nodes.c.maintenance_reason.type,
+ sqlalchemy.types.String)
+
+ def _pre_upgrade_5674c57409b9(self, engine):
+ # add some nodes in various states so we can assert that "None"
+ # was replaced by "available", and nothing else changed.
+ nodes = db_utils.get_table(engine, 'nodes')
+ data = [{'uuid': uuidutils.generate_uuid(),
+ 'provision_state': 'fake state'},
+ {'uuid': uuidutils.generate_uuid(),
+ 'provision_state': 'active'},
+ {'uuid': uuidutils.generate_uuid(),
+ 'provision_state': 'deleting'},
+ {'uuid': uuidutils.generate_uuid(),
+ 'provision_state': None}]
+ nodes.insert().values(data).execute()
+ return data
+
+ def _check_5674c57409b9(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ result = engine.execute(nodes.select())
+
+ def _get_state(uuid):
+ for row in data:
+ if row['uuid'] == uuid:
+ return row['provision_state']
+
+ for row in result:
+ old = _get_state(row['uuid'])
+ new = row['provision_state']
+ if old is None:
+ self.assertEqual('available', new)
+ else:
+ self.assertEqual(old, new)
+
+ def _check_bb59b63f55a(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ col_names = [column.name for column in nodes.c]
+ self.assertIn('driver_internal_info', col_names)
+ self.assertIsInstance(nodes.c.driver_internal_info.type,
+ sqlalchemy.types.TEXT)
+
+ def _check_4f399b21ae71(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ col_names = [column.name for column in nodes.c]
+ self.assertIn('clean_step', col_names)
+ self.assertIsInstance(nodes.c.clean_step.type,
+ sqlalchemy.types.String)
+
+ def _check_789acc877671(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ col_names = [column.name for column in nodes.c]
+ self.assertIn('raid_config', col_names)
+ self.assertIn('target_raid_config', col_names)
+ self.assertIsInstance(nodes.c.raid_config.type,
+ sqlalchemy.types.String)
+ self.assertIsInstance(nodes.c.target_raid_config.type,
+ sqlalchemy.types.String)
+
+ def _check_2fb93ffd2af1(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ bigstring = 'a' * 255
+ uuid = uuidutils.generate_uuid()
+ data = {'uuid': uuid, 'name': bigstring}
+ nodes.insert().execute(data)
+ node = nodes.select(nodes.c.uuid == uuid).execute().first()
+ self.assertEqual(bigstring, node['name'])
+
+ def _check_516faf1bb9b1(self, engine, data):
+ nodes = db_utils.get_table(engine, 'nodes')
+ bigstring = 'a' * 255
+ uuid = uuidutils.generate_uuid()
+ data = {'uuid': uuid, 'driver': bigstring}
+ nodes.insert().execute(data)
+ node = nodes.select(nodes.c.uuid == uuid).execute().first()
+ self.assertEqual(bigstring, node['driver'])
+
+ def test_upgrade_and_version(self):
+ with patch_with_engine(self.engine):
+ self.migration_api.upgrade('head')
+ self.assertIsNotNone(self.migration_api.version())
+
+ def test_create_schema_and_version(self):
+ with patch_with_engine(self.engine):
+ self.migration_api.create_schema()
+ self.assertIsNotNone(self.migration_api.version())
+
+ def test_upgrade_and_create_schema(self):
+ with patch_with_engine(self.engine):
+ self.migration_api.upgrade('31baaf680d2b')
+ self.assertRaises(db_exc.DbMigrationError,
+ self.migration_api.create_schema)
+
+ def test_upgrade_twice(self):
+ with patch_with_engine(self.engine):
+ self.migration_api.upgrade('31baaf680d2b')
+ v1 = self.migration_api.version()
+ self.migration_api.upgrade('head')
+ v2 = self.migration_api.version()
+ self.assertNotEqual(v1, v2)
+
+
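+# The concrete classes below just combine the mixins with oslo.db's
+# opportunistic test cases, which skip themselves when the corresponding
+# backend (MySQL or PostgreSQL) is not available.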
+class TestMigrationsMySQL(MigrationCheckersMixin,
+ WalkVersionsMixin,
+ test_base.MySQLOpportunisticTestCase):
+ pass
+
+
+class TestMigrationsPostgreSQL(MigrationCheckersMixin,
+ WalkVersionsMixin,
+ test_base.PostgreSQLOpportunisticTestCase):
+ pass
+
+
+class ModelsMigrationSyncMixin(object):
+
+ def get_metadata(self):
+ return models.Base.metadata
+
+ def get_engine(self):
+ return self.engine
+
+ def db_sync(self, engine):
+ with patch_with_engine(engine):
+ migration.upgrade('head')
+
+
+class ModelsMigrationsSyncMysql(ModelsMigrationSyncMixin,
+ test_migrations.ModelsMigrationsSync,
+ test_base.MySQLOpportunisticTestCase):
+ pass
+
+
+class ModelsMigrationsSyncPostgres(ModelsMigrationSyncMixin,
+ test_migrations.ModelsMigrationsSync,
+ test_base.PostgreSQLOpportunisticTestCase):
+ pass
diff --git a/ironic/tests/unit/db/sqlalchemy/test_types.py b/ironic/tests/unit/db/sqlalchemy/test_types.py
new file mode 100644
index 000000000..07d255fb9
--- /dev/null
+++ b/ironic/tests/unit/db/sqlalchemy/test_types.py
@@ -0,0 +1,79 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for custom SQLAlchemy types via Ironic DB."""
+
+from oslo_db import exception as db_exc
+from oslo_utils import uuidutils
+
+import ironic.db.sqlalchemy.api as sa_api
+from ironic.db.sqlalchemy import models
+from ironic.tests.unit.db import base
+
+
+class SqlAlchemyCustomTypesTestCase(base.DbTestCase):
+
+    # NOTE(max_lobur): Since it's not straightforward to check this in
+    # isolation, these tests use existing db models.
+
+ def test_JSONEncodedDict_default_value(self):
+ # Create chassis w/o extra specified.
+ ch1_id = uuidutils.generate_uuid()
+ self.dbapi.create_chassis({'uuid': ch1_id})
+ # Get chassis manually to test SA types in isolation from UOM.
+ ch1 = sa_api.model_query(models.Chassis).filter_by(uuid=ch1_id).one()
+ self.assertEqual({}, ch1.extra)
+
+ # Create chassis with extra specified.
+ ch2_id = uuidutils.generate_uuid()
+ extra = {'foo1': 'test', 'foo2': 'other extra'}
+ self.dbapi.create_chassis({'uuid': ch2_id, 'extra': extra})
+ # Get chassis manually to test SA types in isolation from UOM.
+ ch2 = sa_api.model_query(models.Chassis).filter_by(uuid=ch2_id).one()
+ self.assertEqual(extra, ch2.extra)
+
+ def test_JSONEncodedDict_type_check(self):
+ self.assertRaises(db_exc.DBError,
+ self.dbapi.create_chassis,
+ {'extra': ['this is not a dict']})
+
+    def test_JSONEncodedList_default_value(self):
+ # Create conductor w/o extra specified.
+ cdr1_id = 321321
+ self.dbapi.register_conductor({'hostname': 'test_host1',
+ 'drivers': None,
+ 'id': cdr1_id})
+ # Get conductor manually to test SA types in isolation from UOM.
+ cdr1 = (sa_api
+ .model_query(models.Conductor)
+ .filter_by(id=cdr1_id)
+ .one())
+ self.assertEqual([], cdr1.drivers)
+
+ # Create conductor with drivers specified.
+ cdr2_id = 623623
+ drivers = ['foo1', 'other driver']
+ self.dbapi.register_conductor({'hostname': 'test_host2',
+ 'drivers': drivers,
+ 'id': cdr2_id})
+ # Get conductor manually to test SA types in isolation from UOM.
+ cdr2 = (sa_api
+ .model_query(models.Conductor)
+ .filter_by(id=cdr2_id)
+ .one())
+ self.assertEqual(drivers, cdr2.drivers)
+
+ def test_JSONEncodedList_type_check(self):
+ self.assertRaises(db_exc.DBError,
+ self.dbapi.register_conductor,
+ {'hostname': 'test_host3',
+ 'drivers': {'this is not a list': 'test'}})
diff --git a/ironic/tests/unit/db/test_chassis.py b/ironic/tests/unit/db/test_chassis.py
new file mode 100644
index 000000000..db298c7c6
--- /dev/null
+++ b/ironic/tests/unit/db/test_chassis.py
@@ -0,0 +1,89 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for manipulating Chassis via the DB API"""
+
+from oslo_utils import uuidutils
+import six
+
+from ironic.common import exception
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils
+
+
+class DbChassisTestCase(base.DbTestCase):
+
+ def setUp(self):
+ super(DbChassisTestCase, self).setUp()
+ self.chassis = utils.create_test_chassis()
+
+ def test_get_chassis_list(self):
+ uuids = [self.chassis.uuid]
+ for i in range(1, 6):
+ ch = utils.create_test_chassis(uuid=uuidutils.generate_uuid())
+ uuids.append(six.text_type(ch.uuid))
+ res = self.dbapi.get_chassis_list()
+ res_uuids = [r.uuid for r in res]
+ six.assertCountEqual(self, uuids, res_uuids)
+
+ def test_get_chassis_by_id(self):
+ chassis = self.dbapi.get_chassis_by_id(self.chassis.id)
+
+ self.assertEqual(self.chassis.uuid, chassis.uuid)
+
+ def test_get_chassis_by_uuid(self):
+ chassis = self.dbapi.get_chassis_by_uuid(self.chassis.uuid)
+
+ self.assertEqual(self.chassis.id, chassis.id)
+
+ def test_get_chassis_that_does_not_exist(self):
+ self.assertRaises(exception.ChassisNotFound,
+ self.dbapi.get_chassis_by_id, 666)
+
+ def test_update_chassis(self):
+ res = self.dbapi.update_chassis(self.chassis.id,
+ {'description': 'hello'})
+
+ self.assertEqual('hello', res.description)
+
+ def test_update_chassis_that_does_not_exist(self):
+ self.assertRaises(exception.ChassisNotFound,
+ self.dbapi.update_chassis, 666, {'description': ''})
+
+ def test_update_chassis_uuid(self):
+ self.assertRaises(exception.InvalidParameterValue,
+ self.dbapi.update_chassis, self.chassis.id,
+ {'uuid': 'hello'})
+
+ def test_destroy_chassis(self):
+ self.dbapi.destroy_chassis(self.chassis.id)
+
+ self.assertRaises(exception.ChassisNotFound,
+ self.dbapi.get_chassis_by_id, self.chassis.id)
+
+ def test_destroy_chassis_that_does_not_exist(self):
+ self.assertRaises(exception.ChassisNotFound,
+ self.dbapi.destroy_chassis, 666)
+
+ def test_destroy_chassis_with_nodes(self):
+ utils.create_test_node(chassis_id=self.chassis.id)
+
+ self.assertRaises(exception.ChassisNotEmpty,
+ self.dbapi.destroy_chassis, self.chassis.id)
+
+ def test_create_chassis_already_exists(self):
+ self.assertRaises(exception.ChassisAlreadyExists,
+ utils.create_test_chassis,
+ uuid=self.chassis.uuid)
diff --git a/ironic/tests/unit/db/test_conductor.py b/ironic/tests/unit/db/test_conductor.py
new file mode 100644
index 000000000..a6a4e1638
--- /dev/null
+++ b/ironic/tests/unit/db/test_conductor.py
@@ -0,0 +1,219 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for manipulating Conductors via the DB API"""
+
+import datetime
+
+import mock
+from oslo_utils import timeutils
+
+from ironic.common import exception
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils
+
+
+class DbConductorTestCase(base.DbTestCase):
+
+ def test_register_conductor_existing_fails(self):
+ c = utils.get_test_conductor()
+ self.dbapi.register_conductor(c)
+ self.assertRaises(
+ exception.ConductorAlreadyRegistered,
+ self.dbapi.register_conductor,
+ c)
+
+ def test_register_conductor_override(self):
+ c = utils.get_test_conductor()
+ self.dbapi.register_conductor(c)
+ self.dbapi.register_conductor(c, update_existing=True)
+
+ def _create_test_cdr(self, **kwargs):
+ c = utils.get_test_conductor(**kwargs)
+ return self.dbapi.register_conductor(c)
+
+ def test_get_conductor(self):
+ c1 = self._create_test_cdr()
+ c2 = self.dbapi.get_conductor(c1.hostname)
+ self.assertEqual(c1.id, c2.id)
+
+ def test_get_conductor_not_found(self):
+ self._create_test_cdr()
+ self.assertRaises(
+ exception.ConductorNotFound,
+ self.dbapi.get_conductor,
+ 'bad-hostname')
+
+ def test_unregister_conductor(self):
+ c = self._create_test_cdr()
+ self.dbapi.unregister_conductor(c.hostname)
+ self.assertRaises(
+ exception.ConductorNotFound,
+ self.dbapi.unregister_conductor,
+ c.hostname)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_touch_conductor(self, mock_utcnow):
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = test_time
+ c = self._create_test_cdr()
+ self.assertEqual(test_time, timeutils.normalize_time(c.updated_at))
+
+ test_time = datetime.datetime(2000, 1, 1, 0, 1)
+ mock_utcnow.return_value = test_time
+ self.dbapi.touch_conductor(c.hostname)
+ c = self.dbapi.get_conductor(c.hostname)
+ self.assertEqual(test_time, timeutils.normalize_time(c.updated_at))
+
+ def test_touch_conductor_not_found(self):
+        # A conductor's heartbeat will not create a new record;
+        # it will only update existing ones.
+ self._create_test_cdr()
+ self.assertRaises(
+ exception.ConductorNotFound,
+ self.dbapi.touch_conductor,
+ 'bad-hostname')
+
+ def test_touch_offline_conductor(self):
+ # Ensure that a conductor's periodic heartbeat task can make the
+ # conductor visible again, even if it was spuriously marked offline
+ c = self._create_test_cdr()
+ self.dbapi.unregister_conductor(c.hostname)
+ self.assertRaises(
+ exception.ConductorNotFound,
+ self.dbapi.get_conductor,
+ c.hostname)
+ self.dbapi.touch_conductor(c.hostname)
+ self.dbapi.get_conductor(c.hostname)
+
+ def test_clear_node_reservations_for_conductor(self):
+ node1 = self.dbapi.create_node({'reservation': 'hostname1'})
+ node2 = self.dbapi.create_node({'reservation': 'hostname2'})
+ node3 = self.dbapi.create_node({'reservation': None})
+ self.dbapi.clear_node_reservations_for_conductor('hostname1')
+ node1 = self.dbapi.get_node_by_id(node1.id)
+ node2 = self.dbapi.get_node_by_id(node2.id)
+ node3 = self.dbapi.get_node_by_id(node3.id)
+ self.assertIsNone(node1.reservation)
+ self.assertEqual('hostname2', node2.reservation)
+ self.assertIsNone(node3.reservation)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_active_driver_dict_one_host_no_driver(self, mock_utcnow):
+ h = 'fake-host'
+ expected = {}
+
+ mock_utcnow.return_value = datetime.datetime.utcnow()
+ self._create_test_cdr(hostname=h, drivers=[])
+ result = self.dbapi.get_active_driver_dict()
+ self.assertEqual(expected, result)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_active_driver_dict_one_host_one_driver(self, mock_utcnow):
+ h = 'fake-host'
+ d = 'fake-driver'
+ expected = {d: set([h])}
+
+ mock_utcnow.return_value = datetime.datetime.utcnow()
+ self._create_test_cdr(hostname=h, drivers=[d])
+ result = self.dbapi.get_active_driver_dict()
+ self.assertEqual(expected, result)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_active_driver_dict_one_host_many_drivers(self, mock_utcnow):
+ h = 'fake-host'
+ d1 = 'driver-one'
+ d2 = 'driver-two'
+ expected = {d1: set([h]), d2: set([h])}
+
+ mock_utcnow.return_value = datetime.datetime.utcnow()
+ self._create_test_cdr(hostname=h, drivers=[d1, d2])
+ result = self.dbapi.get_active_driver_dict()
+ self.assertEqual(expected, result)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_active_driver_dict_many_hosts_one_driver(self, mock_utcnow):
+ h1 = 'host-one'
+ h2 = 'host-two'
+ d = 'fake-driver'
+ expected = {d: set([h1, h2])}
+
+ mock_utcnow.return_value = datetime.datetime.utcnow()
+ self._create_test_cdr(id=1, hostname=h1, drivers=[d])
+ self._create_test_cdr(id=2, hostname=h2, drivers=[d])
+ result = self.dbapi.get_active_driver_dict()
+ self.assertEqual(expected, result)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_active_driver_dict_many_hosts_and_drivers(self, mock_utcnow):
+ h1 = 'host-one'
+ h2 = 'host-two'
+ h3 = 'host-three'
+ d1 = 'driver-one'
+ d2 = 'driver-two'
+ expected = {d1: set([h1, h2]), d2: set([h2, h3])}
+
+ mock_utcnow.return_value = datetime.datetime.utcnow()
+ self._create_test_cdr(id=1, hostname=h1, drivers=[d1])
+ self._create_test_cdr(id=2, hostname=h2, drivers=[d1, d2])
+ self._create_test_cdr(id=3, hostname=h3, drivers=[d2])
+ result = self.dbapi.get_active_driver_dict()
+ self.assertEqual(expected, result)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_active_driver_dict_with_old_conductor(self, mock_utcnow):
+ past = datetime.datetime(2000, 1, 1, 0, 0)
+ present = past + datetime.timedelta(minutes=2)
+
+ d = 'common-driver'
+
+ h1 = 'old-host'
+ d1 = 'old-driver'
+ mock_utcnow.return_value = past
+ self._create_test_cdr(id=1, hostname=h1, drivers=[d, d1])
+
+ h2 = 'new-host'
+ d2 = 'new-driver'
+ mock_utcnow.return_value = present
+ self._create_test_cdr(id=2, hostname=h2, drivers=[d, d2])
+
+ # verify that old-host does not show up in current list
+ one_minute = 60
+ expected = {d: set([h2]), d2: set([h2])}
+ result = self.dbapi.get_active_driver_dict(interval=one_minute)
+ self.assertEqual(expected, result)
+
+ # change the interval, and verify that old-host appears
+ two_minute = one_minute * 2
+ expected = {d: set([h1, h2]), d1: set([h1]), d2: set([h2])}
+ result = self.dbapi.get_active_driver_dict(interval=two_minute)
+ self.assertEqual(expected, result)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_offline_conductors(self, mock_utcnow):
+ self.config(heartbeat_timeout=60, group='conductor')
+ time_ = datetime.datetime(2000, 1, 1, 0, 0)
+
+ mock_utcnow.return_value = time_
+ c = self._create_test_cdr()
+
+ # Only 30 seconds passed since last heartbeat, it's still
+ # considered alive
+ mock_utcnow.return_value = time_ + datetime.timedelta(seconds=30)
+ self.assertEqual([], self.dbapi.get_offline_conductors())
+
+ # 61 seconds passed since last heartbeat, it's dead
+ mock_utcnow.return_value = time_ + datetime.timedelta(seconds=61)
+ self.assertEqual([c.hostname], self.dbapi.get_offline_conductors())
diff --git a/ironic/tests/unit/db/test_nodes.py b/ironic/tests/unit/db/test_nodes.py
new file mode 100644
index 000000000..cbb4c10d6
--- /dev/null
+++ b/ironic/tests/unit/db/test_nodes.py
@@ -0,0 +1,536 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for manipulating Nodes via the DB API"""
+
+import datetime
+
+import mock
+from oslo_utils import timeutils
+from oslo_utils import uuidutils
+import six
+
+from ironic.common import exception
+from ironic.common import states
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils
+
+
+class DbNodeTestCase(base.DbTestCase):
+
+ def test_create_node(self):
+ utils.create_test_node()
+
+ def test_create_node_already_exists(self):
+ utils.create_test_node()
+ self.assertRaises(exception.NodeAlreadyExists,
+ utils.create_test_node)
+
+ def test_create_node_instance_already_associated(self):
+ instance = uuidutils.generate_uuid()
+ utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ instance_uuid=instance)
+ self.assertRaises(exception.InstanceAssociated,
+ utils.create_test_node,
+ uuid=uuidutils.generate_uuid(),
+ instance_uuid=instance)
+
+ def test_create_node_name_duplicate(self):
+ node = utils.create_test_node(name='spam')
+ self.assertRaises(exception.DuplicateName,
+ utils.create_test_node,
+ name=node.name)
+
+ def test_get_node_by_id(self):
+ node = utils.create_test_node()
+ res = self.dbapi.get_node_by_id(node.id)
+ self.assertEqual(node.id, res.id)
+ self.assertEqual(node.uuid, res.uuid)
+
+ def test_get_node_by_uuid(self):
+ node = utils.create_test_node()
+ res = self.dbapi.get_node_by_uuid(node.uuid)
+ self.assertEqual(node.id, res.id)
+ self.assertEqual(node.uuid, res.uuid)
+
+ def test_get_node_by_name(self):
+ node = utils.create_test_node()
+ res = self.dbapi.get_node_by_name(node.name)
+ self.assertEqual(node.id, res.id)
+ self.assertEqual(node.uuid, res.uuid)
+ self.assertEqual(node.name, res.name)
+
+ def test_get_node_that_does_not_exist(self):
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.get_node_by_id, 99)
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.get_node_by_uuid,
+ '12345678-9999-0000-aaaa-123456789012')
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.get_node_by_name,
+ 'spam-eggs-bacon-spam')
+
+ def test_get_nodeinfo_list_defaults(self):
+ node_id_list = []
+ for i in range(1, 6):
+ node = utils.create_test_node(uuid=uuidutils.generate_uuid())
+ node_id_list.append(node.id)
+ res = [i[0] for i in self.dbapi.get_nodeinfo_list()]
+ self.assertEqual(sorted(res), sorted(node_id_list))
+
+ def test_get_nodeinfo_list_with_cols(self):
+ uuids = {}
+ extras = {}
+ for i in range(1, 6):
+ uuid = uuidutils.generate_uuid()
+ extra = {'foo': i}
+ node = utils.create_test_node(extra=extra, uuid=uuid)
+ uuids[node.id] = uuid
+ extras[node.id] = extra
+ res = self.dbapi.get_nodeinfo_list(columns=['id', 'extra', 'uuid'])
+ self.assertEqual(extras, dict((r[0], r[1]) for r in res))
+ self.assertEqual(uuids, dict((r[0], r[2]) for r in res))
+
+ def test_get_nodeinfo_list_with_filters(self):
+ node1 = utils.create_test_node(
+ driver='driver-one',
+ instance_uuid=uuidutils.generate_uuid(),
+ reservation='fake-host',
+ uuid=uuidutils.generate_uuid())
+ node2 = utils.create_test_node(
+ driver='driver-two',
+ uuid=uuidutils.generate_uuid(),
+ maintenance=True)
+ node3 = utils.create_test_node(
+ driver='driver-one',
+ uuid=uuidutils.generate_uuid(),
+ reservation='another-fake-host')
+
+ res = self.dbapi.get_nodeinfo_list(filters={'driver': 'driver-one'})
+ self.assertEqual(sorted([node1.id, node3.id]),
+ sorted([r[0] for r in res]))
+
+ res = self.dbapi.get_nodeinfo_list(filters={'driver': 'bad-driver'})
+ self.assertEqual([], [r[0] for r in res])
+
+ res = self.dbapi.get_nodeinfo_list(filters={'associated': True})
+ self.assertEqual([node1.id], [r[0] for r in res])
+
+ res = self.dbapi.get_nodeinfo_list(filters={'associated': False})
+ self.assertEqual(sorted([node2.id, node3.id]),
+ sorted([r[0] for r in res]))
+
+ res = self.dbapi.get_nodeinfo_list(filters={'reserved': True})
+ self.assertEqual(sorted([node1.id, node3.id]),
+ sorted([r[0] for r in res]))
+
+ res = self.dbapi.get_nodeinfo_list(filters={'reserved': False})
+ self.assertEqual([node2.id], [r[0] for r in res])
+
+ res = self.dbapi.get_node_list(filters={'maintenance': True})
+ self.assertEqual([node2.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'maintenance': False})
+ self.assertEqual(sorted([node1.id, node3.id]),
+ sorted([r.id for r in res]))
+
+ res = self.dbapi.get_node_list(
+ filters={'reserved_by_any_of': ['fake-host',
+ 'another-fake-host']})
+ self.assertEqual(sorted([node1.id, node3.id]),
+ sorted([r.id for r in res]))
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_nodeinfo_list_provision(self, mock_utcnow):
+ past = datetime.datetime(2000, 1, 1, 0, 0)
+ next = past + datetime.timedelta(minutes=8)
+ present = past + datetime.timedelta(minutes=10)
+ mock_utcnow.return_value = past
+
+ # node with provision_updated timeout
+ node1 = utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ provision_updated_at=past)
+ # node with None in provision_updated_at
+ node2 = utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ provision_state=states.DEPLOYWAIT)
+ # node without timeout
+ utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ provision_updated_at=next)
+
+ mock_utcnow.return_value = present
+ res = self.dbapi.get_nodeinfo_list(filters={'provisioned_before': 300})
+ self.assertEqual([node1.id], [r[0] for r in res])
+
+ res = self.dbapi.get_nodeinfo_list(filters={'provision_state':
+ states.DEPLOYWAIT})
+ self.assertEqual([node2.id], [r[0] for r in res])
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_get_nodeinfo_list_inspection(self, mock_utcnow):
+ past = datetime.datetime(2000, 1, 1, 0, 0)
+ next = past + datetime.timedelta(minutes=8)
+ present = past + datetime.timedelta(minutes=10)
+ mock_utcnow.return_value = past
+
+        # node with inspection_started_at timeout
+        node1 = utils.create_test_node(uuid=uuidutils.generate_uuid(),
+                                       inspection_started_at=past)
+        # node with None in inspection_started_at
+ node2 = utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ provision_state=states.INSPECTING)
+ # node without timeout
+ utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ inspection_started_at=next)
+
+ mock_utcnow.return_value = present
+ res = self.dbapi.get_nodeinfo_list(
+ filters={'inspection_started_before': 300})
+ self.assertEqual([node1.id], [r[0] for r in res])
+
+ res = self.dbapi.get_nodeinfo_list(filters={'provision_state':
+ states.INSPECTING})
+ self.assertEqual([node2.id], [r[0] for r in res])
+
+ def test_get_node_list(self):
+ uuids = []
+ for i in range(1, 6):
+ node = utils.create_test_node(uuid=uuidutils.generate_uuid())
+ uuids.append(six.text_type(node['uuid']))
+ res = self.dbapi.get_node_list()
+ res_uuids = [r.uuid for r in res]
+ six.assertCountEqual(self, uuids, res_uuids)
+
+ def test_get_node_list_with_filters(self):
+ ch1 = utils.create_test_chassis(uuid=uuidutils.generate_uuid())
+ ch2 = utils.create_test_chassis(uuid=uuidutils.generate_uuid())
+
+ node1 = utils.create_test_node(
+ driver='driver-one',
+ instance_uuid=uuidutils.generate_uuid(),
+ reservation='fake-host',
+ uuid=uuidutils.generate_uuid(),
+ chassis_id=ch1['id'])
+ node2 = utils.create_test_node(
+ driver='driver-two',
+ uuid=uuidutils.generate_uuid(),
+ chassis_id=ch2['id'],
+ maintenance=True)
+
+ res = self.dbapi.get_node_list(filters={'chassis_uuid': ch1['uuid']})
+ self.assertEqual([node1.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'chassis_uuid': ch2['uuid']})
+ self.assertEqual([node2.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'driver': 'driver-one'})
+ self.assertEqual([node1.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'driver': 'bad-driver'})
+ self.assertEqual([], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'associated': True})
+ self.assertEqual([node1.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'associated': False})
+ self.assertEqual([node2.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'reserved': True})
+ self.assertEqual([node1.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'reserved': False})
+ self.assertEqual([node2.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'maintenance': True})
+ self.assertEqual([node2.id], [r.id for r in res])
+
+ res = self.dbapi.get_node_list(filters={'maintenance': False})
+ self.assertEqual([node1.id], [r.id for r in res])
+
+ def test_get_node_list_chassis_not_found(self):
+ self.assertRaises(exception.ChassisNotFound,
+ self.dbapi.get_node_list,
+ {'chassis_uuid': uuidutils.generate_uuid()})
+
+ def test_get_node_by_instance(self):
+ node = utils.create_test_node(
+ instance_uuid='12345678-9999-0000-aaaa-123456789012')
+
+ res = self.dbapi.get_node_by_instance(node.instance_uuid)
+ self.assertEqual(node.uuid, res.uuid)
+
+ def test_get_node_by_instance_wrong_uuid(self):
+ utils.create_test_node(
+ instance_uuid='12345678-9999-0000-aaaa-123456789012')
+
+ self.assertRaises(exception.InstanceNotFound,
+ self.dbapi.get_node_by_instance,
+ '12345678-9999-0000-bbbb-123456789012')
+
+ def test_get_node_by_instance_invalid_uuid(self):
+ self.assertRaises(exception.InvalidUUID,
+ self.dbapi.get_node_by_instance,
+ 'fake_uuid')
+
+ def test_destroy_node(self):
+ node = utils.create_test_node()
+
+ self.dbapi.destroy_node(node.id)
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.get_node_by_id, node.id)
+
+ def test_destroy_node_by_uuid(self):
+ node = utils.create_test_node()
+
+ self.dbapi.destroy_node(node.uuid)
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.get_node_by_uuid, node.uuid)
+
+ def test_destroy_node_that_does_not_exist(self):
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.destroy_node,
+ '12345678-9999-0000-aaaa-123456789012')
+
+ def test_ports_get_destroyed_after_destroying_a_node(self):
+ node = utils.create_test_node()
+
+ port = utils.create_test_port(node_id=node.id)
+
+ self.dbapi.destroy_node(node.id)
+
+ self.assertRaises(exception.PortNotFound,
+ self.dbapi.get_port_by_id, port.id)
+
+ def test_ports_get_destroyed_after_destroying_a_node_by_uuid(self):
+ node = utils.create_test_node()
+
+ port = utils.create_test_port(node_id=node.id)
+
+ self.dbapi.destroy_node(node.uuid)
+
+ self.assertRaises(exception.PortNotFound,
+ self.dbapi.get_port_by_id, port.id)
+
+ def test_update_node(self):
+ node = utils.create_test_node()
+
+ old_extra = node.extra
+ new_extra = {'foo': 'bar'}
+ self.assertNotEqual(old_extra, new_extra)
+
+ res = self.dbapi.update_node(node.id, {'extra': new_extra})
+ self.assertEqual(new_extra, res.extra)
+
+ def test_update_node_not_found(self):
+ node_uuid = uuidutils.generate_uuid()
+ new_extra = {'foo': 'bar'}
+ self.assertRaises(exception.NodeNotFound, self.dbapi.update_node,
+ node_uuid, {'extra': new_extra})
+
+ def test_update_node_uuid(self):
+ node = utils.create_test_node()
+ self.assertRaises(exception.InvalidParameterValue,
+ self.dbapi.update_node, node.id,
+ {'uuid': ''})
+
+ def test_update_node_associate_and_disassociate(self):
+ node = utils.create_test_node()
+ new_i_uuid = uuidutils.generate_uuid()
+ res = self.dbapi.update_node(node.id, {'instance_uuid': new_i_uuid})
+ self.assertEqual(new_i_uuid, res.instance_uuid)
+ res = self.dbapi.update_node(node.id, {'instance_uuid': None})
+ self.assertIsNone(res.instance_uuid)
+
+ def test_update_node_already_associated(self):
+ node = utils.create_test_node()
+ new_i_uuid_one = uuidutils.generate_uuid()
+ self.dbapi.update_node(node.id, {'instance_uuid': new_i_uuid_one})
+ new_i_uuid_two = uuidutils.generate_uuid()
+ self.assertRaises(exception.NodeAssociated,
+ self.dbapi.update_node,
+ node.id,
+ {'instance_uuid': new_i_uuid_two})
+
+ def test_update_node_instance_already_associated(self):
+ node1 = utils.create_test_node(uuid=uuidutils.generate_uuid())
+ new_i_uuid = uuidutils.generate_uuid()
+ self.dbapi.update_node(node1.id, {'instance_uuid': new_i_uuid})
+ node2 = utils.create_test_node(uuid=uuidutils.generate_uuid())
+ self.assertRaises(exception.InstanceAssociated,
+ self.dbapi.update_node,
+ node2.id,
+ {'instance_uuid': new_i_uuid})
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_update_node_provision(self, mock_utcnow):
+ mocked_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = mocked_time
+ node = utils.create_test_node()
+ res = self.dbapi.update_node(node.id, {'provision_state': 'fake'})
+ self.assertEqual(mocked_time,
+ timeutils.normalize_time(res['provision_updated_at']))
+
+ def test_update_node_name_duplicate(self):
+ node1 = utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ name='spam')
+ node2 = utils.create_test_node(uuid=uuidutils.generate_uuid())
+ self.assertRaises(exception.DuplicateName,
+ self.dbapi.update_node,
+ node2.id,
+ {'name': node1.name})
+
+ def test_update_node_no_provision(self):
+ node = utils.create_test_node()
+ res = self.dbapi.update_node(node.id, {'extra': {'foo': 'bar'}})
+ self.assertIsNone(res['provision_updated_at'])
+ self.assertIsNone(res['inspection_started_at'])
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_update_node_inspection_started_at(self, mock_utcnow):
+ mocked_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = mocked_time
+ node = utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ inspection_started_at=mocked_time)
+ res = self.dbapi.update_node(node.id, {'provision_state': 'fake'})
+ result = res['inspection_started_at']
+ self.assertEqual(mocked_time,
+ timeutils.normalize_time(result))
+ self.assertIsNone(res['inspection_finished_at'])
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_update_node_inspection_finished_at(self, mock_utcnow):
+ mocked_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = mocked_time
+ node = utils.create_test_node(uuid=uuidutils.generate_uuid(),
+ inspection_finished_at=mocked_time)
+ res = self.dbapi.update_node(node.id, {'provision_state': 'fake'})
+ result = res['inspection_finished_at']
+ self.assertEqual(mocked_time,
+ timeutils.normalize_time(result))
+ self.assertIsNone(res['inspection_started_at'])
+
+ def test_reserve_node(self):
+ node = utils.create_test_node()
+ uuid = node.uuid
+
+ r1 = 'fake-reservation'
+
+ # reserve the node
+ self.dbapi.reserve_node(r1, uuid)
+
+ # check reservation
+ res = self.dbapi.get_node_by_uuid(uuid)
+ self.assertEqual(r1, res.reservation)
+
+ def test_release_reservation(self):
+ node = utils.create_test_node()
+ uuid = node.uuid
+
+ r1 = 'fake-reservation'
+ self.dbapi.reserve_node(r1, uuid)
+
+ # release reservation
+ self.dbapi.release_node(r1, uuid)
+ res = self.dbapi.get_node_by_uuid(uuid)
+ self.assertIsNone(res.reservation)
+
+ def test_reservation_of_reserved_node_fails(self):
+ node = utils.create_test_node()
+ uuid = node.uuid
+
+ r1 = 'fake-reservation'
+ r2 = 'another-reservation'
+
+ # reserve the node
+ self.dbapi.reserve_node(r1, uuid)
+
+ # another host fails to reserve or release
+ self.assertRaises(exception.NodeLocked,
+ self.dbapi.reserve_node,
+ r2, uuid)
+ self.assertRaises(exception.NodeLocked,
+ self.dbapi.release_node,
+ r2, uuid)
+
+ def test_reservation_after_release(self):
+ node = utils.create_test_node()
+ uuid = node.uuid
+
+ r1 = 'fake-reservation'
+ r2 = 'another-reservation'
+
+ self.dbapi.reserve_node(r1, uuid)
+ self.dbapi.release_node(r1, uuid)
+
+ # another host succeeds
+ self.dbapi.reserve_node(r2, uuid)
+ res = self.dbapi.get_node_by_uuid(uuid)
+ self.assertEqual(r2, res.reservation)
+
+ def test_reservation_in_exception_message(self):
+ node = utils.create_test_node()
+ uuid = node.uuid
+
+ r = 'fake-reservation'
+ self.dbapi.reserve_node(r, uuid)
+ try:
+ self.dbapi.reserve_node('another', uuid)
+ except exception.NodeLocked as e:
+ self.assertIn(r, str(e))
+
+ def test_reservation_non_existent_node(self):
+ node = utils.create_test_node()
+ self.dbapi.destroy_node(node.id)
+
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.reserve_node, 'fake', node.id)
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.reserve_node, 'fake', node.uuid)
+
+ def test_release_non_existent_node(self):
+ node = utils.create_test_node()
+ self.dbapi.destroy_node(node.id)
+
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.release_node, 'fake', node.id)
+ self.assertRaises(exception.NodeNotFound,
+ self.dbapi.release_node, 'fake', node.uuid)
+
+ def test_release_non_locked_node(self):
+ node = utils.create_test_node()
+
+ self.assertIsNone(node.reservation)
+ self.assertRaises(exception.NodeNotLocked,
+ self.dbapi.release_node, 'fake', node.id)
+ self.assertRaises(exception.NodeNotLocked,
+ self.dbapi.release_node, 'fake', node.uuid)
+
+ @mock.patch.object(timeutils, 'utcnow', autospec=True)
+ def test_touch_node_provisioning(self, mock_utcnow):
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+ mock_utcnow.return_value = test_time
+ node = utils.create_test_node()
+ # assert provision_updated_at is None
+ self.assertIsNone(node.provision_updated_at)
+
+ self.dbapi.touch_node_provisioning(node.uuid)
+ node = self.dbapi.get_node_by_uuid(node.uuid)
+ # assert provision_updated_at has been updated
+ self.assertEqual(test_time,
+ timeutils.normalize_time(node.provision_updated_at))
+
+ def test_touch_node_provisioning_not_found(self):
+ self.assertRaises(
+ exception.NodeNotFound,
+ self.dbapi.touch_node_provisioning, uuidutils.generate_uuid())
diff --git a/ironic/tests/unit/db/test_ports.py b/ironic/tests/unit/db/test_ports.py
new file mode 100644
index 000000000..7d1be5184
--- /dev/null
+++ b/ironic/tests/unit/db/test_ports.py
@@ -0,0 +1,122 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for manipulating Ports via the DB API"""
+
+from oslo_utils import uuidutils
+import six
+
+from ironic.common import exception
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils as db_utils
+
+
+class DbPortTestCase(base.DbTestCase):
+
+ def setUp(self):
+        # This method creates a port for every test, so a separate
+        # test for port creation is not needed.
+ super(DbPortTestCase, self).setUp()
+ self.node = db_utils.create_test_node()
+ self.port = db_utils.create_test_port(node_id=self.node.id)
+
+ def test_get_port_by_id(self):
+ res = self.dbapi.get_port_by_id(self.port.id)
+ self.assertEqual(self.port.address, res.address)
+
+ def test_get_port_by_uuid(self):
+ res = self.dbapi.get_port_by_uuid(self.port.uuid)
+ self.assertEqual(self.port.id, res.id)
+
+ def test_get_port_by_address(self):
+ res = self.dbapi.get_port_by_address(self.port.address)
+ self.assertEqual(self.port.id, res.id)
+
+ def test_get_port_list(self):
+ uuids = []
+ for i in range(1, 6):
+ port = db_utils.create_test_port(uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:4%s' % i)
+ uuids.append(six.text_type(port.uuid))
+ # Also add the uuid for the port created in setUp()
+ uuids.append(six.text_type(self.port.uuid))
+ res = self.dbapi.get_port_list()
+ res_uuids = [r.uuid for r in res]
+ six.assertCountEqual(self, uuids, res_uuids)
+
+ def test_get_port_list_sorted(self):
+ uuids = []
+ for i in range(1, 6):
+ port = db_utils.create_test_port(uuid=uuidutils.generate_uuid(),
+ address='52:54:00:cf:2d:4%s' % i)
+ uuids.append(six.text_type(port.uuid))
+ # Also add the uuid for the port created in setUp()
+ uuids.append(six.text_type(self.port.uuid))
+ res = self.dbapi.get_port_list(sort_key='uuid')
+ res_uuids = [r.uuid for r in res]
+ self.assertEqual(sorted(uuids), res_uuids)
+
+ self.assertRaises(exception.InvalidParameterValue,
+ self.dbapi.get_port_list, sort_key='foo')
+
+ def test_get_ports_by_node_id(self):
+ res = self.dbapi.get_ports_by_node_id(self.node.id)
+ self.assertEqual(self.port.address, res[0].address)
+
+ def test_get_ports_by_node_id_that_does_not_exist(self):
+ self.assertEqual([], self.dbapi.get_ports_by_node_id(99))
+
+ def test_destroy_port(self):
+ self.dbapi.destroy_port(self.port.id)
+ self.assertRaises(exception.PortNotFound,
+ self.dbapi.destroy_port, self.port.id)
+
+ def test_update_port(self):
+ old_address = self.port.address
+ new_address = 'ff.ee.dd.cc.bb.aa'
+
+ self.assertNotEqual(old_address, new_address)
+
+ res = self.dbapi.update_port(self.port.id, {'address': new_address})
+ self.assertEqual(new_address, res.address)
+
+ def test_update_port_uuid(self):
+ self.assertRaises(exception.InvalidParameterValue,
+ self.dbapi.update_port, self.port.id,
+ {'uuid': ''})
+
+ def test_update_port_duplicated_address(self):
+ address1 = self.port.address
+ address2 = 'aa-bb-cc-11-22-33'
+ port2 = db_utils.create_test_port(uuid=uuidutils.generate_uuid(),
+ node_id=self.node.id,
+ address=address2)
+ self.assertRaises(exception.MACAlreadyExists,
+ self.dbapi.update_port, port2.id,
+ {'address': address1})
+
+ def test_create_port_duplicated_address(self):
+ self.assertRaises(exception.MACAlreadyExists,
+ db_utils.create_test_port,
+ uuid=uuidutils.generate_uuid(),
+ node_id=self.node.id,
+ address=self.port.address)
+
+ def test_create_port_duplicated_uuid(self):
+ self.assertRaises(exception.PortAlreadyExists,
+ db_utils.create_test_port,
+ uuid=self.port.uuid,
+ node_id=self.node.id,
+ address='aa-bb-cc-33-11-22')
diff --git a/ironic/tests/unit/db/utils.py b/ironic/tests/unit/db/utils.py
new file mode 100644
index 000000000..7ea0c5811
--- /dev/null
+++ b/ironic/tests/unit/db/utils.py
@@ -0,0 +1,328 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""Ironic test utilities."""
+
+
+from oslo_utils import timeutils
+
+from ironic.common import states
+from ironic.db import api as db_api
+
+
+def get_test_ipmi_info():
+ return {
+ "ipmi_address": "1.2.3.4",
+ "ipmi_username": "admin",
+ "ipmi_password": "fake"
+ }
+
+
+def get_test_ipmi_bridging_parameters():
+ return {
+ "ipmi_bridging": "dual",
+ "ipmi_local_address": "0x20",
+ "ipmi_transit_channel": "0",
+ "ipmi_transit_address": "0x82",
+ "ipmi_target_channel": "7",
+ "ipmi_target_address": "0x72"
+ }
+
+
+def get_test_ssh_info(auth_type='password'):
+ result = {
+ "ssh_address": "1.2.3.4",
+ "ssh_username": "admin",
+ "ssh_port": 22,
+ "ssh_virt_type": "vbox",
+ }
+ if 'password' == auth_type:
+ result['ssh_password'] = 'fake'
+ elif 'file' == auth_type:
+ result['ssh_key_filename'] = '/not/real/file'
+ elif 'key' == auth_type:
+ result['ssh_key_contents'] = '--BEGIN PRIVATE ...blah'
+ elif 'too_many' == auth_type:
+ result['ssh_password'] = 'fake'
+ result['ssh_key_filename'] = '/not/real/file'
+ else:
+ # No auth details (is invalid)
+ pass
+ return result
+
+
+def get_test_pxe_driver_info():
+ return {
+ "deploy_kernel": "glance://deploy_kernel_uuid",
+ "deploy_ramdisk": "glance://deploy_ramdisk_uuid",
+ }
+
+
+def get_test_pxe_driver_internal_info():
+ return {
+ "is_whole_disk_image": False,
+ }
+
+
+def get_test_pxe_instance_info():
+ return {
+ "image_source": "glance://image_uuid",
+ "root_gb": 100,
+ }
+
+
+def get_test_seamicro_info():
+ return {
+ "seamicro_api_endpoint": "http://1.2.3.4",
+ "seamicro_username": "admin",
+ "seamicro_password": "fake",
+ "seamicro_server_id": "0/0",
+ }
+
+
+def get_test_ilo_info():
+ return {
+ "ilo_address": "1.2.3.4",
+ "ilo_username": "admin",
+ "ilo_password": "fake",
+ }
+
+
+def get_test_drac_info():
+ return {
+ "drac_host": "1.2.3.4",
+ "drac_port": "443",
+ "drac_path": "/wsman",
+ "drac_protocol": "https",
+ "drac_username": "admin",
+ "drac_password": "fake",
+ }
+
+
+def get_test_irmc_info():
+ return {
+ "irmc_address": "1.2.3.4",
+ "irmc_username": "admin0",
+ "irmc_password": "fake0",
+ "irmc_port": 80,
+ "irmc_auth_method": "digest",
+ }
+
+
+def get_test_amt_info():
+ return {
+ "amt_address": "1.2.3.4",
+ "amt_protocol": "http",
+ "amt_username": "admin",
+ "amt_password": "fake",
+ }
+
+
+def get_test_msftocs_info():
+ return {
+ "msftocs_base_url": "http://fakehost:8000",
+ "msftocs_username": "admin",
+ "msftocs_password": "fake",
+ "msftocs_blade_id": 1,
+ }
+
+
+def get_test_agent_instance_info():
+ return {
+ 'image_source': 'fake-image',
+ 'image_url': 'http://image',
+ 'image_checksum': 'checksum',
+ 'image_disk_format': 'qcow2',
+ 'image_container_format': 'bare',
+ }
+
+
+def get_test_agent_driver_info():
+ return {
+ 'deploy_kernel': 'glance://deploy_kernel_uuid',
+ 'deploy_ramdisk': 'glance://deploy_ramdisk_uuid',
+ }
+
+
+def get_test_agent_driver_internal_info():
+ return {
+ 'agent_url': 'http://127.0.0.1/foo',
+ 'is_whole_disk_image': True,
+ }
+
+
+def get_test_iboot_info():
+ return {
+ "iboot_address": "1.2.3.4",
+ "iboot_username": "admin",
+ "iboot_password": "fake",
+ }
+
+
+def get_test_snmp_info(**kw):
+ result = {
+ "snmp_driver": kw.get("snmp_driver", "teltronix"),
+ "snmp_address": kw.get("snmp_address", "1.2.3.4"),
+ "snmp_port": kw.get("snmp_port", "161"),
+ "snmp_outlet": kw.get("snmp_outlet", "1"),
+ "snmp_version": kw.get("snmp_version", "1")
+ }
+ if result["snmp_version"] in ("1", "2c"):
+ result["snmp_community"] = kw.get("snmp_community", "public")
+ elif result["snmp_version"] == "3":
+ result["snmp_security"] = kw.get("snmp_security", "public")
+ return result
+
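+# Illustrative sketch of the version-dependent keys returned above (this
+# simply mirrors the branching in get_test_snmp_info, nothing more):
+#
+#     get_test_snmp_info()                  # v1 -> adds 'snmp_community'
+#     get_test_snmp_info(snmp_version='3')  # v3 -> adds 'snmp_security'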
+
+def get_test_node(**kw):
+ properties = {
+ "cpu_arch": "x86_64",
+ "cpus": "8",
+ "local_gb": "10",
+ "memory_mb": "4096",
+ }
+ fake_info = {"foo": "bar", "fake_password": "fakepass"}
+ return {
+ 'id': kw.get('id', 123),
+ 'name': kw.get('name', None),
+ 'uuid': kw.get('uuid', '1be26c0b-03f2-4d2e-ae87-c02d7f33c123'),
+ 'chassis_id': kw.get('chassis_id', None),
+ 'conductor_affinity': kw.get('conductor_affinity', None),
+ 'power_state': kw.get('power_state', states.NOSTATE),
+ 'target_power_state': kw.get('target_power_state', states.NOSTATE),
+ 'provision_state': kw.get('provision_state', states.NOSTATE),
+ 'target_provision_state': kw.get('target_provision_state',
+ states.NOSTATE),
+ 'provision_updated_at': kw.get('provision_updated_at'),
+ 'last_error': kw.get('last_error'),
+ 'instance_uuid': kw.get('instance_uuid'),
+ 'instance_info': kw.get('instance_info', fake_info),
+ 'driver': kw.get('driver', 'fake'),
+ 'driver_info': kw.get('driver_info', fake_info),
+ 'driver_internal_info': kw.get('driver_internal_info', fake_info),
+ 'clean_step': kw.get('clean_step'),
+ 'properties': kw.get('properties', properties),
+ 'reservation': kw.get('reservation'),
+ 'maintenance': kw.get('maintenance', False),
+ 'maintenance_reason': kw.get('maintenance_reason'),
+ 'console_enabled': kw.get('console_enabled', False),
+ 'extra': kw.get('extra', {}),
+ 'updated_at': kw.get('updated_at'),
+ 'created_at': kw.get('created_at'),
+ 'inspection_finished_at': kw.get('inspection_finished_at'),
+ 'inspection_started_at': kw.get('inspection_started_at'),
+ 'raid_config': kw.get('raid_config'),
+ 'target_raid_config': kw.get('target_raid_config'),
+ }
+
+
+def create_test_node(**kw):
+ """Create test node entry in DB and return Node DB object.
+
+ Function to be used to create test Node objects in the database.
+
+ :param kw: kwargs with overriding values for node's attributes.
+ :returns: Test Node DB object.
+
+ """
+ node = get_test_node(**kw)
+ # Let DB generate ID if it isn't specified explicitly
+ if 'id' not in kw:
+ del node['id']
+ dbapi = db_api.get_instance()
+ return dbapi.create_node(node)
+
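+# Example use of create_test_node in a DbTestCase-based test (a sketch; it
+# assumes the database fixture from ironic.tests.unit.db.base is in place):
+#
+#     node = create_test_node(driver='fake', name='test-node-1')
+#     # 'id' is omitted above, so the database assigns one automatically.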
+
+def get_test_port(**kw):
+ return {
+ 'id': kw.get('id', 987),
+ 'uuid': kw.get('uuid', '1be26c0b-03f2-4d2e-ae87-c02d7f33c781'),
+ 'node_id': kw.get('node_id', 123),
+ 'address': kw.get('address', '52:54:00:cf:2d:31'),
+ 'extra': kw.get('extra', {}),
+ 'created_at': kw.get('created_at'),
+ 'updated_at': kw.get('updated_at'),
+ }
+
+
+def create_test_port(**kw):
+ """Create test port entry in DB and return Port DB object.
+
+ Function to be used to create test Port objects in the database.
+
+ :param kw: kwargs with overriding values for port's attributes.
+ :returns: Test Port DB object.
+
+ """
+ port = get_test_port(**kw)
+ # Let DB generate ID if it isn't specified explicitly
+ if 'id' not in kw:
+ del port['id']
+ dbapi = db_api.get_instance()
+ return dbapi.create_port(port)
+
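+# Sketch of pairing a test port with a freshly created node (same DbTestCase
+# assumption as above; pass a distinct uuid= when creating several ports):
+#
+#     node = create_test_node()
+#     port = create_test_port(node_id=node.id, address='52:54:00:12:34:56')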
+
+def get_test_chassis(**kw):
+ return {
+ 'id': kw.get('id', 42),
+ 'uuid': kw.get('uuid', 'e74c40e0-d825-11e2-a28f-0800200c9a66'),
+ 'extra': kw.get('extra', {}),
+ 'description': kw.get('description', 'data-center-1-chassis'),
+ 'created_at': kw.get('created_at'),
+ 'updated_at': kw.get('updated_at'),
+ }
+
+
+def create_test_chassis(**kw):
+ """Create test chassis entry in DB and return Chassis DB object.
+
+ Function to be used to create test Chassis objects in the database.
+
+ :param kw: kwargs with overriding values for chassis's attributes.
+ :returns: Test Chassis DB object.
+
+ """
+ chassis = get_test_chassis(**kw)
+ # Let DB generate ID if it isn't specified explicitly
+ if 'id' not in kw:
+ del chassis['id']
+ dbapi = db_api.get_instance()
+ return dbapi.create_chassis(chassis)
+
+
+def get_test_conductor(**kw):
+ return {
+ 'id': kw.get('id', 6),
+ 'hostname': kw.get('hostname', 'test-conductor-node'),
+ 'drivers': kw.get('drivers', ['fake-driver', 'null-driver']),
+ 'created_at': kw.get('created_at', timeutils.utcnow()),
+ 'updated_at': kw.get('updated_at', timeutils.utcnow()),
+ }
+
+
+def get_test_ucs_info():
+ return {
+ "ucs_username": "admin",
+ "ucs_password": "password",
+ "ucs_service_profile": "org-root/ls-devstack",
+ "ucs_address": "ucs-b",
+ }
+
+
+def get_test_cimc_info():
+ return {
+ "cimc_username": "admin",
+ "cimc_password": "password",
+ "cimc_address": "1.2.3.4",
+ }
diff --git a/ironic/tests/unit/dhcp/__init__.py b/ironic/tests/unit/dhcp/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/dhcp/__init__.py
diff --git a/ironic/tests/unit/dhcp/test_factory.py b/ironic/tests/unit/dhcp/test_factory.py
new file mode 100644
index 000000000..8c1306957
--- /dev/null
+++ b/ironic/tests/unit/dhcp/test_factory.py
@@ -0,0 +1,113 @@
+# Copyright 2014 Rackspace, Inc.
+# All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import inspect
+
+import mock
+import stevedore
+
+from ironic.common import dhcp_factory
+from ironic.common import exception
+from ironic.dhcp import base as base_class
+from ironic.dhcp import neutron
+from ironic.dhcp import none
+from ironic.tests.unit import base
+
+
+class TestDHCPFactory(base.TestCase):
+
+ def setUp(self):
+ super(TestDHCPFactory, self).setUp()
+ self.config(enabled_drivers=['fake'])
+ self.config(url='test-url',
+ url_timeout=30,
+ group='neutron')
+ dhcp_factory.DHCPFactory._dhcp_provider = None
+ self.addCleanup(setattr, dhcp_factory.DHCPFactory,
+ '_dhcp_provider', None)
+
+ def test_default_dhcp(self):
+ # dhcp provider should default to neutron
+ api = dhcp_factory.DHCPFactory()
+ self.assertIsInstance(api.provider, neutron.NeutronDHCPApi)
+
+ def test_set_none_dhcp(self):
+ self.config(dhcp_provider='none',
+ group='dhcp')
+
+ api = dhcp_factory.DHCPFactory()
+ self.assertIsInstance(api.provider, none.NoneDHCPApi)
+
+ def test_set_neutron_dhcp(self):
+ self.config(dhcp_provider='neutron',
+ group='dhcp')
+
+ api = dhcp_factory.DHCPFactory()
+ self.assertIsInstance(api.provider, neutron.NeutronDHCPApi)
+
+ def test_only_one_dhcp(self):
+ self.config(dhcp_provider='none',
+ group='dhcp')
+ dhcp_factory.DHCPFactory()
+
+ with mock.patch.object(dhcp_factory.DHCPFactory,
+ '_set_dhcp_provider') as mock_set_dhcp:
+ # There is already a dhcp_provider, so this shouldn't call
+ # _set_dhcp_provider again.
+ dhcp_factory.DHCPFactory()
+ self.assertEqual(0, mock_set_dhcp.call_count)
+
+ def test_set_bad_dhcp(self):
+ self.config(dhcp_provider='bad_dhcp',
+ group='dhcp')
+
+ self.assertRaises(exception.DHCPLoadError, dhcp_factory.DHCPFactory)
+
+ @mock.patch.object(stevedore.driver, 'DriverManager', autospec=True)
+ def test_dhcp_some_error(self, mock_drv_mgr):
+ mock_drv_mgr.side_effect = Exception('No module mymod found.')
+ self.assertRaises(exception.DHCPLoadError, dhcp_factory.DHCPFactory)
+
+
+class CompareBasetoModules(base.TestCase):
+
+ def test_drivers_match_dhcp_base(self):
+ def _get_public_apis(inst):
+ methods = {}
+ for (name, value) in inspect.getmembers(inst, inspect.ismethod):
+ if name.startswith("_"):
+ continue
+ methods[name] = value
+ return methods
+
+ def _compare_classes(baseclass, driverclass):
+
+ basemethods = _get_public_apis(baseclass)
+ implmethods = _get_public_apis(driverclass)
+
+ for name in basemethods:
+ baseargs = inspect.getargspec(basemethods[name])
+ implargs = inspect.getargspec(implmethods[name])
+ self.assertEqual(
+ baseargs,
+ implargs,
+ "%s args of %s don't match base %s" % (
+ name,
+ driverclass,
+ baseclass)
+ )
+
+ _compare_classes(base_class.BaseDHCP, none.NoneDHCPApi)
+ _compare_classes(base_class.BaseDHCP, neutron.NeutronDHCPApi)
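+
+# NOTE: inspect.getargspec is deprecated and was removed in Python 3.11;
+# inspect.getfullargspec is the closest replacement if this signature
+# comparison ever needs to run on newer interpreters.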
diff --git a/ironic/tests/unit/dhcp/test_neutron.py b/ironic/tests/unit/dhcp/test_neutron.py
new file mode 100644
index 000000000..2f5b77e95
--- /dev/null
+++ b/ironic/tests/unit/dhcp/test_neutron.py
@@ -0,0 +1,484 @@
+#
+# Copyright 2014 OpenStack Foundation
+# All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+from neutronclient.common import exceptions as neutron_client_exc
+from neutronclient.v2_0 import client
+from oslo_utils import uuidutils
+
+from ironic.common import dhcp_factory
+from ironic.common import exception
+from ironic.common import pxe_utils
+from ironic.conductor import task_manager
+from ironic.dhcp import neutron
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.objects import utils as object_utils
+
+
+class TestNeutron(db_base.DbTestCase):
+
+ def setUp(self):
+ super(TestNeutron, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake')
+ self.config(
+ cleaning_network_uuid='00000000-0000-0000-0000-000000000000',
+ group='neutron')
+ self.config(enabled_drivers=['fake'])
+ self.config(dhcp_provider='neutron',
+ group='dhcp')
+ self.config(url='test-url',
+ url_timeout=30,
+ retries=2,
+ group='neutron')
+ self.config(insecure=False,
+ certfile='test-file',
+ admin_user='test-admin-user',
+ admin_tenant_name='test-admin-tenant',
+ admin_password='test-admin-password',
+ auth_uri='test-auth-uri',
+ group='keystone_authtoken')
+ self.node = object_utils.create_test_node(self.context)
+ self.ports = [
+ object_utils.create_test_port(
+ self.context, node_id=self.node.id, id=2,
+ uuid='1be26c0b-03f2-4d2e-ae87-c02d7f33c782',
+ address='52:54:00:cf:2d:32')]
+ # Very simple neutron port representation
+ self.neutron_port = {'id': '132f871f-eaec-4fed-9475-0d54465e0f00',
+ 'mac_address': '52:54:00:cf:2d:32'}
+
+ dhcp_factory.DHCPFactory._dhcp_provider = None
+
+ def test__build_client_invalid_auth_strategy(self):
+ self.config(auth_strategy='wrong_config', group='neutron')
+ token = 'test-token-123'
+ self.assertRaises(exception.ConfigInvalid,
+ neutron._build_client,
+ token=token)
+
+ @mock.patch.object(client.Client, "__init__")
+ def test__build_client_with_token(self, mock_client_init):
+ token = 'test-token-123'
+ expected = {'timeout': 30,
+ 'retries': 2,
+ 'insecure': False,
+ 'ca_cert': 'test-file',
+ 'token': token,
+ 'endpoint_url': 'test-url',
+ 'auth_strategy': None}
+
+ mock_client_init.return_value = None
+ neutron._build_client(token=token)
+ mock_client_init.assert_called_once_with(**expected)
+
+ @mock.patch.object(client.Client, "__init__")
+ def test__build_client_without_token(self, mock_client_init):
+ expected = {'timeout': 30,
+ 'retries': 2,
+ 'insecure': False,
+ 'ca_cert': 'test-file',
+ 'endpoint_url': 'test-url',
+ 'username': 'test-admin-user',
+ 'tenant_name': 'test-admin-tenant',
+ 'password': 'test-admin-password',
+ 'auth_url': 'test-auth-uri'}
+
+ mock_client_init.return_value = None
+ neutron._build_client(token=None)
+ mock_client_init.assert_called_once_with(**expected)
+
+ @mock.patch.object(client.Client, "__init__")
+ def test__build_client_with_region(self, mock_client_init):
+ expected = {'timeout': 30,
+ 'retries': 2,
+ 'insecure': False,
+ 'ca_cert': 'test-file',
+ 'endpoint_url': 'test-url',
+ 'username': 'test-admin-user',
+ 'tenant_name': 'test-admin-tenant',
+ 'password': 'test-admin-password',
+ 'auth_url': 'test-auth-uri',
+ 'region_name': 'test-region'}
+
+ self.config(region_name='test-region',
+ group='keystone')
+ mock_client_init.return_value = None
+ neutron._build_client(token=None)
+ mock_client_init.assert_called_once_with(**expected)
+
+ @mock.patch.object(client.Client, "__init__")
+ def test__build_client_noauth(self, mock_client_init):
+ self.config(auth_strategy='noauth', group='neutron')
+ expected = {'ca_cert': 'test-file',
+ 'insecure': False,
+ 'endpoint_url': 'test-url',
+ 'timeout': 30,
+ 'retries': 2,
+ 'auth_strategy': 'noauth'}
+
+ mock_client_init.return_value = None
+ neutron._build_client(token=None)
+ mock_client_init.assert_called_once_with(**expected)
+
+ @mock.patch.object(client.Client, 'update_port')
+ @mock.patch.object(client.Client, "__init__")
+ def test_update_port_dhcp_opts(self, mock_client_init, mock_update_port):
+ opts = [{'opt_name': 'bootfile-name',
+ 'opt_value': 'pxelinux.0'},
+ {'opt_name': 'tftp-server',
+ 'opt_value': '1.1.1.1'},
+ {'opt_name': 'server-ip-address',
+ 'opt_value': '1.1.1.1'}]
+ port_id = 'fake-port-id'
+ expected = {'port': {'extra_dhcp_opts': opts}}
+
+ mock_client_init.return_value = None
+ api = dhcp_factory.DHCPFactory()
+ api.provider.update_port_dhcp_opts(port_id, opts)
+ mock_update_port.assert_called_once_with(port_id, expected)
+
+ @mock.patch.object(client.Client, 'update_port')
+ @mock.patch.object(client.Client, "__init__")
+ def test_update_port_dhcp_opts_with_exception(self, mock_client_init,
+ mock_update_port):
+ opts = [{}]
+ port_id = 'fake-port-id'
+ mock_client_init.return_value = None
+ mock_update_port.side_effect = (
+ neutron_client_exc.NeutronClientException())
+
+ api = dhcp_factory.DHCPFactory()
+ self.assertRaises(
+ exception.FailedToUpdateDHCPOptOnPort,
+ api.provider.update_port_dhcp_opts,
+ port_id, opts)
+
+ @mock.patch.object(client.Client, 'update_port')
+ @mock.patch.object(client.Client, '__init__')
+ def test_update_port_address(self, mock_client_init, mock_update_port):
+ address = 'fe:54:00:77:07:d9'
+ port_id = 'fake-port-id'
+ expected = {'port': {'mac_address': address}}
+ mock_client_init.return_value = None
+
+ api = dhcp_factory.DHCPFactory()
+ api.provider.update_port_address(port_id, address)
+ mock_update_port.assert_called_once_with(port_id, expected)
+
+ @mock.patch.object(client.Client, 'update_port')
+ @mock.patch.object(client.Client, '__init__')
+ def test_update_port_address_with_exception(self, mock_client_init,
+ mock_update_port):
+ address = 'fe:54:00:77:07:d9'
+ port_id = 'fake-port-id'
+ mock_client_init.return_value = None
+
+ api = dhcp_factory.DHCPFactory()
+ mock_update_port.side_effect = (
+ neutron_client_exc.NeutronClientException())
+ self.assertRaises(exception.FailedToUpdateMacOnPort,
+ api.provider.update_port_address,
+ port_id, address)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.update_port_dhcp_opts')
+ @mock.patch('ironic.common.network.get_node_vif_ids')
+ def test_update_dhcp(self, mock_gnvi, mock_updo):
+ mock_gnvi.return_value = {'port-uuid': 'vif-uuid'}
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ opts = pxe_utils.dhcp_options_for_instance(task)
+ api = dhcp_factory.DHCPFactory()
+ api.update_dhcp(task, opts)
+ mock_updo.assert_called_once_with('vif-uuid', opts,
+ token=self.context.auth_token)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.update_port_dhcp_opts')
+ @mock.patch('ironic.common.network.get_node_vif_ids')
+ def test_update_dhcp_no_vif_data(self, mock_gnvi, mock_updo):
+ mock_gnvi.return_value = {}
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ api = dhcp_factory.DHCPFactory()
+ self.assertRaises(exception.FailedToUpdateDHCPOptOnPort,
+ api.update_dhcp, task, self.node)
+ self.assertFalse(mock_updo.called)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.update_port_dhcp_opts')
+ @mock.patch('ironic.common.network.get_node_vif_ids')
+ def test_update_dhcp_some_failures(self, mock_gnvi, mock_updo):
+ # confirm update is called twice, one fails, but no exception raised
+ mock_gnvi.return_value = {'p1': 'v1', 'p2': 'v2'}
+ exc = exception.FailedToUpdateDHCPOptOnPort('fake exception')
+ mock_updo.side_effect = [None, exc]
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ api = dhcp_factory.DHCPFactory()
+ api.update_dhcp(task, self.node)
+ mock_gnvi.assert_called_once_with(task)
+ self.assertEqual(2, mock_updo.call_count)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.update_port_dhcp_opts')
+ @mock.patch('ironic.common.network.get_node_vif_ids')
+ def test_update_dhcp_fails(self, mock_gnvi, mock_updo):
+ # confirm update is called twice, both fail, and exception is raised
+ mock_gnvi.return_value = {'p1': 'v1', 'p2': 'v2'}
+ exc = exception.FailedToUpdateDHCPOptOnPort('fake exception')
+ mock_updo.side_effect = [exc, exc]
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ api = dhcp_factory.DHCPFactory()
+ self.assertRaises(exception.FailedToUpdateDHCPOptOnPort,
+ api.update_dhcp,
+ task, self.node)
+ mock_gnvi.assert_called_once_with(task)
+ self.assertEqual(2, mock_updo.call_count)
+
+ def test__get_fixed_ip_address(self):
+ port_id = 'fake-port-id'
+ expected = "192.168.1.3"
+ api = dhcp_factory.DHCPFactory().provider
+ port_data = {
+ "id": port_id,
+ "network_id": "3cb9bc59-5699-4588-a4b1-b87f96708bc6",
+ "admin_state_up": True,
+ "status": "ACTIVE",
+ "mac_address": "fa:16:3e:4c:2c:30",
+ "fixed_ips": [
+ {
+ "ip_address": "192.168.1.3",
+ "subnet_id": "f8a6e8f8-c2ec-497c-9f23-da9616de54ef"
+ }
+ ],
+ "device_id": 'bece68a3-2f8b-4e66-9092-244493d6aba7',
+ }
+ fake_client = mock.Mock()
+ fake_client.show_port.return_value = {'port': port_data}
+ result = api._get_fixed_ip_address(port_id, fake_client)
+ self.assertEqual(expected, result)
+ fake_client.show_port.assert_called_once_with(port_id)
+
+ def test__get_fixed_ip_address_invalid_ip(self):
+ port_id = 'fake-port-id'
+ api = dhcp_factory.DHCPFactory().provider
+ port_data = {
+ "id": port_id,
+ "network_id": "3cb9bc59-5699-4588-a4b1-b87f96708bc6",
+ "admin_state_up": True,
+ "status": "ACTIVE",
+ "mac_address": "fa:16:3e:4c:2c:30",
+ "fixed_ips": [
+ {
+ "ip_address": "invalid.ip",
+ "subnet_id": "f8a6e8f8-c2ec-497c-9f23-da9616de54ef"
+ }
+ ],
+ "device_id": 'bece68a3-2f8b-4e66-9092-244493d6aba7',
+ }
+ fake_client = mock.Mock()
+ fake_client.show_port.return_value = {'port': port_data}
+ self.assertRaises(exception.InvalidIPv4Address,
+ api._get_fixed_ip_address,
+ port_id, fake_client)
+ fake_client.show_port.assert_called_once_with(port_id)
+
+ def test__get_fixed_ip_address_with_exception(self):
+ port_id = 'fake-port-id'
+ api = dhcp_factory.DHCPFactory().provider
+
+ fake_client = mock.Mock()
+ fake_client.show_port.side_effect = (
+ neutron_client_exc.NeutronClientException())
+ self.assertRaises(exception.FailedToGetIPAddressOnPort,
+ api._get_fixed_ip_address, port_id, fake_client)
+ fake_client.show_port.assert_called_once_with(port_id)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi._get_fixed_ip_address')
+ @mock.patch('ironic.common.network.get_node_vif_ids')
+ def test__get_port_ip_address(self, mock_gnvi, mock_gfia):
+ expected = "192.168.1.3"
+ port = object_utils.create_test_port(self.context,
+ node_id=self.node.id,
+ address='aa:bb:cc:dd:ee:ff',
+ uuid=uuidutils.generate_uuid(),
+ extra={'vif_port_id':
+ 'test-vif-A'},
+ driver='fake')
+ mock_gnvi.return_value = {port.uuid: 'vif-uuid'}
+ mock_gfia.return_value = expected
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ api = dhcp_factory.DHCPFactory().provider
+ result = api._get_port_ip_address(task, port.uuid,
+ mock.sentinel.client)
+ mock_gnvi.assert_called_once_with(task)
+ self.assertEqual(expected, result)
+ mock_gfia.assert_called_once_with('vif-uuid', mock.sentinel.client)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi._get_fixed_ip_address')
+ @mock.patch('ironic.common.network.get_node_vif_ids')
+ def test__get_port_ip_address_with_exception(self, mock_gnvi, mock_gfia):
+ expected = "192.168.1.3"
+ port = object_utils.create_test_port(self.context,
+ node_id=self.node.id,
+ address='aa:bb:cc:dd:ee:ff',
+ uuid=uuidutils.generate_uuid(),
+ extra={'vif_port_id':
+ 'test-vif-A'},
+ driver='fake')
+ mock_gnvi.return_value = None
+ mock_gfia.return_value = expected
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ api = dhcp_factory.DHCPFactory().provider
+ self.assertRaises(exception.FailedToGetIPAddressOnPort,
+ api._get_port_ip_address, task, port,
+ mock.sentinel.client)
+ mock_gnvi.assert_called_once_with(task)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi._get_port_ip_address')
+ def test_get_ip_addresses(self, get_ip_mock):
+ ip_address = '10.10.0.1'
+ expected = [ip_address]
+
+ get_ip_mock.return_value = ip_address
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ api = dhcp_factory.DHCPFactory().provider
+ result = api.get_ip_addresses(task)
+ get_ip_mock.assert_called_once_with(task, self.ports[0].uuid,
+ mock.ANY)
+ self.assertEqual(expected, result)
+
+ @mock.patch.object(client.Client, 'create_port')
+ def test_create_cleaning_ports(self, create_mock):
+        # Ensure we can create cleaning ports for in-band cleaning
+ create_mock.return_value = {'port': self.neutron_port}
+ expected = {self.ports[0].uuid: self.neutron_port['id']}
+ api = dhcp_factory.DHCPFactory().provider
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ ports = api.create_cleaning_ports(task)
+ self.assertEqual(expected, ports)
+ create_mock.assert_called_once_with({'port': {
+ 'network_id': '00000000-0000-0000-0000-000000000000',
+ 'admin_state_up': True, 'mac_address': self.ports[0].address}})
+
+ @mock.patch.object(neutron.NeutronDHCPApi, '_rollback_cleaning_ports')
+ @mock.patch.object(client.Client, 'create_port')
+ def test_create_cleaning_ports_fail(self, create_mock, rollback_mock):
+ # Check that if creating a port fails, the ports are cleaned up
+ create_mock.side_effect = neutron_client_exc.ConnectionFailed
+ api = dhcp_factory.DHCPFactory().provider
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.NodeCleaningFailure,
+ api.create_cleaning_ports,
+ task)
+ create_mock.assert_called_once_with({'port': {
+ 'network_id': '00000000-0000-0000-0000-000000000000',
+ 'admin_state_up': True, 'mac_address': self.ports[0].address}})
+ rollback_mock.assert_called_once_with(task)
+
+ @mock.patch.object(neutron.NeutronDHCPApi, '_rollback_cleaning_ports')
+ @mock.patch.object(client.Client, 'create_port')
+ def test_create_cleaning_ports_fail_delayed(self, create_mock,
+ rollback_mock):
+ """Check ports are cleaned up on failure to create them
+
+ This test checks that the port clean-up occurs
+ when the port create call was successful,
+ but the port in fact was not created.
+
+ """
+        # NOTE(pas-ha) this emulates the complex port object, which exposes
+        # both methods and dictionary-style access, where the elements
+        # themselves have methods as well
+ mockport = mock.MagicMock()
+ create_mock.return_value = mockport
+        # Fail only on the second 'or' branch so that lazy evaluation still
+        # executes both expressions, letting us assert on both mocks
+ mockport.get.return_value = True
+ mockitem = mock.Mock()
+ mockport.__getitem__.return_value = mockitem
+ mockitem.get.return_value = None
+ api = dhcp_factory.DHCPFactory().provider
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.NodeCleaningFailure,
+ api.create_cleaning_ports,
+ task)
+ create_mock.assert_called_once_with({'port': {
+ 'network_id': '00000000-0000-0000-0000-000000000000',
+ 'admin_state_up': True, 'mac_address': self.ports[0].address}})
+ rollback_mock.assert_called_once_with(task)
+ mockport.get.assert_called_once_with('port')
+ mockitem.get.assert_called_once_with('id')
+ mockport.__getitem__.assert_called_once_with('port')
+
+ @mock.patch.object(client.Client, 'create_port')
+ def test_create_cleaning_ports_bad_config(self, create_mock):
+ # Check an error is raised if the cleaning network is not set
+ self.config(cleaning_network_uuid=None, group='neutron')
+ api = dhcp_factory.DHCPFactory().provider
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ api.create_cleaning_ports, task)
+
+ @mock.patch.object(client.Client, 'delete_port')
+ @mock.patch.object(client.Client, 'list_ports')
+ def test_delete_cleaning_ports(self, list_mock, delete_mock):
+ # Ensure that we can delete cleaning ports, and that ports with
+ # different macs don't get deleted
+ other_port = {'id': '132f871f-eaec-4fed-9475-0d54465e0f01',
+ 'mac_address': 'aa:bb:cc:dd:ee:ff'}
+ list_mock.return_value = {'ports': [self.neutron_port, other_port]}
+ api = dhcp_factory.DHCPFactory().provider
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ api.delete_cleaning_ports(task)
+ list_mock.assert_called_once_with(
+ network_id='00000000-0000-0000-0000-000000000000')
+ delete_mock.assert_called_once_with(self.neutron_port['id'])
+
+ @mock.patch.object(client.Client, 'list_ports')
+ def test_delete_cleaning_ports_list_fail(self, list_mock):
+ # Check that if listing ports fails, the node goes to cleanfail
+ list_mock.side_effect = neutron_client_exc.ConnectionFailed
+ api = dhcp_factory.DHCPFactory().provider
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.NodeCleaningFailure,
+ api.delete_cleaning_ports,
+ task)
+ list_mock.assert_called_once_with(
+ network_id='00000000-0000-0000-0000-000000000000')
+
+ @mock.patch.object(client.Client, 'delete_port')
+ @mock.patch.object(client.Client, 'list_ports')
+ def test_delete_cleaning_ports_delete_fail(self, list_mock, delete_mock):
+ # Check that if deleting ports fails, the node goes to cleanfail
+ list_mock.return_value = {'ports': [self.neutron_port]}
+ delete_mock.side_effect = neutron_client_exc.ConnectionFailed
+ api = dhcp_factory.DHCPFactory().provider
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.NodeCleaningFailure,
+ api.delete_cleaning_ports,
+ task)
+ list_mock.assert_called_once_with(
+ network_id='00000000-0000-0000-0000-000000000000')
+ delete_mock.assert_called_once_with(self.neutron_port['id'])
diff --git a/ironic/tests/unit/drivers/__init__.py b/ironic/tests/unit/drivers/__init__.py
new file mode 100644
index 000000000..7a25e0c7f
--- /dev/null
+++ b/ironic/tests/unit/drivers/__init__.py
@@ -0,0 +1,21 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# NOTE(deva): since __init__ is loaded before the files in the same directory,
+# and some third-party driver tests may need to have their
+# external libraries mocked, we load the file which does that
+# mocking here -- in the __init__.
+
+from ironic.tests.unit.drivers import third_party_driver_mocks # noqa
diff --git a/ironic/tests/unit/drivers/agent_pxe_config.template b/ironic/tests/unit/drivers/agent_pxe_config.template
new file mode 100644
index 000000000..7b26d58cf
--- /dev/null
+++ b/ironic/tests/unit/drivers/agent_pxe_config.template
@@ -0,0 +1,5 @@
+default deploy
+
+label deploy
+kernel /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/deploy_kernel
+append initrd=/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/deploy_ramdisk text test_param ipa-api-url=http://192.168.122.184:6385 ipa-driver-name=agent_ipmitool root_device=vendor=fake,size=123 coreos.configdrive=0
diff --git a/ironic/tests/unit/drivers/amt/__init__.py b/ironic/tests/unit/drivers/amt/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/drivers/amt/__init__.py
diff --git a/ironic/tests/unit/drivers/amt/test_common.py b/ironic/tests/unit/drivers/amt/test_common.py
new file mode 100644
index 000000000..6c80036ea
--- /dev/null
+++ b/ironic/tests/unit/drivers/amt/test_common.py
@@ -0,0 +1,173 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for AMT Common
+"""
+
+import mock
+from oslo_config import cfg
+
+from ironic.common import exception
+from ironic.drivers.modules.amt import common as amt_common
+from ironic.drivers.modules.amt import resource_uris
+from ironic.tests.unit import base
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers.drac import utils as test_utils
+from ironic.tests.unit.drivers import third_party_driver_mock_specs \
+ as mock_specs
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_amt_info()
+CONF = cfg.CONF
+
+
+class AMTCommonMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(AMTCommonMethodsTestCase, self).setUp()
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_amt',
+ driver_info=INFO_DICT)
+
+ def test_parse_driver_info(self):
+ info = amt_common.parse_driver_info(self.node)
+
+ self.assertIsNotNone(info.get('address'))
+ self.assertIsNotNone(info.get('username'))
+ self.assertIsNotNone(info.get('password'))
+ self.assertIsNotNone(info.get('protocol'))
+ self.assertIsNotNone(info.get('uuid'))
+
+ def test_parse_driver_info_missing_address(self):
+ del self.node.driver_info['amt_address']
+
+ self.assertRaises(exception.MissingParameterValue,
+ amt_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_username(self):
+ del self.node.driver_info['amt_username']
+
+ self.assertRaises(exception.MissingParameterValue,
+ amt_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_password(self):
+ del self.node.driver_info['amt_password']
+ self.assertRaises(exception.MissingParameterValue,
+ amt_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_protocol(self):
+ del self.node.driver_info['amt_protocol']
+ info = amt_common.parse_driver_info(self.node)
+ self.assertEqual('http', info.get('protocol'))
+
+ def test_parse_driver_info_wrong_protocol(self):
+ self.node.driver_info['amt_protocol'] = 'fake-protocol'
+ self.assertRaises(exception.InvalidParameterValue,
+ amt_common.parse_driver_info, self.node)
+
+ @mock.patch.object(amt_common, 'Client', spec_set=True, autospec=True)
+ def test_get_wsman_client(self, mock_client):
+ info = amt_common.parse_driver_info(self.node)
+ amt_common.get_wsman_client(self.node)
+ options = {'address': info['address'],
+ 'protocol': info['protocol'],
+ 'username': info['username'],
+ 'password': info['password']}
+
+ mock_client.assert_called_once_with(**options)
+
+ def test_xml_find(self):
+ namespace = 'http://fake'
+ value = 'fake_value'
+ test_xml = test_utils.build_soap_xml([{'test_element': value}],
+ namespace)
+ mock_doc = test_utils.mock_wsman_root(test_xml)
+
+ result = amt_common.xml_find(mock_doc, namespace, 'test_element')
+ self.assertEqual(value, result.text)
+
+ def test_xml_find_fail(self):
+ mock_doc = None
+ self.assertRaises(exception.AMTConnectFailure,
+ amt_common.xml_find,
+ mock_doc, 'namespace', 'test_element')
+
+
+@mock.patch.object(amt_common, 'pywsman', spec_set=mock_specs.PYWSMAN_SPEC)
+class AMTCommonClientTestCase(base.TestCase):
+ def setUp(self):
+ super(AMTCommonClientTestCase, self).setUp()
+ self.info = {key[4:]: INFO_DICT[key] for key in INFO_DICT.keys()}
+
+ def test_wsman_get(self, mock_client_pywsman):
+ namespace = resource_uris.CIM_AssociatedPowerManagementService
+ result_xml = test_utils.build_soap_xml([{'PowerState':
+ '2'}],
+ namespace)
+ mock_doc = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.get.return_value = mock_doc
+ client = amt_common.Client(**self.info)
+
+ client.wsman_get(namespace)
+ mock_pywsman.get.assert_called_once_with(mock.ANY, namespace)
+
+ def test_wsman_get_fail(self, mock_client_pywsman):
+ namespace = amt_common._SOAP_ENVELOPE
+ result_xml = test_utils.build_soap_xml([{'Fault': 'fault'}],
+ namespace)
+ mock_doc = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.get.return_value = mock_doc
+ client = amt_common.Client(**self.info)
+
+ self.assertRaises(exception.AMTFailure, client.wsman_get, namespace)
+ mock_pywsman.get.assert_called_once_with(mock.ANY, namespace)
+
+ def test_wsman_invoke(self, mock_client_pywsman):
+ namespace = resource_uris.CIM_BootSourceSetting
+ result_xml = test_utils.build_soap_xml([{'ReturnValue':
+ '0'}],
+ namespace)
+ mock_doc = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = mock_doc
+ method = 'ChangeBootOrder'
+ options = mock.Mock(spec_set=[])
+ client = amt_common.Client(**self.info)
+ doc = None
+ client.wsman_invoke(options, namespace, method, doc)
+ mock_pywsman.invoke.assert_called_once_with(options, namespace, method)
+ doc = 'fake-input'
+ client.wsman_invoke(options, namespace, method, doc)
+ mock_pywsman.invoke.assert_called_with(options, namespace, method, doc)
+
+ def test_wsman_invoke_fail(self, mock_client_pywsman):
+ namespace = resource_uris.CIM_BootSourceSetting
+ result_xml = test_utils.build_soap_xml([{'ReturnValue':
+ '2'}],
+ namespace)
+ mock_doc = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = mock_doc
+ method = 'fake-method'
+ options = mock.Mock(spec_set=[])
+
+ client = amt_common.Client(**self.info)
+
+ self.assertRaises(exception.AMTFailure,
+ client.wsman_invoke,
+ options, namespace, method)
+ mock_pywsman.invoke.assert_called_once_with(options, namespace, method)
diff --git a/ironic/tests/unit/drivers/amt/test_management.py b/ironic/tests/unit/drivers/amt/test_management.py
new file mode 100644
index 000000000..4d567c480
--- /dev/null
+++ b/ironic/tests/unit/drivers/amt/test_management.py
@@ -0,0 +1,233 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for AMT ManagementInterface
+"""
+
+import mock
+from oslo_config import cfg
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.amt import common as amt_common
+from ironic.drivers.modules.amt import management as amt_mgmt
+from ironic.drivers.modules.amt import resource_uris
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers.drac import utils as test_utils
+from ironic.tests.unit.drivers import third_party_driver_mock_specs \
+ as mock_specs
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_amt_info()
+CONF = cfg.CONF
+
+
+@mock.patch.object(amt_common, 'pywsman', spec_set=mock_specs.PYWSMAN_SPEC)
+class AMTManagementInternalMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+        super(AMTManagementInternalMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_amt')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_amt',
+ driver_info=INFO_DICT)
+
+ def test__set_boot_device_order(self, mock_client_pywsman):
+ namespace = resource_uris.CIM_BootConfigSetting
+ device = boot_devices.PXE
+ result_xml = test_utils.build_soap_xml([{'ReturnValue': '0'}],
+ namespace)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = mock_xml
+
+ amt_mgmt._set_boot_device_order(self.node, device)
+
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, namespace, 'ChangeBootOrder', mock.ANY)
+
+ def test__set_boot_device_order_fail(self, mock_client_pywsman):
+ namespace = resource_uris.CIM_BootConfigSetting
+ device = boot_devices.PXE
+ result_xml = test_utils.build_soap_xml([{'ReturnValue': '2'}],
+ namespace)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = mock_xml
+
+ self.assertRaises(exception.AMTFailure,
+ amt_mgmt._set_boot_device_order, self.node, device)
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, namespace, 'ChangeBootOrder', mock.ANY)
+
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = None
+
+ self.assertRaises(exception.AMTConnectFailure,
+ amt_mgmt._set_boot_device_order, self.node, device)
+
+ def test__enable_boot_config(self, mock_client_pywsman):
+ namespace = resource_uris.CIM_BootService
+ result_xml = test_utils.build_soap_xml([{'ReturnValue': '0'}],
+ namespace)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = mock_xml
+
+ amt_mgmt._enable_boot_config(self.node)
+
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, namespace, 'SetBootConfigRole', mock.ANY)
+
+ def test__enable_boot_config_fail(self, mock_client_pywsman):
+ namespace = resource_uris.CIM_BootService
+ result_xml = test_utils.build_soap_xml([{'ReturnValue': '2'}],
+ namespace)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = mock_xml
+
+ self.assertRaises(exception.AMTFailure,
+ amt_mgmt._enable_boot_config, self.node)
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, namespace, 'SetBootConfigRole', mock.ANY)
+
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = None
+
+ self.assertRaises(exception.AMTConnectFailure,
+ amt_mgmt._enable_boot_config, self.node)
+
+
+class AMTManagementTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(AMTManagementTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_amt')
+ self.info = INFO_DICT
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_amt',
+ driver_info=self.info)
+
+ def test_get_properties(self):
+ expected = amt_common.COMMON_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ @mock.patch.object(amt_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.management.validate(task)
+ mock_drvinfo.assert_called_once_with(task.node)
+
+ @mock.patch.object(amt_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate_fail(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_drvinfo.side_effect = iter(
+ [exception.InvalidParameterValue('x')])
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.management.validate,
+ task)
+
+ def test_get_supported_boot_devices(self):
+ expected = [boot_devices.PXE, boot_devices.DISK, boot_devices.CDROM]
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(
+ sorted(expected),
+ sorted(task.driver.management.
+ get_supported_boot_devices(task)))
+
+ def test_set_boot_device_one_time(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.set_boot_device(task, 'pxe')
+ self.assertEqual('pxe',
+ task.node.driver_internal_info["amt_boot_device"])
+ self.assertFalse(
+ task.node.driver_internal_info["amt_boot_persistent"])
+
+ def test_set_boot_device_persistent(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.set_boot_device(task, 'pxe',
+ persistent=True)
+ self.assertEqual('pxe',
+ task.node.driver_internal_info["amt_boot_device"])
+ self.assertTrue(
+ task.node.driver_internal_info["amt_boot_persistent"])
+
+ def test_set_boot_device_fail(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.management.set_boot_device,
+ task, 'fake-device')
+
+ @mock.patch.object(amt_mgmt, '_enable_boot_config', spec_set=True,
+ autospec=True)
+ @mock.patch.object(amt_mgmt, '_set_boot_device_order', spec_set=True,
+ autospec=True)
+ def test_ensure_next_boot_device_one_time(self, mock_sbdo, mock_ebc):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ device = boot_devices.PXE
+ task.node.driver_internal_info['amt_boot_device'] = 'pxe'
+ task.driver.management.ensure_next_boot_device(task.node, device)
+ self.assertEqual('disk',
+ task.node.driver_internal_info["amt_boot_device"])
+ self.assertTrue(
+ task.node.driver_internal_info["amt_boot_persistent"])
+ mock_sbdo.assert_called_once_with(task.node, device)
+ mock_ebc.assert_called_once_with(task.node)
+
+ @mock.patch.object(amt_mgmt, '_enable_boot_config', spec_set=True,
+ autospec=True)
+ @mock.patch.object(amt_mgmt, '_set_boot_device_order', spec_set=True,
+ autospec=True)
+ def test_ensure_next_boot_device_persistent(self, mock_sbdo, mock_ebc):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ device = boot_devices.PXE
+ task.node.driver_internal_info['amt_boot_device'] = 'pxe'
+ task.node.driver_internal_info['amt_boot_persistent'] = True
+ task.driver.management.ensure_next_boot_device(task.node, device)
+ self.assertEqual('pxe',
+ task.node.driver_internal_info["amt_boot_device"])
+ self.assertTrue(
+ task.node.driver_internal_info["amt_boot_persistent"])
+ mock_sbdo.assert_called_once_with(task.node, device)
+ mock_ebc.assert_called_once_with(task.node)
+
+ def test_get_boot_device(self):
+ expected = {'boot_device': boot_devices.DISK, 'persistent': True}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected,
+ task.driver.management.get_boot_device(task))
+
+ def test_get_sensor_data(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(NotImplementedError,
+ task.driver.management.get_sensors_data,
+ task)
diff --git a/ironic/tests/unit/drivers/amt/test_power.py b/ironic/tests/unit/drivers/amt/test_power.py
new file mode 100644
index 000000000..1d203a441
--- /dev/null
+++ b/ironic/tests/unit/drivers/amt/test_power.py
@@ -0,0 +1,282 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for AMT PowerInterface
+"""
+
+import mock
+from oslo_config import cfg
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules.amt import common as amt_common
+from ironic.drivers.modules.amt import management as amt_mgmt
+from ironic.drivers.modules.amt import power as amt_power
+from ironic.drivers.modules.amt import resource_uris
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers.drac import utils as test_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_amt_info()
+CONF = cfg.CONF
+
+
+class AMTPowerInternalMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+        super(AMTPowerInternalMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_amt')
+ self.info = INFO_DICT
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_amt',
+ driver_info=self.info)
+ CONF.set_override('max_attempts', 2, 'amt')
+ CONF.set_override('action_wait', 0, 'amt')
+
+ @mock.patch.object(amt_common, 'get_wsman_client', spec_set=True,
+ autospec=True)
+ def test__set_power_state(self, mock_client_pywsman):
+ namespace = resource_uris.CIM_PowerManagementService
+ mock_client = mock_client_pywsman.return_value
+ amt_power._set_power_state(self.node, states.POWER_ON)
+ mock_client.wsman_invoke.assert_called_once_with(
+ mock.ANY, namespace, 'RequestPowerStateChange', mock.ANY)
+
+ @mock.patch.object(amt_common, 'get_wsman_client', spec_set=True,
+ autospec=True)
+ def test__set_power_state_fail(self, mock_client_pywsman):
+ mock_client = mock_client_pywsman.return_value
+ mock_client.wsman_invoke.side_effect = exception.AMTFailure('x')
+ self.assertRaises(exception.AMTFailure,
+ amt_power._set_power_state,
+ self.node, states.POWER_ON)
+
+ @mock.patch.object(amt_common, 'get_wsman_client', spec_set=True,
+ autospec=True)
+ def test__power_status(self, mock_gwc):
+ namespace = resource_uris.CIM_AssociatedPowerManagementService
+ result_xml = test_utils.build_soap_xml([{'PowerState':
+ '2'}],
+ namespace)
+ mock_doc = test_utils.mock_wsman_root(result_xml)
+ mock_client = mock_gwc.return_value
+ mock_client.wsman_get.return_value = mock_doc
+ self.assertEqual(
+ states.POWER_ON, amt_power._power_status(self.node))
+
+ result_xml = test_utils.build_soap_xml([{'PowerState':
+ '8'}],
+ namespace)
+ mock_doc = test_utils.mock_wsman_root(result_xml)
+ mock_client = mock_gwc.return_value
+ mock_client.wsman_get.return_value = mock_doc
+ self.assertEqual(
+ states.POWER_OFF, amt_power._power_status(self.node))
+
+ result_xml = test_utils.build_soap_xml([{'PowerState':
+ '4'}],
+ namespace)
+ mock_doc = test_utils.mock_wsman_root(result_xml)
+ mock_client = mock_gwc.return_value
+ mock_client.wsman_get.return_value = mock_doc
+ self.assertEqual(
+ states.ERROR, amt_power._power_status(self.node))
+
+ @mock.patch.object(amt_common, 'get_wsman_client', spec_set=True,
+ autospec=True)
+ def test__power_status_fail(self, mock_gwc):
+ mock_client = mock_gwc.return_value
+ mock_client.wsman_get.side_effect = exception.AMTFailure('x')
+ self.assertRaises(exception.AMTFailure,
+ amt_power._power_status,
+ self.node)
+
+ @mock.patch.object(amt_mgmt.AMTManagement, 'ensure_next_boot_device',
+ spec_set=True, autospec=True)
+ @mock.patch.object(amt_power, '_power_status', spec_set=True,
+ autospec=True)
+ @mock.patch.object(amt_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ def test__set_and_wait_power_on_with_boot_device(self, mock_sps,
+ mock_ps, mock_enbd):
+ target_state = states.POWER_ON
+ boot_device = boot_devices.PXE
+ mock_ps.side_effect = iter([states.POWER_OFF, states.POWER_ON])
+ mock_enbd.return_value = None
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.driver_internal_info['amt_boot_device'] = boot_device
+ result = amt_power._set_and_wait(task, target_state)
+ self.assertEqual(states.POWER_ON, result)
+ mock_enbd.assert_called_with(task.driver.management, task.node,
+ boot_devices.PXE)
+ mock_sps.assert_called_once_with(task.node, states.POWER_ON)
+ mock_ps.assert_called_with(task.node)
+
+ @mock.patch.object(amt_power, '_power_status', spec_set=True,
+ autospec=True)
+ @mock.patch.object(amt_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ def test__set_and_wait_power_on_without_boot_device(self, mock_sps,
+ mock_ps):
+ target_state = states.POWER_ON
+ mock_ps.side_effect = iter([states.POWER_OFF, states.POWER_ON])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(states.POWER_ON,
+ amt_power._set_and_wait(task, target_state))
+ mock_sps.assert_called_once_with(task.node, states.POWER_ON)
+ mock_ps.assert_called_with(task.node)
+
+ boot_device = boot_devices.DISK
+ self.node.driver_internal_info['amt_boot_device'] = boot_device
+ mock_ps.side_effect = iter([states.POWER_OFF, states.POWER_ON])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(states.POWER_ON,
+ amt_power._set_and_wait(task, target_state))
+ mock_sps.assert_called_with(task.node, states.POWER_ON)
+ mock_ps.assert_called_with(task.node)
+
+ def test__set_and_wait_wrong_target_state(self):
+ target_state = 'fake-state'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ amt_power._set_and_wait, task, target_state)
+
+ @mock.patch.object(amt_power, '_power_status', spec_set=True,
+ autospec=True)
+ @mock.patch.object(amt_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ def test__set_and_wait_exceed_iterations(self, mock_sps,
+ mock_ps):
+ target_state = states.POWER_ON
+ mock_ps.side_effect = iter([states.POWER_OFF, states.POWER_OFF,
+ states.POWER_OFF])
+ mock_sps.return_value = exception.AMTFailure('x')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ amt_power._set_and_wait, task, target_state)
+ mock_sps.assert_called_with(task.node, states.POWER_ON)
+ mock_ps.assert_called_with(task.node)
+ self.assertEqual(3, mock_ps.call_count)
+
+ @mock.patch.object(amt_power, '_power_status', spec_set=True,
+ autospec=True)
+ def test__set_and_wait_already_target_state(self, mock_ps):
+ target_state = states.POWER_ON
+ mock_ps.side_effect = iter([states.POWER_ON])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(states.POWER_ON,
+ amt_power._set_and_wait(task, target_state))
+ mock_ps.assert_called_with(task.node)
+
+ @mock.patch.object(amt_power, '_power_status', spec_set=True,
+ autospec=True)
+ @mock.patch.object(amt_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ def test__set_and_wait_power_off(self, mock_sps, mock_ps):
+ target_state = states.POWER_OFF
+ mock_ps.side_effect = iter([states.POWER_ON, states.POWER_OFF])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(states.POWER_OFF,
+ amt_power._set_and_wait(task, target_state))
+ mock_sps.assert_called_once_with(task.node, states.POWER_OFF)
+ mock_ps.assert_called_with(task.node)
+
+
+class AMTPowerTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(AMTPowerTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_amt')
+ self.info = INFO_DICT
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_amt',
+ driver_info=self.info)
+
+ def test_get_properties(self):
+ expected = amt_common.COMMON_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ @mock.patch.object(amt_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.power.validate(task)
+ mock_drvinfo.assert_called_once_with(task.node)
+
+ @mock.patch.object(amt_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate_fail(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_drvinfo.side_effect = iter(
+ [exception.InvalidParameterValue('x')])
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.validate,
+ task)
+
+ @mock.patch.object(amt_power, '_power_status', spec_set=True,
+ autospec=True)
+ def test_get_power_state(self, mock_ps):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_ps.return_value = states.POWER_ON
+ self.assertEqual(states.POWER_ON,
+ task.driver.power.get_power_state(task))
+ mock_ps.assert_called_once_with(task.node)
+
+ @mock.patch.object(amt_power, '_set_and_wait', spec_set=True,
+ autospec=True)
+ def test_set_power_state(self, mock_saw):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ pstate = states.POWER_ON
+ mock_saw.return_value = states.POWER_ON
+ task.driver.power.set_power_state(task, pstate)
+ mock_saw.assert_called_once_with(task, pstate)
+
+ @mock.patch.object(amt_power, '_set_and_wait', spec_set=True,
+ autospec=True)
+ def test_set_power_state_fail(self, mock_saw):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ pstate = states.POWER_ON
+ mock_saw.side_effect = iter([exception.PowerStateFailure('x')])
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task, pstate)
+ mock_saw.assert_called_once_with(task, pstate)
+
+ @mock.patch.object(amt_power, '_set_and_wait', spec_set=True,
+ autospec=True)
+ def test_reboot(self, mock_saw):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.reboot(task)
+ calls = [mock.call(task, states.POWER_OFF),
+ mock.call(task, states.POWER_ON)]
+ mock_saw.assert_has_calls(calls)
diff --git a/ironic/tests/unit/drivers/amt/test_vendor.py b/ironic/tests/unit/drivers/amt/test_vendor.py
new file mode 100644
index 000000000..07d893251
--- /dev/null
+++ b/ironic/tests/unit/drivers/amt/test_vendor.py
@@ -0,0 +1,132 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for AMT Vendor methods."""
+
+import mock
+
+from ironic.common import boot_devices
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules.amt import management as amt_mgmt
+from ironic.drivers.modules import iscsi_deploy
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_amt_info()
+
+
+class AMTPXEVendorPassthruTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(AMTPXEVendorPassthruTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="pxe_amt")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='pxe_amt', driver_info=INFO_DICT)
+
+ def test_vendor_routes(self):
+ expected = ['heartbeat', 'pass_deploy_info',
+ 'pass_bootloader_install_info']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ vendor_routes = task.driver.vendor.vendor_routes
+ self.assertIsInstance(vendor_routes, dict)
+ self.assertEqual(sorted(expected), sorted(list(vendor_routes)))
+
+ def test_driver_routes(self):
+ expected = ['lookup']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ driver_routes = task.driver.vendor.driver_routes
+ self.assertIsInstance(driver_routes, dict)
+ self.assertEqual(sorted(expected), sorted(list(driver_routes)))
+
+ @mock.patch.object(amt_mgmt.AMTManagement, 'ensure_next_boot_device',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy.VendorPassthru, 'pass_deploy_info',
+ spec_set=True, autospec=True)
+ def test_vendorpassthru_pass_deploy_info_netboot(self,
+ mock_pxe_vendorpassthru,
+ mock_ensure):
+ kwargs = {'address': '123456'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.DEPLOYWAIT
+ task.node.target_provision_state = states.ACTIVE
+ task.node.instance_info['capabilities'] = {
+ "boot_option": "netboot"
+ }
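+ # With the netboot option, AMT re-pins PXE as the next boot device in
+ # addition to running the base iSCSI deploy passthru (both asserted below).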
+ task.driver.vendor.pass_deploy_info(task, **kwargs)
+ mock_ensure.assert_called_with(
+ task.driver.management, task.node, boot_devices.PXE)
+ mock_pxe_vendorpassthru.assert_called_once_with(
+ task.driver.vendor, task, **kwargs)
+
+ @mock.patch.object(amt_mgmt.AMTManagement, 'ensure_next_boot_device',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy.VendorPassthru, 'pass_deploy_info',
+ spec_set=True, autospec=True)
+ def test_vendorpassthru_pass_deploy_info_localboot(self,
+ mock_pxe_vendorpassthru,
+ mock_ensure):
+ kwargs = {'address': '123456'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.DEPLOYWAIT
+ task.node.target_provision_state = states.ACTIVE
+ task.node.instance_info['capabilities'] = {"boot_option": "local"}
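+ # Local boot should leave the AMT boot device alone; only the base
+ # deploy passthru is expected to run.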
+ task.driver.vendor.pass_deploy_info(task, **kwargs)
+ self.assertFalse(mock_ensure.called)
+ mock_pxe_vendorpassthru.assert_called_once_with(
+ task.driver.vendor, task, **kwargs)
+
+ @mock.patch.object(amt_mgmt.AMTManagement, 'ensure_next_boot_device',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy.VendorPassthru, 'continue_deploy',
+ spec_set=True, autospec=True)
+ def test_vendorpassthru_continue_deploy_netboot(self,
+ mock_pxe_vendorpassthru,
+ mock_ensure):
+ kwargs = {'address': '123456'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.DEPLOYWAIT
+ task.node.target_provision_state = states.ACTIVE
+ task.node.instance_info['capabilities'] = {
+ "boot_option": "netboot"
+ }
+ task.driver.vendor.continue_deploy(task, **kwargs)
+ mock_ensure.assert_called_with(
+ task.driver.management, task.node, boot_devices.PXE)
+ mock_pxe_vendorpassthru.assert_called_once_with(
+ task.driver.vendor, task, **kwargs)
+
+ @mock.patch.object(amt_mgmt.AMTManagement, 'ensure_next_boot_device',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy.VendorPassthru, 'continue_deploy',
+ spec_set=True, autospec=True)
+ def test_vendorpassthru_continue_deploy_localboot(self,
+ mock_pxe_vendorpassthru,
+ mock_ensure):
+ kwargs = {'address': '123456'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.DEPLOYWAIT
+ task.node.target_provision_state = states.ACTIVE
+ task.node.instance_info['capabilities'] = {"boot_option": "local"}
+ task.driver.vendor.continue_deploy(task, **kwargs)
+ self.assertFalse(mock_ensure.called)
+ mock_pxe_vendorpassthru.assert_called_once_with(
+ task.driver.vendor, task, **kwargs)
diff --git a/ironic/tests/unit/drivers/cimc/__init__.py b/ironic/tests/unit/drivers/cimc/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/drivers/cimc/__init__.py
diff --git a/ironic/tests/unit/drivers/cimc/test_common.py b/ironic/tests/unit/drivers/cimc/test_common.py
new file mode 100644
index 000000000..0924f81de
--- /dev/null
+++ b/ironic/tests/unit/drivers/cimc/test_common.py
@@ -0,0 +1,125 @@
+# Copyright 2015, Cisco Systems.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+
+from oslo_config import cfg
+from oslo_utils import importutils
+
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.cimc import common as cimc_common
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+imcsdk = importutils.try_import('ImcSdk')
+
+CONF = cfg.CONF
+
+
+class CIMCBaseTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(CIMCBaseTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_cimc")
+ self.node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_cimc',
+ driver_info=db_utils.get_test_cimc_info(),
+ instance_uuid="fake_uuid")
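+ # Keep CIMC power-state polling quick in tests: two retries, no sleep.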
+ CONF.set_override('max_retry', 2, 'cimc')
+ CONF.set_override('action_interval', 0, 'cimc')
+
+
+class ParseDriverInfoTestCase(CIMCBaseTestCase):
+
+ def test_parse_driver_info(self):
+ info = cimc_common.parse_driver_info(self.node)
+
+ self.assertIsNotNone(info.get('cimc_address'))
+ self.assertIsNotNone(info.get('cimc_username'))
+ self.assertIsNotNone(info.get('cimc_password'))
+
+ def test_parse_driver_info_missing_address(self):
+ del self.node.driver_info['cimc_address']
+ self.assertRaises(exception.MissingParameterValue,
+ cimc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_username(self):
+ del self.node.driver_info['cimc_username']
+ self.assertRaises(exception.MissingParameterValue,
+ cimc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_password(self):
+ del self.node.driver_info['cimc_password']
+ self.assertRaises(exception.MissingParameterValue,
+ cimc_common.parse_driver_info, self.node)
+
+
+@mock.patch.object(cimc_common, 'cimc_handle', autospec=True)
+class CIMCHandleLogin(CIMCBaseTestCase):
+
+ def test_cimc_handle_login(self, mock_handle):
+ info = cimc_common.parse_driver_info(self.node)
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ cimc_common.handle_login(task, handle, info)
+
+ handle.login.assert_called_once_with(
+ self.node.driver_info['cimc_address'],
+ self.node.driver_info['cimc_username'],
+ self.node.driver_info['cimc_password'])
+
+ def test_cimc_handle_login_exception(self, mock_handle):
+ info = cimc_common.parse_driver_info(self.node)
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ handle.login.side_effect = imcsdk.ImcException('Boom')
+
+ self.assertRaises(exception.CIMCException,
+ cimc_common.handle_login,
+ task, handle, info)
+
+ handle.login.assert_called_once_with(
+ self.node.driver_info['cimc_address'],
+ self.node.driver_info['cimc_username'],
+ self.node.driver_info['cimc_password'])
+
+
+class CIMCHandleTestCase(CIMCBaseTestCase):
+
+ @mock.patch.object(imcsdk, 'ImcHandle', autospec=True)
+ @mock.patch.object(cimc_common, 'handle_login', autospec=True)
+ def test_cimc_handle(self, mock_login, mock_handle):
+ mo_hand = mock.MagicMock()
+ mo_hand.username = self.node.driver_info.get('cimc_username')
+ mo_hand.password = self.node.driver_info.get('cimc_password')
+ mo_hand.name = self.node.driver_info.get('cimc_address')
+ mock_handle.return_value = mo_hand
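+ # The mocked ImcHandle mirrors the node's CIMC credentials; the context
+ # manager should pass it to handle_login and log it out on exit.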
+ info = cimc_common.parse_driver_info(self.node)
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with cimc_common.cimc_handle(task) as handle:
+ self.assertEqual(handle, mock_handle.return_value)
+
+ mock_login.assert_called_once_with(task, mock_handle.return_value,
+ info)
+ mock_handle.return_value.logout.assert_called_once_with()
diff --git a/ironic/tests/unit/drivers/cimc/test_management.py b/ironic/tests/unit/drivers/cimc/test_management.py
new file mode 100644
index 000000000..49f42a386
--- /dev/null
+++ b/ironic/tests/unit/drivers/cimc/test_management.py
@@ -0,0 +1,126 @@
+# Copyright 2015, Cisco Systems.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+
+from oslo_utils import importutils
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.cimc import common
+from ironic.tests.unit.drivers.cimc import test_common
+
+imcsdk = importutils.try_import('ImcSdk')
+
+
+@mock.patch.object(common, 'cimc_handle', autospec=True)
+class CIMCManagementTestCase(test_common.CIMCBaseTestCase):
+
+ def test_get_properties(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertEqual(common.COMMON_PROPERTIES,
+ task.driver.management.get_properties())
+
+ @mock.patch.object(common, "parse_driver_info", autospec=True)
+ def test_validate(self, mock_driver_info, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.validate(task)
+ mock_driver_info.assert_called_once_with(task.node)
+
+ def test_get_supported_boot_devices(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ expected = [boot_devices.PXE, boot_devices.DISK,
+ boot_devices.CDROM]
+ result = task.driver.management.get_supported_boot_devices(task)
+ self.assertEqual(sorted(expected), sorted(result))
+
+ def test_get_boot_device(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ handle.xml_query.return_value.error_code = None
+ mock_dev = mock.MagicMock()
+ mock_dev.Order = 1
+ mock_dev.Rn = 'storage-read-write'
+ handle.xml_query().OutConfigs.child[0].child = [mock_dev]
+
+ device = task.driver.management.get_boot_device(task)
+ self.assertEqual(
+ {'boot_device': boot_devices.DISK, 'persistent': True},
+ device)
+
+ def test_get_boot_device_fail(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ # An error code from the CIMC query should surface as a
+ # CIMCException rather than a boot device.
+ handle.xml_query.return_value.error_code = "404"
+
+ self.assertRaises(exception.CIMCException,
+ task.driver.management.get_boot_device,
+ task)
+
+ def test_set_boot_device(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ handle.xml_query.return_value.error_code = None
+ task.driver.management.set_boot_device(task, boot_devices.DISK)
+ method = imcsdk.ImcCore.ExternalMethod("ConfigConfMo")
+ method.Cookie = handle.cookie
+ method.Dn = "sys/rack-unit-1/boot-policy"
+ method.InHierarchical = "true"
+
+ config = imcsdk.Imc.ConfigConfig()
+
+ bootMode = imcsdk.ImcCore.ManagedObject('lsbootStorage')
+ bootMode.set_attr("access", 'read-write')
+ bootMode.set_attr("type", 'storage')
+ bootMode.set_attr("Rn", 'storage-read-write')
+ bootMode.set_attr("order", "1")
+
+ config.add_child(bootMode)
+ method.InConfig = config
+
+ handle.xml_query.assert_called_once_with(
+ method, imcsdk.WriteXmlOption.DIRTY)
+
+ def test_set_boot_device_fail(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ method = imcsdk.ImcCore.ExternalMethod("ConfigConfMo")
+ handle.xml_query.return_value.error_code = "404"
+
+ self.assertRaises(exception.CIMCException,
+ task.driver.management.set_boot_device,
+ task, boot_devices.DISK)
+
+ handle.xml_query.assert_called_once_with(
+ method, imcsdk.WriteXmlOption.DIRTY)
+
+ def test_get_sensors_data(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(NotImplementedError,
+ task.driver.management.get_sensors_data, task)
diff --git a/ironic/tests/unit/drivers/cimc/test_power.py b/ironic/tests/unit/drivers/cimc/test_power.py
new file mode 100644
index 000000000..57ac5d97f
--- /dev/null
+++ b/ironic/tests/unit/drivers/cimc/test_power.py
@@ -0,0 +1,302 @@
+# Copyright 2015, Cisco Systems.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+
+from oslo_config import cfg
+from oslo_utils import importutils
+
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules.cimc import common
+from ironic.drivers.modules.cimc import power
+from ironic.tests.unit.drivers.cimc import test_common
+
+imcsdk = importutils.try_import('ImcSdk')
+
+CONF = cfg.CONF
+
+
+@mock.patch.object(common, 'cimc_handle', autospec=True)
+class WaitForStateChangeTestCase(test_common.CIMCBaseTestCase):
+
+ def setUp(self):
+ super(WaitForStateChangeTestCase, self).setUp()
+ CONF.set_override('max_retry', 2, 'cimc')
+ CONF.set_override('action_interval', 0, 'cimc')
+
+ def test__wait_for_state_change(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ mock_rack_unit = mock.MagicMock()
+ mock_rack_unit.get_attr.return_value = (
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_ON)
+
+ handle.get_imc_managedobject.return_value = [mock_rack_unit]
+
+ state = power._wait_for_state_change(states.POWER_ON, task)
+
+ handle.get_imc_managedobject.assert_called_once_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+ self.assertEqual(state, states.POWER_ON)
+
+ def test__wait_for_state_change_fail(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ mock_rack_unit = mock.MagicMock()
+ mock_rack_unit.get_attr.return_value = (
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_OFF)
+
+ handle.get_imc_managedobject.return_value = [mock_rack_unit]
+
+ state = power._wait_for_state_change(states.POWER_ON, task)
+
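+ # With max_retry=2 the helper polls twice; the unit keeps reporting
+ # POWER_OFF, so it gives up and returns ERROR.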
+ calls = [
+ mock.call(None, None, params={"Dn": "sys/rack-unit-1"}),
+ mock.call(None, None, params={"Dn": "sys/rack-unit-1"})
+ ]
+ handle.get_imc_managedobject.assert_has_calls(calls)
+ self.assertEqual(state, states.ERROR)
+
+ def test__wait_for_state_change_imc_exception(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ handle.get_imc_managedobject.side_effect = (
+ imcsdk.ImcException('Boom'))
+
+ self.assertRaises(
+ exception.CIMCException,
+ power._wait_for_state_change, states.POWER_ON, task)
+
+ handle.get_imc_managedobject.assert_called_once_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+
+@mock.patch.object(common, 'cimc_handle', autospec=True)
+class PowerTestCase(test_common.CIMCBaseTestCase):
+
+ def test_get_properties(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertEqual(common.COMMON_PROPERTIES,
+ task.driver.power.get_properties())
+
+ @mock.patch.object(common, "parse_driver_info", autospec=True)
+ def test_validate(self, mock_driver_info, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.validate(task)
+ mock_driver_info.assert_called_once_with(task.node)
+
+ def test_get_power_state(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ mock_rack_unit = mock.MagicMock()
+ mock_rack_unit.get_attr.return_value = (
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_ON)
+
+ handle.get_imc_managedobject.return_value = [mock_rack_unit]
+
+ state = task.driver.power.get_power_state(task)
+
+ handle.get_imc_managedobject.assert_called_once_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+ self.assertEqual(states.POWER_ON, state)
+
+ def test_get_power_state_fail(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ mock_rack_unit = mock.MagicMock()
+ mock_rack_unit.get_attr.return_value = (
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_ON)
+
+ handle.get_imc_managedobject.side_effect = (
+ imcsdk.ImcException("boom"))
+
+ self.assertRaises(exception.CIMCException,
+ task.driver.power.get_power_state, task)
+
+ handle.get_imc_managedobject.assert_called_once_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+ def test_set_power_state_invalid_state(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.set_power_state,
+ task, states.ERROR)
+
+ def test_set_power_state_reboot_ok(self, mock_handle):
+ hri = imcsdk.ComputeRackUnit.CONST_ADMIN_POWER_HARD_RESET_IMMEDIATE
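+ # hri: the "hard reset immediate" admin power action expected for REBOOT.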
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ mock_rack_unit = mock.MagicMock()
+ mock_rack_unit.get_attr.side_effect = [
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_OFF,
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_ON
+ ]
+ handle.get_imc_managedobject.return_value = [mock_rack_unit]
+
+ task.driver.power.set_power_state(task, states.REBOOT)
+
+ handle.set_imc_managedobject.assert_called_once_with(
+ None, class_id="ComputeRackUnit",
+ params={
+ imcsdk.ComputeRackUnit.ADMIN_POWER: hri,
+ imcsdk.ComputeRackUnit.DN: "sys/rack-unit-1"
+ })
+
+ handle.get_imc_managedobject.assert_called_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+ def test_set_power_state_reboot_fail(self, mock_handle):
+ hri = imcsdk.ComputeRackUnit.CONST_ADMIN_POWER_HARD_RESET_IMMEDIATE
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ handle.get_imc_managedobject.side_effect = (
+ imcsdk.ImcException("boom"))
+
+ self.assertRaises(exception.CIMCException,
+ task.driver.power.set_power_state,
+ task, states.REBOOT)
+
+ handle.set_imc_managedobject.assert_called_once_with(
+ None, class_id="ComputeRackUnit",
+ params={
+ imcsdk.ComputeRackUnit.ADMIN_POWER: hri,
+ imcsdk.ComputeRackUnit.DN: "sys/rack-unit-1"
+ })
+
+ handle.get_imc_managedobject.assert_called_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+ def test_set_power_state_on_ok(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ mock_rack_unit = mock.MagicMock()
+ mock_rack_unit.get_attr.side_effect = [
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_OFF,
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_ON
+ ]
+ handle.get_imc_managedobject.return_value = [mock_rack_unit]
+
+ task.driver.power.set_power_state(task, states.POWER_ON)
+
+ handle.set_imc_managedobject.assert_called_once_with(
+ None, class_id="ComputeRackUnit",
+ params={
+ imcsdk.ComputeRackUnit.ADMIN_POWER:
+ imcsdk.ComputeRackUnit.CONST_ADMIN_POWER_UP,
+ imcsdk.ComputeRackUnit.DN: "sys/rack-unit-1"
+ })
+
+ handle.get_imc_managedobject.assert_called_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+ def test_set_power_state_on_fail(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ handle.get_imc_managedobject.side_effect = (
+ imcsdk.ImcException("boom"))
+
+ self.assertRaises(exception.CIMCException,
+ task.driver.power.set_power_state,
+ task, states.POWER_ON)
+
+ handle.set_imc_managedobject.assert_called_once_with(
+ None, class_id="ComputeRackUnit",
+ params={
+ imcsdk.ComputeRackUnit.ADMIN_POWER:
+ imcsdk.ComputeRackUnit.CONST_ADMIN_POWER_UP,
+ imcsdk.ComputeRackUnit.DN: "sys/rack-unit-1"
+ })
+
+ handle.get_imc_managedobject.assert_called_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+ def test_set_power_state_off_ok(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ mock_rack_unit = mock.MagicMock()
+ mock_rack_unit.get_attr.side_effect = [
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_ON,
+ imcsdk.ComputeRackUnit.CONST_OPER_POWER_OFF
+ ]
+ handle.get_imc_managedobject.return_value = [mock_rack_unit]
+
+ task.driver.power.set_power_state(task, states.POWER_OFF)
+
+ handle.set_imc_managedobject.assert_called_once_with(
+ None, class_id="ComputeRackUnit",
+ params={
+ imcsdk.ComputeRackUnit.ADMIN_POWER:
+ imcsdk.ComputeRackUnit.CONST_ADMIN_POWER_DOWN,
+ imcsdk.ComputeRackUnit.DN: "sys/rack-unit-1"
+ })
+
+ handle.get_imc_managedobject.assert_called_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+ def test_set_power_state_off_fail(self, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ with mock_handle(task) as handle:
+ handle.get_imc_managedobject.side_effect = (
+ imcsdk.ImcException("boom"))
+
+ self.assertRaises(exception.CIMCException,
+ task.driver.power.set_power_state,
+ task, states.POWER_OFF)
+
+ handle.set_imc_managedobject.assert_called_once_with(
+ None, class_id="ComputeRackUnit",
+ params={
+ imcsdk.ComputeRackUnit.ADMIN_POWER:
+ imcsdk.ComputeRackUnit.CONST_ADMIN_POWER_DOWN,
+ imcsdk.ComputeRackUnit.DN: "sys/rack-unit-1"
+ })
+
+ handle.get_imc_managedobject.assert_called_with(
+ None, None, params={"Dn": "sys/rack-unit-1"})
+
+ @mock.patch.object(power.Power, "set_power_state", autospec=True)
+ @mock.patch.object(power.Power, "get_power_state", autospec=True)
+ def test_reboot_on(self, mock_get_state, mock_set_state, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_get_state.return_value = states.POWER_ON
+ task.driver.power.reboot(task)
+ mock_set_state.assert_called_with(mock.ANY, task, states.REBOOT)
+
+ @mock.patch.object(power.Power, "set_power_state", autospec=True)
+ @mock.patch.object(power.Power, "get_power_state", autospec=True)
+ def test_reboot_off(self, mock_get_state, mock_set_state, mock_handle):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_get_state.return_value = states.POWER_OFF
+ task.driver.power.reboot(task)
+ mock_set_state.assert_called_with(mock.ANY, task, states.POWER_ON)
diff --git a/ironic/tests/unit/drivers/drac/__init__.py b/ironic/tests/unit/drivers/drac/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/drivers/drac/__init__.py
diff --git a/ironic/tests/unit/drivers/drac/bios_wsman_mock.py b/ironic/tests/unit/drivers/drac/bios_wsman_mock.py
new file mode 100644
index 000000000..245d27c01
--- /dev/null
+++ b/ironic/tests/unit/drivers/drac/bios_wsman_mock.py
@@ -0,0 +1,273 @@
+#
+# Copyright 2015 Dell, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for DRAC BIOS interface
+"""
+
+from ironic.drivers.modules.drac import resource_uris
+
+Enumerations = {
+ resource_uris.DCIM_BIOSEnumeration: {
+ 'XML': """<ns0:Envelope
+xmlns:ns0="http://www.w3.org/2003/05/soap-envelope"
+xmlns:ns1="http://schemas.xmlsoap.org/ws/2004/08/addressing"
+xmlns:ns2="http://schemas.xmlsoap.org/ws/2004/09/enumeration"
+xmlns:ns3="http://schemas.dmtf.org/wbem/wsman/1/wsman.xsd"
+xmlns:ns4="http://schemas.dell.com/wbem/wscim/1/cim-schema/2/DCIM_BIOSEnumeration"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <ns0:Header>
+ <ns1:To>http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous
+</ns1:To>
+ <ns1:Action>
+http://schemas.xmlsoap.org/ws/2004/09/enumeration/EnumerateResponse</ns1:Action>
+ <ns1:RelatesTo>uuid:1f5cd907-0e6f-1e6f-8002-4f266e3acab8</ns1:RelatesTo>
+ <ns1:MessageID>uuid:219ca357-0e6f-1e6f-a828-f0e4fb722ab8</ns1:MessageID>
+ </ns0:Header>
+ <ns0:Body>
+ <ns2:EnumerateResponse>
+ <ns3:Items>
+ <ns4:DCIM_BIOSEnumeration>
+ <ns4:AttributeName>MemTest</ns4:AttributeName>
+ <ns4:CurrentValue>Disabled</ns4:CurrentValue>
+ <ns4:Dependency xsi:nil="true" />
+ <ns4:DisplayOrder>310</ns4:DisplayOrder>
+ <ns4:FQDD>BIOS.Setup.1-1</ns4:FQDD>
+ <ns4:GroupDisplayName>Memory Settings</ns4:GroupDisplayName>
+ <ns4:GroupID>MemSettings</ns4:GroupID>
+ <ns4:InstanceID>BIOS.Setup.1-1:MemTest</ns4:InstanceID>
+ <ns4:IsReadOnly>false</ns4:IsReadOnly>
+ <ns4:PendingValue xsi:nil="true" />
+ <ns4:PossibleValues>Enabled</ns4:PossibleValues>
+ <ns4:PossibleValues>Disabled</ns4:PossibleValues>
+ </ns4:DCIM_BIOSEnumeration>
+ <ns4:DCIM_BIOSEnumeration>
+ <ns4:AttributeDisplayName>C States</ns4:AttributeDisplayName>
+ <ns4:AttributeName>ProcCStates</ns4:AttributeName>
+ <ns4:CurrentValue>Disabled</ns4:CurrentValue>
+ <ns4:DisplayOrder>1706</ns4:DisplayOrder>
+ <ns4:FQDD>BIOS.Setup.1-1</ns4:FQDD>
+ <ns4:GroupDisplayName>System Profile Settings</ns4:GroupDisplayName>
+ <ns4:GroupID>SysProfileSettings</ns4:GroupID>
+ <ns4:InstanceID>BIOS.Setup.1-1:ProcCStates</ns4:InstanceID>
+ <ns4:IsReadOnly>true</ns4:IsReadOnly>
+ <ns4:PendingValue xsi:nil="true" />
+ <ns4:PossibleValues>Enabled</ns4:PossibleValues>
+ <ns4:PossibleValues>Disabled</ns4:PossibleValues>
+ </ns4:DCIM_BIOSEnumeration>
+ </ns3:Items>
+ </ns2:EnumerateResponse>
+ </ns0:Body>
+ </ns0:Envelope>""",
+ 'Dict': {
+ 'MemTest': {
+ 'name': 'MemTest',
+ 'current_value': 'Disabled',
+ 'pending_value': None,
+ 'read_only': False,
+ 'possible_values': ['Disabled', 'Enabled']},
+ 'ProcCStates': {
+ 'name': 'ProcCStates',
+ 'current_value': 'Disabled',
+ 'pending_value': None,
+ 'read_only': True,
+ 'possible_values': ['Disabled', 'Enabled']}}},
+ resource_uris.DCIM_BIOSString: {
+ 'XML': """<ns0:Envelope
+xmlns:ns0="http://www.w3.org/2003/05/soap-envelope"
+xmlns:ns1="http://schemas.xmlsoap.org/ws/2004/08/addressing"
+xmlns:ns2="http://schemas.xmlsoap.org/ws/2004/09/enumeration"
+xmlns:ns3="http://schemas.dmtf.org/wbem/wsman/1/wsman.xsd"
+xmlns:ns4="http://schemas.dell.com/wbem/wscim/1/cim-schema/2/DCIM_BIOSString"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <ns0:Header>
+ <ns1:To>http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous
+</ns1:To>
+ <ns1:Action>
+http://schemas.xmlsoap.org/ws/2004/09/enumeration/EnumerateResponse
+</ns1:Action>
+ <ns1:RelatesTo>uuid:1f877bcb-0e6f-1e6f-8004-4f266e3acab8</ns1:RelatesTo>
+ <ns1:MessageID>uuid:21bea321-0e6f-1e6f-a82b-f0e4fb722ab8</ns1:MessageID>
+ </ns0:Header>
+ <ns0:Body>
+ <ns2:EnumerateResponse>
+ <ns3:Items>
+ <ns4:DCIM_BIOSString>
+ <ns4:AttributeName>SystemModelName</ns4:AttributeName>
+ <ns4:CurrentValue>PowerEdge R630</ns4:CurrentValue>
+ <ns4:Dependency xsi:nil="true" />
+ <ns4:DisplayOrder>201</ns4:DisplayOrder>
+ <ns4:FQDD>BIOS.Setup.1-1</ns4:FQDD>
+ <ns4:GroupDisplayName>System Information</ns4:GroupDisplayName>
+ <ns4:GroupID>SysInformation</ns4:GroupID>
+ <ns4:InstanceID>BIOS.Setup.1-1:SystemModelName</ns4:InstanceID>
+ <ns4:IsReadOnly>true</ns4:IsReadOnly>
+ <ns4:MaxLength>40</ns4:MaxLength>
+ <ns4:MinLength>0</ns4:MinLength>
+ <ns4:PendingValue xsi:nil="true" />
+ <ns4:ValueExpression xsi:nil="true" />
+ </ns4:DCIM_BIOSString>
+ <ns4:DCIM_BIOSString>
+ <ns4:AttributeName>SystemModelName2</ns4:AttributeName>
+ <ns4:CurrentValue>PowerEdge R630</ns4:CurrentValue>
+ <ns4:Dependency xsi:nil="true" />
+ <ns4:DisplayOrder>201</ns4:DisplayOrder>
+ <ns4:FQDD>BIOS.Setup.1-1</ns4:FQDD>
+ <ns4:GroupDisplayName>System Information</ns4:GroupDisplayName>
+ <ns4:GroupID>SysInformation</ns4:GroupID>
+ <ns4:InstanceID>BIOS.Setup.1-1:SystemModelName2</ns4:InstanceID>
+ <ns4:IsReadOnly>true</ns4:IsReadOnly>
+ <ns4:MaxLength>40</ns4:MaxLength>
+ <ns4:MinLength>0</ns4:MinLength>
+ <ns4:PendingValue xsi:nil="true" />
+ </ns4:DCIM_BIOSString>
+ <ns4:DCIM_BIOSString>
+ <ns4:AttributeDisplayName>Asset Tag</ns4:AttributeDisplayName>
+ <ns4:AttributeName>AssetTag</ns4:AttributeName>
+ <ns4:CurrentValue xsi:nil="true" />
+ <ns4:Dependency xsi:nil="true" />
+ <ns4:DisplayOrder>1903</ns4:DisplayOrder>
+ <ns4:FQDD>BIOS.Setup.1-1</ns4:FQDD>
+ <ns4:GroupDisplayName>Miscellaneous Settings</ns4:GroupDisplayName>
+ <ns4:GroupID>MiscSettings</ns4:GroupID>
+ <ns4:InstanceID>BIOS.Setup.1-1:AssetTag</ns4:InstanceID>
+ <ns4:IsReadOnly>false</ns4:IsReadOnly>
+ <ns4:MaxLength>63</ns4:MaxLength>
+ <ns4:MinLength>0</ns4:MinLength>
+ <ns4:PendingValue xsi:nil="true" />
+ <ns4:ValueExpression>^[ -~]{0,63}$</ns4:ValueExpression>
+ </ns4:DCIM_BIOSString>
+ </ns3:Items>
+ <ns2:EnumerationContext />
+ <ns3:EndOfSequence />
+ </ns2:EnumerateResponse>
+ </ns0:Body>
+ </ns0:Envelope>""",
+ 'Dict': {
+ 'SystemModelName': {
+ 'name': 'SystemModelName',
+ 'current_value': 'PowerEdge R630',
+ 'pending_value': None,
+ 'read_only': True,
+ 'min_length': 0,
+ 'max_length': 40,
+ 'pcre_regex': None},
+ 'SystemModelName2': {
+ 'name': 'SystemModelName2',
+ 'current_value': 'PowerEdge R630',
+ 'pending_value': None,
+ 'read_only': True,
+ 'min_length': 0,
+ 'max_length': 40,
+ 'pcre_regex': None},
+ 'AssetTag': {
+ 'name': 'AssetTag',
+ 'current_value': None,
+ 'pending_value': None,
+ 'read_only': False,
+ 'min_length': 0,
+ 'max_length': 63,
+ 'pcre_regex': '^[ -~]{0,63}$'}}},
+ resource_uris.DCIM_BIOSInteger: {
+ 'XML': """<ns0:Envelope
+xmlns:ns0="http://www.w3.org/2003/05/soap-envelope"
+xmlns:ns1="http://schemas.xmlsoap.org/ws/2004/08/addressing"
+xmlns:ns2="http://schemas.xmlsoap.org/ws/2004/09/enumeration"
+xmlns:ns3="http://schemas.dmtf.org/wbem/wsman/1/wsman.xsd"
+xmlns:ns4="http://schemas.dell.com/wbem/wscim/1/cim-schema/2/DCIM_BIOSInteger"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <ns0:Header>
+ <ns1:To>
+http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous</ns1:To>
+ <ns1:Action>
+http://schemas.xmlsoap.org/ws/2004/09/enumeration/EnumerateResponse</ns1:Action>
+ <ns1:RelatesTo>uuid:1fa60792-0e6f-1e6f-8005-4f266e3acab8</ns1:RelatesTo>
+ <ns1:MessageID>uuid:21ccf01d-0e6f-1e6f-a82d-f0e4fb722ab8</ns1:MessageID>
+ </ns0:Header>
+ <ns0:Body>
+ <ns2:EnumerateResponse>
+ <ns3:Items>
+ <ns4:DCIM_BIOSInteger>
+ <ns4:AttributeName>Proc1NumCores</ns4:AttributeName>
+ <ns4:CurrentValue>8</ns4:CurrentValue>
+ <ns4:Dependency xsi:nil="true" />
+ <ns4:DisplayOrder>439</ns4:DisplayOrder>
+ <ns4:FQDD>BIOS.Setup.1-1</ns4:FQDD>
+ <ns4:GroupDisplayName>Processor Settings</ns4:GroupDisplayName>
+ <ns4:GroupID>ProcSettings</ns4:GroupID>
+ <ns4:InstanceID>BIOS.Setup.1-1:Proc1NumCores</ns4:InstanceID>
+ <ns4:IsReadOnly>true</ns4:IsReadOnly>
+ <ns4:LowerBound>0</ns4:LowerBound>
+ <ns4:PendingValue xsi:nil="true" />
+ <ns4:UpperBound>65535</ns4:UpperBound>
+ </ns4:DCIM_BIOSInteger>
+ <ns4:DCIM_BIOSInteger>
+ <ns4:AttributeName>AcPwrRcvryUserDelay</ns4:AttributeName>
+ <ns4:CurrentValue>60</ns4:CurrentValue>
+ <ns4:DisplayOrder>1825</ns4:DisplayOrder>
+ <ns4:FQDD>BIOS.Setup.1-1</ns4:FQDD>
+ <ns4:GroupDisplayName>System Security</ns4:GroupDisplayName>
+ <ns4:GroupID>SysSecurity</ns4:GroupID>
+ <ns4:InstanceID>BIOS.Setup.1-1:AcPwrRcvryUserDelay</ns4:InstanceID>
+ <ns4:IsReadOnly>false</ns4:IsReadOnly>
+ <ns4:LowerBound>60</ns4:LowerBound>
+ <ns4:PendingValue xsi:nil="true" />
+ <ns4:UpperBound>240</ns4:UpperBound>
+ </ns4:DCIM_BIOSInteger>
+ </ns3:Items>
+ <ns2:EnumerationContext />
+ <ns3:EndOfSequence />
+ </ns2:EnumerateResponse>
+ </ns0:Body>
+ </ns0:Envelope>""",
+ 'Dict': {
+ 'Proc1NumCores': {
+ 'name': 'Proc1NumCores',
+ 'current_value': 8,
+ 'pending_value': None,
+ 'read_only': True,
+ 'lower_bound': 0,
+ 'upper_bound': 65535},
+ 'AcPwrRcvryUserDelay': {
+ 'name': 'AcPwrRcvryUserDelay',
+ 'current_value': 60,
+ 'pending_value': None,
+ 'read_only': False,
+ 'lower_bound': 60,
+ 'upper_bound': 240}}}}
+
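+# Canned SetAttributes response reporting success with a pending reboot,
+# used by the DRAC BIOS set_config tests via the mocked invoke() call.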
+Invoke_Commit = """<ns0:Envelope
+xmlns:ns0="http://www.w3.org/2003/05/soap-envelope"
+xmlns:ns1="http://schemas.xmlsoap.org/ws/2004/08/addressing"
+xmlns:ns2="http://schemas.dell.com/wbem/wscim/1/cim-schema/2/DCIM_BIOSService">
+ <ns0:Header>
+ <ns1:To>
+http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous</ns1:To>
+ <ns1:Action>
+http://schemas.dell.com/wbem/wscim/1/cim-schema/2/DCIM_BIOSService/SetAttributesResponse</ns1:Action>
+ <ns1:RelatesTo>uuid:42baa476-0ee9-1ee9-8020-4f266e3acab8</ns1:RelatesTo>
+ <ns1:MessageID>uuid:fadae2f8-0eea-1eea-9626-76a8f1d9bed4</ns1:MessageID>
+ </ns0:Header>
+ <ns0:Body>
+ <ns2:SetAttributes_OUTPUT>
+ <ns2:Message>The command was successful.</ns2:Message>
+ <ns2:MessageID>BIOS001</ns2:MessageID>
+ <ns2:RebootRequired>Yes</ns2:RebootRequired>
+ <ns2:ReturnValue>0</ns2:ReturnValue>
+ <ns2:SetResult>Set PendingValue</ns2:SetResult>
+ </ns2:SetAttributes_OUTPUT>
+ </ns0:Body>
+</ns0:Envelope>"""
diff --git a/ironic/tests/unit/drivers/drac/test_bios.py b/ironic/tests/unit/drivers/drac/test_bios.py
new file mode 100644
index 000000000..e3f2c4e39
--- /dev/null
+++ b/ironic/tests/unit/drivers/drac/test_bios.py
@@ -0,0 +1,199 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2015 Dell, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for DRAC BIOS interface
+"""
+
+import mock
+
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.drac import bios
+from ironic.drivers.modules.drac import client as drac_client
+from ironic.drivers.modules.drac import management as drac_mgmt
+from ironic.drivers.modules.drac import resource_uris
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers.drac import bios_wsman_mock
+from ironic.tests.unit.drivers.drac import utils as test_utils
+from ironic.tests.unit.objects import utils as obj_utils
+from six.moves.urllib.parse import unquote
+
+FAKE_DRAC = db_utils.get_test_drac_info()
+
+
+def _base_config(responses=None):
+ # Use a fresh list per call so canned responses never leak between tests.
+ responses = [] if responses is None else responses
+ for resource in [resource_uris.DCIM_BIOSEnumeration,
+ resource_uris.DCIM_BIOSString,
+ resource_uris.DCIM_BIOSInteger]:
+ xml_root = test_utils.mock_wsman_root(
+ bios_wsman_mock.Enumerations[resource]['XML'])
+ responses.append(xml_root)
+ return responses
+
+
+def _set_config(responses=None):
+ responses = [] if responses is None else responses
+ ccj_xml = test_utils.build_soap_xml([{'DCIM_LifecycleJob':
+ {'Name': 'fake'}}],
+ resource_uris.DCIM_LifecycleJob)
+ responses.append(test_utils.mock_wsman_root(ccj_xml))
+ return _base_config(responses)
+
+
+def _mock_pywsman_responses(client, responses):
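+ # Feed the canned XML roots to successive pywsman enumerate() calls.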
+ mpw = client.Client.return_value
+ mpw.enumerate.side_effect = responses
+ return mpw
+
+
+@mock.patch.object(drac_client, 'pywsman')
+class DracBiosTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(DracBiosTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_drac')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=FAKE_DRAC)
+
+ def test_get_config(self, client):
+ _mock_pywsman_responses(client, _base_config())
+ expected = {}
+ for resource in [resource_uris.DCIM_BIOSEnumeration,
+ resource_uris.DCIM_BIOSString,
+ resource_uris.DCIM_BIOSInteger]:
+ expected.update(bios_wsman_mock.Enumerations[resource]['Dict'])
+ result = bios.get_config(self.node)
+ self.assertEqual(expected, result)
+
+ def test_set_config_empty(self, client):
+ _mock_pywsman_responses(client, _set_config())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ res = bios.set_config(task)
+ self.assertFalse(res)
+
+ def test_set_config_nochange(self, client):
+ _mock_pywsman_responses(client, _set_config())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
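+ # Every attribute is set to its current value, so no WS-Man update is
+ # needed and set_config reports no change.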
+ res = bios.set_config(task,
+ MemTest='Disabled',
+ ProcCStates='Disabled',
+ SystemModelName='PowerEdge R630',
+ AssetTag=None,
+ Proc1NumCores=8,
+ AcPwrRcvryUserDelay=60)
+ self.assertFalse(res)
+
+ def test_set_config_ro(self, client):
+ _mock_pywsman_responses(client, _set_config())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ self.assertRaises(exception.DracOperationFailed,
+ bios.set_config, task,
+ ProcCStates="Enabled")
+
+ def test_set_config_enum_invalid(self, client):
+ _mock_pywsman_responses(client, _set_config())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ self.assertRaises(exception.DracOperationFailed,
+ bios.set_config, task,
+ MemTest="Never")
+
+ def test_set_config_string_toolong(self, client):
+ _mock_pywsman_responses(client, _set_config())
+ tag = ('Never have I seen such a silly long asset tag! '
+ 'It is really rather ridiculous, don\'t you think?')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ self.assertRaises(exception.DracOperationFailed,
+ bios.set_config, task,
+ AssetTag=tag)
+
+ def test_set_config_string_nomatch(self, client):
+ _mock_pywsman_responses(client, _set_config())
+ tag = unquote('%80')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ self.assertRaises(exception.DracOperationFailed,
+ bios.set_config, task,
+ AssetTag=tag)
+
+ def test_set_config_integer_toosmall(self, client):
+ _mock_pywsman_responses(client, _set_config())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ self.assertRaises(exception.DracOperationFailed,
+ bios.set_config, task,
+ AcPwrRcvryUserDelay=0)
+
+ def test_set_config_integer_toobig(self, client):
+ _mock_pywsman_responses(client, _set_config())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ self.assertRaises(exception.DracOperationFailed,
+ bios.set_config, task,
+ AcPwrRcvryUserDelay=600)
+
+ def test_set_config_needreboot(self, client):
+ mock_pywsman = _mock_pywsman_responses(client, _set_config())
+ invoke_xml = test_utils.mock_wsman_root(
+ bios_wsman_mock.Invoke_Commit)
+ # TODO(victor-lowther) This needs more work.
+ # Specifically, we will need to verify that
+ # invoke was handed the XML blob we expected.
+ mock_pywsman.invoke.return_value = invoke_xml
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ res = bios.set_config(task,
+ AssetTag="An Asset Tag",
+ MemTest="Enabled")
+ self.assertTrue(res)
+
+ @mock.patch.object(drac_mgmt, 'check_for_config_job',
+ spec_set=True, autospec=True)
+ @mock.patch.object(drac_mgmt, 'create_config_job', spec_set=True,
+ autospec=True)
+ def test_commit_config(self, mock_ccj, mock_cfcj, client):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ bios.commit_config(task)
+ self.assertTrue(mock_cfcj.called)
+ self.assertTrue(mock_ccj.called)
+
+ @mock.patch.object(drac_client.Client, 'wsman_invoke', spec_set=True,
+ autospec=True)
+ def test_abandon_config(self, mock_wi, client):
+ _mock_pywsman_responses(client, _set_config())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ bios.abandon_config(task)
+ self.assertTrue(mock_wi.called)
diff --git a/ironic/tests/unit/drivers/drac/test_client.py b/ironic/tests/unit/drivers/drac/test_client.py
new file mode 100644
index 000000000..93f8fbb24
--- /dev/null
+++ b/ironic/tests/unit/drivers/drac/test_client.py
@@ -0,0 +1,256 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for DRAC client wrapper.
+"""
+
+import time
+from xml.etree import ElementTree
+
+import mock
+
+from ironic.common import exception
+from ironic.drivers.modules.drac import client as drac_client
+from ironic.tests.unit import base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers.drac import utils as test_utils
+from ironic.tests.unit.drivers import third_party_driver_mock_specs \
+ as mock_specs
+
+INFO_DICT = db_utils.get_test_drac_info()
+
+
+@mock.patch.object(drac_client, 'pywsman', spec_set=mock_specs.PYWSMAN_SPEC)
+class DracClientTestCase(base.TestCase):
+
+ def setUp(self):
+ super(DracClientTestCase, self).setUp()
+ self.resource_uri = 'http://foo/wsman'
+
+ def test_wsman_enumerate(self, mock_client_pywsman):
+ mock_xml = test_utils.mock_wsman_root('<test></test>')
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.enumerate.return_value = mock_xml
+
+ client = drac_client.Client(**INFO_DICT)
+ client.wsman_enumerate(self.resource_uri)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_options.set_flags.assert_called_once_with(
+ mock_client_pywsman.FLAG_ENUMERATION_OPTIMIZATION)
+ mock_options.set_max_elements.assert_called_once_with(100)
+ mock_pywsman_client.enumerate.assert_called_once_with(
+ mock_options, None, self.resource_uri)
+ mock_xml.context.assert_called_once_with()
+
+ @mock.patch.object(time, 'sleep', lambda seconds: None)
+ def test_wsman_enumerate_retry(self, mock_client_pywsman):
+ mock_xml = test_utils.mock_wsman_root('<test></test>')
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.enumerate.side_effect = [None, mock_xml]
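+ # First enumerate returns None, so the client retries once (sleep is
+ # patched out above) before succeeding.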
+
+ client = drac_client.Client(**INFO_DICT)
+ client.wsman_enumerate(self.resource_uri)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_options.set_flags.assert_called_once_with(
+ mock_client_pywsman.FLAG_ENUMERATION_OPTIMIZATION)
+ mock_options.set_max_elements.assert_called_once_with(100)
+ mock_pywsman_client.enumerate.assert_has_calls([
+ mock.call(mock_options, None, self.resource_uri),
+ mock.call(mock_options, None, self.resource_uri)
+ ])
+ mock_xml.context.assert_called_once_with()
+
+ def test_wsman_enumerate_with_additional_pull(self, mock_client_pywsman):
+ mock_root = mock.Mock(spec=['string'])
+ mock_root.string.side_effect = [
+ test_utils.build_soap_xml([{'item1': 'test1'}]),
+ test_utils.build_soap_xml([{'item2': 'test2'}])
+ ]
+ mock_xml = mock.Mock(spec=['context', 'root'])
+ mock_xml.root.return_value = mock_root
+ mock_xml.context.side_effect = [42, 42, None]
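+ # A non-None enumeration context signals more data, so the client keeps
+ # pulling until the context comes back as None and merges the results.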
+
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.enumerate.return_value = mock_xml
+ mock_pywsman_client.pull.return_value = mock_xml
+
+ client = drac_client.Client(**INFO_DICT)
+ result = client.wsman_enumerate(self.resource_uri)
+
+ # assert the XML was merged
+ result_string = ElementTree.tostring(result)
+ self.assertIn(b'<item1>test1</item1>', result_string)
+ self.assertIn(b'<item2>test2</item2>', result_string)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_options.set_flags.assert_called_once_with(
+ mock_client_pywsman.FLAG_ENUMERATION_OPTIMIZATION)
+ mock_options.set_max_elements.assert_called_once_with(100)
+ mock_pywsman_client.enumerate.assert_called_once_with(
+ mock_options, None, self.resource_uri)
+
+ def test_wsman_enumerate_filter_query(self, mock_client_pywsman):
+ mock_xml = test_utils.mock_wsman_root('<test></test>')
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.enumerate.return_value = mock_xml
+
+ client = drac_client.Client(**INFO_DICT)
+ filter_query = 'SELECT * FROM foo'
+ client.wsman_enumerate(self.resource_uri, filter_query=filter_query)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_filter = mock_client_pywsman.Filter.return_value
+ mock_filter.simple.assert_called_once_with(mock.ANY, filter_query)
+ mock_pywsman_client.enumerate.assert_called_once_with(
+ mock_options, mock_filter, self.resource_uri)
+ mock_xml.context.assert_called_once_with()
+
+ def test_wsman_enumerate_invalid_filter_dialect(self, mock_client_pywsman):
+ client = drac_client.Client(**INFO_DICT)
+ self.assertRaises(exception.DracInvalidFilterDialect,
+ client.wsman_enumerate, self.resource_uri,
+ filter_query='foo',
+ filter_dialect='invalid')
+
+ def test_wsman_invoke(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_SUCCESS}], self.resource_uri)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.return_value = mock_xml
+
+ method_name = 'method'
+ client = drac_client.Client(**INFO_DICT)
+ client.wsman_invoke(self.resource_uri, method_name)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_pywsman_client.invoke.assert_called_once_with(
+ mock_options, self.resource_uri, method_name, None)
+
+ @mock.patch.object(time, 'sleep', lambda seconds: None)
+ def test_wsman_invoke_retry(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_SUCCESS}], self.resource_uri)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.side_effect = [None, mock_xml]
+
+ method_name = 'method'
+ client = drac_client.Client(**INFO_DICT)
+ client.wsman_invoke(self.resource_uri, method_name)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_pywsman_client.invoke.assert_has_calls([
+ mock.call(mock_options, self.resource_uri, method_name, None),
+ mock.call(mock_options, self.resource_uri, method_name, None)
+ ])
+
+ def test_wsman_invoke_with_selectors(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_SUCCESS}], self.resource_uri)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.return_value = mock_xml
+
+ method_name = 'method'
+ selectors = {'foo': 'bar'}
+ client = drac_client.Client(**INFO_DICT)
+ client.wsman_invoke(self.resource_uri, method_name,
+ selectors=selectors)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_pywsman_client.invoke.assert_called_once_with(
+ mock_options, self.resource_uri, method_name, None)
+ mock_options.add_selector.assert_called_once_with('foo', 'bar')
+
+ def test_wsman_invoke_with_properties(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_SUCCESS}], self.resource_uri)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.return_value = mock_xml
+
+ method_name = 'method'
+ properties = {'foo': 'bar'}
+ client = drac_client.Client(**INFO_DICT)
+ client.wsman_invoke(self.resource_uri, method_name,
+ properties=properties)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_pywsman_client.invoke.assert_called_once_with(
+ mock_options, self.resource_uri, method_name, None)
+ mock_options.add_property.assert_called_once_with('foo', 'bar')
+
+ def test_wsman_invoke_with_properties_including_a_list(
+ self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_SUCCESS}], self.resource_uri)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.return_value = mock_xml
+ mock_request_xml = mock_client_pywsman.XmlDoc.return_value
+
+ method_name = 'method'
+ properties = {'foo': ['bar', 'baz']}
+ client = drac_client.Client(**INFO_DICT)
+ client.wsman_invoke(self.resource_uri, method_name,
+ properties=properties)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_pywsman_client.invoke.assert_called_once_with(
+ mock_options, self.resource_uri, method_name, mock_request_xml)
+ mock_request_xml.root().add.assert_has_calls([
+ mock.call(self.resource_uri, 'foo', 'bar'),
+ mock.call(self.resource_uri, 'foo', 'baz')
+ ])
+ self.assertEqual(2, mock_request_xml.root().add.call_count)
+
+ def test_wsman_invoke_receives_error_return_value(
+ self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_ERROR,
+ 'Message': 'error message'}],
+ self.resource_uri)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.return_value = mock_xml
+
+ method_name = 'method'
+ client = drac_client.Client(**INFO_DICT)
+ self.assertRaises(exception.DracOperationFailed,
+ client.wsman_invoke, self.resource_uri, method_name)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_pywsman_client.invoke.assert_called_once_with(
+ mock_options, self.resource_uri, method_name, None)
+
+ def test_wsman_invoke_receives_unexpected_return_value(
+ self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': '42'}], self.resource_uri)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.return_value = mock_xml
+
+ method_name = 'method'
+ client = drac_client.Client(**INFO_DICT)
+ self.assertRaises(exception.DracUnexpectedReturnValue,
+ client.wsman_invoke, self.resource_uri, method_name,
+ {}, {}, drac_client.RET_SUCCESS)
+
+ mock_options = mock_client_pywsman.ClientOptions.return_value
+ mock_pywsman_client.invoke.assert_called_once_with(
+ mock_options, self.resource_uri, method_name, None)
diff --git a/ironic/tests/unit/drivers/drac/test_common.py b/ironic/tests/unit/drivers/drac/test_common.py
new file mode 100644
index 000000000..2f2b7b7d4
--- /dev/null
+++ b/ironic/tests/unit/drivers/drac/test_common.py
@@ -0,0 +1,135 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for common methods used by DRAC modules.
+"""
+
+from xml.etree import ElementTree
+
+from testtools.matchers import HasLength
+
+from ironic.common import exception
+from ironic.drivers.modules.drac import common as drac_common
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_drac_info()
+
+
+class DracCommonMethodsTestCase(db_base.DbTestCase):
+
+ def test_parse_driver_info(self):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ info = drac_common.parse_driver_info(node)
+
+ self.assertIsNotNone(info.get('drac_host'))
+ self.assertIsNotNone(info.get('drac_port'))
+ self.assertIsNotNone(info.get('drac_path'))
+ self.assertIsNotNone(info.get('drac_protocol'))
+ self.assertIsNotNone(info.get('drac_username'))
+ self.assertIsNotNone(info.get('drac_password'))
+
+ def test_parse_driver_info_missing_host(self):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ del node.driver_info['drac_host']
+ self.assertRaises(exception.InvalidParameterValue,
+ drac_common.parse_driver_info, node)
+
+ def test_parse_driver_info_missing_port(self):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ del node.driver_info['drac_port']
+
+ info = drac_common.parse_driver_info(node)
+ self.assertEqual(443, info.get('drac_port'))
+
+ def test_parse_driver_info_invalid_port(self):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ node.driver_info['drac_port'] = 'foo'
+ self.assertRaises(exception.InvalidParameterValue,
+ drac_common.parse_driver_info, node)
+
+ def test_parse_driver_info_missing_path(self):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ del node.driver_info['drac_path']
+
+ info = drac_common.parse_driver_info(node)
+ self.assertEqual('/wsman', info.get('drac_path'))
+
+ def test_parse_driver_info_missing_protocol(self):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ del node.driver_info['drac_protocol']
+
+ info = drac_common.parse_driver_info(node)
+ self.assertEqual('https', info.get('drac_protocol'))
+
+ def test_parse_driver_info_missing_username(self):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ del node.driver_info['drac_username']
+ self.assertRaises(exception.InvalidParameterValue,
+ drac_common.parse_driver_info, node)
+
+ def test_parse_driver_info_missing_password(self):
+ node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ del node.driver_info['drac_password']
+ self.assertRaises(exception.InvalidParameterValue,
+ drac_common.parse_driver_info, node)
+
+ def test_find_xml(self):
+ namespace = 'http://fake'
+ value = 'fake_value'
+ test_doc = ElementTree.fromstring("""<Envelope xmlns:ns1="%(ns)s">
+ <Body>
+ <ns1:test_element>%(value)s</ns1:test_element>
+ </Body>
+ </Envelope>""" % {'ns': namespace, 'value': value})
+
+ result = drac_common.find_xml(test_doc, 'test_element', namespace)
+ self.assertEqual(value, result.text)
+
+ def test_find_xml_find_all(self):
+ namespace = 'http://fake'
+ value1 = 'fake_value1'
+ value2 = 'fake_value2'
+ test_doc = ElementTree.fromstring("""<Envelope xmlns:ns1="%(ns)s">
+ <Body>
+ <ns1:test_element>%(value1)s</ns1:test_element>
+ <ns1:cat>meow</ns1:cat>
+ <ns1:test_element>%(value2)s</ns1:test_element>
+ <ns1:dog>bark</ns1:dog>
+ </Body>
+ </Envelope>""" % {'ns': namespace, 'value1': value1,
+ 'value2': value2})
+
+ result = drac_common.find_xml(test_doc, 'test_element',
+ namespace, find_all=True)
+ self.assertThat(result, HasLength(2))
+ result_text = [v.text for v in result]
+ self.assertEqual(sorted([value1, value2]), sorted(result_text))
diff --git a/ironic/tests/unit/drivers/drac/test_management.py b/ironic/tests/unit/drivers/drac/test_management.py
new file mode 100644
index 000000000..249f31bdd
--- /dev/null
+++ b/ironic/tests/unit/drivers/drac/test_management.py
@@ -0,0 +1,461 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2014 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for DRAC ManagementInterface.
+"""
+
+import mock
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.drac import client as drac_client
+from ironic.drivers.modules.drac import common as drac_common
+from ironic.drivers.modules.drac import management as drac_mgmt
+from ironic.drivers.modules.drac import resource_uris
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers.drac import utils as test_utils
+from ironic.tests.unit.drivers import third_party_driver_mock_specs \
+ as mock_specs
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_drac_info()
+
+
+@mock.patch.object(drac_client, 'pywsman', spec_set=mock_specs.PYWSMAN_SPEC)
+class DracManagementInternalMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(DracManagementInternalMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_drac')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+
+ def test__get_next_boot_list(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'DCIM_BootConfigSetting': {'InstanceID': 'IPL',
+ 'IsNext': drac_mgmt.PERSISTENT}}],
+ resource_uris.DCIM_BootConfigSetting)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml
+
+ expected = {'instance_id': 'IPL', 'is_next': drac_mgmt.PERSISTENT}
+ result = drac_mgmt._get_next_boot_list(self.node)
+
+ self.assertEqual(expected, result)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_BootConfigSetting)
+
+ def test__get_next_boot_list_onetime(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'DCIM_BootConfigSetting': {'InstanceID': 'IPL',
+ 'IsNext': drac_mgmt.PERSISTENT}},
+ {'DCIM_BootConfigSetting': {'InstanceID': 'OneTime',
+ 'IsNext': drac_mgmt.ONE_TIME_BOOT}}],
+ resource_uris.DCIM_BootConfigSetting)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml
+
+ expected = {'instance_id': 'OneTime',
+ 'is_next': drac_mgmt.ONE_TIME_BOOT}
+ result = drac_mgmt._get_next_boot_list(self.node)
+
+ self.assertEqual(expected, result)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_BootConfigSetting)
+
+ def test__check_for_config_job(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'DCIM_LifecycleJob': {'Name': 'fake'}}],
+ resource_uris.DCIM_LifecycleJob)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml
+
+ result = drac_mgmt.check_for_config_job(self.node)
+
+ self.assertIsNone(result)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_LifecycleJob)
+
+ def test__check_for_config_job_already_exist(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'DCIM_LifecycleJob': {'Name': 'BIOS.Setup.1-1',
+ 'JobStatus': 'scheduled',
+ 'InstanceID': 'fake'}}],
+ resource_uris.DCIM_LifecycleJob)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml
+
+ self.assertRaises(exception.DracPendingConfigJobExists,
+ drac_mgmt.check_for_config_job, self.node)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_LifecycleJob)
+
+ def test__check_for_config_job_not_exist(self, mock_client_pywsman):
+ job_statuses = ["Completed", "Completed with Errors", "Failed"]
+ for job_status in job_statuses:
+ result_xml = test_utils.build_soap_xml(
+ [{'DCIM_LifecycleJob': {'Name': 'BIOS.Setup.1-1',
+ 'JobStatus': job_status,
+ 'InstanceID': 'fake'}}],
+ resource_uris.DCIM_LifecycleJob)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml
+
+ try:
+ drac_mgmt.check_for_config_job(self.node)
+ except (exception.DracClientError,
+ exception.DracPendingConfigJobExists):
+ self.fail("Failed to detect completed job due to "
+ "\"{}\" job status".format(job_status))
+
+ def test_create_config_job(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_CREATED}],
+ resource_uris.DCIM_BIOSService)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = mock_xml
+
+ result = drac_mgmt.create_config_job(self.node)
+
+ self.assertIsNone(result)
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_BIOSService,
+ 'CreateTargetedConfigJob', None)
+
+ def test_create_config_job_error(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_ERROR,
+ 'Message': 'E_FAKE'}],
+ resource_uris.DCIM_BIOSService)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.invoke.return_value = mock_xml
+
+ self.assertRaises(exception.DracOperationFailed,
+ drac_mgmt.create_config_job, self.node)
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_BIOSService,
+ 'CreateTargetedConfigJob', None)
+
+ def test__get_lifecycle_controller_version(self, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'DCIM_SystemView': {'LifecycleControllerVersion': '42'}}],
+ resource_uris.DCIM_SystemView)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml
+
+ result = drac_mgmt._get_lifecycle_controller_version(self.node)
+
+ self.assertEqual('42', result)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_SystemView)
+
+
+@mock.patch.object(drac_client, 'pywsman', spec_set=mock_specs.PYWSMAN_SPEC)
+class DracManagementTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(DracManagementTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_drac')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_drac',
+ driver_info=INFO_DICT)
+ self.driver = drac_mgmt.DracManagement()
+ self.task = mock.Mock(spec=['node'])
+ self.task.node = self.node
+
+ def test_get_properties(self, mock_client_pywsman):
+ expected = drac_common.COMMON_PROPERTIES
+ self.assertEqual(expected, self.driver.get_properties())
+
+ def test_get_supported_boot_devices(self, mock_client_pywsman):
+ expected = [boot_devices.PXE, boot_devices.DISK, boot_devices.CDROM]
+ self.assertEqual(sorted(expected),
+ sorted(self.driver.
+ get_supported_boot_devices(self.task)))
+
+ @mock.patch.object(drac_mgmt, '_get_next_boot_list', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_lifecycle_controller_version',
+ spec_set=True, autospec=True)
+ def test_get_boot_device(self, mock_glcv, mock_gnbl, mock_client_pywsman):
+ controller_version = '2.1.5.0'
+ mock_glcv.return_value = controller_version
+ mock_gnbl.return_value = {'instance_id': 'OneTime',
+ 'is_next': drac_mgmt.ONE_TIME_BOOT}
+
+ result_xml = test_utils.build_soap_xml(
+ [{'InstanceID': 'HardDisk'}], resource_uris.DCIM_BootSourceSetting)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml
+
+ result = self.driver.get_boot_device(self.task)
+ expected = {'boot_device': boot_devices.DISK, 'persistent': False}
+
+ self.assertEqual(expected, result)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_BootSourceSetting)
+
+ @mock.patch.object(drac_mgmt, '_get_next_boot_list', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_lifecycle_controller_version',
+ spec_set=True, autospec=True)
+ def test_get_boot_device_persistent(self, mock_glcv, mock_gnbl,
+ mock_client_pywsman):
+ controller_version = '2.1.5.0'
+ mock_glcv.return_value = controller_version
+ mock_gnbl.return_value = {'instance_id': 'IPL',
+ 'is_next': drac_mgmt.PERSISTENT}
+
+ result_xml = test_utils.build_soap_xml(
+ [{'InstanceID': 'NIC', 'BootSourceType': 'IPL'}],
+ resource_uris.DCIM_BootSourceSetting)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml
+
+ result = self.driver.get_boot_device(self.task)
+ expected = {'boot_device': boot_devices.PXE, 'persistent': True}
+
+ self.assertEqual(expected, result)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_BootSourceSetting)
+
+ @mock.patch.object(drac_client.Client, 'wsman_enumerate', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_next_boot_list', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_lifecycle_controller_version',
+ spec_set=True, autospec=True)
+ def test_get_boot_device_client_error(self, mock_glcv, mock_gnbl, mock_we,
+ mock_client_pywsman):
+ controller_version = '2.1.5.0'
+ mock_glcv.return_value = controller_version
+ mock_gnbl.return_value = {'instance_id': 'OneTime',
+ 'is_next': drac_mgmt.ONE_TIME_BOOT}
+ mock_we.side_effect = iter([exception.DracClientError('E_FAKE')])
+
+ self.assertRaises(exception.DracClientError,
+ self.driver.get_boot_device, self.task)
+ mock_we.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_BootSourceSetting,
+ filter_query=mock.ANY)
+
+ @mock.patch.object(drac_mgmt, '_get_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_lifecycle_controller_version',
+ spec_set=True, autospec=True)
+ @mock.patch.object(drac_mgmt, 'check_for_config_job', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, 'create_config_job', spec_set=True,
+ autospec=True)
+ def test_set_boot_device(self, mock_ccj, mock_cfcj, mock_glcv, mock_gbd,
+ mock_client_pywsman):
+ controller_version = '2.1.5.0'
+ mock_glcv.return_value = controller_version
+ mock_gbd.return_value = {'boot_device': boot_devices.PXE,
+ 'persistent': True}
+ result_xml_enum = test_utils.build_soap_xml(
+ [{'InstanceID': 'NIC', 'BootSourceType': 'IPL'}],
+ resource_uris.DCIM_BootSourceSetting)
+ result_xml_invk = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_SUCCESS}],
+ resource_uris.DCIM_BootConfigSetting)
+
+ mock_xml_enum = test_utils.mock_wsman_root(result_xml_enum)
+ mock_xml_invk = test_utils.mock_wsman_root(result_xml_invk)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml_enum
+ mock_pywsman.invoke.return_value = mock_xml_invk
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ result = self.driver.set_boot_device(task, boot_devices.PXE)
+
+ self.assertIsNone(result)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_BootSourceSetting)
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_BootConfigSetting,
+ 'ChangeBootOrderByInstanceID', None)
+ mock_glcv.assert_called_once_with(self.node)
+ mock_gbd.assert_called_once_with(self.node, controller_version)
+ mock_cfcj.assert_called_once_with(self.node)
+ mock_ccj.assert_called_once_with(self.node)
+
+ @mock.patch.object(drac_mgmt, '_get_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_lifecycle_controller_version',
+ spec_set=True, autospec=True)
+ @mock.patch.object(drac_mgmt, 'check_for_config_job', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, 'create_config_job', spec_set=True,
+ autospec=True)
+ def test_set_boot_device_fail(self, mock_ccj, mock_cfcj, mock_glcv,
+ mock_gbd, mock_client_pywsman):
+ controller_version = '2.1.5.0'
+ mock_glcv.return_value = controller_version
+ mock_gbd.return_value = {'boot_device': boot_devices.PXE,
+ 'persistent': True}
+ result_xml_enum = test_utils.build_soap_xml(
+ [{'InstanceID': 'NIC', 'BootSourceType': 'IPL'}],
+ resource_uris.DCIM_BootSourceSetting)
+ result_xml_invk = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_ERROR, 'Message': 'E_FAKE'}],
+ resource_uris.DCIM_BootConfigSetting)
+
+ mock_xml_enum = test_utils.mock_wsman_root(result_xml_enum)
+ mock_xml_invk = test_utils.mock_wsman_root(result_xml_invk)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml_enum
+ mock_pywsman.invoke.return_value = mock_xml_invk
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ self.assertRaises(exception.DracOperationFailed,
+ self.driver.set_boot_device, task,
+ boot_devices.PXE)
+
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_BootSourceSetting)
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_BootConfigSetting,
+ 'ChangeBootOrderByInstanceID', None)
+ mock_glcv.assert_called_once_with(self.node)
+ mock_gbd.assert_called_once_with(self.node, controller_version)
+ mock_cfcj.assert_called_once_with(self.node)
+ self.assertFalse(mock_ccj.called)
+
+ @mock.patch.object(drac_mgmt, '_get_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_lifecycle_controller_version',
+ spec_set=True, autospec=True)
+ @mock.patch.object(drac_client.Client, 'wsman_enumerate', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, 'check_for_config_job', spec_set=True,
+ autospec=True)
+ def test_set_boot_device_client_error(self, mock_cfcj, mock_we, mock_glcv,
+ mock_gbd,
+ mock_client_pywsman):
+ controller_version = '2.1.5.0'
+ mock_glcv.return_value = controller_version
+ mock_gbd.return_value = {'boot_device': boot_devices.PXE,
+ 'persistent': True}
+ mock_we.side_effect = iter([exception.DracClientError('E_FAKE')])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ self.assertRaises(exception.DracClientError,
+ self.driver.set_boot_device, task,
+ boot_devices.PXE)
+ mock_glcv.assert_called_once_with(self.node)
+ mock_gbd.assert_called_once_with(self.node, controller_version)
+ mock_we.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_BootSourceSetting,
+ filter_query=mock.ANY)
+
+ @mock.patch.object(drac_mgmt, '_get_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_lifecycle_controller_version',
+ spec_set=True, autospec=True)
+ @mock.patch.object(drac_mgmt, 'check_for_config_job', spec_set=True,
+ autospec=True)
+ def test_set_boot_device_noop(self, mock_cfcj, mock_glcv, mock_gbd,
+ mock_client_pywsman):
+ controller_version = '2.1.5.0'
+ mock_glcv.return_value = controller_version
+ mock_gbd.return_value = {'boot_device': boot_devices.PXE,
+ 'persistent': False}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ result = self.driver.set_boot_device(task, boot_devices.PXE)
+ self.assertIsNone(result)
+ mock_glcv.assert_called_once_with(self.node)
+ mock_gbd.assert_called_once_with(self.node, controller_version)
+ self.assertFalse(mock_cfcj.called)
+
+ def test_get_sensors_data(self, mock_client_pywsman):
+ self.assertRaises(NotImplementedError,
+ self.driver.get_sensors_data, self.task)
+
+ @mock.patch.object(drac_mgmt, '_get_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, '_get_lifecycle_controller_version',
+ spec_set=True, autospec=True)
+ @mock.patch.object(drac_mgmt, 'check_for_config_job', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_mgmt, 'create_config_job', spec_set=True,
+ autospec=True)
+ def test_set_boot_device_11g(self, mock_ccj, mock_cfcj, mock_glcv,
+ mock_gbd, mock_client_pywsman):
+ controller_version = '1.5.0.0'
+ mock_glcv.return_value = controller_version
+ mock_gbd.return_value = {'boot_device': boot_devices.PXE,
+ 'persistent': True}
+ result_xml_enum = test_utils.build_soap_xml(
+ [{'InstanceID': 'NIC'}],
+ resource_uris.DCIM_BootSourceSetting)
+ result_xml_invk = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_SUCCESS}],
+ resource_uris.DCIM_BootConfigSetting)
+
+ mock_xml_enum = test_utils.mock_wsman_root(result_xml_enum)
+ mock_xml_invk = test_utils.mock_wsman_root(result_xml_invk)
+ mock_pywsman = mock_client_pywsman.Client.return_value
+ mock_pywsman.enumerate.return_value = mock_xml_enum
+ mock_pywsman.invoke.return_value = mock_xml_invk
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node = self.node
+ result = self.driver.set_boot_device(task, boot_devices.PXE)
+
+ self.assertIsNone(result)
+ mock_pywsman.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_BootSourceSetting)
+ mock_pywsman.invoke.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_BootConfigSetting,
+ 'ChangeBootOrderByInstanceID', None)
+ mock_glcv.assert_called_once_with(self.node)
+ mock_gbd.assert_called_once_with(self.node, controller_version)
+ mock_cfcj.assert_called_once_with(self.node)
+ mock_ccj.assert_called_once_with(self.node)
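Every test in the two classes above uses the same stubbing recipe: the class-level patch swaps out the pywsman module referenced by the DRAC client, and each test pre-loads the stubbed Client with canned WS-Man XML built by the helpers from ironic/tests/unit/drivers/drac/utils.py (shown later in this diff). A condensed, illustrative sketch of that wiring, reusing names from the tests above:

@mock.patch.object(drac_client, 'pywsman', spec_set=mock_specs.PYWSMAN_SPEC)
class ExampleTestCase(db_base.DbTestCase):

    def test_enumerate_returns_canned_xml(self, mock_client_pywsman):
        result_xml = test_utils.build_soap_xml(
            [{'DCIM_BootConfigSetting': {'InstanceID': 'IPL'}}],
            resource_uris.DCIM_BootConfigSetting)
        # Client is a Mock, so every pywsman.Client(...) call in the code
        # under test returns this single stub, and its enumerate() hands
        # back the canned SOAP envelope.
        mock_pywsman = mock_client_pywsman.Client.return_value
        mock_pywsman.enumerate.return_value = test_utils.mock_wsman_root(
            result_xml)

The spec_set=mock_specs.PYWSMAN_SPEC argument keeps the stub honest: touching an attribute the recorded pywsman API does not define raises AttributeError instead of silently returning another Mock.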
diff --git a/ironic/tests/unit/drivers/drac/test_power.py b/ironic/tests/unit/drivers/drac/test_power.py
new file mode 100644
index 000000000..26cf2f25f
--- /dev/null
+++ b/ironic/tests/unit/drivers/drac/test_power.py
@@ -0,0 +1,175 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for DRAC Power Driver.
+"""
+
+import mock
+
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules.drac import client as drac_client
+from ironic.drivers.modules.drac import common as drac_common
+from ironic.drivers.modules.drac import power as drac_power
+from ironic.drivers.modules.drac import resource_uris
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers.drac import utils as test_utils
+from ironic.tests.unit.drivers import third_party_driver_mock_specs \
+ as mock_specs
+
+INFO_DICT = db_utils.get_test_drac_info()
+
+
+@mock.patch.object(drac_client, 'pywsman', spec_set=mock_specs.PYWSMAN_SPEC)
+@mock.patch.object(drac_power, 'pywsman', spec_set=mock_specs.PYWSMAN_SPEC)
+class DracPowerInternalMethodsTestCase(base.DbTestCase):
+
+ def setUp(self):
+ super(DracPowerInternalMethodsTestCase, self).setUp()
+ driver_info = INFO_DICT
+ self.node = db_utils.create_test_node(
+ driver='fake_drac',
+ driver_info=driver_info,
+ instance_uuid='instance_uuid_123')
+
+ def test__get_power_state(self, mock_power_pywsman, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'EnabledState': '2'}], resource_uris.DCIM_ComputerSystem)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.enumerate.return_value = mock_xml
+
+ self.assertEqual(states.POWER_ON,
+ drac_power._get_power_state(self.node))
+
+ mock_pywsman_client.enumerate.assert_called_once_with(
+ mock.ANY, mock.ANY, resource_uris.DCIM_ComputerSystem)
+
+ def test__set_power_state(self, mock_power_pywsman, mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_SUCCESS}],
+ resource_uris.DCIM_ComputerSystem)
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.return_value = mock_xml
+
+ mock_pywsman_clientopts = (
+ mock_client_pywsman.ClientOptions.return_value)
+
+ drac_power._set_power_state(self.node, states.POWER_ON)
+
+ mock_pywsman_clientopts.add_selector.assert_has_calls([
+ mock.call('CreationClassName', 'DCIM_ComputerSystem'),
+ mock.call('Name', 'srv:system')
+ ], any_order=True)
+ mock_pywsman_clientopts.add_property.assert_called_once_with(
+ 'RequestedState', '2')
+
+ mock_pywsman_client.invoke.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_ComputerSystem,
+ 'RequestStateChange', None)
+
+ def test__set_power_state_fail(self, mock_power_pywsman,
+ mock_client_pywsman):
+ result_xml = test_utils.build_soap_xml(
+ [{'ReturnValue': drac_client.RET_ERROR,
+ 'Message': 'error message'}],
+ resource_uris.DCIM_ComputerSystem)
+
+ mock_xml = test_utils.mock_wsman_root(result_xml)
+ mock_pywsman_client = mock_client_pywsman.Client.return_value
+ mock_pywsman_client.invoke.return_value = mock_xml
+
+ mock_pywsman_clientopts = (
+ mock_client_pywsman.ClientOptions.return_value)
+
+ self.assertRaises(exception.DracOperationFailed,
+ drac_power._set_power_state, self.node,
+ states.POWER_ON)
+
+ mock_pywsman_clientopts.add_selector.assert_has_calls([
+ mock.call('CreationClassName', 'DCIM_ComputerSystem'),
+ mock.call('Name', 'srv:system')
+ ], any_order=True)
+ mock_pywsman_clientopts.add_property.assert_called_once_with(
+ 'RequestedState', '2')
+
+ mock_pywsman_client.invoke.assert_called_once_with(
+ mock.ANY, resource_uris.DCIM_ComputerSystem,
+ 'RequestStateChange', None)
+
+
+class DracPowerTestCase(base.DbTestCase):
+
+ def setUp(self):
+ super(DracPowerTestCase, self).setUp()
+ driver_info = INFO_DICT
+ mgr_utils.mock_the_extension_manager(driver="fake_drac")
+ self.node = db_utils.create_test_node(
+ driver='fake_drac',
+ driver_info=driver_info,
+ instance_uuid='instance_uuid_123')
+
+ def test_get_properties(self):
+ expected = drac_common.COMMON_PROPERTIES
+ driver = drac_power.DracPower()
+ self.assertEqual(expected, driver.get_properties())
+
+ @mock.patch.object(drac_power, '_get_power_state', spec_set=True,
+ autospec=True)
+ def test_get_power_state(self, mock_get_power_state):
+ mock_get_power_state.return_value = states.POWER_ON
+ driver = drac_power.DracPower()
+ task = mock.Mock()
+ task.node.return_value = self.node
+
+ self.assertEqual(states.POWER_ON, driver.get_power_state(task))
+ mock_get_power_state.assert_called_once_with(task.node)
+
+ @mock.patch.object(drac_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ def test_set_power_state(self, mock_set_power_state):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+ mock_set_power_state.assert_called_once_with(task.node,
+ states.POWER_ON)
+
+ @mock.patch.object(drac_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_power, '_get_power_state', spec_set=True,
+ autospec=True)
+ def test_reboot(self, mock_get_power_state, mock_set_power_state):
+ mock_get_power_state.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.reboot(task)
+ mock_set_power_state.assert_called_once_with(task.node,
+ states.REBOOT)
+
+ @mock.patch.object(drac_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(drac_power, '_get_power_state', spec_set=True,
+ autospec=True)
+ def test_reboot_in_power_off(self, mock_get_power_state,
+ mock_set_power_state):
+ mock_get_power_state.return_value = states.POWER_OFF
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.reboot(task)
+ mock_set_power_state.assert_called_once_with(task.node,
+ states.POWER_ON)
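The last two tests fix the reboot contract: a node that reports POWER_ON gets a REBOOT request, while a node that is already off is simply powered on. Reconstructed from those assertions alone (a hedged sketch, not the power module's actual code):

def reboot(task):
    # Behaviour implied by test_reboot and test_reboot_in_power_off above.
    if drac_power._get_power_state(task.node) == states.POWER_OFF:
        drac_power._set_power_state(task.node, states.POWER_ON)
    else:
        drac_power._set_power_state(task.node, states.REBOOT)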
diff --git a/ironic/tests/unit/drivers/drac/utils.py b/ironic/tests/unit/drivers/drac/utils.py
new file mode 100644
index 000000000..6338f7168
--- /dev/null
+++ b/ironic/tests/unit/drivers/drac/utils.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2014 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from xml.etree import ElementTree
+
+import mock
+
+
+def build_soap_xml(items, namespace=None):
+ """Build a SOAP XML.
+
+ :param items: a list of dictionaries where key is the element name
+ and the value is the element text.
+ :param namespace: the namespace for the elements, None for no
+ namespace. Defaults to None
+ :returns: a XML string.
+
+ """
+
+ def _create_element(name, value=None):
+ xml_string = name
+ if namespace:
+ xml_string = "{%(namespace)s}%(item)s" % {'namespace': namespace,
+ 'item': xml_string}
+
+ element = ElementTree.Element(xml_string)
+ element.text = value
+ return element
+
+ soap_namespace = "http://www.w3.org/2003/05/soap-envelope"
+ envelope_element = ElementTree.Element("{%s}Envelope" % soap_namespace)
+ body_element = ElementTree.Element("{%s}Body" % soap_namespace)
+
+ for item in items:
+ for i in item:
+ insertion_point = _create_element(i)
+ if isinstance(item[i], dict):
+ for j, value in item[i].items():
+ insertion_point.append(_create_element(j, value))
+ else:
+ insertion_point.text = item[i]
+
+ body_element.append(insertion_point)
+
+ envelope_element.append(body_element)
+ return ElementTree.tostring(envelope_element)
+
+
+def mock_wsman_root(return_value):
+ """Helper function to mock the root() from wsman client."""
+ mock_xml_root = mock.Mock(spec_set=['string'])
+ mock_xml_root.string.return_value = return_value
+
+ mock_xml = mock.Mock(spec_set=['context', 'root'])
+ mock_xml.context.return_value = None
+ mock_xml.root.return_value = mock_xml_root
+
+ return mock_xml
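Taken together, these two helpers are the backbone of the DRAC tests in this directory; a short usage example with the 'http://fake' namespace used in test_common.py:

result_xml = build_soap_xml([{'ReturnValue': '0'}], 'http://fake')
mock_xml = mock_wsman_root(result_xml)
# mock_xml.root().string() now returns the serialized SOAP envelope, which
# is exactly what the stubbed pywsman client hands back from enumerate()
# and invoke() in the test cases above.

The spec_set lists in mock_wsman_root keep the mocks from growing attributes that the real pywsman response objects do not have.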
diff --git a/ironic/tests/unit/drivers/elilo_efi_pxe_config.template b/ironic/tests/unit/drivers/elilo_efi_pxe_config.template
new file mode 100644
index 000000000..0dca09d8c
--- /dev/null
+++ b/ironic/tests/unit/drivers/elilo_efi_pxe_config.template
@@ -0,0 +1,16 @@
+default=deploy
+
+image=/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/deploy_kernel
+ label=deploy
+ initrd=/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/deploy_ramdisk
+ append="selinux=0 disk=cciss/c0d0,sda,hda,vda iscsi_target_iqn=iqn-1be26c0b-03f2-4d2e-ae87-c02d7f33c123 deployment_id=1be26c0b-03f2-4d2e-ae87-c02d7f33c123 deployment_key=0123456789ABCDEFGHIJKLMNOPQRSTUV ironic_api_url=http://192.168.122.184:6385 troubleshoot=0 text test_param ip=%I::%G:%M:%H::on root_device=vendor=fake,size=123 ipa-api-url=http://192.168.122.184:6385 ipa-driver-name=pxe_ssh boot_option=netboot boot_mode=uefi coreos.configdrive=0"
+
+
+image=/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/kernel
+ label=boot_partition
+ initrd=/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/ramdisk
+ append="root={{ ROOT }} ro text test_param ip=%I::%G:%M:%H::on"
+
+image=chain.c32
+ label=boot_whole_disk
+ append="mbr:{{ DISK_IDENTIFIER }}"
diff --git a/ironic/tests/unit/drivers/ilo/__init__.py b/ironic/tests/unit/drivers/ilo/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/drivers/ilo/__init__.py
diff --git a/ironic/tests/unit/drivers/ilo/test_common.py b/ironic/tests/unit/drivers/ilo/test_common.py
new file mode 100644
index 000000000..10b1ad305
--- /dev/null
+++ b/ironic/tests/unit/drivers/ilo/test_common.py
@@ -0,0 +1,675 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for common methods used by iLO modules."""
+
+import os
+import shutil
+import tempfile
+
+import mock
+from oslo_config import cfg
+from oslo_utils import importutils
+import six
+
+from ironic.common import exception
+from ironic.common import images
+from ironic.common import swift
+from ironic.common import utils
+from ironic.conductor import task_manager
+from ironic.drivers.modules.ilo import common as ilo_common
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+ilo_client = importutils.try_import('proliantutils.ilo.client')
+ilo_error = importutils.try_import('proliantutils.exception')
+
+if six.PY3:
+ import io
+ file = io.BytesIO
+
+
+CONF = cfg.CONF
+
+
+class IloValidateParametersTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloValidateParametersTestCase, self).setUp()
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_ilo',
+ driver_info=db_utils.get_test_ilo_info())
+
+ def test_parse_driver_info(self):
+ info = ilo_common.parse_driver_info(self.node)
+
+ self.assertIsNotNone(info.get('ilo_address'))
+ self.assertIsNotNone(info.get('ilo_username'))
+ self.assertIsNotNone(info.get('ilo_password'))
+ self.assertIsNotNone(info.get('client_timeout'))
+ self.assertIsNotNone(info.get('client_port'))
+
+ def test_parse_driver_info_missing_address(self):
+ del self.node.driver_info['ilo_address']
+ self.assertRaises(exception.MissingParameterValue,
+ ilo_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_username(self):
+ del self.node.driver_info['ilo_username']
+ self.assertRaises(exception.MissingParameterValue,
+ ilo_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_password(self):
+ del self.node.driver_info['ilo_password']
+ self.assertRaises(exception.MissingParameterValue,
+ ilo_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_invalid_timeout(self):
+ self.node.driver_info['client_timeout'] = 'qwe'
+ self.assertRaises(exception.InvalidParameterValue,
+ ilo_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_invalid_port(self):
+ self.node.driver_info['client_port'] = 'qwe'
+ self.assertRaises(exception.InvalidParameterValue,
+ ilo_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_multiple_params(self):
+ del self.node.driver_info['ilo_password']
+ del self.node.driver_info['ilo_address']
+ try:
+ ilo_common.parse_driver_info(self.node)
+ self.fail("parse_driver_info did not throw exception.")
+ except exception.MissingParameterValue as e:
+ self.assertIn('ilo_password', str(e))
+ self.assertIn('ilo_address', str(e))
+
+ def test_parse_driver_info_invalid_multiple_params(self):
+ self.node.driver_info['client_timeout'] = 'qwe'
+ self.node.driver_info['console_port'] = 'not-int'
+ try:
+ ilo_common.parse_driver_info(self.node)
+ self.fail("parse_driver_info did not throw exception.")
+ except exception.InvalidParameterValue as e:
+ self.assertIn('client_timeout', str(e))
+ self.assertIn('console_port', str(e))
+
+
+class IloCommonMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloCommonMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_ilo")
+ self.info = db_utils.get_test_ilo_info()
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_ilo', driver_info=self.info)
+
+ @mock.patch.object(ilo_client, 'IloClient', spec_set=True,
+ autospec=True)
+ def test_get_ilo_object(self, ilo_client_mock):
+ self.info['client_timeout'] = 60
+ self.info['client_port'] = 443
+ ilo_client_mock.return_value = 'ilo_object'
+ returned_ilo_object = ilo_common.get_ilo_object(self.node)
+ ilo_client_mock.assert_called_with(
+ self.info['ilo_address'],
+ self.info['ilo_username'],
+ self.info['ilo_password'],
+ self.info['client_timeout'],
+ self.info['client_port'])
+ self.assertEqual('ilo_object', returned_ilo_object)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_ilo_license(self, get_ilo_object_mock):
+ ilo_advanced_license = {'LICENSE_TYPE': 'iLO 3 Advanced'}
+ ilo_standard_license = {'LICENSE_TYPE': 'iLO 3'}
+
+ ilo_mock_object = get_ilo_object_mock.return_value
+ ilo_mock_object.get_all_licenses.return_value = ilo_advanced_license
+
+ license = ilo_common.get_ilo_license(self.node)
+ self.assertEqual(ilo_common.ADVANCED_LICENSE, license)
+
+ ilo_mock_object.get_all_licenses.return_value = ilo_standard_license
+ license = ilo_common.get_ilo_license(self.node)
+ self.assertEqual(ilo_common.STANDARD_LICENSE, license)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_ilo_license_fail(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.get_all_licenses.side_effect = exc
+ self.assertRaises(exception.IloOperationError,
+ ilo_common.get_ilo_license,
+ self.node)
+
+ def test_update_ipmi_properties(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ipmi_info = {
+ "ipmi_address": "1.2.3.4",
+ "ipmi_username": "admin",
+ "ipmi_password": "fake",
+ "ipmi_terminal_port": 60
+ }
+ self.info['console_port'] = 60
+ task.node.driver_info = self.info
+ ilo_common.update_ipmi_properties(task)
+ actual_info = task.node.driver_info
+ expected_info = dict(self.info, **ipmi_info)
+ self.assertEqual(expected_info, actual_info)
+
+ def test__get_floppy_image_name(self):
+ image_name_expected = 'image-' + self.node.uuid
+ image_name_actual = ilo_common._get_floppy_image_name(self.node)
+ self.assertEqual(image_name_expected, image_name_actual)
+
+ @mock.patch.object(swift, 'SwiftAPI', spec_set=True, autospec=True)
+ @mock.patch.object(images, 'create_vfat_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(tempfile, 'NamedTemporaryFile', spec_set=True,
+ autospec=True)
+ def test__prepare_floppy_image(self, tempfile_mock, fatimage_mock,
+ swift_api_mock):
+ mock_image_file_handle = mock.MagicMock(spec=file)
+ mock_image_file_obj = mock.MagicMock(spec=file)
+ mock_image_file_obj.name = 'image-tmp-file'
+ mock_image_file_handle.__enter__.return_value = mock_image_file_obj
+
+ tempfile_mock.return_value = mock_image_file_handle
+
+ swift_obj_mock = swift_api_mock.return_value
+ self.config(swift_ilo_container='ilo_cont', group='ilo')
+ self.config(swift_object_expiry_timeout=1, group='ilo')
+ deploy_args = {'arg1': 'val1', 'arg2': 'val2'}
+ swift_obj_mock.get_temp_url.return_value = 'temp-url'
+ timeout = CONF.ilo.swift_object_expiry_timeout
+ object_headers = {'X-Delete-After': timeout}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ temp_url = ilo_common._prepare_floppy_image(task, deploy_args)
+ node_uuid = task.node.uuid
+
+ object_name = 'image-' + node_uuid
+ fatimage_mock.assert_called_once_with('image-tmp-file',
+ parameters=deploy_args)
+
+ swift_obj_mock.create_object.assert_called_once_with(
+ 'ilo_cont', object_name, 'image-tmp-file',
+ object_headers=object_headers)
+ swift_obj_mock.get_temp_url.assert_called_once_with(
+ 'ilo_cont', object_name, timeout)
+ self.assertEqual('temp-url', temp_url)
+
+ @mock.patch.object(ilo_common, 'copy_image_to_web_server',
+ spec_set=True, autospec=True)
+ @mock.patch.object(images, 'create_vfat_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(tempfile, 'NamedTemporaryFile', spec_set=True,
+ autospec=True)
+ def test__prepare_floppy_image_use_webserver(self, tempfile_mock,
+ fatimage_mock,
+ copy_mock):
+ mock_image_file_handle = mock.MagicMock(spec=file)
+ mock_image_file_obj = mock.MagicMock(spec=file)
+ mock_image_file_obj.name = 'image-tmp-file'
+ mock_image_file_handle.__enter__.return_value = mock_image_file_obj
+
+ tempfile_mock.return_value = mock_image_file_handle
+ self.config(use_web_server_for_images=True, group='ilo')
+ deploy_args = {'arg1': 'val1', 'arg2': 'val2'}
+ CONF.deploy.http_url = "http://abc.com/httpboot"
+ CONF.deploy.http_root = "/httpboot"
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ node_uuid = task.node.uuid
+ object_name = 'image-' + node_uuid
+ http_url = CONF.deploy.http_url + '/' + object_name
+ copy_mock.return_value = "http://abc.com/httpboot/" + object_name
+ temp_url = ilo_common._prepare_floppy_image(task, deploy_args)
+
+ fatimage_mock.assert_called_once_with('image-tmp-file',
+ parameters=deploy_args)
+ copy_mock.assert_called_once_with('image-tmp-file', object_name)
+ self.assertEqual(http_url, temp_url)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_attach_vmedia(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ insert_media_mock = ilo_mock_object.insert_virtual_media
+ set_status_mock = ilo_mock_object.set_vm_status
+
+ ilo_common.attach_vmedia(self.node, 'FLOPPY', 'url')
+ insert_media_mock.assert_called_once_with('url', device='FLOPPY')
+ set_status_mock.assert_called_once_with(
+ device='FLOPPY', boot_option='CONNECT', write_protect='YES')
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_attach_vmedia_fails(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ set_status_mock = ilo_mock_object.set_vm_status
+ exc = ilo_error.IloError('error')
+ set_status_mock.side_effect = exc
+ self.assertRaises(exception.IloOperationError,
+ ilo_common.attach_vmedia, self.node,
+ 'FLOPPY', 'url')
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_boot_mode(self, get_ilo_object_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ get_pending_boot_mode_mock = ilo_object_mock.get_pending_boot_mode
+ set_pending_boot_mode_mock = ilo_object_mock.set_pending_boot_mode
+ get_pending_boot_mode_mock.return_value = 'LEGACY'
+ ilo_common.set_boot_mode(self.node, 'uefi')
+ get_ilo_object_mock.assert_called_once_with(self.node)
+ get_pending_boot_mode_mock.assert_called_once_with()
+ set_pending_boot_mode_mock.assert_called_once_with('UEFI')
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_boot_mode_without_set_pending_boot_mode(self,
+ get_ilo_object_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ get_pending_boot_mode_mock = ilo_object_mock.get_pending_boot_mode
+ get_pending_boot_mode_mock.return_value = 'LEGACY'
+ ilo_common.set_boot_mode(self.node, 'bios')
+ get_ilo_object_mock.assert_called_once_with(self.node)
+ get_pending_boot_mode_mock.assert_called_once_with()
+ self.assertFalse(ilo_object_mock.set_pending_boot_mode.called)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_boot_mode_with_IloOperationError(self,
+ get_ilo_object_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ get_pending_boot_mode_mock = ilo_object_mock.get_pending_boot_mode
+ get_pending_boot_mode_mock.return_value = 'UEFI'
+ set_pending_boot_mode_mock = ilo_object_mock.set_pending_boot_mode
+ exc = ilo_error.IloError('error')
+ set_pending_boot_mode_mock.side_effect = exc
+ self.assertRaises(exception.IloOperationError,
+ ilo_common.set_boot_mode, self.node, 'bios')
+ get_ilo_object_mock.assert_called_once_with(self.node)
+ get_pending_boot_mode_mock.assert_called_once_with()
+
+ @mock.patch.object(ilo_common, 'set_boot_mode', spec_set=True,
+ autospec=True)
+ def test_update_boot_mode_instance_info_exists(self,
+ set_boot_mode_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['deploy_boot_mode'] = 'bios'
+ ilo_common.update_boot_mode(task)
+ set_boot_mode_mock.assert_called_once_with(task.node, 'bios')
+
+ @mock.patch.object(ilo_common, 'set_boot_mode', spec_set=True,
+ autospec=True)
+ def test_update_boot_mode_capabilities_exist(self,
+ set_boot_mode_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['capabilities'] = 'boot_mode:bios'
+ ilo_common.update_boot_mode(task)
+ set_boot_mode_mock.assert_called_once_with(task.node, 'bios')
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_update_boot_mode(self, get_ilo_object_mock):
+ ilo_mock_obj = get_ilo_object_mock.return_value
+ ilo_mock_obj.get_pending_boot_mode.return_value = 'LEGACY'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.update_boot_mode(task)
+ get_ilo_object_mock.assert_called_once_with(task.node)
+ ilo_mock_obj.get_pending_boot_mode.assert_called_once_with()
+ self.assertEqual('bios',
+ task.node.instance_info['deploy_boot_mode'])
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_update_boot_mode_unknown(self,
+ get_ilo_object_mock):
+ ilo_mock_obj = get_ilo_object_mock.return_value
+ ilo_mock_obj.get_pending_boot_mode.return_value = 'UNKNOWN'
+ set_pending_boot_mode_mock = ilo_mock_obj.set_pending_boot_mode
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.update_boot_mode(task)
+ get_ilo_object_mock.assert_called_once_with(task.node)
+ ilo_mock_obj.get_pending_boot_mode.assert_called_once_with()
+ set_pending_boot_mode_mock.assert_called_once_with('UEFI')
+ self.assertEqual('uefi',
+ task.node.instance_info['deploy_boot_mode'])
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_update_boot_mode_unknown_except(self,
+ get_ilo_object_mock):
+ ilo_mock_obj = get_ilo_object_mock.return_value
+ ilo_mock_obj.get_pending_boot_mode.return_value = 'UNKNOWN'
+ set_pending_boot_mode_mock = ilo_mock_obj.set_pending_boot_mode
+ exc = ilo_error.IloError('error')
+ set_pending_boot_mode_mock.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ ilo_common.update_boot_mode, task)
+ get_ilo_object_mock.assert_called_once_with(task.node)
+ ilo_mock_obj.get_pending_boot_mode.assert_called_once_with()
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_update_boot_mode_legacy(self,
+ get_ilo_object_mock):
+ ilo_mock_obj = get_ilo_object_mock.return_value
+ exc = ilo_error.IloCommandNotSupportedError('error')
+ ilo_mock_obj.get_pending_boot_mode.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.update_boot_mode(task)
+ get_ilo_object_mock.assert_called_once_with(task.node)
+ ilo_mock_obj.get_pending_boot_mode.assert_called_once_with()
+ self.assertEqual('bios',
+ task.node.instance_info['deploy_boot_mode'])
+
+ @mock.patch.object(ilo_common, 'set_boot_mode', spec_set=True,
+ autospec=True)
+ def test_update_boot_mode_prop_boot_mode_exist(self,
+ set_boot_mode_mock):
+
+ properties = {'capabilities': 'boot_mode:uefi'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties = properties
+ ilo_common.update_boot_mode(task)
+ set_boot_mode_mock.assert_called_once_with(task.node, 'uefi')
+
+ @mock.patch.object(images, 'get_temp_url_for_glance_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'attach_vmedia', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, '_prepare_floppy_image', spec_set=True,
+ autospec=True)
+ def test_setup_vmedia_for_boot_with_parameters(
+ self, prepare_image_mock, attach_vmedia_mock, temp_url_mock):
+ parameters = {'a': 'b'}
+ boot_iso = '733d1c44-a2ea-414b-aca7-69decf20d810'
+ prepare_image_mock.return_value = 'floppy_url'
+ temp_url_mock.return_value = 'image_url'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.setup_vmedia_for_boot(task, boot_iso, parameters)
+ prepare_image_mock.assert_called_once_with(task, parameters)
+ attach_vmedia_mock.assert_any_call(task.node, 'FLOPPY',
+ 'floppy_url')
+
+ temp_url_mock.assert_called_once_with(
+ task.context, '733d1c44-a2ea-414b-aca7-69decf20d810')
+ attach_vmedia_mock.assert_any_call(task.node, 'CDROM', 'image_url')
+
+ @mock.patch.object(swift, 'SwiftAPI', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_common, 'attach_vmedia', spec_set=True,
+ autospec=True)
+ def test_setup_vmedia_for_boot_with_swift(self, attach_vmedia_mock,
+ swift_api_mock):
+ swift_obj_mock = swift_api_mock.return_value
+ boot_iso = 'swift:object-name'
+ swift_obj_mock.get_temp_url.return_value = 'image_url'
+ CONF.keystone_authtoken.auth_uri = 'http://authurl'
+ CONF.ilo.swift_ilo_container = 'ilo_cont'
+ CONF.ilo.swift_object_expiry_timeout = 1
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.setup_vmedia_for_boot(task, boot_iso)
+ swift_obj_mock.get_temp_url.assert_called_once_with(
+ 'ilo_cont', 'object-name', 1)
+ attach_vmedia_mock.assert_called_once_with(
+ task.node, 'CDROM', 'image_url')
+
+ @mock.patch.object(ilo_common, 'attach_vmedia', spec_set=True,
+ autospec=True)
+ def test_setup_vmedia_for_boot_with_url(self, attach_vmedia_mock):
+ boot_iso = 'http://abc.com/img.iso'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.setup_vmedia_for_boot(task, boot_iso)
+ attach_vmedia_mock.assert_called_once_with(task.node, 'CDROM',
+ boot_iso)
+
+ @mock.patch.object(ilo_common, 'eject_vmedia_devices',
+ spec_set=True, autospec=True)
+ @mock.patch.object(swift, 'SwiftAPI', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_common, '_get_floppy_image_name', spec_set=True,
+ autospec=True)
+ def test_cleanup_vmedia_boot(self, get_name_mock, swift_api_mock,
+ eject_mock):
+ swift_obj_mock = swift_api_mock.return_value
+ CONF.ilo.swift_ilo_container = 'ilo_cont'
+
+ get_name_mock.return_value = 'image-node-uuid'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.cleanup_vmedia_boot(task)
+ swift_obj_mock.delete_object.assert_called_once_with(
+ 'ilo_cont', 'image-node-uuid')
+ eject_mock.assert_called_once_with(task)
+
+ @mock.patch.object(ilo_common, 'eject_vmedia_devices',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_common, 'destroy_floppy_image_from_web_server',
+ spec_set=True, autospec=True)
+ def test_cleanup_vmedia_boot_for_webserver(self,
+ destroy_image_mock,
+ eject_mock):
+ CONF.ilo.use_web_server_for_images = True
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.cleanup_vmedia_boot(task)
+ destroy_image_mock.assert_called_once_with(task.node)
+ eject_mock.assert_called_once_with(task)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_eject_vmedia_devices(self, get_ilo_object_mock):
+ ilo_object_mock = mock.MagicMock(spec=['eject_virtual_media'])
+ get_ilo_object_mock.return_value = ilo_object_mock
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.eject_vmedia_devices(task)
+
+ ilo_object_mock.eject_virtual_media.assert_has_calls(
+ [mock.call('FLOPPY'), mock.call('CDROM')])
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_eject_vmedia_devices_raises(
+ self, get_ilo_object_mock):
+ ilo_object_mock = mock.MagicMock(spec=['eject_virtual_media'])
+ get_ilo_object_mock.return_value = ilo_object_mock
+ exc = ilo_error.IloError('error')
+ ilo_object_mock.eject_virtual_media.side_effect = exc
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ ilo_common.eject_vmedia_devices,
+ task)
+
+ ilo_object_mock.eject_virtual_media.assert_called_once_with(
+ 'FLOPPY')
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_secure_boot_mode(self,
+ get_ilo_object_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ ilo_object_mock.get_current_boot_mode.return_value = 'UEFI'
+ ilo_object_mock.get_secure_boot_mode.return_value = True
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ret = ilo_common.get_secure_boot_mode(task)
+ ilo_object_mock.get_current_boot_mode.assert_called_once_with()
+ ilo_object_mock.get_secure_boot_mode.assert_called_once_with()
+ self.assertTrue(ret)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_secure_boot_mode_bios(self,
+ get_ilo_object_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ ilo_object_mock.get_current_boot_mode.return_value = 'BIOS'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ret = ilo_common.get_secure_boot_mode(task)
+ ilo_object_mock.get_current_boot_mode.assert_called_once_with()
+ self.assertFalse(ilo_object_mock.get_secure_boot_mode.called)
+ self.assertFalse(ret)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_secure_boot_mode_fail(self,
+ get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.get_current_boot_mode.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ ilo_common.get_secure_boot_mode,
+ task)
+ ilo_mock_object.get_current_boot_mode.assert_called_once_with()
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_secure_boot_mode_not_supported(self,
+ ilo_object_mock):
+ ilo_mock_object = ilo_object_mock.return_value
+ exc = ilo_error.IloCommandNotSupportedError('error')
+ ilo_mock_object.get_current_boot_mode.return_value = 'UEFI'
+ ilo_mock_object.get_secure_boot_mode.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationNotSupported,
+ ilo_common.get_secure_boot_mode,
+ task)
+ ilo_mock_object.get_current_boot_mode.assert_called_once_with()
+ ilo_mock_object.get_secure_boot_mode.assert_called_once_with()
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_secure_boot_mode(self,
+ get_ilo_object_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.set_secure_boot_mode(task, True)
+ ilo_object_mock.set_secure_boot_mode.assert_called_once_with(True)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_secure_boot_mode_fail(self,
+ get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.set_secure_boot_mode.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ ilo_common.set_secure_boot_mode,
+ task, False)
+ ilo_mock_object.set_secure_boot_mode.assert_called_once_with(False)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_secure_boot_mode_not_supported(self,
+ ilo_object_mock):
+ ilo_mock_object = ilo_object_mock.return_value
+ exc = ilo_error.IloCommandNotSupportedError('error')
+ ilo_mock_object.set_secure_boot_mode.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationNotSupported,
+ ilo_common.set_secure_boot_mode,
+ task, False)
+ ilo_mock_object.set_secure_boot_mode.assert_called_once_with(False)
+
+ @mock.patch.object(os, 'chmod', spec_set=True,
+ autospec=True)
+ @mock.patch.object(shutil, 'copyfile', spec_set=True,
+ autospec=True)
+ def test_copy_image_to_web_server(self, copy_mock,
+ chmod_mock):
+ CONF.deploy.http_url = "http://x.y.z.a/webserver/"
+ CONF.deploy.http_root = "/webserver"
+ expected_url = "http://x.y.z.a/webserver/image-UUID"
+ source = 'tmp_image_file'
+ destination = "image-UUID"
+ image_path = "/webserver/image-UUID"
+ actual_url = ilo_common.copy_image_to_web_server(source, destination)
+ self.assertEqual(expected_url, actual_url)
+ copy_mock.assert_called_once_with(source, image_path)
+ chmod_mock.assert_called_once_with(image_path, 0o644)
+
+ @mock.patch.object(os, 'chmod', spec_set=True,
+ autospec=True)
+ @mock.patch.object(shutil, 'copyfile', spec_set=True,
+ autospec=True)
+ def test_copy_image_to_web_server_fails(self, copy_mock,
+ chmod_mock):
+ CONF.deploy.http_url = "http://x.y.z.a/webserver/"
+ CONF.deploy.http_root = "/webserver"
+ source = 'tmp_image_file'
+ destination = "image-UUID"
+ image_path = "/webserver/image-UUID"
+ exc = exception.ImageUploadFailed('reason')
+ copy_mock.side_effect = exc
+ self.assertRaises(exception.ImageUploadFailed,
+ ilo_common.copy_image_to_web_server,
+ source, destination)
+ copy_mock.assert_called_once_with(source, image_path)
+ self.assertFalse(chmod_mock.called)
+
+ @mock.patch.object(utils, 'unlink_without_raise', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, '_get_floppy_image_name', spec_set=True,
+ autospec=True)
+ def test_destroy_floppy_image_from_web_server(self, get_floppy_name_mock,
+ utils_mock):
+ get_floppy_name_mock.return_value = 'image-uuid'
+ CONF.deploy.http_root = "/webserver/"
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ilo_common.destroy_floppy_image_from_web_server(task.node)
+ get_floppy_name_mock.assert_called_once_with(task.node)
+ utils_mock.assert_called_once_with('/webserver/image-uuid')
diff --git a/ironic/tests/unit/drivers/ilo/test_deploy.py b/ironic/tests/unit/drivers/ilo/test_deploy.py
new file mode 100644
index 000000000..451761bed
--- /dev/null
+++ b/ironic/tests/unit/drivers/ilo/test_deploy.py
@@ -0,0 +1,1860 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for common methods used by iLO modules."""
+
+import tempfile
+
+import mock
+from oslo_config import cfg
+import six
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common.glance_service import service_utils
+from ironic.common import image_service
+from ironic.common import images
+from ironic.common import states
+from ironic.common import swift
+from ironic.common import utils
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules import agent
+from ironic.drivers.modules import agent_base_vendor
+from ironic.drivers.modules import deploy_utils
+from ironic.drivers.modules.ilo import common as ilo_common
+from ironic.drivers.modules.ilo import deploy as ilo_deploy
+from ironic.drivers.modules import iscsi_deploy
+from ironic.drivers import utils as driver_utils
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+
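+# Python 3 has no 'file' builtin; alias it to io.BytesIO so the tests below
+# can still build mock.MagicMock(spec=file) stand-ins for temporary file handles.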
+if six.PY3:
+ import io
+ file = io.BytesIO
+
+INFO_DICT = db_utils.get_test_ilo_info()
+CONF = cfg.CONF
+
+
+class IloDeployPrivateMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloDeployPrivateMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="iscsi_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='iscsi_ilo', driver_info=INFO_DICT)
+
+ def test__get_boot_iso_object_name(self):
+ boot_iso_actual = ilo_deploy._get_boot_iso_object_name(self.node)
+ boot_iso_expected = "boot-%s" % self.node.uuid
+ self.assertEqual(boot_iso_expected, boot_iso_actual)
+
+ @mock.patch.object(image_service.HttpImageService, 'validate_href',
+ spec_set=True, autospec=True)
+ def test__get_boot_iso_http_url(self, service_mock):
+ url = 'http://abc.org/image/qcow2'
+ i_info = self.node.instance_info
+ i_info['ilo_boot_iso'] = url
+ self.node.instance_info = i_info
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ boot_iso_actual = ilo_deploy._get_boot_iso(task, 'root-uuid')
+ service_mock.assert_called_once_with(mock.ANY, url)
+ self.assertEqual(url, boot_iso_actual)
+
+ @mock.patch.object(image_service.HttpImageService, 'validate_href',
+ spec_set=True, autospec=True)
+ def test__get_boot_iso_url(self, mock_validate):
+ url = 'http://aaa/bbb'
+ i_info = self.node.instance_info
+ i_info['ilo_boot_iso'] = url
+ self.node.instance_info = i_info
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ boot_iso_actual = ilo_deploy._get_boot_iso(task, 'root-uuid')
+ self.assertEqual(url, boot_iso_actual)
+ mock_validate.assert_called_once_with(mock.ANY, url)
+
+ @mock.patch.object(image_service.HttpImageService, 'validate_href',
+ spec_set=True, autospec=True)
+ def test__get_boot_iso_unsupported_url(self, validate_href_mock):
+ validate_href_mock.side_effect = iter(
+ [exception.ImageRefValidationFailed(
+ image_href='file://img.qcow2', reason='fail')])
+ url = 'file://img.qcow2'
+ i_info = self.node.instance_info
+ i_info['ilo_boot_iso'] = url
+ self.node.instance_info = i_info
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.ImageRefValidationFailed,
+ ilo_deploy._get_boot_iso, task, 'root-uuid')
+
+ @mock.patch.object(images, 'get_image_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ def test__get_boot_iso_glance_image(self, deploy_info_mock,
+ image_props_mock):
+ deploy_info_mock.return_value = {'image_source': 'image-uuid',
+ 'ilo_deploy_iso': 'deploy_iso_uuid'}
+ image_props_mock.return_value = {'boot_iso': 'boot-iso-uuid',
+ 'kernel_id': None,
+ 'ramdisk_id': None}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ boot_iso_actual = ilo_deploy._get_boot_iso(task, 'root-uuid')
+ deploy_info_mock.assert_called_once_with(task.node)
+ image_props_mock.assert_called_once_with(
+ task.context, 'image-uuid',
+ ['boot_iso', 'kernel_id', 'ramdisk_id'])
+ boot_iso_expected = 'boot-iso-uuid'
+ self.assertEqual(boot_iso_expected, boot_iso_actual)
+
+ @mock.patch.object(deploy_utils, 'get_boot_mode_for_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'get_image_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ def test__get_boot_iso_uefi_no_glance_image(self,
+ deploy_info_mock,
+ image_props_mock,
+ boot_mode_mock):
+ deploy_info_mock.return_value = {'image_source': 'image-uuid',
+ 'ilo_deploy_iso': 'deploy_iso_uuid'}
+ image_props_mock.return_value = {'boot_iso': None,
+ 'kernel_id': None,
+ 'ramdisk_id': None}
+ properties = {'capabilities': 'boot_mode:uefi'}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties = properties
+ boot_iso_result = ilo_deploy._get_boot_iso(task, 'root-uuid')
+ deploy_info_mock.assert_called_once_with(task.node)
+ image_props_mock.assert_called_once_with(
+ task.context, 'image-uuid',
+ ['boot_iso', 'kernel_id', 'ramdisk_id'])
+ self.assertFalse(boot_mode_mock.called)
+ self.assertIsNone(boot_iso_result)
+
+ @mock.patch.object(tempfile, 'NamedTemporaryFile', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'create_boot_iso', spec_set=True, autospec=True)
+ @mock.patch.object(swift, 'SwiftAPI', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso_object_name', spec_set=True,
+ autospec=True)
+ @mock.patch.object(driver_utils, 'get_node_capability', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'get_image_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ def test__get_boot_iso_create(self, deploy_info_mock, image_props_mock,
+ capability_mock, boot_object_name_mock,
+ swift_api_mock,
+ create_boot_iso_mock, tempfile_mock):
+ CONF.ilo.swift_ilo_container = 'ilo-cont'
+ CONF.pxe.pxe_append_params = 'kernel-params'
+
+ swift_obj_mock = swift_api_mock.return_value
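+ # Fake a NamedTemporaryFile context manager: __enter__ yields a file-like
+ # mock named 'tmpfile', which the ISO creation and Swift upload calls below
+ # are expected to receive.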
+ fileobj_mock = mock.MagicMock(spec=file)
+ fileobj_mock.name = 'tmpfile'
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = fileobj_mock
+ tempfile_mock.return_value = mock_file_handle
+
+ deploy_info_mock.return_value = {'image_source': 'image-uuid',
+ 'ilo_deploy_iso': 'deploy_iso_uuid'}
+ image_props_mock.return_value = {'boot_iso': None,
+ 'kernel_id': 'kernel_uuid',
+ 'ramdisk_id': 'ramdisk_uuid'}
+ boot_object_name_mock.return_value = 'abcdef'
+ create_boot_iso_mock.return_value = '/path/to/boot-iso'
+ capability_mock.return_value = 'uefi'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ boot_iso_actual = ilo_deploy._get_boot_iso(task, 'root-uuid')
+ deploy_info_mock.assert_called_once_with(task.node)
+ image_props_mock.assert_called_once_with(
+ task.context, 'image-uuid',
+ ['boot_iso', 'kernel_id', 'ramdisk_id'])
+ boot_object_name_mock.assert_called_once_with(task.node)
+ create_boot_iso_mock.assert_called_once_with(task.context,
+ 'tmpfile',
+ 'kernel_uuid',
+ 'ramdisk_uuid',
+ 'deploy_iso_uuid',
+ 'root-uuid',
+ 'kernel-params',
+ 'uefi')
+ swift_obj_mock.create_object.assert_called_once_with('ilo-cont',
+ 'abcdef',
+ 'tmpfile')
+ boot_iso_expected = 'swift:abcdef'
+ self.assertEqual(boot_iso_expected, boot_iso_actual)
+
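+ # Variant of the ISO-create path with CONF.ilo.use_web_server_for_images
+ # enabled: the generated ISO is copied under the web server root instead of
+ # being uploaded to Swift, and the returned HTTP URL is used as the boot ISO.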
+ @mock.patch.object(ilo_common, 'copy_image_to_web_server', spec_set=True,
+ autospec=True)
+ @mock.patch.object(tempfile, 'NamedTemporaryFile', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'create_boot_iso', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso_object_name', spec_set=True,
+ autospec=True)
+ @mock.patch.object(driver_utils, 'get_node_capability', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'get_image_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ def test__get_boot_iso_create_use_webserver_true_ramdisk_webserver(
+ self, deploy_info_mock, image_props_mock,
+ capability_mock, boot_object_name_mock,
+ create_boot_iso_mock, tempfile_mock,
+ copy_file_mock):
+ CONF.ilo.swift_ilo_container = 'ilo-cont'
+ CONF.ilo.use_web_server_for_images = True
+ CONF.deploy.http_url = "http://10.10.1.30/httpboot"
+ CONF.deploy.http_root = "/httpboot"
+ CONF.pxe.pxe_append_params = 'kernel-params'
+
+ fileobj_mock = mock.MagicMock(spec=file)
+ fileobj_mock.name = 'tmpfile'
+ mock_file_handle = mock.MagicMock(spec=file)
+ mock_file_handle.__enter__.return_value = fileobj_mock
+ tempfile_mock.return_value = mock_file_handle
+
+ ramdisk_href = "http://10.10.1.30/httpboot/ramdisk"
+ kernel_href = "http://10.10.1.30/httpboot/kernel"
+ deploy_info_mock.return_value = {'image_source': 'image-uuid',
+ 'ilo_deploy_iso': 'deploy_iso_uuid'}
+ image_props_mock.return_value = {'boot_iso': None,
+ 'kernel_id': kernel_href,
+ 'ramdisk_id': ramdisk_href}
+ boot_object_name_mock.return_value = 'abcdef'
+ create_boot_iso_mock.return_value = '/path/to/boot-iso'
+ capability_mock.return_value = 'uefi'
+ copy_file_mock.return_value = "http://10.10.1.30/httpboot/abcdef"
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ boot_iso_actual = ilo_deploy._get_boot_iso(task, 'root-uuid')
+ deploy_info_mock.assert_called_once_with(task.node)
+ image_props_mock.assert_called_once_with(
+ task.context, 'image-uuid',
+ ['boot_iso', 'kernel_id', 'ramdisk_id'])
+ boot_object_name_mock.assert_called_once_with(task.node)
+ create_boot_iso_mock.assert_called_once_with(task.context,
+ 'tmpfile',
+ kernel_href,
+ ramdisk_href,
+ 'deploy_iso_uuid',
+ 'root-uuid',
+ 'kernel-params',
+ 'uefi')
+ boot_iso_expected = 'http://10.10.1.30/httpboot/abcdef'
+ self.assertEqual(boot_iso_expected, boot_iso_actual)
+ copy_file_mock.assert_called_once_with(fileobj_mock.name,
+ 'abcdef')
+
+ @mock.patch.object(ilo_deploy, '_get_boot_iso_object_name', spec_set=True,
+ autospec=True)
+ @mock.patch.object(swift, 'SwiftAPI', spec_set=True, autospec=True)
+ def test__clean_up_boot_iso_for_instance(self, swift_mock,
+ boot_object_name_mock):
+ swift_obj_mock = swift_mock.return_value
+ CONF.ilo.swift_ilo_container = 'ilo-cont'
+ boot_object_name_mock.return_value = 'boot-object'
+ i_info = self.node.instance_info
+ i_info['ilo_boot_iso'] = 'swift:bootiso'
+ self.node.instance_info = i_info
+ self.node.save()
+ ilo_deploy._clean_up_boot_iso_for_instance(self.node)
+ swift_obj_mock.delete_object.assert_called_once_with('ilo-cont',
+ 'boot-object')
+
+ @mock.patch.object(utils, 'unlink_without_raise', spec_set=True,
+ autospec=True)
+ def test__clean_up_boot_iso_for_instance_on_webserver(self, unlink_mock):
+
+ CONF.ilo.use_web_server_for_images = True
+ CONF.deploy.http_root = "/webserver"
+ i_info = self.node.instance_info
+ i_info['ilo_boot_iso'] = 'http://x.y.z.a/webserver/boot-object'
+ self.node.instance_info = i_info
+ self.node.save()
+ boot_iso_path = "/webserver/boot-object"
+ ilo_deploy._clean_up_boot_iso_for_instance(self.node)
+ unlink_mock.assert_called_once_with(boot_iso_path)
+
+ @mock.patch.object(ilo_deploy, '_get_boot_iso_object_name', spec_set=True,
+ autospec=True)
+ def test__clean_up_boot_iso_for_instance_no_boot_iso(
+ self, boot_object_name_mock):
+ ilo_deploy._clean_up_boot_iso_for_instance(self.node)
+ self.assertFalse(boot_object_name_mock.called)
+
+ @mock.patch.object(deploy_utils, 'check_for_missing_params', spec_set=True,
+ autospec=True)
+ def test__parse_driver_info(self, check_params_mock):
+ self.node.driver_info['ilo_deploy_iso'] = 'deploy-iso-uuid'
+ driver_info_expected = {'ilo_deploy_iso': 'deploy-iso-uuid'}
+ driver_info_actual = ilo_deploy._parse_driver_info(self.node)
+ error_msg = ("Error validating iLO virtual media deploy. Some"
+ " parameters were missing in node's driver_info")
+ check_params_mock.assert_called_once_with(driver_info_expected,
+ error_msg)
+ self.assertEqual(driver_info_expected, driver_info_actual)
+
+ @mock.patch.object(ilo_deploy, '_parse_driver_info', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'parse_instance_info', spec_set=True,
+ autospec=True)
+ def test__parse_deploy_info(self, instance_info_mock, driver_info_mock):
+ instance_info_mock.return_value = {'a': 'b'}
+ driver_info_mock.return_value = {'c': 'd'}
+ expected_info = {'a': 'b', 'c': 'd'}
+ actual_info = ilo_deploy._parse_deploy_info(self.node)
+ self.assertEqual(expected_info, actual_info)
+
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ def test__reboot_into(self, setup_vmedia_mock, set_boot_device_mock,
+ node_power_action_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ opts = {'a': 'b'}
+ ilo_deploy._reboot_into(task, 'iso', opts)
+ setup_vmedia_mock.assert_called_once_with(task, 'iso', opts)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.CDROM)
+ node_power_action_mock.assert_called_once_with(task, states.REBOOT)
+
+ @mock.patch.object(ilo_common, 'eject_vmedia_devices',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_reboot_into', spec_set=True,
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'build_agent_options', spec_set=True,
+ autospec=True)
+ def test__prepare_agent_vmedia_boot(self, build_options_mock,
+ reboot_into_mock, eject_mock):
+ deploy_opts = {'a': 'b'}
+ build_options_mock.return_value = deploy_opts
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_info['ilo_deploy_iso'] = 'deploy-iso-uuid'
+
+ ilo_deploy._prepare_agent_vmedia_boot(task)
+
+ eject_mock.assert_called_once_with(task)
+ build_options_mock.assert_called_once_with(task.node)
+ reboot_into_mock.assert_called_once_with(task,
+ 'deploy-iso-uuid',
+ deploy_opts)
+
+ @mock.patch.object(deploy_utils, 'is_secure_boot_requested', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'set_secure_boot_mode', spec_set=True,
+ autospec=True)
+ def test__update_secure_boot_mode_passed_true(self,
+ func_set_secure_boot_mode,
+ func_is_secure_boot_req):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ func_is_secure_boot_req.return_value = True
+ ilo_deploy._update_secure_boot_mode(task, True)
+ func_set_secure_boot_mode.assert_called_once_with(task, True)
+
+ @mock.patch.object(deploy_utils, 'is_secure_boot_requested', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'set_secure_boot_mode', spec_set=True,
+ autospec=True)
+ def test__update_secure_boot_mode_passed_False(self,
+ func_set_secure_boot_mode,
+ func_is_secure_boot_req):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ func_is_secure_boot_req.return_value = False
+ ilo_deploy._update_secure_boot_mode(task, False)
+ self.assertFalse(func_set_secure_boot_mode.called)
+
+ @mock.patch.object(ilo_common, 'set_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'get_secure_boot_mode', spec_set=True,
+ autospec=True)
+ def test__disable_secure_boot_false(self,
+ func_get_secure_boot_mode,
+ func_set_secure_boot_mode):
+ func_get_secure_boot_mode.return_value = False
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ returned_state = ilo_deploy._disable_secure_boot(task)
+ func_get_secure_boot_mode.assert_called_once_with(task)
+ self.assertFalse(func_set_secure_boot_mode.called)
+ self.assertFalse(returned_state)
+
+ @mock.patch.object(ilo_common, 'set_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'get_secure_boot_mode', spec_set=True,
+ autospec=True)
+ def test__disable_secure_boot_true(self,
+ func_get_secure_boot_mode,
+ func_set_secure_boot_mode):
+ func_get_secure_boot_mode.return_value = True
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ returned_state = ilo_deploy._disable_secure_boot(task)
+ func_get_secure_boot_mode.assert_called_once_with(task)
+ func_set_secure_boot_mode.assert_called_once_with(task, False)
+ self.assertTrue(returned_state)
+
+ @mock.patch.object(ilo_deploy.LOG, 'debug', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, 'exception', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_common, 'get_secure_boot_mode', spec_set=True,
+ autospec=True)
+ def test__disable_secure_boot_exception(self,
+ func_get_secure_boot_mode,
+ exception_mock,
+ mock_log):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
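+ # 'exception' is autospec-mocked above, so rebind IloOperationNotSupported
+ # to a real Exception class; the side_effect below is then expected to be
+ # caught inside _disable_secure_boot rather than propagate.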
+ exception_mock.IloOperationNotSupported = Exception
+ func_get_secure_boot_mode.side_effect = Exception
+ returned_state = ilo_deploy._disable_secure_boot(task)
+ func_get_secure_boot_mode.assert_called_once_with(task)
+ self.assertTrue(mock_log.called)
+ self.assertFalse(returned_state)
+
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_disable_secure_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test__prepare_node_for_deploy(self,
+ func_node_power_action,
+ func_disable_secure_boot,
+ func_update_boot_mode):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ func_disable_secure_boot.return_value = False
+ ilo_deploy._prepare_node_for_deploy(task)
+ func_node_power_action.assert_called_once_with(task,
+ states.POWER_OFF)
+ func_disable_secure_boot.assert_called_once_with(task)
+ func_update_boot_mode.assert_called_once_with(task)
+ bootmode = driver_utils.get_node_capability(task.node, "boot_mode")
+ self.assertIsNone(bootmode)
+
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_disable_secure_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test__prepare_node_for_deploy_sec_boot_on(self,
+ func_node_power_action,
+ func_disable_secure_boot,
+ func_update_boot_mode):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ func_disable_secure_boot.return_value = True
+ ilo_deploy._prepare_node_for_deploy(task)
+ func_node_power_action.assert_called_once_with(task,
+ states.POWER_OFF)
+ func_disable_secure_boot.assert_called_once_with(task)
+ self.assertFalse(func_update_boot_mode.called)
+ ret_boot_mode = task.node.instance_info['deploy_boot_mode']
+ self.assertEqual('uefi', ret_boot_mode)
+ bootmode = driver_utils.get_node_capability(task.node, "boot_mode")
+ self.assertIsNone(bootmode)
+
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_disable_secure_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test__prepare_node_for_deploy_inst_info(self,
+ func_node_power_action,
+ func_disable_secure_boot,
+ func_update_boot_mode):
+ instance_info = {'capabilities': '{"secure_boot": "true"}'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ func_disable_secure_boot.return_value = False
+ task.node.instance_info = instance_info
+ ilo_deploy._prepare_node_for_deploy(task)
+ func_node_power_action.assert_called_once_with(task,
+ states.POWER_OFF)
+ func_disable_secure_boot.assert_called_once_with(task)
+ func_update_boot_mode.assert_called_once_with(task)
+ bootmode = driver_utils.get_node_capability(task.node, "boot_mode")
+ self.assertIsNone(bootmode)
+ deploy_boot_mode = task.node.instance_info.get('deploy_boot_mode')
+ self.assertIsNone(deploy_boot_mode)
+
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_disable_secure_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test__prepare_node_for_deploy_sec_boot_on_inst_info(
+ self, func_node_power_action, func_disable_secure_boot,
+ func_update_boot_mode):
+ instance_info = {'capabilities': '{"secure_boot": "true"}'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ func_disable_secure_boot.return_value = True
+ task.node.instance_info = instance_info
+ ilo_deploy._prepare_node_for_deploy(task)
+ func_node_power_action.assert_called_once_with(task,
+ states.POWER_OFF)
+ func_disable_secure_boot.assert_called_once_with(task)
+ self.assertFalse(func_update_boot_mode.called)
+ bootmode = driver_utils.get_node_capability(task.node, "boot_mode")
+ self.assertIsNone(bootmode)
+ deploy_boot_mode = task.node.instance_info.get('deploy_boot_mode')
+ self.assertIsNone(deploy_boot_mode)
+
+ @mock.patch.object(ilo_deploy.LOG, 'warning', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso', spec_set=True,
+ autospec=True)
+ def test__recreate_and_populate_boot_iso_root_uuid_set(self,
+ get_boot_iso_mock,
+ log_mock):
+ driver_internal_info = {}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ driver_internal_info['root_uuid_or_disk_id'] = 'root-uuid'
+ task.node.driver_internal_info = driver_internal_info
+ r_uuid = task.node.driver_internal_info['root_uuid_or_disk_id']
+ get_boot_iso_mock.return_value = 'boot-uuid'
+ ilo_deploy._recreate_and_populate_ilo_boot_iso(task)
+ self.assertEqual(task.node.instance_info['ilo_boot_iso'],
+ 'boot-uuid')
+ get_boot_iso_mock.assert_called_once_with(task, r_uuid)
+ self.assertFalse(log_mock.called)
+
+ @mock.patch.object(ilo_deploy.LOG, 'warning', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso', spec_set=True,
+ autospec=True)
+ def test__recreate_and_populate_boot_iso_root_not_set(self,
+ get_boot_iso_mock,
+ log_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['ilo_boot_iso'] = 'boot-uuid-old-iso'
+ ilo_deploy._recreate_and_populate_ilo_boot_iso(task)
+ self.assertEqual(task.node.instance_info['ilo_boot_iso'],
+ 'boot-uuid-old-iso')
+ self.assertFalse(get_boot_iso_mock.called)
+ self.assertTrue(log_mock.called)
+
+ @mock.patch.object(ilo_deploy.LOG, 'warning',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso',
+ spec_set=True, autospec=True)
+ def test__recreate_and_populate_get_boot_iso_fails(self,
+ get_boot_iso_mock,
+ log_mock):
+ driver_internal_info = {}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+
+ driver_internal_info['boot_iso_created_in_web_server'] = True
+ driver_internal_info['root_uuid_or_disk_id'] = 'uuid'
+ task.node.instance_info['ilo_boot_iso'] = 'boot-uuid-old-iso'
+ task.node.driver_internal_info = driver_internal_info
+ task.node.save()
+ r_uuid = task.node.driver_internal_info.get('root_uuid_or_disk_id')
+ get_boot_iso_mock.side_effect = Exception
+ ilo_deploy._recreate_and_populate_ilo_boot_iso(task)
+ self.assertEqual(task.node.instance_info['ilo_boot_iso'],
+ 'boot-uuid-old-iso')
+ get_boot_iso_mock.assert_called_once_with(task, r_uuid)
+ self.assertTrue(log_mock.called)
+
+ @mock.patch.object(ilo_deploy.LOG, 'warning',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso',
+ spec_set=True, autospec=True)
+ def test__recreate_and_populate_get_boot_iso_none(self,
+ boot_iso_mock,
+ log_mock):
+ driver_internal_info = {}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ driver_internal_info['boot_iso_created_in_web_server'] = True
+ driver_internal_info['root_uuid_or_disk_id'] = 'uuid'
+ task.node.driver_internal_info = driver_internal_info
+ r_uuid = task.node.driver_internal_info.get('root_uuid_or_disk_id')
+ task.node.instance_info['ilo_boot_iso'] = 'boot-uuid-old-iso'
+ task.node.save()
+ boot_iso_mock.return_value = None
+ ilo_deploy._recreate_and_populate_ilo_boot_iso(task)
+ boot_iso_mock.assert_called_once_with(task, r_uuid)
+ self.assertEqual(task.node.instance_info['ilo_boot_iso'],
+ 'boot-uuid-old-iso')
+ self.assertTrue(log_mock.called)
+
+
+class IloVirtualMediaIscsiDeployTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloVirtualMediaIscsiDeployTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="iscsi_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='iscsi_ilo', driver_info=INFO_DICT)
+
+ @mock.patch.object(deploy_utils, 'validate_capabilities',
+ spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'validate_image_properties',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'validate', spec_set=True, autospec=True)
+ def _test_validate(self, validate_mock,
+ deploy_info_mock,
+ validate_prop_mock,
+ validate_capability_mock,
+ props_expected):
+ d_info = {'image_source': 'uuid'}
+ deploy_info_mock.return_value = d_info
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.validate(task)
+ validate_mock.assert_called_once_with(task)
+ deploy_info_mock.assert_called_once_with(task.node)
+ validate_prop_mock.assert_called_once_with(
+ task.context, d_info, props_expected)
+ validate_capability_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(deploy_utils, 'validate_image_properties',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'validate', spec_set=True, autospec=True)
+ def test_validate_invalid_boot_option(self,
+ validate_mock,
+ deploy_info_mock,
+ validate_prop_mock):
+ d_info = {'image_source': '733d1c44-a2ea-414b-aca7-69decf20d810'}
+ properties = {'capabilities': 'boot_mode:uefi,boot_option:foo'}
+ deploy_info_mock.return_value = d_info
+ props = ['kernel_id', 'ramdisk_id']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties = properties
+ exc = self.assertRaises(exception.InvalidParameterValue,
+ task.driver.deploy.validate,
+ task)
+ validate_mock.assert_called_once_with(task)
+ deploy_info_mock.assert_called_once_with(task.node)
+ validate_prop_mock.assert_called_once_with(task.context,
+ d_info, props)
+ self.assertIn('boot_option', str(exc))
+
+ @mock.patch.object(deploy_utils, 'validate_image_properties',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'validate', spec_set=True, autospec=True)
+ def test_validate_invalid_boot_mode(self,
+ validate_mock,
+ deploy_info_mock,
+ validate_prop_mock):
+ d_info = {'image_source': '733d1c44-a2ea-414b-aca7-69decf20d810'}
+ properties = {'capabilities': 'boot_mode:foo,boot_option:local'}
+ deploy_info_mock.return_value = d_info
+ props = ['kernel_id', 'ramdisk_id']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties = properties
+ exc = self.assertRaises(exception.InvalidParameterValue,
+ task.driver.deploy.validate,
+ task)
+ validate_mock.assert_called_once_with(task)
+ deploy_info_mock.assert_called_once_with(task.node)
+ validate_prop_mock.assert_called_once_with(task.context,
+ d_info, props)
+ self.assertIn('boot_mode', str(exc))
+
+ @mock.patch.object(service_utils, 'is_glance_image', spec_set=True,
+ autospec=True)
+ def test_validate_glance_partition_image(self, is_glance_image_mock):
+ is_glance_image_mock.return_value = True
+ self._test_validate(props_expected=['kernel_id', 'ramdisk_id'])
+
+ def test_validate_whole_disk_image(self):
+ self.node.driver_internal_info = {'is_whole_disk_image': True}
+ self.node.save()
+ self._test_validate(props_expected=[])
+
+ @mock.patch.object(service_utils, 'is_glance_image', spec_set=True,
+ autospec=True)
+ def test_validate_non_glance_partition_image(self, is_glance_image_mock):
+ is_glance_image_mock.return_value = False
+ self._test_validate(props_expected=['kernel', 'ramdisk'])
+
+ @mock.patch.object(ilo_common, 'eject_vmedia_devices',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_reboot_into', spec_set=True,
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'get_single_nic_with_vif_port_id',
+ spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'build_agent_options', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'build_deploy_ramdisk_options',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'check_image_size', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'cache_instance_image', spec_set=True,
+ autospec=True)
+ def _test_deploy(self,
+ cache_instance_image_mock,
+ check_image_size_mock,
+ build_opts_mock,
+ agent_options_mock,
+ get_nic_mock,
+ reboot_into_mock,
+ eject_mock,
+ ilo_boot_iso,
+ image_source
+ ):
+ instance_info = self.node.instance_info
+ instance_info['ilo_boot_iso'] = ilo_boot_iso
+ instance_info['image_source'] = image_source
+ self.node.instance_info = instance_info
+ self.node.save()
+
+ deploy_opts = {'a': 'b'}
+ agent_options_mock.return_value = {
+ 'ipa-api-url': 'http://1.2.3.4:6385'}
+ build_opts_mock.return_value = deploy_opts
+ get_nic_mock.return_value = '12:34:56:78:90:ab'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+
+ task.node.driver_info['ilo_deploy_iso'] = 'deploy-iso'
+ returned_state = task.driver.deploy.deploy(task)
+
+ eject_mock.assert_called_once_with(task)
+ cache_instance_image_mock.assert_called_once_with(task.context,
+ task.node)
+ check_image_size_mock.assert_called_once_with(task)
+ expected_ramdisk_opts = {'a': 'b', 'BOOTIF': '12:34:56:78:90:ab',
+ 'ipa-api-url': 'http://1.2.3.4:6385'}
+ build_opts_mock.assert_called_once_with(task.node)
+ get_nic_mock.assert_called_once_with(task)
+ reboot_into_mock.assert_called_once_with(task, 'deploy-iso',
+ expected_ramdisk_opts)
+
+ self.assertEqual(states.DEPLOYWAIT, returned_state)
+
+ def test_deploy_glance_image(self):
+ self._test_deploy(
+ ilo_boot_iso='swift:abcdef',
+ image_source='6b2f0c0c-79e8-4db6-842e-43c9764204af')
+ self.node.refresh()
+ self.assertNotIn('ilo_boot_iso', self.node.instance_info)
+
+ def test_deploy_not_a_glance_image(self):
+ self._test_deploy(
+ ilo_boot_iso='http://mybootiso',
+ image_source='http://myimage')
+ self.node.refresh()
+ self.assertEqual('http://mybootiso',
+ self.node.instance_info['ilo_boot_iso'])
+
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_tear_down(self,
+ node_power_action_mock,
+ update_secure_boot_mode_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ driver_internal_info = task.node.driver_internal_info
+ driver_internal_info['boot_iso_created_in_web_server'] = True
+ driver_internal_info['root_uuid_or_disk_id'] = 'uuid'
+ task.node.driver_internal_info = driver_internal_info
+ task.node.save()
+ returned_state = task.driver.deploy.tear_down(task)
+ node_power_action_mock.assert_called_once_with(task,
+ states.POWER_OFF)
+ update_secure_boot_mode_mock.assert_called_once_with(task, False)
+ self.assertEqual(states.DELETED, returned_state)
+ dinfo = task.node.driver_internal_info
+ self.assertNotIn('boot_iso_created_in_web_server', dinfo)
+ self.assertNotIn('root_uuid_or_disk_id', dinfo)
+
+ @mock.patch.object(ilo_deploy.LOG, 'warn', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, 'exception', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_tear_down_handle_exception(self,
+ node_power_action_mock,
+ update_secure_boot_mode_mock,
+ exception_mock,
+ mock_log):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ exception_mock.IloOperationNotSupported = Exception
+ update_secure_boot_mode_mock.side_effect = Exception
+ returned_state = task.driver.deploy.tear_down(task)
+ node_power_action_mock.assert_called_once_with(task,
+ states.POWER_OFF)
+ update_secure_boot_mode_mock.assert_called_once_with(task, False)
+ self.assertTrue(mock_log.called)
+ self.assertEqual(states.DELETED, returned_state)
+
+ @mock.patch.object(ilo_deploy, '_clean_up_boot_iso_for_instance',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'destroy_images', spec_set=True,
+ autospec=True)
+ def test_clean_up(self, destroy_images_mock, clean_up_boot_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.clean_up(task)
+ destroy_images_mock.assert_called_once_with(task.node.uuid)
+ clean_up_boot_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(ilo_deploy, '_clean_up_boot_iso_for_instance',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_common, 'destroy_floppy_image_from_web_server',
+ spec_set=True, autospec=True)
+ def test_clean_up_of_webserver_images(self, destroy_images_mock,
+ clean_up_boot_mock):
+ CONF.ilo.use_web_server_for_images = True
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.clean_up(task)
+ destroy_images_mock.assert_called_once_with(task.node)
+ clean_up_boot_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(ilo_deploy, '_prepare_node_for_deploy', spec_set=True,
+ autospec=True)
+ def test_prepare(self, func_prepare_node_for_deploy):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.prepare(task)
+ func_prepare_node_for_deploy.assert_called_once_with(task)
+
+ @mock.patch.object(ilo_deploy, '_prepare_node_for_deploy', spec_set=True,
+ autospec=True)
+ def test_prepare_active_node(self, func_prepare_node_for_deploy):
+ self.node.provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.prepare(task)
+ self.assertFalse(func_prepare_node_for_deploy.called)
+
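+ # The take_over tests below exercise CONF.ilo.use_web_server_for_images
+ # together with the driver_internal_info flag
+ # 'boot_iso_created_in_web_server' ("dif" in the test names): the boot ISO
+ # is recreated only when both are set.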
+ @mock.patch.object(ilo_deploy, '_recreate_and_populate_ilo_boot_iso',
+ spec_set=True, autospec=True)
+ def test_take_over_recreate_iso_config_and_dif_set(self, mock_recreate):
+ driver_internal_info = {}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ CONF.ilo.use_web_server_for_images = True
+ driver_internal_info['boot_iso_created_in_web_server'] = True
+ task.node.driver_internal_info = driver_internal_info
+ task.node.save()
+ task.driver.deploy.take_over(task)
+ mock_recreate.assert_called_once_with(task)
+
+ @mock.patch.object(ilo_deploy, '_recreate_and_populate_ilo_boot_iso',
+ spec_set=True, autospec=True)
+ def test_take_over_recreate_iso_config_set_and_dif_not_set(self,
+ mock_recreate):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ CONF.ilo.use_web_server_for_images = True
+ task.node.save()
+ task.driver.deploy.take_over(task)
+ self.assertFalse(mock_recreate.called)
+
+ @mock.patch.object(ilo_deploy, '_recreate_and_populate_ilo_boot_iso',
+ spec_set=True, autospec=True)
+ def test_take_over_recreate_iso_config_not_set(self, mock_recreate):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ CONF.ilo.use_web_server_for_images = False
+ task.node.save()
+ task.driver.deploy.take_over(task)
+ self.assertFalse(mock_recreate.called)
+
+
+class IloVirtualMediaAgentDeployTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloVirtualMediaAgentDeployTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="agent_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='agent_ilo', driver_info=INFO_DICT)
+
+ @mock.patch.object(deploy_utils, 'validate_capabilities',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self,
+ parse_driver_info_mock,
+ validate_capability_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.validate(task)
+ parse_driver_info_mock.assert_called_once_with(task.node)
+ validate_capability_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(ilo_deploy, '_prepare_agent_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_deploy(self, vmedia_boot_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ returned_state = task.driver.deploy.deploy(task)
+ vmedia_boot_mock.assert_called_once_with(task)
+ self.assertEqual(states.DEPLOYWAIT, returned_state)
+
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_tear_down(self,
+ node_power_action_mock,
+ update_secure_boot_mode_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ returned_state = task.driver.deploy.tear_down(task)
+ node_power_action_mock.assert_called_once_with(task,
+ states.POWER_OFF)
+ update_secure_boot_mode_mock.assert_called_once_with(task, False)
+ self.assertEqual(states.DELETED, returned_state)
+
+ @mock.patch.object(ilo_deploy.LOG, 'warn', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, 'exception', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_tear_down_handle_exception(self,
+ node_power_action_mock,
+ update_secure_boot_mode_mock,
+ exception_mock,
+ mock_log):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ exception_mock.IloOperationNotSupported = Exception
+ update_secure_boot_mode_mock.side_effect = Exception
+ returned_state = task.driver.deploy.tear_down(task)
+ node_power_action_mock.assert_called_once_with(task,
+ states.POWER_OFF)
+ update_secure_boot_mode_mock.assert_called_once_with(task, False)
+ self.assertTrue(mock_log.called)
+ self.assertEqual(states.DELETED, returned_state)
+
+ @mock.patch.object(ilo_deploy, '_prepare_node_for_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent, 'build_instance_info_for_deploy', spec_set=True,
+ autospec=True)
+ def test_prepare(self,
+ build_instance_info_mock,
+ func_prepare_node_for_deploy):
+ deploy_opts = {'a': 'b'}
+ build_instance_info_mock.return_value = deploy_opts
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.prepare(task)
+ self.assertEqual(deploy_opts, task.node.instance_info)
+ func_prepare_node_for_deploy.assert_called_once_with(task)
+
+ @mock.patch.object(ilo_deploy, '_prepare_node_for_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent, 'build_instance_info_for_deploy', spec_set=True,
+ autospec=True)
+ def test_prepare_active_node(self,
+ build_instance_info_mock,
+ func_prepare_node_for_deploy):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.ACTIVE
+ task.driver.deploy.prepare(task)
+ self.assertFalse(build_instance_info_mock.called)
+ self.assertFalse(func_prepare_node_for_deploy.called)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.delete_cleaning_ports',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.create_cleaning_ports',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_prepare_agent_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_prepare_cleaning(self, vmedia_boot_mock, create_port_mock,
+ delete_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ returned_state = task.driver.deploy.prepare_cleaning(task)
+ vmedia_boot_mock.assert_called_once_with(task)
+ self.assertEqual(states.CLEANWAIT, returned_state)
+ create_port_mock.assert_called_once_with(mock.ANY, task)
+ delete_mock.assert_called_once_with(mock.ANY, task)
+ self.assertEqual(task.node.driver_internal_info.get(
+ 'agent_erase_devices_iterations'), 1)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.delete_cleaning_ports',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_tear_down_cleaning(self, power_mock, delete_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ task.driver.deploy.tear_down_cleaning(task)
+ power_mock.assert_called_once_with(task, states.POWER_OFF)
+ delete_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(deploy_utils, 'agent_execute_clean_step', spec_set=True,
+ autospec=True)
+ def test_execute_clean_step(self, execute_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ task.driver.deploy.execute_clean_step(task, 'fake-step')
+ execute_mock.assert_called_once_with(task, 'fake-step')
+
+ @mock.patch.object(deploy_utils, 'agent_get_clean_steps', spec_set=True,
+ autospec=True)
+ def test_get_clean_steps_with_conf_option(self, get_clean_step_mock):
+ self.config(clean_priority_erase_devices=20, group='ilo')
+ get_clean_step_mock.return_value = [{
+ 'step': 'erase_devices',
+ 'priority': 10,
+ 'interface': 'deploy',
+ 'reboot_requested': False
+ }]
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ step = task.driver.deploy.get_clean_steps(task)
+ get_clean_step_mock.assert_called_once_with(task)
+ self.assertEqual(step[0].get('priority'),
+ CONF.ilo.clean_priority_erase_devices)
+
+ @mock.patch.object(deploy_utils, 'agent_get_clean_steps', spec_set=True,
+ autospec=True)
+ def test_get_clean_steps_erase_devices_disable(self, get_clean_step_mock):
+ self.config(clean_priority_erase_devices=0, group='ilo')
+ get_clean_step_mock.return_value = [{
+ 'step': 'erase_devices',
+ 'priority': 10,
+ 'interface': 'deploy',
+ 'reboot_requested': False
+ }]
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ step = task.driver.deploy.get_clean_steps(task)
+ get_clean_step_mock.assert_called_once_with(task)
+ self.assertEqual(step[0].get('priority'),
+ CONF.ilo.clean_priority_erase_devices)
+
+ @mock.patch.object(deploy_utils, 'agent_get_clean_steps', spec_set=True,
+ autospec=True)
+ def test_get_clean_steps_without_conf_option(self, get_clean_step_mock):
+ get_clean_step_mock.return_value = [{
+ 'step': 'erase_devices',
+ 'priority': 10,
+ 'interface': 'deploy',
+ 'reboot_requested': False
+ }]
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ step = task.driver.deploy.get_clean_steps(task)
+ get_clean_step_mock.assert_called_once_with(task)
+ self.assertEqual(step[0].get('priority'), 10)
+
+
+class VendorPassthruTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(VendorPassthruTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="iscsi_ilo")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='iscsi_ilo',
+ driver_info=INFO_DICT)
+
+ @mock.patch.object(iscsi_deploy, 'get_deploy_info', spec_set=True,
+ autospec=True)
+ def test_validate_pass_deploy_info(self, get_deploy_info_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = ilo_deploy.VendorPassthru()
+ vendor.validate(task, method='pass_deploy_info', foo='bar')
+ get_deploy_info_mock.assert_called_once_with(task.node,
+ foo='bar')
+
+ @mock.patch.object(iscsi_deploy, 'validate_pass_bootloader_info_input',
+ spec_set=True, autospec=True)
+ def test_validate_pass_bootloader_install_info(self,
+ validate_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ kwargs = {'address': '1.2.3.4', 'key': 'fake-key',
+ 'status': 'SUCCEEDED', 'error': ''}
+ task.driver.vendor.validate(
+ task, method='pass_bootloader_install_info', **kwargs)
+ validate_mock.assert_called_once_with(task, kwargs)
+
+ @mock.patch.object(iscsi_deploy, 'get_deploy_info', spec_set=True,
+ autospec=True)
+ def test_validate_heartbeat(self, get_deploy_info_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = ilo_deploy.VendorPassthru()
+ vendor.validate(task, method='heartbeat', foo='bar')
+ self.assertFalse(get_deploy_info_mock.called)
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso', spec_set=True,
+ autospec=True)
+ def test__configure_vmedia_boot_with_boot_iso(
+ self, get_boot_iso_mock, setup_vmedia_mock, set_boot_device_mock):
+ root_uuid = {'root uuid': 'root_uuid'}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ get_boot_iso_mock.return_value = 'boot.iso'
+
+ task.driver.vendor._configure_vmedia_boot(
+ task, root_uuid)
+
+ get_boot_iso_mock.assert_called_once_with(
+ task, root_uuid)
+ setup_vmedia_mock.assert_called_once_with(
+ task, 'boot.iso')
+ set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.CDROM, persistent=True)
+ self.assertEqual('boot.iso',
+ task.node.instance_info['ilo_boot_iso'])
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso', spec_set=True,
+ autospec=True)
+ def test__configure_vmedia_boot_without_boot_iso(
+ self, get_boot_iso_mock, setup_vmedia_mock, set_boot_device_mock):
+ root_uuid = {'root uuid': 'root_uuid'}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ get_boot_iso_mock.return_value = None
+
+ task.driver.vendor._configure_vmedia_boot(
+ task, root_uuid)
+
+ get_boot_iso_mock.assert_called_once_with(
+ task, root_uuid)
+ self.assertFalse(setup_vmedia_mock.called)
+ self.assertFalse(set_boot_device_mock.called)
+
+ @mock.patch.object(iscsi_deploy, 'validate_bootloader_install_status',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'finish_deploy', spec_set=True,
+ autospec=True)
+ def test_pass_bootloader_install_info(self, finish_deploy_mock,
+ validate_input_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.pass_bootloader_install_info(task, **kwargs)
+ finish_deploy_mock.assert_called_once_with(task, '123456')
+ validate_input_mock.assert_called_once_with(task, kwargs)
+
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_good(self, cleanup_vmedia_boot_mock,
+ continue_deploy_mock, get_boot_iso_mock,
+ setup_vmedia_mock, set_boot_device_mock,
+ func_update_boot_mode,
+ func_update_secure_boot_mode,
+ notify_ramdisk_to_proceed_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': 'root-uuid'}
+ get_boot_iso_mock.return_value = 'boot-iso'
+
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.pass_deploy_info(task, **kwargs)
+
+ cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ continue_deploy_mock.assert_called_once_with(task, **kwargs)
+ get_boot_iso_mock.assert_called_once_with(task, 'root-uuid')
+ setup_vmedia_mock.assert_called_once_with(task, 'boot-iso')
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.CDROM,
+ persistent=True)
+ func_update_boot_mode.assert_called_once_with(task)
+ func_update_secure_boot_mode.assert_called_once_with(task, True)
+
+ self.assertEqual('boot-iso',
+ task.node.instance_info['ilo_boot_iso'])
+ info = task.node.driver_internal_info['root_uuid_or_disk_id']
+ self.assertEqual('root-uuid', info)
+ notify_ramdisk_to_proceed_mock.assert_called_once_with('123456')
+
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_bad(self, cleanup_vmedia_boot_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+
+ self.node.provision_state = states.AVAILABLE
+ self.node.target_provision_state = states.NOSTATE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = task.driver.vendor
+ self.assertRaises(exception.InvalidState,
+ vendor.pass_deploy_info,
+ task, **kwargs)
+ self.assertEqual(states.AVAILABLE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+ self.assertFalse(cleanup_vmedia_boot_mock.called)
+
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_get_boot_iso', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_create_boot_iso_fail(
+ self, get_iso_mock, cleanup_vmedia_boot_mock, continue_deploy_mock,
+ node_power_mock, update_boot_mode_mock,
+ update_secure_boot_mode_mock):
+ kwargs = {'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': 'root-uuid'}
+ get_iso_mock.side_effect = iter([exception.ImageCreationFailed(
+ image_type='iso', error="error")])
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.pass_deploy_info(task, **kwargs)
+
+ cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ update_boot_mode_mock.assert_called_once_with(task)
+ update_secure_boot_mode_mock.assert_called_once_with(task, True)
+ continue_deploy_mock.assert_called_once_with(task, **kwargs)
+ get_iso_mock.assert_called_once_with(task, 'root-uuid')
+ node_power_mock.assert_called_once_with(task, states.POWER_OFF)
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ self.assertIsNotNone(task.node.last_error)
+
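+ # With boot_option:local the deploy is completed later by the ramdisk: the
+ # boot device is set to DISK, the node remains in DEPLOYWAIT, and
+ # finish_deploy is not called here (pass_bootloader_install_info finishes it).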
+ @mock.patch.object(iscsi_deploy, 'finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_boot_option_local(
+ self, cleanup_vmedia_boot_mock, continue_deploy_mock,
+ func_update_boot_mode, func_update_secure_boot_mode,
+ set_boot_device_mock, notify_ramdisk_to_proceed_mock,
+ finish_deploy_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': '<some-uuid>'}
+
+ self.node.instance_info = {'capabilities': '{"boot_option": "local"}'}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = task.driver.vendor
+ vendor.pass_deploy_info(task, **kwargs)
+
+ cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ continue_deploy_mock.assert_called_once_with(task, **kwargs)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.DISK,
+ persistent=True)
+ func_update_boot_mode.assert_called_once_with(task)
+ func_update_secure_boot_mode.assert_called_once_with(task, True)
+ notify_ramdisk_to_proceed_mock.assert_called_once_with('123456')
+ self.assertEqual(states.DEPLOYWAIT, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ self.assertFalse(finish_deploy_mock.called)
+
+ @mock.patch.object(iscsi_deploy, 'finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def _test_pass_deploy_info_whole_disk_image(
+ self, cleanup_vmedia_boot_mock, continue_deploy_mock,
+ func_update_boot_mode, func_update_secure_boot_mode,
+ set_boot_device_mock, finish_deploy_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': '<some-uuid>'}
+
+ self.node.driver_internal_info = {'is_whole_disk_image': True}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = task.driver.vendor
+ vendor.pass_deploy_info(task, **kwargs)
+
+ cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ continue_deploy_mock.assert_called_once_with(task, **kwargs)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.DISK,
+ persistent=True)
+ func_update_boot_mode.assert_called_once_with(task)
+ func_update_secure_boot_mode.assert_called_once_with(task, True)
+ finish_deploy_mock.assert_called_once_with(task, '123456')
+
+ def test_pass_deploy_info_whole_disk_image_local(self):
+ self.node.instance_info = {'capabilities': '{"boot_option": "local"}'}
+ self.node.save()
+ self._test_pass_deploy_info_whole_disk_image()
+
+ def test_pass_deploy_info_whole_disk_image(self):
+ self._test_pass_deploy_info_whole_disk_image()
+
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy.VendorPassthru, '_configure_vmedia_boot',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_continue_deploy_netboot(self, cleanup_vmedia_boot_mock,
+ do_agent_iscsi_deploy_mock,
+ configure_vmedia_boot_mock,
+ reboot_and_finish_deploy_mock,
+ boot_mode_cap_mock,
+ update_secure_boot_mock):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.DEPLOYING
+ self.node.save()
+ do_agent_iscsi_deploy_mock.return_value = {
+ 'root uuid': 'some-root-uuid'}
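+ # Netboot flow: clean up vmedia, let the agent perform the iSCSI deploy,
+ # then reconfigure vmedia boot with the returned root uuid.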
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.continue_deploy(task)
+ cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(task,
+ mock.ANY)
+ configure_vmedia_boot_mock.assert_called_once_with(
+ mock.ANY, task, 'some-root-uuid')
+ boot_mode_cap_mock.assert_called_once_with(task)
+ update_secure_boot_mock.assert_called_once_with(task, True)
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ mock.ANY, task)
+
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'configure_local_boot', spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_continue_deploy_localboot(self, cleanup_vmedia_boot_mock,
+ do_agent_iscsi_deploy_mock,
+ configure_local_boot_mock,
+ reboot_and_finish_deploy_mock,
+ boot_mode_cap_mock,
+ update_secure_boot_mock):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.DEPLOYING
+ self.node.instance_info = {
+ 'capabilities': {'boot_option': 'local'}}
+ self.node.save()
+ do_agent_iscsi_deploy_mock.return_value = {
+ 'root uuid': 'some-root-uuid'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.continue_deploy(task)
+ cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(task,
+ mock.ANY)
+ configure_local_boot_mock.assert_called_once_with(
+ mock.ANY, task, root_uuid='some-root-uuid',
+ efi_system_part_uuid=None)
+ boot_mode_cap_mock.assert_called_once_with(task)
+ update_secure_boot_mock.assert_called_once_with(task, True)
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ mock.ANY, task)
+
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'configure_local_boot', spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_continue_deploy_whole_disk_image(
+ self, cleanup_vmedia_boot_mock, do_agent_iscsi_deploy_mock,
+ configure_local_boot_mock, reboot_and_finish_deploy_mock,
+ boot_mode_cap_mock, update_secure_boot_mock):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.DEPLOYING
+ self.node.driver_internal_info = {'is_whole_disk_image': True}
+ self.node.save()
+ do_agent_iscsi_deploy_mock.return_value = {
+ 'disk identifier': 'some-disk-id'}
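+ # Whole-disk images return only a disk identifier, so local boot is
+ # configured with root_uuid=None.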
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.continue_deploy(task)
+ cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(task,
+ mock.ANY)
+ configure_local_boot_mock.assert_called_once_with(
+ mock.ANY, task, root_uuid=None, efi_system_part_uuid=None)
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ mock.ANY, task)
+
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'configure_local_boot', spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_continue_deploy_localboot_uefi(self, cleanup_vmedia_boot_mock,
+ do_agent_iscsi_deploy_mock,
+ configure_local_boot_mock,
+ reboot_and_finish_deploy_mock,
+ boot_mode_cap_mock,
+ update_secure_boot_mock):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.DEPLOYING
+ self.node.instance_info = {
+ 'capabilities': {'boot_option': 'local'}}
+ self.node.save()
+ do_agent_iscsi_deploy_mock.return_value = {
+ 'root uuid': 'some-root-uuid',
+ 'efi system partition uuid': 'efi-system-part-uuid'}
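+ # UEFI local boot: the EFI system partition uuid reported by the agent
+ # is passed on to configure_local_boot.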
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.continue_deploy(task)
+ cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(task,
+ mock.ANY)
+ configure_local_boot_mock.assert_called_once_with(
+ mock.ANY, task, root_uuid='some-root-uuid',
+ efi_system_part_uuid='efi-system-part-uuid')
+ boot_mode_cap_mock.assert_called_once_with(task)
+ update_secure_boot_mock.assert_called_once_with(task, True)
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ mock.ANY, task)
+
+ @mock.patch.object(ilo_deploy, '_reboot_into', spec_set=True,
+ autospec=True)
+ def test_boot_into_iso(self, reboot_into_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.boot_into_iso(task, boot_iso_href='foo')
+ reboot_into_mock.assert_called_once_with(task, 'foo',
+ ramdisk_options=None)
+
+ @mock.patch.object(ilo_deploy.VendorPassthru, '_validate_boot_into_iso',
+ spec_set=True, autospec=True)
+ def test_validate_boot_into_iso(self, validate_boot_into_iso_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = ilo_deploy.VendorPassthru()
+ vendor.validate(task, method='boot_into_iso', foo='bar')
+ validate_boot_into_iso_mock.assert_called_once_with(
+ vendor, task, {'foo': 'bar'})
+
+ def test__validate_boot_into_iso_invalid_state(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.AVAILABLE
+ self.assertRaises(
+ exception.InvalidStateRequested,
+ task.driver.vendor._validate_boot_into_iso,
+ task, {})
+
+ def test__validate_boot_into_iso_missing_boot_iso_href(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.MANAGEABLE
+ self.assertRaises(
+ exception.MissingParameterValue,
+ task.driver.vendor._validate_boot_into_iso,
+ task, {})
+
+ @mock.patch.object(deploy_utils, 'validate_image_properties',
+ spec_set=True, autospec=True)
+ def test__validate_boot_into_iso_manage(self, validate_image_prop_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ info = {'boot_iso_href': 'foo'}
+ task.node.provision_state = states.MANAGEABLE
+ task.driver.vendor._validate_boot_into_iso(
+ task, info)
+ validate_image_prop_mock.assert_called_once_with(
+ task.context, {'image_source': 'foo'}, [])
+
+ @mock.patch.object(deploy_utils, 'validate_image_properties',
+ spec_set=True, autospec=True)
+ def test__validate_boot_into_iso_maintenance(
+ self, validate_image_prop_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ info = {'boot_iso_href': 'foo'}
+ task.node.maintenance = True
+ task.driver.vendor._validate_boot_into_iso(
+ task, info)
+ validate_image_prop_mock.assert_called_once_with(
+ task.context, {'image_source': 'foo'}, [])
+
+
+class IloPXEDeployTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloPXEDeployTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="pxe_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='pxe_ilo', driver_info=INFO_DICT)
+
+ @mock.patch.object(iscsi_deploy.ISCSIDeploy, 'validate', spec_set=True,
+ autospec=True)
+ def test_validate(self, pxe_validate_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.validate(task)
+ pxe_validate_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(iscsi_deploy.ISCSIDeploy, 'prepare', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_prepare_node_for_deploy', spec_set=True,
+ autospec=True)
+ def test_prepare(self,
+ prepare_node_mock,
+ pxe_prepare_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['capabilities'] = 'boot_mode:uefi'
+ task.driver.deploy.prepare(task)
+ prepare_node_mock.assert_called_once_with(task)
+ pxe_prepare_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(iscsi_deploy.ISCSIDeploy, 'prepare', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_prepare_node_for_deploy', spec_set=True,
+ autospec=True)
+ def test_prepare_active_node(self,
+ prepare_node_mock,
+ pxe_prepare_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.ACTIVE
+ task.node.properties['capabilities'] = 'boot_mode:uefi'
+ task.driver.deploy.prepare(task)
+ self.assertFalse(prepare_node_mock.called)
+ pxe_prepare_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(iscsi_deploy.ISCSIDeploy, 'prepare', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_prepare_node_for_deploy', spec_set=True,
+ autospec=True)
+ def test_prepare_uefi_whole_disk_image_fail(self,
+ prepare_node_for_deploy_mock,
+ pxe_prepare_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['capabilities'] = 'boot_mode:uefi'
+ task.node.driver_internal_info['is_whole_disk_image'] = True
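+ # A whole-disk image combined with UEFI boot mode is rejected before
+ # the PXE prepare step runs.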
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.deploy.prepare, task)
+ prepare_node_for_deploy_mock.assert_called_once_with(task)
+ self.assertFalse(pxe_prepare_mock.called)
+
+ @mock.patch.object(iscsi_deploy.ISCSIDeploy, 'deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ def test_deploy_boot_mode_exists(self, set_persistent_mock,
+ pxe_deploy_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.deploy(task)
+ set_persistent_mock.assert_called_with(task, boot_devices.PXE)
+ pxe_deploy_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(iscsi_deploy.ISCSIDeploy, 'tear_down',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_tear_down(self, node_power_action_mock,
+ update_secure_boot_mode_mock, pxe_tear_down_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ pxe_tear_down_mock.return_value = states.DELETED
+ returned_state = task.driver.deploy.tear_down(task)
+ node_power_action_mock.assert_called_once_with(task,
+ states.POWER_OFF)
+ update_secure_boot_mode_mock.assert_called_once_with(task, False)
+ pxe_tear_down_mock.assert_called_once_with(mock.ANY, task)
+ self.assertEqual(states.DELETED, returned_state)
+
+ @mock.patch.object(ilo_deploy.LOG, 'warn', spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy.ISCSIDeploy, 'tear_down',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, 'exception', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_tear_down_handle_exception(self, node_power_action_mock,
+ update_secure_boot_mode_mock,
+ exception_mock, pxe_tear_down_mock,
+ mock_log):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ pxe_tear_down_mock.return_value = states.DELETED
+ exception_mock.IloOperationNotSupported = Exception
+ update_secure_boot_mode_mock.side_effect = Exception
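+ # A failure to update the secure boot mode is logged but does not
+ # abort tear down.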
+ returned_state = task.driver.deploy.tear_down(task)
+ update_secure_boot_mode_mock.assert_called_once_with(task, False)
+ pxe_tear_down_mock.assert_called_once_with(mock.ANY, task)
+ node_power_action_mock.assert_called_once_with(task,
+ states.POWER_OFF)
+ self.assertTrue(mock_log.called)
+ self.assertEqual(states.DELETED, returned_state)
+
+
+class IloPXEVendorPassthruTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloPXEVendorPassthruTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="pxe_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='pxe_ilo', driver_info=INFO_DICT)
+
+ def test_vendor_routes(self):
+ expected = ['heartbeat', 'pass_deploy_info',
+ 'pass_bootloader_install_info']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ vendor_routes = task.driver.vendor.vendor_routes
+ self.assertIsInstance(vendor_routes, dict)
+ self.assertEqual(sorted(expected), sorted(list(vendor_routes)))
+
+ def test_driver_routes(self):
+ expected = ['lookup']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ driver_routes = task.driver.vendor.driver_routes
+ self.assertIsInstance(driver_routes, dict)
+ self.assertEqual(sorted(expected), sorted(list(driver_routes)))
+
+ @mock.patch.object(iscsi_deploy.VendorPassthru, 'pass_deploy_info',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ def test_vendorpassthru_pass_deploy_info(self, set_boot_device_mock,
+ func_update_boot_mode,
+ func_update_secure_boot_mode,
+ pxe_vendorpassthru_mock):
+ kwargs = {'address': '123456'}
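+ # pass_deploy_info for pxe_ilo keeps PXE as the persistent boot device
+ # and refreshes the boot and secure boot modes in addition to
+ # delegating to the iSCSI vendor passthru.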
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.DEPLOYWAIT
+ task.node.target_provision_state = states.ACTIVE
+ task.driver.vendor.pass_deploy_info(task, **kwargs)
+ set_boot_device_mock.assert_called_with(task, boot_devices.PXE,
+ persistent=True)
+ func_update_boot_mode.assert_called_once_with(task)
+ func_update_secure_boot_mode.assert_called_once_with(task, True)
+ pxe_vendorpassthru_mock.assert_called_once_with(
+ mock.ANY, task, **kwargs)
+
+ @mock.patch.object(iscsi_deploy.VendorPassthru, 'continue_deploy',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', autospec=True)
+ def test_vendorpassthru_continue_deploy(self,
+ func_update_boot_mode,
+ func_update_secure_boot_mode,
+ pxe_vendorpassthru_mock):
+ kwargs = {'address': '123456'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.DEPLOYWAIT
+ task.node.target_provision_state = states.ACTIVE
+ task.driver.vendor.continue_deploy(task, **kwargs)
+ func_update_boot_mode.assert_called_once_with(task)
+ func_update_secure_boot_mode.assert_called_once_with(task, True)
+ pxe_vendorpassthru_mock.assert_called_once_with(
+ mock.ANY, task, **kwargs)
+
+
+class IloVirtualMediaAgentVendorInterfaceTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloVirtualMediaAgentVendorInterfaceTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="agent_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='agent_ilo', driver_info=INFO_DICT)
+
+ @mock.patch.object(agent.AgentVendorInterface, 'reboot_to_instance',
+ spec_set=True, autospec=True)
+ @mock.patch.object(agent.AgentVendorInterface, 'check_deploy_success',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ def test_reboot_to_instance(self, func_update_secure_boot_mode,
+ func_update_boot_mode,
+ check_deploy_success_mock,
+ agent_reboot_to_instance_mock):
+ kwargs = {'address': '123456'}
+ check_deploy_success_mock.return_value = None
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.reboot_to_instance(task, **kwargs)
+ check_deploy_success_mock.assert_called_once_with(
+ mock.ANY, task.node)
+ func_update_boot_mode.assert_called_once_with(task)
+ func_update_secure_boot_mode.assert_called_once_with(task, True)
+ agent_reboot_to_instance_mock.assert_called_once_with(
+ mock.ANY, task, **kwargs)
+
+ @mock.patch.object(agent.AgentVendorInterface, 'reboot_to_instance',
+ spec_set=True, autospec=True)
+ @mock.patch.object(agent.AgentVendorInterface, 'check_deploy_success',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_common, 'update_boot_mode', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_deploy, '_update_secure_boot_mode', spec_set=True,
+ autospec=True)
+ def test_reboot_to_instance_deploy_fail(self, func_update_secure_boot_mode,
+ func_update_boot_mode,
+ check_deploy_success_mock,
+ agent_reboot_to_instance_mock):
+ kwargs = {'address': '123456'}
+ check_deploy_success_mock.return_value = "Error"
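+ # When the agent reports a deploy error, the boot mode and secure boot
+ # updates are skipped.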
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.reboot_to_instance(task, **kwargs)
+ check_deploy_success_mock.assert_called_once_with(
+ mock.ANY, task.node)
+ self.assertFalse(func_update_boot_mode.called)
+ self.assertFalse(func_update_secure_boot_mode.called)
+ agent_reboot_to_instance_mock.assert_called_once_with(
+ mock.ANY, task, **kwargs)
diff --git a/ironic/tests/unit/drivers/ilo/test_inspect.py b/ironic/tests/unit/drivers/ilo/test_inspect.py
new file mode 100644
index 000000000..94ddbdd45
--- /dev/null
+++ b/ironic/tests/unit/drivers/ilo/test_inspect.py
@@ -0,0 +1,365 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+"""Test class for Management Interface used by iLO modules."""
+
+import mock
+from oslo_config import cfg
+import six
+
+from ironic.common import exception
+from ironic.common import states
+from ironic.common import utils
+from ironic.conductor import task_manager
+from ironic.conductor import utils as conductor_utils
+from ironic.db import api as dbapi
+from ironic.drivers.modules.ilo import common as ilo_common
+from ironic.drivers.modules.ilo import inspect as ilo_inspect
+from ironic.drivers.modules.ilo import power as ilo_power
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+INFO_DICT = db_utils.get_test_ilo_info()
+CONF = cfg.CONF
+
+
+class IloInspectTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloInspectTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_ilo', driver_info=INFO_DICT)
+
+ def test_get_properties(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ properties = ilo_common.REQUIRED_PROPERTIES.copy()
+ self.assertEqual(properties,
+ task.driver.inspect.get_properties())
+
+ @mock.patch.object(ilo_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self, driver_info_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.inspect.validate(task)
+ driver_info_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_inspect, '_create_ports_if_not_exist',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_inspect_essential_ok(self, get_ilo_object_mock,
+ power_mock,
+ get_essential_mock,
+ create_port_mock,
+ get_capabilities_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64'}
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
+ capabilities = ''
+ result = {'properties': properties, 'macs': macs}
+ get_essential_mock.return_value = result
+ get_capabilities_mock.return_value = capabilities
+ power_mock.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.inspect.inspect_hardware(task)
+ self.assertEqual(properties, task.node.properties)
+ power_mock.assert_called_once_with(mock.ANY, task)
+ get_essential_mock.assert_called_once_with(task.node,
+ ilo_object_mock)
+ get_capabilities_mock.assert_called_once_with(task.node,
+ ilo_object_mock)
+ create_port_mock.assert_called_once_with(task.node, macs)
+
+ @mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_inspect, '_create_ports_if_not_exist',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(conductor_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_inspect_essential_ok_power_off(self, get_ilo_object_mock,
+ power_mock,
+ set_power_mock,
+ get_essential_mock,
+ create_port_mock,
+ get_capabilities_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64'}
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
+ capabilities = ''
+ result = {'properties': properties, 'macs': macs}
+ get_essential_mock.return_value = result
+ get_capabilities_mock.return_value = capabilities
+ power_mock.return_value = states.POWER_OFF
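+ # A powered-off node is powered on before the iLO is queried for
+ # essential properties.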
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.inspect.inspect_hardware(task)
+ self.assertEqual(properties, task.node.properties)
+ power_mock.assert_called_once_with(mock.ANY, task)
+ set_power_mock.assert_any_call(task, states.POWER_ON)
+ get_essential_mock.assert_called_once_with(task.node,
+ ilo_object_mock)
+ get_capabilities_mock.assert_called_once_with(task.node,
+ ilo_object_mock)
+ create_port_mock.assert_called_once_with(task.node, macs)
+
+ @mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_inspect, '_create_ports_if_not_exist',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_inspect_essential_capabilities_ok(self, get_ilo_object_mock,
+ power_mock,
+ get_essential_mock,
+ create_port_mock,
+ get_capabilities_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64'}
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
+ capability_str = 'BootMode:uefi'
+ capabilities = {'BootMode': 'uefi'}
+ result = {'properties': properties, 'macs': macs}
+ get_essential_mock.return_value = result
+ get_capabilities_mock.return_value = capabilities
+ power_mock.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.inspect.inspect_hardware(task)
+ expected_properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64',
+ 'capabilities': capability_str}
+ self.assertEqual(expected_properties, task.node.properties)
+ power_mock.assert_called_once_with(mock.ANY, task)
+ get_essential_mock.assert_called_once_with(task.node,
+ ilo_object_mock)
+ get_capabilities_mock.assert_called_once_with(task.node,
+ ilo_object_mock)
+ create_port_mock.assert_called_once_with(task.node, macs)
+
+ @mock.patch.object(ilo_inspect, '_get_capabilities', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_inspect, '_create_ports_if_not_exist',
+ spec_set=True, autospec=True)
+ @mock.patch.object(ilo_inspect, '_get_essential_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_power.IloPower, 'get_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_inspect_essential_capabilities_exist_ok(self, get_ilo_object_mock,
+ power_mock,
+ get_essential_mock,
+ create_port_mock,
+ get_capabilities_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64',
+ 'somekey': 'somevalue'}
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
+ result = {'properties': properties, 'macs': macs}
+ capabilities = {'BootMode': 'uefi'}
+ get_essential_mock.return_value = result
+ get_capabilities_mock.return_value = capabilities
+ power_mock.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties = {'capabilities': 'foo:bar'}
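+ # Discovered capabilities are merged with the node's pre-existing
+ # capabilities instead of replacing them.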
+ expected_capabilities = ('BootMode:uefi,'
+ 'foo:bar')
+ set1 = set(expected_capabilities.split(','))
+ task.driver.inspect.inspect_hardware(task)
+ end_capabilities = task.node.properties['capabilities']
+ set2 = set(end_capabilities.split(','))
+ self.assertEqual(set1, set2)
+ expected_properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64',
+ 'capabilities': end_capabilities}
+ power_mock.assert_called_once_with(mock.ANY, task)
+ self.assertEqual(task.node.properties, expected_properties)
+ get_essential_mock.assert_called_once_with(task.node,
+ ilo_object_mock)
+ get_capabilities_mock.assert_called_once_with(task.node,
+ ilo_object_mock)
+ create_port_mock.assert_called_once_with(task.node, macs)
+
+
+class TestInspectPrivateMethods(db_base.DbTestCase):
+
+ def setUp(self):
+ super(TestInspectPrivateMethods, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_ilo', driver_info=INFO_DICT)
+
+ @mock.patch.object(ilo_inspect.LOG, 'info', spec_set=True, autospec=True)
+ @mock.patch.object(dbapi, 'get_instance', spec_set=True, autospec=True)
+ def test__create_ports_if_not_exist(self, instance_mock, log_mock):
+ db_obj = instance_mock.return_value
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
+ node_id = self.node.id
+ port_dict1 = {'address': 'aa:aa:aa:aa:aa:aa', 'node_id': node_id}
+ port_dict2 = {'address': 'bb:bb:bb:bb:bb:bb', 'node_id': node_id}
+ ilo_inspect._create_ports_if_not_exist(self.node, macs)
+ instance_mock.assert_called_once_with()
+ self.assertTrue(log_mock.called)
+ db_obj.create_port.assert_any_call(port_dict1)
+ db_obj.create_port.assert_any_call(port_dict2)
+
+ @mock.patch.object(ilo_inspect.LOG, 'warn', spec_set=True, autospec=True)
+ @mock.patch.object(dbapi, 'get_instance', spec_set=True, autospec=True)
+ def test__create_ports_if_not_exist_mac_exception(self,
+ instance_mock,
+ log_mock):
+ dbapi_mock = instance_mock.return_value
+ dbapi_mock.create_port.side_effect = exception.MACAlreadyExists('f')
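+ # A MAC that already exists is not fatal during inspection; the
+ # duplicate is only logged.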
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
+ ilo_inspect._create_ports_if_not_exist(self.node, macs)
+ instance_mock.assert_called_once_with()
+ self.assertTrue(log_mock.called)
+
+ def test__get_essential_properties_ok(self):
+ ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64'}
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
+ result = {'properties': properties, 'macs': macs}
+ ilo_mock.get_essential_properties.return_value = result
+ actual_result = ilo_inspect._get_essential_properties(self.node,
+ ilo_mock)
+ self.assertEqual(result, actual_result)
+
+ def test__get_essential_properties_fail(self):
+ ilo_mock = mock.MagicMock(
+ spec=['get_additional_capabilities', 'get_essential_properties'])
+ # Missing key: cpu_arch
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1'}
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa', 'Port 2': 'bb:bb:bb:bb:bb:bb'}
+ result = {'properties': properties, 'macs': macs}
+ ilo_mock.get_essential_properties.return_value = result
+ result = self.assertRaises(exception.HardwareInspectionFailure,
+ ilo_inspect._get_essential_properties,
+ self.node,
+ ilo_mock)
+ self.assertEqual(
+ six.text_type(result),
+ ("Failed to inspect hardware. Reason: Server didn't return the "
+ "key(s): cpu_arch"))
+
+ def test__get_essential_properties_fail_invalid_format(self):
+ ilo_mock = mock.MagicMock(
+ spec=['get_additional_capabilities', 'get_essential_properties'])
+ # Not a dict
+ properties = ['memory_mb', '512', 'local_gb', '10',
+ 'cpus', '1']
+ macs = ['aa:aa:aa:aa:aa:aa', 'bb:bb:bb:bb:bb:bb']
+ capabilities = ''
+ result = {'properties': properties, 'macs': macs}
+ ilo_mock.get_essential_properties.return_value = result
+ ilo_mock.get_additional_capabilities.return_value = capabilities
+ self.assertRaises(exception.HardwareInspectionFailure,
+ ilo_inspect._get_essential_properties,
+ self.node, ilo_mock)
+
+ def test__get_essential_properties_fail_mac_invalid_format(self):
+ ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64'}
+ # Not a dict
+ macs = 'aa:aa:aa:aa:aa:aa'
+ result = {'properties': properties, 'macs': macs}
+ ilo_mock.get_essential_properties.return_value = result
+ self.assertRaises(exception.HardwareInspectionFailure,
+ ilo_inspect._get_essential_properties,
+ self.node, ilo_mock)
+
+ def test__get_essential_properties_hardware_port_empty(self):
+ ilo_mock = mock.MagicMock(
+ spec=['get_additional_capabilities', 'get_essential_properties'])
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64'}
+ # Not a dictionary
+ macs = None
+ result = {'properties': properties, 'macs': macs}
+ capabilities = ''
+ ilo_mock.get_essential_properties.return_value = result
+ ilo_mock.get_additional_capabilities.return_value = capabilities
+ self.assertRaises(exception.HardwareInspectionFailure,
+ ilo_inspect._get_essential_properties,
+ self.node, ilo_mock)
+
+ def test__get_essential_properties_hardware_port_not_dict(self):
+ ilo_mock = mock.MagicMock(spec=['get_essential_properties'])
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1', 'cpu_arch': 'x86_64'}
+ # Not a dict
+ macs = 'aa:bb:cc:dd:ee:ff'
+ result = {'properties': properties, 'macs': macs}
+ ilo_mock.get_essential_properties.return_value = result
+ result = self.assertRaises(
+ exception.HardwareInspectionFailure,
+ ilo_inspect._get_essential_properties, self.node, ilo_mock)
+
+ @mock.patch.object(utils, 'get_updated_capabilities', spec_set=True,
+ autospec=True)
+ def test__get_capabilities_ok(self, capability_mock):
+ ilo_mock = mock.MagicMock(spec=['get_server_capabilities'])
+ capabilities = {'ilo_firmware_version': 'xyz'}
+ ilo_mock.get_server_capabilities.return_value = capabilities
+ cap = ilo_inspect._get_capabilities(self.node, ilo_mock)
+ self.assertEqual(cap, capabilities)
+
+ def test__validate_ok(self):
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '2', 'cpu_arch': 'x86_arch'}
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa'}
+ data = {'properties': properties, 'macs': macs}
+ valid_keys = ilo_inspect.IloInspect.ESSENTIAL_PROPERTIES
+ ilo_inspect._validate(self.node, data)
+ self.assertEqual(sorted(set(properties)), sorted(valid_keys))
+
+ def test__validate_essential_keys_fail_missing_key(self):
+ properties = {'memory_mb': '512', 'local_gb': '10',
+ 'cpus': '1'}
+ macs = {'Port 1': 'aa:aa:aa:aa:aa:aa'}
+ data = {'properties': properties, 'macs': macs}
+ self.assertRaises(exception.HardwareInspectionFailure,
+ ilo_inspect._validate, self.node, data)
diff --git a/ironic/tests/unit/drivers/ilo/test_management.py b/ironic/tests/unit/drivers/ilo/test_management.py
new file mode 100644
index 000000000..a3a9b7910
--- /dev/null
+++ b/ironic/tests/unit/drivers/ilo/test_management.py
@@ -0,0 +1,298 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for Management Interface used by iLO modules."""
+
+import mock
+from oslo_config import cfg
+from oslo_utils import importutils
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.ilo import common as ilo_common
+from ironic.drivers.modules.ilo import management as ilo_management
+from ironic.drivers.modules import ipmitool
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+ilo_error = importutils.try_import('proliantutils.exception')
+
+INFO_DICT = db_utils.get_test_ilo_info()
+CONF = cfg.CONF
+
+
+class IloManagementTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloManagementTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_ilo")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_ilo', driver_info=INFO_DICT)
+
+ def test_get_properties(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ expected = ilo_management.MANAGEMENT_PROPERTIES
+ self.assertEqual(expected,
+ task.driver.management.get_properties())
+
+ @mock.patch.object(ilo_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self, driver_info_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.validate(task)
+ driver_info_mock.assert_called_once_with(task.node)
+
+ def test_get_supported_boot_devices(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ expected = [boot_devices.PXE, boot_devices.DISK,
+ boot_devices.CDROM]
+ self.assertEqual(
+ sorted(expected),
+ sorted(task.driver.management.
+ get_supported_boot_devices(task)))
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_boot_device_next_boot(self, get_ilo_object_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ ilo_object_mock.get_one_time_boot.return_value = 'CDROM'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ expected_device = boot_devices.CDROM
+ expected_response = {'boot_device': expected_device,
+ 'persistent': False}
+ self.assertEqual(expected_response,
+ task.driver.management.get_boot_device(task))
+ ilo_object_mock.get_one_time_boot.assert_called_once_with()
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_boot_device_persistent(self, get_ilo_object_mock):
+ ilo_mock = get_ilo_object_mock.return_value
+ ilo_mock.get_one_time_boot.return_value = 'Normal'
+ ilo_mock.get_persistent_boot_device.return_value = 'NETWORK'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ expected_device = boot_devices.PXE
+ expected_response = {'boot_device': expected_device,
+ 'persistent': True}
+ self.assertEqual(expected_response,
+ task.driver.management.get_boot_device(task))
+ ilo_mock.get_one_time_boot.assert_called_once_with()
+ ilo_mock.get_persistent_boot_device.assert_called_once_with()
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_boot_device_fail(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.get_one_time_boot.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ task.driver.management.get_boot_device,
+ task)
+ ilo_mock_object.get_one_time_boot.assert_called_once_with()
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_get_boot_device_persistent_fail(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ ilo_mock_object.get_one_time_boot.return_value = 'Normal'
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.get_persistent_boot_device.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ task.driver.management.get_boot_device,
+ task)
+ ilo_mock_object.get_one_time_boot.assert_called_once_with()
+ ilo_mock_object.get_persistent_boot_device.assert_called_once_with()
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_boot_device_ok(self, get_ilo_object_mock):
+ ilo_object_mock = get_ilo_object_mock.return_value
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.set_boot_device(task, boot_devices.CDROM,
+ False)
+ get_ilo_object_mock.assert_called_once_with(task.node)
+ ilo_object_mock.set_one_time_boot.assert_called_once_with('CDROM')
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_boot_device_persistent_true(self, get_ilo_object_mock):
+ ilo_mock = get_ilo_object_mock.return_value
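+ # boot_devices.PXE with persistent=True maps to iLO's persistent boot
+ # device 'NETWORK'.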
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.set_boot_device(task, boot_devices.PXE,
+ True)
+ get_ilo_object_mock.assert_called_once_with(task.node)
+ ilo_mock.update_persistent_boot.assert_called_once_with(
+ ['NETWORK'])
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_boot_device_fail(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.set_one_time_boot.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ task.driver.management.set_boot_device,
+ task, boot_devices.PXE)
+ ilo_mock_object.set_one_time_boot.assert_called_once_with('NETWORK')
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test_set_boot_device_persistent_fail(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.update_persistent_boot.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ task.driver.management.set_boot_device,
+ task, boot_devices.PXE, True)
+ ilo_mock_object.update_persistent_boot.assert_called_once_with(
+ ['NETWORK'])
+
+ def test_set_boot_device_invalid_device(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.management.set_boot_device,
+ task, 'fake-device')
+
+ @mock.patch.object(ilo_common, 'update_ipmi_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ipmitool.IPMIManagement, 'get_sensors_data',
+ spec_set=True, autospec=True)
+ def test_get_sensor_data(self, get_sensors_data_mock, update_ipmi_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.get_sensors_data(task)
+ update_ipmi_mock.assert_called_once_with(task)
+ get_sensors_data_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test__execute_ilo_clean_step_ok(self, get_ilo_object_mock):
+ ilo_mock = get_ilo_object_mock.return_value
+ clean_step_mock = getattr(ilo_mock, 'fake-step')
+ ilo_management._execute_ilo_clean_step(
+ self.node, 'fake-step', 'args', kwarg='kwarg')
+ clean_step_mock.assert_called_once_with('args', kwarg='kwarg')
+
+ @mock.patch.object(ilo_management, 'LOG', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test__execute_ilo_clean_step_not_supported(self, get_ilo_object_mock,
+ log_mock):
+ ilo_mock = get_ilo_object_mock.return_value
+ exc = ilo_error.IloCommandNotSupportedError("error")
+ clean_step_mock = getattr(ilo_mock, 'fake-step')
+ clean_step_mock.side_effect = exc
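+ # An unsupported clean step is logged as a warning and skipped rather
+ # than raised as a cleaning failure.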
+ ilo_management._execute_ilo_clean_step(
+ self.node, 'fake-step', 'args', kwarg='kwarg')
+ clean_step_mock.assert_called_once_with('args', kwarg='kwarg')
+ self.assertTrue(log_mock.warn.called)
+
+ @mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True,
+ autospec=True)
+ def test__execute_ilo_clean_step_fail(self, get_ilo_object_mock):
+ ilo_mock = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError("error")
+ clean_step_mock = getattr(ilo_mock, 'fake-step')
+ clean_step_mock.side_effect = exc
+ self.assertRaises(exception.NodeCleaningFailure,
+ ilo_management._execute_ilo_clean_step,
+ self.node, 'fake-step', 'args', kwarg='kwarg')
+ clean_step_mock.assert_called_once_with('args', kwarg='kwarg')
+
+ @mock.patch.object(ilo_management, '_execute_ilo_clean_step',
+ spec_set=True, autospec=True)
+ def test_reset_ilo(self, clean_step_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.reset_ilo(task)
+ clean_step_mock.assert_called_once_with(task.node, 'reset_ilo')
+
+ @mock.patch.object(ilo_management, '_execute_ilo_clean_step',
+ spec_set=True, autospec=True)
+ def test_reset_ilo_credential_ok(self, clean_step_mock):
+ info = self.node.driver_info
+ info['ilo_change_password'] = "fake-password"
+ self.node.driver_info = info
+ self.node.save()
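+ # The one-time 'ilo_change_password' value is consumed and stored as
+ # the new 'ilo_password'.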
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.reset_ilo_credential(task)
+ clean_step_mock.assert_called_once_with(
+ task.node, 'reset_ilo_credential', 'fake-password')
+ self.assertIsNone(
+ task.node.driver_info.get('ilo_change_password'))
+ self.assertEqual(task.node.driver_info['ilo_password'],
+ 'fake-password')
+
+ @mock.patch.object(ilo_management, 'LOG', spec_set=True, autospec=True)
+ @mock.patch.object(ilo_management, '_execute_ilo_clean_step',
+ spec_set=True, autospec=True)
+ def test_reset_ilo_credential_no_password(self, clean_step_mock,
+ log_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.reset_ilo_credential(task)
+ self.assertFalse(clean_step_mock.called)
+ self.assertTrue(log_mock.info.called)
+
+ @mock.patch.object(ilo_management, '_execute_ilo_clean_step',
+ spec_set=True, autospec=True)
+ def test_reset_bios_to_default(self, clean_step_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.reset_bios_to_default(task)
+ clean_step_mock.assert_called_once_with(task.node,
+ 'reset_bios_to_default')
+
+ @mock.patch.object(ilo_management, '_execute_ilo_clean_step',
+ spec_set=True, autospec=True)
+ def test_reset_secure_boot_keys_to_default(self, clean_step_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.reset_secure_boot_keys_to_default(task)
+ clean_step_mock.assert_called_once_with(task.node,
+ 'reset_secure_boot_keys')
+
+ @mock.patch.object(ilo_management, '_execute_ilo_clean_step',
+ spec_set=True, autospec=True)
+ def test_clear_secure_boot_keys(self, clean_step_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.clear_secure_boot_keys(task)
+ clean_step_mock.assert_called_once_with(task.node,
+ 'clear_secure_boot_keys')
diff --git a/ironic/tests/unit/drivers/ilo/test_power.py b/ironic/tests/unit/drivers/ilo/test_power.py
new file mode 100644
index 000000000..13fade795
--- /dev/null
+++ b/ironic/tests/unit/drivers/ilo/test_power.py
@@ -0,0 +1,231 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for IloPower module."""
+
+import mock
+from oslo_config import cfg
+from oslo_utils import importutils
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules.ilo import common as ilo_common
+from ironic.drivers.modules.ilo import power as ilo_power
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+ilo_error = importutils.try_import('proliantutils.exception')
+
+INFO_DICT = db_utils.get_test_ilo_info()
+CONF = cfg.CONF
+
+
+@mock.patch.object(ilo_common, 'get_ilo_object', spec_set=True, autospec=True)
+class IloPowerInternalMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloPowerInternalMethodsTestCase, self).setUp()
+ driver_info = INFO_DICT
+ mgr_utils.mock_the_extension_manager(driver="fake_ilo")
+ self.node = db_utils.create_test_node(
+ driver='fake_ilo',
+ driver_info=driver_info,
+ instance_uuid='instance_uuid_123')
+ CONF.set_override('power_retry', 2, 'ilo')
+ CONF.set_override('power_wait', 0, 'ilo')
+
+ def test__get_power_state(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ ilo_mock_object.get_host_power_status.return_value = 'ON'
+
+ self.assertEqual(
+ states.POWER_ON, ilo_power._get_power_state(self.node))
+
+ ilo_mock_object.get_host_power_status.return_value = 'OFF'
+ self.assertEqual(
+ states.POWER_OFF, ilo_power._get_power_state(self.node))
+
+ ilo_mock_object.get_host_power_status.return_value = 'ERROR'
+ self.assertEqual(states.ERROR, ilo_power._get_power_state(self.node))
+
+ def test__get_power_state_fail(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.get_host_power_status.side_effect = exc
+
+ self.assertRaises(exception.IloOperationError,
+ ilo_power._get_power_state,
+ self.node)
+ ilo_mock_object.get_host_power_status.assert_called_once_with()
+
+ def test__set_power_state_invalid_state(self, get_ilo_object_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ ilo_power._set_power_state,
+ task,
+ states.ERROR)
+
+ def test__set_power_state_reboot_fail(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ exc = ilo_error.IloError('error')
+ ilo_mock_object.reset_server.side_effect = exc
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.IloOperationError,
+ ilo_power._set_power_state,
+ task,
+ states.REBOOT)
+ ilo_mock_object.reset_server.assert_called_once_with()
+
+ def test__set_power_state_reboot_ok(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ ilo_mock_object.get_host_power_status.side_effect = ['ON', 'OFF', 'ON']
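+ # Simulate the power status readings seen during a successful reboot:
+ # ON -> OFF -> ON.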
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ ilo_power._set_power_state(task, states.REBOOT)
+
+ ilo_mock_object.reset_server.assert_called_once_with()
+
+ def test__set_power_state_off_fail(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ ilo_mock_object.get_host_power_status.return_value = 'ON'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ ilo_power._set_power_state,
+ task,
+ states.POWER_OFF)
+
+ ilo_mock_object.get_host_power_status.assert_called_with()
+ ilo_mock_object.hold_pwr_btn.assert_called_once_with()
+
+ def test__set_power_state_on_ok(self, get_ilo_object_mock):
+ ilo_mock_object = get_ilo_object_mock.return_value
+ ilo_mock_object.get_host_power_status.side_effect = ['OFF', 'ON']
+
+ target_state = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ ilo_power._set_power_state(task, target_state)
+ ilo_mock_object.get_host_power_status.assert_called_with()
+ ilo_mock_object.set_host_power.assert_called_once_with('ON')
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ def test__attach_boot_iso_if_needed(
+ self, setup_vmedia_mock, set_boot_device_mock,
+ get_ilo_object_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.provision_state = states.ACTIVE
+ task.node.instance_info['ilo_boot_iso'] = 'boot-iso'
+ ilo_power._attach_boot_iso_if_needed(task)
+ setup_vmedia_mock.assert_called_once_with(task, 'boot-iso')
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.CDROM)
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_common, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ def test__attach_boot_iso_if_needed_on_rebuild(
+ self, setup_vmedia_mock, set_boot_device_mock,
+ get_ilo_object_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.provision_state = states.DEPLOYING
+ task.node.instance_info['ilo_boot_iso'] = 'boot-iso'
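+ # While the node is deploying (e.g. a rebuild), the boot ISO must not
+ # be re-attached.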
+ ilo_power._attach_boot_iso_if_needed(task)
+ self.assertFalse(setup_vmedia_mock.called)
+ self.assertFalse(set_boot_device_mock.called)
+
+
+class IloPowerTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IloPowerTestCase, self).setUp()
+ driver_info = INFO_DICT
+ mgr_utils.mock_the_extension_manager(driver="fake_ilo")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_ilo',
+ driver_info=driver_info)
+
+ def test_get_properties(self):
+ expected = ilo_common.COMMON_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.power.get_properties())
+
+ @mock.patch.object(ilo_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.power.validate(task)
+ mock_drvinfo.assert_called_once_with(task.node)
+
+ @mock.patch.object(ilo_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate_fail(self, mock_drvinfo):
+ side_effect = iter([exception.InvalidParameterValue("Invalid Input")])
+ mock_drvinfo.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.validate,
+ task)
+
+ @mock.patch.object(ilo_power, '_get_power_state', spec_set=True,
+ autospec=True)
+ def test_get_power_state(self, mock_get_power):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_get_power.return_value = states.POWER_ON
+ self.assertEqual(states.POWER_ON,
+ task.driver.power.get_power_state(task))
+ mock_get_power.assert_called_once_with(task.node)
+
+ @mock.patch.object(ilo_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ def test_set_power_state(self, mock_set_power):
+ mock_set_power.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+ mock_set_power.assert_called_once_with(task, states.POWER_ON)
+
+ @mock.patch.object(ilo_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(ilo_power, '_get_power_state', spec_set=True,
+ autospec=True)
+ def test_reboot(self, mock_get_power, mock_set_power):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_get_power.return_value = states.POWER_ON
+ mock_set_power.return_value = states.POWER_ON
+ task.driver.power.reboot(task)
+ mock_get_power.assert_called_once_with(task.node)
+ mock_set_power.assert_called_once_with(task, states.REBOOT)
diff --git a/ironic/tests/unit/drivers/ipxe_config.template b/ironic/tests/unit/drivers/ipxe_config.template
new file mode 100644
index 000000000..bc803d4a7
--- /dev/null
+++ b/ironic/tests/unit/drivers/ipxe_config.template
@@ -0,0 +1,21 @@
+#!ipxe
+
+dhcp
+
+goto deploy
+
+:deploy
+kernel http://1.2.3.4:1234/deploy_kernel selinux=0 disk=cciss/c0d0,sda,hda,vda iscsi_target_iqn=iqn-1be26c0b-03f2-4d2e-ae87-c02d7f33c123 deployment_id=1be26c0b-03f2-4d2e-ae87-c02d7f33c123 deployment_key=0123456789ABCDEFGHIJKLMNOPQRSTUV ironic_api_url=http://192.168.122.184:6385 troubleshoot=0 text test_param boot_option=netboot ip=${ip}:${next-server}:${gateway}:${netmask} BOOTIF=${mac} root_device=vendor=fake,size=123 ipa-api-url=http://192.168.122.184:6385 ipa-driver-name=pxe_ssh coreos.configdrive=0
+
+initrd http://1.2.3.4:1234/deploy_ramdisk
+boot
+
+:boot_partition
+kernel http://1.2.3.4:1234/kernel root={{ ROOT }} ro text test_param
+initrd http://1.2.3.4:1234/ramdisk
+boot
+
+:boot_whole_disk
+kernel chain.c32
+append mbr:{{ DISK_IDENTIFIER }}
+boot
diff --git a/ironic/tests/unit/drivers/irmc/__init__.py b/ironic/tests/unit/drivers/irmc/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/drivers/irmc/__init__.py
diff --git a/ironic/tests/unit/drivers/irmc/fake_sensors_data_ng.xml b/ironic/tests/unit/drivers/irmc/fake_sensors_data_ng.xml
new file mode 100644
index 000000000..c8788427b
--- /dev/null
+++ b/ironic/tests/unit/drivers/irmc/fake_sensors_data_ng.xml
@@ -0,0 +1,156 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="Report.xslt"?>
+<Root Schema="2" Version="7.65F" OS="iRMC S4" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <System>
+ <SensorDataRecords Schema="1">
+ <SDR Nr="1" RecordID="1" RecordType="1" RecordTypeName="Full SDR" Version="1.5" Length="64">
+ <Data Size="64">
+ <Decoded>
+ <Key>
+ <SensorOwner Type="IPMB">
+ <ID>20</ID>
+ <LUN>0</LUN>
+ <Channel>0</Channel>
+ </SensorOwner>
+ <SensorNumber>1</SensorNumber>
+ </Key>
+ <Entity>
+ <ID>55</ID>
+ <Instance>0</Instance>
+ <Name>Ambient</Name>
+ </Entity>
+ <Sensor>
+ <Type>1</Type>
+ <!-- <TypeName>Temperature</TypeName> -->
+ <BaseUnit>1</BaseUnit>
+ <ModifierUnit>0</ModifierUnit>
+ <BaseUnitName>degree C</BaseUnitName>
+ <ModifierUnitName>unspecified</ModifierUnitName>
+ <Thresholds>
+ <UpperCritical>
+ <Raw>168</Raw>
+ <Normalized>42</Normalized>
+ </UpperCritical>
+ <LowerCritical>
+ <Raw>4</Raw>
+ <Normalized>1</Normalized>
+ </LowerCritical>
+ <UpperNonCritical>
+ <Raw>148</Raw>
+ <Normalized>37</Normalized>
+ </UpperNonCritical>
+ <LowerNonCritical>
+ <Raw>24</Raw>
+ <Normalized>6</Normalized>
+ </LowerNonCritical>
+ </Thresholds>
+ </Sensor>
+ </Decoded>
+ </Data>
+ </SDR>
+ <SDR Nr="2" RecordID="2" RecordType="1" RecordTypeName="Full SDR" Version="1.5" Length="64">
+ <Data Size="64">
+ <Decoded>
+ <Key>
+ <SensorOwner Type="IPMB">
+ <ID>20</ID>
+ <LUN>0</LUN>
+ <Channel>0</Channel>
+ </SensorOwner>
+ <SensorNumber>2</SensorNumber>
+ </Key>
+ <Entity>
+ <ID>7</ID>
+ <Instance>0</Instance>
+ <Name>Systemboard 1</Name>
+ </Entity>
+ <Sensor>
+ <!-- <Type>1</Type> -->
+ <TypeName>Temperature</TypeName>
+ <BaseUnit>1</BaseUnit>
+ <ModifierUnit>0</ModifierUnit>
+ <BaseUnitName>degree C</BaseUnitName>
+ <ModifierUnitName>unspecified</ModifierUnitName>
+ <Thresholds>
+ <UpperCritical>
+ <Raw>80</Raw>
+ <Normalized>80</Normalized>
+ </UpperCritical>
+ <UpperNonCritical>
+ <Raw>75</Raw>
+ <Normalized>75</Normalized>
+ </UpperNonCritical>
+ </Thresholds>
+ </Sensor>
+ </Decoded>
+ </Data>
+ </SDR>
+ <SDR Nr="34" RecordID="34" RecordType="1" RecordTypeName="Full SDR" Version="1.5" Length="64">
+ <Data Size="64">
+ <Decoded>
+ <Key>
+ <SensorOwner Type="IPMB">
+ <ID>20</ID>
+ <LUN>0</LUN>
+ <Channel>0</Channel>
+ </SensorOwner>
+ <SensorNumber>35</SensorNumber>
+ </Key>
+ <Entity>
+ <ID>29</ID>
+ <Instance>0</Instance>
+ <!-- <Name>FAN1 SYS</Name> -->
+ </Entity>
+ <Sensor>
+ <Type>4</Type>
+ <TypeName>Fan</TypeName>
+ <BaseUnit>18</BaseUnit>
+ <ModifierUnit>0</ModifierUnit>
+ <BaseUnitName>RPM</BaseUnitName>
+ <ModifierUnitName>unspecified</ModifierUnitName>
+ <Thresholds>
+ <LowerCritical>
+ <Raw>10</Raw>
+ <Normalized>600</Normalized>
+ </LowerCritical>
+ </Thresholds>
+ </Sensor>
+ </Decoded>
+ </Data>
+ </SDR>
+ <SDR Nr="35" RecordID="35" RecordType="1" RecordTypeName="Full SDR" Version="1.5" Length="64">
+ <Data Size="64">
+ <Decoded>
+ <Key>
+ <SensorOwner Type="IPMB">
+ <ID>20</ID>
+ <LUN>0</LUN>
+ <Channel>0</Channel>
+ </SensorOwner>
+ <SensorNumber>36</SensorNumber>
+ </Key>
+ <Entity>
+ <!-- <ID>29</ID> -->
+ <Instance>1</Instance>
+ <Name>FAN2 SYS</Name>
+ </Entity>
+ <Sensor>
+ <Type>4</Type>
+ <TypeName>Fan</TypeName>
+ <BaseUnit>18</BaseUnit>
+ <ModifierUnit>0</ModifierUnit>
+ <BaseUnitName>RPM</BaseUnitName>
+ <ModifierUnitName>unspecified</ModifierUnitName>
+ <Thresholds>
+ <LowerCritical>
+ <Raw>10</Raw>
+ <Normalized>600</Normalized>
+ </LowerCritical>
+ </Thresholds>
+ </Sensor>
+ </Decoded>
+ </Data>
+ </SDR>
+ </SensorDataRecords>
+ </System>
+</Root>
diff --git a/ironic/tests/unit/drivers/irmc/fake_sensors_data_ok.xml b/ironic/tests/unit/drivers/irmc/fake_sensors_data_ok.xml
new file mode 100644
index 000000000..fb8edba68
--- /dev/null
+++ b/ironic/tests/unit/drivers/irmc/fake_sensors_data_ok.xml
@@ -0,0 +1,156 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="Report.xslt"?>
+<Root Schema="2" Version="7.65F" OS="iRMC S4" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <System>
+ <SensorDataRecords Schema="1">
+ <SDR Nr="1" RecordID="1" RecordType="1" RecordTypeName="Full SDR" Version="1.5" Length="64">
+ <Data Size="64">
+ <Decoded>
+ <Key>
+ <SensorOwner Type="IPMB">
+ <ID>20</ID>
+ <LUN>0</LUN>
+ <Channel>0</Channel>
+ </SensorOwner>
+ <SensorNumber>1</SensorNumber>
+ </Key>
+ <Entity>
+ <ID>55</ID>
+ <Instance>0</Instance>
+ <Name>Ambient</Name>
+ </Entity>
+ <Sensor>
+ <Type>1</Type>
+ <TypeName>Temperature</TypeName>
+ <BaseUnit>1</BaseUnit>
+ <ModifierUnit>0</ModifierUnit>
+ <BaseUnitName>degree C</BaseUnitName>
+ <ModifierUnitName>unspecified</ModifierUnitName>
+ <Thresholds>
+ <UpperCritical>
+ <Raw>168</Raw>
+ <Normalized>42</Normalized>
+ </UpperCritical>
+ <LowerCritical>
+ <Raw>4</Raw>
+ <Normalized>1</Normalized>
+ </LowerCritical>
+ <UpperNonCritical>
+ <Raw>148</Raw>
+ <Normalized>37</Normalized>
+ </UpperNonCritical>
+ <LowerNonCritical>
+ <Raw>24</Raw>
+ <Normalized>6</Normalized>
+ </LowerNonCritical>
+ </Thresholds>
+ </Sensor>
+ </Decoded>
+ </Data>
+ </SDR>
+ <SDR Nr="2" RecordID="2" RecordType="1" RecordTypeName="Full SDR" Version="1.5" Length="64">
+ <Data Size="64">
+ <Decoded>
+ <Key>
+ <SensorOwner Type="IPMB">
+ <ID>20</ID>
+ <LUN>0</LUN>
+ <Channel>0</Channel>
+ </SensorOwner>
+ <SensorNumber>2</SensorNumber>
+ </Key>
+ <Entity>
+ <ID>7</ID>
+ <Instance>0</Instance>
+ <Name>Systemboard 1</Name>
+ </Entity>
+ <Sensor>
+ <Type>1</Type>
+ <TypeName>Temperature</TypeName>
+ <BaseUnit>1</BaseUnit>
+ <ModifierUnit>0</ModifierUnit>
+ <BaseUnitName>degree C</BaseUnitName>
+ <ModifierUnitName>unspecified</ModifierUnitName>
+ <Thresholds>
+ <UpperCritical>
+ <Raw>80</Raw>
+ <Normalized>80</Normalized>
+ </UpperCritical>
+ <UpperNonCritical>
+ <Raw>75</Raw>
+ <Normalized>75</Normalized>
+ </UpperNonCritical>
+ </Thresholds>
+ </Sensor>
+ </Decoded>
+ </Data>
+ </SDR>
+ <SDR Nr="34" RecordID="34" RecordType="1" RecordTypeName="Full SDR" Version="1.5" Length="64">
+ <Data Size="64">
+ <Decoded>
+ <Key>
+ <SensorOwner Type="IPMB">
+ <ID>20</ID>
+ <LUN>0</LUN>
+ <Channel>0</Channel>
+ </SensorOwner>
+ <SensorNumber>35</SensorNumber>
+ </Key>
+ <Entity>
+ <ID>29</ID>
+ <Instance>0</Instance>
+ <Name>FAN1 SYS</Name>
+ </Entity>
+ <Sensor>
+ <Type>4</Type>
+ <TypeName>Fan</TypeName>
+ <BaseUnit>18</BaseUnit>
+ <ModifierUnit>0</ModifierUnit>
+ <BaseUnitName>RPM</BaseUnitName>
+ <ModifierUnitName>unspecified</ModifierUnitName>
+ <Thresholds>
+ <LowerCritical>
+ <Raw>10</Raw>
+ <Normalized>600</Normalized>
+ </LowerCritical>
+ </Thresholds>
+ </Sensor>
+ </Decoded>
+ </Data>
+ </SDR>
+ <SDR Nr="35" RecordID="35" RecordType="1" RecordTypeName="Full SDR" Version="1.5" Length="64">
+ <Data Size="64">
+ <Decoded>
+ <Key>
+ <SensorOwner Type="IPMB">
+ <ID>20</ID>
+ <LUN>0</LUN>
+ <Channel>0</Channel>
+ </SensorOwner>
+ <SensorNumber>36</SensorNumber>
+ </Key>
+ <Entity>
+ <ID>29</ID>
+ <Instance>1</Instance>
+ <Name>FAN2 SYS</Name>
+ </Entity>
+ <Sensor>
+ <Type>4</Type>
+ <TypeName>Fan</TypeName>
+ <BaseUnit>18</BaseUnit>
+ <ModifierUnit>0</ModifierUnit>
+ <!-- <BaseUnitName>RPM</BaseUnitName> -->
+ <ModifierUnitName>unspecified</ModifierUnitName>
+ <Thresholds>
+ <LowerCritical>
+ <Raw>10</Raw>
+ <Normalized></Normalized>
+ </LowerCritical>
+ </Thresholds>
+ </Sensor>
+ </Decoded>
+ </Data>
+ </SDR>
+ </SensorDataRecords>
+ </System>
+</Root>
diff --git a/ironic/tests/unit/drivers/irmc/test_common.py b/ironic/tests/unit/drivers/irmc/test_common.py
new file mode 100644
index 000000000..0876e76d0
--- /dev/null
+++ b/ironic/tests/unit/drivers/irmc/test_common.py
@@ -0,0 +1,168 @@
+# Copyright 2015 FUJITSU LIMITED
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for common methods used by iRMC modules.
+"""
+
+import mock
+
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.irmc import common as irmc_common
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers import third_party_driver_mock_specs \
+ as mock_specs
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+class IRMCValidateParametersTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IRMCValidateParametersTestCase, self).setUp()
+ self.node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_irmc',
+ driver_info=db_utils.get_test_irmc_info())
+
+ def test_parse_driver_info(self):
+ info = irmc_common.parse_driver_info(self.node)
+
+ self.assertIsNotNone(info.get('irmc_address'))
+ self.assertIsNotNone(info.get('irmc_username'))
+ self.assertIsNotNone(info.get('irmc_password'))
+ self.assertIsNotNone(info.get('irmc_client_timeout'))
+ self.assertIsNotNone(info.get('irmc_port'))
+ self.assertIsNotNone(info.get('irmc_auth_method'))
+ self.assertIsNotNone(info.get('irmc_sensor_method'))
+
+ def test_parse_driver_option_default(self):
+ self.node.driver_info = {
+ "irmc_address": "1.2.3.4",
+ "irmc_username": "admin0",
+ "irmc_password": "fake0",
+ }
+ info = irmc_common.parse_driver_info(self.node)
+
+ self.assertEqual('basic', info.get('irmc_auth_method'))
+ self.assertEqual(443, info.get('irmc_port'))
+ self.assertEqual(60, info.get('irmc_client_timeout'))
+ self.assertEqual('ipmitool', info.get('irmc_sensor_method'))
+
+ def test_parse_driver_info_missing_address(self):
+ del self.node.driver_info['irmc_address']
+ self.assertRaises(exception.MissingParameterValue,
+ irmc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_username(self):
+ del self.node.driver_info['irmc_username']
+ self.assertRaises(exception.MissingParameterValue,
+ irmc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_password(self):
+ del self.node.driver_info['irmc_password']
+ self.assertRaises(exception.MissingParameterValue,
+ irmc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_invalid_timeout(self):
+ self.node.driver_info['irmc_client_timeout'] = 'qwe'
+ self.assertRaises(exception.InvalidParameterValue,
+ irmc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_invalid_port(self):
+ self.node.driver_info['irmc_port'] = 'qwe'
+ self.assertRaises(exception.InvalidParameterValue,
+ irmc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_invalid_auth_method(self):
+ self.node.driver_info['irmc_auth_method'] = 'qwe'
+ self.assertRaises(exception.InvalidParameterValue,
+ irmc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_invalid_sensor_method(self):
+ self.node.driver_info['irmc_sensor_method'] = 'qwe'
+ self.assertRaises(exception.InvalidParameterValue,
+ irmc_common.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_multiple_params(self):
+ del self.node.driver_info['irmc_password']
+ del self.node.driver_info['irmc_address']
+ try:
+ irmc_common.parse_driver_info(self.node)
+ self.fail("parse_driver_info did not throw exception.")
+ except exception.MissingParameterValue as e:
+ self.assertIn('irmc_password', str(e))
+ self.assertIn('irmc_address', str(e))
+
+
+class IRMCCommonMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IRMCCommonMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_irmc")
+ self.info = db_utils.get_test_irmc_info()
+ self.node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_irmc',
+ driver_info=self.info)
+
+ @mock.patch.object(irmc_common, 'scci',
+ spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC)
+ def test_get_irmc_client(self, mock_scci):
+ self.info['irmc_port'] = 80
+ self.info['irmc_auth_method'] = 'digest'
+ self.info['irmc_client_timeout'] = 60
+ mock_scci.get_client.return_value = 'get_client'
+ returned_mock_scci_get_client = irmc_common.get_irmc_client(self.node)
+ mock_scci.get_client.assert_called_with(
+ self.info['irmc_address'],
+ self.info['irmc_username'],
+ self.info['irmc_password'],
+ port=self.info['irmc_port'],
+ auth_method=self.info['irmc_auth_method'],
+ client_timeout=self.info['irmc_client_timeout'])
+ self.assertEqual('get_client', returned_mock_scci_get_client)
+
+ def test_update_ipmi_properties(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ipmi_info = {
+ "ipmi_address": "1.2.3.4",
+ "ipmi_username": "admin0",
+ "ipmi_password": "fake0",
+ }
+ task.node.driver_info = self.info
+ irmc_common.update_ipmi_properties(task)
+ actual_info = task.node.driver_info
+ expected_info = dict(self.info, **ipmi_info)
+ self.assertEqual(expected_info, actual_info)
+
+ @mock.patch.object(irmc_common, 'scci',
+ spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC)
+ def test_get_irmc_report(self, mock_scci):
+ self.info['irmc_port'] = 80
+ self.info['irmc_auth_method'] = 'digest'
+ self.info['irmc_client_timeout'] = 60
+ mock_scci.get_report.return_value = 'get_report'
+ returned_mock_scci_get_report = irmc_common.get_irmc_report(self.node)
+ mock_scci.get_report.assert_called_with(
+ self.info['irmc_address'],
+ self.info['irmc_username'],
+ self.info['irmc_password'],
+ port=self.info['irmc_port'],
+ auth_method=self.info['irmc_auth_method'],
+ client_timeout=self.info['irmc_client_timeout'])
+ self.assertEqual('get_report', returned_mock_scci_get_report)
diff --git a/ironic/tests/unit/drivers/irmc/test_deploy.py b/ironic/tests/unit/drivers/irmc/test_deploy.py
new file mode 100644
index 000000000..bba950cbf
--- /dev/null
+++ b/ironic/tests/unit/drivers/irmc/test_deploy.py
@@ -0,0 +1,1536 @@
+# Copyright 2015 FUJITSU LIMITED
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for iRMC Deploy Driver
+"""
+
+import os
+import shutil
+import tempfile
+
+import mock
+from oslo_config import cfg
+import six
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common.glance_service import service_utils
+from ironic.common.i18n import _
+from ironic.common import images
+from ironic.common import states
+from ironic.common import utils
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules import agent
+from ironic.drivers.modules import agent_base_vendor
+from ironic.drivers.modules import deploy_utils
+from ironic.drivers.modules.irmc import common as irmc_common
+from ironic.drivers.modules.irmc import deploy as irmc_deploy
+from ironic.drivers.modules import iscsi_deploy
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+
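+# NOTE: a hedged clarifying comment (not in the original patch): the 'file'
+# builtin does not exist on Python 3, so io.BytesIO stands in as the spec for
+# the file-handle mocks used in the floppy-image tests below.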
+if six.PY3:
+ import io
+ file = io.BytesIO
+
+
+INFO_DICT = db_utils.get_test_irmc_info()
+CONF = cfg.CONF
+
+
+class IRMCDeployPrivateMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ irmc_deploy._check_share_fs_mounted_patcher.start()
+ super(IRMCDeployPrivateMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='iscsi_irmc')
+ self.node = obj_utils.create_test_node(
+ self.context, driver='iscsi_irmc', driver_info=INFO_DICT)
+
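+ # Remote image share settings shared by the tests in this class;
+ # individual tests override single values where needed.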
+ CONF.irmc.remote_image_share_root = '/remote_image_share_root'
+ CONF.irmc.remote_image_server = '10.20.30.40'
+ CONF.irmc.remote_image_share_type = 'NFS'
+ CONF.irmc.remote_image_share_name = 'share'
+ CONF.irmc.remote_image_user_name = 'admin'
+ CONF.irmc.remote_image_user_password = 'admin0'
+ CONF.irmc.remote_image_user_domain = 'local'
+
+ @mock.patch.object(os.path, 'isdir', spec_set=True, autospec=True)
+ def test__parse_config_option(self, isdir_mock):
+ isdir_mock.return_value = True
+
+ result = irmc_deploy._parse_config_option()
+
+ isdir_mock.assert_called_once_with('/remote_image_share_root')
+ self.assertIsNone(result)
+
+ @mock.patch.object(os.path, 'isdir', spec_set=True, autospec=True)
+ def test__parse_config_option_non_existed_root(self, isdir_mock):
+ CONF.irmc.remote_image_share_root = '/non_existed_root'
+ isdir_mock.return_value = False
+
+ self.assertRaises(exception.InvalidParameterValue,
+ irmc_deploy._parse_config_option)
+ isdir_mock.assert_called_once_with('/non_existed_root')
+
+ @mock.patch.object(os.path, 'isdir', spec_set=True, autospec=True)
+ def test__parse_config_option_wrong_share_type(self, isdir_mock):
+ CONF.irmc.remote_image_share_type = 'NTFS'
+ isdir_mock.return_value = True
+
+ self.assertRaises(exception.InvalidParameterValue,
+ irmc_deploy._parse_config_option)
+ isdir_mock.assert_called_once_with('/remote_image_share_root')
+
+ @mock.patch.object(os.path, 'isfile', spec_set=True, autospec=True)
+ def test__parse_driver_info_in_share(self, isfile_mock):
+ """With required 'irmc_deploy_iso' in share."""
+ isfile_mock.return_value = True
+ self.node.driver_info['irmc_deploy_iso'] = 'deploy.iso'
+ driver_info_expected = {'irmc_deploy_iso': 'deploy.iso'}
+
+ driver_info_actual = irmc_deploy._parse_driver_info(self.node)
+
+ isfile_mock.assert_called_once_with(
+ '/remote_image_share_root/deploy.iso')
+ self.assertEqual(driver_info_expected, driver_info_actual)
+
+ @mock.patch.object(service_utils, 'is_image_href_ordinary_file_name',
+ spec_set=True, autospec=True)
+ def test__parse_driver_info_not_in_share(
+ self, is_image_href_ordinary_file_name_mock):
+ """With required 'irmc_deploy_iso' not in share."""
+ self.node.driver_info[
+ 'irmc_deploy_iso'] = 'bc784057-a140-4130-add3-ef890457e6b3'
+ driver_info_expected = {'irmc_deploy_iso':
+ 'bc784057-a140-4130-add3-ef890457e6b3'}
+ is_image_href_ordinary_file_name_mock.return_value = False
+
+ driver_info_actual = irmc_deploy._parse_driver_info(self.node)
+
+ self.assertEqual(driver_info_expected, driver_info_actual)
+
+ @mock.patch.object(os.path, 'isfile', spec_set=True, autospec=True)
+ def test__parse_driver_info_with_deploy_iso_invalid(self, isfile_mock):
+ """With required 'irmc_deploy_iso' non existed."""
+ isfile_mock.return_value = False
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['irmc_deploy_iso'] = 'deploy.iso'
+ error_msg = (_("Deploy ISO file, %(deploy_iso)s, "
+ "not found for node: %(node)s.") %
+ {'deploy_iso': '/remote_image_share_root/deploy.iso',
+ 'node': task.node.uuid})
+
+ e = self.assertRaises(exception.InvalidParameterValue,
+ irmc_deploy._parse_driver_info,
+ task.node)
+ self.assertEqual(error_msg, str(e))
+
+ def test__parse_driver_info_with_deploy_iso_missing(self):
+ """With required 'irmc_deploy_iso' empty."""
+ self.node.driver_info['irmc_deploy_iso'] = None
+
+ error_msg = ("Error validating iRMC virtual media deploy. Some"
+ " parameters were missing in node's driver_info."
+ " Missing are: ['irmc_deploy_iso']")
+ e = self.assertRaises(exception.MissingParameterValue,
+ irmc_deploy._parse_driver_info,
+ self.node)
+ self.assertEqual(error_msg, str(e))
+
+ def test__parse_instance_info_with_boot_iso_file_name_ok(self):
+ """With optional 'irmc_boot_iso' file name."""
+ CONF.irmc.remote_image_share_root = '/etc'
+ self.node.instance_info['irmc_boot_iso'] = 'hosts'
+ instance_info_expected = {'irmc_boot_iso': 'hosts'}
+ instance_info_actual = irmc_deploy._parse_instance_info(self.node)
+
+ self.assertEqual(instance_info_expected, instance_info_actual)
+
+ def test__parse_instance_info_without_boot_iso_ok(self):
+ """With optional no 'irmc_boot_iso' file name."""
+ CONF.irmc.remote_image_share_root = '/etc'
+
+ self.node.instance_info['irmc_boot_iso'] = None
+ instance_info_expected = {}
+ instance_info_actual = irmc_deploy._parse_instance_info(self.node)
+
+ self.assertEqual(instance_info_expected, instance_info_actual)
+
+ def test__parse_instance_info_with_boot_iso_uuid_ok(self):
+ """With optional 'irmc_boot_iso' glance uuid."""
+ self.node.instance_info[
+ 'irmc_boot_iso'] = 'bc784057-a140-4130-add3-ef890457e6b3'
+ instance_info_expected = {'irmc_boot_iso':
+ 'bc784057-a140-4130-add3-ef890457e6b3'}
+ instance_info_actual = irmc_deploy._parse_instance_info(self.node)
+
+ self.assertEqual(instance_info_expected, instance_info_actual)
+
+ def test__parse_instance_info_with_boot_iso_glance_ok(self):
+ """With optional 'irmc_boot_iso' glance url."""
+ self.node.instance_info['irmc_boot_iso'] = (
+ 'glance://bc784057-a140-4130-add3-ef890457e6b3')
+ instance_info_expected = {
+ 'irmc_boot_iso': 'glance://bc784057-a140-4130-add3-ef890457e6b3',
+ }
+ instance_info_actual = irmc_deploy._parse_instance_info(self.node)
+
+ self.assertEqual(instance_info_expected, instance_info_actual)
+
+ def test__parse_instance_info_with_boot_iso_http_ok(self):
+ """With optional 'irmc_boot_iso' http url."""
+ self.node.instance_info[
+ 'irmc_boot_iso'] = 'http://irmc_boot_iso'
+ instance_info_expected = {'irmc_boot_iso': 'http://irmc_boot_iso'}
+ instance_info_actual = irmc_deploy._parse_instance_info(self.node)
+
+ self.assertEqual(instance_info_expected, instance_info_actual)
+
+ def test__parse_instance_info_with_boot_iso_https_ok(self):
+ """With optional 'irmc_boot_iso' https url."""
+ self.node.instance_info[
+ 'irmc_boot_iso'] = 'https://irmc_boot_iso'
+ instance_info_expected = {'irmc_boot_iso': 'https://irmc_boot_iso'}
+ instance_info_actual = irmc_deploy._parse_instance_info(self.node)
+
+ self.assertEqual(instance_info_expected, instance_info_actual)
+
+ def test__parse_instance_info_with_boot_iso_file_url_ok(self):
+ """With optional 'irmc_boot_iso' file url."""
+ self.node.instance_info[
+ 'irmc_boot_iso'] = 'file://irmc_boot_iso'
+ instance_info_expected = {'irmc_boot_iso': 'file://irmc_boot_iso'}
+ instance_info_actual = irmc_deploy._parse_instance_info(self.node)
+
+ self.assertEqual(instance_info_expected, instance_info_actual)
+
+ @mock.patch.object(os.path, 'isfile', spec_set=True, autospec=True)
+ def test__parse_instance_info_with_boot_iso_invalid(self, isfile_mock):
+ CONF.irmc.remote_image_share_root = '/etc'
+ isfile_mock.return_value = False
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.instance_info['irmc_boot_iso'] = 'hosts~non~existed'
+
+ error_msg = (_("Boot ISO file, %(boot_iso)s, "
+ "not found for node: %(node)s.") %
+ {'boot_iso': '/etc/hosts~non~existed',
+ 'node': task.node.uuid})
+
+ e = self.assertRaises(exception.InvalidParameterValue,
+ irmc_deploy._parse_instance_info,
+ task.node)
+ self.assertEqual(error_msg, str(e))
+
+ @mock.patch.object(iscsi_deploy, 'parse_instance_info', spec_set=True,
+ autospec=True)
+ def test__parse_deploy_info_ok(self, instance_info_mock):
+ CONF.irmc.remote_image_share_root = '/etc'
+ instance_info_mock.return_value = {'a': 'b'}
+ driver_info_expected = {'a': 'b',
+ 'irmc_deploy_iso': 'hosts',
+ 'irmc_boot_iso': 'fstab'}
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['irmc_deploy_iso'] = 'hosts'
+ task.node.instance_info['irmc_boot_iso'] = 'fstab'
+ driver_info_actual = irmc_deploy._parse_deploy_info(task.node)
+ self.assertEqual(driver_info_expected, driver_info_actual)
+
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'fetch', spec_set=True,
+ autospec=True)
+ def test__reboot_into_deploy_iso_with_file(self,
+ fetch_mock,
+ setup_vmedia_mock,
+ set_boot_device_mock,
+ node_power_action_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_info['irmc_deploy_iso'] = 'deploy_iso_filename'
+ ramdisk_opts = {'a': 'b'}
+ irmc_deploy._reboot_into_deploy_iso(task, ramdisk_opts)
+
+ self.assertFalse(fetch_mock.called)
+
+ setup_vmedia_mock.assert_called_once_with(
+ task,
+ 'deploy_iso_filename',
+ ramdisk_opts)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.CDROM)
+ node_power_action_mock.assert_called_once_with(task, states.REBOOT)
+
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'fetch', spec_set=True,
+ autospec=True)
+ @mock.patch.object(service_utils, 'is_image_href_ordinary_file_name',
+ spec_set=True, autospec=True)
+ def test__reboot_into_deploy_iso_with_image_service(
+ self,
+ is_image_href_ordinary_file_name_mock,
+ fetch_mock,
+ setup_vmedia_mock,
+ set_boot_device_mock,
+ node_power_action_mock):
+ CONF.irmc.remote_image_share_root = '/'
+ is_image_href_ordinary_file_name_mock.return_value = False
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_info['irmc_deploy_iso'] = 'glance://deploy_iso'
+ ramdisk_opts = {'a': 'b'}
+ irmc_deploy._reboot_into_deploy_iso(task, ramdisk_opts)
+
+ fetch_mock.assert_called_once_with(
+ task.context,
+ 'glance://deploy_iso',
+ "/deploy-%s.iso" % self.node.uuid)
+
+ setup_vmedia_mock.assert_called_once_with(
+ task,
+ "deploy-%s.iso" % self.node.uuid,
+ ramdisk_opts)
+ set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.CDROM)
+ node_power_action_mock.assert_called_once_with(
+ task, states.REBOOT)
+
+ def test__get_deploy_iso_name(self):
+ actual = irmc_deploy._get_deploy_iso_name(self.node)
+ expected = "deploy-%s.iso" % self.node.uuid
+ self.assertEqual(expected, actual)
+
+ def test__get_boot_iso_name(self):
+ actual = irmc_deploy._get_boot_iso_name(self.node)
+ expected = "boot-%s.iso" % self.node.uuid
+ self.assertEqual(expected, actual)
+
+ @mock.patch.object(images, 'create_boot_iso', spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'get_boot_mode_for_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'get_image_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'fetch', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ def test__prepare_boot_iso_file(self,
+ deploy_info_mock,
+ fetch_mock,
+ image_props_mock,
+ boot_mode_mock,
+ create_boot_iso_mock):
+ deploy_info_mock.return_value = {'irmc_boot_iso': 'irmc_boot.iso'}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ irmc_deploy._prepare_boot_iso(task, 'root-uuid')
+
+ deploy_info_mock.assert_called_once_with(task.node)
+ self.assertFalse(fetch_mock.called)
+ self.assertFalse(image_props_mock.called)
+ self.assertFalse(boot_mode_mock.called)
+ self.assertFalse(create_boot_iso_mock.called)
+ task.node.refresh()
+ self.assertEqual('irmc_boot.iso',
+ task.node.driver_internal_info['irmc_boot_iso'])
+
+ @mock.patch.object(images, 'create_boot_iso', spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'get_boot_mode_for_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'get_image_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'fetch', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ @mock.patch.object(service_utils, 'is_image_href_ordinary_file_name',
+ spec_set=True, autospec=True)
+ def test__prepare_boot_iso_fetch_ok(self,
+ is_image_href_ordinary_file_name_mock,
+ deploy_info_mock,
+ fetch_mock,
+ image_props_mock,
+ boot_mode_mock,
+ create_boot_iso_mock):
+
+ CONF.irmc.remote_image_share_root = '/'
+ image = '733d1c44-a2ea-414b-aca7-69decf20d810'
+ is_image_href_ordinary_file_name_mock.return_value = False
+ deploy_info_mock.return_value = {'irmc_boot_iso': image}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['irmc_boot_iso'] = image
+ irmc_deploy._prepare_boot_iso(task, 'root-uuid')
+
+ deploy_info_mock.assert_called_once_with(task.node)
+ fetch_mock.assert_called_once_with(
+ task.context,
+ image,
+ "/boot-%s.iso" % self.node.uuid)
+ self.assertFalse(image_props_mock.called)
+ self.assertFalse(boot_mode_mock.called)
+ self.assertFalse(create_boot_iso_mock.called)
+ task.node.refresh()
+ self.assertEqual("boot-%s.iso" % self.node.uuid,
+ task.node.driver_internal_info['irmc_boot_iso'])
+
+ @mock.patch.object(images, 'create_boot_iso', spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'get_boot_mode_for_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'get_image_properties', spec_set=True,
+ autospec=True)
+ @mock.patch.object(images, 'fetch', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ def test__prepare_boot_iso_create_ok(self,
+ deploy_info_mock,
+ fetch_mock,
+ image_props_mock,
+ boot_mode_mock,
+ create_boot_iso_mock):
+ CONF.pxe.pxe_append_params = 'kernel-params'
+
+ deploy_info_mock.return_value = {'image_source': 'image-uuid'}
+ image_props_mock.return_value = {'kernel_id': 'kernel_uuid',
+ 'ramdisk_id': 'ramdisk_uuid'}
+
+ CONF.irmc.remote_image_share_name = '/remote_image_share_root'
+ boot_mode_mock.return_value = 'uefi'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy._prepare_boot_iso(task, 'root-uuid')
+
+ self.assertFalse(fetch_mock.called)
+ deploy_info_mock.assert_called_once_with(task.node)
+ image_props_mock.assert_called_once_with(
+ task.context, 'image-uuid', ['kernel_id', 'ramdisk_id'])
+ create_boot_iso_mock.assert_called_once_with(
+ task.context,
+ '/remote_image_share_root/' +
+ "boot-%s.iso" % self.node.uuid,
+ 'kernel_uuid', 'ramdisk_uuid',
+ 'file:///remote_image_share_root/' +
+ "deploy-%s.iso" % self.node.uuid,
+ 'root-uuid', 'kernel-params', 'uefi')
+ task.node.refresh()
+ self.assertEqual("boot-%s.iso" % self.node.uuid,
+ task.node.driver_internal_info['irmc_boot_iso'])
+
+ def test__get_floppy_image_name(self):
+ actual = irmc_deploy._get_floppy_image_name(self.node)
+ expected = "image-%s.img" % self.node.uuid
+ self.assertEqual(expected, actual)
+
+ @mock.patch.object(shutil, 'copyfile', spec_set=True, autospec=True)
+ @mock.patch.object(images, 'create_vfat_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(tempfile, 'NamedTemporaryFile', spec_set=True,
+ autospec=True)
+ def test__prepare_floppy_image(self,
+ tempfile_mock,
+ create_vfat_image_mock,
+ copyfile_mock):
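+ # Simulate tempfile.NamedTemporaryFile used as a context manager that
+ # yields a file object whose name is 'image-tmp-file'.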
+ mock_image_file_handle = mock.MagicMock(spec=file)
+ mock_image_file_obj = mock.MagicMock()
+ mock_image_file_obj.name = 'image-tmp-file'
+ mock_image_file_handle.__enter__.return_value = mock_image_file_obj
+ tempfile_mock.side_effect = iter([mock_image_file_handle])
+
+ deploy_args = {'arg1': 'val1', 'arg2': 'val2'}
+ CONF.irmc.remote_image_share_name = '/remote_image_share_root'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy._prepare_floppy_image(task, deploy_args)
+
+ create_vfat_image_mock.assert_called_once_with(
+ 'image-tmp-file', parameters=deploy_args)
+ copyfile_mock.assert_called_once_with(
+ 'image-tmp-file',
+ '/remote_image_share_root/' + "image-%s.img" % self.node.uuid)
+
+ @mock.patch.object(shutil, 'copyfile', spec_set=True, autospec=True)
+ @mock.patch.object(images, 'create_vfat_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(tempfile, 'NamedTemporaryFile', spec_set=True,
+ autospec=True)
+ def test__prepare_floppy_image_exception(self,
+ tempfile_mock,
+ create_vfat_image_mock,
+ copyfile_mock):
+ mock_image_file_handle = mock.MagicMock(spec=file)
+ mock_image_file_obj = mock.MagicMock()
+ mock_image_file_obj.name = 'image-tmp-file'
+ mock_image_file_handle.__enter__.return_value = mock_image_file_obj
+ tempfile_mock.side_effect = iter([mock_image_file_handle])
+
+ deploy_args = {'arg1': 'val1', 'arg2': 'val2'}
+ CONF.irmc.remote_image_share_name = '/remote_image_share_root'
+ copyfile_mock.side_effect = iter([IOError("fake error")])
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IRMCOperationError,
+ irmc_deploy._prepare_floppy_image,
+ task,
+ deploy_args)
+
+ create_vfat_image_mock.assert_called_once_with(
+ 'image-tmp-file', parameters=deploy_args)
+ copyfile_mock.assert_called_once_with(
+ 'image-tmp-file',
+ '/remote_image_share_root/' + "image-%s.img" % self.node.uuid)
+
+ @mock.patch.object(irmc_deploy, '_attach_virtual_cd', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_attach_virtual_fd', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_prepare_floppy_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_detach_virtual_fd', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_detach_virtual_cd', spec_set=True,
+ autospec=True)
+ def test_setup_vmedia_for_boot_with_parameters(self,
+ _detach_virtual_cd_mock,
+ _detach_virtual_fd_mock,
+ _prepare_floppy_image_mock,
+ _attach_virtual_fd_mock,
+ _attach_virtual_cd_mock):
+ parameters = {'a': 'b'}
+ iso_filename = 'deploy_iso_or_boot_iso'
+ _prepare_floppy_image_mock.return_value = 'floppy_file_name'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy.setup_vmedia_for_boot(task, iso_filename, parameters)
+
+ _detach_virtual_cd_mock.assert_called_once_with(task.node)
+ _detach_virtual_fd_mock.assert_called_once_with(task.node)
+ _prepare_floppy_image_mock.assert_called_once_with(task,
+ parameters)
+ _attach_virtual_fd_mock.assert_called_once_with(task.node,
+ 'floppy_file_name')
+ _attach_virtual_cd_mock.assert_called_once_with(task.node,
+ iso_filename)
+
+ @mock.patch.object(irmc_deploy, '_attach_virtual_cd', autospec=True)
+ @mock.patch.object(irmc_deploy, '_detach_virtual_fd', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_detach_virtual_cd', spec_set=True,
+ autospec=True)
+ def test_setup_vmedia_for_boot_without_parameters(
+ self,
+ _detach_virtual_cd_mock,
+ _detach_virtual_fd_mock,
+ _attach_virtual_cd_mock):
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy.setup_vmedia_for_boot(task, 'bootable_iso_filename')
+
+ _detach_virtual_cd_mock.assert_called_once_with(task.node)
+ _detach_virtual_fd_mock.assert_called_once_with(task.node)
+ _attach_virtual_cd_mock.assert_called_once_with(
+ task.node,
+ 'bootable_iso_filename')
+
+ @mock.patch.object(irmc_deploy, '_get_deploy_iso_name', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_get_floppy_image_name', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_remove_share_file', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_detach_virtual_fd', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_detach_virtual_cd', spec_set=True,
+ autospec=True)
+ def test__cleanup_vmedia_boot_ok(self,
+ _detach_virtual_cd_mock,
+ _detach_virtual_fd_mock,
+ _remove_share_file_mock,
+ _get_floppy_image_name_mock,
+ _get_deploy_iso_name_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy._cleanup_vmedia_boot(task)
+
+ _detach_virtual_cd_mock.assert_called_once_with(task.node)
+ _detach_virtual_fd_mock.assert_called_once_with(task.node)
+ _get_floppy_image_name_mock.assert_called_once_with(task.node)
+ _get_deploy_iso_name_mock.assert_called_once_with(task.node)
+ self.assertEqual(2, _remove_share_file_mock.call_count)
+ _remove_share_file_mock.assert_has_calls(
+ [mock.call(_get_floppy_image_name_mock(task.node)),
+ mock.call(_get_deploy_iso_name_mock(task.node))])
+
+ @mock.patch.object(utils, 'unlink_without_raise', spec_set=True,
+ autospec=True)
+ def test__remove_share_file(self, unlink_without_raise_mock):
+ CONF.irmc.remote_image_share_name = '/'
+
+ irmc_deploy._remove_share_file("boot.iso")
+
+ unlink_without_raise_mock.assert_called_once_with('/boot.iso')
+
+ @mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+ def test__attach_virtual_cd_ok(self, get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ irmc_deploy.scci.get_virtual_cd_set_params_cmd = (
+ mock.MagicMock(spec_set=[]))
+ cd_set_params = (irmc_deploy.scci
+ .get_virtual_cd_set_params_cmd.return_value)
+
+ CONF.irmc.remote_image_server = '10.20.30.40'
+ CONF.irmc.remote_image_user_domain = 'local'
+ CONF.irmc.remote_image_share_type = 'NFS'
+ CONF.irmc.remote_image_share_name = 'share'
+ CONF.irmc.remote_image_user_name = 'admin'
+ CONF.irmc.remote_image_user_password = 'admin0'
+
+ irmc_deploy.scci.get_share_type.return_value = 0
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy._attach_virtual_cd(task.node, 'iso_filename')
+
+ get_irmc_client_mock.assert_called_once_with(task.node)
+ (irmc_deploy.scci.get_virtual_cd_set_params_cmd
+ .assert_called_once_with)('10.20.30.40',
+ 'local',
+ 0,
+ 'share',
+ 'iso_filename',
+ 'admin',
+ 'admin0')
+ irmc_client.assert_has_calls(
+ [mock.call(cd_set_params, async=False),
+ mock.call(irmc_deploy.scci.MOUNT_CD, async=False)])
+
+ @mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+ def test__attach_virtual_cd_fail(self, get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ irmc_client.side_effect = Exception("fake error")
+ irmc_deploy.scci.SCCIClientError = Exception
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ e = self.assertRaises(exception.IRMCOperationError,
+ irmc_deploy._attach_virtual_cd,
+ task.node,
+ 'iso_filename')
+ get_irmc_client_mock.assert_called_once_with(task.node)
+ self.assertEqual("iRMC Inserting virtual cdrom failed. " +
+ "Reason: fake error", str(e))
+
+ @mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+ def test__detach_virtual_cd_ok(self, get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy._detach_virtual_cd(task.node)
+
+ irmc_client.assert_called_once_with(irmc_deploy.scci.UNMOUNT_CD)
+
+ @mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+ def test__detach_virtual_cd_fail(self, get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ irmc_client.side_effect = Exception("fake error")
+ irmc_deploy.scci.SCCIClientError = Exception
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ e = self.assertRaises(exception.IRMCOperationError,
+ irmc_deploy._detach_virtual_cd,
+ task.node)
+ self.assertEqual("iRMC Ejecting virtual cdrom failed. " +
+ "Reason: fake error", str(e))
+
+ @mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+ def test__attach_virtual_fd_ok(self, get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ irmc_deploy.scci.get_virtual_fd_set_params_cmd = (
+ mock.MagicMock(spec_set=[]))
+ fd_set_params = (irmc_deploy.scci
+ .get_virtual_fd_set_params_cmd.return_value)
+
+ CONF.irmc.remote_image_server = '10.20.30.40'
+ CONF.irmc.remote_image_user_domain = 'local'
+ CONF.irmc.remote_image_share_type = 'NFS'
+ CONF.irmc.remote_image_share_name = 'share'
+ CONF.irmc.remote_image_user_name = 'admin'
+ CONF.irmc.remote_image_user_password = 'admin0'
+
+ irmc_deploy.scci.get_share_type.return_value = 0
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy._attach_virtual_fd(task.node,
+ 'floppy_image_filename')
+
+ get_irmc_client_mock.assert_called_once_with(task.node)
+ (irmc_deploy.scci.get_virtual_fd_set_params_cmd
+ .assert_called_once_with)('10.20.30.40',
+ 'local',
+ 0,
+ 'share',
+ 'floppy_image_filename',
+ 'admin',
+ 'admin0')
+ irmc_client.assert_has_calls(
+ [mock.call(fd_set_params, async=False),
+ mock.call(irmc_deploy.scci.MOUNT_FD, async=False)])
+
+ @mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+ def test__attach_virtual_fd_fail(self, get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ irmc_client.side_effect = Exception("fake error")
+ irmc_deploy.scci.SCCIClientError = Exception
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ e = self.assertRaises(exception.IRMCOperationError,
+ irmc_deploy._attach_virtual_fd,
+ task.node,
+ 'iso_filename')
+ get_irmc_client_mock.assert_called_once_with(task.node)
+ self.assertEqual("iRMC Inserting virtual floppy failed. " +
+ "Reason: fake error", str(e))
+
+ @mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+ def test__detach_virtual_fd_ok(self, get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ irmc_deploy._detach_virtual_fd(task.node)
+
+ irmc_client.assert_called_once_with(irmc_deploy.scci.UNMOUNT_FD)
+
+ @mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+ def test__detach_virtual_fd_fail(self, get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ irmc_client.side_effect = Exception("fake error")
+ irmc_deploy.scci.SCCIClientError = Exception
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ e = self.assertRaises(exception.IRMCOperationError,
+ irmc_deploy._detach_virtual_fd,
+ task.node)
+ self.assertEqual("iRMC Ejecting virtual floppy failed. "
+ "Reason: fake error", str(e))
+
+ @mock.patch.object(irmc_deploy, '_parse_config_option', spec_set=True,
+ autospec=True)
+ def test__check_share_fs_mounted_ok(self, parse_conf_mock):
+ # NOTE(naohirot): mock.patch.stop() and mock.patch.start() don't work
+ # here, therefore monkey patching is applied to
+ # irmc_deploy._check_share_fs_mounted.
+ # irmc_deploy._check_share_fs_mounted is mocked in
+ # third_party_driver_mocks.py.
+ # irmc_deploy._check_share_fs_mounted_orig is the real function.
+ CONF.irmc.remote_image_share_root = '/'
+ CONF.irmc.remote_image_share_type = 'nfs'
+ result = irmc_deploy._check_share_fs_mounted_orig()
+
+ parse_conf_mock.assert_called_once_with()
+ self.assertIsNone(result)
+
+ @mock.patch.object(irmc_deploy, '_parse_config_option', spec_set=True,
+ autospec=True)
+ def test__check_share_fs_mounted_exception(self, parse_conf_mock):
+ # NOTE(naohirot): mock.patch.stop() and mock.patch.start() don't work
+ # here, therefore monkey patching is applied to
+ # irmc_deploy._check_share_fs_mounted.
+ # irmc_deploy._check_share_fs_mounted is mocked in
+ # third_party_driver_mocks.py.
+ # irmc_deploy._check_share_fs_mounted_orig is the real function.
+ CONF.irmc.remote_image_share_root = '/etc'
+ CONF.irmc.remote_image_share_type = 'cifs'
+
+ self.assertRaises(exception.IRMCSharedFileSystemNotMounted,
+ irmc_deploy._check_share_fs_mounted_orig)
+ parse_conf_mock.assert_called_once_with()
+
+
+class IRMCVirtualMediaIscsiDeployTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ irmc_deploy._check_share_fs_mounted_patcher.start()
+ super(IRMCVirtualMediaIscsiDeployTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="iscsi_irmc")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='iscsi_irmc', driver_info=INFO_DICT)
+
+ @mock.patch.object(deploy_utils, 'validate_capabilities',
+ spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'validate_image_properties',
+ spec_set=True, autospec=True)
+ @mock.patch.object(service_utils, 'is_glance_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'validate', spec_set=True, autospec=True)
+ @mock.patch.object(irmc_deploy, '_check_share_fs_mounted', spec_set=True,
+ autospec=True)
+ def test_validate_whole_disk_image(self,
+ _check_share_fs_mounted_mock,
+ validate_mock,
+ deploy_info_mock,
+ is_glance_image_mock,
+ validate_prop_mock,
+ validate_capabilities_mock):
+ d_info = {'image_source': '733d1c44-a2ea-414b-aca7-69decf20d810'}
+ deploy_info_mock.return_value = d_info
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_internal_info = {'is_whole_disk_image': True}
+ task.driver.deploy.validate(task)
+
+ _check_share_fs_mounted_mock.assert_called_once_with()
+ validate_mock.assert_called_once_with(task)
+ deploy_info_mock.assert_called_once_with(task.node)
+ self.assertFalse(is_glance_image_mock.called)
+ validate_prop_mock.assert_called_once_with(task.context,
+ d_info, [])
+ validate_capabilities_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(deploy_utils, 'validate_capabilities',
+ spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'validate_image_properties',
+ spec_set=True, autospec=True)
+ @mock.patch.object(service_utils, 'is_glance_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'validate', spec_set=True, autospec=True)
+ @mock.patch.object(irmc_deploy, '_check_share_fs_mounted', spec_set=True,
+ autospec=True)
+ def test_validate_glance_image(self,
+ _check_share_fs_mounted_mock,
+ validate_mock,
+ deploy_info_mock,
+ is_glance_image_mock,
+ validate_prop_mock,
+ validate_capabilities_mock):
+ d_info = {'image_source': '733d1c44-a2ea-414b-aca7-69decf20d810'}
+ deploy_info_mock.return_value = d_info
+ is_glance_image_mock.return_value = True
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.validate(task)
+
+ _check_share_fs_mounted_mock.assert_called_once_with()
+ validate_mock.assert_called_once_with(task)
+ deploy_info_mock.assert_called_once_with(task.node)
+ validate_prop_mock.assert_called_once_with(
+ task.context, d_info, ['kernel_id', 'ramdisk_id'])
+ validate_capabilities_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(deploy_utils, 'validate_capabilities',
+ spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'validate_image_properties',
+ spec_set=True, autospec=True)
+ @mock.patch.object(service_utils, 'is_glance_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_parse_deploy_info', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'validate', spec_set=True, autospec=True)
+ @mock.patch.object(irmc_deploy, '_check_share_fs_mounted', spec_set=True,
+ autospec=True)
+ def test_validate_non_glance_image(self,
+ _check_share_fs_mounted_mock,
+ validate_mock,
+ deploy_info_mock,
+ is_glance_image_mock,
+ validate_prop_mock,
+ validate_capabilities_mock):
+ d_info = {'image_source': '733d1c44-a2ea-414b-aca7-69decf20d810'}
+ deploy_info_mock.return_value = d_info
+ is_glance_image_mock.return_value = False
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.validate(task)
+
+ _check_share_fs_mounted_mock.assert_called_once_with()
+ validate_mock.assert_called_once_with(task)
+ deploy_info_mock.assert_called_once_with(task.node)
+ validate_prop_mock.assert_called_once_with(
+ task.context, d_info, ['kernel', 'ramdisk'])
+ validate_capabilities_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(irmc_deploy, '_reboot_into_deploy_iso',
+ spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'get_single_nic_with_vif_port_id',
+ spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'build_agent_options', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'build_deploy_ramdisk_options',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'check_image_size', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'cache_instance_image', spec_set=True,
+ autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_deploy(self,
+ node_power_action_mock,
+ cache_instance_image_mock,
+ check_image_size_mock,
+ build_deploy_ramdisk_options_mock,
+ build_agent_options_mock,
+ get_single_nic_with_vif_port_id_mock,
+ _reboot_into_mock):
+ deploy_opts = {'a': 'b'}
+ build_agent_options_mock.return_value = {
+ 'ipa-api-url': 'http://1.2.3.4:6385'}
+ build_deploy_ramdisk_options_mock.return_value = deploy_opts
+ get_single_nic_with_vif_port_id_mock.return_value = '12:34:56:78:90:ab'
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ returned_state = task.driver.deploy.deploy(task)
+
+ node_power_action_mock.assert_called_once_with(
+ task, states.POWER_OFF)
+ cache_instance_image_mock.assert_called_once_with(
+ task.context, task.node)
+ check_image_size_mock.assert_called_once_with(task)
+ expected_ramdisk_opts = {'a': 'b', 'BOOTIF': '12:34:56:78:90:ab',
+ 'ipa-api-url': 'http://1.2.3.4:6385'}
+ build_agent_options_mock.assert_called_once_with(task.node)
+ build_deploy_ramdisk_options_mock.assert_called_once_with(
+ task.node)
+ get_single_nic_with_vif_port_id_mock.assert_called_once_with(
+ task)
+ _reboot_into_mock.assert_called_once_with(
+ task, expected_ramdisk_opts)
+ self.assertEqual(states.DEPLOYWAIT, returned_state)
+
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_remove_share_file', spec_set=True,
+ autospec=True)
+ def test_tear_down(self, _remove_share_file_mock, node_power_action_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['irmc_boot_iso'] = 'glance://deploy_iso'
+ task.node.driver_internal_info['irmc_boot_iso'] = 'irmc_boot.iso'
+
+ returned_state = task.driver.deploy.tear_down(task)
+
+ _remove_share_file_mock.assert_called_once_with(
+ irmc_deploy._get_boot_iso_name(task.node))
+ node_power_action_mock.assert_called_once_with(
+ task, states.POWER_OFF)
+ self.assertFalse(
+ task.node.driver_internal_info.get('irmc_boot_iso'))
+ self.assertEqual(states.DELETED, returned_state)
+
+ @mock.patch.object(iscsi_deploy, 'destroy_images', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_clean_up(self, _cleanup_vmedia_boot_mock, destroy_images_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.clean_up(task)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ destroy_images_mock.assert_called_once_with(task.node.uuid)
+
+
+class IRMCVirtualMediaAgentDeployTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ irmc_deploy._check_share_fs_mounted_patcher.start()
+ super(IRMCVirtualMediaAgentDeployTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="agent_irmc")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='agent_irmc', driver_info=INFO_DICT)
+
+ @mock.patch.object(deploy_utils, 'validate_capabilities',
+ spec_set=True, autospec=True)
+ @mock.patch.object(irmc_deploy, '_parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self, _parse_driver_info_mock,
+ validate_capabilities_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.validate(task)
+ _parse_driver_info_mock.assert_called_once_with(task.node)
+ validate_capabilities_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(irmc_deploy, '_reboot_into_deploy_iso',
+ spec_set=True, autospec=True)
+ @mock.patch.object(deploy_utils, 'build_agent_options', spec_set=True,
+ autospec=True)
+ def test_deploy(self, build_agent_options_mock,
+ _reboot_into_deploy_iso_mock):
+ deploy_ramdisk_opts = build_agent_options_mock.return_value
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ returned_state = task.driver.deploy.deploy(task)
+ build_agent_options_mock.assert_called_once_with(task.node)
+ _reboot_into_deploy_iso_mock.assert_called_once_with(
+ task, deploy_ramdisk_opts)
+ self.assertEqual(states.DEPLOYWAIT, returned_state)
+
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ def test_tear_down(self, node_power_action_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ returned_state = task.driver.deploy.tear_down(task)
+ node_power_action_mock.assert_called_once_with(
+ task, states.POWER_OFF)
+ self.assertEqual(states.DELETED, returned_state)
+
+ @mock.patch.object(agent, 'build_instance_info_for_deploy', spec_set=True,
+ autospec=True)
+ def test_prepare(self, build_instance_info_for_deploy_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.save = mock.MagicMock(spec_set=[])
+ task.driver.deploy.prepare(task)
+ build_instance_info_for_deploy_mock.assert_called_once_with(
+ task)
+ task.node.save.assert_called_once_with()
+
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_clean_up(self, _cleanup_vmedia_boot_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.deploy.clean_up(task)
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+
+
+class VendorPassthruTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ irmc_deploy._check_share_fs_mounted_patcher.start()
+ super(VendorPassthruTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="iscsi_irmc")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='iscsi_irmc', driver_info=INFO_DICT)
+
+ CONF.irmc.remote_image_share_root = '/remote_image_share_root'
+ CONF.irmc.remote_image_server = '10.20.30.40'
+ CONF.irmc.remote_image_share_type = 'NFS'
+ CONF.irmc.remote_image_share_name = 'share'
+ CONF.irmc.remote_image_user_name = 'admin'
+ CONF.irmc.remote_image_user_password = 'admin0'
+ CONF.irmc.remote_image_user_domain = 'local'
+
+ @mock.patch.object(iscsi_deploy, 'get_deploy_info', spec_set=True,
+ autospec=True)
+ def test_validate_pass_deploy_info(self, get_deploy_info_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.validate(task, method='pass_deploy_info', a=1)
+ get_deploy_info_mock.assert_called_once_with(task.node, a=1)
+
+ @mock.patch.object(iscsi_deploy, 'validate_pass_bootloader_info_input',
+ spec_set=True, autospec=True)
+ def test_validate_pass_bootloader_install_info(self, validate_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ kwargs = {'address': '1.2.3.4', 'key': 'fake-key',
+ 'status': 'SUCCEEDED', 'error': ''}
+ task.driver.vendor.validate(
+ task, method='pass_bootloader_install_info', **kwargs)
+ validate_mock.assert_called_once_with(task, kwargs)
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_prepare_boot_iso', spec_set=True,
+ autospec=True)
+ def test__configure_vmedia_boot(self,
+ _prepare_boot_iso_mock,
+ setup_vmedia_for_boot_mock,
+ node_set_boot_device):
+ root_uuid_or_disk_id = {'root uuid': 'root_uuid'}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_internal_info['irmc_boot_iso'] = 'boot.iso'
+ task.driver.vendor._configure_vmedia_boot(
+ task, root_uuid_or_disk_id)
+
+ _prepare_boot_iso_mock.assert_called_once_with(
+ task, root_uuid_or_disk_id)
+ setup_vmedia_for_boot_mock.assert_called_once_with(
+ task, 'boot.iso')
+ node_set_boot_device.assert_called_once_with(
+ task, boot_devices.CDROM, persistent=True)
+
+ @mock.patch.object(iscsi_deploy, 'validate_bootloader_install_status',
+ spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'finish_deploy', spec_set=True,
+ autospec=True)
+ def test_pass_bootloader_install_info(self, finish_deploy_mock,
+ validate_input_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.pass_bootloader_install_info(task, **kwargs)
+ finish_deploy_mock.assert_called_once_with(task, '123456')
+ validate_input_mock.assert_called_once_with(task, kwargs)
+
+ @mock.patch.object(deploy_utils, 'set_failed_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_prepare_boot_iso', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_ok(self,
+ _cleanup_vmedia_boot_mock,
+ continue_deploy_mock,
+ _prepare_boot_iso_mock,
+ setup_vmedia_for_boot_mock,
+ node_set_boot_device_mock,
+ notify_ramdisk_to_proceed_mock,
+ set_failed_state_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': 'root_uuid'}
+
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_internal_info['irmc_boot_iso'] = 'irmc_boot.iso'
+ task.driver.vendor.pass_deploy_info(task, **kwargs)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ continue_deploy_mock.assert_called_once_with(task, **kwargs)
+
+ _prepare_boot_iso_mock.assert_called_once_with(
+ task, 'root_uuid')
+ setup_vmedia_for_boot_mock.assert_called_once_with(
+ task, 'irmc_boot.iso')
+ node_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.CDROM, persistent=True)
+ notify_ramdisk_to_proceed_mock.assert_called_once_with(
+ '123456')
+
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+ self.assertFalse(set_failed_state_mock.called)
+
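+ # pass_deploy_info is only valid while the node is waiting for the deploy
+ # ramdisk; an AVAILABLE node should raise InvalidState and leave every
+ # mocked helper uncalled.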
+ @mock.patch.object(deploy_utils, 'set_failed_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_prepare_boot_iso', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_fail(self,
+ _cleanup_vmedia_boot_mock,
+ continue_deploy_mock,
+ _prepare_boot_iso_mock,
+ setup_vmedia_for_boot_mock,
+ node_set_boot_device_mock,
+ notify_ramdisk_to_proceed_mock,
+ set_failed_state_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+
+ self.node.provision_state = states.AVAILABLE
+ self.node.target_provision_state = states.NOSTATE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InvalidState,
+ task.driver.vendor.pass_deploy_info,
+ task, **kwargs)
+
+ self.assertEqual(states.AVAILABLE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+
+ self.assertFalse(_cleanup_vmedia_boot_mock.called)
+ self.assertFalse(continue_deploy_mock.called)
+ self.assertFalse(_prepare_boot_iso_mock.called)
+ self.assertFalse(setup_vmedia_for_boot_mock.called)
+ self.assertFalse(node_set_boot_device_mock.called)
+ self.assertFalse(notify_ramdisk_to_proceed_mock.called)
+ self.assertFalse(set_failed_state_mock.called)
+
+ @mock.patch.object(manager_utils, 'node_power_action', spec_set=True,
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_prepare_boot_iso', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info__prepare_boot_exception(
+ self,
+ _cleanup_vmedia_boot_mock,
+ continue_deploy_mock,
+ _prepare_boot_iso_mock,
+ setup_vmedia_for_boot_mock,
+ node_set_boot_device_mock,
+ notify_ramdisk_to_proceed_mock,
+ node_power_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': 'root_uuid'}
+ _prepare_boot_iso_mock.side_effect = Exception("fake error")
+
+ self.node.driver_internal_info = {'is_whole_disk_image': False}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.pass_deploy_info(task, **kwargs)
+
+ continue_deploy_mock.assert_called_once_with(
+ task, method='pass_deploy_info', address='123456')
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ _prepare_boot_iso_mock.assert_called_once_with(
+ task, 'root_uuid')
+
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ self.assertFalse(setup_vmedia_for_boot_mock.called)
+ self.assertFalse(node_set_boot_device_mock.called)
+ self.assertFalse(notify_ramdisk_to_proceed_mock.called)
+ node_power_mock.assert_called_once_with(task, states.POWER_OFF)
+
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_localboot(self,
+ _cleanup_vmedia_boot_mock,
+ continue_deploy_mock,
+ set_boot_device_mock,
+ notify_ramdisk_to_proceed_mock):
+
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': '<some-uuid>'}
+
+ self.node.driver_internal_info = {'is_whole_disk_image': False}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.instance_info = {'capabilities': '{"boot_option": "local"}'}
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = task.driver.vendor
+ vendor.pass_deploy_info(task, **kwargs)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ continue_deploy_mock.assert_called_once_with(task, **kwargs)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.DISK,
+ persistent=True)
+ notify_ramdisk_to_proceed_mock.assert_called_once_with('123456')
+ self.assertEqual(states.DEPLOYWAIT, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+
+ @mock.patch.object(iscsi_deploy, 'finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_whole_disk_image(
+ self,
+ _cleanup_vmedia_boot_mock,
+ continue_deploy_mock,
+ set_boot_device_mock,
+ notify_ramdisk_to_proceed_mock,
+ finish_deploy_mock):
+
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': '<some-uuid>'}
+
+ self.node.driver_internal_info = {'is_whole_disk_image': True}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = task.driver.vendor
+ vendor.pass_deploy_info(task, **kwargs)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ continue_deploy_mock.assert_called_once_with(task, **kwargs)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.DISK,
+ persistent=True)
+ self.assertFalse(notify_ramdisk_to_proceed_mock.called)
+ finish_deploy_mock.assert_called_once_with(task, '123456')
+
+ @mock.patch.object(iscsi_deploy, 'finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ spec_set=True, autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_pass_deploy_info_whole_disk_image_local(
+ self,
+ _cleanup_vmedia_boot_mock,
+ continue_deploy_mock,
+ set_boot_device_mock,
+ notify_ramdisk_to_proceed_mock,
+ finish_deploy_mock):
+
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ continue_deploy_mock.return_value = {'root uuid': '<some-uuid>'}
+
+ self.node.driver_internal_info = {'is_whole_disk_image': True}
+ self.node.instance_info = {'capabilities': '{"boot_option": "local"}'}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ vendor = task.driver.vendor
+ vendor.pass_deploy_info(task, **kwargs)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ continue_deploy_mock.assert_called_once_with(task, **kwargs)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.DISK,
+ persistent=True)
+ self.assertFalse(notify_ramdisk_to_proceed_mock.called)
+ finish_deploy_mock.assert_called_once_with(task, '123456')
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy.VendorPassthru, '_configure_vmedia_boot',
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_continue_deploy_netboot(self,
+ _cleanup_vmedia_boot_mock,
+ do_agent_iscsi_deploy_mock,
+ _configure_vmedia_boot_mock,
+ reboot_and_finish_deploy_mock):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.DEPLOYING
+ self.node.save()
+ do_agent_iscsi_deploy_mock.return_value = {
+ 'root uuid': 'some-root-uuid'}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.continue_deploy(task)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(task,
+ mock.ANY)
+ _configure_vmedia_boot_mock.assert_called_once_with(
+ mock.ANY, task, 'some-root-uuid')
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ task.driver.vendor, task)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'configure_local_boot', spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_continue_deploy_localboot(self,
+ _cleanup_vmedia_boot_mock,
+ do_agent_iscsi_deploy_mock,
+ configure_local_boot_mock,
+ reboot_and_finish_deploy_mock):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.DEPLOYING
+ self.node.instance_info = {
+ 'capabilities': {'boot_option': 'local'}}
+ self.node.save()
+ do_agent_iscsi_deploy_mock.return_value = {
+ 'root uuid': 'some-root-uuid'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.continue_deploy(task)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(task,
+ mock.ANY)
+ configure_local_boot_mock.assert_called_once_with(
+ mock.ANY, task, root_uuid='some-root-uuid',
+ efi_system_part_uuid=None)
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ mock.ANY, task)
+
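+ # Whole-disk images return a 'disk identifier' rather than a 'root uuid',
+ # so local boot is configured with root_uuid=None.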
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'configure_local_boot', spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', spec_set=True,
+ autospec=True)
+ def test_continue_deploy_whole_disk_image(self,
+ _cleanup_vmedia_boot_mock,
+ do_agent_iscsi_deploy_mock,
+ configure_local_boot_mock,
+ reboot_and_finish_deploy_mock):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.DEPLOYING
+ self.node.driver_internal_info = {'is_whole_disk_image': True}
+ self.node.save()
+ do_agent_iscsi_deploy_mock.return_value = {
+ 'disk identifier': 'some-disk-id'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.continue_deploy(task)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(task,
+ mock.ANY)
+ configure_local_boot_mock.assert_called_once_with(
+ mock.ANY, task, root_uuid=None, efi_system_part_uuid=None)
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ mock.ANY, task)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'configure_local_boot', spec_set=True, autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', autospec=True)
+ def test_continue_deploy_localboot_uefi(self,
+ _cleanup_vmedia_boot_mock,
+ do_agent_iscsi_deploy_mock,
+ configure_local_boot_mock,
+ reboot_and_finish_deploy_mock):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.DEPLOYING
+ self.node.instance_info = {
+ 'capabilities': {'boot_option': 'local'}}
+ self.node.save()
+ do_agent_iscsi_deploy_mock.return_value = {
+ 'root uuid': 'some-root-uuid',
+ 'efi system partition uuid': 'efi-system-part-uuid'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.continue_deploy(task)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(task,
+ mock.ANY)
+ configure_local_boot_mock.assert_called_once_with(
+ mock.ANY, task, root_uuid='some-root-uuid',
+ efi_system_part_uuid='efi-system-part-uuid')
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ mock.ANY, task)
+
+
+class IRMCVirtualMediaAgentVendorInterfaceTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IRMCVirtualMediaAgentVendorInterfaceTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="agent_irmc")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='agent_irmc', driver_info=INFO_DICT)
+
+ @mock.patch.object(agent.AgentVendorInterface, 'reboot_to_instance',
+ spec_set=True, autospec=True)
+ @mock.patch.object(irmc_deploy, '_cleanup_vmedia_boot', autospec=True)
+ def test_reboot_to_instance(self,
+ _cleanup_vmedia_boot_mock,
+ agent_reboot_to_instance_mock):
+ kwargs = {}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.reboot_to_instance(task, **kwargs)
+
+ _cleanup_vmedia_boot_mock.assert_called_once_with(task)
+ agent_reboot_to_instance_mock.assert_called_once_with(
+ mock.ANY, task, **kwargs)
diff --git a/ironic/tests/unit/drivers/irmc/test_management.py b/ironic/tests/unit/drivers/irmc/test_management.py
new file mode 100644
index 000000000..be75321ce
--- /dev/null
+++ b/ironic/tests/unit/drivers/irmc/test_management.py
@@ -0,0 +1,302 @@
+# Copyright 2015 FUJITSU LIMITED
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for iRMC Management Driver
+"""
+
+import os
+import xml.etree.ElementTree as ET
+
+import mock
+
+from ironic.common import boot_devices
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules import ipmitool
+from ironic.drivers.modules.irmc import common as irmc_common
+from ironic.drivers.modules.irmc import management as irmc_management
+from ironic.drivers import utils as driver_utils
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.drivers import third_party_driver_mock_specs \
+ as mock_specs
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_irmc_info()
+
+
+class IRMCManagementTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(IRMCManagementTestCase, self).setUp()
+ driver_info = INFO_DICT
+
+ mgr_utils.mock_the_extension_manager(driver="fake_irmc")
+ self.driver = driver_factory.get_driver("fake_irmc")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_irmc',
+ driver_info=driver_info)
+ self.info = irmc_common.parse_driver_info(self.node)
+
+ def test_get_properties(self):
+ # Copy so the module-level dict is not mutated by the updates below.
+ expected = irmc_common.COMMON_PROPERTIES.copy()
+ expected.update(ipmitool.COMMON_PROPERTIES)
+ expected.update(ipmitool.CONSOLE_PROPERTIES)
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ @mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.management.validate(task)
+ mock_drvinfo.assert_called_once_with(task.node)
+
+ @mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate_fail(self, mock_drvinfo):
+ side_effect = iter([exception.InvalidParameterValue("Invalid Input")])
+ mock_drvinfo.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.management.validate,
+ task)
+
+ def test_management_interface_get_supported_boot_devices(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ expected = [boot_devices.PXE, boot_devices.DISK,
+ boot_devices.CDROM, boot_devices.BIOS,
+ boot_devices.SAFE]
+ self.assertEqual(
+ sorted(expected),
+ sorted(task.driver.management.get_supported_boot_devices(task)))
+
+ @mock.patch.object(ipmitool.IPMIManagement, 'set_boot_device',
+ spec_set=True, autospec=True)
+ def test_management_interface_set_boot_device_no_mode_ok(
+ self,
+ set_boot_device_mock):
+ """no boot mode specified."""
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.set_boot_device(task, boot_devices.PXE)
+ set_boot_device_mock.assert_called_once_with(
+ task.driver.management, task,
+ boot_devices.PXE,
+ False)
+
+ @mock.patch.object(ipmitool.IPMIManagement, 'set_boot_device',
+ spec_set=True, autospec=True)
+ def test_management_interface_set_boot_device_bios_ok(
+ self,
+ set_boot_device_mock):
+ """bios mode specified."""
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ driver_utils.add_node_capability(task, 'boot_mode', 'bios')
+ task.driver.management.set_boot_device(task, boot_devices.PXE)
+ set_boot_device_mock.assert_called_once_with(
+ task.driver.management, task,
+ boot_devices.PXE,
+ False)
+
+ @mock.patch.object(irmc_management.ipmitool, "send_raw", spec_set=True,
+ autospec=True)
+ def _test_management_interface_set_boot_device_uefi_ok(self, params,
+ expected_raw_code,
+ send_raw_mock):
+ send_raw_mock.return_value = [None, None]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.properties['capabilities'] = ''
+ driver_utils.add_node_capability(task, 'boot_mode', 'uefi')
+ self.driver.management.set_boot_device(task, **params)
+ send_raw_mock.assert_has_calls([
+ mock.call(task, "0x00 0x08 0x03 0x08"),
+ mock.call(task, expected_raw_code)])
+
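+ # In the raw payloads asserted below, the fourth byte encodes persistence
+ # (0xa0 = next boot only, 0xe0 = persistent) and the fifth byte selects the
+ # device (0x04 PXE, 0x08 disk, 0x14 CD-ROM, 0x18 BIOS setup, 0x0c safe mode).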
+ def test_management_interface_set_boot_device_uefi_ok_pxe(self):
+ params = {'device': boot_devices.PXE, 'persistent': False}
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xa0 0x04 0x00 0x00 0x00")
+
+ params['persistent'] = True
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xe0 0x04 0x00 0x00 0x00")
+
+ def test_management_interface_set_boot_device_uefi_ok_disk(self):
+ params = {'device': boot_devices.DISK, 'persistent': False}
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xa0 0x08 0x00 0x00 0x00")
+
+ params['persistent'] = True
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xe0 0x08 0x00 0x00 0x00")
+
+ def test_management_interface_set_boot_device_uefi_ok_cdrom(self):
+ params = {'device': boot_devices.CDROM, 'persistent': False}
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xa0 0x14 0x00 0x00 0x00")
+
+ params['persistent'] = True
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xe0 0x14 0x00 0x00 0x00")
+
+ def test_management_interface_set_boot_device_uefi_ok_bios(self):
+ params = {'device': boot_devices.BIOS, 'persistent': False}
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xa0 0x18 0x00 0x00 0x00")
+
+ params['persistent'] = True
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xe0 0x18 0x00 0x00 0x00")
+
+ def test_management_interface_set_boot_device_uefi_ok_safe(self):
+ params = {'device': boot_devices.SAFE, 'persistent': False}
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xa0 0x0c 0x00 0x00 0x00")
+
+ params['persistent'] = True
+ self._test_management_interface_set_boot_device_uefi_ok(
+ params,
+ "0x00 0x08 0x05 0xe0 0x0c 0x00 0x00 0x00")
+
+ @mock.patch.object(irmc_management.ipmitool, "send_raw", spec_set=True,
+ autospec=True)
+ def test_management_interface_set_boot_device_uefi_ng(self,
+ send_raw_mock):
+ """uefi mode, next boot only, unknown device."""
+ send_raw_mock.return_value = [None, None]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ driver_utils.add_node_capability(task, 'boot_mode', 'uefi')
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.management.set_boot_device,
+ task,
+ "unknown")
+
+ @mock.patch.object(irmc_management, 'scci',
+ spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC)
+ @mock.patch.object(irmc_common, 'get_irmc_report', spec_set=True,
+ autospec=True)
+ def test_management_interface_get_sensors_data_scci_ok(
+ self, mock_get_irmc_report, mock_scci):
+ """'irmc_sensor_method' = 'scci' specified and OK data."""
+ with open(os.path.join(os.path.dirname(__file__),
+ 'fake_sensors_data_ok.xml'), "r") as report:
+ fake_txt = report.read()
+ fake_xml = ET.fromstring(fake_txt)
+
+ mock_get_irmc_report.return_value = fake_xml
+ mock_scci.get_sensor_data.return_value = fake_xml.find(
+ "./System/SensorDataRecords")
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['irmc_sensor_method'] = 'scci'
+ sensor_dict = self.driver.management.get_sensors_data(task)
+
+ expected = {
+ 'Fan (4)': {
+ 'FAN1 SYS (29)': {
+ 'Units': 'RPM',
+ 'Sensor ID': 'FAN1 SYS (29)',
+ 'Sensor Reading': '600 RPM'
+ },
+ 'FAN2 SYS (29)': {
+ 'Units': 'None',
+ 'Sensor ID': 'FAN2 SYS (29)',
+ 'Sensor Reading': 'None None'
+ }
+ },
+ 'Temperature (1)': {
+ 'Systemboard 1 (7)': {
+ 'Units': 'degree C',
+ 'Sensor ID': 'Systemboard 1 (7)',
+ 'Sensor Reading': '80 degree C'
+ },
+ 'Ambient (55)': {
+ 'Units': 'degree C',
+ 'Sensor ID': 'Ambient (55)',
+ 'Sensor Reading': '42 degree C'
+ }
+ }
+ }
+ self.assertEqual(expected, sensor_dict)
+
+ @mock.patch.object(irmc_management, 'scci',
+ spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC)
+ @mock.patch.object(irmc_common, 'get_irmc_report', spec_set=True,
+ autospec=True)
+ def test_management_interface_get_sensors_data_scci_ng(
+ self, mock_get_irmc_report, mock_scci):
+ """'irmc_sensor_method' = 'scci' specified and NG data."""
+ with open(os.path.join(os.path.dirname(__file__),
+ 'fake_sensors_data_ng.xml'), "r") as report:
+ fake_txt = report.read()
+ fake_xml = ET.fromstring(fake_txt)
+
+ mock_get_irmc_report.return_value = fake_xml
+ mock_scci.get_sensor_data.return_value = fake_xml.find(
+ "./System/SensorDataRecords")
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['irmc_sensor_method'] = 'scci'
+ sensor_dict = self.driver.management.get_sensors_data(task)
+
+ self.assertEqual(0, len(sensor_dict))
+
+ @mock.patch.object(ipmitool.IPMIManagement, 'get_sensors_data',
+ spec_set=True, autospec=True)
+ def test_management_interface_get_sensors_data_ipmitool_ok(
+ self,
+ get_sensors_data_mock):
+ """'irmc_sensor_method' = 'ipmitool' specified."""
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['irmc_sensor_method'] = 'ipmitool'
+ task.driver.management.get_sensors_data(task)
+ get_sensors_data_mock.assert_called_once_with(
+ task.driver.management, task)
+
+ @mock.patch.object(irmc_common, 'get_irmc_report', spec_set=True,
+ autospec=True)
+ def test_management_interface_get_sensors_data_exception(
+ self,
+ get_irmc_report_mock):
+ """'FailedToGetSensorData Exception."""
+
+ get_irmc_report_mock.side_effect = exception.InvalidParameterValue(
+ "Fake Error")
+ irmc_management.scci.SCCIInvalidInputError = Exception
+ irmc_management.scci.SCCIClientError = Exception
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['irmc_sensor_method'] = 'scci'
+ e = self.assertRaises(exception.FailedToGetSensorData,
+ self.driver.management.get_sensors_data,
+ task)
+ self.assertEqual("Failed to get sensor data for node 1be26c0b-" +
+ "03f2-4d2e-ae87-c02d7f33c123. Error: Fake Error",
+ str(e))
diff --git a/ironic/tests/unit/drivers/irmc/test_power.py b/ironic/tests/unit/drivers/irmc/test_power.py
new file mode 100644
index 000000000..1b51f9eac
--- /dev/null
+++ b/ironic/tests/unit/drivers/irmc/test_power.py
@@ -0,0 +1,224 @@
+# Copyright 2015 FUJITSU LIMITED
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for iRMC Power Driver
+"""
+
+import mock
+from oslo_config import cfg
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules.irmc import common as irmc_common
+from ironic.drivers.modules.irmc import deploy as irmc_deploy
+from ironic.drivers.modules.irmc import power as irmc_power
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_irmc_info()
+CONF = cfg.CONF
+
+
+@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
+ autospec=True)
+class IRMCPowerInternalMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IRMCPowerInternalMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_irmc')
+ driver_info = INFO_DICT
+ self.node = db_utils.create_test_node(
+ driver='fake_irmc',
+ driver_info=driver_info,
+ instance_uuid='instance_uuid_123')
+
+ @mock.patch.object(irmc_power, '_attach_boot_iso_if_needed')
+ def test__set_power_state_power_on_ok(
+ self,
+ _attach_boot_iso_if_needed_mock,
+ get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ target_state = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ irmc_power._set_power_state(task, target_state)
+ _attach_boot_iso_if_needed_mock.assert_called_once_with(task)
+ irmc_client.assert_called_once_with(irmc_power.scci.POWER_ON)
+
+ def test__set_power_state_power_off_ok(self,
+ get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ target_state = states.POWER_OFF
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ irmc_power._set_power_state(task, target_state)
+ irmc_client.assert_called_once_with(irmc_power.scci.POWER_OFF)
+
+ @mock.patch.object(irmc_power, '_attach_boot_iso_if_needed')
+ def test__set_power_state_power_reboot_ok(
+ self,
+ _attach_boot_iso_if_needed_mock,
+ get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ target_state = states.REBOOT
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ irmc_power._set_power_state(task, target_state)
+ _attach_boot_iso_if_needed_mock.assert_called_once_with(task)
+ irmc_client.assert_called_once_with(irmc_power.scci.POWER_RESET)
+
+ def test__set_power_state_invalid_target_state(self,
+ get_irmc_client_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ irmc_power._set_power_state,
+ task,
+ states.ERROR)
+
+ def test__set_power_state_scci_exception(self,
+ get_irmc_client_mock):
+ irmc_client = get_irmc_client_mock.return_value
+ irmc_client.side_effect = Exception()
+ irmc_power.scci.SCCIClientError = Exception
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.IRMCOperationError,
+ irmc_power._set_power_state,
+ task,
+ states.POWER_ON)
+
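+ # The boot ISO should only be re-attached for a node that is already
+ # ACTIVE; a node still DEPLOYING (e.g. on rebuild) must be left alone.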
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ def test__attach_boot_iso_if_needed(
+ self,
+ setup_vmedia_mock,
+ set_boot_device_mock,
+ get_irmc_client_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.provision_state = states.ACTIVE
+ task.node.driver_internal_info['irmc_boot_iso'] = 'boot-iso'
+ irmc_power._attach_boot_iso_if_needed(task)
+ setup_vmedia_mock.assert_called_once_with(task, 'boot-iso')
+ set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.CDROM)
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_deploy, 'setup_vmedia_for_boot', spec_set=True,
+ autospec=True)
+ def test__attach_boot_iso_if_needed_on_rebuild(
+ self,
+ setup_vmedia_mock,
+ set_boot_device_mock,
+ get_irmc_client_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.provision_state = states.DEPLOYING
+ task.node.driver_internal_info['irmc_boot_iso'] = 'boot-iso'
+ irmc_power._attach_boot_iso_if_needed(task)
+ self.assertFalse(setup_vmedia_mock.called)
+ self.assertFalse(set_boot_device_mock.called)
+
+
+class IRMCPowerTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(IRMCPowerTestCase, self).setUp()
+ driver_info = INFO_DICT
+ mgr_utils.mock_the_extension_manager(driver="fake_irmc")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_irmc',
+ driver_info=driver_info)
+
+ def test_get_properties(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ properties = task.driver.get_properties()
+ for prop in irmc_common.COMMON_PROPERTIES:
+ self.assertIn(prop, properties)
+
+ @mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.power.validate(task)
+ mock_drvinfo.assert_called_once_with(task.node)
+
+ @mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
+ autospec=True)
+ def test_validate_fail(self, mock_drvinfo):
+ side_effect = iter([exception.InvalidParameterValue("Invalid Input")])
+ mock_drvinfo.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.validate,
+ task)
+
+ @mock.patch('ironic.drivers.modules.irmc.power.ipmitool.IPMIPower',
+ spec_set=True, autospec=True)
+ def test_get_power_state(self, mock_IPMIPower):
+ ipmi_power = mock_IPMIPower.return_value
+ ipmi_power.get_power_state.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(states.POWER_ON,
+ task.driver.power.get_power_state(task))
+ ipmi_power.get_power_state.assert_called_once_with(task)
+
+ @mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ def test_set_power_state(self, mock_set_power):
+ mock_set_power.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+ mock_set_power.assert_called_once_with(task, states.POWER_ON)
+
+ @mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_power.IRMCPower, 'get_power_state', spec_set=True,
+ autospec=True)
+ def test_reboot_reboot(self, mock_get_power, mock_set_power):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_get_power.return_value = states.POWER_ON
+ task.driver.power.reboot(task)
+ mock_get_power.assert_called_once_with(
+ task.driver.power, task)
+ mock_set_power.assert_called_once_with(task, states.REBOOT)
+
+ @mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
+ autospec=True)
+ @mock.patch.object(irmc_power.IRMCPower, 'get_power_state', spec_set=True,
+ autospec=True)
+ def test_reboot_power_on(self, mock_get_power, mock_set_power):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_get_power.return_value = states.POWER_OFF
+ task.driver.power.reboot(task)
+ mock_get_power.assert_called_once_with(
+ task.driver.power, task)
+ mock_set_power.assert_called_once_with(task, states.POWER_ON)
diff --git a/ironic/tests/unit/drivers/msftocs/__init__.py b/ironic/tests/unit/drivers/msftocs/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/drivers/msftocs/__init__.py
diff --git a/ironic/tests/unit/drivers/msftocs/test_common.py b/ironic/tests/unit/drivers/msftocs/test_common.py
new file mode 100644
index 000000000..c5891a743
--- /dev/null
+++ b/ironic/tests/unit/drivers/msftocs/test_common.py
@@ -0,0 +1,110 @@
+# Copyright 2015 Cloudbase Solutions Srl
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for MSFT OCS common functions
+"""
+
+import mock
+
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.msftocs import common as msftocs_common
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_msftocs_info()
+
+
+class MSFTOCSCommonTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(MSFTOCSCommonTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_msftocs')
+ self.info = INFO_DICT
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_msftocs',
+ driver_info=self.info)
+
+ def test_get_client_info(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ driver_info = task.node.driver_info
+ (client, blade_id) = msftocs_common.get_client_info(driver_info)
+
+ self.assertEqual(driver_info['msftocs_base_url'], client._base_url)
+ self.assertEqual(driver_info['msftocs_username'], client._username)
+ self.assertEqual(driver_info['msftocs_password'], client._password)
+ self.assertEqual(driver_info['msftocs_blade_id'], blade_id)
+
+ @mock.patch.object(msftocs_common, '_is_valid_url', autospec=True)
+ def test_parse_driver_info(self, mock_is_valid_url):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ msftocs_common.parse_driver_info(task.node)
+ mock_is_valid_url.assert_called_once_with(
+ task.node.driver_info['msftocs_base_url'])
+
+ def test_parse_driver_info_fail_missing_param(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ del task.node.driver_info['msftocs_base_url']
+ self.assertRaises(exception.MissingParameterValue,
+ msftocs_common.parse_driver_info,
+ task.node)
+
+ def test_parse_driver_info_fail_bad_url(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.driver_info['msftocs_base_url'] = "bad-url"
+ self.assertRaises(exception.InvalidParameterValue,
+ msftocs_common.parse_driver_info,
+ task.node)
+
+ def test_parse_driver_info_fail_bad_blade_id_type(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.driver_info['msftocs_blade_id'] = "bad-blade-id"
+ self.assertRaises(exception.InvalidParameterValue,
+ msftocs_common.parse_driver_info,
+ task.node)
+
+ def test_parse_driver_info_fail_bad_blade_id_value(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.driver_info['msftocs_blade_id'] = 0
+ self.assertRaises(exception.InvalidParameterValue,
+ msftocs_common.parse_driver_info,
+ task.node)
+
+ def test__is_valid_url(self):
+ self.assertIs(True, msftocs_common._is_valid_url("http://fake.com"))
+ self.assertIs(
+ True, msftocs_common._is_valid_url("http://www.fake.com"))
+ self.assertIs(True, msftocs_common._is_valid_url("http://FAKE.com"))
+ self.assertIs(True, msftocs_common._is_valid_url("http://fake"))
+ self.assertIs(
+ True, msftocs_common._is_valid_url("http://fake.com/blah"))
+ self.assertIs(True, msftocs_common._is_valid_url("http://localhost"))
+ self.assertIs(True, msftocs_common._is_valid_url("https://fake.com"))
+ self.assertIs(True, msftocs_common._is_valid_url("http://10.0.0.1"))
+ self.assertIs(False, msftocs_common._is_valid_url("bad-url"))
+ self.assertIs(False, msftocs_common._is_valid_url("http://.bad-url"))
+ self.assertIs(False, msftocs_common._is_valid_url("http://bad-url$"))
+ self.assertIs(False, msftocs_common._is_valid_url("http://$bad-url"))
+ self.assertIs(False, msftocs_common._is_valid_url("http://bad$url"))
+ self.assertIs(False, msftocs_common._is_valid_url(None))
+ self.assertIs(False, msftocs_common._is_valid_url(0))
diff --git a/ironic/tests/unit/drivers/msftocs/test_management.py b/ironic/tests/unit/drivers/msftocs/test_management.py
new file mode 100644
index 000000000..a121a778d
--- /dev/null
+++ b/ironic/tests/unit/drivers/msftocs/test_management.py
@@ -0,0 +1,133 @@
+# Copyright 2015 Cloudbase Solutions Srl
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for MSFT OCS ManagementInterface
+"""
+
+import mock
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.msftocs import common as msftocs_common
+from ironic.drivers.modules.msftocs import msftocsclient
+from ironic.drivers import utils as drivers_utils
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_msftocs_info()
+
+
+class MSFTOCSManagementTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(MSFTOCSManagementTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_msftocs')
+ self.info = INFO_DICT
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_msftocs',
+ driver_info=self.info)
+
+ def test_get_properties(self):
+ expected = msftocs_common.REQUIRED_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ @mock.patch.object(msftocs_common, 'parse_driver_info', autospec=True)
+ def test_validate(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.power.validate(task)
+ mock_drvinfo.assert_called_once_with(task.node)
+
+ @mock.patch.object(msftocs_common, 'parse_driver_info', autospec=True)
+ def test_validate_fail(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_drvinfo.side_effect = iter(
+ [exception.InvalidParameterValue('x')])
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.validate,
+ task)
+
+ def test_get_supported_boot_devices(self):
+ expected = [boot_devices.PXE, boot_devices.DISK, boot_devices.BIOS]
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(
+ sorted(expected),
+ sorted(task.driver.management.
+ get_supported_boot_devices(task)))
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def _test_set_boot_device_one_time(self, persistent, uefi,
+ mock_gci):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+
+ if uefi:
+ drivers_utils.add_node_capability(task, 'boot_mode', 'uefi')
+
+ task.driver.management.set_boot_device(
+ task, boot_devices.PXE, persistent)
+
+ mock_gci.assert_called_once_with(task.node.driver_info)
+ mock_c.set_next_boot.assert_called_once_with(
+ blade_id, msftocsclient.BOOT_TYPE_FORCE_PXE, persistent, uefi)
+
+ def test_set_boot_device_one_time(self):
+ self._test_set_boot_device_one_time(False, False)
+
+ def test_set_boot_device_persistent(self):
+ self._test_set_boot_device_one_time(True, False)
+
+ def test_set_boot_device_uefi(self):
+ self._test_set_boot_device_one_time(True, True)
+
+ def test_set_boot_device_fail(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.management.set_boot_device,
+ task, 'fake-device')
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def test_get_boot_device(self, mock_gci):
+ expected = {'boot_device': boot_devices.DISK, 'persistent': None}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+ force_hdd = msftocsclient.BOOT_TYPE_FORCE_DEFAULT_HDD
+ mock_c.get_next_boot.return_value = force_hdd
+
+ self.assertEqual(expected,
+ task.driver.management.get_boot_device(task))
+ mock_gci.assert_called_once_with(task.node.driver_info)
+ mock_c.get_next_boot.assert_called_once_with(blade_id)
+
+ def test_get_sensor_data(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(NotImplementedError,
+ task.driver.management.get_sensors_data,
+ task)
diff --git a/ironic/tests/unit/drivers/msftocs/test_msftocsclient.py b/ironic/tests/unit/drivers/msftocs/test_msftocsclient.py
new file mode 100644
index 000000000..15be486c5
--- /dev/null
+++ b/ironic/tests/unit/drivers/msftocs/test_msftocsclient.py
@@ -0,0 +1,182 @@
+# Copyright 2015 Cloudbase Solutions Srl
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for MSFT OCS REST API client
+"""
+
+import mock
+import requests
+from requests import exceptions as requests_exceptions
+
+from ironic.common import exception
+from ironic.drivers.modules.msftocs import msftocsclient
+from ironic.tests.unit import base
+
+
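+# Canned XML payloads mimicking the MSFT OCS chassis manager REST responses
+# (all in the WCS namespace).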
+FAKE_BOOT_RESPONSE = (
+ '<BootResponse xmlns="%s" '
+ 'xmlns:i="http://www.w3.org/2001/XMLSchema-instance">'
+ '<completionCode>Success</completionCode>'
+ '<apiVersion>1</apiVersion>'
+ '<statusDescription>Success</statusDescription>'
+ '<bladeNumber>1</bladeNumber>'
+ '<nextBoot>ForcePxe</nextBoot>'
+ '</BootResponse>') % msftocsclient.WCSNS
+
+FAKE_BLADE_RESPONSE = (
+ '<BladeResponse xmlns="%s" '
+ 'xmlns:i="http://www.w3.org/2001/XMLSchema-instance">'
+ '<completionCode>Success</completionCode>'
+ '<apiVersion>1</apiVersion>'
+ '<statusDescription/>'
+ '<bladeNumber>1</bladeNumber>'
+ '</BladeResponse>') % msftocsclient.WCSNS
+
+FAKE_POWER_STATE_RESPONSE = (
+ '<PowerStateResponse xmlns="%s" '
+ 'xmlns:i="http://www.w3.org/2001/XMLSchema-instance">'
+ '<completionCode>Success</completionCode>'
+ '<apiVersion>1</apiVersion>'
+ '<statusDescription>Blade Power is On, firmware decompressed'
+ '</statusDescription>'
+ '<bladeNumber>1</bladeNumber>'
+ '<Decompression>0</Decompression>'
+ '<powerState>ON</powerState>'
+ '</PowerStateResponse>') % msftocsclient.WCSNS
+
+FAKE_BLADE_STATE_RESPONSE = (
+ '<BladeStateResponse xmlns="%s" '
+ 'xmlns:i="http://www.w3.org/2001/XMLSchema-instance">'
+ '<completionCode>Success</completionCode>'
+ '<apiVersion>1</apiVersion>'
+ '<statusDescription/>'
+ '<bladeNumber>1</bladeNumber>'
+ '<bladeState>ON</bladeState>'
+ '</BladeStateResponse>') % msftocsclient.WCSNS
+
+
+class MSFTOCSClientApiTestCase(base.TestCase):
+ def setUp(self):
+ super(MSFTOCSClientApiTestCase, self).setUp()
+ self._fake_base_url = "http://fakehost:8000"
+ self._fake_username = "admin"
+ self._fake_password = 'fake'
+ self._fake_blade_id = 1
+ self._client = msftocsclient.MSFTOCSClientApi(
+ self._fake_base_url, self._fake_username, self._fake_password)
+
+ @mock.patch.object(requests, 'get', autospec=True)
+ def test__exec_cmd(self, mock_get):
+ fake_response_text = 'fake_response_text'
+ fake_rel_url = 'fake_rel_url'
+ mock_get.return_value.text = fake_response_text
+
+ self.assertEqual(fake_response_text,
+ self._client._exec_cmd(fake_rel_url))
+ mock_get.assert_called_once_with(
+ self._fake_base_url + "/" + fake_rel_url, auth=mock.ANY)
+
+ @mock.patch.object(requests, 'get', autospec=True)
+ def test__exec_cmd_http_get_fail(self, mock_get):
+ fake_rel_url = 'fake_rel_url'
+ mock_get.side_effect = iter([requests_exceptions.ConnectionError('x')])
+
+ self.assertRaises(exception.MSFTOCSClientApiException,
+ self._client._exec_cmd,
+ fake_rel_url)
+ mock_get.assert_called_once_with(
+ self._fake_base_url + "/" + fake_rel_url, auth=mock.ANY)
+
+ def test__check_completion_code(self):
+ et = self._client._check_completion_code(FAKE_BOOT_RESPONSE)
+ self.assertEqual('{%s}BootResponse' % msftocsclient.WCSNS, et.tag)
+
+ def test__check_completion_code_fail(self):
+ self.assertRaises(exception.MSFTOCSClientApiException,
+ self._client._check_completion_code,
+ '<fake xmlns="%s"></fake>' % msftocsclient.WCSNS)
+
+ def test__check_completion_with_bad_completion_code_fail(self):
+ self.assertRaises(exception.MSFTOCSClientApiException,
+ self._client._check_completion_code,
+ '<fake xmlns="%s">'
+ '<completionCode>Fail</completionCode>'
+ '</fake>' % msftocsclient.WCSNS)
+
+ def test__check_completion_code_xml_parsing_fail(self):
+ self.assertRaises(exception.MSFTOCSClientApiException,
+ self._client._check_completion_code,
+ 'bad_xml')
+
+ @mock.patch.object(
+ msftocsclient.MSFTOCSClientApi, '_exec_cmd', autospec=True)
+ def test_get_blade_state(self, mock_exec_cmd):
+ mock_exec_cmd.return_value = FAKE_BLADE_STATE_RESPONSE
+ self.assertEqual(
+ msftocsclient.POWER_STATUS_ON,
+ self._client.get_blade_state(self._fake_blade_id))
+ mock_exec_cmd.assert_called_once_with(
+ self._client, "GetBladeState?bladeId=%d" % self._fake_blade_id)
+
+ @mock.patch.object(
+ msftocsclient.MSFTOCSClientApi, '_exec_cmd', autospec=True)
+ def test_set_blade_on(self, mock_exec_cmd):
+ mock_exec_cmd.return_value = FAKE_BLADE_RESPONSE
+ self._client.set_blade_on(self._fake_blade_id)
+ mock_exec_cmd.assert_called_once_with(
+ self._client, "SetBladeOn?bladeId=%d" % self._fake_blade_id)
+
+ @mock.patch.object(
+ msftocsclient.MSFTOCSClientApi, '_exec_cmd', autospec=True)
+ def test_set_blade_off(self, mock_exec_cmd):
+ mock_exec_cmd.return_value = FAKE_BLADE_RESPONSE
+ self._client.set_blade_off(self._fake_blade_id)
+ mock_exec_cmd.assert_called_once_with(
+ self._client, "SetBladeOff?bladeId=%d" % self._fake_blade_id)
+
+ @mock.patch.object(
+ msftocsclient.MSFTOCSClientApi, '_exec_cmd', autospec=True)
+ def test_set_blade_power_cycle(self, mock_exec_cmd):
+ mock_exec_cmd.return_value = FAKE_BLADE_RESPONSE
+ self._client.set_blade_power_cycle(self._fake_blade_id)
+ mock_exec_cmd.assert_called_once_with(
+ self._client,
+ "SetBladeActivePowerCycle?bladeId=%d&offTime=0" %
+ self._fake_blade_id)
+
+ @mock.patch.object(
+ msftocsclient.MSFTOCSClientApi, '_exec_cmd', autospec=True)
+ def test_get_next_boot(self, mock_exec_cmd):
+ mock_exec_cmd.return_value = FAKE_BOOT_RESPONSE
+ self.assertEqual(
+ msftocsclient.BOOT_TYPE_FORCE_PXE,
+ self._client.get_next_boot(self._fake_blade_id))
+ mock_exec_cmd.assert_called_once_with(
+ self._client, "GetNextBoot?bladeId=%d" % self._fake_blade_id)
+
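+ # Called without explicit uefi/persistent arguments, the client is expected
+ # to default both to "true" in the query string.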
+ @mock.patch.object(
+ msftocsclient.MSFTOCSClientApi, '_exec_cmd', autospec=True)
+ def test_set_next_boot(self, mock_exec_cmd):
+ mock_exec_cmd.return_value = FAKE_BOOT_RESPONSE
+ self._client.set_next_boot(self._fake_blade_id,
+ msftocsclient.BOOT_TYPE_FORCE_PXE)
+ mock_exec_cmd.assert_called_once_with(
+ self._client,
+ "SetNextBoot?bladeId=%(blade_id)d&bootType=%(boot_type)d&"
+ "uefi=%(uefi)s&persistent=%(persistent)s" %
+ {"blade_id": self._fake_blade_id,
+ "boot_type": msftocsclient.BOOT_TYPE_FORCE_PXE,
+ "uefi": "true", "persistent": "true"})
diff --git a/ironic/tests/unit/drivers/msftocs/test_power.py b/ironic/tests/unit/drivers/msftocs/test_power.py
new file mode 100644
index 000000000..aeeabe710
--- /dev/null
+++ b/ironic/tests/unit/drivers/msftocs/test_power.py
@@ -0,0 +1,164 @@
+# Copyright 2015 Cloudbase Solutions Srl
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for MSFT OCS PowerInterface
+"""
+
+import mock
+
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules.msftocs import common as msftocs_common
+from ironic.drivers.modules.msftocs import msftocsclient
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_msftocs_info()
+
+
+class MSFTOCSPowerTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(MSFTOCSPowerTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_msftocs')
+ self.info = INFO_DICT
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_msftocs',
+ driver_info=self.info)
+
+ def test_get_properties(self):
+ expected = msftocs_common.REQUIRED_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ @mock.patch.object(msftocs_common, 'parse_driver_info', autospec=True)
+ def test_validate(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.power.validate(task)
+ mock_drvinfo.assert_called_once_with(task.node)
+
+ @mock.patch.object(msftocs_common, 'parse_driver_info', autospec=True)
+ def test_validate_fail(self, mock_drvinfo):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_drvinfo.side_effect = iter(
+ [exception.InvalidParameterValue('x')])
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.validate,
+ task)
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def test_get_power_state(self, mock_gci):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+ mock_c.get_blade_state.return_value = msftocsclient.POWER_STATUS_ON
+
+ self.assertEqual(states.POWER_ON,
+ task.driver.power.get_power_state(task))
+ mock_gci.assert_called_once_with(task.node.driver_info)
+ mock_c.get_blade_state.assert_called_once_with(blade_id)
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def test_set_power_state_on(self, mock_gci):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+
+ task.driver.power.set_power_state(task, states.POWER_ON)
+ mock_gci.assert_called_once_with(task.node.driver_info)
+ mock_c.set_blade_on.assert_called_once_with(blade_id)
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def test_set_power_state_off(self, mock_gci):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+
+ task.driver.power.set_power_state(task, states.POWER_OFF)
+ mock_gci.assert_called_once_with(task.node.driver_info)
+ mock_c.set_blade_off.assert_called_once_with(blade_id)
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def test_set_power_state_blade_on_fail(self, mock_gci):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+
+ ex = exception.MSFTOCSClientApiException('x')
+ mock_c.set_blade_on.side_effect = ex
+
+ pstate = states.POWER_ON
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task, pstate)
+ mock_gci.assert_called_once_with(task.node.driver_info)
+ mock_c.set_blade_on.assert_called_once_with(blade_id)
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def test_set_power_state_invalid_parameter_fail(self, mock_gci):
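+ # states.ERROR is not a valid target power state, so the driver is
+ # expected to raise InvalidParameterValue instead of issuing any
+ # blade power command.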
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+
+ pstate = states.ERROR
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.set_power_state,
+ task, pstate)
+ mock_gci.assert_called_once_with(task.node.driver_info)
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def test_reboot(self, mock_gci):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+
+ task.driver.power.reboot(task)
+ mock_gci.assert_called_once_with(task.node.driver_info)
+ mock_c.set_blade_power_cycle.assert_called_once_with(blade_id)
+
+ @mock.patch.object(msftocs_common, 'get_client_info', autospec=True)
+ def test_reboot_fail(self, mock_gci):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_c = mock.MagicMock(spec=msftocsclient.MSFTOCSClientApi)
+ blade_id = task.node.driver_info['msftocs_blade_id']
+ mock_gci.return_value = (mock_c, blade_id)
+
+ ex = exception.MSFTOCSClientApiException('x')
+ mock_c.set_blade_power_cycle.side_effect = ex
+
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.reboot,
+ task)
+ mock_gci.assert_called_once_with(task.node.driver_info)
+ mock_c.set_blade_power_cycle.assert_called_once_with(blade_id)
diff --git a/ironic/tests/unit/drivers/pxe_config.template b/ironic/tests/unit/drivers/pxe_config.template
new file mode 100644
index 000000000..6bbc9afc5
--- /dev/null
+++ b/ironic/tests/unit/drivers/pxe_config.template
@@ -0,0 +1,20 @@
+default deploy
+
+label deploy
+kernel /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/deploy_kernel
+append initrd=/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/deploy_ramdisk selinux=0 disk=cciss/c0d0,sda,hda,vda iscsi_target_iqn=iqn-1be26c0b-03f2-4d2e-ae87-c02d7f33c123 deployment_id=1be26c0b-03f2-4d2e-ae87-c02d7f33c123 deployment_key=0123456789ABCDEFGHIJKLMNOPQRSTUV ironic_api_url=http://192.168.122.184:6385 troubleshoot=0 text test_param boot_option=netboot root_device=vendor=fake,size=123 ipa-api-url=http://192.168.122.184:6385 ipa-driver-name=pxe_ssh boot_mode=bios coreos.configdrive=0
+ipappend 3
+
+
+label boot_partition
+kernel /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/kernel
+append initrd=/tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/ramdisk root={{ ROOT }} ro text test_param
+
+
+label boot_whole_disk
+COM32 chain.c32
+append mbr:{{ DISK_IDENTIFIER }}
+
+label trusted_boot
+kernel mboot
+append tboot.gz --- /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/kernel root={{ ROOT }} ro text test_param intel_iommu=on --- /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/ramdisk
diff --git a/ironic/tests/unit/drivers/pxe_grub_config.template b/ironic/tests/unit/drivers/pxe_grub_config.template
new file mode 100644
index 000000000..96444c125
--- /dev/null
+++ b/ironic/tests/unit/drivers/pxe_grub_config.template
@@ -0,0 +1,18 @@
+set default=deploy
+set timeout=5
+set hidden_timeout_quiet=false
+
+menuentry "deploy" {
+ linuxefi /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/deploy_kernel selinux=0 troubleshoot=0 text disk=cciss/c0d0,sda,hda,vda iscsi_target_iqn=iqn-1be26c0b-03f2-4d2e-ae87-c02d7f33c123 deployment_id=1be26c0b-03f2-4d2e-ae87-c02d7f33c123 deployment_key=0123456789ABCDEFGHIJKLMNOPQRSTUV ironic_api_url=http://192.168.122.184:6385 test_param boot_server=192.0.2.1 root_device=vendor=fake,size=123 ipa-api-url=http://192.168.122.184:6385 ipa-driver-name=pxe_ssh boot_option=netboot boot_mode=uefi coreos.configdrive=0
+ initrdefi /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/deploy_ramdisk
+}
+
+menuentry "boot_partition" {
+ linuxefi /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/kernel root=(( ROOT )) ro text test_param boot_server=192.0.2.1
+ initrdefi /tftpboot/1be26c0b-03f2-4d2e-ae87-c02d7f33c123/ramdisk
+}
+
+menuentry "boot_whole_disk" {
+ linuxefi chain.c32 mbr:(( DISK_IDENTIFIER ))
+}
+
diff --git a/ironic/tests/unit/drivers/test_agent.py b/ironic/tests/unit/drivers/test_agent.py
new file mode 100644
index 000000000..41fe87a77
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_agent.py
@@ -0,0 +1,805 @@
+# Copyright 2014 Rackspace, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import types
+
+import mock
+from oslo_config import cfg
+
+from ironic.common import exception
+from ironic.common import image_service
+from ironic.common import images
+from ironic.common import raid
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules import agent
+from ironic.drivers.modules import agent_base_vendor
+from ironic.drivers.modules import agent_client
+from ironic.drivers.modules import deploy_utils
+from ironic.drivers.modules import fake
+from ironic.drivers.modules import pxe
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as object_utils
+
+
+INSTANCE_INFO = db_utils.get_test_agent_instance_info()
+DRIVER_INFO = db_utils.get_test_agent_driver_info()
+DRIVER_INTERNAL_INFO = db_utils.get_test_agent_driver_internal_info()
+
+CONF = cfg.CONF
+
+
+class TestAgentMethods(db_base.DbTestCase):
+ def setUp(self):
+ super(TestAgentMethods, self).setUp()
+ self.node = object_utils.create_test_node(self.context,
+ driver='fake_agent')
+
+ @mock.patch.object(image_service, 'GlanceImageService', autospec=True)
+ def test_build_instance_info_for_deploy_glance_image(self, glance_mock):
+ i_info = self.node.instance_info
+ i_info['image_source'] = '733d1c44-a2ea-414b-aca7-69decf20d810'
+ self.node.instance_info = i_info
+ self.node.save()
+
+ image_info = {'checksum': 'aa', 'disk_format': 'qcow2',
+ 'container_format': 'bare'}
+ glance_mock.return_value.show = mock.MagicMock(spec_set=[],
+ return_value=image_info)
+
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=False) as task:
+
+ agent.build_instance_info_for_deploy(task)
+
+ glance_mock.assert_called_once_with(version=2,
+ context=task.context)
+ glance_mock.return_value.show.assert_called_once_with(
+ self.node.instance_info['image_source'])
+ glance_mock.return_value.swift_temp_url.assert_called_once_with(
+ image_info)
+
+ @mock.patch.object(image_service.HttpImageService, 'validate_href',
+ autospec=True)
+ def test_build_instance_info_for_deploy_nonglance_image(
+ self, validate_href_mock):
+ i_info = self.node.instance_info
+ i_info['image_source'] = 'http://image-ref'
+ i_info['image_checksum'] = 'aa'
+ self.node.instance_info = i_info
+ self.node.save()
+
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=False) as task:
+
+ info = agent.build_instance_info_for_deploy(task)
+
+ self.assertEqual(self.node.instance_info['image_source'],
+ info['image_url'])
+ validate_href_mock.assert_called_once_with(
+ mock.ANY, 'http://image-ref')
+
+ @mock.patch.object(image_service.HttpImageService, 'validate_href',
+ autospec=True)
+ def test_build_instance_info_for_deploy_nonsupported_image(
+ self, validate_href_mock):
+ validate_href_mock.side_effect = iter(
+ [exception.ImageRefValidationFailed(
+ image_href='file://img.qcow2', reason='fail')])
+ i_info = self.node.instance_info
+ i_info['image_source'] = 'file://img.qcow2'
+ i_info['image_checksum'] = 'aa'
+ self.node.instance_info = i_info
+ self.node.save()
+
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=False) as task:
+
+ self.assertRaises(exception.ImageRefValidationFailed,
+ agent.build_instance_info_for_deploy, task)
+
+ @mock.patch.object(images, 'download_size', autospec=True)
+ def test_check_image_size(self, size_mock):
+ size_mock.return_value = 10 * 1024 * 1024
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['memory_mb'] = 10
+ agent.check_image_size(task, 'fake-image')
+ size_mock.assert_called_once_with(self.context, 'fake-image')
+
+ @mock.patch.object(images, 'download_size', autospec=True)
+ def test_check_image_size_without_memory_mb(self, size_mock):
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties.pop('memory_mb', None)
+ agent.check_image_size(task, 'fake-image')
+ self.assertFalse(size_mock.called)
+
+ @mock.patch.object(images, 'download_size', autospec=True)
+ def test_check_image_size_fail(self, size_mock):
+ size_mock.return_value = 11 * 1024 * 1024
+
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['memory_mb'] = 10
+ self.assertRaises(exception.InvalidParameterValue,
+ agent.check_image_size,
+ task, 'fake-image')
+ size_mock.assert_called_once_with(self.context, 'fake-image')
+
+ @mock.patch.object(images, 'download_size', autospec=True)
+ def test_check_image_size_fail_by_agent_consumed_memory(self, size_mock):
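+ # A 9 MiB image fits in the node's 10 MiB of RAM on its own, but the
+ # 2 MiB reserved for the agent pushes the total over the limit, so
+ # the size check should fail.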
+ self.config(memory_consumed_by_agent=2, group='agent')
+ size_mock.return_value = 9 * 1024 * 1024
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['memory_mb'] = 10
+ self.assertRaises(exception.InvalidParameterValue,
+ agent.check_image_size,
+ task, 'fake-image')
+ size_mock.assert_called_once_with(self.context, 'fake-image')
+
+
+class TestAgentDeploy(db_base.DbTestCase):
+ def setUp(self):
+ super(TestAgentDeploy, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ self.driver = agent.AgentDeploy()
+ n = {
+ 'driver': 'fake_agent',
+ 'instance_info': INSTANCE_INFO,
+ 'driver_info': DRIVER_INFO,
+ 'driver_internal_info': DRIVER_INTERNAL_INFO,
+ }
+ self.node = object_utils.create_test_node(self.context, **n)
+ self.ports = [
+ object_utils.create_test_port(self.context, node_id=self.node.id)]
+
+ def test_get_properties(self):
+ expected = agent.COMMON_PROPERTIES
+ self.assertEqual(expected, self.driver.get_properties())
+
+ @mock.patch.object(images, 'download_size', autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'validate', autospec=True)
+ def test_validate(self, pxe_boot_validate_mock, size_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ self.driver.validate(task)
+ pxe_boot_validate_mock.assert_called_once_with(
+ task.driver.boot, task)
+ size_mock.assert_called_once_with(self.context, 'fake-image')
+
+ @mock.patch.object(images, 'download_size', autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'validate', autospec=True)
+ def test_validate_driver_info_manage_agent_boot_false(
+ self, pxe_boot_validate_mock, size_mock):
+ self.config(manage_agent_boot=False, group='agent')
+ self.node.driver_info = {}
+ self.node.save()
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ self.driver.validate(task)
+ self.assertFalse(pxe_boot_validate_mock.called)
+ size_mock.assert_called_once_with(self.context, 'fake-image')
+
+ @mock.patch.object(pxe.PXEBoot, 'validate', autospec=True)
+ def test_validate_instance_info_missing_params(
+ self, pxe_boot_validate_mock):
+ self.node.instance_info = {}
+ self.node.save()
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ e = self.assertRaises(exception.MissingParameterValue,
+ self.driver.validate, task)
+ pxe_boot_validate_mock.assert_called_once_with(
+ task.driver.boot, task)
+
+ self.assertIn('instance_info.image_source', str(e))
+
+ @mock.patch.object(pxe.PXEBoot, 'validate', autospec=True)
+ def test_validate_nonglance_image_no_checksum(
+ self, pxe_boot_validate_mock):
+ i_info = self.node.instance_info
+ i_info['image_source'] = 'http://image-ref'
+ del i_info['image_checksum']
+ self.node.instance_info = i_info
+ self.node.save()
+
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=False) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ self.driver.validate, task)
+ pxe_boot_validate_mock.assert_called_once_with(
+ task.driver.boot, task)
+
+ @mock.patch.object(images, 'download_size', autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'validate', autospec=True)
+ def test_validate_agent_fail_partition_image(
+ self, pxe_boot_validate_mock, size_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.validate, task)
+ pxe_boot_validate_mock.assert_called_once_with(
+ task.driver.boot, task)
+ size_mock.assert_called_once_with(self.context, 'fake-image')
+
+ @mock.patch.object(images, 'download_size', autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'validate', autospec=True)
+ def test_validate_invalid_root_device_hints(
+ self, pxe_boot_validate_mock, size_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.properties['root_device'] = {'size': 'not-int'}
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.deploy.validate, task)
+ pxe_boot_validate_mock.assert_called_once_with(
+ task.driver.boot, task)
+ size_mock.assert_called_once_with(self.context, 'fake-image')
+
+ @mock.patch('ironic.conductor.utils.node_power_action', autospec=True)
+ def test_deploy(self, power_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ driver_return = self.driver.deploy(task)
+ self.assertEqual(states.DEPLOYWAIT, driver_return)
+ power_mock.assert_called_once_with(task, states.REBOOT)
+
+ @mock.patch('ironic.conductor.utils.node_power_action', autospec=True)
+ def test_tear_down(self, power_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ driver_return = self.driver.tear_down(task)
+ power_mock.assert_called_once_with(task, states.POWER_OFF)
+ self.assertEqual(states.DELETED, driver_return)
+
+ @mock.patch.object(pxe.PXEBoot, 'prepare_ramdisk')
+ @mock.patch.object(deploy_utils, 'build_agent_options')
+ @mock.patch.object(agent, 'build_instance_info_for_deploy')
+ def test_prepare(self, build_instance_info_mock, build_options_mock,
+ pxe_prepare_ramdisk_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ task.node.provision_state = states.DEPLOYING
+ build_instance_info_mock.return_value = {'foo': 'bar'}
+ build_options_mock.return_value = {'a': 'b'}
+
+ self.driver.prepare(task)
+
+ build_instance_info_mock.assert_called_once_with(task)
+ build_options_mock.assert_called_once_with(task.node)
+ pxe_prepare_ramdisk_mock.assert_called_once_with(
+ task, {'a': 'b'})
+
+ self.node.refresh()
+ self.assertEqual('bar', self.node.instance_info['foo'])
+
+ @mock.patch.object(pxe.PXEBoot, 'prepare_ramdisk')
+ @mock.patch.object(deploy_utils, 'build_agent_options')
+ @mock.patch.object(agent, 'build_instance_info_for_deploy')
+ def test_prepare_manage_agent_boot_false(
+ self, build_instance_info_mock, build_options_mock,
+ pxe_prepare_ramdisk_mock):
+ self.config(group='agent', manage_agent_boot=False)
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ task.node.provision_state = states.DEPLOYING
+ build_instance_info_mock.return_value = {'foo': 'bar'}
+
+ self.driver.prepare(task)
+
+ build_instance_info_mock.assert_called_once_with(task)
+ self.assertFalse(build_options_mock.called)
+ self.assertFalse(pxe_prepare_ramdisk_mock.called)
+
+ self.node.refresh()
+ self.assertEqual('bar', self.node.instance_info['foo'])
+
+ @mock.patch.object(pxe.PXEBoot, 'prepare_ramdisk')
+ @mock.patch.object(deploy_utils, 'build_agent_options')
+ @mock.patch.object(agent, 'build_instance_info_for_deploy')
+ def test_prepare_active(
+ self, build_instance_info_mock, build_options_mock,
+ pxe_prepare_ramdisk_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ task.node.provision_state = states.ACTIVE
+
+ self.driver.prepare(task)
+
+ self.assertFalse(build_instance_info_mock.called)
+ self.assertFalse(build_options_mock.called)
+ self.assertFalse(pxe_prepare_ramdisk_mock.called)
+
+ @mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk')
+ def test_clean_up(self, pxe_clean_up_ramdisk_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ self.driver.clean_up(task)
+ pxe_clean_up_ramdisk_mock.assert_called_once_with(task)
+
+ @mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk')
+ def test_clean_up_manage_agent_boot_false(self, pxe_clean_up_ramdisk_mock):
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ self.config(group='agent', manage_agent_boot=False)
+ self.driver.clean_up(task)
+ self.assertFalse(pxe_clean_up_ramdisk_mock.called)
+
+ @mock.patch('ironic.drivers.modules.deploy_utils.agent_get_clean_steps',
+ autospec=True)
+ def test_get_clean_steps(self, mock_get_clean_steps):
+ # Test getting clean steps
+ mock_steps = [{'priority': 10, 'interface': 'deploy',
+ 'step': 'erase_devices'}]
+ mock_get_clean_steps.return_value = mock_steps
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ steps = self.driver.get_clean_steps(task)
+ mock_get_clean_steps.assert_called_once_with(task)
+ self.assertEqual(mock_steps, steps)
+
+ @mock.patch('ironic.drivers.modules.deploy_utils.agent_get_clean_steps',
+ autospec=True)
+ def test_get_clean_steps_config_priority(self, mock_get_clean_steps):
+ # Test that we can override the priority of get clean steps
+ # Use 0 because it is an edge case (false-y) and used in devstack
+ self.config(erase_devices_priority=0, group='deploy')
+ mock_steps = [{'priority': 10, 'interface': 'deploy',
+ 'step': 'erase_devices'}]
+ expected_steps = [{'priority': 0, 'interface': 'deploy',
+ 'step': 'erase_devices'}]
+ mock_get_clean_steps.return_value = mock_steps
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ steps = self.driver.get_clean_steps(task)
+ mock_get_clean_steps.assert_called_once_with(task)
+ self.assertEqual(expected_steps, steps)
+
+ @mock.patch.object(deploy_utils, 'prepare_inband_cleaning', autospec=True)
+ def test_prepare_cleaning(self, prepare_inband_cleaning_mock):
+ prepare_inband_cleaning_mock.return_value = states.CLEANWAIT
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertEqual(
+ states.CLEANWAIT, self.driver.prepare_cleaning(task))
+ prepare_inband_cleaning_mock.assert_called_once_with(
+ task, manage_boot=True)
+
+ @mock.patch.object(deploy_utils, 'prepare_inband_cleaning', autospec=True)
+ def test_prepare_cleaning_manage_agent_boot_false(
+ self, prepare_inband_cleaning_mock):
+ prepare_inband_cleaning_mock.return_value = states.CLEANWAIT
+ self.config(group='agent', manage_agent_boot=False)
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertEqual(
+ states.CLEANWAIT, self.driver.prepare_cleaning(task))
+ prepare_inband_cleaning_mock.assert_called_once_with(
+ task, manage_boot=False)
+
+ @mock.patch.object(deploy_utils, 'tear_down_inband_cleaning',
+ autospec=True)
+ def test_tear_down_cleaning(self, tear_down_cleaning_mock):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.driver.tear_down_cleaning(task)
+ tear_down_cleaning_mock.assert_called_once_with(
+ task, manage_boot=True)
+
+ @mock.patch.object(deploy_utils, 'tear_down_inband_cleaning',
+ autospec=True)
+ def test_tear_down_cleaning_manage_agent_boot_false(
+ self, tear_down_cleaning_mock):
+ self.config(group='agent', manage_agent_boot=False)
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.driver.tear_down_cleaning(task)
+ tear_down_cleaning_mock.assert_called_once_with(
+ task, manage_boot=False)
+
+
+class TestAgentVendor(db_base.DbTestCase):
+
+ def setUp(self):
+ super(TestAgentVendor, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_agent")
+ self.passthru = agent.AgentVendorInterface()
+ n = {
+ 'driver': 'fake_agent',
+ 'instance_info': INSTANCE_INFO,
+ 'driver_info': DRIVER_INFO,
+ 'driver_internal_info': DRIVER_INTERNAL_INFO,
+ }
+ self.node = object_utils.create_test_node(self.context, **n)
+
+ def test_continue_deploy(self):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ test_temp_url = 'http://image'
+ expected_image_info = {
+ 'urls': [test_temp_url],
+ 'id': 'fake-image',
+ 'checksum': 'checksum',
+ 'disk_format': 'qcow2',
+ 'container_format': 'bare',
+ }
+
+ client_mock = mock.MagicMock(spec_set=['prepare_image'])
+ self.passthru._client = client_mock
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.passthru.continue_deploy(task)
+
+ client_mock.prepare_image.assert_called_with(task.node,
+ expected_image_info)
+ self.assertEqual(states.DEPLOYWAIT, task.node.provision_state)
+ self.assertEqual(states.ACTIVE,
+ task.node.target_provision_state)
+
+ def test_continue_deploy_image_source_is_url(self):
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ test_temp_url = 'http://image'
+ expected_image_info = {
+ 'urls': [test_temp_url],
+ 'id': self.node.instance_info['image_source'],
+ 'checksum': 'checksum',
+ 'disk_format': 'qcow2',
+ 'container_format': 'bare',
+ }
+
+ client_mock = mock.MagicMock(spec_set=['prepare_image'])
+ self.passthru._client = client_mock
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.passthru.continue_deploy(task)
+
+ client_mock.prepare_image.assert_called_with(task.node,
+ expected_image_info)
+ self.assertEqual(states.DEPLOYWAIT, task.node.provision_state)
+ self.assertEqual(states.ACTIVE,
+ task.node.target_provision_state)
+
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(fake.FakePower, 'get_power_state',
+ spec=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'power_off',
+ spec=types.FunctionType)
+ @mock.patch('ironic.conductor.utils.node_set_boot_device', autospec=True)
+ @mock.patch('ironic.drivers.modules.agent.AgentVendorInterface'
+ '.check_deploy_success', autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk', autospec=True)
+ def test_reboot_to_instance(self, clean_pxe_mock, check_deploy_mock,
+ bootdev_mock, power_off_mock,
+ get_power_state_mock, node_power_action_mock):
+ check_deploy_mock.return_value = None
+
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ get_power_state_mock.return_value = states.POWER_OFF
+ task.node.driver_internal_info['is_whole_disk_image'] = True
+
+ self.passthru.reboot_to_instance(task)
+
+ clean_pxe_mock.assert_called_once_with(task.driver.boot, task)
+ check_deploy_mock.assert_called_once_with(mock.ANY, task.node)
+ bootdev_mock.assert_called_once_with(task, 'disk', persistent=True)
+ power_off_mock.assert_called_once_with(task.node)
+ get_power_state_mock.assert_called_once_with(task)
+ node_power_action_mock.assert_called_once_with(
+ task, states.POWER_ON)
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(fake.FakePower, 'get_power_state',
+ spec=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'power_off',
+ spec=types.FunctionType)
+ @mock.patch('ironic.conductor.utils.node_set_boot_device', autospec=True)
+ @mock.patch('ironic.drivers.modules.agent.AgentVendorInterface'
+ '.check_deploy_success', autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk', autospec=True)
+ def test_reboot_to_instance_boot_none(self, clean_pxe_mock,
+ check_deploy_mock,
+ bootdev_mock, power_off_mock,
+ get_power_state_mock,
+ node_power_action_mock):
+ check_deploy_mock.return_value = None
+
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ get_power_state_mock.return_value = states.POWER_OFF
+ task.node.driver_internal_info['is_whole_disk_image'] = True
+ task.driver.boot = None
+
+ self.passthru.reboot_to_instance(task)
+
+ self.assertFalse(clean_pxe_mock.called)
+ check_deploy_mock.assert_called_once_with(mock.ANY, task.node)
+ bootdev_mock.assert_called_once_with(task, 'disk', persistent=True)
+ power_off_mock.assert_called_once_with(task.node)
+ get_power_state_mock.assert_called_once_with(task)
+ node_power_action_mock.assert_called_once_with(
+ task, states.POWER_ON)
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_has_started(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = []
+ self.assertFalse(self.passthru.deploy_has_started(task))
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_has_started_is_done(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = [{'command_name': 'prepare_image',
+ 'command_status': 'SUCCESS'}]
+ self.assertTrue(self.passthru.deploy_has_started(task))
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_has_started_did_start(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = [{'command_name': 'prepare_image',
+ 'command_status': 'RUNNING'}]
+ self.assertTrue(self.passthru.deploy_has_started(task))
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_has_started_multiple_commands(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = [{'command_name': 'cache_image',
+ 'command_status': 'SUCCESS'},
+ {'command_name': 'prepare_image',
+ 'command_status': 'RUNNING'}]
+ self.assertTrue(self.passthru.deploy_has_started(task))
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_has_started_other_commands(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = [{'command_name': 'cache_image',
+ 'command_status': 'SUCCESS'}]
+ self.assertFalse(self.passthru.deploy_has_started(task))
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_is_done(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = [{'command_name': 'prepare_image',
+ 'command_status': 'SUCCESS'}]
+ self.assertTrue(self.passthru.deploy_is_done(task))
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_is_done_empty_response(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = []
+ self.assertFalse(self.passthru.deploy_is_done(task))
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_is_done_race(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = [{'command_name': 'some_other_command',
+ 'command_status': 'SUCCESS'}]
+ self.assertFalse(self.passthru.deploy_is_done(task))
+
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_deploy_is_done_still_running(self, mock_get_cmd):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_get_cmd.return_value = [{'command_name': 'prepare_image',
+ 'command_status': 'RUNNING'}]
+ self.assertFalse(self.passthru.deploy_is_done(task))
+
+
+class AgentRAIDTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(AgentRAIDTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_agent")
+ self.passthru = agent.AgentVendorInterface()
+ self.target_raid_config = {
+ "logical_disks": [
+ {'size_gb': 200, 'raid_level': 0, 'is_root_volume': True},
+ {'size_gb': 200, 'raid_level': 5}
+ ]}
+ self.clean_step = {'step': 'create_configuration',
+ 'interface': 'raid'}
+ n = {
+ 'driver': 'fake_agent',
+ 'instance_info': INSTANCE_INFO,
+ 'driver_info': DRIVER_INFO,
+ 'driver_internal_info': DRIVER_INTERNAL_INFO,
+ 'target_raid_config': self.target_raid_config,
+ 'clean_step': self.clean_step,
+ }
+ self.node = object_utils.create_test_node(self.context, **n)
+
+ @mock.patch.object(deploy_utils, 'agent_get_clean_steps', autospec=True)
+ def test_get_clean_steps(self, get_steps_mock):
+ get_steps_mock.return_value = [
+ {'step': 'create_configuration', 'interface': 'raid',
+ 'priority': 1},
+ {'step': 'delete_configuration', 'interface': 'raid',
+ 'priority': 2}]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ ret = task.driver.raid.get_clean_steps(task)
+
+ self.assertEqual(0, ret[0]['priority'])
+ self.assertEqual(0, ret[1]['priority'])
+
+ @mock.patch.object(deploy_utils, 'agent_execute_clean_step',
+ autospec=True)
+ def test_create_configuration(self, execute_mock):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ execute_mock.return_value = states.CLEANWAIT
+
+ return_value = task.driver.raid.create_configuration(task)
+
+ self.assertEqual(states.CLEANWAIT, return_value)
+ self.assertEqual(
+ self.target_raid_config,
+ task.node.driver_internal_info['target_raid_config'])
+ execute_mock.assert_called_once_with(task, self.clean_step)
+
+ @mock.patch.object(deploy_utils, 'agent_execute_clean_step',
+ autospec=True)
+ def test_create_configuration_skip_root(self, execute_mock):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ execute_mock.return_value = states.CLEANWAIT
+
+ return_value = task.driver.raid.create_configuration(
+ task, create_root_volume=False)
+
+ self.assertEqual(states.CLEANWAIT, return_value)
+ execute_mock.assert_called_once_with(task, self.clean_step)
+ exp_target_raid_config = {
+ "logical_disks": [
+ {'size_gb': 200, 'raid_level': 5}
+ ]}
+ self.assertEqual(
+ exp_target_raid_config,
+ task.node.driver_internal_info['target_raid_config'])
+
+ @mock.patch.object(deploy_utils, 'agent_execute_clean_step',
+ autospec=True)
+ def test_create_configuration_skip_nonroot(self, execute_mock):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ execute_mock.return_value = states.CLEANWAIT
+
+ return_value = task.driver.raid.create_configuration(
+ task, create_nonroot_volumes=False)
+
+ self.assertEqual(states.CLEANWAIT, return_value)
+ execute_mock.assert_called_once_with(task, self.clean_step)
+ exp_target_raid_config = {
+ "logical_disks": [
+ {'size_gb': 200, 'raid_level': 0, 'is_root_volume': True},
+ ]}
+ self.assertEqual(
+ exp_target_raid_config,
+ task.node.driver_internal_info['target_raid_config'])
+
+ @mock.patch.object(deploy_utils, 'agent_execute_clean_step',
+ autospec=True)
+ def test_create_configuration_no_target_raid_config_after_skipping(
+ self, execute_mock):
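+ # Skipping both the root and the non-root volumes leaves nothing to
+ # configure, so create_configuration should raise
+ # MissingParameterValue instead of running a clean step.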
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(
+ exception.MissingParameterValue,
+ task.driver.raid.create_configuration,
+ task, create_root_volume=False,
+ create_nonroot_volumes=False)
+
+ self.assertFalse(execute_mock.called)
+
+ @mock.patch.object(deploy_utils, 'agent_execute_clean_step',
+ autospec=True)
+ def test_create_configuration_empty_target_raid_config(
+ self, execute_mock):
+ execute_mock.return_value = states.CLEANING
+ self.node.target_raid_config = {}
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.raid.create_configuration,
+ task)
+ self.assertFalse(execute_mock.called)
+
+ @mock.patch.object(raid, 'update_raid_info', autospec=True)
+ def test__create_configuration_final(
+ self, update_raid_info_mock):
+ command = {'command_result': {'clean_result': 'foo'}}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ raid_mgmt = agent.AgentRAID
+ raid_mgmt._create_configuration_final(task, command)
+ update_raid_info_mock.assert_called_once_with(task.node, 'foo')
+
+ @mock.patch.object(raid, 'update_raid_info', autospec=True)
+ def test__create_configuration_final_registered(
+ self, update_raid_info_mock):
+ self.node.clean_step = {'interface': 'raid',
+ 'step': 'create_configuration'}
+ command = {'command_result': {'clean_result': 'foo'}}
+ create_hook = agent_base_vendor._get_post_clean_step_hook(self.node)
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ create_hook(task, command)
+ update_raid_info_mock.assert_called_once_with(task.node, 'foo')
+
+ @mock.patch.object(raid, 'update_raid_info', autospec=True)
+ def test__create_configuration_final_bad_command_result(
+ self, update_raid_info_mock):
+ command = {}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ raid_mgmt = agent.AgentRAID
+ self.assertRaises(exception.IronicException,
+ raid_mgmt._create_configuration_final,
+ task, command)
+ self.assertFalse(update_raid_info_mock.called)
+
+ @mock.patch.object(deploy_utils, 'agent_execute_clean_step',
+ autospec=True)
+ def test_delete_configuration(self, execute_mock):
+ execute_mock.return_value = states.CLEANING
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ return_value = task.driver.raid.delete_configuration(task)
+
+ execute_mock.assert_called_once_with(task, self.clean_step)
+ self.assertEqual(states.CLEANING, return_value)
+
+ def test__delete_configuration_final(self):
+ command = {'command_result': {'clean_result': 'foo'}}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.raid_config = {'foo': 'bar'}
+ raid_mgmt = agent.AgentRAID
+ raid_mgmt._delete_configuration_final(task, command)
+
+ self.node.refresh()
+ self.assertEqual({}, self.node.raid_config)
+
+ def test__delete_configuration_final_registered(
+ self):
+ self.node.clean_step = {'interface': 'raid',
+ 'step': 'delete_configuration'}
+ self.node.raid_config = {'foo': 'bar'}
+ command = {'command_result': {'clean_result': 'foo'}}
+ delete_hook = agent_base_vendor._get_post_clean_step_hook(self.node)
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ delete_hook(task, command)
+
+ self.node.refresh()
+ self.assertEqual({}, self.node.raid_config)
diff --git a/ironic/tests/unit/drivers/test_agent_base_vendor.py b/ironic/tests/unit/drivers/test_agent_base_vendor.py
new file mode 100644
index 000000000..2cf23ab3e
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_agent_base_vendor.py
@@ -0,0 +1,948 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2015 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import time
+import types
+
+import mock
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import manager
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules import agent_base_vendor
+from ironic.drivers.modules import agent_client
+from ironic.drivers.modules import deploy_utils
+from ironic.drivers.modules import fake
+from ironic import objects
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as object_utils
+
+INSTANCE_INFO = db_utils.get_test_agent_instance_info()
+DRIVER_INFO = db_utils.get_test_agent_driver_info()
+DRIVER_INTERNAL_INFO = db_utils.get_test_agent_driver_internal_info()
+
+
+class TestBaseAgentVendor(db_base.DbTestCase):
+
+ def setUp(self):
+ super(TestBaseAgentVendor, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_agent")
+ self.passthru = agent_base_vendor.BaseAgentVendor()
+ n = {
+ 'driver': 'fake_agent',
+ 'instance_info': INSTANCE_INFO,
+ 'driver_info': DRIVER_INFO,
+ 'driver_internal_info': DRIVER_INTERNAL_INFO,
+ }
+ self.node = object_utils.create_test_node(self.context, **n)
+
+ def test_validate(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ method = 'heartbeat'
+ self.passthru.validate(task, method)
+
+ def test_driver_validate(self):
+ kwargs = {'version': '2'}
+ method = 'lookup'
+ self.passthru.driver_validate(method, **kwargs)
+
+ def test_driver_validate_invalid_parameter(self):
+ method = 'lookup'
+ kwargs = {'version': '1'}
+ self.assertRaises(exception.InvalidParameterValue,
+ self.passthru.driver_validate,
+ method, **kwargs)
+
+ def test_driver_validate_missing_parameter(self):
+ method = 'lookup'
+ kwargs = {}
+ self.assertRaises(exception.MissingParameterValue,
+ self.passthru.driver_validate,
+ method, **kwargs)
+
+ def test_lookup_version_not_found(self):
+ kwargs = {
+ 'version': '999',
+ }
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.passthru.lookup,
+ task.context,
+ **kwargs)
+
+ @mock.patch('ironic.drivers.modules.agent_base_vendor.BaseAgentVendor'
+ '._find_node_by_macs', autospec=True)
+ def test_lookup_v2(self, find_mock):
+ kwargs = {
+ 'version': '2',
+ 'inventory': {
+ 'interfaces': [
+ {
+ 'mac_address': 'aa:bb:cc:dd:ee:ff',
+ 'name': 'eth0'
+ },
+ {
+ 'mac_address': 'ff:ee:dd:cc:bb:aa',
+ 'name': 'eth1'
+ }
+
+ ]
+ }
+ }
+ find_mock.return_value = self.node
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ node = self.passthru.lookup(task.context, **kwargs)
+ self.assertEqual(self.node.as_dict(), node['node'])
+
+ def test_lookup_v2_missing_inventory(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.passthru.lookup,
+ task.context)
+
+ def test_lookup_v2_empty_inventory(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.passthru.lookup,
+ task.context,
+ inventory={})
+
+ def test_lookup_v2_empty_interfaces(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.NodeNotFound,
+ self.passthru.lookup,
+ task.context,
+ version='2',
+ inventory={'interfaces': []})
+
+ @mock.patch.object(objects.Node, 'get_by_uuid')
+ def test_lookup_v2_with_node_uuid(self, mock_get_node):
+ kwargs = {
+ 'version': '2',
+ 'node_uuid': 'fake uuid',
+ 'inventory': {
+ 'interfaces': [
+ {
+ 'mac_address': 'aa:bb:cc:dd:ee:ff',
+ 'name': 'eth0'
+ },
+ {
+ 'mac_address': 'ff:ee:dd:cc:bb:aa',
+ 'name': 'eth1'
+ }
+
+ ]
+ }
+ }
+ mock_get_node.return_value = self.node
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ node = self.passthru.lookup(task.context, **kwargs)
+ self.assertEqual(self.node.as_dict(), node['node'])
+ mock_get_node.assert_called_once_with(mock.ANY, 'fake uuid')
+
+ @mock.patch.object(objects.port.Port, 'get_by_address',
+ spec_set=types.FunctionType)
+ def test_find_ports_by_macs(self, mock_get_port):
+ fake_port = object_utils.get_test_port(self.context)
+ mock_get_port.return_value = fake_port
+
+ macs = ['aa:bb:cc:dd:ee:ff']
+
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ ports = self.passthru._find_ports_by_macs(task, macs)
+ self.assertEqual(1, len(ports))
+ self.assertEqual(fake_port.uuid, ports[0].uuid)
+ self.assertEqual(fake_port.node_id, ports[0].node_id)
+
+ @mock.patch.object(objects.port.Port, 'get_by_address',
+ spec_set=types.FunctionType)
+ def test_find_ports_by_macs_bad_params(self, mock_get_port):
+ mock_get_port.side_effect = exception.PortNotFound(port="123")
+
+ macs = ['aa:bb:cc:dd:ee:ff']
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ empty_ids = self.passthru._find_ports_by_macs(task, macs)
+ self.assertEqual([], empty_ids)
+
+ @mock.patch('ironic.objects.node.Node.get_by_id',
+ spec_set=types.FunctionType)
+ @mock.patch('ironic.drivers.modules.agent_base_vendor.BaseAgentVendor'
+ '._get_node_id', autospec=True)
+ @mock.patch('ironic.drivers.modules.agent_base_vendor.BaseAgentVendor'
+ '._find_ports_by_macs', autospec=True)
+ def test_find_node_by_macs(self, ports_mock, node_id_mock, node_mock):
+ ports_mock.return_value = object_utils.get_test_port(self.context)
+ node_id_mock.return_value = '1'
+ node_mock.return_value = self.node
+
+ macs = ['aa:bb:cc:dd:ee:ff']
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ node = self.passthru._find_node_by_macs(task, macs)
+ self.assertEqual(self.node, node)
+
+ @mock.patch('ironic.drivers.modules.agent_base_vendor.BaseAgentVendor'
+ '._find_ports_by_macs', autospec=True)
+ def test_find_node_by_macs_no_ports(self, ports_mock):
+ ports_mock.return_value = []
+
+ macs = ['aa:bb:cc:dd:ee:ff']
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ self.assertRaises(exception.NodeNotFound,
+ self.passthru._find_node_by_macs,
+ task,
+ macs)
+
+ @mock.patch('ironic.objects.node.Node.get_by_uuid',
+ spec_set=types.FunctionType)
+ @mock.patch('ironic.drivers.modules.agent_base_vendor.BaseAgentVendor'
+ '._get_node_id', autospec=True)
+ @mock.patch('ironic.drivers.modules.agent_base_vendor.BaseAgentVendor'
+ '._find_ports_by_macs', autospec=True)
+ def test_find_node_by_macs_nodenotfound(self, ports_mock, node_id_mock,
+ node_mock):
+ port = object_utils.get_test_port(self.context)
+ ports_mock.return_value = [port]
+ node_id_mock.return_value = self.node['uuid']
+ node_mock.side_effect = [self.node,
+ exception.NodeNotFound(node=self.node)]
+
+ macs = ['aa:bb:cc:dd:ee:ff']
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ self.assertRaises(exception.NodeNotFound,
+ self.passthru._find_node_by_macs,
+ task,
+ macs)
+
+ def test_get_node_id(self):
+ fake_port1 = object_utils.get_test_port(self.context,
+ node_id=123,
+ address="aa:bb:cc:dd:ee:fe")
+ fake_port2 = object_utils.get_test_port(self.context,
+ node_id=123,
+ id=42,
+ address="aa:bb:cc:dd:ee:fb",
+ uuid='1be26c0b-03f2-4d2e-ae87-'
+ 'c02d7f33c782')
+
+ node_id = self.passthru._get_node_id([fake_port1, fake_port2])
+ self.assertEqual(fake_port2.node_id, node_id)
+
+ def test_get_node_id_exception(self):
+ fake_port1 = object_utils.get_test_port(self.context,
+ node_id=123,
+ address="aa:bb:cc:dd:ee:fc")
+ fake_port2 = object_utils.get_test_port(self.context,
+ node_id=321,
+ id=42,
+ address="aa:bb:cc:dd:ee:fd",
+ uuid='1be26c0b-03f2-4d2e-ae87-'
+ 'c02d7f33c782')
+
+ self.assertRaises(exception.NodeNotFound,
+ self.passthru._get_node_id,
+ [fake_port1, fake_port2])
+
+ def test_get_interfaces(self):
+ fake_inventory = {
+ 'interfaces': [
+ {
+ 'mac_address': 'aa:bb:cc:dd:ee:ff',
+ 'name': 'eth0'
+ }
+ ]
+ }
+ interfaces = self.passthru._get_interfaces(fake_inventory)
+ self.assertEqual(fake_inventory['interfaces'], interfaces)
+
+ def test_get_interfaces_bad(self):
+ self.assertRaises(exception.InvalidParameterValue,
+ self.passthru._get_interfaces,
+ inventory={})
+
+ def test_heartbeat(self):
+ kwargs = {
+ 'agent_url': 'http://127.0.0.1:9999/bar'
+ }
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ self.passthru.heartbeat(task, **kwargs)
+
+ def test_heartbeat_bad(self):
+ kwargs = {}
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ self.passthru.heartbeat, task, **kwargs)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor, 'deploy_has_started',
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'set_failed_state', autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor, 'deploy_is_done',
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.LOG, 'exception', autospec=True)
+ def test_heartbeat_deploy_done_fails(self, log_mock, done_mock,
+ failed_mock, deploy_started_mock):
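+ # deploy_is_done() raising must not propagate out of heartbeat():
+ # the error should be logged and the deploy marked as failed instead.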
+ deploy_started_mock.return_value = True
+ kwargs = {
+ 'agent_url': 'http://127.0.0.1:9999/bar'
+ }
+ done_mock.side_effect = iter([Exception('LlamaException')])
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ task.node.provision_state = states.DEPLOYWAIT
+ task.node.target_provision_state = states.ACTIVE
+ self.passthru.heartbeat(task, **kwargs)
+ failed_mock.assert_called_once_with(task, mock.ANY)
+ log_mock.assert_called_once_with(
+ 'Asynchronous exception for node '
+ '1be26c0b-03f2-4d2e-ae87-c02d7f33c123: Failed checking if deploy '
+ 'is done. exception: LlamaException')
+
+ @mock.patch.object(objects.node.Node, 'touch_provisioning', autospec=True)
+ @mock.patch.object(manager, 'set_node_cleaning_steps', autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ '_notify_conductor_resume_clean', autospec=True)
+ def test_heartbeat_resume_clean(self, mock_notify, mock_set_steps,
+ mock_touch):
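+ # An empty clean_step means cleaning has only just started, so the
+ # heartbeat should set the node's clean steps and notify the
+ # conductor to resume cleaning.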
+ kwargs = {
+ 'agent_url': 'http://127.0.0.1:9999/bar'
+ }
+ self.node.clean_step = {}
+ for state in (states.CLEANWAIT, states.CLEANING):
+ self.node.provision_state = state
+ self.node.save()
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ self.passthru.heartbeat(task, **kwargs)
+
+ mock_touch.assert_called_once_with(mock.ANY)
+ mock_notify.assert_called_once_with(mock.ANY, task)
+ mock_set_steps.assert_called_once_with(task)
+ # Reset mocks for the next interaction
+ mock_touch.reset_mock()
+ mock_notify.reset_mock()
+ mock_set_steps.reset_mock()
+
+ @mock.patch.object(objects.node.Node, 'touch_provisioning', autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'continue_cleaning', autospec=True)
+ def test_heartbeat_continue_cleaning(self, mock_continue, mock_touch):
+ kwargs = {
+ 'agent_url': 'http://127.0.0.1:9999/bar'
+ }
+ self.node.clean_step = {
+ 'priority': 10,
+ 'interface': 'deploy',
+ 'step': 'foo',
+ 'reboot_requested': False
+ }
+ for state in (states.CLEANWAIT, states.CLEANING):
+ self.node.provision_state = state
+ self.node.save()
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ self.passthru.heartbeat(task, **kwargs)
+
+ mock_touch.assert_called_once_with(mock.ANY)
+ mock_continue.assert_called_once_with(mock.ANY, task, **kwargs)
+ # Reset mocks for the next interaction
+ mock_touch.reset_mock()
+ mock_continue.reset_mock()
+
+ @mock.patch('ironic.conductor.manager.cleaning_error_handler')
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'continue_cleaning', autospec=True)
+ def test_heartbeat_continue_cleaning_fails(self, mock_continue,
+ mock_handler):
+ kwargs = {
+ 'agent_url': 'http://127.0.0.1:9999/bar'
+ }
+ self.node.clean_step = {
+ 'priority': 10,
+ 'interface': 'deploy',
+ 'step': 'foo',
+ 'reboot_requested': False
+ }
+
+ mock_continue.side_effect = Exception()
+
+ for state in (states.CLEANWAIT, states.CLEANING):
+ self.node.provision_state = state
+ self.node.save()
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ self.passthru.heartbeat(task, **kwargs)
+
+ mock_continue.assert_called_once_with(mock.ANY, task, **kwargs)
+ mock_handler.assert_called_once_with(task, mock.ANY)
+ mock_handler.reset_mock()
+ mock_continue.reset_mock()
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor, 'continue_deploy',
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor, 'reboot_to_instance',
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ '_notify_conductor_resume_clean', autospec=True)
+ def test_heartbeat_noops_maintenance_mode(self, ncrc_mock, rti_mock,
+ cd_mock):
+ """Ensures that heartbeat() no-ops for a maintenance node."""
+ kwargs = {
+ 'agent_url': 'http://127.0.0.1:9999/bar'
+ }
+ self.node.maintenance = True
+ for state in (states.AVAILABLE, states.DEPLOYWAIT, states.DEPLOYING,
+ states.CLEANING):
+ self.node.provision_state = state
+ self.node.save()
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=True) as task:
+ self.passthru.heartbeat(task, **kwargs)
+
+ self.assertEqual(0, ncrc_mock.call_count)
+ self.assertEqual(0, rti_mock.call_count)
+ self.assertEqual(0, cd_mock.call_count)
+
+ @mock.patch.object(objects.node.Node, 'touch_provisioning', autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor, 'deploy_has_started',
+ autospec=True)
+ def test_heartbeat_touch_provisioning(self, mock_deploy_started,
+ mock_touch):
+ mock_deploy_started.return_value = True
+ kwargs = {
+ 'agent_url': 'http://127.0.0.1:9999/bar'
+ }
+
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.save()
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ self.passthru.heartbeat(task, **kwargs)
+
+ mock_touch.assert_called_once_with(mock.ANY)
+
+ def test_vendor_passthru_vendor_routes(self):
+ expected = ['heartbeat']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ vendor_routes = task.driver.vendor.vendor_routes
+ self.assertIsInstance(vendor_routes, dict)
+ self.assertEqual(expected, list(vendor_routes))
+
+ def test_vendor_passthru_driver_routes(self):
+ expected = ['lookup']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ driver_routes = task.driver.vendor.driver_routes
+ self.assertIsInstance(driver_routes, dict)
+ self.assertEqual(expected, list(driver_routes))
+
+ @mock.patch.object(time, 'sleep', lambda seconds: None)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(fake.FakePower, 'get_power_state',
+ spec=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'power_off',
+ spec=types.FunctionType)
+ def test_reboot_and_finish_deploy(self, power_off_mock,
+ get_power_state_mock,
+ node_power_action_mock):
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ get_power_state_mock.side_effect = [states.POWER_ON,
+ states.POWER_OFF]
+ self.passthru.reboot_and_finish_deploy(task)
+ power_off_mock.assert_called_once_with(task.node)
+ self.assertEqual(2, get_power_state_mock.call_count)
+ node_power_action_mock.assert_called_once_with(
+ task, states.POWER_ON)
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+
+ @mock.patch.object(time, 'sleep', lambda seconds: None)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(fake.FakePower, 'get_power_state',
+ spec=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'power_off',
+ spec=types.FunctionType)
+ def test_reboot_and_finish_deploy_soft_poweroff_doesnt_complete(
+ self, power_off_mock, get_power_state_mock,
+ node_power_action_mock):
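+ # The agent's soft power off never reaches POWER_OFF, so the poll
+ # loop gives up after its retries (7 calls here) and the driver
+ # falls back to a hard REBOOT via node_power_action.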
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ get_power_state_mock.return_value = states.POWER_ON
+ self.passthru.reboot_and_finish_deploy(task)
+ power_off_mock.assert_called_once_with(task.node)
+ self.assertEqual(7, get_power_state_mock.call_count)
+ node_power_action_mock.assert_called_once_with(
+ task, states.REBOOT)
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'power_off',
+ spec=types.FunctionType)
+ def test_reboot_and_finish_deploy_soft_poweroff_fails(
+ self, power_off_mock, node_power_action_mock):
+ power_off_mock.side_effect = iter([RuntimeError("boom")])
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.passthru.reboot_and_finish_deploy(task)
+ power_off_mock.assert_called_once_with(task.node)
+ node_power_action_mock.assert_called_once_with(
+ task, states.REBOOT)
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+
+ @mock.patch.object(time, 'sleep', lambda seconds: None)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(fake.FakePower, 'get_power_state',
+ spec=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'power_off',
+ spec=types.FunctionType)
+ def test_reboot_and_finish_deploy_get_power_state_fails(
+ self, power_off_mock, get_power_state_mock,
+ node_power_action_mock):
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ get_power_state_mock.side_effect = iter([RuntimeError("boom")])
+ self.passthru.reboot_and_finish_deploy(task)
+ power_off_mock.assert_called_once_with(task.node)
+ self.assertEqual(7, get_power_state_mock.call_count)
+ node_power_action_mock.assert_called_once_with(
+ task, states.REBOOT)
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+
+ @mock.patch.object(time, 'sleep', lambda seconds: None)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(fake.FakePower, 'get_power_state',
+ spec=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'power_off',
+ spec=types.FunctionType)
+ def test_reboot_and_finish_deploy_power_action_fails(
+ self, power_off_mock, get_power_state_mock,
+ node_power_action_mock):
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ get_power_state_mock.return_value = states.POWER_ON
+ node_power_action_mock.side_effect = iter([RuntimeError("boom")])
+ self.assertRaises(exception.InstanceDeployFailure,
+ self.passthru.reboot_and_finish_deploy,
+ task)
+ power_off_mock.assert_called_once_with(task.node)
+ self.assertEqual(7, get_power_state_mock.call_count)
+ node_power_action_mock.assert_has_calls([
+ mock.call(task, states.REBOOT),
+ mock.call(task, states.POWER_OFF)])
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+
+ @mock.patch.object(agent_client.AgentClient, 'install_bootloader',
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
+ def test_configure_local_boot(self, try_set_boot_device_mock,
+ install_bootloader_mock):
+ install_bootloader_mock.return_value = {
+ 'command_status': 'SUCCESS', 'command_error': None}
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+ self.passthru.configure_local_boot(task,
+ root_uuid='some-root-uuid')
+ try_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK)
+ install_bootloader_mock.assert_called_once_with(
+ mock.ANY, task.node, root_uuid='some-root-uuid',
+ efi_system_part_uuid=None)
+
+ @mock.patch.object(agent_client.AgentClient, 'install_bootloader',
+ autospec=True)
+ @mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
+ def test_configure_local_boot_uefi(self, try_set_boot_device_mock,
+ install_bootloader_mock):
+ install_bootloader_mock.return_value = {
+ 'command_status': 'SUCCESS', 'command_error': None}
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+ self.passthru.configure_local_boot(
+ task, root_uuid='some-root-uuid',
+ efi_system_part_uuid='efi-system-part-uuid')
+ try_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK)
+ install_bootloader_mock.assert_called_once_with(
+ mock.ANY, task.node, root_uuid='some-root-uuid',
+ efi_system_part_uuid='efi-system-part-uuid')
+
+ @mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'install_bootloader',
+ autospec=True)
+ def test_configure_local_boot_whole_disk_image(
+ self, install_bootloader_mock, try_set_boot_device_mock):
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.passthru.configure_local_boot(task)
+ self.assertFalse(install_bootloader_mock.called)
+ try_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK)
+
+ @mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'install_bootloader',
+ autospec=True)
+ def test_configure_local_boot_no_root_uuid(
+ self, install_bootloader_mock, try_set_boot_device_mock):
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+ self.passthru.configure_local_boot(task)
+ self.assertFalse(install_bootloader_mock.called)
+ try_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK)
+
+ @mock.patch.object(agent_client.AgentClient, 'install_bootloader',
+ autospec=True)
+ def test_configure_local_boot_boot_loader_install_fail(
+ self, install_bootloader_mock):
+ install_bootloader_mock.return_value = {
+ 'command_status': 'FAILED', 'command_error': 'boom'}
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+ self.assertRaises(exception.InstanceDeployFailure,
+ self.passthru.configure_local_boot,
+ task, root_uuid='some-root-uuid')
+ install_bootloader_mock.assert_called_once_with(
+ mock.ANY, task.node, root_uuid='some-root-uuid',
+ efi_system_part_uuid=None)
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+
+ @mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'install_bootloader',
+ autospec=True)
+ def test_configure_local_boot_set_boot_device_fail(
+ self, install_bootloader_mock, try_set_boot_device_mock):
+ install_bootloader_mock.return_value = {
+ 'command_status': 'SUCCESS', 'command_error': None}
+ try_set_boot_device_mock.side_effect = iter([RuntimeError('error')])
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+ self.assertRaises(exception.InstanceDeployFailure,
+ self.passthru.configure_local_boot,
+ task, root_uuid='some-root-uuid')
+ install_bootloader_mock.assert_called_once_with(
+ mock.ANY, task.node, root_uuid='some-root-uuid',
+ efi_system_part_uuid=None)
+ try_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK)
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ '_notify_conductor_resume_clean', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_continue_cleaning(self, status_mock, notify_mock):
+        # Test a successful execute_clean_step command on the agent
+ self.node.clean_step = {
+ 'priority': 10,
+ 'interface': 'deploy',
+ 'step': 'erase_devices',
+ 'reboot_requested': False
+ }
+ self.node.save()
+ status_mock.return_value = [{
+ 'command_status': 'SUCCEEDED',
+ 'command_name': 'execute_clean_step',
+ 'command_result': {
+ 'clean_step': self.node.clean_step
+ }
+ }]
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.passthru.continue_cleaning(task)
+ notify_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(agent_base_vendor,
+ '_get_post_clean_step_hook', autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ '_notify_conductor_resume_clean', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_continue_cleaning_with_hook(
+ self, status_mock, notify_mock, get_hook_mock):
+ self.node.clean_step = {
+ 'priority': 10,
+ 'interface': 'raid',
+ 'step': 'create_configuration',
+ }
+ self.node.save()
+ command_status = {
+ 'command_status': 'SUCCEEDED',
+ 'command_name': 'execute_clean_step',
+ 'command_result': {'clean_step': self.node.clean_step}}
+ status_mock.return_value = [command_status]
+ hook_mock = mock.MagicMock(spec=types.FunctionType, __name__='foo')
+ get_hook_mock.return_value = hook_mock
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.passthru.continue_cleaning(task)
+
+ get_hook_mock.assert_called_once_with(task.node)
+ hook_mock.assert_called_once_with(task, command_status)
+ notify_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ '_notify_conductor_resume_clean', autospec=True)
+ @mock.patch.object(agent_base_vendor,
+ '_get_post_clean_step_hook', autospec=True)
+ @mock.patch.object(manager, 'cleaning_error_handler', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_continue_cleaning_with_hook_fails(
+ self, status_mock, error_handler_mock, get_hook_mock,
+ notify_mock):
+ self.node.clean_step = {
+ 'priority': 10,
+ 'interface': 'raid',
+ 'step': 'create_configuration',
+ }
+ self.node.save()
+ command_status = {
+ 'command_status': 'SUCCEEDED',
+ 'command_name': 'execute_clean_step',
+ 'command_result': {'clean_step': self.node.clean_step}}
+ status_mock.return_value = [command_status]
+ hook_mock = mock.MagicMock(spec=types.FunctionType, __name__='foo')
+ hook_mock.side_effect = RuntimeError('error')
+ get_hook_mock.return_value = hook_mock
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.passthru.continue_cleaning(task)
+
+ get_hook_mock.assert_called_once_with(task.node)
+ hook_mock.assert_called_once_with(task, command_status)
+ error_handler_mock.assert_called_once_with(task, mock.ANY)
+ self.assertFalse(notify_mock.called)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ '_notify_conductor_resume_clean', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_continue_cleaning_old_command(self, status_mock, notify_mock):
+        # Test the case where a second execute_clean_step was sent to the
+        # agent but the new step has not started yet.
+ self.node.clean_step = {
+ 'priority': 10,
+ 'interface': 'deploy',
+ 'step': 'erase_devices',
+ 'reboot_requested': False
+ }
+ self.node.save()
+ status_mock.return_value = [{
+ 'command_status': 'SUCCEEDED',
+ 'command_name': 'execute_clean_step',
+ 'command_result': {
+ 'priority': 20,
+ 'interface': 'deploy',
+ 'step': 'update_firmware',
+ 'reboot_requested': False
+ }
+ }]
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.passthru.continue_cleaning(task)
+ self.assertFalse(notify_mock.called)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ '_notify_conductor_resume_clean', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_continue_cleaning_running(self, status_mock, notify_mock):
+ # Test that no action is taken while a clean step is executing
+ status_mock.return_value = [{
+ 'command_status': 'RUNNING',
+ 'command_name': 'execute_clean_step',
+ 'command_result': None
+ }]
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.passthru.continue_cleaning(task)
+ self.assertFalse(notify_mock.called)
+
+ @mock.patch('ironic.conductor.manager.cleaning_error_handler',
+ autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_continue_cleaning_fail(self, status_mock, error_mock):
+        # Test that a failure puts the node in CLEANFAIL
+ status_mock.return_value = [{
+ 'command_status': 'FAILED',
+ 'command_name': 'execute_clean_step',
+ 'command_result': {}
+ }]
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.passthru.continue_cleaning(task)
+ error_mock.assert_called_once_with(task, mock.ANY)
+
+ @mock.patch('ironic.conductor.manager.set_node_cleaning_steps',
+ autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ '_notify_conductor_resume_clean', autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_continue_cleaning_clean_version_mismatch(
+ self, status_mock, notify_mock, steps_mock):
+ # Test that cleaning is restarted if there is a version mismatch
+ status_mock.return_value = [{
+ 'command_status': 'CLEAN_VERSION_MISMATCH',
+ 'command_name': 'execute_clean_step',
+ 'command_result': {}
+ }]
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.passthru.continue_cleaning(task)
+ steps_mock.assert_called_once_with(task)
+ notify_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch('ironic.conductor.manager.cleaning_error_handler',
+ autospec=True)
+ @mock.patch.object(agent_client.AgentClient, 'get_commands_status',
+ autospec=True)
+ def test_continue_cleaning_unknown(self, status_mock, error_mock):
+ # Test that unknown commands are treated as failures
+ status_mock.return_value = [{
+ 'command_status': 'UNKNOWN',
+ 'command_name': 'execute_clean_step',
+ 'command_result': {}
+ }]
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.passthru.continue_cleaning(task)
+ error_mock.assert_called_once_with(task, mock.ANY)
+
+ def _test_clean_step_hook(self, hook_dict_mock):
+ """Helper method for unit tests related to clean step hooks.
+
+        This is a helper for the other unit tests related to clean step
+        hooks. It accepts a MagicMock, 'hook_dict_mock', and sets it up to
+        behave like a dictionary. It then defines a dummy hook_method for
+        two clean steps, raid.create_configuration and
+        raid.delete_configuration.
+
+        :param hook_dict_mock: an instance of mock.MagicMock() which is the
+            mocked value of agent_base_vendor.POST_CLEAN_STEP_HOOKS
+        :returns: a tuple where the first item is the hook method created by
+            this method and the second item is the backing dictionary for
+            the mocked hook_dict_mock
+ """
+ hook_dict = {}
+
+ def get(key, default):
+ return hook_dict.get(key, default)
+
+ def getitem(self, key):
+ return hook_dict[key]
+
+ def setdefault(key, default):
+ if key not in hook_dict:
+ hook_dict[key] = default
+ return hook_dict[key]
+
+ hook_dict_mock.get = get
+ hook_dict_mock.__getitem__ = getitem
+ hook_dict_mock.setdefault = setdefault
+ some_function_mock = mock.MagicMock()
+
+ @agent_base_vendor.post_clean_step_hook(
+ interface='raid', step='delete_configuration')
+ @agent_base_vendor.post_clean_step_hook(
+ interface='raid', step='create_configuration')
+ def hook_method():
+ some_function_mock('some-arguments')
+
+ return hook_method, hook_dict
+
+ @mock.patch.object(agent_base_vendor, 'POST_CLEAN_STEP_HOOKS',
+ spec_set=dict)
+ def test_post_clean_step_hook(self, hook_dict_mock):
+ # This unit test makes sure that hook methods are registered
+ # properly and entries are made in
+ # agent_base_vendor.POST_CLEAN_STEP_HOOKS
+ hook_method, hook_dict = self._test_clean_step_hook(hook_dict_mock)
+ self.assertEqual(hook_method,
+ hook_dict['raid']['create_configuration'])
+ self.assertEqual(hook_method,
+ hook_dict['raid']['delete_configuration'])
+
+ @mock.patch.object(agent_base_vendor, 'POST_CLEAN_STEP_HOOKS',
+ spec_set=dict)
+ def test__get_post_clean_step_hook(self, hook_dict_mock):
+        # Check that agent_base_vendor._get_post_clean_step_hook returns the
+        # hook registered for the node's current clean step.
+ hook_method, hook_dict = self._test_clean_step_hook(hook_dict_mock)
+ self.node.clean_step = {'step': 'create_configuration',
+ 'interface': 'raid'}
+ self.node.save()
+ hook_returned = agent_base_vendor._get_post_clean_step_hook(self.node)
+ self.assertEqual(hook_method, hook_returned)
+
+ @mock.patch.object(agent_base_vendor, 'POST_CLEAN_STEP_HOOKS',
+ spec_set=dict)
+ def test__get_post_clean_step_hook_no_hook_registered(
+ self, hook_dict_mock):
+ # Make sure agent_base_vendor._get_post_clean_step_hook returns
+ # None when no clean step hook is registered for the clean step.
+ hook_method, hook_dict = self._test_clean_step_hook(hook_dict_mock)
+ self.node.clean_step = {'step': 'some-clean-step',
+ 'interface': 'some-other-interface'}
+ self.node.save()
+ hook_returned = agent_base_vendor._get_post_clean_step_hook(self.node)
+ self.assertIsNone(hook_returned)
diff --git a/ironic/tests/unit/drivers/test_agent_client.py b/ironic/tests/unit/drivers/test_agent_client.py
new file mode 100644
index 000000000..223844882
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_agent_client.py
@@ -0,0 +1,220 @@
+# Copyright 2014 Rackspace, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+import mock
+import requests
+import six
+
+from ironic.common import exception
+from ironic.drivers.modules import agent_client
+from ironic.tests.unit import base
+
+
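+# Minimal stand-in for a requests response: it records the raw text, reports
+# a 200 status code and decodes the body on json().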
+class MockResponse(object):
+ status_code = 200
+
+ def __init__(self, text):
+ assert isinstance(text, six.string_types)
+ self.text = text
+
+ def json(self):
+ return json.loads(self.text)
+
+
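+# Bare-bones node object exposing only the attributes AgentClient reads:
+# uuid, driver_info, driver_internal_info (including agent_url) and
+# instance_info.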
+class MockNode(object):
+ def __init__(self):
+ self.uuid = 'uuid'
+ self.driver_info = {}
+ self.driver_internal_info = {
+ 'agent_url': "http://127.0.0.1:9999",
+ 'clean_version': {'generic': '1'}
+ }
+ self.instance_info = {}
+
+ def as_dict(self):
+ return {
+ 'uuid': self.uuid,
+ 'driver_info': self.driver_info,
+ 'driver_internal_info': self.driver_internal_info,
+ 'instance_info': self.instance_info
+ }
+
+
+class TestAgentClient(base.TestCase):
+ def setUp(self):
+ super(TestAgentClient, self).setUp()
+ self.client = agent_client.AgentClient()
+ self.client.session = mock.MagicMock(autospec=requests.Session)
+ self.node = MockNode()
+
+ def test_content_type_header(self):
+ client = agent_client.AgentClient()
+ self.assertEqual('application/json',
+ client.session.headers['Content-Type'])
+
+ def test__get_command_url(self):
+ command_url = self.client._get_command_url(self.node)
+ expected = self.node.driver_internal_info['agent_url'] + '/v1/commands'
+ self.assertEqual(expected, command_url)
+
+ def test__get_command_url_fail(self):
+ del self.node.driver_internal_info['agent_url']
+ self.assertRaises(exception.IronicException,
+ self.client._get_command_url,
+ self.node)
+
+ def test__get_command_body(self):
+ expected = json.dumps({'name': 'prepare_image', 'params': {}})
+ self.assertEqual(expected,
+ self.client._get_command_body('prepare_image', {}))
+
+ def test__command(self):
+ response_data = {'status': 'ok'}
+ response_text = json.dumps(response_data)
+ self.client.session.post.return_value = MockResponse(response_text)
+ method = 'standby.run_image'
+ image_info = {'image_id': 'test_image'}
+ params = {'image_info': image_info}
+
+ url = self.client._get_command_url(self.node)
+ body = self.client._get_command_body(method, params)
+
+ response = self.client._command(self.node, method, params)
+ self.assertEqual(response, response_data)
+ self.client.session.post.assert_called_once_with(
+ url,
+ data=body,
+ params={'wait': 'false'})
+
+ def test__command_fail_json(self):
+ response_text = 'this be not json matey!'
+ self.client.session.post.return_value = MockResponse(response_text)
+ method = 'standby.run_image'
+ image_info = {'image_id': 'test_image'}
+ params = {'image_info': image_info}
+
+ url = self.client._get_command_url(self.node)
+ body = self.client._get_command_body(method, params)
+
+ self.assertRaises(exception.IronicException,
+ self.client._command,
+ self.node, method, params)
+ self.client.session.post.assert_called_once_with(
+ url,
+ data=body,
+ params={'wait': 'false'})
+
+ def test_get_commands_status(self):
+ with mock.patch.object(self.client.session, 'get',
+ autospec=True) as mock_get:
+ res = mock.MagicMock(spec_set=['json'])
+ res.json.return_value = {'commands': []}
+ mock_get.return_value = res
+ self.assertEqual([], self.client.get_commands_status(self.node))
+
+ @mock.patch('uuid.uuid4', mock.MagicMock(spec_set=[], return_value='uuid'))
+ def test_prepare_image(self):
+ self.client._command = mock.MagicMock(spec_set=[])
+ image_info = {'image_id': 'image'}
+ params = {'image_info': image_info}
+
+ self.client.prepare_image(self.node,
+ image_info,
+ wait=False)
+ self.client._command.assert_called_once_with(
+ node=self.node, method='standby.prepare_image',
+ params=params, wait=False)
+
+ @mock.patch('uuid.uuid4', mock.MagicMock(spec_set=[], return_value='uuid'))
+ def test_prepare_image_with_configdrive(self):
+ self.client._command = mock.MagicMock(spec_set=[])
+ configdrive_url = 'http://swift/configdrive'
+ self.node.instance_info['configdrive'] = configdrive_url
+ image_info = {'image_id': 'image'}
+ params = {
+ 'image_info': image_info,
+ 'configdrive': configdrive_url,
+ }
+
+ self.client.prepare_image(self.node,
+ image_info,
+ wait=False)
+ self.client._command.assert_called_once_with(
+ node=self.node, method='standby.prepare_image',
+ params=params, wait=False)
+
+ @mock.patch('uuid.uuid4', mock.MagicMock(spec_set=[], return_value='uuid'))
+ def test_start_iscsi_target(self):
+ self.client._command = mock.MagicMock(spec_set=[])
+ iqn = 'fake-iqn'
+ params = {'iqn': iqn}
+
+ self.client.start_iscsi_target(self.node, iqn)
+ self.client._command.assert_called_once_with(
+ node=self.node, method='iscsi.start_iscsi_target',
+ params=params, wait=True)
+
+ @mock.patch('uuid.uuid4', mock.MagicMock(spec_set=[], return_value='uuid'))
+ def test_install_bootloader(self):
+ self.client._command = mock.MagicMock(spec_set=[])
+ root_uuid = 'fake-root-uuid'
+ efi_system_part_uuid = 'fake-efi-system-part-uuid'
+ params = {'root_uuid': root_uuid,
+ 'efi_system_part_uuid': efi_system_part_uuid}
+
+ self.client.install_bootloader(
+ self.node, root_uuid, efi_system_part_uuid=efi_system_part_uuid)
+ self.client._command.assert_called_once_with(
+ node=self.node, method='image.install_bootloader', params=params,
+ wait=True)
+
+ def test_get_clean_steps(self):
+ self.client._command = mock.MagicMock(spec_set=[])
+ ports = []
+ expected_params = {
+ 'node': self.node.as_dict(),
+ 'ports': []
+ }
+
+ self.client.get_clean_steps(self.node,
+ ports)
+ self.client._command.assert_called_once_with(
+ node=self.node, method='clean.get_clean_steps',
+ params=expected_params, wait=True)
+
+ def test_execute_clean_step(self):
+ self.client._command = mock.MagicMock(spec_set=[])
+ ports = []
+ step = {'priority': 10, 'step': 'erase_devices', 'interface': 'deploy'}
+ expected_params = {
+ 'step': step,
+ 'node': self.node.as_dict(),
+ 'ports': [],
+ 'clean_version': self.node.driver_internal_info.get(
+ 'hardware_manager_version')
+ }
+ self.client.execute_clean_step(step,
+ self.node,
+ ports)
+ self.client._command.assert_called_once_with(
+ node=self.node, method='clean.execute_clean_step',
+ params=expected_params, wait=False)
+
+ def test_power_off(self):
+ self.client._command = mock.MagicMock(spec_set=[])
+ self.client.power_off(self.node)
+ self.client._command.assert_called_once_with(
+ node=self.node, method='standby.power_off', params={})
diff --git a/ironic/tests/unit/drivers/test_base.py b/ironic/tests/unit/drivers/test_base.py
new file mode 100644
index 000000000..1bd6fae47
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_base.py
@@ -0,0 +1,247 @@
+# Copyright 2014 Cisco Systems, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+
+import eventlet
+import mock
+
+from ironic.common import exception
+from ironic.common import raid
+from ironic.drivers import base as driver_base
+from ironic.tests.unit import base
+
+
+class FakeVendorInterface(driver_base.VendorInterface):
+ def get_properties(self):
+ pass
+
+ @driver_base.passthru(['POST'])
+ def noexception(self):
+ return "Fake"
+
+ @driver_base.driver_passthru(['POST'])
+ def driver_noexception(self):
+ return "Fake"
+
+ @driver_base.passthru(['POST'])
+ def ironicexception(self):
+ raise exception.IronicException("Fake!")
+
+ @driver_base.passthru(['POST'])
+ def normalexception(self):
+ raise Exception("Fake!")
+
+ def validate(self, task, **kwargs):
+ pass
+
+ def driver_validate(self, **kwargs):
+ pass
+
+
+class PassthruDecoratorTestCase(base.TestCase):
+
+ def setUp(self):
+ super(PassthruDecoratorTestCase, self).setUp()
+ self.fvi = FakeVendorInterface()
+
+ def test_passthru_noexception(self):
+ result = self.fvi.noexception()
+ self.assertEqual("Fake", result)
+
+ @mock.patch.object(driver_base, 'LOG', autospec=True)
+ def test_passthru_ironicexception(self, mock_log):
+ self.assertRaises(exception.IronicException,
+ self.fvi.ironicexception, mock.ANY)
+ mock_log.exception.assert_called_with(
+ mock.ANY, 'ironicexception')
+
+ @mock.patch.object(driver_base, 'LOG', autospec=True)
+ def test_passthru_nonironicexception(self, mock_log):
+ self.assertRaises(exception.VendorPassthruException,
+ self.fvi.normalexception, mock.ANY)
+ mock_log.exception.assert_called_with(
+ mock.ANY, 'normalexception')
+
+ def test_passthru_check_func_references(self):
+ inst1 = FakeVendorInterface()
+ inst2 = FakeVendorInterface()
+
+ self.assertNotEqual(inst1.vendor_routes['noexception']['func'],
+ inst2.vendor_routes['noexception']['func'])
+ self.assertNotEqual(inst1.driver_routes['driver_noexception']['func'],
+ inst2.driver_routes['driver_noexception']['func'])
+
+
+@mock.patch.object(eventlet.greenthread, 'spawn_n', autospec=True,
+ side_effect=lambda func, *args, **kw: func(*args, **kw))
+class DriverPeriodicTaskTestCase(base.TestCase):
+ def test(self, spawn_mock):
+ method_mock = mock.MagicMock(spec_set=[])
+ function_mock = mock.MagicMock(spec_set=[])
+
+ class TestClass(object):
+ @driver_base.driver_periodic_task(spacing=42)
+ def method(self, foo, bar=None):
+ method_mock(foo, bar=bar)
+
+ @driver_base.driver_periodic_task(spacing=100, parallel=False)
+ def function():
+ function_mock()
+
+ obj = TestClass()
+ self.assertEqual(42, obj.method._periodic_spacing)
+ self.assertTrue(obj.method._periodic_task)
+ self.assertEqual('ironic.tests.unit.drivers.test_base.method',
+ obj.method._periodic_name)
+ self.assertEqual('ironic.tests.unit.drivers.test_base.function',
+ function._periodic_name)
+
+ obj.method(1, bar=2)
+ method_mock.assert_called_once_with(1, bar=2)
+ self.assertEqual(1, spawn_mock.call_count)
+ function()
+ function_mock.assert_called_once_with()
+ self.assertEqual(1, spawn_mock.call_count)
+
+
+class CleanStepTestCase(base.TestCase):
+ def test_get_and_execute_clean_steps(self):
+ # Create a fake Driver class, create some clean steps, make sure
+ # they are listed correctly, and attempt to execute one of them
+
+ method_mock = mock.MagicMock(spec_set=[])
+ task_mock = mock.MagicMock(spec_set=[])
+
+ class TestClass(driver_base.BaseInterface):
+ interface_type = 'test'
+
+ @driver_base.clean_step(priority=0)
+ def zap_method(self, task):
+ pass
+
+ @driver_base.clean_step(priority=10, abortable=True)
+ def clean_method(self, task):
+ method_mock(task)
+
+ def not_clean_method(self, task):
+ pass
+
+ class TestClass2(driver_base.BaseInterface):
+ interface_type = 'test2'
+
+ @driver_base.clean_step(priority=0)
+ def zap_method2(self, task):
+ pass
+
+ @driver_base.clean_step(priority=20, abortable=True)
+ def clean_method2(self, task):
+ method_mock(task)
+
+ def not_clean_method2(self, task):
+ pass
+
+ obj = TestClass()
+ obj2 = TestClass2()
+
+ self.assertEqual(2, len(obj.get_clean_steps(task_mock)))
+ # Ensure the steps look correct
+ self.assertEqual(10, obj.get_clean_steps(task_mock)[0]['priority'])
+ self.assertTrue(obj.get_clean_steps(task_mock)[0]['abortable'])
+ self.assertEqual('test', obj.get_clean_steps(
+ task_mock)[0]['interface'])
+ self.assertEqual('clean_method', obj.get_clean_steps(
+ task_mock)[0]['step'])
+ self.assertEqual(0, obj.get_clean_steps(task_mock)[1]['priority'])
+ self.assertFalse(obj.get_clean_steps(task_mock)[1]['abortable'])
+ self.assertEqual('test', obj.get_clean_steps(
+ task_mock)[1]['interface'])
+ self.assertEqual('zap_method', obj.get_clean_steps(
+ task_mock)[1]['step'])
+
+        # Ensure the second obj gets different clean steps
+ self.assertEqual(2, len(obj2.get_clean_steps(task_mock)))
+ # Ensure the steps look correct
+ self.assertEqual(20, obj2.get_clean_steps(task_mock)[0]['priority'])
+ self.assertTrue(obj2.get_clean_steps(task_mock)[0]['abortable'])
+ self.assertEqual('test2', obj2.get_clean_steps(
+ task_mock)[0]['interface'])
+ self.assertEqual('clean_method2', obj2.get_clean_steps(
+ task_mock)[0]['step'])
+ self.assertEqual(0, obj2.get_clean_steps(task_mock)[1]['priority'])
+        self.assertFalse(obj2.get_clean_steps(task_mock)[1]['abortable'])
+ self.assertEqual('test2', obj2.get_clean_steps(
+ task_mock)[1]['interface'])
+ self.assertEqual('zap_method2', obj2.get_clean_steps(
+ task_mock)[1]['step'])
+
+ # Ensure we can execute the function.
+ obj.execute_clean_step(task_mock, obj.get_clean_steps(task_mock)[0])
+ method_mock.assert_called_once_with(task_mock)
+
+
+class MyRAIDInterface(driver_base.RAIDInterface):
+
+ def create_configuration(self, task):
+ pass
+
+ def delete_configuration(self, task):
+ pass
+
+
+class RAIDInterfaceTestCase(base.TestCase):
+
+ @mock.patch.object(driver_base.RAIDInterface, 'validate_raid_config',
+ autospec=True)
+ def test_validate(self, validate_raid_config_mock):
+ raid_interface = MyRAIDInterface()
+ node_mock = mock.MagicMock(target_raid_config='some_raid_config')
+ task_mock = mock.MagicMock(node=node_mock)
+
+ raid_interface.validate(task_mock)
+
+ validate_raid_config_mock.assert_called_once_with(
+ raid_interface, task_mock, 'some_raid_config')
+
+ @mock.patch.object(driver_base.RAIDInterface, 'validate_raid_config',
+ autospec=True)
+ def test_validate_no_target_raid_config(self, validate_raid_config_mock):
+ raid_interface = MyRAIDInterface()
+ node_mock = mock.MagicMock(target_raid_config={})
+ task_mock = mock.MagicMock(node=node_mock)
+
+ raid_interface.validate(task_mock)
+
+ self.assertFalse(validate_raid_config_mock.called)
+
+ @mock.patch.object(raid, 'validate_configuration', autospec=True)
+ def test_validate_raid_config(self, common_validate_mock):
+ with open(driver_base.RAID_CONFIG_SCHEMA, 'r') as raid_schema_fobj:
+ raid_schema = json.load(raid_schema_fobj)
+ raid_interface = MyRAIDInterface()
+
+ raid_interface.validate_raid_config('task', 'some_raid_config')
+
+ common_validate_mock.assert_called_once_with(
+ 'some_raid_config', raid_schema)
+
+ @mock.patch.object(raid, 'get_logical_disk_properties',
+ autospec=True)
+ def test_get_logical_disk_properties(self, get_properties_mock):
+ with open(driver_base.RAID_CONFIG_SCHEMA, 'r') as raid_schema_fobj:
+ raid_schema = json.load(raid_schema_fobj)
+ raid_interface = MyRAIDInterface()
+ raid_interface.get_logical_disk_properties()
+ get_properties_mock.assert_called_once_with(raid_schema)
diff --git a/ironic/tests/unit/drivers/test_console_utils.py b/ironic/tests/unit/drivers/test_console_utils.py
new file mode 100644
index 000000000..23762e9f8
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_console_utils.py
@@ -0,0 +1,348 @@
+# coding=utf-8
+
+# Copyright 2014 International Business Machines Corporation
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for console_utils driver module."""
+
+import os
+import random
+import string
+import subprocess
+import tempfile
+
+import mock
+from oslo_concurrency import processutils
+from oslo_config import cfg
+from oslo_utils import netutils
+
+from ironic.common import exception
+from ironic.common import utils
+from ironic.drivers.modules import console_utils
+from ironic.drivers.modules import ipmitool as ipmi
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+CONF = cfg.CONF
+
+INFO_DICT = db_utils.get_test_ipmi_info()
+
+
+class ConsoleUtilsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(ConsoleUtilsTestCase, self).setUp()
+ self.node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ipmitool',
+ driver_info=INFO_DICT)
+ self.info = ipmi._parse_driver_info(self.node)
+
+ def test__get_console_pid_dir(self):
+ pid_dir = '/tmp/pid_dir'
+ self.config(terminal_pid_dir=pid_dir, group='console')
+ dir = console_utils._get_console_pid_dir()
+ self.assertEqual(pid_dir, dir)
+
+ def test__get_console_pid_dir_tempdir(self):
+ self.config(tempdir='/tmp/fake_dir')
+ dir = console_utils._get_console_pid_dir()
+ self.assertEqual(CONF.tempdir, dir)
+
+ @mock.patch.object(os, 'makedirs', autospec=True)
+ @mock.patch.object(os.path, 'exists', autospec=True)
+ def test__ensure_console_pid_dir_exists(self, mock_path_exists,
+ mock_makedirs):
+ mock_path_exists.return_value = True
+ mock_makedirs.side_effect = OSError
+ pid_dir = console_utils._get_console_pid_dir()
+
+ console_utils._ensure_console_pid_dir_exists()
+
+ mock_path_exists.assert_called_once_with(pid_dir)
+ self.assertFalse(mock_makedirs.called)
+
+ @mock.patch.object(os, 'makedirs', autospec=True)
+ @mock.patch.object(os.path, 'exists', autospec=True)
+ def test__ensure_console_pid_dir_exists_fail(self, mock_path_exists,
+ mock_makedirs):
+ mock_path_exists.return_value = False
+ mock_makedirs.side_effect = OSError
+ pid_dir = console_utils._get_console_pid_dir()
+
+ self.assertRaises(exception.ConsoleError,
+ console_utils._ensure_console_pid_dir_exists)
+
+ mock_path_exists.assert_called_once_with(pid_dir)
+ mock_makedirs.assert_called_once_with(pid_dir)
+
+ @mock.patch.object(console_utils, '_get_console_pid_dir', autospec=True)
+ def test__get_console_pid_file(self, mock_dir):
+ mock_dir.return_value = tempfile.gettempdir()
+ expected_path = '%(tempdir)s/%(uuid)s.pid' % {
+ 'tempdir': mock_dir.return_value,
+ 'uuid': self.info.get('uuid')}
+ path = console_utils._get_console_pid_file(self.info['uuid'])
+ self.assertEqual(expected_path, path)
+ mock_dir.assert_called_once_with()
+
+ @mock.patch.object(console_utils, '_get_console_pid_file', autospec=True)
+ def test__get_console_pid(self, mock_exec):
+ tmp_file_handle = tempfile.NamedTemporaryFile()
+ tmp_file = tmp_file_handle.name
+ self.addCleanup(utils.unlink_without_raise, tmp_file)
+ with open(tmp_file, "w") as f:
+ f.write("12345\n")
+
+ mock_exec.return_value = tmp_file
+
+ pid = console_utils._get_console_pid(self.info['uuid'])
+
+ mock_exec.assert_called_once_with(self.info['uuid'])
+ self.assertEqual(pid, 12345)
+
+ @mock.patch.object(console_utils, '_get_console_pid_file', autospec=True)
+ def test__get_console_pid_not_a_num(self, mock_exec):
+ tmp_file_handle = tempfile.NamedTemporaryFile()
+ tmp_file = tmp_file_handle.name
+ self.addCleanup(utils.unlink_without_raise, tmp_file)
+ with open(tmp_file, "w") as f:
+ f.write("Hello World\n")
+
+ mock_exec.return_value = tmp_file
+
+ self.assertRaises(exception.NoConsolePid,
+ console_utils._get_console_pid,
+ self.info['uuid'])
+ mock_exec.assert_called_once_with(self.info['uuid'])
+
+ def test__get_console_pid_file_not_found(self):
+ self.assertRaises(exception.NoConsolePid,
+ console_utils._get_console_pid,
+ self.info['uuid'])
+
+ @mock.patch.object(utils, 'unlink_without_raise', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(console_utils, '_get_console_pid', autospec=True)
+ def test__stop_console(self, mock_pid, mock_execute, mock_unlink):
+ pid_file = console_utils._get_console_pid_file(self.info['uuid'])
+ mock_pid.return_value = '12345'
+
+ console_utils._stop_console(self.info['uuid'])
+
+ mock_pid.assert_called_once_with(self.info['uuid'])
+ mock_execute.assert_called_once_with('kill', mock_pid.return_value,
+ check_exit_code=[0, 99])
+ mock_unlink.assert_called_once_with(pid_file)
+
+ @mock.patch.object(utils, 'unlink_without_raise', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(console_utils, '_get_console_pid', autospec=True)
+ def test__stop_console_nopid(self, mock_pid, mock_execute, mock_unlink):
+ pid_file = console_utils._get_console_pid_file(self.info['uuid'])
+ mock_pid.side_effect = iter(
+ [exception.NoConsolePid(pid_path="/tmp/blah")])
+
+ self.assertRaises(exception.NoConsolePid,
+ console_utils._stop_console,
+ self.info['uuid'])
+
+ mock_pid.assert_called_once_with(self.info['uuid'])
+ self.assertFalse(mock_execute.called)
+ mock_unlink.assert_called_once_with(pid_file)
+
+ @mock.patch.object(utils, 'unlink_without_raise', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ @mock.patch.object(console_utils, '_get_console_pid', autospec=True)
+ def test__stop_console_nokill(self, mock_pid, mock_execute, mock_unlink):
+ pid_file = console_utils._get_console_pid_file(self.info['uuid'])
+ mock_pid.return_value = '12345'
+ mock_execute.side_effect = iter([processutils.ProcessExecutionError()])
+
+ self.assertRaises(processutils.ProcessExecutionError,
+ console_utils._stop_console,
+ self.info['uuid'])
+
+ mock_pid.assert_called_once_with(self.info['uuid'])
+ mock_execute.assert_called_once_with('kill', mock_pid.return_value,
+ check_exit_code=[0, 99])
+ mock_unlink.assert_called_once_with(pid_file)
+
+ def _get_shellinabox_console(self, scheme):
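+        # Build the expected URL from CONF.my_ip (bracketed when IPv6) and
+        # the node's console port, then compare it with what console_utils
+        # generates for the given scheme.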
+ generated_url = (
+ console_utils.get_shellinabox_console_url(self.info['port']))
+ console_host = CONF.my_ip
+ if netutils.is_valid_ipv6(console_host):
+ console_host = '[%s]' % console_host
+ http_url = "%s://%s:%s" % (scheme, console_host, self.info['port'])
+ self.assertEqual(http_url, generated_url)
+
+ def test_get_shellinabox_console_url(self):
+ self._get_shellinabox_console('http')
+
+ def test_get_shellinabox_console_https_url(self):
+ # specify terminal_cert_dir in /etc/ironic/ironic.conf
+ self.config(terminal_cert_dir='/tmp', group='console')
+ # use https
+ self._get_shellinabox_console('https')
+
+ def test_make_persistent_password_file(self):
+ filepath = '%(tempdir)s/%(node_uuid)s' % {
+ 'tempdir': tempfile.gettempdir(),
+ 'node_uuid': self.info['uuid']}
+ password = ''.join([random.choice(string.ascii_letters)
+ for n in range(16)])
+ console_utils.make_persistent_password_file(filepath, password)
+ # make sure file exists
+ self.assertTrue(os.path.exists(filepath))
+ # make sure the content is correct
+ with open(filepath) as file:
+ content = file.read()
+ self.assertEqual(password, content)
+ # delete the file
+ os.unlink(filepath)
+
+ @mock.patch.object(os, 'chmod', autospec=True)
+ def test_make_persistent_password_file_fail(self, mock_chmod):
+ mock_chmod.side_effect = IOError()
+ filepath = '%(tempdir)s/%(node_uuid)s' % {
+ 'tempdir': tempfile.gettempdir(),
+ 'node_uuid': self.info['uuid']}
+ self.assertRaises(exception.PasswordFileFailedToCreate,
+ console_utils.make_persistent_password_file,
+ filepath,
+ 'password')
+
+ @mock.patch.object(subprocess, 'Popen', autospec=True)
+ @mock.patch.object(console_utils, '_ensure_console_pid_dir_exists',
+ autospec=True)
+ @mock.patch.object(console_utils, '_stop_console', autospec=True)
+ def test_start_shellinabox_console(self, mock_stop, mock_dir_exists,
+ mock_popen):
+ mock_popen.return_value.poll.return_value = 0
+
+ # touch the pid file
+ pid_file = console_utils._get_console_pid_file(self.info['uuid'])
+ open(pid_file, 'a').close()
+ self.addCleanup(os.remove, pid_file)
+ self.assertTrue(os.path.exists(pid_file))
+
+ console_utils.start_shellinabox_console(self.info['uuid'],
+ self.info['port'],
+ 'ls&')
+
+ mock_stop.assert_called_once_with(self.info['uuid'])
+ mock_dir_exists.assert_called_once_with()
+ mock_popen.assert_called_once_with(mock.ANY,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ mock_popen.return_value.poll.assert_called_once_with()
+
+ @mock.patch.object(subprocess, 'Popen', autospec=True)
+ @mock.patch.object(console_utils, '_ensure_console_pid_dir_exists',
+ autospec=True)
+ @mock.patch.object(console_utils, '_stop_console', autospec=True)
+ def test_start_shellinabox_console_nopid(self, mock_stop, mock_dir_exists,
+ mock_popen):
+ # no existing PID file before starting
+ mock_stop.side_effect = iter([exception.NoConsolePid('/tmp/blah')])
+ mock_popen.return_value.poll.return_value = 0
+
+ # touch the pid file
+ pid_file = console_utils._get_console_pid_file(self.info['uuid'])
+ open(pid_file, 'a').close()
+ self.addCleanup(os.remove, pid_file)
+ self.assertTrue(os.path.exists(pid_file))
+
+ console_utils.start_shellinabox_console(self.info['uuid'],
+ self.info['port'],
+ 'ls&')
+
+ mock_stop.assert_called_once_with(self.info['uuid'])
+ mock_dir_exists.assert_called_once_with()
+ mock_popen.assert_called_once_with(mock.ANY,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ mock_popen.return_value.poll.assert_called_once_with()
+
+ @mock.patch.object(subprocess, 'Popen', autospec=True)
+ @mock.patch.object(console_utils, '_ensure_console_pid_dir_exists',
+ autospec=True)
+ @mock.patch.object(console_utils, '_stop_console', autospec=True)
+ def test_start_shellinabox_console_fail(self, mock_stop, mock_dir_exists,
+ mock_popen):
+ mock_popen.return_value.poll.return_value = 1
+ mock_popen.return_value.communicate.return_value = ('output', 'error')
+
+ self.assertRaises(exception.ConsoleSubprocessFailed,
+ console_utils.start_shellinabox_console,
+ self.info['uuid'],
+ self.info['port'],
+ 'ls&')
+
+ mock_stop.assert_called_once_with(self.info['uuid'])
+ mock_dir_exists.assert_called_once_with()
+ mock_popen.assert_called_once_with(mock.ANY,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ mock_popen.return_value.poll.assert_called_once_with()
+
+ @mock.patch.object(subprocess, 'Popen', autospec=True)
+ @mock.patch.object(console_utils, '_ensure_console_pid_dir_exists',
+ autospec=True)
+ @mock.patch.object(console_utils, '_stop_console', autospec=True)
+ def test_start_shellinabox_console_fail_nopiddir(self, mock_stop,
+ mock_dir_exists,
+ mock_popen):
+ mock_dir_exists.side_effect = iter(
+ [exception.ConsoleError(message='fail')])
+ mock_popen.return_value.poll.return_value = 0
+
+ self.assertRaises(exception.ConsoleError,
+ console_utils.start_shellinabox_console,
+ self.info['uuid'],
+ self.info['port'],
+ 'ls&')
+
+ mock_stop.assert_called_once_with(self.info['uuid'])
+ mock_dir_exists.assert_called_once_with()
+ self.assertFalse(mock_popen.called)
+
+ @mock.patch.object(console_utils, '_stop_console', autospec=True)
+ def test_stop_shellinabox_console(self, mock_stop):
+
+ console_utils.stop_shellinabox_console(self.info['uuid'])
+
+ mock_stop.assert_called_once_with(self.info['uuid'])
+
+ @mock.patch.object(console_utils, '_stop_console', autospec=True)
+ def test_stop_shellinabox_console_fail_nopid(self, mock_stop):
+ mock_stop.side_effect = iter([exception.NoConsolePid('/tmp/blah')])
+
+ console_utils.stop_shellinabox_console(self.info['uuid'])
+
+ mock_stop.assert_called_once_with(self.info['uuid'])
+
+ @mock.patch.object(console_utils, '_stop_console', autospec=True)
+ def test_stop_shellinabox_console_fail_nokill(self, mock_stop):
+ mock_stop.side_effect = iter([processutils.ProcessExecutionError()])
+
+ self.assertRaises(exception.ConsoleError,
+ console_utils.stop_shellinabox_console,
+ self.info['uuid'])
+
+ mock_stop.assert_called_once_with(self.info['uuid'])
diff --git a/ironic/tests/unit/drivers/test_deploy_utils.py b/ironic/tests/unit/drivers/test_deploy_utils.py
new file mode 100644
index 000000000..33ccb2221
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_deploy_utils.py
@@ -0,0 +1,2284 @@
+# Copyright (c) 2012 NTT DOCOMO, INC.
+# Copyright 2011 OpenStack Foundation
+# Copyright 2011 Ilya Alekseyev
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import base64
+import gzip
+import os
+import shutil
+import stat
+import tempfile
+import time
+import types
+
+import mock
+from oslo_concurrency import processutils
+from oslo_config import cfg
+from oslo_utils import uuidutils
+import requests
+import testtools
+
+from ironic.common import boot_devices
+from ironic.common import disk_partitioner
+from ironic.common import exception
+from ironic.common import image_service
+from ironic.common import images
+from ironic.common import keystone
+from ironic.common import states
+from ironic.common import utils as common_utils
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules import agent_client
+from ironic.drivers.modules import deploy_utils as utils
+from ironic.drivers.modules import image_cache
+from ironic.drivers.modules import pxe
+from ironic.tests.unit import base as tests_base
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INST_INFO_DICT = db_utils.get_test_pxe_instance_info()
+DRV_INFO_DICT = db_utils.get_test_pxe_driver_info()
+DRV_INTERNAL_INFO_DICT = db_utils.get_test_pxe_driver_internal_info()
+
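+# Fixture boot configurations used below: PXE, iPXE and UEFI PXE (including
+# GRUB) templates in their initial "deploy" form, plus the expected contents
+# after the config is switched to partition, whole-disk or trusted boot.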
+_PXECONF_DEPLOY = b"""
+default deploy
+
+label deploy
+kernel deploy_kernel
+append initrd=deploy_ramdisk
+ipappend 3
+
+label boot_partition
+kernel kernel
+append initrd=ramdisk root={{ ROOT }}
+
+label boot_whole_disk
+COM32 chain.c32
+append mbr:{{ DISK_IDENTIFIER }}
+
+label trusted_boot
+kernel mboot
+append tboot.gz --- kernel root={{ ROOT }} --- ramdisk
+"""
+
+_PXECONF_BOOT_PARTITION = """
+default boot_partition
+
+label deploy
+kernel deploy_kernel
+append initrd=deploy_ramdisk
+ipappend 3
+
+label boot_partition
+kernel kernel
+append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
+
+label boot_whole_disk
+COM32 chain.c32
+append mbr:{{ DISK_IDENTIFIER }}
+
+label trusted_boot
+kernel mboot
+append tboot.gz --- kernel root=UUID=12345678-1234-1234-1234-1234567890abcdef \
+--- ramdisk
+"""
+
+_PXECONF_BOOT_WHOLE_DISK = """
+default boot_whole_disk
+
+label deploy
+kernel deploy_kernel
+append initrd=deploy_ramdisk
+ipappend 3
+
+label boot_partition
+kernel kernel
+append initrd=ramdisk root={{ ROOT }}
+
+label boot_whole_disk
+COM32 chain.c32
+append mbr:0x12345678
+
+label trusted_boot
+kernel mboot
+append tboot.gz --- kernel root={{ ROOT }} --- ramdisk
+"""
+
+_PXECONF_TRUSTED_BOOT = """
+default trusted_boot
+
+label deploy
+kernel deploy_kernel
+append initrd=deploy_ramdisk
+ipappend 3
+
+label boot_partition
+kernel kernel
+append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
+
+label boot_whole_disk
+COM32 chain.c32
+append mbr:{{ DISK_IDENTIFIER }}
+
+label trusted_boot
+kernel mboot
+append tboot.gz --- kernel root=UUID=12345678-1234-1234-1234-1234567890abcdef \
+--- ramdisk
+"""
+
+_IPXECONF_DEPLOY = b"""
+#!ipxe
+
+dhcp
+
+goto deploy
+
+:deploy
+kernel deploy_kernel
+initrd deploy_ramdisk
+boot
+
+:boot_partition
+kernel kernel
+append initrd=ramdisk root={{ ROOT }}
+boot
+
+:boot_whole_disk
+kernel chain.c32
+append mbr:{{ DISK_IDENTIFIER }}
+boot
+"""
+
+_IPXECONF_BOOT_PARTITION = """
+#!ipxe
+
+dhcp
+
+goto boot_partition
+
+:deploy
+kernel deploy_kernel
+initrd deploy_ramdisk
+boot
+
+:boot_partition
+kernel kernel
+append initrd=ramdisk root=UUID=12345678-1234-1234-1234-1234567890abcdef
+boot
+
+:boot_whole_disk
+kernel chain.c32
+append mbr:{{ DISK_IDENTIFIER }}
+boot
+"""
+
+_IPXECONF_BOOT_WHOLE_DISK = """
+#!ipxe
+
+dhcp
+
+goto boot_whole_disk
+
+:deploy
+kernel deploy_kernel
+initrd deploy_ramdisk
+boot
+
+:boot_partition
+kernel kernel
+append initrd=ramdisk root={{ ROOT }}
+boot
+
+:boot_whole_disk
+kernel chain.c32
+append mbr:0x12345678
+boot
+"""
+
+_UEFI_PXECONF_DEPLOY = b"""
+default=deploy
+
+image=deploy_kernel
+ label=deploy
+ initrd=deploy_ramdisk
+ append="ro text"
+
+image=kernel
+ label=boot_partition
+ initrd=ramdisk
+ append="root={{ ROOT }}"
+
+image=chain.c32
+ label=boot_whole_disk
+ append="mbr:{{ DISK_IDENTIFIER }}"
+"""
+
+_UEFI_PXECONF_BOOT_PARTITION = """
+default=boot_partition
+
+image=deploy_kernel
+ label=deploy
+ initrd=deploy_ramdisk
+ append="ro text"
+
+image=kernel
+ label=boot_partition
+ initrd=ramdisk
+ append="root=UUID=12345678-1234-1234-1234-1234567890abcdef"
+
+image=chain.c32
+ label=boot_whole_disk
+ append="mbr:{{ DISK_IDENTIFIER }}"
+"""
+
+_UEFI_PXECONF_BOOT_WHOLE_DISK = """
+default=boot_whole_disk
+
+image=deploy_kernel
+ label=deploy
+ initrd=deploy_ramdisk
+ append="ro text"
+
+image=kernel
+ label=boot_partition
+ initrd=ramdisk
+ append="root={{ ROOT }}"
+
+image=chain.c32
+ label=boot_whole_disk
+ append="mbr:0x12345678"
+"""
+
+_UEFI_PXECONF_DEPLOY_GRUB = b"""
+set default=deploy
+set timeout=5
+set hidden_timeout_quiet=false
+
+menuentry "deploy" {
+ linuxefi deploy_kernel "ro text"
+ initrdefi deploy_ramdisk
+}
+
+menuentry "boot_partition" {
+ linuxefi kernel "root=(( ROOT ))"
+ initrdefi ramdisk
+}
+
+menuentry "boot_whole_disk" {
+ linuxefi chain.c32 mbr:(( DISK_IDENTIFIER ))
+}
+"""
+
+_UEFI_PXECONF_BOOT_PARTITION_GRUB = """
+set default=boot_partition
+set timeout=5
+set hidden_timeout_quiet=false
+
+menuentry "deploy" {
+ linuxefi deploy_kernel "ro text"
+ initrdefi deploy_ramdisk
+}
+
+menuentry "boot_partition" {
+ linuxefi kernel "root=UUID=12345678-1234-1234-1234-1234567890abcdef"
+ initrdefi ramdisk
+}
+
+menuentry "boot_whole_disk" {
+ linuxefi chain.c32 mbr:(( DISK_IDENTIFIER ))
+}
+"""
+
+_UEFI_PXECONF_BOOT_WHOLE_DISK_GRUB = """
+set default=boot_whole_disk
+set timeout=5
+set hidden_timeout_quiet=false
+
+menuentry "deploy" {
+ linuxefi deploy_kernel "ro text"
+ initrdefi deploy_ramdisk
+}
+
+menuentry "boot_partition" {
+ linuxefi kernel "root=(( ROOT ))"
+ initrdefi ramdisk
+}
+
+menuentry "boot_whole_disk" {
+ linuxefi chain.c32 mbr:0x12345678
+}
+"""
+
+
+@mock.patch.object(time, 'sleep', lambda seconds: None)
+class PhysicalWorkTestCase(tests_base.TestCase):
+
+ def _mock_calls(self, name_list):
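+        """Patch the named deploy_utils functions.
+
+        Each function in name_list is replaced with a mock and attached to a
+        single parent mock so that tests can assert the exact call order
+        across all of them.
+        """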
+ patch_list = [mock.patch.object(utils, name,
+ spec_set=types.FunctionType)
+ for name in name_list]
+ mock_list = [patcher.start() for patcher in patch_list]
+ for patcher in patch_list:
+ self.addCleanup(patcher.stop)
+
+ parent_mock = mock.MagicMock(spec=[])
+ for mocker, name in zip(mock_list, name_list):
+ parent_mock.attach_mock(mocker, name)
+ return parent_mock
+
+ @mock.patch.object(common_utils, 'mkfs', autospec=True)
+ def _test_deploy_partition_image(self, mock_mkfs, boot_option=None,
+ boot_mode=None):
+ """Check loosely all functions are called with right args."""
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ root_mb = 128
+ swap_mb = 64
+ ephemeral_mb = 0
+ ephemeral_format = None
+ configdrive_mb = 0
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ dev = '/dev/fake'
+ swap_part = '/dev/fake-part1'
+ root_part = '/dev/fake-part2'
+ root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
+
+ name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
+ 'logout_iscsi', 'delete_iscsi', 'make_partitions',
+ 'is_block_device', 'populate_image', 'block_uuid',
+ 'notify', 'destroy_disk_metadata']
+ parent_mock = self._mock_calls(name_list)
+ parent_mock.get_dev.return_value = dev
+ parent_mock.get_image_mb.return_value = 1
+ parent_mock.is_block_device.return_value = True
+ parent_mock.block_uuid.return_value = root_uuid
+ parent_mock.make_partitions.return_value = {'root': root_part,
+ 'swap': swap_part}
+
+ make_partitions_expected_args = [dev, root_mb, swap_mb, ephemeral_mb,
+ configdrive_mb, node_uuid]
+ make_partitions_expected_kwargs = {'commit': True}
+ deploy_kwargs = {}
+
+ if boot_option:
+ make_partitions_expected_kwargs['boot_option'] = boot_option
+ deploy_kwargs['boot_option'] = boot_option
+ else:
+ make_partitions_expected_kwargs['boot_option'] = 'netboot'
+
+ if boot_mode:
+ make_partitions_expected_kwargs['boot_mode'] = boot_mode
+ deploy_kwargs['boot_mode'] = boot_mode
+ else:
+ make_partitions_expected_kwargs['boot_mode'] = 'bios'
+
+ # If no boot_option, then it should default to netboot.
+ calls_expected = [mock.call.get_image_mb(image_path),
+ mock.call.get_dev(address, port, iqn, lun),
+ mock.call.discovery(address, port),
+ mock.call.login_iscsi(address, port, iqn),
+ mock.call.is_block_device(dev),
+ mock.call.destroy_disk_metadata(dev, node_uuid),
+ mock.call.make_partitions(
+ *make_partitions_expected_args,
+ **make_partitions_expected_kwargs),
+ mock.call.is_block_device(root_part),
+ mock.call.is_block_device(swap_part),
+ mock.call.populate_image(image_path, root_part),
+ mock.call.block_uuid(root_part),
+ mock.call.logout_iscsi(address, port, iqn),
+ mock.call.delete_iscsi(address, port, iqn)]
+
+ uuids_dict_returned = utils.deploy_partition_image(
+ address, port, iqn, lun, image_path, root_mb, swap_mb,
+ ephemeral_mb, ephemeral_format, node_uuid, **deploy_kwargs)
+
+ self.assertEqual(calls_expected, parent_mock.mock_calls)
+ expected_uuid_dict = {
+ 'root uuid': root_uuid,
+ 'efi system partition uuid': None}
+ self.assertEqual(expected_uuid_dict, uuids_dict_returned)
+ mock_mkfs.assert_called_once_with('swap', swap_part, 'swap1')
+
+ def test_deploy_partition_image_without_boot_option(self):
+ self._test_deploy_partition_image()
+
+ def test_deploy_partition_image_netboot(self):
+ self._test_deploy_partition_image(boot_option="netboot")
+
+ def test_deploy_partition_image_localboot(self):
+ self._test_deploy_partition_image(boot_option="local")
+
+ def test_deploy_partition_image_wo_boot_option_and_wo_boot_mode(self):
+ self._test_deploy_partition_image()
+
+ def test_deploy_partition_image_netboot_bios(self):
+ self._test_deploy_partition_image(boot_option="netboot",
+ boot_mode="bios")
+
+ def test_deploy_partition_image_localboot_bios(self):
+ self._test_deploy_partition_image(boot_option="local",
+ boot_mode="bios")
+
+ def test_deploy_partition_image_netboot_uefi(self):
+ self._test_deploy_partition_image(boot_option="netboot",
+ boot_mode="uefi")
+
+ @mock.patch.object(utils, 'get_image_mb', return_value=129, autospec=True)
+ def test_deploy_partition_image_image_exceeds_root_partition(self,
+ gim_mock):
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ root_mb = 128
+ swap_mb = 64
+ ephemeral_mb = 0
+ ephemeral_format = None
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.deploy_partition_image, address, port, iqn,
+ lun, image_path, root_mb, swap_mb, ephemeral_mb,
+ ephemeral_format, node_uuid)
+
+ gim_mock.assert_called_once_with(image_path)
+
+ # We mock utils.block_uuid separately here because we can't predict
+ # the order in which it will be called.
+ @mock.patch.object(utils, 'block_uuid', autospec=True)
+ @mock.patch.object(common_utils, 'mkfs', autospec=True)
+ def test_deploy_partition_image_localboot_uefi(self, mock_mkfs,
+ block_uuid_mock):
+ """Check loosely all functions are called with right args."""
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ root_mb = 128
+ swap_mb = 64
+ ephemeral_mb = 0
+ ephemeral_format = None
+ configdrive_mb = 0
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ dev = '/dev/fake'
+ swap_part = '/dev/fake-part2'
+ root_part = '/dev/fake-part3'
+ efi_system_part = '/dev/fake-part1'
+ root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
+ efi_system_part_uuid = '9036-482'
+
+ name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
+ 'logout_iscsi', 'delete_iscsi', 'make_partitions',
+ 'is_block_device', 'populate_image', 'notify',
+ 'destroy_disk_metadata']
+ parent_mock = self._mock_calls(name_list)
+ parent_mock.get_dev.return_value = dev
+ parent_mock.get_image_mb.return_value = 1
+ parent_mock.is_block_device.return_value = True
+
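+ # Return a different UUID depending on which partition is queried, so
+ # that both the root and the EFI system partition UUIDs end up in the
+ # returned dict and can be verified.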
+ def block_uuid_side_effect(device):
+ if device == root_part:
+ return root_uuid
+ if device == efi_system_part:
+ return efi_system_part_uuid
+
+ block_uuid_mock.side_effect = block_uuid_side_effect
+ parent_mock.make_partitions.return_value = {
+ 'root': root_part, 'swap': swap_part,
+ 'efi system partition': efi_system_part}
+
+ # boot_option='local' and boot_mode='uefi' are passed explicitly below,
+ # so an EFI system partition is expected as well.
+ calls_expected = [mock.call.get_image_mb(image_path),
+ mock.call.get_dev(address, port, iqn, lun),
+ mock.call.discovery(address, port),
+ mock.call.login_iscsi(address, port, iqn),
+ mock.call.is_block_device(dev),
+ mock.call.destroy_disk_metadata(dev, node_uuid),
+ mock.call.make_partitions(dev, root_mb, swap_mb,
+ ephemeral_mb,
+ configdrive_mb,
+ node_uuid,
+ commit=True,
+ boot_option="local",
+ boot_mode="uefi"),
+ mock.call.is_block_device(root_part),
+ mock.call.is_block_device(swap_part),
+ mock.call.is_block_device(efi_system_part),
+ mock.call.populate_image(image_path, root_part),
+ mock.call.logout_iscsi(address, port, iqn),
+ mock.call.delete_iscsi(address, port, iqn)]
+
+ uuid_dict_returned = utils.deploy_partition_image(
+ address, port, iqn, lun, image_path, root_mb, swap_mb,
+ ephemeral_mb, ephemeral_format, node_uuid, boot_option="local",
+ boot_mode="uefi")
+
+ self.assertEqual(calls_expected, parent_mock.mock_calls)
+ block_uuid_mock.assert_any_call('/dev/fake-part1')
+ block_uuid_mock.assert_any_call('/dev/fake-part3')
+ expected_uuid_dict = {
+ 'root uuid': root_uuid,
+ 'efi system partition uuid': efi_system_part_uuid}
+ self.assertEqual(expected_uuid_dict, uuid_dict_returned)
+ expected_calls = [mock.call('vfat', efi_system_part, 'efi-part'),
+ mock.call('swap', swap_part, 'swap1')]
+ mock_mkfs.assert_has_calls(expected_calls)
+
+ def test_deploy_partition_image_without_swap(self):
+ """Check loosely all functions are called with right args."""
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ root_mb = 128
+ swap_mb = 0
+ ephemeral_mb = 0
+ ephemeral_format = None
+ configdrive_mb = 0
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ dev = '/dev/fake'
+ root_part = '/dev/fake-part1'
+ root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
+
+ name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
+ 'logout_iscsi', 'delete_iscsi', 'make_partitions',
+ 'is_block_device', 'populate_image', 'block_uuid',
+ 'notify', 'destroy_disk_metadata']
+ parent_mock = self._mock_calls(name_list)
+ parent_mock.get_dev.return_value = dev
+ parent_mock.get_image_mb.return_value = 1
+ parent_mock.is_block_device.return_value = True
+ parent_mock.block_uuid.return_value = root_uuid
+ parent_mock.make_partitions.return_value = {'root': root_part}
+ calls_expected = [mock.call.get_image_mb(image_path),
+ mock.call.get_dev(address, port, iqn, lun),
+ mock.call.discovery(address, port),
+ mock.call.login_iscsi(address, port, iqn),
+ mock.call.is_block_device(dev),
+ mock.call.destroy_disk_metadata(dev, node_uuid),
+ mock.call.make_partitions(dev, root_mb, swap_mb,
+ ephemeral_mb,
+ configdrive_mb,
+ node_uuid,
+ commit=True,
+ boot_option="netboot",
+ boot_mode="bios"),
+ mock.call.is_block_device(root_part),
+ mock.call.populate_image(image_path, root_part),
+ mock.call.block_uuid(root_part),
+ mock.call.logout_iscsi(address, port, iqn),
+ mock.call.delete_iscsi(address, port, iqn)]
+
+ uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,
+ lun, image_path,
+ root_mb, swap_mb,
+ ephemeral_mb,
+ ephemeral_format,
+ node_uuid)
+
+ self.assertEqual(calls_expected, parent_mock.mock_calls)
+ self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
+
+ @mock.patch.object(common_utils, 'mkfs', autospec=True)
+ def test_deploy_partition_image_with_ephemeral(self, mock_mkfs):
+ """Check loosely all functions are called with right args."""
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ root_mb = 128
+ swap_mb = 64
+ ephemeral_mb = 256
+ configdrive_mb = 0
+ ephemeral_format = 'exttest'
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ dev = '/dev/fake'
+ ephemeral_part = '/dev/fake-part1'
+ swap_part = '/dev/fake-part2'
+ root_part = '/dev/fake-part3'
+ root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
+
+ name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
+ 'logout_iscsi', 'delete_iscsi', 'make_partitions',
+ 'is_block_device', 'populate_image', 'block_uuid',
+ 'notify', 'destroy_disk_metadata']
+ parent_mock = self._mock_calls(name_list)
+ parent_mock.get_dev.return_value = dev
+ parent_mock.get_image_mb.return_value = 1
+ parent_mock.is_block_device.return_value = True
+ parent_mock.block_uuid.return_value = root_uuid
+ parent_mock.make_partitions.return_value = {
+ 'swap': swap_part, 'ephemeral': ephemeral_part, 'root': root_part}
+ calls_expected = [mock.call.get_image_mb(image_path),
+ mock.call.get_dev(address, port, iqn, lun),
+ mock.call.discovery(address, port),
+ mock.call.login_iscsi(address, port, iqn),
+ mock.call.is_block_device(dev),
+ mock.call.destroy_disk_metadata(dev, node_uuid),
+ mock.call.make_partitions(dev, root_mb, swap_mb,
+ ephemeral_mb,
+ configdrive_mb,
+ node_uuid,
+ commit=True,
+ boot_option="netboot",
+ boot_mode="bios"),
+ mock.call.is_block_device(root_part),
+ mock.call.is_block_device(swap_part),
+ mock.call.is_block_device(ephemeral_part),
+ mock.call.populate_image(image_path, root_part),
+ mock.call.block_uuid(root_part),
+ mock.call.logout_iscsi(address, port, iqn),
+ mock.call.delete_iscsi(address, port, iqn)]
+
+ uuid_dict_returned = utils.deploy_partition_image(address, port, iqn,
+ lun, image_path,
+ root_mb, swap_mb,
+ ephemeral_mb,
+ ephemeral_format,
+ node_uuid)
+
+ self.assertEqual(calls_expected, parent_mock.mock_calls)
+ self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
+ expected_calls = [mock.call('swap', swap_part, 'swap1'),
+ mock.call(ephemeral_format, ephemeral_part,
+ 'ephemeral0')]
+ mock_mkfs.assert_has_calls(expected_calls)
+
+ @mock.patch.object(common_utils, 'mkfs', autospec=True)
+ def test_deploy_partition_image_preserve_ephemeral(self, mock_mkfs):
+ """Check if all functions are called with right args."""
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ root_mb = 128
+ swap_mb = 64
+ ephemeral_mb = 256
+ ephemeral_format = 'exttest'
+ configdrive_mb = 0
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ dev = '/dev/fake'
+ ephemeral_part = '/dev/fake-part1'
+ swap_part = '/dev/fake-part2'
+ root_part = '/dev/fake-part3'
+ root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
+
+ name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
+ 'logout_iscsi', 'delete_iscsi', 'make_partitions',
+ 'is_block_device', 'populate_image', 'block_uuid',
+ 'notify', 'get_dev_block_size']
+ parent_mock = self._mock_calls(name_list)
+ parent_mock.get_dev.return_value = dev
+ parent_mock.get_image_mb.return_value = 1
+ parent_mock.is_block_device.return_value = True
+ parent_mock.block_uuid.return_value = root_uuid
+ parent_mock.make_partitions.return_value = {
+ 'swap': swap_part, 'ephemeral': ephemeral_part, 'root': root_part}
+ calls_expected = [mock.call.get_image_mb(image_path),
+ mock.call.get_dev(address, port, iqn, lun),
+ mock.call.discovery(address, port),
+ mock.call.login_iscsi(address, port, iqn),
+ mock.call.is_block_device(dev),
+ mock.call.make_partitions(dev, root_mb, swap_mb,
+ ephemeral_mb,
+ configdrive_mb,
+ node_uuid,
+ commit=False,
+ boot_option="netboot",
+ boot_mode="bios"),
+ mock.call.is_block_device(root_part),
+ mock.call.is_block_device(swap_part),
+ mock.call.is_block_device(ephemeral_part),
+ mock.call.populate_image(image_path, root_part),
+ mock.call.block_uuid(root_part),
+ mock.call.logout_iscsi(address, port, iqn),
+ mock.call.delete_iscsi(address, port, iqn)]
+
+ uuid_dict_returned = utils.deploy_partition_image(
+ address, port, iqn, lun, image_path, root_mb, swap_mb,
+ ephemeral_mb, ephemeral_format, node_uuid,
+ preserve_ephemeral=True, boot_option="netboot")
+ self.assertEqual(calls_expected, parent_mock.mock_calls)
+ self.assertFalse(parent_mock.get_dev_block_size.called)
+ self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
+ mock_mkfs.assert_called_once_with('swap', swap_part, 'swap1')
+
+ @mock.patch.object(common_utils, 'unlink_without_raise', autospec=True)
+ def test_deploy_partition_image_with_configdrive(self, mock_unlink):
+ """Check loosely all functions are called with right args."""
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ root_mb = 128
+ swap_mb = 0
+ ephemeral_mb = 0
+ configdrive_mb = 10
+ ephemeral_format = None
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+ configdrive_url = 'http://1.2.3.4/cd'
+
+ dev = '/dev/fake'
+ configdrive_part = '/dev/fake-part1'
+ root_part = '/dev/fake-part2'
+ root_uuid = '12345678-1234-1234-12345678-12345678abcdef'
+
+ name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
+ 'logout_iscsi', 'delete_iscsi', 'make_partitions',
+ 'is_block_device', 'populate_image', 'block_uuid',
+ 'notify', 'destroy_disk_metadata', 'dd',
+ '_get_configdrive']
+ parent_mock = self._mock_calls(name_list)
+ parent_mock.get_dev.return_value = dev
+ parent_mock.get_image_mb.return_value = 1
+ parent_mock.is_block_device.return_value = True
+ parent_mock.block_uuid.return_value = root_uuid
+ parent_mock.make_partitions.return_value = {'root': root_part,
+ 'configdrive':
+ configdrive_part}
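+ # _get_configdrive is mocked to return (size in MB, local path); the
+ # partition is then filled via dd and the temporary file is expected to
+ # be removed with unlink_without_raise afterwards.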
+ parent_mock._get_configdrive.return_value = (10, 'configdrive-path')
+ calls_expected = [mock.call.get_image_mb(image_path),
+ mock.call.get_dev(address, port, iqn, lun),
+ mock.call.discovery(address, port),
+ mock.call.login_iscsi(address, port, iqn),
+ mock.call.is_block_device(dev),
+ mock.call.destroy_disk_metadata(dev, node_uuid),
+ mock.call._get_configdrive(configdrive_url,
+ node_uuid),
+ mock.call.make_partitions(dev, root_mb, swap_mb,
+ ephemeral_mb,
+ configdrive_mb,
+ node_uuid,
+ commit=True,
+ boot_option="netboot",
+ boot_mode="bios"),
+ mock.call.is_block_device(root_part),
+ mock.call.is_block_device(configdrive_part),
+ mock.call.dd(mock.ANY, configdrive_part),
+ mock.call.populate_image(image_path, root_part),
+ mock.call.block_uuid(root_part),
+ mock.call.logout_iscsi(address, port, iqn),
+ mock.call.delete_iscsi(address, port, iqn)]
+
+ uuid_dict_returned = utils.deploy_partition_image(
+ address, port, iqn, lun, image_path, root_mb, swap_mb,
+ ephemeral_mb, ephemeral_format, node_uuid,
+ configdrive=configdrive_url)
+
+ self.assertEqual(calls_expected, parent_mock.mock_calls)
+ self.assertEqual(root_uuid, uuid_dict_returned['root uuid'])
+ mock_unlink.assert_called_once_with('configdrive-path')
+
+ @mock.patch.object(utils, 'get_disk_identifier', autospec=True)
+ def test_deploy_whole_disk_image(self, mock_gdi):
+ """Check loosely all functions are called with right args."""
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ dev = '/dev/fake'
+ name_list = ['get_dev', 'discovery', 'login_iscsi', 'logout_iscsi',
+ 'delete_iscsi', 'is_block_device', 'populate_image',
+ 'notify']
+ parent_mock = self._mock_calls(name_list)
+ parent_mock.get_dev.return_value = dev
+ parent_mock.is_block_device.return_value = True
+ mock_gdi.return_value = '0x12345678'
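+ # Whole disk images have no root partition UUID to report, so the disk
+ # identifier (e.g. the MBR signature) from get_disk_identifier is
+ # returned instead.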
+ calls_expected = [mock.call.get_dev(address, port, iqn, lun),
+ mock.call.discovery(address, port),
+ mock.call.login_iscsi(address, port, iqn),
+ mock.call.is_block_device(dev),
+ mock.call.populate_image(image_path, dev),
+ mock.call.logout_iscsi(address, port, iqn),
+ mock.call.delete_iscsi(address, port, iqn)]
+
+ uuid_dict_returned = utils.deploy_disk_image(address, port, iqn, lun,
+ image_path, node_uuid)
+
+ self.assertEqual(calls_expected, parent_mock.mock_calls)
+ self.assertEqual('0x12345678', uuid_dict_returned['disk identifier'])
+
+ @mock.patch.object(common_utils, 'execute', autospec=True)
+ def test_verify_iscsi_connection_raises(self, mock_exec):
+ iqn = 'iqn.xyz'
+ mock_exec.return_value = ['iqn.abc', '']
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.verify_iscsi_connection, iqn)
+ self.assertEqual(3, mock_exec.call_count)
+
+ @mock.patch.object(os.path, 'exists', autospec=True)
+ def test_check_file_system_for_iscsi_device_raises(self, mock_os):
+ iqn = 'iqn.xyz'
+ ip = "127.0.0.1"
+ port = "22"
+ mock_os.return_value = False
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.check_file_system_for_iscsi_device,
+ ip, port, iqn)
+ self.assertEqual(3, mock_os.call_count)
+
+ @mock.patch.object(os.path, 'exists', autospec=True)
+ def test_check_file_system_for_iscsi_device(self, mock_os):
+ iqn = 'iqn.xyz'
+ ip = "127.0.0.1"
+ port = "22"
+ check_dir = "/dev/disk/by-path/ip-%s:%s-iscsi-%s-lun-1" % (ip,
+ port,
+ iqn)
+
+ mock_os.return_value = True
+ utils.check_file_system_for_iscsi_device(ip, port, iqn)
+ mock_os.assert_called_once_with(check_dir)
+
+ @mock.patch.object(common_utils, 'execute', autospec=True)
+ def test_verify_iscsi_connection(self, mock_exec):
+ iqn = 'iqn.xyz'
+ mock_exec.return_value = ['iqn.xyz', '']
+ utils.verify_iscsi_connection(iqn)
+ mock_exec.assert_called_once_with(
+ 'iscsiadm',
+ '-m', 'node',
+ '-S',
+ run_as_root=True,
+ check_exit_code=[0])
+
+ @mock.patch.object(common_utils, 'execute', autospec=True)
+ def test_force_iscsi_lun_update(self, mock_exec):
+ iqn = 'iqn.xyz'
+ utils.force_iscsi_lun_update(iqn)
+ mock_exec.assert_called_once_with(
+ 'iscsiadm',
+ '-m', 'node',
+ '-T', iqn,
+ '-R',
+ run_as_root=True,
+ check_exit_code=[0])
+
+ @mock.patch.object(common_utils, 'execute', autospec=True)
+ @mock.patch.object(utils, 'verify_iscsi_connection', autospec=True)
+ @mock.patch.object(utils, 'force_iscsi_lun_update', autospec=True)
+ @mock.patch.object(utils, 'check_file_system_for_iscsi_device',
+ autospec=True)
+ def test_login_iscsi_calls_verify_and_update(self,
+ mock_check_dev,
+ mock_update,
+ mock_verify,
+ mock_exec):
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ mock_exec.return_value = ['iqn.xyz', '']
+ utils.login_iscsi(address, port, iqn)
+ mock_exec.assert_called_once_with(
+ 'iscsiadm',
+ '-m', 'node',
+ '-p', '%s:%s' % (address, port),
+ '-T', iqn,
+ '--login',
+ run_as_root=True,
+ check_exit_code=[0],
+ attempts=5,
+ delay_on_retry=True)
+
+ mock_verify.assert_called_once_with(iqn)
+
+ mock_update.assert_called_once_with(iqn)
+
+ mock_check_dev.assert_called_once_with(address, port, iqn)
+
+ @mock.patch.object(utils, 'is_block_device', lambda d: True)
+ def test_always_logout_and_delete_iscsi(self):
+ """Check if logout_iscsi() and delete_iscsi() are called.
+
+ Make sure that logout_iscsi() and delete_iscsi() are called once
+ login_iscsi() is invoked.
+
+ """
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ image_path = '/tmp/xyz/image'
+ root_mb = 128
+ swap_mb = 64
+ ephemeral_mb = 256
+ ephemeral_format = 'exttest'
+ node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ dev = '/dev/fake'
+
+ class TestException(Exception):
+ pass
+
+ name_list = ['get_dev', 'get_image_mb', 'discovery', 'login_iscsi',
+ 'logout_iscsi', 'delete_iscsi', 'work_on_disk']
+ patch_list = [mock.patch.object(utils, name,
+ spec_set=types.FunctionType)
+ for name in name_list]
+ mock_list = [patcher.start() for patcher in patch_list]
+ for patcher in patch_list:
+ self.addCleanup(patcher.stop)
+
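+ # Attach every patched function to one bare parent mock so that the
+ # relative call order across all of them can be verified with a single
+ # assertion on parent_mock.mock_calls.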
+ parent_mock = mock.MagicMock(spec=[])
+ for mocker, name in zip(mock_list, name_list):
+ parent_mock.attach_mock(mocker, name)
+
+ parent_mock.get_dev.return_value = dev
+ parent_mock.get_image_mb.return_value = 1
+ parent_mock.work_on_disk.side_effect = TestException
+ calls_expected = [mock.call.get_image_mb(image_path),
+ mock.call.get_dev(address, port, iqn, lun),
+ mock.call.discovery(address, port),
+ mock.call.login_iscsi(address, port, iqn),
+ mock.call.work_on_disk(dev, root_mb, swap_mb,
+ ephemeral_mb,
+ ephemeral_format, image_path,
+ node_uuid, configdrive=None,
+ preserve_ephemeral=False,
+ boot_option="netboot",
+ boot_mode="bios"),
+ mock.call.logout_iscsi(address, port, iqn),
+ mock.call.delete_iscsi(address, port, iqn)]
+
+ self.assertRaises(TestException, utils.deploy_partition_image,
+ address, port, iqn, lun, image_path,
+ root_mb, swap_mb, ephemeral_mb, ephemeral_format,
+ node_uuid)
+
+ self.assertEqual(calls_expected, parent_mock.mock_calls)
+
+
+class SwitchPxeConfigTestCase(tests_base.TestCase):
+
+ def _create_config(self, ipxe=False, boot_mode=None, boot_loader='elilo'):
+ (fd, fname) = tempfile.mkstemp()
+ if boot_mode == 'uefi':
+ if boot_loader == 'grub':
+ pxe_cfg = _UEFI_PXECONF_DEPLOY_GRUB
+ else:
+ pxe_cfg = _UEFI_PXECONF_DEPLOY
+ else:
+ pxe_cfg = _IPXECONF_DEPLOY if ipxe else _PXECONF_DEPLOY
+ os.write(fd, pxe_cfg)
+ os.close(fd)
+ self.addCleanup(os.unlink, fname)
+ return fname
+
+ def test_switch_pxe_config_partition_image(self):
+ boot_mode = 'bios'
+ fname = self._create_config()
+ utils.switch_pxe_config(fname,
+ '12345678-1234-1234-1234-1234567890abcdef',
+ boot_mode,
+ False)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_PXECONF_BOOT_PARTITION, pxeconf)
+
+ def test_switch_pxe_config_whole_disk_image(self):
+ boot_mode = 'bios'
+ fname = self._create_config()
+ utils.switch_pxe_config(fname,
+ '0x12345678',
+ boot_mode,
+ True)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_PXECONF_BOOT_WHOLE_DISK, pxeconf)
+
+ def test_switch_pxe_config_trusted_boot(self):
+ boot_mode = 'bios'
+ fname = self._create_config()
+ utils.switch_pxe_config(fname,
+ '12345678-1234-1234-1234-1234567890abcdef',
+ boot_mode,
+ False, True)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_PXECONF_TRUSTED_BOOT, pxeconf)
+
+ def test_switch_ipxe_config_partition_image(self):
+ boot_mode = 'bios'
+ cfg.CONF.set_override('ipxe_enabled', True, 'pxe')
+ fname = self._create_config(ipxe=True)
+ utils.switch_pxe_config(fname,
+ '12345678-1234-1234-1234-1234567890abcdef',
+ boot_mode,
+ False)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_IPXECONF_BOOT_PARTITION, pxeconf)
+
+ def test_switch_ipxe_config_whole_disk_image(self):
+ boot_mode = 'bios'
+ cfg.CONF.set_override('ipxe_enabled', True, 'pxe')
+ fname = self._create_config(ipxe=True)
+ utils.switch_pxe_config(fname,
+ '0x12345678',
+ boot_mode,
+ True)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_IPXECONF_BOOT_WHOLE_DISK, pxeconf)
+
+ def test_switch_uefi_elilo_pxe_config_partition_image(self):
+ boot_mode = 'uefi'
+ fname = self._create_config(boot_mode=boot_mode)
+ utils.switch_pxe_config(fname,
+ '12345678-1234-1234-1234-1234567890abcdef',
+ boot_mode,
+ False)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_UEFI_PXECONF_BOOT_PARTITION, pxeconf)
+
+ def test_switch_uefi_elilo_config_whole_disk_image(self):
+ boot_mode = 'uefi'
+ fname = self._create_config(boot_mode=boot_mode)
+ utils.switch_pxe_config(fname,
+ '0x12345678',
+ boot_mode,
+ True)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_UEFI_PXECONF_BOOT_WHOLE_DISK, pxeconf)
+
+ def test_switch_uefi_grub_pxe_config_partition_image(self):
+ boot_mode = 'uefi'
+ fname = self._create_config(boot_mode=boot_mode, boot_loader='grub')
+ utils.switch_pxe_config(fname,
+ '12345678-1234-1234-1234-1234567890abcdef',
+ boot_mode,
+ False)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_UEFI_PXECONF_BOOT_PARTITION_GRUB, pxeconf)
+
+ def test_switch_uefi_grub_config_whole_disk_image(self):
+ boot_mode = 'uefi'
+ fname = self._create_config(boot_mode=boot_mode, boot_loader='grub')
+ utils.switch_pxe_config(fname,
+ '0x12345678',
+ boot_mode,
+ True)
+ with open(fname, 'r') as f:
+ pxeconf = f.read()
+ self.assertEqual(_UEFI_PXECONF_BOOT_WHOLE_DISK_GRUB, pxeconf)
+
+
+@mock.patch('time.sleep', lambda sec: None)
+class OtherFunctionTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(OtherFunctionTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_pxe")
+ self.node = obj_utils.create_test_node(self.context, driver='fake_pxe')
+
+ def test_get_dev(self):
+ expected = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
+ actual = utils.get_dev('1.2.3.4', 5678, 'iqn.fake', 9)
+ self.assertEqual(expected, actual)
+
+ @mock.patch.object(os, 'stat', autospec=True)
+ @mock.patch.object(stat, 'S_ISBLK', autospec=True)
+ def test_is_block_device_works(self, mock_is_blk, mock_os):
+ device = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
+ mock_is_blk.return_value = True
+ mock_os().st_mode = 10000
+ self.assertTrue(utils.is_block_device(device))
+ mock_is_blk.assert_called_once_with(mock_os().st_mode)
+
+ @mock.patch.object(os, 'stat', autospec=True)
+ def test_is_block_device_raises(self, mock_os):
+ device = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
+ mock_os.side_effect = OSError
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.is_block_device, device)
+ mock_os.assert_has_calls([mock.call(device)] * 3)
+
+ @mock.patch.object(os.path, 'getsize', autospec=True)
+ @mock.patch.object(images, 'converted_size', autospec=True)
+ def test_get_image_mb(self, mock_csize, mock_getsize):
+ mb = 1024 * 1024
+
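+ # get_image_mb rounds the byte size up to whole mebibytes:
+ # 0 -> 0, 1 byte -> 1, exactly 1 MiB -> 1 and 1 MiB + 1 byte -> 2.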
+ mock_getsize.return_value = 0
+ mock_csize.return_value = 0
+ self.assertEqual(0, utils.get_image_mb('x', False))
+ self.assertEqual(0, utils.get_image_mb('x', True))
+ mock_getsize.return_value = 1
+ mock_csize.return_value = 1
+ self.assertEqual(1, utils.get_image_mb('x', False))
+ self.assertEqual(1, utils.get_image_mb('x', True))
+ mock_getsize.return_value = mb
+ mock_csize.return_value = mb
+ self.assertEqual(1, utils.get_image_mb('x', False))
+ self.assertEqual(1, utils.get_image_mb('x', True))
+ mock_getsize.return_value = mb + 1
+ mock_csize.return_value = mb + 1
+ self.assertEqual(2, utils.get_image_mb('x', False))
+ self.assertEqual(2, utils.get_image_mb('x', True))
+
+ def test_parse_root_device_hints(self):
+ self.node.properties['root_device'] = {'wwn': 123456}
+ expected = 'wwn=123456'
+ result = utils.parse_root_device_hints(self.node)
+ self.assertEqual(expected, result)
+
+ def test_parse_root_device_hints_string_space(self):
+ self.node.properties['root_device'] = {'model': 'fake model'}
+ expected = 'model=fake%20model'
+ result = utils.parse_root_device_hints(self.node)
+ self.assertEqual(expected, result)
+
+ def test_parse_root_device_hints_no_hints(self):
+ self.node.properties = {}
+ result = utils.parse_root_device_hints(self.node)
+ self.assertIsNone(result)
+
+ def test_parse_root_device_hints_invalid_hints(self):
+ self.node.properties['root_device'] = {'vehicle': 'Owlship'}
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.parse_root_device_hints, self.node)
+
+ def test_parse_root_device_hints_invalid_size(self):
+ self.node.properties['root_device'] = {'size': 'not-int'}
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.parse_root_device_hints, self.node)
+
+ @mock.patch.object(utils, 'LOG', autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(task_manager.TaskManager, 'process_event',
+ autospec=True)
+ def _test_set_failed_state(self, mock_event, mock_power, mock_log,
+ event_value=None, power_value=None,
+ log_calls=None):
+ err_msg = 'some failure'
+ mock_event.side_effect = event_value
+ mock_power.side_effect = power_value
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ utils.set_failed_state(task, err_msg)
+ mock_event.assert_called_once_with(task, 'fail')
+ mock_power.assert_called_once_with(task, states.POWER_OFF)
+ self.assertEqual(err_msg, task.node.last_error)
+ if log_calls:
+ mock_log.exception.assert_has_calls(log_calls)
+ else:
+ self.assertFalse(mock_log.called)
+
+ def test_set_failed_state(self):
+ exc_state = exception.InvalidState('invalid state')
+ exc_param = exception.InvalidParameterValue('invalid parameter')
+ mock_call = mock.call(mock.ANY)
+ self._test_set_failed_state()
+ calls = [mock_call]
+ self._test_set_failed_state(event_value=iter([exc_state] * len(calls)),
+ log_calls=calls)
+ calls = [mock_call]
+ self._test_set_failed_state(power_value=iter([exc_param] * len(calls)),
+ log_calls=calls)
+ calls = [mock_call, mock_call]
+ self._test_set_failed_state(event_value=iter([exc_state] * len(calls)),
+ power_value=iter([exc_param] * len(calls)),
+ log_calls=calls)
+
+ def test_get_boot_option(self):
+ self.node.instance_info = {'capabilities': '{"boot_option": "local"}'}
+ result = utils.get_boot_option(self.node)
+ self.assertEqual("local", result)
+
+ def test_get_boot_option_default_value(self):
+ self.node.instance_info = {}
+ result = utils.get_boot_option(self.node)
+ self.assertEqual("netboot", result)
+
+
+@mock.patch.object(disk_partitioner.DiskPartitioner, 'commit', lambda _: None)
+class WorkOnDiskTestCase(tests_base.TestCase):
+
+ def setUp(self):
+ super(WorkOnDiskTestCase, self).setUp()
+ self.image_path = '/tmp/xyz/image'
+ self.root_mb = 128
+ self.swap_mb = 64
+ self.ephemeral_mb = 0
+ self.ephemeral_format = None
+ self.configdrive_mb = 0
+ self.dev = '/dev/fake'
+ self.swap_part = '/dev/fake-part1'
+ self.root_part = '/dev/fake-part2'
+
+ self.mock_ibd_obj = mock.patch.object(
+ utils, 'is_block_device', autospec=True)
+ self.mock_ibd = self.mock_ibd_obj.start()
+ self.addCleanup(self.mock_ibd_obj.stop)
+ self.mock_mp_obj = mock.patch.object(
+ utils, 'make_partitions', autospec=True)
+ self.mock_mp = self.mock_mp_obj.start()
+ self.addCleanup(self.mock_mp_obj.stop)
+ self.mock_remlbl_obj = mock.patch.object(
+ utils, 'destroy_disk_metadata', autospec=True)
+ self.mock_remlbl = self.mock_remlbl_obj.start()
+ self.addCleanup(self.mock_remlbl_obj.stop)
+ self.mock_mp.return_value = {'swap': self.swap_part,
+ 'root': self.root_part}
+
+ def test_no_root_partition(self):
+ self.mock_ibd.return_value = False
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.work_on_disk, self.dev, self.root_mb,
+ self.swap_mb, self.ephemeral_mb,
+ self.ephemeral_format, self.image_path, 'fake-uuid')
+ self.mock_ibd.assert_called_once_with(self.root_part)
+ self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
+ self.swap_mb, self.ephemeral_mb,
+ self.configdrive_mb,
+ 'fake-uuid',
+ commit=True,
+ boot_option="netboot",
+ boot_mode="bios")
+
+ def test_no_swap_partition(self):
+ self.mock_ibd.side_effect = iter([True, False])
+ calls = [mock.call(self.root_part),
+ mock.call(self.swap_part)]
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.work_on_disk, self.dev, self.root_mb,
+ self.swap_mb, self.ephemeral_mb,
+ self.ephemeral_format, self.image_path, 'fake-uuid')
+ self.assertEqual(self.mock_ibd.call_args_list, calls)
+ self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
+ self.swap_mb, self.ephemeral_mb,
+ self.configdrive_mb,
+ 'fake-uuid',
+ commit=True,
+ boot_option="netboot",
+ boot_mode="bios")
+
+ def test_no_ephemeral_partition(self):
+ ephemeral_part = '/dev/fake-part1'
+ swap_part = '/dev/fake-part2'
+ root_part = '/dev/fake-part3'
+ ephemeral_mb = 256
+ ephemeral_format = 'exttest'
+
+ self.mock_mp.return_value = {'ephemeral': ephemeral_part,
+ 'swap': swap_part,
+ 'root': root_part}
+ self.mock_ibd.side_effect = iter([True, True, False])
+ calls = [mock.call(root_part),
+ mock.call(swap_part),
+ mock.call(ephemeral_part)]
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.work_on_disk, self.dev, self.root_mb,
+ self.swap_mb, ephemeral_mb, ephemeral_format,
+ self.image_path, 'fake-uuid')
+ self.assertEqual(self.mock_ibd.call_args_list, calls)
+ self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
+ self.swap_mb, ephemeral_mb,
+ self.configdrive_mb,
+ 'fake-uuid',
+ commit=True,
+ boot_option="netboot",
+ boot_mode="bios")
+
+ @mock.patch.object(common_utils, 'unlink_without_raise', autospec=True)
+ @mock.patch.object(utils, '_get_configdrive', autospec=True)
+ def test_no_configdrive_partition(self, mock_configdrive, mock_unlink):
+ mock_configdrive.return_value = (10, 'fake-path')
+ swap_part = '/dev/fake-part1'
+ configdrive_part = '/dev/fake-part2'
+ root_part = '/dev/fake-part3'
+ configdrive_url = 'http://1.2.3.4/cd'
+ configdrive_mb = 10
+
+ self.mock_mp.return_value = {'swap': swap_part,
+ 'configdrive': configdrive_part,
+ 'root': root_part}
+ self.mock_ibd.side_effect = iter([True, True, False])
+ calls = [mock.call(root_part),
+ mock.call(swap_part),
+ mock.call(configdrive_part)]
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.work_on_disk, self.dev, self.root_mb,
+ self.swap_mb, self.ephemeral_mb,
+ self.ephemeral_format, self.image_path, 'fake-uuid',
+ preserve_ephemeral=False,
+ configdrive=configdrive_url,
+ boot_option="netboot")
+ self.assertEqual(self.mock_ibd.call_args_list, calls)
+ self.mock_mp.assert_called_once_with(self.dev, self.root_mb,
+ self.swap_mb, self.ephemeral_mb,
+ configdrive_mb,
+ 'fake-uuid',
+ commit=True,
+ boot_option="netboot",
+ boot_mode="bios")
+ mock_unlink.assert_called_once_with('fake-path')
+
+
+@mock.patch.object(common_utils, 'execute', autospec=True)
+class MakePartitionsTestCase(tests_base.TestCase):
+
+ def setUp(self):
+ super(MakePartitionsTestCase, self).setUp()
+ self.dev = 'fake-dev'
+ self.root_mb = 1024
+ self.swap_mb = 512
+ self.ephemeral_mb = 0
+ self.configdrive_mb = 0
+ self.parted_static_cmd = ['parted', '-a', 'optimal', '-s', self.dev,
+ '--', 'unit', 'MiB', 'mklabel', 'msdos']
+
+ def _test_make_partitions(self, mock_exc, boot_option):
+ mock_exc.return_value = (None, None)
+ utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
+ self.ephemeral_mb, self.configdrive_mb,
+ '12345678-1234-1234-1234-1234567890abcxyz',
+ boot_option=boot_option)
+
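+ # With root_mb=1024 and swap_mb=512 the partitions start at the 1 MiB
+ # boundary: swap spans 1-513 MiB and root spans 513-1537 MiB.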
+ expected_mkpart = ['mkpart', 'primary', 'linux-swap', '1', '513',
+ 'mkpart', 'primary', '', '513', '1537']
+ if boot_option == "local":
+ expected_mkpart.extend(['set', '2', 'boot', 'on'])
+ parted_cmd = self.parted_static_cmd + expected_mkpart
+ parted_call = mock.call(*parted_cmd, run_as_root=True,
+ check_exit_code=[0])
+ fuser_cmd = ['fuser', 'fake-dev']
+ fuser_call = mock.call(*fuser_cmd, run_as_root=True,
+ check_exit_code=[0, 1])
+ mock_exc.assert_has_calls([parted_call, fuser_call])
+
+ def test_make_partitions(self, mock_exc):
+ self._test_make_partitions(mock_exc, boot_option="netboot")
+
+ def test_make_partitions_local_boot(self, mock_exc):
+ self._test_make_partitions(mock_exc, boot_option="local")
+
+ def test_make_partitions_with_ephemeral(self, mock_exc):
+ self.ephemeral_mb = 2048
+ expected_mkpart = ['mkpart', 'primary', '', '1', '2049',
+ 'mkpart', 'primary', 'linux-swap', '2049', '2561',
+ 'mkpart', 'primary', '', '2561', '3585']
+ cmd = self.parted_static_cmd + expected_mkpart
+ mock_exc.return_value = (None, None)
+ utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
+ self.ephemeral_mb, self.configdrive_mb,
+ '12345678-1234-1234-1234-1234567890abcxyz')
+
+ parted_call = mock.call(*cmd, run_as_root=True, check_exit_code=[0])
+ mock_exc.assert_has_calls([parted_call])
+
+
+@mock.patch.object(utils, 'get_dev_block_size', autospec=True)
+@mock.patch.object(common_utils, 'execute', autospec=True)
+class DestroyMetaDataTestCase(tests_base.TestCase):
+
+ def setUp(self):
+ super(DestroyMetaDataTestCase, self).setUp()
+ self.dev = 'fake-dev'
+ self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ def test_destroy_disk_metadata(self, mock_exec, mock_gz):
+ mock_gz.return_value = 64
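+ # The device is reported as 64 sectors long, so the second dd call
+ # seeks to sector 28 (64 - 36) to wipe the last 36 sectors (e.g. a GPT
+ # backup header) in addition to the start of the disk.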
+ expected_calls = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
+ 'bs=512', 'count=36', run_as_root=True,
+ check_exit_code=[0]),
+ mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
+ 'bs=512', 'count=36', 'seek=28',
+ run_as_root=True,
+ check_exit_code=[0])]
+ utils.destroy_disk_metadata(self.dev, self.node_uuid)
+ mock_exec.assert_has_calls(expected_calls)
+ self.assertTrue(mock_gz.called)
+
+ def test_destroy_disk_metadata_get_dev_size_fail(self, mock_exec, mock_gz):
+ mock_gz.side_effect = processutils.ProcessExecutionError
+
+ expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
+ 'bs=512', 'count=36', run_as_root=True,
+ check_exit_code=[0])]
+ self.assertRaises(processutils.ProcessExecutionError,
+ utils.destroy_disk_metadata,
+ self.dev,
+ self.node_uuid)
+ mock_exec.assert_has_calls(expected_call)
+
+ def test_destroy_disk_metadata_dd_fail(self, mock_exec, mock_gz):
+ mock_exec.side_effect = processutils.ProcessExecutionError
+
+ expected_call = [mock.call('dd', 'if=/dev/zero', 'of=fake-dev',
+ 'bs=512', 'count=36', run_as_root=True,
+ check_exit_code=[0])]
+ self.assertRaises(processutils.ProcessExecutionError,
+ utils.destroy_disk_metadata,
+ self.dev,
+ self.node_uuid)
+ mock_exec.assert_has_calls(expected_call)
+ self.assertFalse(mock_gz.called)
+
+
+@mock.patch.object(common_utils, 'execute', autospec=True)
+class GetDeviceBlockSizeTestCase(tests_base.TestCase):
+
+ def setUp(self):
+ super(GetDeviceBlockSizeTestCase, self).setUp()
+ self.dev = 'fake-dev'
+ self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+
+ def test_get_dev_block_size(self, mock_exec):
+ mock_exec.return_value = ("64", "")
+ expected_call = [mock.call('blockdev', '--getsz', self.dev,
+ run_as_root=True, check_exit_code=[0])]
+ utils.get_dev_block_size(self.dev)
+ mock_exec.assert_has_calls(expected_call)
+
+
+@mock.patch.object(utils, 'dd', autospec=True)
+@mock.patch.object(images, 'qemu_img_info', autospec=True)
+@mock.patch.object(images, 'convert_image', autospec=True)
+class PopulateImageTestCase(tests_base.TestCase):
+
+ def setUp(self):
+ super(PopulateImageTestCase, self).setUp()
+
+ def test_populate_raw_image(self, mock_cg, mock_qinfo, mock_dd):
+ type(mock_qinfo.return_value).file_format = mock.PropertyMock(
+ return_value='raw')
+ utils.populate_image('src', 'dst')
+ mock_dd.assert_called_once_with('src', 'dst')
+ self.assertFalse(mock_cg.called)
+
+ def test_populate_qcow2_image(self, mock_cg, mock_qinfo, mock_dd):
+ type(mock_qinfo.return_value).file_format = mock.PropertyMock(
+ return_value='qcow2')
+ utils.populate_image('src', 'dst')
+ mock_cg.assert_called_once_with('src', 'dst', 'raw', True)
+ self.assertFalse(mock_dd.called)
+
+
+@mock.patch.object(utils, 'is_block_device', lambda d: True)
+@mock.patch.object(utils, 'block_uuid', lambda p: 'uuid')
+@mock.patch.object(utils, 'dd', lambda *_: None)
+@mock.patch.object(images, 'convert_image', lambda *_: None)
+@mock.patch.object(common_utils, 'mkfs', lambda *_: None)
+# NOTE(dtantsur): destroy_disk_metadata would reset the file size, so it is
+# disabled here
+@mock.patch.object(utils, 'destroy_disk_metadata', lambda *_: None)
+class RealFilePartitioningTestCase(tests_base.TestCase):
+ """This test applies some real-world partitioning scenario to a file.
+
+ This test covers the whole partitioning, mocking everything not possible
+ on a file. That helps us assure, that we do all partitioning math properly
+ and also conducts integration testing of DiskPartitioner.
+ """
+
+ def setUp(self):
+ super(RealFilePartitioningTestCase, self).setUp()
+ # NOTE(dtantsur): no parted utility on gate-ironic-python26
+ try:
+ common_utils.execute('parted', '--version')
+ except OSError as exc:
+ self.skipTest('parted utility was not found: %s' % exc)
+ self.file = tempfile.NamedTemporaryFile(delete=False)
+ # NOTE(ifarkas): the file needs to be closed, so fuser won't report
+ # any usage
+ self.file.close()
+ # NOTE(dtantsur): 20 MiB file with zeros
+ common_utils.execute('dd', 'if=/dev/zero', 'of=%s' % self.file.name,
+ 'bs=1', 'count=0', 'seek=20MiB')
+
+ @staticmethod
+ def _run_without_root(func, *args, **kwargs):
+ """Make sure root is not required when using utils.execute."""
+ real_execute = common_utils.execute
+
+ def fake_execute(*cmd, **kwargs):
+ kwargs['run_as_root'] = False
+ return real_execute(*cmd, **kwargs)
+
+ with mock.patch.object(common_utils, 'execute', fake_execute):
+ return func(*args, **kwargs)
+
+ def test_different_sizes(self):
+ # NOTE(dtantsur): Keep this list in order with expected partitioning
+ fields = ['ephemeral_mb', 'swap_mb', 'root_mb']
+ variants = ((0, 0, 12), (4, 2, 8), (0, 4, 10), (5, 0, 10))
+ for variant in variants:
+ kwargs = dict(zip(fields, variant))
+ self._run_without_root(utils.work_on_disk, self.file.name,
+ ephemeral_format='ext4', node_uuid='',
+ image_path='path', **kwargs)
+ part_table = self._run_without_root(
+ disk_partitioner.list_partitions, self.file.name)
+ for part, expected_size in zip(part_table, filter(None, variant)):
+ self.assertEqual(expected_size, part['size'],
+ "comparison failed for %s" % list(variant))
+
+ def test_whole_disk(self):
+ # 6 MiB ephemeral + 3 MiB swap + 9 MiB root + 1 MiB for MBR
+ # + 1 MiB MAGIC == 20 MiB whole disk
+ # TODO(dtantsur): figure out why we need 'magic' 1 more MiB
+ # and why the result differs between Ubuntu and Fedora (see below)
+ self._run_without_root(utils.work_on_disk, self.file.name,
+ root_mb=9, ephemeral_mb=6, swap_mb=3,
+ ephemeral_format='ext4', node_uuid='',
+ image_path='path')
+ part_table = self._run_without_root(
+ disk_partitioner.list_partitions, self.file.name)
+ sizes = [part['size'] for part in part_table]
+ # NOTE(dtantsur): parted in Ubuntu 12.04 will occupy the last MiB,
+ # parted in Fedora 20 won't - thus two possible variants for last part
+ self.assertEqual([6, 3], sizes[:2],
+ "unexpected partitioning %s" % part_table)
+ self.assertIn(sizes[2], (9, 10))
+
+ @mock.patch.object(image_cache, 'clean_up_caches', autospec=True)
+ def test_fetch_images(self, mock_clean_up_caches):
+
+ mock_cache = mock.MagicMock(
+ spec_set=['fetch_image', 'master_dir'], master_dir='master_dir')
+ utils.fetch_images(None, mock_cache, [('uuid', 'path')])
+ mock_clean_up_caches.assert_called_once_with(None, 'master_dir',
+ [('uuid', 'path')])
+ mock_cache.fetch_image.assert_called_once_with('uuid', 'path',
+ ctx=None,
+ force_raw=True)
+
+ @mock.patch.object(image_cache, 'clean_up_caches', autospec=True)
+ def test_fetch_images_fail(self, mock_clean_up_caches):
+
+ exc = exception.InsufficientDiskSpace(path='a',
+ required=2,
+ actual=1)
+
+ mock_cache = mock.MagicMock(
+ spec_set=['master_dir'], master_dir='master_dir')
+ mock_clean_up_caches.side_effect = iter([exc])
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils.fetch_images,
+ None,
+ mock_cache,
+ [('uuid', 'path')])
+ mock_clean_up_caches.assert_called_once_with(None, 'master_dir',
+ [('uuid', 'path')])
+
+
+@mock.patch.object(tempfile, 'NamedTemporaryFile', autospec=True)
+@mock.patch.object(shutil, 'copyfileobj', autospec=True)
+@mock.patch.object(requests, 'get', autospec=True)
+class GetConfigdriveTestCase(tests_base.TestCase):
+
+ def setUp(self):
+ super(GetConfigdriveTestCase, self).setUp()
+ # NOTE(lucasagomes): "name" can't be passed to Mock() at instantiation
+ # time because Mock() uses that parameter itself, so it is set as an
+ # attribute afterwards.
+ # https://docs.python.org/3/library/unittest.mock.html
+ self.fake_configdrive_file = mock.Mock(tell=lambda *_: 123)
+ self.fake_configdrive_file.name = '/tmp/foo'
+
+ @mock.patch.object(gzip, 'GzipFile', autospec=True)
+ def test_get_configdrive(self, mock_gzip, mock_requests, mock_copy,
+ mock_file):
+ mock_file.return_value = self.fake_configdrive_file
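+ # 'Zm9vYmFy' is the base64 encoding of 'foobar'; _get_configdrive is
+ # expected to base64-decode the fetched content before handing it to
+ # GzipFile (mocked here).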
+ mock_requests.return_value = mock.MagicMock(
+ spec_set=['content'], content='Zm9vYmFy')
+ utils._get_configdrive('http://1.2.3.4/cd', 'fake-node-uuid')
+ mock_requests.assert_called_once_with('http://1.2.3.4/cd')
+ mock_gzip.assert_called_once_with('configdrive', 'rb',
+ fileobj=mock.ANY)
+ mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
+ mock_file.assert_called_once_with(prefix='configdrive',
+ dir=cfg.CONF.tempdir, delete=False)
+
+ @mock.patch.object(gzip, 'GzipFile', autospec=True)
+ def test_get_configdrive_base64_string(self, mock_gzip, mock_requests,
+ mock_copy, mock_file):
+ mock_file.return_value = self.fake_configdrive_file
+ utils._get_configdrive('Zm9vYmFy', 'fake-node-uuid')
+ self.assertFalse(mock_requests.called)
+ mock_gzip.assert_called_once_with('configdrive', 'rb',
+ fileobj=mock.ANY)
+ mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
+ mock_file.assert_called_once_with(prefix='configdrive',
+ dir=cfg.CONF.tempdir, delete=False)
+
+ def test_get_configdrive_bad_url(self, mock_requests, mock_copy,
+ mock_file):
+ mock_requests.side_effect = requests.exceptions.RequestException
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils._get_configdrive, 'http://1.2.3.4/cd',
+ 'fake-node-uuid')
+ self.assertFalse(mock_copy.called)
+ self.assertFalse(mock_file.called)
+
+ @mock.patch.object(base64, 'b64decode', autospec=True)
+ def test_get_configdrive_base64_error(self, mock_b64, mock_requests,
+ mock_copy, mock_file):
+ mock_b64.side_effect = TypeError
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils._get_configdrive,
+ 'malformed', 'fake-node-uuid')
+ mock_b64.assert_called_once_with('malformed')
+ self.assertFalse(mock_copy.called)
+ self.assertFalse(mock_file.called)
+
+ @mock.patch.object(gzip, 'GzipFile', autospec=True)
+ def test_get_configdrive_gzip_error(self, mock_gzip, mock_requests,
+ mock_copy, mock_file):
+ mock_file.return_value = self.fake_configdrive_file
+ mock_requests.return_value = mock.MagicMock(
+ spec_set=['content'], content='Zm9vYmFy')
+ mock_copy.side_effect = IOError
+ self.assertRaises(exception.InstanceDeployFailure,
+ utils._get_configdrive, 'http://1.2.3.4/cd',
+ 'fake-node-uuid')
+ mock_requests.assert_called_once_with('http://1.2.3.4/cd')
+ mock_gzip.assert_called_once_with('configdrive', 'rb',
+ fileobj=mock.ANY)
+ mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
+ mock_file.assert_called_once_with(prefix='configdrive',
+ dir=cfg.CONF.tempdir, delete=False)
+
+
+class VirtualMediaDeployUtilsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(VirtualMediaDeployUtilsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="iscsi_ilo")
+ info_dict = db_utils.get_test_ilo_info()
+ self.node = obj_utils.create_test_node(
+ self.context, driver='iscsi_ilo', driver_info=info_dict)
+
+ def test_get_single_nic_with_vif_port_id(self):
+ obj_utils.create_test_port(
+ self.context, node_id=self.node.id, address='aa:bb:cc:dd:ee:ff',
+ uuid=uuidutils.generate_uuid(),
+ extra={'vif_port_id': 'test-vif-A'}, driver='iscsi_ilo')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ address = utils.get_single_nic_with_vif_port_id(task)
+ self.assertEqual('aa:bb:cc:dd:ee:ff', address)
+
+
+class ParseInstanceInfoCapabilitiesTestCase(tests_base.TestCase):
+
+ def setUp(self):
+ super(ParseInstanceInfoCapabilitiesTestCase, self).setUp()
+ self.node = obj_utils.get_test_node(self.context, driver='fake')
+
+ def test_parse_instance_info_capabilities_string(self):
+ self.node.instance_info = {'capabilities': '{"cat": "meow"}'}
+ expected_result = {"cat": "meow"}
+ result = utils.parse_instance_info_capabilities(self.node)
+ self.assertEqual(expected_result, result)
+
+ def test_parse_instance_info_capabilities(self):
+ self.node.instance_info = {'capabilities': {"dog": "wuff"}}
+ expected_result = {"dog": "wuff"}
+ result = utils.parse_instance_info_capabilities(self.node)
+ self.assertEqual(expected_result, result)
+
+ def test_parse_instance_info_invalid_type(self):
+ self.node.instance_info = {'capabilities': 'not-a-dict'}
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.parse_instance_info_capabilities, self.node)
+
+ def test_is_secure_boot_requested_true(self):
+ self.node.instance_info = {'capabilities': {"secure_boot": "tRue"}}
+ self.assertTrue(utils.is_secure_boot_requested(self.node))
+
+ def test_is_secure_boot_requested_false(self):
+ self.node.instance_info = {'capabilities': {"secure_boot": "false"}}
+ self.assertFalse(utils.is_secure_boot_requested(self.node))
+
+ def test_is_secure_boot_requested_invalid(self):
+ self.node.instance_info = {'capabilities': {"secure_boot": "invalid"}}
+ self.assertFalse(utils.is_secure_boot_requested(self.node))
+
+ def test_is_trusted_boot_requested_true(self):
+ self.node.instance_info = {'capabilities': {"trusted_boot": "true"}}
+ self.assertTrue(utils.is_trusted_boot_requested(self.node))
+
+ def test_is_trusted_boot_requested_false(self):
+ self.node.instance_info = {'capabilities': {"trusted_boot": "false"}}
+ self.assertFalse(utils.is_trusted_boot_requested(self.node))
+
+ def test_is_trusted_boot_requested_invalid(self):
+ self.node.instance_info = {'capabilities': {"trusted_boot": "invalid"}}
+ self.assertFalse(utils.is_trusted_boot_requested(self.node))
+
+ def test_get_boot_mode_for_deploy_using_capabilities(self):
+ properties = {'capabilities': 'boot_mode:uefi,cap2:value2'}
+ self.node.properties = properties
+
+ result = utils.get_boot_mode_for_deploy(self.node)
+ self.assertEqual('uefi', result)
+
+ def test_get_boot_mode_for_deploy_using_instance_info_cap(self):
+ instance_info = {'capabilities': {'secure_boot': 'True'}}
+ self.node.instance_info = instance_info
+
+ result = utils.get_boot_mode_for_deploy(self.node)
+ self.assertEqual('uefi', result)
+
+ instance_info = {'capabilities': {'trusted_boot': 'True'}}
+ self.node.instance_info = instance_info
+
+ result = utils.get_boot_mode_for_deploy(self.node)
+ self.assertEqual('bios', result)
+
+ instance_info = {'capabilities': {'trusted_boot': 'True',
+ 'secure_boot': 'True'}}
+ self.node.instance_info = instance_info
+
+ result = utils.get_boot_mode_for_deploy(self.node)
+ self.assertEqual('uefi', result)
+
+ def test_get_boot_mode_for_deploy_using_instance_info(self):
+ instance_info = {'deploy_boot_mode': 'bios'}
+ self.node.instance_info = instance_info
+
+ result = utils.get_boot_mode_for_deploy(self.node)
+ self.assertEqual('bios', result)
+
+ def test_validate_boot_mode_capability(self):
+ prop = {'capabilities': 'boot_mode:uefi,cap2:value2'}
+ self.node.properties = prop
+
+ result = utils.validate_capabilities(self.node)
+ self.assertIsNone(result)
+
+ def test_validate_boot_mode_capability_with_exc(self):
+ prop = {'capabilities': 'boot_mode:UEFI,cap2:value2'}
+ self.node.properties = prop
+
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.validate_capabilities, self.node)
+
+ def test_validate_boot_mode_capability_instance_info(self):
+ inst_info = {'capabilities': {"boot_mode": "uefi", "cap2": "value2"}}
+ self.node.instance_info = inst_info
+
+ result = utils.validate_capabilities(self.node)
+ self.assertIsNone(result)
+
+ def test_validate_boot_mode_capability_instance_info_with_exc(self):
+ inst_info = {'capabilities': {"boot_mode": "UEFI", "cap2": "value2"}}
+ self.node.instance_info = inst_info
+
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.validate_capabilities, self.node)
+
+ def test_validate_trusted_boot_capability(self):
+ properties = {'capabilities': 'trusted_boot:value'}
+ self.node.properties = properties
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.validate_capabilities, self.node)
+
+ def test_all_supported_capabilities(self):
+ self.assertEqual(('local', 'netboot'),
+ utils.SUPPORTED_CAPABILITIES['boot_option'])
+ self.assertEqual(('bios', 'uefi'),
+ utils.SUPPORTED_CAPABILITIES['boot_mode'])
+ self.assertEqual(('true', 'false'),
+ utils.SUPPORTED_CAPABILITIES['secure_boot'])
+ self.assertEqual(('true', 'false'),
+ utils.SUPPORTED_CAPABILITIES['trusted_boot'])
+
+
+class TrySetBootDeviceTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(TrySetBootDeviceTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake")
+ self.node = obj_utils.create_test_node(self.context, driver="fake")
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
+ def test_try_set_boot_device_okay(self, node_set_boot_device_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ utils.try_set_boot_device(task, boot_devices.DISK,
+ persistent=True)
+ node_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK, persistent=True)
+
+ @mock.patch.object(utils, 'LOG', autospec=True)
+ @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
+ def test_try_set_boot_device_ipmifailure_uefi(
+ self, node_set_boot_device_mock, log_mock):
+ self.node.properties = {'capabilities': 'boot_mode:uefi'}
+ self.node.save()
+ node_set_boot_device_mock.side_effect = iter(
+ [exception.IPMIFailure(cmd='a')])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ utils.try_set_boot_device(task, boot_devices.DISK,
+ persistent=True)
+ node_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK, persistent=True)
+ log_mock.warning.assert_called_once_with(mock.ANY)
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
+ def test_try_set_boot_device_ipmifailure_bios(
+ self, node_set_boot_device_mock):
+ node_set_boot_device_mock.side_effect = iter(
+ [exception.IPMIFailure(cmd='a')])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IPMIFailure,
+ utils.try_set_boot_device,
+ task, boot_devices.DISK, persistent=True)
+ node_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK, persistent=True)
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
+ def test_try_set_boot_device_some_other_exception(
+ self, node_set_boot_device_mock):
+ exc = exception.IloOperationError(operation="qwe", error="error")
+ node_set_boot_device_mock.side_effect = iter([exc])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IloOperationError,
+ utils.try_set_boot_device,
+ task, boot_devices.DISK, persistent=True)
+ node_set_boot_device_mock.assert_called_once_with(
+ task, boot_devices.DISK, persistent=True)
+
+
+class AgentMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(AgentMethodsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_agent')
+ n = {'driver': 'fake_agent',
+ 'driver_internal_info': {'agent_url': 'http://127.0.0.1:9999'}}
+
+ self.node = obj_utils.create_test_node(self.context, **n)
+ self.ports = [obj_utils.create_test_port(self.context,
+ node_id=self.node.id)]
+
+ self.clean_steps = {
+ 'hardware_manager_version': '1',
+ 'clean_steps': {
+ 'GenericHardwareManager': [
+ {'interface': 'deploy',
+ 'step': 'erase_devices',
+ 'priority': 20},
+ ],
+ 'SpecificHardwareManager': [
+ {'interface': 'deploy',
+ 'step': 'update_firmware',
+ 'priority': 30},
+ {'interface': 'raid',
+ 'step': 'create_configuration',
+ 'priority': 10},
+ ]
+ }
+ }
+
+ @mock.patch('ironic.objects.Port.list_by_node_id',
+ spec_set=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'get_clean_steps',
+ autospec=True)
+ def test_get_clean_steps(self, client_mock, list_ports_mock):
+ client_mock.return_value = {
+ 'command_result': self.clean_steps}
+ list_ports_mock.return_value = self.ports
+
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ response = utils.agent_get_clean_steps(task)
+ client_mock.assert_called_once_with(mock.ANY, task.node,
+ self.ports)
+ self.assertEqual('1', task.node.driver_internal_info[
+ 'hardware_manager_version'])
+
+ # Since steps are returned in dicts, they have non-deterministic
+ # ordering
+ self.assertEqual(2, len(response))
+ self.assertIn(self.clean_steps['clean_steps'][
+ 'GenericHardwareManager'][0], response)
+ self.assertIn(self.clean_steps['clean_steps'][
+ 'SpecificHardwareManager'][0], response)
+
+ @mock.patch('ironic.objects.Port.list_by_node_id',
+ spec_set=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'get_clean_steps',
+ autospec=True)
+ def test_get_clean_steps_missing_steps(self, client_mock,
+ list_ports_mock):
+ del self.clean_steps['clean_steps']
+ client_mock.return_value = {
+ 'command_result': self.clean_steps}
+ list_ports_mock.return_value = self.ports
+
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ self.assertRaises(exception.NodeCleaningFailure,
+ utils.agent_get_clean_steps,
+ task)
+ client_mock.assert_called_once_with(mock.ANY, task.node,
+ self.ports)
+
+ @mock.patch('ironic.objects.Port.list_by_node_id',
+ spec_set=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
+ autospec=True)
+ def test_execute_clean_step(self, client_mock, list_ports_mock):
+ client_mock.return_value = {
+ 'command_status': 'SUCCEEDED'}
+ list_ports_mock.return_value = self.ports
+
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ response = utils.agent_execute_clean_step(
+ task,
+ self.clean_steps['clean_steps']['GenericHardwareManager'][0])
+ self.assertEqual(states.CLEANWAIT, response)
+
+ @mock.patch('ironic.objects.Port.list_by_node_id',
+ spec_set=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
+ autospec=True)
+ def test_execute_clean_step_running(self, client_mock, list_ports_mock):
+ client_mock.return_value = {
+ 'command_status': 'RUNNING'}
+ list_ports_mock.return_value = self.ports
+
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ response = utils.agent_execute_clean_step(
+ task,
+ self.clean_steps['clean_steps']['GenericHardwareManager'][0])
+ self.assertEqual(states.CLEANWAIT, response)
+
+ @mock.patch('ironic.objects.Port.list_by_node_id',
+ spec_set=types.FunctionType)
+ @mock.patch.object(agent_client.AgentClient, 'execute_clean_step',
+ autospec=True)
+ def test_execute_clean_step_version_mismatch(
+ self, client_mock, list_ports_mock):
+ client_mock.return_value = {
+ 'command_status': 'RUNNING'}
+ list_ports_mock.return_value = self.ports
+
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ response = utils.agent_execute_clean_step(
+ task,
+ self.clean_steps['clean_steps']['GenericHardwareManager'][0])
+ self.assertEqual(states.CLEANWAIT, response)
+
+ def test_agent_add_clean_params(self):
+ cfg.CONF.deploy.erase_devices_iterations = 2
+ with task_manager.acquire(
+ self.context, self.node['uuid'], shared=False) as task:
+ utils.agent_add_clean_params(task)
+ self.assertEqual(task.node.driver_internal_info.get(
+ 'agent_erase_devices_iterations'), 2)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.delete_cleaning_ports',
+ autospec=True)
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.create_cleaning_ports',
+ autospec=True)
+ def _test_prepare_inband_cleaning_ports(
+ self, create_mock, delete_mock, return_vif_port_id=True):
+ if return_vif_port_id:
+ create_mock.return_value = {self.ports[0].uuid: 'vif-port-id'}
+ else:
+ create_mock.return_value = {}
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=False) as task:
+ utils.prepare_cleaning_ports(task)
+ create_mock.assert_called_once_with(mock.ANY, task)
+ delete_mock.assert_called_once_with(mock.ANY, task)
+
+ self.ports[0].refresh()
+ self.assertEqual('vif-port-id', self.ports[0].extra['vif_port_id'])
+
+ def test_prepare_inband_cleaning_ports(self):
+ self._test_prepare_inband_cleaning_ports()
+
+ def test_prepare_inband_cleaning_ports_no_vif_port_id(self):
+ self.assertRaises(
+ exception.NodeCleaningFailure,
+ self._test_prepare_inband_cleaning_ports,
+ return_vif_port_id=False)
+
+ @mock.patch('ironic.dhcp.neutron.NeutronDHCPApi.delete_cleaning_ports',
+ autospec=True)
+ def test_tear_down_inband_cleaning_ports(self, neutron_mock):
+ extra_dict = self.ports[0].extra
+ extra_dict['vif_port_id'] = 'vif-port-id'
+ self.ports[0].extra = extra_dict
+ self.ports[0].save()
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=False) as task:
+ utils.tear_down_cleaning_ports(task)
+ neutron_mock.assert_called_once_with(mock.ANY, task)
+
+ self.ports[0].refresh()
+ self.assertNotIn('vif_port_id', self.ports[0].extra)
+
+ @mock.patch.object(pxe.PXEBoot, 'prepare_ramdisk', autospec=True)
+ @mock.patch('ironic.conductor.utils.node_power_action', autospec=True)
+ @mock.patch.object(utils, 'build_agent_options', autospec=True)
+ @mock.patch.object(utils, 'prepare_cleaning_ports', autospec=True)
+ def _test_prepare_inband_cleaning(
+ self, prepare_cleaning_ports_mock,
+ build_options_mock, power_mock, prepare_ramdisk_mock,
+ manage_boot=True):
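+        # Preparing for in-band cleaning creates the cleaning ports, reboots
+        # the node, and only prepares the ramdisk when this driver manages
+        # boot.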
+ build_options_mock.return_value = {'a': 'b'}
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=False) as task:
+ self.assertEqual(
+ states.CLEANWAIT,
+ utils.prepare_inband_cleaning(task, manage_boot=manage_boot))
+ prepare_cleaning_ports_mock.assert_called_once_with(task)
+ power_mock.assert_called_once_with(task, states.REBOOT)
+ self.assertEqual(task.node.driver_internal_info.get(
+ 'agent_erase_devices_iterations'), 1)
+ if manage_boot:
+ prepare_ramdisk_mock.assert_called_once_with(
+ mock.ANY, mock.ANY, {'a': 'b'})
+ build_options_mock.assert_called_once_with(task.node)
+ else:
+ self.assertFalse(prepare_ramdisk_mock.called)
+ self.assertFalse(build_options_mock.called)
+
+ def test_prepare_inband_cleaning(self):
+ self._test_prepare_inband_cleaning()
+
+ def test_prepare_inband_cleaning_manage_boot_false(self):
+ self._test_prepare_inband_cleaning(manage_boot=False)
+
+ @mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk', autospec=True)
+ @mock.patch.object(utils, 'tear_down_cleaning_ports', autospec=True)
+ @mock.patch('ironic.conductor.utils.node_power_action', autospec=True)
+ def _test_tear_down_inband_cleaning(
+ self, power_mock, tear_down_ports_mock,
+ clean_up_ramdisk_mock, manage_boot=True):
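+        # Tear-down mirrors preparation: power the node off, remove the
+        # cleaning ports, and clean up the ramdisk only if boot is managed.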
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=False) as task:
+ utils.tear_down_inband_cleaning(task, manage_boot=manage_boot)
+ power_mock.assert_called_once_with(task, states.POWER_OFF)
+ tear_down_ports_mock.assert_called_once_with(task)
+ if manage_boot:
+ clean_up_ramdisk_mock.assert_called_once_with(
+ task.driver.boot, task)
+ else:
+ self.assertFalse(clean_up_ramdisk_mock.called)
+
+ def test_tear_down_inband_cleaning(self):
+ self._test_tear_down_inband_cleaning(manage_boot=True)
+
+ def test_tear_down_inband_cleaning_manage_boot_false(self):
+ self._test_tear_down_inband_cleaning(manage_boot=False)
+
+ def test_build_agent_options_conf(self):
+ self.config(api_url='api-url', group='conductor')
+ options = utils.build_agent_options(self.node)
+ self.assertEqual('api-url', options['ipa-api-url'])
+ self.assertEqual('fake_agent', options['ipa-driver-name'])
+ self.assertEqual(0, options['coreos.configdrive'])
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ def test_build_agent_options_keystone(self, get_url_mock):
+        self.config(api_url=None, group='conductor')
+ get_url_mock.return_value = 'api-url'
+ options = utils.build_agent_options(self.node)
+ self.assertEqual('api-url', options['ipa-api-url'])
+ self.assertEqual('fake_agent', options['ipa-driver-name'])
+ self.assertEqual(0, options['coreos.configdrive'])
+
+ def test_build_agent_options_root_device_hints(self):
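+        # Root device hints from node properties are flattened into a
+        # single 'key=value' string for the agent ramdisk.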
+ self.config(api_url='api-url', group='conductor')
+ self.node.properties['root_device'] = {'model': 'fake_model'}
+ options = utils.build_agent_options(self.node)
+ self.assertEqual('api-url', options['ipa-api-url'])
+ self.assertEqual('fake_agent', options['ipa-driver-name'])
+ self.assertEqual('model=fake_model', options['root_device'])
+
+
+@mock.patch.object(utils, 'is_block_device', autospec=True)
+@mock.patch.object(utils, 'login_iscsi', lambda *_: None)
+@mock.patch.object(utils, 'discovery', lambda *_: None)
+@mock.patch.object(utils, 'logout_iscsi', lambda *_: None)
+@mock.patch.object(utils, 'delete_iscsi', lambda *_: None)
+@mock.patch.object(utils, 'get_dev', lambda *_: '/dev/fake')
+class ISCSISetupAndHandleErrorsTestCase(tests_base.TestCase):
+
+ def test_no_parent_device(self, mock_ibd):
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ mock_ibd.return_value = False
+ expected_dev = '/dev/fake'
+ with testtools.ExpectedException(exception.InstanceDeployFailure):
+ with utils._iscsi_setup_and_handle_errors(
+ address, port, iqn, lun) as dev:
+ self.assertEqual(expected_dev, dev)
+
+ mock_ibd.assert_called_once_with(expected_dev)
+
+ def test_parent_device_yield(self, mock_ibd):
+ address = '127.0.0.1'
+ port = 3306
+ iqn = 'iqn.xyz'
+ lun = 1
+ expected_dev = '/dev/fake'
+ mock_ibd.return_value = True
+ with utils._iscsi_setup_and_handle_errors(
+ address, port, iqn, lun) as dev:
+ self.assertEqual(expected_dev, dev)
+
+ mock_ibd.assert_called_once_with(expected_dev)
+
+
+class ValidateImagePropertiesTestCase(db_base.DbTestCase):
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test_validate_image_properties_glance_image(self, image_service_mock):
+ node = obj_utils.create_test_node(
+ self.context, driver='fake_pxe',
+ instance_info=INST_INFO_DICT,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ d_info = pxe._parse_instance_info(node)
+ image_service_mock.return_value.show.return_value = {
+ 'properties': {'kernel_id': '1111', 'ramdisk_id': '2222'},
+ }
+
+ utils.validate_image_properties(self.context, d_info,
+ ['kernel_id', 'ramdisk_id'])
+ image_service_mock.assert_called_once_with(
+ node.instance_info['image_source'], context=self.context
+ )
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test_validate_image_properties_glance_image_missing_prop(
+ self, image_service_mock):
+ node = obj_utils.create_test_node(
+ self.context, driver='fake_pxe',
+ instance_info=INST_INFO_DICT,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ d_info = pxe._parse_instance_info(node)
+ image_service_mock.return_value.show.return_value = {
+ 'properties': {'kernel_id': '1111'},
+ }
+
+ self.assertRaises(exception.MissingParameterValue,
+ utils.validate_image_properties,
+ self.context, d_info, ['kernel_id', 'ramdisk_id'])
+ image_service_mock.assert_called_once_with(
+ node.instance_info['image_source'], context=self.context
+ )
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test_validate_image_properties_glance_image_not_authorized(
+ self, image_service_mock):
+ d_info = {'image_source': 'uuid'}
+ show_mock = image_service_mock.return_value.show
+ show_mock.side_effect = exception.ImageNotAuthorized(image_id='uuid')
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.validate_image_properties, self.context,
+ d_info, [])
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test_validate_image_properties_glance_image_not_found(
+ self, image_service_mock):
+ d_info = {'image_source': 'uuid'}
+ show_mock = image_service_mock.return_value.show
+ show_mock.side_effect = exception.ImageNotFound(image_id='uuid')
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.validate_image_properties, self.context,
+ d_info, [])
+
+ def test_validate_image_properties_invalid_image_href(self):
+ d_info = {'image_source': 'emule://uuid'}
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.validate_image_properties, self.context,
+ d_info, [])
+
+ @mock.patch.object(image_service.HttpImageService, 'show', autospec=True)
+ def test_validate_image_properties_nonglance_image(
+ self, image_service_show_mock):
+ instance_info = {
+ 'image_source': 'http://ubuntu',
+ 'kernel': 'kernel_uuid',
+ 'ramdisk': 'file://initrd',
+ 'root_gb': 100,
+ }
+ image_service_show_mock.return_value = {'size': 1, 'properties': {}}
+ node = obj_utils.create_test_node(
+ self.context, driver='fake_pxe',
+ instance_info=instance_info,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ d_info = pxe._parse_instance_info(node)
+ utils.validate_image_properties(self.context, d_info,
+ ['kernel', 'ramdisk'])
+ image_service_show_mock.assert_called_once_with(
+ mock.ANY, instance_info['image_source'])
+
+ @mock.patch.object(image_service.HttpImageService, 'show', autospec=True)
+ def test_validate_image_properties_nonglance_image_validation_fail(
+ self, img_service_show_mock):
+ instance_info = {
+ 'image_source': 'http://ubuntu',
+ 'kernel': 'kernel_uuid',
+ 'ramdisk': 'file://initrd',
+ 'root_gb': 100,
+ }
+ img_service_show_mock.side_effect = iter(
+ [exception.ImageRefValidationFailed(
+ image_href='http://ubuntu', reason='HTTPError')])
+ node = obj_utils.create_test_node(
+ self.context, driver='fake_pxe',
+ instance_info=instance_info,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ d_info = pxe._parse_instance_info(node)
+ self.assertRaises(exception.InvalidParameterValue,
+ utils.validate_image_properties, self.context,
+ d_info, ['kernel', 'ramdisk'])
diff --git a/ironic/tests/unit/drivers/test_fake.py b/ironic/tests/unit/drivers/test_fake.py
new file mode 100644
index 000000000..26ca601db
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_fake.py
@@ -0,0 +1,116 @@
+# coding=utf-8
+
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for Fake driver."""
+
+import mock
+
+from ironic.common import boot_devices
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers import base as driver_base
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+class FakeDriverTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(FakeDriverTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager()
+ self.driver = driver_factory.get_driver("fake")
+ self.node = obj_utils.get_test_node(self.context)
+ self.task = mock.MagicMock(spec=task_manager.TaskManager)
+ self.task.shared = False
+ self.task.node = self.node
+ self.task.driver = self.driver
+
+ def test_driver_interfaces(self):
+ # fake driver implements only 5 out of 6 interfaces
+ self.assertIsInstance(self.driver.power, driver_base.PowerInterface)
+ self.assertIsInstance(self.driver.deploy, driver_base.DeployInterface)
+ self.assertIsInstance(self.driver.vendor, driver_base.VendorInterface)
+ self.assertIsInstance(self.driver.console,
+ driver_base.ConsoleInterface)
+ self.assertIsNone(self.driver.rescue)
+
+ def test_get_properties(self):
+ expected = ['A1', 'A2', 'B1', 'B2']
+ properties = self.driver.get_properties()
+ self.assertEqual(sorted(expected), sorted(properties.keys()))
+
+ def test_power_interface(self):
+ self.assertEqual({}, self.driver.power.get_properties())
+ self.driver.power.validate(self.task)
+ self.driver.power.get_power_state(self.task)
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.power.set_power_state,
+ self.task, states.NOSTATE)
+ self.driver.power.set_power_state(self.task, states.POWER_ON)
+ self.driver.power.reboot(self.task)
+
+ def test_deploy_interface(self):
+ self.assertEqual({}, self.driver.deploy.get_properties())
+ self.driver.deploy.validate(None)
+
+ self.driver.deploy.prepare(None)
+ self.driver.deploy.deploy(None)
+
+ self.driver.deploy.take_over(None)
+
+ self.driver.deploy.clean_up(None)
+ self.driver.deploy.tear_down(None)
+
+ def test_console_interface(self):
+ self.assertEqual({}, self.driver.console.get_properties())
+ self.driver.console.validate(self.task)
+ self.driver.console.start_console(self.task)
+ self.driver.console.stop_console(self.task)
+ self.driver.console.get_console(self.task)
+
+ def test_management_interface_get_properties(self):
+ self.assertEqual({}, self.driver.management.get_properties())
+
+ def test_management_interface_validate(self):
+ self.driver.management.validate(self.task)
+
+ def test_management_interface_set_boot_device_good(self):
+ self.driver.management.set_boot_device(self.task, boot_devices.PXE)
+
+ def test_management_interface_set_boot_device_fail(self):
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.management.set_boot_device, self.task,
+ 'not-supported')
+
+ def test_management_interface_get_supported_boot_devices(self):
+ expected = [boot_devices.PXE]
+ self.assertEqual(
+ expected,
+ self.driver.management.get_supported_boot_devices(self.task))
+
+ def test_management_interface_get_boot_device(self):
+ expected = {'boot_device': boot_devices.PXE, 'persistent': False}
+ self.assertEqual(expected,
+ self.driver.management.get_boot_device(self.task))
+
+ def test_inspect_interface(self):
+ self.assertEqual({}, self.driver.inspect.get_properties())
+ self.driver.inspect.validate(self.task)
+ self.driver.inspect.inspect_hardware(self.task)
diff --git a/ironic/tests/unit/drivers/test_iboot.py b/ironic/tests/unit/drivers/test_iboot.py
new file mode 100644
index 000000000..2ad9ed09f
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_iboot.py
@@ -0,0 +1,384 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2014 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for iBoot PDU driver module."""
+
+import types
+
+import mock
+
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules import iboot
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+INFO_DICT = db_utils.get_test_iboot_info()
+
+
+class IBootPrivateMethodTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IBootPrivateMethodTestCase, self).setUp()
+ self.config(max_retry=0, group='iboot')
+ self.config(retry_interval=0, group='iboot')
+
+ def test__parse_driver_info_good(self):
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ info = iboot._parse_driver_info(node)
+ self.assertIsNotNone(info.get('address'))
+ self.assertIsNotNone(info.get('username'))
+ self.assertIsNotNone(info.get('password'))
+ self.assertIsNotNone(info.get('port'))
+ self.assertIsNotNone(info.get('relay_id'))
+
+ def test__parse_driver_info_good_with_explicit_port(self):
+ info = dict(INFO_DICT)
+ info['iboot_port'] = '1234'
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=info)
+ info = iboot._parse_driver_info(node)
+ self.assertEqual(1234, info.get('port'))
+
+ def test__parse_driver_info_good_with_explicit_relay_id(self):
+ info = dict(INFO_DICT)
+ info['iboot_relay_id'] = '2'
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=info)
+ info = iboot._parse_driver_info(node)
+ self.assertEqual(2, info.get('relay_id'))
+
+ def test__parse_driver_info_missing_address(self):
+ info = dict(INFO_DICT)
+ del info['iboot_address']
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ iboot._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_username(self):
+ info = dict(INFO_DICT)
+ del info['iboot_username']
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ iboot._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_password(self):
+ info = dict(INFO_DICT)
+ del info['iboot_password']
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ iboot._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_bad_port(self):
+ info = dict(INFO_DICT)
+ info['iboot_port'] = 'not-integer'
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=info)
+ self.assertRaises(exception.InvalidParameterValue,
+ iboot._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_bad_relay_id(self):
+ info = dict(INFO_DICT)
+ info['iboot_relay_id'] = 'not-integer'
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=info)
+ self.assertRaises(exception.InvalidParameterValue,
+ iboot._parse_driver_info,
+ node)
+
+ @mock.patch.object(iboot, '_get_connection', autospec=True)
+ def test__power_status_on(self, mock_get_conn):
+ mock_connection = mock.MagicMock(spec_set=['get_relays'])
+ mock_connection.get_relays.return_value = [True]
+ mock_get_conn.return_value = mock_connection
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ info = iboot._parse_driver_info(node)
+
+ status = iboot._power_status(info)
+
+ self.assertEqual(states.POWER_ON, status)
+ mock_get_conn.assert_called_once_with(info)
+ mock_connection.get_relays.assert_called_once_with()
+
+ @mock.patch.object(iboot, '_get_connection', autospec=True)
+ def test__power_status_off(self, mock_get_conn):
+ mock_connection = mock.MagicMock(spec_set=['get_relays'])
+ mock_connection.get_relays.return_value = [False]
+ mock_get_conn.return_value = mock_connection
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ info = iboot._parse_driver_info(node)
+
+ status = iboot._power_status(info)
+
+ self.assertEqual(states.POWER_OFF, status)
+ mock_get_conn.assert_called_once_with(info)
+ mock_connection.get_relays.assert_called_once_with()
+
+ @mock.patch.object(iboot, '_get_connection', autospec=True)
+ def test__power_status_exception(self, mock_get_conn):
+ mock_connection = mock.MagicMock(spec_set=['get_relays'])
+ mock_connection.get_relays.return_value = None
+ mock_get_conn.return_value = mock_connection
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ info = iboot._parse_driver_info(node)
+
+ status = iboot._power_status(info)
+ self.assertEqual(states.ERROR, status)
+ mock_get_conn.assert_called_once_with(info)
+ mock_connection.get_relays.assert_called_once_with()
+
+ @mock.patch.object(iboot, '_get_connection', autospec=True)
+ def test__power_status_exception_type_error(self, mock_get_conn):
+ mock_connection = mock.MagicMock(spec_set=['get_relays'])
+ side_effect = TypeError("Surprise!")
+ mock_connection.get_relays.side_effect = side_effect
+
+ mock_get_conn.return_value = mock_connection
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ info = iboot._parse_driver_info(node)
+
+ status = iboot._power_status(info)
+ self.assertEqual(states.ERROR, status)
+ mock_get_conn.assert_called_once_with(info)
+ mock_connection.get_relays.assert_called_once_with()
+
+ @mock.patch.object(iboot, '_get_connection', autospec=True)
+ def test__power_status_exception_index_error(self, mock_get_conn):
+ mock_connection = mock.MagicMock(spec_set=['get_relays'])
+ side_effect = IndexError("Gotcha!")
+ mock_connection.get_relays.side_effect = side_effect
+
+ mock_get_conn.return_value = mock_connection
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ info = iboot._parse_driver_info(node)
+ status = iboot._power_status(info)
+ self.assertEqual(states.ERROR, status)
+
+ mock_get_conn.assert_called_once_with(info)
+ mock_connection.get_relays.assert_called_once_with()
+
+ @mock.patch.object(iboot, '_get_connection', autospec=True)
+ def test__power_status_error(self, mock_get_conn):
+ mock_connection = mock.MagicMock(spec_set=['get_relays'])
+ mock_connection.get_relays.return_value = list()
+ mock_get_conn.return_value = mock_connection
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ info = iboot._parse_driver_info(node)
+
+ status = iboot._power_status(info)
+
+ self.assertEqual(states.ERROR, status)
+ mock_get_conn.assert_called_once_with(info)
+ mock_connection.get_relays.assert_called_once_with()
+
+ @mock.patch.object(iboot, '_get_connection', autospec=True)
+ def test__power_status_retries(self, mock_get_conn):
+ self.config(max_retry=1, group='iboot')
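+        # With max_retry=1 the relay status is queried twice (one retry)
+        # before giving up and reporting ERROR.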
+
+ mock_connection = mock.MagicMock(spec_set=['get_relays'])
+ side_effect = TypeError("Surprise!")
+ mock_connection.get_relays.side_effect = side_effect
+
+ mock_get_conn.return_value = mock_connection
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ info = iboot._parse_driver_info(node)
+
+ status = iboot._power_status(info)
+ self.assertEqual(states.ERROR, status)
+ mock_get_conn.assert_called_once_with(info)
+ self.assertEqual(2, mock_connection.get_relays.call_count)
+
+
+class IBootDriverTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IBootDriverTestCase, self).setUp()
+ self.config(max_retry=0, group='iboot')
+ self.config(retry_interval=0, group='iboot')
+ mgr_utils.mock_the_extension_manager(driver='fake_iboot')
+ self.driver = driver_factory.get_driver('fake_iboot')
+ self.node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_iboot',
+ driver_info=INFO_DICT)
+ self.info = iboot._parse_driver_info(self.node)
+
+ def test_get_properties(self):
+ expected = iboot.COMMON_PROPERTIES
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ @mock.patch.object(iboot, '_power_status', autospec=True)
+ @mock.patch.object(iboot, '_switch', autospec=True)
+ def test_set_power_state_good(self, mock_switch, mock_power_status):
+ mock_power_status.return_value = states.POWER_ON
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+
+        # ensure functions were called with the expected parameters
+ mock_switch.assert_called_once_with(self.info, True)
+ mock_power_status.assert_called_once_with(self.info)
+
+ @mock.patch.object(iboot, '_power_status', autospec=True)
+ @mock.patch.object(iboot, '_switch', autospec=True)
+ def test_set_power_state_bad(self, mock_switch, mock_power_status):
+ mock_power_status.return_value = states.POWER_OFF
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task, states.POWER_ON)
+
+        # ensure functions were called with the expected parameters
+ mock_switch.assert_called_once_with(self.info, True)
+ mock_power_status.assert_called_once_with(self.info)
+
+ @mock.patch.object(iboot, '_power_status', autospec=True)
+ @mock.patch.object(iboot, '_switch', autospec=True)
+ def test_set_power_state_invalid_parameter(self, mock_switch,
+ mock_power_status):
+ mock_power_status.return_value = states.POWER_ON
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.set_power_state,
+ task, states.NOSTATE)
+
+ @mock.patch.object(iboot, '_power_status', autospec=True)
+ @mock.patch.object(iboot, '_switch', spec_set=types.FunctionType)
+ def test_reboot_good(self, mock_switch, mock_power_status):
+ manager = mock.MagicMock(spec_set=['switch'])
+ mock_power_status.return_value = states.POWER_ON
+
+ manager.attach_mock(mock_switch, 'switch')
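+        # A reboot must switch the relay off and then back on, in that order.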
+ expected = [mock.call.switch(self.info, False),
+ mock.call.switch(self.info, True)]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.power.reboot(task)
+
+ self.assertEqual(manager.mock_calls, expected)
+
+ @mock.patch.object(iboot, '_power_status', autospec=True)
+ @mock.patch.object(iboot, '_switch', spec_set=types.FunctionType)
+ def test_reboot_bad(self, mock_switch, mock_power_status):
+ manager = mock.MagicMock(spec_set=['switch'])
+ mock_power_status.return_value = states.POWER_OFF
+
+ manager.attach_mock(mock_switch, 'switch')
+ expected = [mock.call.switch(self.info, False),
+ mock.call.switch(self.info, True)]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.reboot, task)
+
+ self.assertEqual(manager.mock_calls, expected)
+
+ @mock.patch.object(iboot, '_power_status', autospec=True)
+ @mock.patch.object(iboot, '_get_connection', autospec=True)
+ def test__switch_retries(self, mock_get_conn, mock_power_status):
+ self.config(max_retry=1, group='iboot')
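+        # With max_retry=1 the failing switch call is attempted twice in
+        # total.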
+ mock_power_status.return_value = states.POWER_ON
+
+ mock_connection = mock.MagicMock(spec_set=['switch'])
+ side_effect = TypeError("Surprise!")
+ mock_connection.switch.side_effect = side_effect
+ mock_get_conn.return_value = mock_connection
+
+ iboot._switch(self.info, False)
+ self.assertEqual(2, mock_connection.switch.call_count)
+
+ @mock.patch.object(iboot, '_power_status', autospec=True)
+ def test_get_power_state(self, mock_power_status):
+ mock_power_status.return_value = states.POWER_ON
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ state = task.driver.power.get_power_state(task)
+ self.assertEqual(state, states.POWER_ON)
+
+        # ensure functions were called with the expected parameters
+ mock_power_status.assert_called_once_with(self.info)
+
+ @mock.patch.object(iboot, '_parse_driver_info', autospec=True)
+ def test_validate_good(self, parse_drv_info_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.power.validate(task)
+ self.assertEqual(1, parse_drv_info_mock.call_count)
+
+ @mock.patch.object(iboot, '_parse_driver_info', autospec=True)
+ def test_validate_fails(self, parse_drv_info_mock):
+ side_effect = iter([exception.InvalidParameterValue("Bad input")])
+ parse_drv_info_mock.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.validate, task)
+ self.assertEqual(1, parse_drv_info_mock.call_count)
diff --git a/ironic/tests/unit/drivers/test_image_cache.py b/ironic/tests/unit/drivers/test_image_cache.py
new file mode 100644
index 000000000..447db0a99
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_image_cache.py
@@ -0,0 +1,697 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright 2014 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Tests for ImageCache class and helper functions."""
+
+import datetime
+import os
+import tempfile
+import time
+import uuid
+
+import mock
+from oslo_utils import uuidutils
+import six
+
+from ironic.common import exception
+from ironic.common import image_service
+from ironic.common import images
+from ironic.common import utils
+from ironic.drivers.modules import image_cache
+from ironic.tests.unit import base
+
+
+def touch(filename):
+ open(filename, 'w').close()
+
+
+class TestImageCacheFetch(base.TestCase):
+
+ def setUp(self):
+ super(TestImageCacheFetch, self).setUp()
+ self.master_dir = tempfile.mkdtemp()
+ self.cache = image_cache.ImageCache(self.master_dir, None, None)
+ self.dest_dir = tempfile.mkdtemp()
+ self.dest_path = os.path.join(self.dest_dir, 'dest')
+ self.uuid = uuidutils.generate_uuid()
+ self.master_path = os.path.join(self.master_dir, self.uuid)
+
+ @mock.patch.object(image_cache, '_fetch', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, 'clean_up', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_download_image',
+ autospec=True)
+ def test_fetch_image_no_master_dir(self, mock_download, mock_clean_up,
+ mock_fetch):
+ self.cache.master_dir = None
+ self.cache.fetch_image(self.uuid, self.dest_path)
+ self.assertFalse(mock_download.called)
+ mock_fetch.assert_called_once_with(
+ None, self.uuid, self.dest_path, True)
+ self.assertFalse(mock_clean_up.called)
+
+ @mock.patch.object(image_cache.ImageCache, 'clean_up', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_download_image',
+ autospec=True)
+ @mock.patch.object(os, 'link', autospec=True)
+ @mock.patch.object(image_cache, '_delete_dest_path_if_stale',
+ return_value=True, autospec=True)
+ @mock.patch.object(image_cache, '_delete_master_path_if_stale',
+ return_value=True, autospec=True)
+ def test_fetch_image_dest_and_master_uptodate(
+ self, mock_cache_upd, mock_dest_upd, mock_link, mock_download,
+ mock_clean_up):
+ self.cache.fetch_image(self.uuid, self.dest_path)
+ mock_cache_upd.assert_called_once_with(self.master_path, self.uuid,
+ None)
+ mock_dest_upd.assert_called_once_with(self.master_path, self.dest_path)
+ self.assertFalse(mock_link.called)
+ self.assertFalse(mock_download.called)
+ self.assertFalse(mock_clean_up.called)
+
+ @mock.patch.object(image_cache.ImageCache, 'clean_up', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_download_image',
+ autospec=True)
+ @mock.patch.object(os, 'link', autospec=True)
+ @mock.patch.object(image_cache, '_delete_dest_path_if_stale',
+ return_value=False, autospec=True)
+ @mock.patch.object(image_cache, '_delete_master_path_if_stale',
+ return_value=True, autospec=True)
+ def test_fetch_image_dest_out_of_date(
+ self, mock_cache_upd, mock_dest_upd, mock_link, mock_download,
+ mock_clean_up):
+ self.cache.fetch_image(self.uuid, self.dest_path)
+ mock_cache_upd.assert_called_once_with(self.master_path, self.uuid,
+ None)
+ mock_dest_upd.assert_called_once_with(self.master_path, self.dest_path)
+ mock_link.assert_called_once_with(self.master_path, self.dest_path)
+ self.assertFalse(mock_download.called)
+ self.assertFalse(mock_clean_up.called)
+
+ @mock.patch.object(image_cache.ImageCache, 'clean_up', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_download_image',
+ autospec=True)
+ @mock.patch.object(os, 'link', autospec=True)
+ @mock.patch.object(image_cache, '_delete_dest_path_if_stale',
+ return_value=True, autospec=True)
+ @mock.patch.object(image_cache, '_delete_master_path_if_stale',
+ return_value=False, autospec=True)
+ def test_fetch_image_master_out_of_date(
+ self, mock_cache_upd, mock_dest_upd, mock_link, mock_download,
+ mock_clean_up):
+ self.cache.fetch_image(self.uuid, self.dest_path)
+ mock_cache_upd.assert_called_once_with(self.master_path, self.uuid,
+ None)
+ mock_dest_upd.assert_called_once_with(self.master_path, self.dest_path)
+ self.assertFalse(mock_link.called)
+ mock_download.assert_called_once_with(
+ self.cache, self.uuid, self.master_path, self.dest_path,
+ ctx=None, force_raw=True)
+ mock_clean_up.assert_called_once_with(self.cache)
+
+ @mock.patch.object(image_cache.ImageCache, 'clean_up', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_download_image',
+ autospec=True)
+ @mock.patch.object(os, 'link', autospec=True)
+ @mock.patch.object(image_cache, '_delete_dest_path_if_stale',
+ return_value=True, autospec=True)
+ @mock.patch.object(image_cache, '_delete_master_path_if_stale',
+ return_value=False, autospec=True)
+ def test_fetch_image_both_master_and_dest_out_of_date(
+ self, mock_cache_upd, mock_dest_upd, mock_link, mock_download,
+ mock_clean_up):
+ self.cache.fetch_image(self.uuid, self.dest_path)
+ mock_cache_upd.assert_called_once_with(self.master_path, self.uuid,
+ None)
+ mock_dest_upd.assert_called_once_with(self.master_path, self.dest_path)
+ self.assertFalse(mock_link.called)
+ mock_download.assert_called_once_with(
+ self.cache, self.uuid, self.master_path, self.dest_path,
+ ctx=None, force_raw=True)
+ mock_clean_up.assert_called_once_with(self.cache)
+
+ @mock.patch.object(image_cache.ImageCache, 'clean_up', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_download_image',
+ autospec=True)
+ def test_fetch_image_not_uuid(self, mock_download, mock_clean_up):
+ href = u'http://abc.com/ubuntu.qcow2'
+ href_encoded = href.encode('utf-8') if six.PY2 else href
+ href_converted = str(uuid.uuid5(uuid.NAMESPACE_URL, href_encoded))
+ master_path = os.path.join(self.master_dir, href_converted)
+ self.cache.fetch_image(href, self.dest_path)
+ mock_download.assert_called_once_with(
+ self.cache, href, master_path, self.dest_path,
+ ctx=None, force_raw=True)
+ self.assertTrue(mock_clean_up.called)
+
+ @mock.patch.object(image_cache, '_fetch', autospec=True)
+ def test__download_image(self, mock_fetch):
+ def _fake_fetch(ctx, uuid, tmp_path, *args):
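+            # The fake fetch verifies the download targets a temporary path
+            # that is neither the destination file nor inside the master dir,
+            # then writes marker content there.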
+ self.assertEqual(self.uuid, uuid)
+ self.assertNotEqual(self.dest_path, tmp_path)
+ self.assertNotEqual(os.path.dirname(tmp_path), self.master_dir)
+ with open(tmp_path, 'w') as fp:
+ fp.write("TEST")
+
+ mock_fetch.side_effect = _fake_fetch
+ self.cache._download_image(self.uuid, self.master_path, self.dest_path)
+ self.assertTrue(os.path.isfile(self.dest_path))
+ self.assertTrue(os.path.isfile(self.master_path))
+ self.assertEqual(os.stat(self.dest_path).st_ino,
+ os.stat(self.master_path).st_ino)
+ with open(self.dest_path) as fp:
+ self.assertEqual("TEST", fp.read())
+
+
+@mock.patch.object(os, 'unlink', autospec=True)
+class TestUpdateImages(base.TestCase):
+
+ def setUp(self):
+ super(TestUpdateImages, self).setUp()
+ self.master_dir = tempfile.mkdtemp()
+ self.dest_dir = tempfile.mkdtemp()
+ self.dest_path = os.path.join(self.dest_dir, 'dest')
+ self.uuid = uuidutils.generate_uuid()
+ self.master_path = os.path.join(self.master_dir, self.uuid)
+
+ @mock.patch.object(os.path, 'exists', return_value=False, autospec=True)
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test__delete_master_path_if_stale_glance_img_not_cached(
+ self, mock_gis, mock_path_exists, mock_unlink):
+ res = image_cache._delete_master_path_if_stale(self.master_path,
+ self.uuid, None)
+ self.assertFalse(mock_gis.called)
+ self.assertFalse(mock_unlink.called)
+ mock_path_exists.assert_called_once_with(self.master_path)
+ self.assertFalse(res)
+
+ @mock.patch.object(os.path, 'exists', return_value=True, autospec=True)
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test__delete_master_path_if_stale_glance_img(
+ self, mock_gis, mock_path_exists, mock_unlink):
+ res = image_cache._delete_master_path_if_stale(self.master_path,
+ self.uuid, None)
+ self.assertFalse(mock_gis.called)
+ self.assertFalse(mock_unlink.called)
+ mock_path_exists.assert_called_once_with(self.master_path)
+ self.assertTrue(res)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test__delete_master_path_if_stale_no_master(self, mock_gis,
+ mock_unlink):
+ res = image_cache._delete_master_path_if_stale(self.master_path,
+ 'http://11', None)
+ self.assertFalse(mock_gis.called)
+ self.assertFalse(mock_unlink.called)
+ self.assertFalse(res)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test__delete_master_path_if_stale_no_updated_at(self, mock_gis,
+ mock_unlink):
+ touch(self.master_path)
+ href = 'http://awesomefreeimages.al/img111'
+ mock_gis.return_value.show.return_value = {}
+ res = image_cache._delete_master_path_if_stale(self.master_path, href,
+ None)
+ mock_gis.assert_called_once_with(href, context=None)
+ self.assertFalse(mock_unlink.called)
+ self.assertTrue(res)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test__delete_master_path_if_stale_master_up_to_date(self, mock_gis,
+ mock_unlink):
+ touch(self.master_path)
+ href = 'http://awesomefreeimages.al/img999'
+ mock_gis.return_value.show.return_value = {
+ 'updated_at': datetime.datetime(1999, 11, 15, 8, 12, 31)
+ }
+ res = image_cache._delete_master_path_if_stale(self.master_path, href,
+ None)
+ mock_gis.assert_called_once_with(href, context=None)
+ self.assertFalse(mock_unlink.called)
+ self.assertTrue(res)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test__delete_master_path_if_stale_master_same_time(self, mock_gis,
+ mock_unlink):
+        # When the times are identical, the cached file should not be deleted
+ touch(self.master_path)
+ mtime = utils.unix_file_modification_datetime(self.master_path)
+ href = 'http://awesomefreeimages.al/img999'
+ mock_gis.return_value.show.return_value = {
+ 'updated_at': mtime
+ }
+ res = image_cache._delete_master_path_if_stale(self.master_path, href,
+ None)
+ mock_gis.assert_called_once_with(href, context=None)
+ self.assertFalse(mock_unlink.called)
+ self.assertTrue(res)
+
+ @mock.patch.object(image_service, 'get_image_service', autospec=True)
+ def test__delete_master_path_if_stale_out_of_date(self, mock_gis,
+ mock_unlink):
+ touch(self.master_path)
+ href = 'http://awesomefreeimages.al/img999'
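+        # An 'updated_at' in the future means the image changed after the
+        # master copy was cached, so the master copy must be deleted.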
+ mock_gis.return_value.show.return_value = {
+ 'updated_at': datetime.datetime((datetime.datetime.utcnow().year
+ + 1), 11, 15, 8, 12, 31)
+ }
+ res = image_cache._delete_master_path_if_stale(self.master_path, href,
+ None)
+ mock_gis.assert_called_once_with(href, context=None)
+ mock_unlink.assert_called_once_with(self.master_path)
+ self.assertFalse(res)
+
+ def test__delete_dest_path_if_stale_no_dest(self, mock_unlink):
+ res = image_cache._delete_dest_path_if_stale(self.master_path,
+ self.dest_path)
+ self.assertFalse(mock_unlink.called)
+ self.assertFalse(res)
+
+ def test__delete_dest_path_if_stale_no_master(self, mock_unlink):
+ touch(self.dest_path)
+ res = image_cache._delete_dest_path_if_stale(self.master_path,
+ self.dest_path)
+ mock_unlink.assert_called_once_with(self.dest_path)
+ self.assertFalse(res)
+
+ def test__delete_dest_path_if_stale_out_of_date(self, mock_unlink):
+ touch(self.master_path)
+ touch(self.dest_path)
+ res = image_cache._delete_dest_path_if_stale(self.master_path,
+ self.dest_path)
+ mock_unlink.assert_called_once_with(self.dest_path)
+ self.assertFalse(res)
+
+ def test__delete_dest_path_if_stale_up_to_date(self, mock_unlink):
+ touch(self.master_path)
+ os.link(self.master_path, self.dest_path)
+ res = image_cache._delete_dest_path_if_stale(self.master_path,
+ self.dest_path)
+ self.assertFalse(mock_unlink.called)
+ self.assertTrue(res)
+
+
+class TestImageCacheCleanUp(base.TestCase):
+
+ def setUp(self):
+ super(TestImageCacheCleanUp, self).setUp()
+ self.master_dir = tempfile.mkdtemp()
+ self.cache = image_cache.ImageCache(self.master_dir,
+ cache_size=10,
+ cache_ttl=600)
+
+ @mock.patch.object(image_cache.ImageCache, '_clean_up_ensure_cache_size',
+ autospec=True)
+ def test_clean_up_old_deleted(self, mock_clean_size):
+ mock_clean_size.return_value = None
+ files = [os.path.join(self.master_dir, str(i))
+ for i in range(2)]
+ for filename in files:
+ touch(filename)
+ # NOTE(dtantsur): Can't alter ctime, have to set mtime to the future
+ new_current_time = time.time() + 900
+ os.utime(files[0], (new_current_time - 100, new_current_time - 100))
+ with mock.patch.object(time, 'time', lambda: new_current_time):
+ self.cache.clean_up()
+
+ mock_clean_size.assert_called_once_with(self.cache, mock.ANY, None)
+ survived = mock_clean_size.call_args[0][1]
+ self.assertEqual(1, len(survived))
+ self.assertEqual(files[0], survived[0][0])
+ # NOTE(dtantsur): do not compare milliseconds
+ self.assertEqual(int(new_current_time - 100), int(survived[0][1]))
+ self.assertEqual(int(new_current_time - 100),
+ int(survived[0][2].st_mtime))
+
+ @mock.patch.object(image_cache.ImageCache, '_clean_up_ensure_cache_size',
+ autospec=True)
+ def test_clean_up_old_with_amount(self, mock_clean_size):
+ files = [os.path.join(self.master_dir, str(i))
+ for i in range(2)]
+ for filename in files:
+            with open(filename, 'wb') as fp:
+                fp.write(b'X')
+ new_current_time = time.time() + 900
+ with mock.patch.object(time, 'time', lambda: new_current_time):
+ self.cache.clean_up(amount=1)
+
+ self.assertFalse(mock_clean_size.called)
+ # Exactly one file is expected to be deleted
+ self.assertTrue(any(os.path.exists(f) for f in files))
+ self.assertFalse(all(os.path.exists(f) for f in files))
+
+ @mock.patch.object(image_cache.ImageCache, '_clean_up_ensure_cache_size',
+ autospec=True)
+ def test_clean_up_files_with_links_untouched(self, mock_clean_size):
+ mock_clean_size.return_value = None
+ files = [os.path.join(self.master_dir, str(i))
+ for i in range(2)]
+ for filename in files:
+ touch(filename)
+ os.link(filename, filename + 'copy')
+
+ new_current_time = time.time() + 900
+ with mock.patch.object(time, 'time', lambda: new_current_time):
+ self.cache.clean_up()
+
+ for filename in files:
+ self.assertTrue(os.path.exists(filename))
+ mock_clean_size.assert_called_once_with(mock.ANY, [], None)
+
+ @mock.patch.object(image_cache.ImageCache, '_clean_up_too_old',
+ autospec=True)
+ def test_clean_up_ensure_cache_size(self, mock_clean_ttl):
+ mock_clean_ttl.side_effect = lambda *xx: xx[1:]
+ # NOTE(dtantsur): Cache size in test is 10 bytes, we create 6 files
+ # with 3 bytes each and expect 3 to be deleted
+ files = [os.path.join(self.master_dir, str(i))
+ for i in range(6)]
+ for filename in files:
+ with open(filename, 'w') as fp:
+ fp.write('123')
+ # NOTE(dtantsur): Make 3 files 'newer' to check that
+ # old ones are deleted first
+ new_current_time = time.time() + 100
+ for filename in files[:3]:
+ os.utime(filename, (new_current_time, new_current_time))
+
+ with mock.patch.object(time, 'time', lambda: new_current_time):
+ self.cache.clean_up()
+
+ for filename in files[:3]:
+ self.assertTrue(os.path.exists(filename))
+ for filename in files[3:]:
+ self.assertFalse(os.path.exists(filename))
+
+ mock_clean_ttl.assert_called_once_with(mock.ANY, mock.ANY, None)
+
+ @mock.patch.object(image_cache.ImageCache, '_clean_up_too_old',
+ autospec=True)
+ def test_clean_up_ensure_cache_size_with_amount(self, mock_clean_ttl):
+ mock_clean_ttl.side_effect = lambda *xx: xx[1:]
+ # NOTE(dtantsur): Cache size in test is 10 bytes, we create 6 files
+ # with 3 bytes each and set amount to be 15, 5 files are to be deleted
+ files = [os.path.join(self.master_dir, str(i))
+ for i in range(6)]
+ for filename in files:
+ with open(filename, 'w') as fp:
+ fp.write('123')
+ # NOTE(dtantsur): Make 1 file 'newer' to check that
+ # old ones are deleted first
+ new_current_time = time.time() + 100
+ os.utime(files[0], (new_current_time, new_current_time))
+
+ with mock.patch.object(time, 'time', lambda: new_current_time):
+ self.cache.clean_up(amount=15)
+
+ self.assertTrue(os.path.exists(files[0]))
+ for filename in files[5:]:
+ self.assertFalse(os.path.exists(filename))
+
+ mock_clean_ttl.assert_called_once_with(mock.ANY, mock.ANY, 15)
+
+ @mock.patch.object(image_cache.LOG, 'info', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_clean_up_too_old',
+ autospec=True)
+ def test_clean_up_cache_still_large(self, mock_clean_ttl, mock_log):
+ mock_clean_ttl.side_effect = lambda *xx: xx[1:]
+ # NOTE(dtantsur): Cache size in test is 10 bytes, we create 2 files
+        # that cannot be deleted and expect this to be logged
+ files = [os.path.join(self.master_dir, str(i))
+ for i in range(2)]
+ for filename in files:
+ with open(filename, 'w') as fp:
+ fp.write('123')
+ os.link(filename, filename + 'copy')
+
+ self.cache.clean_up()
+
+ for filename in files:
+ self.assertTrue(os.path.exists(filename))
+ self.assertTrue(mock_log.called)
+ mock_clean_ttl.assert_called_once_with(mock.ANY, mock.ANY, None)
+
+ @mock.patch.object(utils, 'rmtree_without_raise', autospec=True)
+ @mock.patch.object(image_cache, '_fetch', autospec=True)
+ def test_temp_images_not_cleaned(self, mock_fetch, mock_rmtree):
+ def _fake_fetch(ctx, uuid, tmp_path, *args):
+ with open(tmp_path, 'w') as fp:
+ fp.write("TEST" * 10)
+
+ # assume cleanup from another thread at this moment
+ self.cache.clean_up()
+ self.assertTrue(os.path.exists(tmp_path))
+
+ mock_fetch.side_effect = _fake_fetch
+ master_path = os.path.join(self.master_dir, 'uuid')
+ dest_path = os.path.join(tempfile.mkdtemp(), 'dest')
+ self.cache._download_image('uuid', master_path, dest_path)
+ self.assertTrue(mock_rmtree.called)
+
+ @mock.patch.object(utils, 'rmtree_without_raise', autospec=True)
+ @mock.patch.object(image_cache, '_fetch', autospec=True)
+ def test_temp_dir_exception(self, mock_fetch, mock_rmtree):
+ mock_fetch.side_effect = exception.IronicException
+ self.assertRaises(exception.IronicException,
+ self.cache._download_image,
+ 'uuid', 'fake', 'fake')
+ self.assertTrue(mock_rmtree.called)
+
+ @mock.patch.object(image_cache.LOG, 'warn', autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_clean_up_too_old',
+ autospec=True)
+ @mock.patch.object(image_cache.ImageCache, '_clean_up_ensure_cache_size',
+ autospec=True)
+ def test_clean_up_amount_not_satisfied(self, mock_clean_size,
+ mock_clean_ttl, mock_log):
+ mock_clean_ttl.side_effect = lambda *xx: xx[1:]
+ mock_clean_size.side_effect = lambda self, listing, amount: amount
+ self.cache.clean_up(amount=15)
+ self.assertTrue(mock_log.called)
+
+ def test_cleanup_ordering(self):
+
+ class ParentCache(image_cache.ImageCache):
+ def __init__(self):
+ super(ParentCache, self).__init__('a', 1, 1, None)
+
+ @image_cache.cleanup(priority=10000)
+ class Cache1(ParentCache):
+ pass
+
+ @image_cache.cleanup(priority=20000)
+ class Cache2(ParentCache):
+ pass
+
+ @image_cache.cleanup(priority=10000)
+ class Cache3(ParentCache):
+ pass
+
+ self.assertEqual(image_cache._cache_cleanup_list[0][1], Cache2)
+
+        # The order of caches with the same priority is not deterministic.
+ item_possibilities = [Cache1, Cache3]
+ second_item_actual = image_cache._cache_cleanup_list[1][1]
+ self.assertIn(second_item_actual, item_possibilities)
+ item_possibilities.remove(second_item_actual)
+ third_item_actual = image_cache._cache_cleanup_list[2][1]
+ self.assertEqual(item_possibilities[0], third_item_actual)
+
+
+@mock.patch.object(image_cache, '_cache_cleanup_list', autospec=True)
+@mock.patch.object(os, 'statvfs', autospec=True)
+@mock.patch.object(image_service, 'get_image_service', autospec=True)
+class CleanupImageCacheTestCase(base.TestCase):
+
+ def setUp(self):
+ super(CleanupImageCacheTestCase, self).setUp()
+ self.mock_first_cache = mock.MagicMock(spec_set=[])
+ self.mock_second_cache = mock.MagicMock(spec_set=[])
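+        # Register two fake caches with cleanup priorities 50 and 20;
+        # clean_up_caches is expected to walk them in this order.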
+ self.cache_cleanup_list = [(50, self.mock_first_cache),
+ (20, self.mock_second_cache)]
+ self.mock_first_cache.return_value.master_dir = 'first_cache_dir'
+ self.mock_second_cache.return_value.master_dir = 'second_cache_dir'
+
+ def test_no_clean_up(self, mock_image_service, mock_statvfs,
+ cache_cleanup_list_mock):
+ # Enough space found - no clean up
+ mock_show = mock_image_service.return_value.show
+ mock_show.return_value = dict(size=42)
+ mock_statvfs.return_value = mock.MagicMock(
+ spec_set=['f_frsize', 'f_bavail'], f_frsize=1, f_bavail=1024)
+
+ cache_cleanup_list_mock.__iter__.return_value = self.cache_cleanup_list
+
+ image_cache.clean_up_caches(None, 'master_dir', [('uuid', 'path')])
+
+ mock_show.assert_called_once_with('uuid')
+ mock_statvfs.assert_called_once_with('master_dir')
+ self.assertFalse(self.mock_first_cache.return_value.clean_up.called)
+ self.assertFalse(self.mock_second_cache.return_value.clean_up.called)
+
+ @mock.patch.object(os, 'stat', autospec=True)
+ def test_one_clean_up(self, mock_stat, mock_image_service, mock_statvfs,
+ cache_cleanup_list_mock):
+ # Not enough space, first cache clean up is enough
+ mock_stat.return_value.st_dev = 1
+ mock_show = mock_image_service.return_value.show
+ mock_show.return_value = dict(size=42)
+ mock_statvfs.side_effect = [
+ mock.MagicMock(f_frsize=1, f_bavail=1,
+ spec_set=['f_frsize', 'f_bavail']),
+ mock.MagicMock(f_frsize=1, f_bavail=1024,
+ spec_set=['f_frsize', 'f_bavail'])
+ ]
+ cache_cleanup_list_mock.__iter__.return_value = self.cache_cleanup_list
+ image_cache.clean_up_caches(None, 'master_dir', [('uuid', 'path')])
+
+ mock_show.assert_called_once_with('uuid')
+ mock_statvfs.assert_called_with('master_dir')
+ self.assertEqual(2, mock_statvfs.call_count)
+ self.mock_first_cache.return_value.clean_up.assert_called_once_with(
+ amount=(42 - 1))
+ self.assertFalse(self.mock_second_cache.return_value.clean_up.called)
+
+        # Since clean_up_caches uses a generator expression, stat on the
+        # second cache is not called if cleaning up the first cache frees
+        # enough space.
+ mock_stat_calls_expected = [mock.call('master_dir'),
+ mock.call('first_cache_dir')]
+ mock_statvfs_calls_expected = [mock.call('master_dir'),
+ mock.call('master_dir')]
+ self.assertEqual(mock_stat_calls_expected, mock_stat.mock_calls)
+ self.assertEqual(mock_statvfs_calls_expected, mock_statvfs.mock_calls)
+
+ @mock.patch.object(os, 'stat', autospec=True)
+ def test_clean_up_another_fs(self, mock_stat, mock_image_service,
+ mock_statvfs, cache_cleanup_list_mock):
+ # Not enough space, need to cleanup second cache
+ mock_stat.side_effect = [mock.MagicMock(st_dev=1, spec_set=['st_dev']),
+ mock.MagicMock(st_dev=2, spec_set=['st_dev']),
+ mock.MagicMock(st_dev=1, spec_set=['st_dev'])]
+ mock_show = mock_image_service.return_value.show
+ mock_show.return_value = dict(size=42)
+ mock_statvfs.side_effect = [
+ mock.MagicMock(f_frsize=1, f_bavail=1,
+ spec_set=['f_frsize', 'f_bavail']),
+ mock.MagicMock(f_frsize=1, f_bavail=1024,
+ spec_set=['f_frsize', 'f_bavail'])
+ ]
+
+ cache_cleanup_list_mock.__iter__.return_value = self.cache_cleanup_list
+ image_cache.clean_up_caches(None, 'master_dir', [('uuid', 'path')])
+
+ mock_show.assert_called_once_with('uuid')
+ mock_statvfs.assert_called_with('master_dir')
+ self.assertEqual(2, mock_statvfs.call_count)
+ self.mock_second_cache.return_value.clean_up.assert_called_once_with(
+ amount=(42 - 1))
+ self.assertFalse(self.mock_first_cache.return_value.clean_up.called)
+
+        # Since the first cache is on a different partition, it is not
+        # considered for cleanup.
+ mock_stat_calls_expected = [mock.call('master_dir'),
+ mock.call('first_cache_dir'),
+ mock.call('second_cache_dir')]
+ mock_statvfs_calls_expected = [mock.call('master_dir'),
+ mock.call('master_dir')]
+ self.assertEqual(mock_stat_calls_expected, mock_stat.mock_calls)
+ self.assertEqual(mock_statvfs_calls_expected, mock_statvfs.mock_calls)
+
+ @mock.patch.object(os, 'stat', autospec=True)
+ def test_both_clean_up(self, mock_stat, mock_image_service, mock_statvfs,
+ cache_cleanup_list_mock):
+ # Not enough space, clean up of both caches required
+ mock_stat.return_value.st_dev = 1
+ mock_show = mock_image_service.return_value.show
+ mock_show.return_value = dict(size=42)
+ mock_statvfs.side_effect = [
+ mock.MagicMock(f_frsize=1, f_bavail=1,
+ spec_set=['f_frsize', 'f_bavail']),
+ mock.MagicMock(f_frsize=1, f_bavail=2,
+ spec_set=['f_frsize', 'f_bavail']),
+ mock.MagicMock(f_frsize=1, f_bavail=1024,
+ spec_set=['f_frsize', 'f_bavail'])
+ ]
+
+ cache_cleanup_list_mock.__iter__.return_value = self.cache_cleanup_list
+ image_cache.clean_up_caches(None, 'master_dir', [('uuid', 'path')])
+
+ mock_show.assert_called_once_with('uuid')
+ mock_statvfs.assert_called_with('master_dir')
+ self.assertEqual(3, mock_statvfs.call_count)
+ self.mock_first_cache.return_value.clean_up.assert_called_once_with(
+ amount=(42 - 1))
+ self.mock_second_cache.return_value.clean_up.assert_called_once_with(
+ amount=(42 - 2))
+
+ mock_stat_calls_expected = [mock.call('master_dir'),
+ mock.call('first_cache_dir'),
+ mock.call('second_cache_dir')]
+ mock_statvfs_calls_expected = [mock.call('master_dir'),
+ mock.call('master_dir'),
+ mock.call('master_dir')]
+ self.assertEqual(mock_stat_calls_expected, mock_stat.mock_calls)
+ self.assertEqual(mock_statvfs_calls_expected, mock_statvfs.mock_calls)
+
+ @mock.patch.object(os, 'stat', autospec=True)
+ def test_clean_up_fail(self, mock_stat, mock_image_service, mock_statvfs,
+ cache_cleanup_list_mock):
+ # Not enough space even after cleaning both caches - failure
+ mock_stat.return_value.st_dev = 1
+ mock_show = mock_image_service.return_value.show
+ mock_show.return_value = dict(size=42)
+ mock_statvfs.return_value = mock.MagicMock(
+ f_frsize=1, f_bavail=1, spec_set=['f_frsize', 'f_bavail'])
+
+ cache_cleanup_list_mock.__iter__.return_value = self.cache_cleanup_list
+ self.assertRaises(exception.InsufficientDiskSpace,
+ image_cache.clean_up_caches,
+ None, 'master_dir', [('uuid', 'path')])
+
+ mock_show.assert_called_once_with('uuid')
+ mock_statvfs.assert_called_with('master_dir')
+ self.assertEqual(3, mock_statvfs.call_count)
+ self.mock_first_cache.return_value.clean_up.assert_called_once_with(
+ amount=(42 - 1))
+ self.mock_second_cache.return_value.clean_up.assert_called_once_with(
+ amount=(42 - 1))
+
+ mock_stat_calls_expected = [mock.call('master_dir'),
+ mock.call('first_cache_dir'),
+ mock.call('second_cache_dir')]
+ mock_statvfs_calls_expected = [mock.call('master_dir'),
+ mock.call('master_dir'),
+ mock.call('master_dir')]
+ self.assertEqual(mock_stat_calls_expected, mock_stat.mock_calls)
+ self.assertEqual(mock_statvfs_calls_expected, mock_statvfs.mock_calls)
+
+
+class TestFetchCleanup(base.TestCase):
+
+ @mock.patch.object(images, 'converted_size', autospec=True)
+ @mock.patch.object(images, 'fetch', autospec=True)
+ @mock.patch.object(images, 'image_to_raw', autospec=True)
+ @mock.patch.object(image_cache, '_clean_up_caches', autospec=True)
+ def test__fetch(self, mock_clean, mock_raw, mock_fetch, mock_size):
+ mock_size.return_value = 100
+ image_cache._fetch('fake', 'fake-uuid', '/foo/bar', force_raw=True)
+ mock_fetch.assert_called_once_with('fake', 'fake-uuid',
+ '/foo/bar.part', force_raw=False)
+ mock_clean.assert_called_once_with('/foo', 100)
+ mock_raw.assert_called_once_with('fake-uuid', '/foo/bar',
+ '/foo/bar.part')
diff --git a/ironic/tests/unit/drivers/test_inspector.py b/ironic/tests/unit/drivers/test_inspector.py
new file mode 100644
index 000000000..349acb4b8
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_inspector.py
@@ -0,0 +1,239 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import eventlet
+import ironic_inspector_client as client
+import mock
+
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import keystone
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules import inspector
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+class DisabledTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(DisabledTestCase, self).setUp()
+
+ def _do_mock(self):
+ # NOTE(dtantsur): fake driver always has inspection, using another one
+ mgr_utils.mock_the_extension_manager("pxe_ssh")
+ self.driver = driver_factory.get_driver("pxe_ssh")
+
+ def test_disabled(self):
+ self.config(enabled=False, group='inspector')
+ self._do_mock()
+ self.assertIsNone(self.driver.inspect)
+ # NOTE(dtantsur): it's expected that fake_inspector fails to load
+ # in this case
+ self.assertRaises(exception.DriverLoadError,
+ mgr_utils.mock_the_extension_manager,
+ "fake_inspector")
+
+ def test_enabled(self):
+ self.config(enabled=True, group='inspector')
+ self._do_mock()
+ self.assertIsNotNone(self.driver.inspect)
+
+ @mock.patch.object(inspector, 'client', None)
+ def test_init_inspector_not_imported(self):
+ self.assertRaises(exception.DriverLoadError,
+ inspector.Inspector)
+
+ def test_init_ok(self):
+ self.config(enabled=True, group='inspector')
+ inspector.Inspector()
+
+
+class BaseTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(BaseTestCase, self).setUp()
+ self.config(enabled=True, group='inspector')
+ mgr_utils.mock_the_extension_manager("fake_inspector")
+ self.driver = driver_factory.get_driver("fake_inspector")
+ self.node = obj_utils.get_test_node(self.context)
+ self.task = mock.MagicMock(spec=task_manager.TaskManager)
+ self.task.context = mock.MagicMock(spec_set=['auth_token'])
+ self.task.shared = False
+ self.task.node = self.node
+ self.task.driver = self.driver
+ self.api_version = (1, 0)
+
+
+class CommonFunctionsTestCase(BaseTestCase):
+ def test_validate_ok(self):
+ self.driver.inspect.validate(self.task)
+
+ def test_get_properties(self):
+ res = self.driver.inspect.get_properties()
+ self.assertEqual({}, res)
+
+ def test_create_if_enabled(self):
+ res = inspector.Inspector.create_if_enabled('driver')
+ self.assertIsInstance(res, inspector.Inspector)
+
+ @mock.patch.object(inspector.LOG, 'info', autospec=True)
+ def test_create_if_enabled_disabled(self, warn_mock):
+ self.config(enabled=False, group='inspector')
+ res = inspector.Inspector.create_if_enabled('driver')
+ self.assertIsNone(res)
+ self.assertTrue(warn_mock.called)
+
+
+@mock.patch.object(eventlet, 'spawn_n', lambda f, *a, **kw: f(*a, **kw))
+@mock.patch.object(client, 'introspect')
+class InspectHardwareTestCase(BaseTestCase):
+ def test_ok(self, mock_introspect):
+ self.assertEqual(states.INSPECTING,
+ self.driver.inspect.inspect_hardware(self.task))
+ mock_introspect.assert_called_once_with(
+ self.node.uuid,
+ api_version=self.api_version,
+ auth_token=self.task.context.auth_token)
+
+ def test_url(self, mock_introspect):
+ self.config(service_url='meow', group='inspector')
+ self.assertEqual(states.INSPECTING,
+ self.driver.inspect.inspect_hardware(self.task))
+ mock_introspect.assert_called_once_with(
+ self.node.uuid,
+ api_version=self.api_version,
+ auth_token=self.task.context.auth_token,
+ base_url='meow')
+
+ @mock.patch.object(task_manager, 'acquire', autospec=True)
+ def test_error(self, mock_acquire, mock_introspect):
+ mock_introspect.side_effect = RuntimeError('boom')
+ self.driver.inspect.inspect_hardware(self.task)
+ mock_introspect.assert_called_once_with(
+ self.node.uuid,
+ api_version=self.api_version,
+ auth_token=self.task.context.auth_token)
+ task = mock_acquire.return_value.__enter__.return_value
+ self.assertIn('boom', task.node.last_error)
+ task.process_event.assert_called_once_with('fail')
+
+
+@mock.patch.object(keystone, 'get_admin_auth_token', lambda: 'the token')
+@mock.patch.object(client, 'get_status')
+class CheckStatusTestCase(BaseTestCase):
+ def setUp(self):
+ super(CheckStatusTestCase, self).setUp()
+ self.node.provision_state = states.INSPECTING
+
+ def test_not_inspecting(self, mock_get):
+ self.node.provision_state = states.MANAGEABLE
+ inspector._check_status(self.task)
+ self.assertFalse(mock_get.called)
+
+ def test_not_inspector(self, mock_get):
+ self.task.driver.inspect = object()
+ inspector._check_status(self.task)
+ self.assertFalse(mock_get.called)
+
+ def test_not_finished(self, mock_get):
+ mock_get.return_value = {}
+ inspector._check_status(self.task)
+ mock_get.assert_called_once_with(self.node.uuid,
+ api_version=self.api_version,
+ auth_token='the token')
+ self.assertFalse(self.task.process_event.called)
+
+ def test_exception_ignored(self, mock_get):
+ mock_get.side_effect = RuntimeError('boom')
+ inspector._check_status(self.task)
+ mock_get.assert_called_once_with(self.node.uuid,
+ api_version=self.api_version,
+ auth_token='the token')
+ self.assertFalse(self.task.process_event.called)
+
+ def test_status_ok(self, mock_get):
+ mock_get.return_value = {'finished': True}
+ inspector._check_status(self.task)
+ mock_get.assert_called_once_with(self.node.uuid,
+ api_version=self.api_version,
+ auth_token='the token')
+ self.task.process_event.assert_called_once_with('done')
+
+ def test_status_error(self, mock_get):
+ mock_get.return_value = {'error': 'boom'}
+ inspector._check_status(self.task)
+ mock_get.assert_called_once_with(self.node.uuid,
+ api_version=self.api_version,
+ auth_token='the token')
+ self.task.process_event.assert_called_once_with('fail')
+ self.assertIn('boom', self.node.last_error)
+
+ def test_service_url(self, mock_get):
+ self.config(service_url='meow', group='inspector')
+ mock_get.return_value = {'finished': True}
+ inspector._check_status(self.task)
+ mock_get.assert_called_once_with(self.node.uuid,
+ api_version=self.api_version,
+ auth_token='the token',
+ base_url='meow')
+ self.task.process_event.assert_called_once_with('done')
+
+ def test_is_standalone(self, mock_get):
+ self.config(auth_strategy='noauth')
+ mock_get.return_value = {'finished': True}
+ inspector._check_status(self.task)
+ mock_get.assert_called_once_with(
+ self.node.uuid,
+ api_version=self.api_version,
+ auth_token=self.task.context.auth_token)
+ self.task.process_event.assert_called_once_with('done')
+
+ def test_not_standalone(self, mock_get):
+ self.config(auth_strategy='keystone')
+ mock_get.return_value = {'finished': True}
+ inspector._check_status(self.task)
+ mock_get.assert_called_once_with(self.node.uuid,
+ api_version=self.api_version,
+ auth_token='the token')
+ self.task.process_event.assert_called_once_with('done')
+
+
+@mock.patch.object(eventlet.greenthread, 'spawn_n',
+ lambda f, *a, **kw: f(*a, **kw))
+@mock.patch.object(task_manager, 'acquire', autospec=True)
+@mock.patch.object(inspector, '_check_status', autospec=True)
+class PeriodicTaskTestCase(BaseTestCase):
+ def test_ok(self, mock_check, mock_acquire):
+ mgr = mock.MagicMock(spec=['iter_nodes'])
+ mgr.iter_nodes.return_value = [('1', 'd1'), ('2', 'd2')]
+ tasks = [mock.sentinel.task1, mock.sentinel.task2]
+ mock_acquire.side_effect = (
+ mock.MagicMock(__enter__=mock.MagicMock(return_value=task))
+ for task in tasks
+ )
+ inspector.Inspector()._periodic_check_result(
+ mgr, mock.sentinel.context)
+ mock_check.assert_any_call(tasks[0])
+ mock_check.assert_any_call(tasks[1])
+ self.assertEqual(2, mock_acquire.call_count)
+
+ def test_node_locked(self, mock_check, mock_acquire):
+ iter_nodes_ret = [('1', 'd1'), ('2', 'd2')]
+ mock_acquire.side_effect = iter([exception.NodeLocked("boom")] *
+ len(iter_nodes_ret))
+ mgr = mock.MagicMock(spec=['iter_nodes'])
+ mgr.iter_nodes.return_value = iter_nodes_ret
+ inspector.Inspector()._periodic_check_result(
+ mgr, mock.sentinel.context)
+ self.assertFalse(mock_check.called)
+ self.assertEqual(2, mock_acquire.call_count)
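
Taken together, the CheckStatusTestCase assertions above describe the decision tree that inspector._check_status is expected to follow: skip nodes that are not INSPECTING, skip non-Inspector inspect interfaces, pick the auth token based on the auth strategy, ignore client errors, and map 'error'/'finished' results to the 'fail'/'done' provision events. The sketch below is inferred from those assertions only; the error message wording and the config handling are assumptions, not the driver's actual code.

import ironic_inspector_client as client
from oslo_config import cfg

from ironic.common import keystone
from ironic.common import states
from ironic.drivers.modules import inspector

CONF = cfg.CONF


def check_status_sketch(task):
    """Rough shape of inspector._check_status implied by CheckStatusTestCase."""
    node = task.node
    if node.provision_state != states.INSPECTING:
        return
    if not isinstance(task.driver.inspect, inspector.Inspector):
        return

    # test_is_standalone / test_not_standalone: reuse the request token when
    # running without keystone, otherwise fetch an admin token.
    if CONF.auth_strategy == 'keystone':
        auth_token = keystone.get_admin_auth_token()
    else:
        auth_token = task.context.auth_token

    kwargs = {'api_version': (1, 0), 'auth_token': auth_token}
    if CONF.inspector.service_url:
        kwargs['base_url'] = CONF.inspector.service_url

    try:
        status = client.get_status(node.uuid, **kwargs)
    except Exception:
        # test_exception_ignored: failures talking to inspector are ignored.
        return

    if status.get('error'):
        # The exact message is an assumption; the tests only check that the
        # error text ends up in node.last_error.
        node.last_error = 'Hardware inspection failed: %s' % status['error']
        task.process_event('fail')
    elif status.get('finished'):
        task.process_event('done')
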
diff --git a/ironic/tests/unit/drivers/test_ipminative.py b/ironic/tests/unit/drivers/test_ipminative.py
new file mode 100644
index 000000000..8cc1ecf87
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_ipminative.py
@@ -0,0 +1,610 @@
+# coding=utf-8
+
+# Copyright 2013 International Business Machines Corporation
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for Native IPMI power driver module.
+"""
+
+import mock
+from oslo_utils import uuidutils
+from pyghmi import exceptions as pyghmi_exception
+
+from ironic.common import boot_devices
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules import console_utils
+from ironic.drivers.modules import ipminative
+from ironic.drivers import utils as driver_utils
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_ipmi_info()
+
+
+class IPMINativePrivateMethodTestCase(db_base.DbTestCase):
+ """Test cases for ipminative private methods."""
+
+ def setUp(self):
+ super(IPMINativePrivateMethodTestCase, self).setUp()
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_ipminative',
+ driver_info=INFO_DICT)
+ self.info = ipminative._parse_driver_info(self.node)
+
+ def test__parse_driver_info(self):
+ # make sure we get back the expected things
+ self.assertIsNotNone(self.info.get('address'))
+ self.assertIsNotNone(self.info.get('username'))
+ self.assertIsNotNone(self.info.get('password'))
+ self.assertIsNotNone(self.info.get('uuid'))
+ self.assertIsNotNone(self.info.get('force_boot_device'))
+
+ # make sure an error is raised when required info, e.g. username, is missing
+ info = dict(INFO_DICT)
+ del info['ipmi_username']
+
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ ipminative._parse_driver_info,
+ node)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__power_status_on(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_power.return_value = {'powerstate': 'on'}
+
+ state = ipminative._power_status(self.info)
+ ipmicmd.get_power.assert_called_once_with()
+ self.assertEqual(states.POWER_ON, state)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__power_status_off(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_power.return_value = {'powerstate': 'off'}
+
+ state = ipminative._power_status(self.info)
+ ipmicmd.get_power.assert_called_once_with()
+ self.assertEqual(states.POWER_OFF, state)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__power_status_error(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_power.return_value = {'powerstate': 'Error'}
+
+ state = ipminative._power_status(self.info)
+ ipmicmd.get_power.assert_called_once_with()
+ self.assertEqual(states.ERROR, state)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__power_on(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.set_power.return_value = {'powerstate': 'on'}
+
+ self.config(retry_timeout=400, group='ipmi')
+ state = ipminative._power_on(self.info)
+ ipmicmd.set_power.assert_called_once_with('on', 400)
+ self.assertEqual(states.POWER_ON, state)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__power_off(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.set_power.return_value = {'powerstate': 'off'}
+
+ self.config(retry_timeout=500, group='ipmi')
+ state = ipminative._power_off(self.info)
+ ipmicmd.set_power.assert_called_once_with('off', 500)
+ self.assertEqual(states.POWER_OFF, state)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__reboot(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.set_power.return_value = {'powerstate': 'on'}
+
+ self.config(retry_timeout=600, group='ipmi')
+ state = ipminative._reboot(self.info)
+ ipmicmd.set_power.assert_called_once_with('boot', 600)
+ self.assertEqual(states.POWER_ON, state)
+
+ def _create_sensor_object(self, value, type_, name, states=None,
+ units='fake_units', health=0):
+ if states is None:
+ states = []
+ return type('Reading', (object, ), {
+ 'value': value, 'type': type_, 'name': name,
+ 'states': states, 'units': units, 'health': health})()
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__get_sensors_data(self, ipmi_mock):
+ reading_1 = self._create_sensor_object('fake_value1',
+ 'fake_type_A',
+ 'fake_name1')
+ reading_2 = self._create_sensor_object('fake_value2',
+ 'fake_type_A',
+ 'fake_name2')
+ reading_3 = self._create_sensor_object('fake_value3',
+ 'fake_type_B',
+ 'fake_name3')
+ readings = [reading_1, reading_2, reading_3]
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_sensor_data.return_value = readings
+ expected = {
+ 'fake_type_A': {
+ 'fake_name1': {
+ 'Health': '0',
+ 'Sensor ID': 'fake_name1',
+ 'Sensor Reading': 'fake_value1 fake_units',
+ 'States': '[]',
+ 'Units': 'fake_units'
+ },
+ 'fake_name2': {
+ 'Health': '0',
+ 'Sensor ID': 'fake_name2',
+ 'Sensor Reading': 'fake_value2 fake_units',
+ 'States': '[]',
+ 'Units': 'fake_units'
+ }
+ },
+ 'fake_type_B': {
+ 'fake_name3': {
+ 'Health': '0',
+ 'Sensor ID': 'fake_name3',
+ 'Sensor Reading': 'fake_value3 fake_units',
+ 'States': '[]', 'Units': 'fake_units'
+ }
+ }
+ }
+ ret = ipminative._get_sensors_data(self.info)
+ self.assertEqual(expected, ret)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__get_sensors_data_missing_values(self, ipmi_mock):
+ reading_1 = self._create_sensor_object('fake_value1',
+ 'fake_type_A',
+ 'fake_name1')
+ reading_2 = self._create_sensor_object(None,
+ 'fake_type_A',
+ 'fake_name2')
+ reading_3 = self._create_sensor_object(None,
+ 'fake_type_B',
+ 'fake_name3')
+ readings = [reading_1, reading_2, reading_3]
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_sensor_data.return_value = readings
+
+ expected = {
+ 'fake_type_A': {
+ 'fake_name1': {
+ 'Health': '0',
+ 'Sensor ID': 'fake_name1',
+ 'Sensor Reading': 'fake_value1 fake_units',
+ 'States': '[]',
+ 'Units': 'fake_units'
+ }
+ }
+ }
+ ret = ipminative._get_sensors_data(self.info)
+ self.assertEqual(expected, ret)
+
+ def test__parse_raw_bytes_ok(self):
+ bytes_string = '0x11 0x12 0x25 0xFF'
+ netfn, cmd, data = ipminative._parse_raw_bytes(bytes_string)
+ self.assertEqual(0x11, netfn)
+ self.assertEqual(0x12, cmd)
+ self.assertEqual([0x25, 0xFF], data)
+
+ def test__parse_raw_bytes_invalid_value(self):
+ bytes_string = '0x11 oops'
+ self.assertRaises(exception.InvalidParameterValue,
+ ipminative._parse_raw_bytes,
+ bytes_string)
+
+ def test__parse_raw_bytes_missing_byte(self):
+ bytes_string = '0x11'
+ self.assertRaises(exception.InvalidParameterValue,
+ ipminative._parse_raw_bytes,
+ bytes_string)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__send_raw(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipminative._send_raw(self.info, '0x01 0x02 0x03 0x04')
+ ipmicmd.xraw_command.assert_called_once_with(1, 2, data=[3, 4])
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test__send_raw_fail(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.xraw_command.side_effect = pyghmi_exception.IpmiException()
+ self.assertRaises(exception.IPMIFailure, ipminative._send_raw,
+ self.info, '0x01 0x02')
+
+
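The three _parse_raw_bytes tests above pin down its contract: a whitespace separated string of hex bytes, at least a netfn byte and a command byte, and InvalidParameterValue for anything malformed. A minimal sketch that satisfies exactly those cases (the error message and the exact parsing details are assumptions):

from ironic.common import exception


def parse_raw_bytes_sketch(raw_bytes):
    """Parse a string such as '0x11 0x12 0x25 0xFF' into (netfn, cmd, data)."""
    try:
        values = [int(token, base=16) for token in raw_bytes.split()]
        return values[0], values[1], values[2:]
    except (ValueError, IndexError):
        # '0x11 oops' and a lone '0x11' both end up here.
        raise exception.InvalidParameterValue(
            "Raw bytes string requires at least netfn and command bytes.")


# Matches test__parse_raw_bytes_ok:
# parse_raw_bytes_sketch('0x11 0x12 0x25 0xFF') == (0x11, 0x12, [0x25, 0xFF])
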
+class IPMINativeDriverTestCase(db_base.DbTestCase):
+ """Test cases for ipminative.NativeIPMIPower class functions."""
+
+ def setUp(self):
+ super(IPMINativeDriverTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_ipminative")
+ self.driver = driver_factory.get_driver("fake_ipminative")
+
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_ipminative',
+ driver_info=INFO_DICT)
+ self.info = ipminative._parse_driver_info(self.node)
+
+ def test_get_properties(self):
+ expected = ipminative.COMMON_PROPERTIES
+ self.assertEqual(expected, self.driver.power.get_properties())
+ self.assertEqual(expected, self.driver.management.get_properties())
+ self.assertEqual(expected, self.driver.vendor.get_properties())
+
+ expected = list(ipminative.COMMON_PROPERTIES)
+ expected += list(ipminative.CONSOLE_PROPERTIES)
+ self.assertEqual(sorted(expected),
+ sorted(self.driver.console.get_properties().keys()))
+ self.assertEqual(sorted(expected),
+ sorted(self.driver.get_properties().keys()))
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_get_power_state(self, ipmi_mock):
+ # Get the mocked pyghmi command object.
+ cmd_mock = ipmi_mock.return_value
+ # Get the mock for its get_power() method.
+ get_power_mock = cmd_mock.get_power
+
+ return_values = [{'powerstate': 'error'},
+ {'powerstate': 'on'},
+ {'powerstate': 'off'}]
+
+ get_power_mock.side_effect = lambda: return_values.pop()
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pstate = self.driver.power.get_power_state(task)
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ pstate = self.driver.power.get_power_state(task)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ pstate = self.driver.power.get_power_state(task)
+ self.assertEqual(states.ERROR, pstate)
+ self.assertEqual(3, get_power_mock.call_count,
+ "pyghmi.ipmi.command.Command.get_power was not"
+ " called 3 times.")
+
+ @mock.patch.object(ipminative, '_power_on', autospec=True)
+ def test_set_power_on_ok(self, power_on_mock):
+ power_on_mock.return_value = states.POWER_ON
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.power.set_power_state(
+ task, states.POWER_ON)
+ power_on_mock.assert_called_once_with(self.info)
+
+ @mock.patch.object(driver_utils, 'ensure_next_boot_device', autospec=True)
+ @mock.patch.object(ipminative, '_power_on', autospec=True)
+ def test_set_power_on_with_next_boot(self, power_on_mock, mock_next_boot):
+ power_on_mock.return_value = states.POWER_ON
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.power.set_power_state(
+ task, states.POWER_ON)
+ mock_next_boot.assert_called_once_with(task, self.info)
+ power_on_mock.assert_called_once_with(self.info)
+
+ @mock.patch.object(ipminative, '_power_off', autospec=True)
+ def test_set_power_off_ok(self, power_off_mock):
+ power_off_mock.return_value = states.POWER_OFF
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.power.set_power_state(
+ task, states.POWER_OFF)
+ power_off_mock.assert_called_once_with(self.info)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_set_power_on_fail(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.set_power.return_value = {'powerstate': 'error'}
+
+ self.config(retry_timeout=500, group='ipmi')
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ self.driver.power.set_power_state,
+ task,
+ states.POWER_ON)
+ ipmicmd.set_power.assert_called_once_with('on', 500)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_set_boot_device_ok(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.set_bootdev.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.management.set_boot_device(task, boot_devices.PXE)
+ # PXE is converted to 'network' internally by ipminative
+ ipmicmd.set_bootdev.assert_called_once_with('network', persist=False)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_force_set_boot_device_ok(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.set_bootdev.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ task.node.driver_info['ipmi_force_boot_device'] = True
+ self.driver.management.set_boot_device(task, boot_devices.PXE)
+ task.node.refresh()
+ self.assertEqual(
+ False,
+ task.node.driver_internal_info['is_next_boot_persistent']
+ )
+ # PXE is converted to 'network' internally by ipminative
+ ipmicmd.set_bootdev.assert_called_once_with('network', persist=False)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_set_boot_device_with_persistent(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.set_bootdev.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ task.node.driver_info['ipmi_force_boot_device'] = True
+ self.driver.management.set_boot_device(task,
+ boot_devices.PXE,
+ True)
+ self.assertEqual(
+ boot_devices.PXE,
+ task.node.driver_internal_info['persistent_boot_device'])
+ # PXE is converted to 'network' internally by ipminative
+ ipmicmd.set_bootdev.assert_called_once_with('network', persist=False)
+
+ def test_set_boot_device_bad_device(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.management.set_boot_device,
+ task,
+ 'fake-device')
+
+ @mock.patch.object(driver_utils, 'ensure_next_boot_device', autospec=True)
+ @mock.patch.object(ipminative, '_reboot', autospec=True)
+ def test_reboot_ok(self, reboot_mock, mock_next_boot):
+ reboot_mock.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.power.reboot(task)
+ mock_next_boot.assert_called_once_with(task, self.info)
+ reboot_mock.assert_called_once_with(self.info)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_reboot_fail(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.set_power.return_value = {'powerstate': 'error'}
+
+ self.config(retry_timeout=500, group='ipmi')
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ self.driver.power.reboot,
+ task)
+ ipmicmd.set_power.assert_called_once_with('boot', 500)
+
+ def test_management_interface_get_supported_boot_devices(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ expected = [boot_devices.PXE, boot_devices.DISK,
+ boot_devices.CDROM, boot_devices.BIOS]
+ self.assertEqual(sorted(expected), sorted(task.driver.management.
+ get_supported_boot_devices(task)))
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_management_interface_get_boot_device_good(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_bootdev.return_value = {'bootdev': 'hd'}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ bootdev = self.driver.management.get_boot_device(task)
+ self.assertEqual(boot_devices.DISK, bootdev['boot_device'])
+ self.assertIsNone(bootdev['persistent'])
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_management_interface_get_boot_device_persistent(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_bootdev.return_value = {'bootdev': 'hd',
+ 'persistent': True}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ bootdev = self.driver.management.get_boot_device(task)
+ self.assertEqual(boot_devices.DISK, bootdev['boot_device'])
+ self.assertTrue(bootdev['persistent'])
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_management_interface_get_boot_device_fail(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_bootdev.side_effect = pyghmi_exception.IpmiException
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.IPMIFailure,
+ self.driver.management.get_boot_device, task)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_management_interface_get_boot_device_fail_dict(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_bootdev.return_value = {'error': 'boooom'}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.IPMIFailure,
+ self.driver.management.get_boot_device, task)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_management_interface_get_boot_device_unknown(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_bootdev.return_value = {'bootdev': 'unknown'}
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ expected = {'boot_device': None, 'persistent': None}
+ self.assertEqual(expected,
+ self.driver.management.get_boot_device(task))
+
+ def test_get_force_boot_device_persistent(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['ipmi_force_boot_device'] = True
+ task.node.driver_internal_info['persistent_boot_device'] = 'pxe'
+ bootdev = self.driver.management.get_boot_device(task)
+ self.assertEqual('pxe', bootdev['boot_device'])
+ self.assertTrue(bootdev['persistent'])
+
+ def test_management_interface_validate_good(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.management.validate(task)
+
+ def test_management_interface_validate_fail(self):
+ # Missing the required IPMI parameters in driver_info
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake_ipminative')
+ with task_manager.acquire(self.context, node.uuid) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.management.validate, task)
+
+ @mock.patch('pyghmi.ipmi.command.Command', autospec=True)
+ def test_get_sensors_data(self, ipmi_mock):
+ ipmicmd = ipmi_mock.return_value
+ ipmicmd.get_sensor_data.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.management.get_sensors_data(task)
+ ipmicmd.get_sensor_data.assert_called_once_with()
+
+ @mock.patch.object(console_utils, 'start_shellinabox_console',
+ autospec=True)
+ def test_start_console(self, mock_exec):
+ mock_exec.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.console.start_console(task)
+
+ mock_exec.assert_called_once_with(self.info['uuid'],
+ self.info['port'],
+ mock.ANY)
+ self.assertTrue(mock_exec.called)
+
+ @mock.patch.object(console_utils, 'start_shellinabox_console',
+ autospec=True)
+ def test_start_console_fail(self, mock_exec):
+ mock_exec.side_effect = iter(
+ [exception.ConsoleSubprocessFailed(error='error')])
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.ConsoleSubprocessFailed,
+ self.driver.console.start_console,
+ task)
+
+ @mock.patch.object(console_utils, 'stop_shellinabox_console',
+ autospec=True)
+ def test_stop_console(self, mock_exec):
+ mock_exec.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.console.stop_console(task)
+
+ mock_exec.assert_called_once_with(self.info['uuid'])
+ self.assertTrue(mock_exec.called)
+
+ @mock.patch.object(console_utils, 'stop_shellinabox_console',
+ autospec=True)
+ def test_stop_console_fail(self, mock_stop):
+ mock_stop.side_effect = iter([exception.ConsoleError()])
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.ConsoleError,
+ self.driver.console.stop_console,
+ task)
+
+ mock_stop.assert_called_once_with(self.node.uuid)
+
+ @mock.patch.object(console_utils, 'get_shellinabox_console_url',
+ autospec=True)
+ def test_get_console(self, mock_exec):
+ url = 'http://localhost:4201'
+ mock_exec.return_value = url
+ expected = {'type': 'shellinabox', 'url': url}
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ console_info = self.driver.console.get_console(task)
+
+ self.assertEqual(expected, console_info)
+ mock_exec.assert_called_once_with(self.info['port'])
+ self.assertTrue(mock_exec.called)
+
+ @mock.patch.object(ipminative, '_parse_driver_info', autospec=True)
+ @mock.patch.object(ipminative, '_parse_raw_bytes', autospec=True)
+ def test_vendor_passthru_validate__send_raw_bytes_good(self, mock_raw,
+ mock_driver):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.driver.vendor.validate(task,
+ method='send_raw',
+ http_method='POST',
+ raw_bytes='0x00 0x01')
+ mock_raw.assert_called_once_with('0x00 0x01')
+ mock_driver.assert_called_once_with(task.node)
+
+ def test_vendor_passthru_validate__send_raw_bytes_fail(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ self.driver.vendor.validate,
+ task, method='send_raw')
+
+ def test_vendor_passthru_vendor_routes(self):
+ expected = ['send_raw', 'bmc_reset']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ vendor_routes = task.driver.vendor.vendor_routes
+ self.assertIsInstance(vendor_routes, dict)
+ self.assertEqual(sorted(expected), sorted(vendor_routes))
+
+ @mock.patch.object(ipminative, '_send_raw', autospec=True)
+ def test_send_raw(self, send_raw_mock):
+ # use a name that does not shadow the built-in 'bytes'
+ raw_bytes = '0x00 0x01'
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.vendor.send_raw(task, http_method='POST',
+ raw_bytes=raw_bytes)
+
+ send_raw_mock.assert_called_once_with(self.info, raw_bytes)
+
+ @mock.patch.object(ipminative, '_send_raw', autospec=True)
+ def _test_bmc_reset(self, warm, send_raw_mock):
+ expected_bytes = '0x06 0x03' if warm else '0x06 0x02'
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.vendor.bmc_reset(task, http_method='POST', warm=warm)
+
+ send_raw_mock.assert_called_once_with(self.info, expected_bytes)
+
+ def test_bmc_reset_cold(self):
+ self._test_bmc_reset(False)
+
+ def test_bmc_reset_warm(self):
+ self._test_bmc_reset(True)
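
For context on the nested dictionaries expected by test__get_sensors_data earlier in this file: the pyghmi readings are regrouped by sensor type, readings without a value are dropped, and every field is flattened to a string. A sketch of that transformation, inferred only from the two expected dictionaries above (the real ipminative._get_sensors_data may differ in details):

def format_sensor_readings_sketch(readings):
    """Group pyghmi sensor readings by type the way the tests expect."""
    sensors_data = {}
    for reading in readings:
        if reading.value is None:
            # test__get_sensors_data_missing_values: skip empty readings.
            continue
        sensors_data.setdefault(reading.type, {})[reading.name] = {
            'Sensor ID': reading.name,
            'Sensor Reading': '%s %s' % (reading.value, reading.units),
            'States': str(reading.states),
            'Units': reading.units,
            'Health': str(reading.health),
        }
    return sensors_data
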
diff --git a/ironic/tests/unit/drivers/test_ipmitool.py b/ironic/tests/unit/drivers/test_ipmitool.py
new file mode 100644
index 000000000..6a80b7ea9
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_ipmitool.py
@@ -0,0 +1,1899 @@
+# coding=utf-8
+
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+# Copyright (c) 2012 NTT DOCOMO, INC.
+# Copyright 2014 International Business Machines Corporation
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+"""Test class for IPMITool driver module."""
+
+import os
+import stat
+import subprocess
+import tempfile
+import time
+import types
+
+import mock
+from oslo_concurrency import processutils
+from oslo_config import cfg
+from oslo_utils import uuidutils
+import six
+
+from ironic.common import boot_devices
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import states
+from ironic.common import utils
+from ironic.conductor import task_manager
+from ironic.drivers.modules import console_utils
+from ironic.drivers.modules import ipmitool as ipmi
+from ironic.drivers import utils as driver_utils
+from ironic.tests.unit import base
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+CONF = cfg.CONF
+
+CONF.import_opt('min_command_interval',
+ 'ironic.drivers.modules.ipminative',
+ group='ipmi')
+
+INFO_DICT = db_utils.get_test_ipmi_info()
+
+# BRIDGE_INFO_DICT will have all the bridging parameters appended
+BRIDGE_INFO_DICT = INFO_DICT.copy()
+BRIDGE_INFO_DICT.update(db_utils.get_test_ipmi_bridging_parameters())
+
+
+class IPMIToolCheckInitTestCase(base.TestCase):
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_power_init_calls(self, mock_check_dir, mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = None
+ ipmi.IPMIPower()
+ mock_support.assert_called_with(mock.ANY)
+ mock_check_dir.assert_called_once_with()
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_power_init_calls_raises_1(self, mock_check_dir, mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = None
+ mock_check_dir.side_effect = iter(
+ [exception.PathNotFound(dir="foo_dir")])
+ self.assertRaises(exception.PathNotFound, ipmi.IPMIPower)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_power_init_calls_raises_2(self, mock_check_dir, mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = None
+ mock_check_dir.side_effect = iter(
+ [exception.DirectoryNotWritable(dir="foo_dir")])
+ self.assertRaises(exception.DirectoryNotWritable, ipmi.IPMIPower)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_power_init_calls_raises_3(self, mock_check_dir, mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = None
+ mock_check_dir.side_effect = iter([exception.InsufficientDiskSpace(
+ path="foo_dir", required=1, actual=0)])
+ self.assertRaises(exception.InsufficientDiskSpace, ipmi.IPMIPower)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_power_init_calls_already_checked(self,
+ mock_check_dir,
+ mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = True
+ ipmi.IPMIPower()
+ mock_support.assert_called_with(mock.ANY)
+ self.assertEqual(0, mock_check_dir.call_count)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_management_init_calls(self, mock_check_dir, mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = None
+
+ ipmi.IPMIManagement()
+ mock_support.assert_called_with(mock.ANY)
+ mock_check_dir.assert_called_once_with()
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_management_init_calls_already_checked(self,
+ mock_check_dir,
+ mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = False
+
+ ipmi.IPMIManagement()
+ mock_support.assert_called_with(mock.ANY)
+ self.assertEqual(0, mock_check_dir.call_count)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_vendor_passthru_init_calls(self, mock_check_dir, mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = None
+ ipmi.VendorPassthru()
+ mock_support.assert_called_with(mock.ANY)
+ mock_check_dir.assert_called_once_with()
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_vendor_passthru_init_calls_already_checked(self,
+ mock_check_dir,
+ mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = True
+ ipmi.VendorPassthru()
+ mock_support.assert_called_with(mock.ANY)
+ self.assertEqual(0, mock_check_dir.call_count)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_console_init_calls(self, mock_check_dir, mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = None
+ ipmi.IPMIShellinaboxConsole()
+ mock_support.assert_called_with(mock.ANY)
+ mock_check_dir.assert_called_once_with()
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'check_dir', autospec=True)
+ def test_console_init_calls_already_checked(self,
+ mock_check_dir,
+ mock_support):
+ mock_support.return_value = True
+ ipmi.TMP_DIR_CHECKED = True
+ ipmi.IPMIShellinaboxConsole()
+ mock_support.assert_called_with(mock.ANY)
+ self.assertEqual(0, mock_check_dir.call_count)
+
+
+@mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+@mock.patch.object(subprocess, 'check_call', autospec=True)
+class IPMIToolCheckOptionSupportedTestCase(base.TestCase):
+
+ def test_check_timing_pass(self, mock_chkcall, mock_support):
+ mock_chkcall.return_value = (None, None)
+ mock_support.return_value = None
+ expected = [mock.call('timing'),
+ mock.call('timing', True)]
+
+ ipmi._check_option_support(['timing'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_timing_fail(self, mock_chkcall, mock_support):
+ mock_chkcall.side_effect = iter(
+ [subprocess.CalledProcessError(1, 'ipmitool')])
+ mock_support.return_value = None
+ expected = [mock.call('timing'),
+ mock.call('timing', False)]
+
+ ipmi._check_option_support(['timing'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_timing_no_ipmitool(self, mock_chkcall, mock_support):
+ mock_chkcall.side_effect = iter([OSError()])
+ mock_support.return_value = None
+ expected = [mock.call('timing')]
+
+ self.assertRaises(OSError, ipmi._check_option_support, ['timing'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_single_bridge_pass(self, mock_chkcall, mock_support):
+ mock_chkcall.return_value = (None, None)
+ mock_support.return_value = None
+ expected = [mock.call('single_bridge'),
+ mock.call('single_bridge', True)]
+
+ ipmi._check_option_support(['single_bridge'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_single_bridge_fail(self, mock_chkcall, mock_support):
+ mock_chkcall.side_effect = iter(
+ [subprocess.CalledProcessError(1, 'ipmitool')])
+ mock_support.return_value = None
+ expected = [mock.call('single_bridge'),
+ mock.call('single_bridge', False)]
+
+ ipmi._check_option_support(['single_bridge'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_single_bridge_no_ipmitool(self, mock_chkcall,
+ mock_support):
+ mock_chkcall.side_effect = iter([OSError()])
+ mock_support.return_value = None
+ expected = [mock.call('single_bridge')]
+
+ self.assertRaises(OSError, ipmi._check_option_support,
+ ['single_bridge'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_dual_bridge_pass(self, mock_chkcall, mock_support):
+ mock_chkcall.return_value = (None, None)
+ mock_support.return_value = None
+ expected = [mock.call('dual_bridge'),
+ mock.call('dual_bridge', True)]
+
+ ipmi._check_option_support(['dual_bridge'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_dual_bridge_fail(self, mock_chkcall, mock_support):
+ mock_chkcall.side_effect = iter(
+ [subprocess.CalledProcessError(1, 'ipmitool')])
+ mock_support.return_value = None
+ expected = [mock.call('dual_bridge'),
+ mock.call('dual_bridge', False)]
+
+ ipmi._check_option_support(['dual_bridge'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_dual_bridge_no_ipmitool(self, mock_chkcall, mock_support):
+ mock_chkcall.side_effect = iter([OSError()])
+ mock_support.return_value = None
+ expected = [mock.call('dual_bridge')]
+
+ self.assertRaises(OSError, ipmi._check_option_support,
+ ['dual_bridge'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_all_options_pass(self, mock_chkcall, mock_support):
+ mock_chkcall.return_value = (None, None)
+ mock_support.return_value = None
+ expected = [
+ mock.call('timing'), mock.call('timing', True),
+ mock.call('single_bridge'),
+ mock.call('single_bridge', True),
+ mock.call('dual_bridge'), mock.call('dual_bridge', True)]
+
+ ipmi._check_option_support(['timing', 'single_bridge', 'dual_bridge'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_all_options_fail(self, mock_chkcall, mock_support):
+ options = ['timing', 'single_bridge', 'dual_bridge']
+ mock_chkcall.side_effect = iter(
+ [subprocess.CalledProcessError(1, 'ipmitool')] * len(options))
+ mock_support.return_value = None
+ expected = [
+ mock.call('timing'), mock.call('timing', False),
+ mock.call('single_bridge'),
+ mock.call('single_bridge', False),
+ mock.call('dual_bridge'),
+ mock.call('dual_bridge', False)]
+
+ ipmi._check_option_support(options)
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+
+ def test_check_all_options_no_ipmitool(self, mock_chkcall, mock_support):
+ mock_chkcall.side_effect = iter([OSError()])
+ mock_support.return_value = None
+ # the exception is raised as soon as ipmitool is not found for a command
+ expected = [mock.call('timing')]
+
+ self.assertRaises(OSError, ipmi._check_option_support,
+ ['timing', 'single_bridge', 'dual_bridge'])
+ self.assertTrue(mock_chkcall.called)
+ self.assertEqual(expected, mock_support.call_args_list)
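
Every test in IPMIToolCheckOptionSupportedTestCase asserts the same pattern: probe each option once with ipmitool, record the outcome via _is_option_supported(option, bool), treat CalledProcessError as "not supported", and let OSError (ipmitool missing) escape to the caller. A sketch of that loop; the probe command line itself is a placeholder, since these tests only mock subprocess.check_call and never inspect its arguments.

import os
import subprocess

from ironic.drivers.modules import ipmitool as ipmi


def check_option_support_sketch(options):
    """Probe ipmitool once per option and record whether it is supported."""
    for opt in options:
        if ipmi._is_option_supported(opt) is not None:
            # Assumed short-circuit: this option was already probed.
            continue
        try:
            # Hypothetical probe invocation; only the use of
            # subprocess.check_call is visible from the tests.
            with open(os.devnull, 'wb') as devnull:
                subprocess.check_call(['ipmitool', '-h'],
                                      stdout=devnull, stderr=devnull)
        except subprocess.CalledProcessError:
            ipmi._is_option_supported(opt, False)
        else:
            ipmi._is_option_supported(opt, True)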
+
+
+@mock.patch.object(time, 'sleep', autospec=True)
+class IPMIToolPrivateMethodTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IPMIToolPrivateMethodTestCase, self).setUp()
+ self.node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ipmitool',
+ driver_info=INFO_DICT)
+ self.info = ipmi._parse_driver_info(self.node)
+
+ def _test__make_password_file(self, mock_sleep, input_password,
+ exception_to_raise=None):
+ pw_file = None
+ try:
+ with ipmi._make_password_file(input_password) as pw_file:
+ if exception_to_raise is not None:
+ raise exception_to_raise
+ self.assertTrue(os.path.isfile(pw_file))
+ self.assertEqual(0o600, os.stat(pw_file)[stat.ST_MODE] & 0o777)
+ with open(pw_file, "r") as f:
+ password = f.read()
+ self.assertEqual(str(input_password), password)
+ finally:
+ if pw_file is not None:
+ self.assertFalse(os.path.isfile(pw_file))
+
+ def test__make_password_file_str_password(self, mock_sleep):
+ self._test__make_password_file(mock_sleep, self.info.get('password'))
+
+ def test__make_password_file_with_numeric_password(self, mock_sleep):
+ self._test__make_password_file(mock_sleep, 12345)
+
+ def test__make_password_file_caller_exception(self, mock_sleep):
+ # Test caller raising exception
+ result = self.assertRaises(
+ ValueError,
+ self._test__make_password_file,
+ mock_sleep, 12345, ValueError('we should fail'))
+ self.assertEqual('we should fail', six.text_type(result))
+
+ @mock.patch.object(tempfile, 'NamedTemporaryFile',
+ new=mock.MagicMock(side_effect=OSError('Test Error')))
+ def test__make_password_file_tempfile_known_exception(self, mock_sleep):
+ # Test OSError exception in _make_password_file for
+ # tempfile.NamedTemporaryFile
+ self.assertRaises(
+ exception.PasswordFileFailedToCreate,
+ self._test__make_password_file, mock_sleep, 12345)
+
+ @mock.patch.object(
+ tempfile, 'NamedTemporaryFile',
+ new=mock.MagicMock(side_effect=OverflowError('Test Error')))
+ def test__make_password_file_tempfile_unknown_exception(self, mock_sleep):
+ # Test exception in _make_password_file for tempfile.NamedTemporaryFile
+ result = self.assertRaises(
+ OverflowError,
+ self._test__make_password_file, mock_sleep, 12345)
+ self.assertEqual('Test Error', six.text_type(result))
+
+ def test__make_password_file_write_exception(self, mock_sleep):
+ # Test exception in _make_password_file for write()
+ mock_namedtemp = mock.mock_open(mock.MagicMock(name='JLV'))
+ with mock.patch('tempfile.NamedTemporaryFile', mock_namedtemp):
+ mock_filehandle = mock_namedtemp.return_value
+ mock_write = mock_filehandle.write
+ mock_write.side_effect = OSError('Test 2 Error')
+ self.assertRaises(
+ exception.PasswordFileFailedToCreate,
+ self._test__make_password_file, mock_sleep, 12345)
+
+ def test__parse_driver_info(self, mock_sleep):
+ # make sure we get back the expected things
+ _OPTIONS = ['address', 'username', 'password', 'uuid']
+ for option in _OPTIONS:
+ self.assertIsNotNone(self.info.get(option))
+
+ info = dict(INFO_DICT)
+
+ # test the default value for 'priv_level'
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ ret = ipmi._parse_driver_info(node)
+ self.assertEqual('ADMINISTRATOR', ret['priv_level'])
+
+ # ipmi_username / ipmi_password are not mandatory
+ del info['ipmi_username']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ ipmi._parse_driver_info(node)
+ del info['ipmi_password']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ ipmi._parse_driver_info(node)
+
+ # make sure an error is raised when ipmi_address is missing
+ info = dict(INFO_DICT)
+ del info['ipmi_address']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ ipmi._parse_driver_info,
+ node)
+
+ # test the invalid priv_level value
+ info = dict(INFO_DICT)
+ info['ipmi_priv_level'] = 'ABCD'
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.InvalidParameterValue,
+ ipmi._parse_driver_info,
+ node)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ def test__parse_driver_info_with_invalid_bridging_type(
+ self, mock_support, mock_sleep):
+ info = BRIDGE_INFO_DICT.copy()
+ # make sure an error is raised when ipmi_bridging has an unexpected value
+ info['ipmi_bridging'] = 'junk'
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.InvalidParameterValue,
+ ipmi._parse_driver_info,
+ node)
+ self.assertFalse(mock_support.called)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ def test__parse_driver_info_with_no_bridging(
+ self, mock_support, mock_sleep):
+ _OPTIONS = ['address', 'username', 'password', 'uuid']
+ _BRIDGING_OPTIONS = ['local_address', 'transit_channel',
+ 'transit_address',
+ 'target_channel', 'target_address']
+ info = BRIDGE_INFO_DICT.copy()
+ info['ipmi_bridging'] = 'no'
+ node = obj_utils.get_test_node(self.context, driver='fake_ipmitool',
+ driver_info=info)
+ ret = ipmi._parse_driver_info(node)
+
+ # ensure that _is_option_supported was not called
+ self.assertFalse(mock_support.called)
+ # check if we got all the required options
+ for option in _OPTIONS:
+ self.assertIsNotNone(ret[option])
+ # test the default value for 'priv_level'
+ self.assertEqual('ADMINISTRATOR', ret['priv_level'])
+
+ # check if bridging parameters were set to None
+ for option in _BRIDGING_OPTIONS:
+ self.assertIsNone(ret[option])
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ def test__parse_driver_info_with_dual_bridging_pass(
+ self, mock_support, mock_sleep):
+ _OPTIONS = ['address', 'username', 'password', 'uuid',
+ 'local_address', 'transit_channel', 'transit_address',
+ 'target_channel', 'target_address']
+ node = obj_utils.get_test_node(self.context, driver='fake_ipmitool',
+ driver_info=BRIDGE_INFO_DICT)
+
+ expected = [mock.call('dual_bridge')]
+
+ # test dual bridging and make sure we get back the expected result
+ mock_support.return_value = True
+ ret = ipmi._parse_driver_info(node)
+ self.assertEqual(expected, mock_support.call_args_list)
+ for option in _OPTIONS:
+ self.assertIsNotNone(ret[option])
+ # test the default value for 'priv_level'
+ self.assertEqual('ADMINISTRATOR', ret['priv_level'])
+
+ info = BRIDGE_INFO_DICT.copy()
+ # ipmi_local_address / ipmi_username / ipmi_password are not mandatory
+ for optional_arg in ['ipmi_local_address', 'ipmi_username',
+ 'ipmi_password']:
+ del info[optional_arg]
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ ipmi._parse_driver_info(node)
+ self.assertEqual(mock.call('dual_bridge'), mock_support.call_args)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ def test__parse_driver_info_with_dual_bridging_not_supported(
+ self, mock_support, mock_sleep):
+ node = obj_utils.get_test_node(self.context, driver='fake_ipmitool',
+ driver_info=BRIDGE_INFO_DICT)
+ # if dual bridging is not supported, check that an error is raised
+ mock_support.return_value = False
+ self.assertRaises(exception.InvalidParameterValue,
+ ipmi._parse_driver_info, node)
+ mock_support.assert_called_once_with('dual_bridge')
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ def test__parse_driver_info_with_dual_bridging_missing_parameters(
+ self, mock_support, mock_sleep):
+ info = BRIDGE_INFO_DICT.copy()
+ mock_support.return_value = True
+ # make sure an error is raised when dual bridging is selected and the
+ # required parameters for dual bridging are not provided
+ for param in ['ipmi_transit_channel', 'ipmi_target_address',
+ 'ipmi_transit_address', 'ipmi_target_channel']:
+ del info[param]
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ ipmi._parse_driver_info, node)
+ self.assertEqual(mock.call('dual_bridge'),
+ mock_support.call_args)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ def test__parse_driver_info_with_single_bridging_pass(
+ self, mock_support, mock_sleep):
+ _OPTIONS = ['address', 'username', 'password', 'uuid',
+ 'local_address', 'target_channel', 'target_address']
+
+ info = BRIDGE_INFO_DICT.copy()
+ info['ipmi_bridging'] = 'single'
+ node = obj_utils.get_test_node(self.context, driver='fake_ipmitool',
+ driver_info=info)
+
+ expected = [mock.call('single_bridge')]
+
+ # test single bridging and make sure we get back the expected things
+ mock_support.return_value = True
+ ret = ipmi._parse_driver_info(node)
+ self.assertEqual(expected, mock_support.call_args_list)
+ for option in _OPTIONS:
+ self.assertIsNotNone(ret[option])
+ # test the default value for 'priv_level'
+ self.assertEqual('ADMINISTRATOR', ret['priv_level'])
+
+ # check if dual bridge params are set to None
+ self.assertIsNone(ret['transit_channel'])
+ self.assertIsNone(ret['transit_address'])
+
+ # ipmi_local_address / ipmi_username / ipmi_password are not mandatory
+ for optional_arg in ['ipmi_local_address', 'ipmi_username',
+ 'ipmi_password']:
+ del info[optional_arg]
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ ipmi._parse_driver_info(node)
+ self.assertEqual(mock.call('single_bridge'),
+ mock_support.call_args)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ def test__parse_driver_info_with_single_bridging_not_supported(
+ self, mock_support, mock_sleep):
+ info = BRIDGE_INFO_DICT.copy()
+ info['ipmi_bridging'] = 'single'
+ node = obj_utils.get_test_node(self.context, driver='fake_ipmitool',
+ driver_info=info)
+
+ # if single bridging is not supported, check that an error is raised
+ mock_support.return_value = False
+ self.assertRaises(exception.InvalidParameterValue,
+ ipmi._parse_driver_info, node)
+ mock_support.assert_called_once_with('single_bridge')
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ def test__parse_driver_info_with_single_bridging_missing_parameters(
+ self, mock_support, mock_sleep):
+ info = dict(BRIDGE_INFO_DICT)
+ info['ipmi_bridging'] = 'single'
+ mock_support.return_value = True
+ # make sure an error is raised when single bridging is selected and the
+ # required parameters for single bridging are not provided
+ for param in ['ipmi_target_channel', 'ipmi_target_address']:
+ del info[param]
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ ipmi._parse_driver_info,
+ node)
+ self.assertEqual(mock.call('single_bridge'),
+ mock_support.call_args)
+
+ def test__parse_driver_info_ipmi_prot_version_1_5(self, mock_sleep):
+ info = dict(INFO_DICT)
+ info['ipmi_protocol_version'] = '1.5'
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ ret = ipmi._parse_driver_info(node)
+ self.assertEqual('1.5', ret['protocol_version'])
+
+ def test__parse_driver_info_invalid_ipmi_prot_version(self, mock_sleep):
+ info = dict(INFO_DICT)
+ info['ipmi_protocol_version'] = '9000'
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.InvalidParameterValue,
+ ipmi._parse_driver_info, node)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_first_call_to_address(self, mock_exec, mock_pwf,
+ mock_support, mock_sleep):
+ ipmi.LAST_CMD_TIME = {}
+ pw_file_handle = tempfile.NamedTemporaryFile()
+ pw_file = pw_file_handle.name
+ file_handle = open(pw_file, "w")
+ args = [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle,
+ 'A', 'B', 'C',
+ ]
+
+ mock_support.return_value = False
+ mock_pwf.return_value = file_handle
+ mock_exec.return_value = (None, None)
+
+ ipmi._exec_ipmitool(self.info, 'A B C')
+
+ mock_support.assert_called_once_with('timing')
+ mock_pwf.assert_called_once_with(self.info['password'])
+ mock_exec.assert_called_once_with(*args)
+ self.assertFalse(mock_sleep.called)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_second_call_to_address_sleep(
+ self, mock_exec, mock_pwf, mock_support, mock_sleep):
+ ipmi.LAST_CMD_TIME = {}
+ pw_file_handle1 = tempfile.NamedTemporaryFile()
+ pw_file1 = pw_file_handle1.name
+ file_handle1 = open(pw_file1, "w")
+ pw_file_handle2 = tempfile.NamedTemporaryFile()
+ pw_file2 = pw_file_handle2.name
+ file_handle2 = open(pw_file2, "w")
+ args = [[
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle1,
+ 'A', 'B', 'C',
+ ], [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle2,
+ 'D', 'E', 'F',
+ ]]
+
+ expected = [mock.call('timing'),
+ mock.call('timing')]
+ mock_support.return_value = False
+ mock_pwf.side_effect = iter([file_handle1, file_handle2])
+ mock_exec.side_effect = iter([(None, None), (None, None)])
+
+ ipmi._exec_ipmitool(self.info, 'A B C')
+ mock_exec.assert_called_with(*args[0])
+
+ ipmi._exec_ipmitool(self.info, 'D E F')
+ self.assertTrue(mock_sleep.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+ mock_exec.assert_called_with(*args[1])
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_second_call_to_address_no_sleep(
+ self, mock_exec, mock_pwf, mock_support, mock_sleep):
+ ipmi.LAST_CMD_TIME = {}
+ pw_file_handle1 = tempfile.NamedTemporaryFile()
+ pw_file1 = pw_file_handle1.name
+ file_handle1 = open(pw_file1, "w")
+ pw_file_handle2 = tempfile.NamedTemporaryFile()
+ pw_file2 = pw_file_handle2.name
+ file_handle2 = open(pw_file2, "w")
+ args = [[
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle1,
+ 'A', 'B', 'C',
+ ], [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle2,
+ 'D', 'E', 'F',
+ ]]
+
+ expected = [mock.call('timing'),
+ mock.call('timing')]
+ mock_support.return_value = False
+ mock_pwf.side_effect = iter([file_handle1, file_handle2])
+ mock_exec.side_effect = iter([(None, None), (None, None)])
+
+ ipmi._exec_ipmitool(self.info, 'A B C')
+ mock_exec.assert_called_with(*args[0])
+ # act like enough time has passed
+ ipmi.LAST_CMD_TIME[self.info['address']] = (
+ time.time() - CONF.ipmi.min_command_interval)
+ ipmi._exec_ipmitool(self.info, 'D E F')
+ self.assertFalse(mock_sleep.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+ mock_exec.assert_called_with(*args[1])
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_two_calls_to_diff_address(
+ self, mock_exec, mock_pwf, mock_support, mock_sleep):
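+ # Commands to two different BMC addresses are throttled independently,
+ # so no sleep is expected between them.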
+ ipmi.LAST_CMD_TIME = {}
+ pw_file_handle1 = tempfile.NamedTemporaryFile()
+ pw_file1 = pw_file_handle1.name
+ file_handle1 = open(pw_file1, "w")
+ pw_file_handle2 = tempfile.NamedTemporaryFile()
+ pw_file2 = pw_file_handle2.name
+ file_handle2 = open(pw_file2, "w")
+ args = [[
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle1,
+ 'A', 'B', 'C',
+ ], [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', '127.127.127.127',
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle2,
+ 'D', 'E', 'F',
+ ]]
+
+ expected = [mock.call('timing'),
+ mock.call('timing')]
+ mock_support.return_value = False
+ mock_pwf.side_effect = iter([file_handle1, file_handle2])
+ mock_exec.side_effect = iter([(None, None), (None, None)])
+
+ ipmi._exec_ipmitool(self.info, 'A B C')
+ mock_exec.assert_called_with(*args[0])
+ self.info['address'] = '127.127.127.127'
+ ipmi._exec_ipmitool(self.info, 'D E F')
+ self.assertFalse(mock_sleep.called)
+ self.assertEqual(expected, mock_support.call_args_list)
+ mock_exec.assert_called_with(*args[1])
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_without_timing(
+ self, mock_exec, mock_pwf, mock_support, mock_sleep):
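+ # Without timing support, no -R/-N timing options should appear in the
+ # ipmitool command line.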
+ pw_file_handle = tempfile.NamedTemporaryFile()
+ pw_file = pw_file_handle.name
+ file_handle = open(pw_file, "w")
+
+ args = [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle,
+ 'A', 'B', 'C',
+ ]
+
+ mock_support.return_value = False
+ mock_pwf.return_value = file_handle
+ mock_exec.return_value = (None, None)
+
+ ipmi._exec_ipmitool(self.info, 'A B C')
+
+ mock_support.assert_called_once_with('timing')
+ mock_pwf.assert_called_once_with(self.info['password'])
+ mock_exec.assert_called_once_with(*args)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_with_timing(
+ self, mock_exec, mock_pwf, mock_support, mock_sleep):
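+ # With timing support, the -R and -N timing options are expected on the
+ # ipmitool command line.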
+ pw_file_handle = tempfile.NamedTemporaryFile()
+ pw_file = pw_file_handle.name
+ file_handle = open(pw_file, "w")
+ args = [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-R', '12',
+ '-N', '5',
+ '-f', file_handle,
+ 'A', 'B', 'C',
+ ]
+
+ mock_support.return_value = True
+ mock_pwf.return_value = file_handle
+ mock_exec.return_value = (None, None)
+
+ ipmi._exec_ipmitool(self.info, 'A B C')
+
+ mock_support.assert_called_once_with('timing')
+ mock_pwf.assert_called_once_with(self.info['password'])
+ mock_exec.assert_called_once_with(*args)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_without_username(
+ self, mock_exec, mock_pwf, mock_support, mock_sleep):
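+ # With no username in the driver info, the -U option should be omitted.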
+ self.info['username'] = None
+ pw_file_handle = tempfile.NamedTemporaryFile()
+ pw_file = pw_file_handle.name
+ file_handle = open(pw_file, "w")
+ args = [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-f', file_handle,
+ 'A', 'B', 'C',
+ ]
+
+ mock_support.return_value = False
+ mock_pwf.return_value = file_handle
+ mock_exec.return_value = (None, None)
+ ipmi._exec_ipmitool(self.info, 'A B C')
+ mock_support.assert_called_once_with('timing')
+ self.assertTrue(mock_pwf.called)
+ mock_exec.assert_called_once_with(*args)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_with_dual_bridging(self,
+ mock_exec, mock_pwf,
+ mock_support,
+ mock_sleep):
+
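+ # Dual bridging adds the -m/-B/-T/-b/-t addressing options to the
+ # ipmitool command line.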
+ node = obj_utils.get_test_node(self.context, driver='fake_ipmitool',
+ driver_info=BRIDGE_INFO_DICT)
+ # Report the dual bridge capability as supported while
+ # _parse_driver_info checks it
+ mock_support.return_value = True
+ info = ipmi._parse_driver_info(node)
+ pw_file_handle = tempfile.NamedTemporaryFile()
+ pw_file = pw_file_handle.name
+ file_handle = open(pw_file, "w")
+ args = [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', info['address'],
+ '-L', info['priv_level'],
+ '-U', info['username'],
+ '-m', info['local_address'],
+ '-B', info['transit_channel'],
+ '-T', info['transit_address'],
+ '-b', info['target_channel'],
+ '-t', info['target_address'],
+ '-f', file_handle,
+ 'A', 'B', 'C',
+ ]
+
+ expected = [mock.call('dual_bridge'),
+ mock.call('timing')]
+ # Report the timing option as unsupported so no -R/-N options are added
+ mock_support.return_value = False
+ mock_pwf.return_value = file_handle
+ mock_exec.return_value = (None, None)
+ ipmi._exec_ipmitool(info, 'A B C')
+ self.assertEqual(expected, mock_support.call_args_list)
+ self.assertTrue(mock_pwf.called)
+ mock_exec.assert_called_once_with(*args)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_with_single_bridging(self,
+ mock_exec, mock_pwf,
+ mock_support,
+ mock_sleep):
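+ # Single bridging uses the -m/-b/-t options but no transit (-B/-T)
+ # options.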
+ single_bridge_info = dict(BRIDGE_INFO_DICT)
+ single_bridge_info['ipmi_bridging'] = 'single'
+ node = obj_utils.get_test_node(self.context, driver='fake_ipmitool',
+ driver_info=single_bridge_info)
+ # Report the single bridge capability as supported while
+ # _parse_driver_info checks it
+ mock_support.return_value = True
+ info = ipmi._parse_driver_info(node)
+ info['transit_channel'] = info['transit_address'] = None
+
+ pw_file_handle = tempfile.NamedTemporaryFile()
+ pw_file = pw_file_handle.name
+ file_handle = open(pw_file, "w")
+ args = [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', info['address'],
+ '-L', info['priv_level'],
+ '-U', info['username'],
+ '-m', info['local_address'],
+ '-b', info['target_channel'],
+ '-t', info['target_address'],
+ '-f', file_handle,
+ 'A', 'B', 'C',
+ ]
+
+ expected = [mock.call('single_bridge'),
+ mock.call('timing')]
+ # Report the timing option as unsupported so no -R/-N options are added
+ mock_support.return_value = False
+ mock_pwf.return_value = file_handle
+ mock_exec.return_value = (None, None)
+ ipmi._exec_ipmitool(info, 'A B C')
+ self.assertEqual(expected, mock_support.call_args_list)
+ self.assertTrue(mock_pwf.called)
+ mock_exec.assert_called_once_with(*args)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_exception(
+ self, mock_exec, mock_pwf, mock_support, mock_sleep):
+ pw_file_handle = tempfile.NamedTemporaryFile()
+ pw_file = pw_file_handle.name
+ file_handle = open(pw_file, "w")
+ args = [
+ 'ipmitool',
+ '-I', 'lanplus',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', file_handle,
+ 'A', 'B', 'C',
+ ]
+
+ mock_support.return_value = False
+ mock_pwf.return_value = file_handle
+ mock_exec.side_effect = iter([processutils.ProcessExecutionError("x")])
+ self.assertRaises(processutils.ProcessExecutionError,
+ ipmi._exec_ipmitool,
+ self.info, 'A B C')
+ mock_support.assert_called_once_with('timing')
+ mock_pwf.assert_called_once_with(self.info['password'])
+ mock_exec.assert_called_once_with(*args)
+ self.assertEqual(1, mock_exec.call_count)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_exception_retry(
+ self, mock_exec, mock_support, mock_sleep):
+
+ ipmi.LAST_CMD_TIME = {}
+ mock_support.return_value = False
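+ # First attempt fails with a retryable "insufficient resources" error;
+ # the second attempt succeeds.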
+ mock_exec.side_effect = iter([
+ processutils.ProcessExecutionError(
+ stderr="insufficient resources for session"
+ ),
+ (None, None)
+ ])
+
+ # Set min_command_interval/retry_timeout so _exec_ipmitool has time
+ # for exactly one retry (two attempts in total).
+ self.config(min_command_interval=1, group='ipmi')
+ self.config(retry_timeout=2, group='ipmi')
+
+ ipmi._exec_ipmitool(self.info, 'A B C')
+
+ mock_support.assert_called_once_with('timing')
+ self.assertEqual(2, mock_exec.call_count)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_exception_retries_exceeded(
+ self, mock_exec, mock_support, mock_sleep):
+
+ ipmi.LAST_CMD_TIME = {}
+ mock_support.return_value = False
+
+ mock_exec.side_effect = iter([processutils.ProcessExecutionError(
+ stderr="insufficient resources for session"
+ )])
+
+ # Set min_command_interval/retry_timeout so _exec_ipmitool gives up
+ # after a single attempt.
+ self.config(min_command_interval=1, group='ipmi')
+ self.config(retry_timeout=1, group='ipmi')
+
+ self.assertRaises(processutils.ProcessExecutionError,
+ ipmi._exec_ipmitool,
+ self.info, 'A B C')
+ mock_support.assert_called_once_with('timing')
+ self.assertEqual(1, mock_exec.call_count)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_exception_non_retryable_failure(
+ self, mock_exec, mock_support, mock_sleep):
+
+ ipmi.LAST_CMD_TIME = {}
+ mock_support.return_value = False
+
+ # Return a retryable error first, then a non-retryable one, so
+ # _exec_ipmitool retries exactly once before re-raising the failure.
+ mock_exec.side_effect = iter([
+ processutils.ProcessExecutionError(
+ stderr="insufficient resources for session"
+ ),
+ processutils.ProcessExecutionError(
+ stderr="Unknown"
+ ),
+ ])
+
+ # Set min_command_interval/retry_timeout so _exec_ipmitool would be
+ # allowed up to three attempts.
+ self.config(min_command_interval=1, group='ipmi')
+ self.config(retry_timeout=3, group='ipmi')
+
+ self.assertRaises(processutils.ProcessExecutionError,
+ ipmi._exec_ipmitool,
+ self.info, 'A B C')
+ mock_support.assert_called_once_with('timing')
+ self.assertEqual(2, mock_exec.call_count)
+
+ @mock.patch.object(ipmi, '_is_option_supported', autospec=True)
+ @mock.patch.object(ipmi, '_make_password_file', autospec=True)
+ @mock.patch.object(utils, 'execute', autospec=True)
+ def test__exec_ipmitool_IPMI_version_1_5(
+ self, mock_exec, mock_pwf, mock_support, mock_sleep):
+ self.info['protocol_version'] = '1.5'
+ # Assert it uses "-I lan" (1.5) instead of "-I lanplus" (2.0)
+ args = [
+ 'ipmitool',
+ '-I', 'lan',
+ '-H', self.info['address'],
+ '-L', self.info['priv_level'],
+ '-U', self.info['username'],
+ '-f', mock.ANY,
+ 'A', 'B', 'C',
+ ]
+
+ mock_support.return_value = False
+ mock_exec.return_value = (None, None)
+ ipmi._exec_ipmitool(self.info, 'A B C')
+ mock_support.assert_called_once_with('timing')
+ self.assertTrue(mock_pwf.called)
+ mock_exec.assert_called_once_with(*args)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test__power_status_on(self, mock_exec, mock_sleep):
+ mock_exec.return_value = ["Chassis Power is on\n", None]
+
+ state = ipmi._power_status(self.info)
+
+ mock_exec.assert_called_once_with(self.info, "power status")
+ self.assertEqual(states.POWER_ON, state)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test__power_status_off(self, mock_exec, mock_sleep):
+ mock_exec.return_value = ["Chassis Power is off\n", None]
+
+ state = ipmi._power_status(self.info)
+
+ mock_exec.assert_called_once_with(self.info, "power status")
+ self.assertEqual(states.POWER_OFF, state)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test__power_status_error(self, mock_exec, mock_sleep):
+ mock_exec.return_value = ["Chassis Power is badstate\n", None]
+
+ state = ipmi._power_status(self.info)
+
+ mock_exec.assert_called_once_with(self.info, "power status")
+ self.assertEqual(states.ERROR, state)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test__power_status_exception(self, mock_exec, mock_sleep):
+ mock_exec.side_effect = iter(
+ [processutils.ProcessExecutionError("error")])
+ self.assertRaises(exception.IPMIFailure,
+ ipmi._power_status,
+ self.info)
+ mock_exec.assert_called_once_with(self.info, "power status")
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ @mock.patch('eventlet.greenthread.sleep', autospec=True)
+ def test__power_on_max_retries(self, sleep_mock, mock_exec, mock_sleep):
+ self.config(retry_timeout=2, group='ipmi')
+
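+ # "power status" keeps reporting the chassis as off after "power on",
+ # so _power_on should give up and return ERROR once the retries run out.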
+ def side_effect(driver_info, command):
+ resp_dict = {"power status": ["Chassis Power is off\n", None],
+ "power on": [None, None]}
+ return resp_dict.get(command, ["Bad\n", None])
+
+ mock_exec.side_effect = side_effect
+
+ expected = [mock.call(self.info, "power on"),
+ mock.call(self.info, "power status"),
+ mock.call(self.info, "power status")]
+
+ state = ipmi._power_on(self.info)
+
+ self.assertEqual(mock_exec.call_args_list, expected)
+ self.assertEqual(states.ERROR, state)
+
+
+class IPMIToolDriverTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IPMIToolDriverTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_ipmitool")
+ self.driver = driver_factory.get_driver("fake_ipmitool")
+
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_ipmitool',
+ driver_info=INFO_DICT)
+ self.info = ipmi._parse_driver_info(self.node)
+
+ @mock.patch.object(ipmi, "_parse_driver_info", autospec=True)
+ def test_power_validate(self, mock_parse):
+ node = obj_utils.get_test_node(self.context, driver='fake_ipmitool',
+ driver_info=INFO_DICT)
+ mock_parse.return_value = {}
+
+ with task_manager.acquire(self.context, node.uuid) as task:
+ task.driver.power.validate(task)
+ mock_parse.assert_called_once_with(mock.ANY)
+
+ def test_get_properties(self):
+ expected = ipmi.COMMON_PROPERTIES
+ self.assertEqual(expected, self.driver.power.get_properties())
+
+ expected = list(ipmi.COMMON_PROPERTIES) + list(ipmi.CONSOLE_PROPERTIES)
+ self.assertEqual(sorted(expected),
+ sorted(self.driver.console.get_properties().keys()))
+ self.assertEqual(sorted(expected),
+ sorted(self.driver.get_properties().keys()))
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_get_power_state(self, mock_exec):
+ returns = iter([["Chassis Power is off\n", None],
+ ["Chassis Power is on\n", None],
+ ["\n", None]])
+ expected = [mock.call(self.info, "power status"),
+ mock.call(self.info, "power status"),
+ mock.call(self.info, "power status")]
+ mock_exec.side_effect = returns
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pstate = self.driver.power.get_power_state(task)
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ pstate = self.driver.power.get_power_state(task)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ pstate = self.driver.power.get_power_state(task)
+ self.assertEqual(states.ERROR, pstate)
+
+ self.assertEqual(mock_exec.call_args_list, expected)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_get_power_state_exception(self, mock_exec):
+ mock_exec.side_effect = iter(
+ [processutils.ProcessExecutionError("error")])
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.IPMIFailure,
+ self.driver.power.get_power_state,
+ task)
+ mock_exec.assert_called_once_with(self.info, "power status")
+
+ @mock.patch.object(ipmi, '_power_on', autospec=True)
+ @mock.patch.object(ipmi, '_power_off', autospec=True)
+ def test_set_power_on_ok(self, mock_off, mock_on):
+ self.config(retry_timeout=0, group='ipmi')
+
+ mock_on.return_value = states.POWER_ON
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.power.set_power_state(task,
+ states.POWER_ON)
+
+ mock_on.assert_called_once_with(self.info)
+ self.assertFalse(mock_off.called)
+
+ @mock.patch.object(driver_utils, 'ensure_next_boot_device', autospec=True)
+ @mock.patch.object(ipmi, '_power_on', autospec=True)
+ @mock.patch.object(ipmi, '_power_off', autospec=True)
+ def test_set_power_on_with_next_boot(self, mock_off, mock_on,
+ mock_next_boot):
+ self.config(retry_timeout=0, group='ipmi')
+
+ mock_on.return_value = states.POWER_ON
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.power.set_power_state(task,
+ states.POWER_ON)
+ mock_next_boot.assert_called_once_with(task, self.info)
+
+ mock_on.assert_called_once_with(self.info)
+ self.assertFalse(mock_off.called)
+
+ @mock.patch.object(ipmi, '_power_on', autospec=True)
+ @mock.patch.object(ipmi, '_power_off', autospec=True)
+ def test_set_power_off_ok(self, mock_off, mock_on):
+ self.config(retry_timeout=0, group='ipmi')
+
+ mock_off.return_value = states.POWER_OFF
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.power.set_power_state(task,
+ states.POWER_OFF)
+
+ mock_off.assert_called_once_with(self.info)
+ self.assertFalse(mock_on.called)
+
+ @mock.patch.object(ipmi, '_power_on', autospec=True)
+ @mock.patch.object(ipmi, '_power_off', autospec=True)
+ def test_set_power_on_fail(self, mock_off, mock_on):
+ self.config(retry_timeout=0, group='ipmi')
+
+ mock_on.return_value = states.ERROR
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ self.driver.power.set_power_state,
+ task,
+ states.POWER_ON)
+
+ mock_on.assert_called_once_with(self.info)
+ self.assertFalse(mock_off.called)
+
+ def test_set_power_invalid_state(self):
+ with task_manager.acquire(self.context, self.node['uuid']) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.power.set_power_state,
+ task,
+ "fake state")
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_send_raw_bytes_ok(self, mock_exec):
+ mock_exec.return_value = [None, None]
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.vendor.send_raw(task, http_method='POST',
+ raw_bytes='0x00 0x01')
+
+ mock_exec.assert_called_once_with(self.info, 'raw 0x00 0x01')
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_send_raw_bytes_fail(self, mock_exec):
+ mock_exec.side_effect = iter(
+ [exception.PasswordFileFailedToCreate('error')])
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.assertRaises(exception.IPMIFailure,
+ self.driver.vendor.send_raw,
+ task,
+ http_method='POST',
+ raw_bytes='0x00 0x01')
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test__bmc_reset_ok(self, mock_exec):
+ mock_exec.return_value = [None, None]
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.vendor.bmc_reset(task, 'POST')
+
+ mock_exec.assert_called_once_with(self.info, 'bmc reset warm')
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test__bmc_reset_cold(self, mock_exec):
+ mock_exec.return_value = [None, None]
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.vendor.bmc_reset(task, 'POST', warm=False)
+
+ mock_exec.assert_called_once_with(self.info, 'bmc reset cold')
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test__bmc_reset_fail(self, mock_exec):
+ mock_exec.side_effect = iter([processutils.ProcessExecutionError()])
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.assertRaises(exception.IPMIFailure,
+ self.driver.vendor.bmc_reset,
+ task, 'POST')
+
+ @mock.patch.object(driver_utils, 'ensure_next_boot_device', autospec=True)
+ @mock.patch.object(ipmi, '_power_off', spec_set=types.FunctionType)
+ @mock.patch.object(ipmi, '_power_on', spec_set=types.FunctionType)
+ def test_reboot_ok(self, mock_on, mock_off, mock_next_boot):
+ manager = mock.MagicMock()
+ # NOTE(rloo): if autospec is True, then manager.mock_calls is empty
+ mock_on.return_value = states.POWER_ON
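+ # Attaching both mocks to one manager lets the test assert the
+ # power_off -> power_on call ordering.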
+ manager.attach_mock(mock_off, 'power_off')
+ manager.attach_mock(mock_on, 'power_on')
+ expected = [mock.call.power_off(self.info),
+ mock.call.power_on(self.info)]
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.power.reboot(task)
+ mock_next_boot.assert_called_once_with(task, self.info)
+
+ self.assertEqual(manager.mock_calls, expected)
+
+ @mock.patch.object(ipmi, '_power_off', spec_set=types.FunctionType)
+ @mock.patch.object(ipmi, '_power_on', spec_set=types.FunctionType)
+ def test_reboot_fail(self, mock_on, mock_off):
+ manager = mock.MagicMock()
+ # NOTE(rloo): if autospec is True, then manager.mock_calls is empty
+ mock_on.return_value = states.ERROR
+ manager.attach_mock(mock_off, 'power_off')
+ manager.attach_mock(mock_on, 'power_on')
+ expected = [mock.call.power_off(self.info),
+ mock.call.power_on(self.info)]
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ self.driver.power.reboot,
+ task)
+
+ self.assertEqual(manager.mock_calls, expected)
+
+ @mock.patch.object(ipmi, '_parse_driver_info', autospec=True)
+ def test_vendor_passthru_validate__parse_driver_info_fail(self, info_mock):
+ info_mock.side_effect = iter([exception.InvalidParameterValue("bad")])
+ with task_manager.acquire(self.context, self.node['uuid']) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.vendor.validate,
+ task, method='send_raw', raw_bytes='0x00 0x01')
+ info_mock.assert_called_once_with(task.node)
+
+ def test_vendor_passthru_validate__send_raw_bytes_good(self):
+ with task_manager.acquire(self.context, self.node['uuid']) as task:
+ self.driver.vendor.validate(task,
+ method='send_raw',
+ http_method='POST',
+ raw_bytes='0x00 0x01')
+
+ def test_vendor_passthru_validate__send_raw_bytes_fail(self):
+ with task_manager.acquire(self.context, self.node['uuid']) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ self.driver.vendor.validate,
+ task, method='send_raw')
+
+ @mock.patch.object(ipmi.VendorPassthru, 'send_raw', autospec=True)
+ def test_vendor_passthru_call_send_raw_bytes(self, raw_bytes_mock):
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.driver.vendor.send_raw(task, http_method='POST',
+ raw_bytes='0x00 0x01')
+ raw_bytes_mock.assert_called_once_with(
+ self.driver.vendor, task, http_method='POST',
+ raw_bytes='0x00 0x01')
+
+ def test_vendor_passthru_validate__bmc_reset_good(self):
+ with task_manager.acquire(self.context, self.node['uuid']) as task:
+ self.driver.vendor.validate(task,
+ method='bmc_reset')
+
+ def test_vendor_passthru_validate__bmc_reset_warm_good(self):
+ with task_manager.acquire(self.context, self.node['uuid']) as task:
+ self.driver.vendor.validate(task,
+ method='bmc_reset',
+ warm=True)
+
+ def test_vendor_passthru_validate__bmc_reset_cold_good(self):
+ with task_manager.acquire(self.context, self.node['uuid']) as task:
+ self.driver.vendor.validate(task,
+ method='bmc_reset',
+ warm=False)
+
+ @mock.patch.object(ipmi.VendorPassthru, 'bmc_reset', autospec=True)
+ def test_vendor_passthru_call_bmc_reset_warm(self, bmc_mock):
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.driver.vendor.bmc_reset(task, 'POST', warm=True)
+ bmc_mock.assert_called_once_with(
+ self.driver.vendor, task, 'POST', warm=True)
+
+ @mock.patch.object(ipmi.VendorPassthru, 'bmc_reset', autospec=True)
+ def test_vendor_passthru_call_bmc_reset_cold(self, bmc_mock):
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=False) as task:
+ self.driver.vendor.bmc_reset(task, 'POST', warm=False)
+ bmc_mock.assert_called_once_with(
+ self.driver.vendor, task, 'POST', warm=False)
+
+ def test_vendor_passthru_vendor_routes(self):
+ expected = ['send_raw', 'bmc_reset']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ vendor_routes = task.driver.vendor.vendor_routes
+ self.assertIsInstance(vendor_routes, dict)
+ self.assertEqual(sorted(expected), sorted(vendor_routes))
+
+ def test_vendor_passthru_driver_routes(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ driver_routes = task.driver.vendor.driver_routes
+ self.assertIsInstance(driver_routes, dict)
+ self.assertEqual({}, driver_routes)
+
+ def test_console_validate(self):
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ task.node.driver_info['ipmi_terminal_port'] = 123
+ task.driver.console.validate(task)
+
+ def test_console_validate_missing_port(self):
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ task.node.driver_info.pop('ipmi_terminal_port', None)
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.console.validate, task)
+
+ def test_console_validate_invalid_port(self):
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ task.node.driver_info['ipmi_terminal_port'] = ''
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.console.validate, task)
+
+ def test_console_validate_wrong_ipmi_protocol_version(self):
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ task.node.driver_info['ipmi_terminal_port'] = 123
+ task.node.driver_info['ipmi_protocol_version'] = '1.5'
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.console.validate, task)
+
+ @mock.patch.object(console_utils, 'start_shellinabox_console',
+ autospec=True)
+ def test_start_console(self, mock_exec):
+ mock_exec.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.console.start_console(task)
+
+ mock_exec.assert_called_once_with(self.info['uuid'],
+ self.info['port'],
+ mock.ANY)
+ self.assertTrue(mock_exec.called)
+
+ @mock.patch.object(console_utils, 'start_shellinabox_console',
+ autospec=True)
+ def test_start_console_fail(self, mock_exec):
+ mock_exec.side_effect = iter(
+ [exception.ConsoleSubprocessFailed(error='error')])
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.assertRaises(exception.ConsoleSubprocessFailed,
+ self.driver.console.start_console,
+ task)
+
+ @mock.patch.object(console_utils, 'start_shellinabox_console',
+ autospec=True)
+ def test_start_console_fail_nodir(self, mock_exec):
+ mock_exec.side_effect = iter([exception.ConsoleError()])
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.ConsoleError,
+ self.driver.console.start_console,
+ task)
+ mock_exec.assert_called_once_with(self.node.uuid, mock.ANY, mock.ANY)
+
+ @mock.patch.object(console_utils, 'stop_shellinabox_console',
+ autospec=True)
+ def test_stop_console(self, mock_exec):
+ mock_exec.return_value = None
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ self.driver.console.stop_console(task)
+
+ mock_exec.assert_called_once_with(self.info['uuid'])
+ self.assertTrue(mock_exec.called)
+
+ @mock.patch.object(console_utils, 'stop_shellinabox_console',
+ autospec=True)
+ def test_stop_console_fail(self, mock_stop):
+ mock_stop.side_effect = iter([exception.ConsoleError()])
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.ConsoleError,
+ self.driver.console.stop_console,
+ task)
+
+ mock_stop.assert_called_once_with(self.node.uuid)
+
+ @mock.patch.object(console_utils, 'get_shellinabox_console_url',
+ autospec=True)
+ def test_get_console(self, mock_exec):
+ url = 'http://localhost:4201'
+ mock_exec.return_value = url
+ expected = {'type': 'shellinabox', 'url': url}
+
+ with task_manager.acquire(self.context,
+ self.node['uuid']) as task:
+ console_info = self.driver.console.get_console(task)
+
+ self.assertEqual(expected, console_info)
+ mock_exec.assert_called_once_with(self.info['port'])
+ self.assertTrue(mock_exec.called)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_set_boot_device_ok(self, mock_exec):
+ mock_exec.return_value = [None, None]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.driver.management.set_boot_device(task, boot_devices.PXE)
+
+ mock_calls = [mock.call(self.info, "raw 0x00 0x08 0x03 0x08"),
+ mock.call(self.info, "chassis bootdev pxe")]
+ mock_exec.assert_has_calls(mock_calls)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_force_set_boot_device_ok(self, mock_exec):
+ mock_exec.return_value = [None, None]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['ipmi_force_boot_device'] = True
+ self.info['force_boot_device'] = True
+ self.driver.management.set_boot_device(task, boot_devices.PXE)
+ task.node.refresh()
+ self.assertEqual(
+ False,
+ task.node.driver_internal_info['is_next_boot_persistent']
+ )
+
+ mock_calls = [mock.call(self.info, "raw 0x00 0x08 0x03 0x08"),
+ mock.call(self.info, "chassis bootdev pxe")]
+ mock_exec.assert_has_calls(mock_calls)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_set_boot_device_persistent(self, mock_exec):
+ mock_exec.return_value = [None, None]
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['ipmi_force_boot_device'] = True
+ self.info['force_boot_device'] = True
+ self.driver.management.set_boot_device(task,
+ boot_devices.PXE,
+ True)
+ self.assertEqual(
+ boot_devices.PXE,
+ task.node.driver_internal_info['persistent_boot_device'])
+
+ mock_calls = [mock.call(self.info, "raw 0x00 0x08 0x03 0x08"),
+ mock.call(self.info, "chassis bootdev pxe")]
+ mock_exec.assert_has_calls(mock_calls)
+
+ def test_management_interface_set_boot_device_bad_device(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.management.set_boot_device,
+ task, 'fake-device')
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_set_boot_device_exec_failed(self, mock_exec):
+ mock_exec.side_effect = iter([processutils.ProcessExecutionError()])
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.IPMIFailure,
+ self.driver.management.set_boot_device,
+ task, boot_devices.PXE)
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_set_boot_device_unknown_exception(self,
+ mock_exec):
+
+ class FakeException(Exception):
+ pass
+
+ mock_exec.side_effect = iter([FakeException('boom')])
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(FakeException,
+ self.driver.management.set_boot_device,
+ task, boot_devices.PXE)
+
+ def test_management_interface_get_supported_boot_devices(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ expected = [boot_devices.PXE, boot_devices.DISK,
+ boot_devices.CDROM, boot_devices.BIOS,
+ boot_devices.SAFE]
+ self.assertEqual(sorted(expected), sorted(task.driver.management.
+ get_supported_boot_devices(task)))
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_get_boot_device(self, mock_exec):
+ # output, expected boot device
+ bootdevs = [('Boot Device Selector : '
+ 'Force Boot from default Hard-Drive\n',
+ boot_devices.DISK),
+ ('Boot Device Selector : '
+ 'Force Boot from default Hard-Drive, request Safe-Mode\n',
+ boot_devices.SAFE),
+ ('Boot Device Selector : '
+ 'Force Boot into BIOS Setup\n',
+ boot_devices.BIOS),
+ ('Boot Device Selector : '
+ 'Force PXE\n',
+ boot_devices.PXE),
+ ('Boot Device Selector : '
+ 'Force Boot from CD/DVD\n',
+ boot_devices.CDROM)]
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ for out, expected_device in bootdevs:
+ mock_exec.return_value = (out, '')
+ expected_response = {'boot_device': expected_device,
+ 'persistent': False}
+ self.assertEqual(expected_response,
+ task.driver.management.get_boot_device(task))
+ mock_exec.assert_called_with(mock.ANY,
+ "chassis bootparam get 5")
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_get_boot_device_unknown_dev(self, mock_exec):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_exec.return_value = ('Boot Device Selector : Fake\n', '')
+ response = task.driver.management.get_boot_device(task)
+ self.assertIsNone(response['boot_device'])
+ mock_exec.assert_called_with(mock.ANY, "chassis bootparam get 5")
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_get_boot_device_fail(self, mock_exec):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ mock_exec.side_effect = iter(
+ [processutils.ProcessExecutionError()])
+ self.assertRaises(exception.IPMIFailure,
+ task.driver.management.get_boot_device, task)
+ mock_exec.assert_called_with(mock.ANY, "chassis bootparam get 5")
+
+ @mock.patch.object(ipmi, '_exec_ipmitool', autospec=True)
+ def test_management_interface_get_boot_device_persistent(self, mock_exec):
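+ # ipmitool output, expected value of the 'persistent' flag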
+ outputs = [('Options apply to only next boot\n'
+ 'Boot Device Selector : Force PXE\n',
+ False),
+ ('Options apply to all future boots\n'
+ 'Boot Device Selector : Force PXE\n',
+ True)]
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ for out, expected_persistent in outputs:
+ mock_exec.return_value = (out, '')
+ expected_response = {'boot_device': boot_devices.PXE,
+ 'persistent': expected_persistent}
+ self.assertEqual(expected_response,
+ task.driver.management.get_boot_device(task))
+ mock_exec.assert_called_with(mock.ANY,
+ "chassis bootparam get 5")
+
+ def test_get_force_boot_device_persistent(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_info['ipmi_force_boot_device'] = True
+ task.node.driver_internal_info['persistent_boot_device'] = 'pxe'
+ bootdev = self.driver.management.get_boot_device(task)
+ self.assertEqual('pxe', bootdev['boot_device'])
+ self.assertTrue(bootdev['persistent'])
+
+ def test_management_interface_validate_good(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.management.validate(task)
+
+ def test_management_interface_validate_fail(self):
+ # Missing IPMI driver_info information
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake_ipmitool')
+ with task_manager.acquire(self.context, node.uuid) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.management.validate, task)
+
+ def test__parse_ipmi_sensor_data_ok(self):
+ fake_sensors_data = """
+ Sensor ID : Temp (0x1)
+ Entity ID : 3.1 (Processor)
+ Sensor Type (Analog) : Temperature
+ Sensor Reading : -58 (+/- 1) degrees C
+ Status : ok
+ Nominal Reading : 50.000
+ Normal Minimum : 11.000
+ Normal Maximum : 69.000
+ Upper critical : 90.000
+ Upper non-critical : 85.000
+ Positive Hysteresis : 1.000
+ Negative Hysteresis : 1.000
+
+ Sensor ID : Temp (0x2)
+ Entity ID : 3.2 (Processor)
+ Sensor Type (Analog) : Temperature
+ Sensor Reading : 50 (+/- 1) degrees C
+ Status : ok
+ Nominal Reading : 50.000
+ Normal Minimum : 11.000
+ Normal Maximum : 69.000
+ Upper critical : 90.000
+ Upper non-critical : 85.000
+ Positive Hysteresis : 1.000
+ Negative Hysteresis : 1.000
+
+ Sensor ID : FAN MOD 1A RPM (0x30)
+ Entity ID : 7.1 (System Board)
+ Sensor Type (Analog) : Fan
+ Sensor Reading : 8400 (+/- 75) RPM
+ Status : ok
+ Nominal Reading : 5325.000
+ Normal Minimum : 10425.000
+ Normal Maximum : 14775.000
+ Lower critical : 4275.000
+ Positive Hysteresis : 375.000
+ Negative Hysteresis : 375.000
+
+ Sensor ID : FAN MOD 1B RPM (0x31)
+ Entity ID : 7.1 (System Board)
+ Sensor Type (Analog) : Fan
+ Sensor Reading : 8550 (+/- 75) RPM
+ Status : ok
+ Nominal Reading : 7800.000
+ Normal Minimum : 10425.000
+ Normal Maximum : 14775.000
+ Lower critical : 4275.000
+ Positive Hysteresis : 375.000
+ Negative Hysteresis : 375.000
+ """
+ expected_return = {
+ 'Fan': {
+ 'FAN MOD 1A RPM (0x30)': {
+ 'Status': 'ok',
+ 'Sensor Reading': '8400 (+/- 75) RPM',
+ 'Entity ID': '7.1 (System Board)',
+ 'Normal Minimum': '10425.000',
+ 'Positive Hysteresis': '375.000',
+ 'Normal Maximum': '14775.000',
+ 'Sensor Type (Analog)': 'Fan',
+ 'Lower critical': '4275.000',
+ 'Negative Hysteresis': '375.000',
+ 'Sensor ID': 'FAN MOD 1A RPM (0x30)',
+ 'Nominal Reading': '5325.000'
+ },
+ 'FAN MOD 1B RPM (0x31)': {
+ 'Status': 'ok',
+ 'Sensor Reading': '8550 (+/- 75) RPM',
+ 'Entity ID': '7.1 (System Board)',
+ 'Normal Minimum': '10425.000',
+ 'Positive Hysteresis': '375.000',
+ 'Normal Maximum': '14775.000',
+ 'Sensor Type (Analog)': 'Fan',
+ 'Lower critical': '4275.000',
+ 'Negative Hysteresis': '375.000',
+ 'Sensor ID': 'FAN MOD 1B RPM (0x31)',
+ 'Nominal Reading': '7800.000'
+ }
+ },
+ 'Temperature': {
+ 'Temp (0x1)': {
+ 'Status': 'ok',
+ 'Sensor Reading': '-58 (+/- 1) degrees C',
+ 'Entity ID': '3.1 (Processor)',
+ 'Normal Minimum': '11.000',
+ 'Positive Hysteresis': '1.000',
+ 'Upper non-critical': '85.000',
+ 'Normal Maximum': '69.000',
+ 'Sensor Type (Analog)': 'Temperature',
+ 'Negative Hysteresis': '1.000',
+ 'Upper critical': '90.000',
+ 'Sensor ID': 'Temp (0x1)',
+ 'Nominal Reading': '50.000'
+ },
+ 'Temp (0x2)': {
+ 'Status': 'ok',
+ 'Sensor Reading': '50 (+/- 1) degrees C',
+ 'Entity ID': '3.2 (Processor)',
+ 'Normal Minimum': '11.000',
+ 'Positive Hysteresis': '1.000',
+ 'Upper non-critical': '85.000',
+ 'Normal Maximum': '69.000',
+ 'Sensor Type (Analog)': 'Temperature',
+ 'Negative Hysteresis': '1.000',
+ 'Upper critical': '90.000',
+ 'Sensor ID': 'Temp (0x2)',
+ 'Nominal Reading': '50.000'
+ }
+ }
+ }
+ ret = ipmi._parse_ipmi_sensors_data(self.node, fake_sensors_data)
+
+ self.assertEqual(expected_return, ret)
+
+ def test__parse_ipmi_sensor_data_missing_sensor_reading(self):
+ fake_sensors_data = """
+ Sensor ID : Temp (0x1)
+ Entity ID : 3.1 (Processor)
+ Sensor Type (Analog) : Temperature
+ Status : ok
+ Nominal Reading : 50.000
+ Normal Minimum : 11.000
+ Normal Maximum : 69.000
+ Upper critical : 90.000
+ Upper non-critical : 85.000
+ Positive Hysteresis : 1.000
+ Negative Hysteresis : 1.000
+
+ Sensor ID : Temp (0x2)
+ Entity ID : 3.2 (Processor)
+ Sensor Type (Analog) : Temperature
+ Sensor Reading : 50 (+/- 1) degrees C
+ Status : ok
+ Nominal Reading : 50.000
+ Normal Minimum : 11.000
+ Normal Maximum : 69.000
+ Upper critical : 90.000
+ Upper non-critical : 85.000
+ Positive Hysteresis : 1.000
+ Negative Hysteresis : 1.000
+
+ Sensor ID : FAN MOD 1A RPM (0x30)
+ Entity ID : 7.1 (System Board)
+ Sensor Type (Analog) : Fan
+ Sensor Reading : 8400 (+/- 75) RPM
+ Status : ok
+ Nominal Reading : 5325.000
+ Normal Minimum : 10425.000
+ Normal Maximum : 14775.000
+ Lower critical : 4275.000
+ Positive Hysteresis : 375.000
+ Negative Hysteresis : 375.000
+ """
+ expected_return = {
+ 'Fan': {
+ 'FAN MOD 1A RPM (0x30)': {
+ 'Status': 'ok',
+ 'Sensor Reading': '8400 (+/- 75) RPM',
+ 'Entity ID': '7.1 (System Board)',
+ 'Normal Minimum': '10425.000',
+ 'Positive Hysteresis': '375.000',
+ 'Normal Maximum': '14775.000',
+ 'Sensor Type (Analog)': 'Fan',
+ 'Lower critical': '4275.000',
+ 'Negative Hysteresis': '375.000',
+ 'Sensor ID': 'FAN MOD 1A RPM (0x30)',
+ 'Nominal Reading': '5325.000'
+ }
+ },
+ 'Temperature': {
+ 'Temp (0x2)': {
+ 'Status': 'ok',
+ 'Sensor Reading': '50 (+/- 1) degrees C',
+ 'Entity ID': '3.2 (Processor)',
+ 'Normal Minimum': '11.000',
+ 'Positive Hysteresis': '1.000',
+ 'Upper non-critical': '85.000',
+ 'Normal Maximum': '69.000',
+ 'Sensor Type (Analog)': 'Temperature',
+ 'Negative Hysteresis': '1.000',
+ 'Upper critical': '90.000',
+ 'Sensor ID': 'Temp (0x2)',
+ 'Nominal Reading': '50.000'
+ }
+ }
+ }
+ ret = ipmi._parse_ipmi_sensors_data(self.node, fake_sensors_data)
+
+ self.assertEqual(expected_return, ret)
+
+ def test__parse_ipmi_sensor_data_failed(self):
+ fake_sensors_data = "abcdef"
+ self.assertRaises(exception.FailedToParseSensorData,
+ ipmi._parse_ipmi_sensors_data,
+ self.node,
+ fake_sensors_data)
+
+ fake_sensors_data = "abc:def:ghi"
+ self.assertRaises(exception.FailedToParseSensorData,
+ ipmi._parse_ipmi_sensors_data,
+ self.node,
+ fake_sensors_data)
diff --git a/ironic/tests/unit/drivers/test_irmc.py b/ironic/tests/unit/drivers/test_irmc.py
new file mode 100644
index 000000000..51989a318
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_irmc.py
@@ -0,0 +1,104 @@
+# Copyright 2015 FUJITSU LIMITED
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Test class for iRMC Deploy Driver
+"""
+
+import mock
+import testtools
+
+from ironic.common import exception
+from ironic.drivers import irmc
+
+
+class IRMCVirtualMediaIscsiTestCase(testtools.TestCase):
+
+ def setUp(self):
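+ # Start the patcher that stubs out the deploy module's shared
+ # filesystem mount check, presumably so the driver can be
+ # instantiated without a mounted share.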
+ irmc.deploy._check_share_fs_mounted_patcher.start()
+ super(IRMCVirtualMediaIscsiTestCase, self).setUp()
+
+ @mock.patch.object(irmc.importutils, 'try_import', spec_set=True,
+ autospec=True)
+ def test___init___share_fs_mounted_ok(self,
+ mock_try_import):
+ mock_try_import.return_value = True
+
+ driver = irmc.IRMCVirtualMediaIscsiDriver()
+
+ self.assertIsInstance(driver.power, irmc.power.IRMCPower)
+ self.assertIsInstance(driver.deploy,
+ irmc.deploy.IRMCVirtualMediaIscsiDeploy)
+ self.assertIsInstance(driver.console,
+ irmc.ipmitool.IPMIShellinaboxConsole)
+ self.assertIsInstance(driver.management,
+ irmc.management.IRMCManagement)
+ self.assertIsInstance(driver.vendor, irmc.deploy.VendorPassthru)
+
+ @mock.patch.object(irmc.importutils, 'try_import')
+ def test___init___try_import_exception(self, mock_try_import):
+ mock_try_import.return_value = False
+
+ self.assertRaises(exception.DriverLoadError,
+ irmc.IRMCVirtualMediaIscsiDriver)
+
+ @mock.patch.object(irmc.deploy.IRMCVirtualMediaIscsiDeploy, '__init__',
+ spec_set=True, autospec=True)
+ def test___init___share_fs_not_mounted_exception(self, __init___mock):
+ __init___mock.side_effect = iter(
+ [exception.IRMCSharedFileSystemNotMounted()])
+
+ self.assertRaises(exception.IRMCSharedFileSystemNotMounted,
+ irmc.IRMCVirtualMediaIscsiDriver)
+
+
+class IRMCVirtualMediaAgentTestCase(testtools.TestCase):
+
+ def setUp(self):
+ irmc.deploy._check_share_fs_mounted_patcher.start()
+ super(IRMCVirtualMediaAgentTestCase, self).setUp()
+
+ @mock.patch.object(irmc.importutils, 'try_import', spec_set=True,
+ autospec=True)
+ def test___init___share_fs_mounted_ok(self,
+ mock_try_import):
+ mock_try_import.return_value = True
+
+ driver = irmc.IRMCVirtualMediaAgentDriver()
+
+ self.assertIsInstance(driver.power, irmc.power.IRMCPower)
+ self.assertIsInstance(driver.deploy,
+ irmc.deploy.IRMCVirtualMediaAgentDeploy)
+ self.assertIsInstance(driver.console,
+ irmc.ipmitool.IPMIShellinaboxConsole)
+ self.assertIsInstance(driver.management,
+ irmc.management.IRMCManagement)
+ self.assertIsInstance(driver.vendor,
+ irmc.deploy.IRMCVirtualMediaAgentVendorInterface)
+
+ @mock.patch.object(irmc.importutils, 'try_import')
+ def test___init___try_import_exception(self, mock_try_import):
+ mock_try_import.return_value = False
+
+ self.assertRaises(exception.DriverLoadError,
+ irmc.IRMCVirtualMediaAgentDriver)
+
+ @mock.patch.object(irmc.deploy.IRMCVirtualMediaAgentDeploy, '__init__',
+ spec_set=True, autospec=True)
+ def test___init___share_fs_not_mounted_exception(self, __init___mock):
+ __init___mock.side_effect = iter([
+ exception.IRMCSharedFileSystemNotMounted()])
+
+ self.assertRaises(exception.IRMCSharedFileSystemNotMounted,
+ irmc.IRMCVirtualMediaAgentDriver)
diff --git a/ironic/tests/unit/drivers/test_iscsi_deploy.py b/ironic/tests/unit/drivers/test_iscsi_deploy.py
new file mode 100644
index 000000000..261f11766
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_iscsi_deploy.py
@@ -0,0 +1,1402 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for iSCSI deploy mechanism."""
+
+import os
+import tempfile
+
+import mock
+from oslo_config import cfg
+from oslo_utils import fileutils
+from oslo_utils import uuidutils
+
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import keystone
+from ironic.common import pxe_utils
+from ironic.common import states
+from ironic.common import utils
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules import agent_base_vendor
+from ironic.drivers.modules import agent_client
+from ironic.drivers.modules import deploy_utils
+from ironic.drivers.modules import fake
+from ironic.drivers.modules import iscsi_deploy
+from ironic.drivers.modules import pxe
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+CONF = cfg.CONF
+
+INST_INFO_DICT = db_utils.get_test_pxe_instance_info()
+DRV_INFO_DICT = db_utils.get_test_pxe_driver_info()
+DRV_INTERNAL_INFO_DICT = db_utils.get_test_pxe_driver_internal_info()
+
+
+class IscsiDeployValidateParametersTestCase(db_base.DbTestCase):
+
+ def test_parse_instance_info_good(self):
+ # make sure the expected fields come back from parse_instance_info
+ node = obj_utils.create_test_node(
+ self.context, driver='fake_pxe',
+ instance_info=INST_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT
+ )
+ info = iscsi_deploy.parse_instance_info(node)
+ self.assertIsNotNone(info.get('image_source'))
+ self.assertIsNotNone(info.get('root_gb'))
+ self.assertEqual(0, info.get('ephemeral_gb'))
+ self.assertIsNone(info.get('configdrive'))
+
+ def test_parse_instance_info_missing_instance_source(self):
+ # make sure error is raised when info is missing
+ info = dict(INST_INFO_DICT)
+ del info['image_source']
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.assertRaises(exception.MissingParameterValue,
+ iscsi_deploy.parse_instance_info,
+ node)
+
+ def test_parse_instance_info_missing_root_gb(self):
+ # make sure error is raised when info is missing
+ info = dict(INST_INFO_DICT)
+ del info['root_gb']
+
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.assertRaises(exception.MissingParameterValue,
+ iscsi_deploy.parse_instance_info,
+ node)
+
+ def test_parse_instance_info_invalid_root_gb(self):
+ info = dict(INST_INFO_DICT)
+ info['root_gb'] = 'foobar'
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy.parse_instance_info,
+ node)
+
+ def test_parse_instance_info_valid_ephemeral_gb(self):
+ ephemeral_gb = 10
+ ephemeral_fmt = 'test-fmt'
+ info = dict(INST_INFO_DICT)
+ info['ephemeral_gb'] = ephemeral_gb
+ info['ephemeral_format'] = ephemeral_fmt
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ data = iscsi_deploy.parse_instance_info(node)
+ self.assertEqual(ephemeral_gb, data.get('ephemeral_gb'))
+ self.assertEqual(ephemeral_fmt, data.get('ephemeral_format'))
+
+ def test_parse_instance_info_invalid_ephemeral_gb(self):
+ info = dict(INST_INFO_DICT)
+ info['ephemeral_gb'] = 'foobar'
+ info['ephemeral_format'] = 'exttest'
+
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy.parse_instance_info,
+ node)
+
+ def test_parse_instance_info_valid_ephemeral_missing_format(self):
+ ephemeral_gb = 10
+ ephemeral_fmt = 'test-fmt'
+ info = dict(INST_INFO_DICT)
+ info['ephemeral_gb'] = ephemeral_gb
+ info['ephemeral_format'] = None
+ self.config(default_ephemeral_format=ephemeral_fmt, group='pxe')
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ instance_info = iscsi_deploy.parse_instance_info(node)
+ self.assertEqual(ephemeral_fmt, instance_info['ephemeral_format'])
+
+ def test_parse_instance_info_valid_preserve_ephemeral_true(self):
+ info = dict(INST_INFO_DICT)
+ for opt in ['true', 'TRUE', 'True', 't',
+ 'on', 'yes', 'y', '1']:
+ info['preserve_ephemeral'] = opt
+
+ node = obj_utils.create_test_node(
+ self.context, uuid=uuidutils.generate_uuid(),
+ instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ data = iscsi_deploy.parse_instance_info(node)
+ self.assertTrue(data.get('preserve_ephemeral'))
+
+ def test_parse_instance_info_valid_preserve_ephemeral_false(self):
+ info = dict(INST_INFO_DICT)
+ for opt in ['false', 'FALSE', 'False', 'f',
+ 'off', 'no', 'n', '0']:
+ info['preserve_ephemeral'] = opt
+ node = obj_utils.create_test_node(
+ self.context, uuid=uuidutils.generate_uuid(),
+ instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ data = iscsi_deploy.parse_instance_info(node)
+ self.assertFalse(data.get('preserve_ephemeral'))
+
+ def test_parse_instance_info_invalid_preserve_ephemeral(self):
+ info = dict(INST_INFO_DICT)
+ info['preserve_ephemeral'] = 'foobar'
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy.parse_instance_info,
+ node)
+
+ def test_parse_instance_info_invalid_ephemeral_disk(self):
+ info = dict(INST_INFO_DICT)
+ info['ephemeral_gb'] = 10
+ info['swap_mb'] = 0
+ info['root_gb'] = 20
+ info['preserve_ephemeral'] = True
+ drv_internal_dict = {'instance': {'ephemeral_gb': 9,
+ 'swap_mb': 0,
+ 'root_gb': 20}}
+ drv_internal_dict.update(DRV_INTERNAL_INFO_DICT)
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=drv_internal_dict,
+ )
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy.parse_instance_info,
+ node)
+
+ def test__check_disk_layout_unchanged_fails(self):
+ info = dict(INST_INFO_DICT)
+ info['ephemeral_gb'] = 10
+ info['swap_mb'] = 0
+ info['root_gb'] = 20
+ info['preserve_ephemeral'] = True
+ drv_internal_dict = {'instance': {'ephemeral_gb': 20,
+ 'swap_mb': 0,
+ 'root_gb': 20}}
+ drv_internal_dict.update(DRV_INTERNAL_INFO_DICT)
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=drv_internal_dict,
+ )
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy._check_disk_layout_unchanged,
+ node, info)
+
+ def test__check_disk_layout_unchanged(self):
+ info = dict(INST_INFO_DICT)
+ info['ephemeral_gb'] = 10
+ info['swap_mb'] = 0
+ info['root_gb'] = 20
+ info['preserve_ephemeral'] = True
+ drv_internal_dict = {'instance': {'ephemeral_gb': 10,
+ 'swap_mb': 0,
+ 'root_gb': 20}}
+ drv_internal_dict.update(DRV_INTERNAL_INFO_DICT)
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=drv_internal_dict,
+ )
+ self.assertIsNone(iscsi_deploy._check_disk_layout_unchanged(node,
+ info))
+
+ def test__save_disk_layout(self):
+ info = dict(INST_INFO_DICT)
+ info['ephemeral_gb'] = 10
+ info['swap_mb'] = 0
+ info['root_gb'] = 10
+ info['preserve_ephemeral'] = False
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ iscsi_deploy._save_disk_layout(node, info)
+ node.refresh()
+ for param in ('ephemeral_gb', 'swap_mb', 'root_gb'):
+ self.assertEqual(
+ info[param], node.driver_internal_info['instance'][param]
+ )
+
+ def test_parse_instance_info_configdrive(self):
+ info = dict(INST_INFO_DICT)
+ info['configdrive'] = 'http://1.2.3.4/cd'
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ instance_info = iscsi_deploy.parse_instance_info(node)
+ self.assertEqual('http://1.2.3.4/cd', instance_info['configdrive'])
+
+ def test_parse_instance_info_nonglance_image(self):
+ info = INST_INFO_DICT.copy()
+ info['image_source'] = 'file:///image.qcow2'
+ info['kernel'] = 'file:///image.vmlinuz'
+ info['ramdisk'] = 'file:///image.initrd'
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ iscsi_deploy.parse_instance_info(node)
+
+ def test_parse_instance_info_nonglance_image_no_kernel(self):
+ info = INST_INFO_DICT.copy()
+ info['image_source'] = 'file:///image.qcow2'
+ info['ramdisk'] = 'file:///image.initrd'
+ node = obj_utils.create_test_node(
+ self.context, instance_info=info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.assertRaises(exception.MissingParameterValue,
+ iscsi_deploy.parse_instance_info, node)
+
+ def test_parse_instance_info_whole_disk_image(self):
+ driver_internal_info = dict(DRV_INTERNAL_INFO_DICT)
+ driver_internal_info['is_whole_disk_image'] = True
+ node = obj_utils.create_test_node(
+ self.context, instance_info=INST_INFO_DICT,
+ driver_internal_info=driver_internal_info,
+ )
+ instance_info = iscsi_deploy.parse_instance_info(node)
+ self.assertIsNotNone(instance_info.get('image_source'))
+ self.assertIsNotNone(instance_info.get('root_gb'))
+ self.assertEqual(0, instance_info.get('swap_mb'))
+ self.assertEqual(0, instance_info.get('ephemeral_gb'))
+ self.assertIsNone(instance_info.get('configdrive'))
+
+ def test_parse_instance_info_whole_disk_image_missing_root(self):
+ info = dict(INST_INFO_DICT)
+ del info['root_gb']
+ node = obj_utils.create_test_node(self.context, instance_info=info)
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy.parse_instance_info, node)
+
+
+class IscsiDeployPrivateMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IscsiDeployPrivateMethodsTestCase, self).setUp()
+ n = {
+ 'driver': 'fake_pxe',
+ 'instance_info': INST_INFO_DICT,
+ 'driver_info': DRV_INFO_DICT,
+ 'driver_internal_info': DRV_INTERNAL_INFO_DICT,
+ }
+ mgr_utils.mock_the_extension_manager(driver="fake_pxe")
+ self.node = obj_utils.create_test_node(self.context, **n)
+
+ def test__get_image_dir_path(self):
+ self.assertEqual(os.path.join(CONF.pxe.images_path,
+ self.node.uuid),
+ iscsi_deploy._get_image_dir_path(self.node.uuid))
+
+ def test__get_image_file_path(self):
+ self.assertEqual(os.path.join(CONF.pxe.images_path,
+ self.node.uuid,
+ 'disk'),
+ iscsi_deploy._get_image_file_path(self.node.uuid))
+
+
+class IscsiDeployMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(IscsiDeployMethodsTestCase, self).setUp()
+ instance_info = dict(INST_INFO_DICT)
+ instance_info['deploy_key'] = 'fake-56789'
+ n = {
+ 'driver': 'fake_pxe',
+ 'instance_info': instance_info,
+ 'driver_info': DRV_INFO_DICT,
+ 'driver_internal_info': DRV_INTERNAL_INFO_DICT,
+ }
+ mgr_utils.mock_the_extension_manager(driver="fake_pxe")
+ self.node = obj_utils.create_test_node(self.context, **n)
+
+ @mock.patch.object(deploy_utils, 'get_image_mb', autospec=True)
+ def test_check_image_size(self, get_image_mb_mock):
+ get_image_mb_mock.return_value = 1000
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['root_gb'] = 1
+ iscsi_deploy.check_image_size(task)
+ get_image_mb_mock.assert_called_once_with(
+ iscsi_deploy._get_image_file_path(task.node.uuid))
+
+ @mock.patch.object(deploy_utils, 'get_image_mb', autospec=True)
+ def test_check_image_size_fails(self, get_image_mb_mock):
+ get_image_mb_mock.return_value = 1025
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['root_gb'] = 1
+ self.assertRaises(exception.InstanceDeployFailure,
+ iscsi_deploy.check_image_size,
+ task)
+ get_image_mb_mock.assert_called_once_with(
+ iscsi_deploy._get_image_file_path(task.node.uuid))
+
+ @mock.patch.object(deploy_utils, 'fetch_images', autospec=True)
+ def test_cache_instance_images_master_path(self, mock_fetch_image):
+ temp_dir = tempfile.mkdtemp()
+ self.config(images_path=temp_dir, group='pxe')
+ self.config(instance_master_path=os.path.join(temp_dir,
+ 'instance_master_path'),
+ group='pxe')
+ fileutils.ensure_tree(CONF.pxe.instance_master_path)
+
+ (uuid, image_path) = iscsi_deploy.cache_instance_image(None, self.node)
+ mock_fetch_image.assert_called_once_with(None,
+ mock.ANY,
+ [(uuid, image_path)], True)
+ self.assertEqual('glance://image_uuid', uuid)
+ self.assertEqual(os.path.join(temp_dir,
+ self.node.uuid,
+ 'disk'),
+ image_path)
+
+ @mock.patch.object(utils, 'unlink_without_raise', autospec=True)
+ @mock.patch.object(utils, 'rmtree_without_raise', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ def test_destroy_images(self, mock_cache, mock_rmtree, mock_unlink):
+ self.config(images_path='/path', group='pxe')
+
+ iscsi_deploy.destroy_images('uuid')
+
+ mock_cache.return_value.clean_up.assert_called_once_with()
+ mock_unlink.assert_called_once_with('/path/uuid/disk')
+ mock_rmtree.assert_called_once_with('/path/uuid')
+
+ def _test_build_deploy_ramdisk_options(self, mock_alnum, api_url,
+ expected_root_device=None,
+ expected_boot_option='netboot',
+ expected_boot_mode='bios'):
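+        # Helper: seeds the PXE disk_devices config and the random deploy key
+        # mock, then checks that build_deploy_ramdisk_options() returns the
+        # expected iSCSI deploy options for this node.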
+ fake_key = '0123456789ABCDEFGHIJKLMNOPQRSTUV'
+ fake_disk = 'fake-disk'
+
+ self.config(disk_devices=fake_disk, group='pxe')
+
+ mock_alnum.return_value = fake_key
+
+ expected_iqn = 'iqn.2008-10.org.openstack:%s' % self.node.uuid
+ expected_opts = {
+ 'iscsi_target_iqn': expected_iqn,
+ 'deployment_id': self.node.uuid,
+ 'deployment_key': fake_key,
+ 'disk': fake_disk,
+ 'ironic_api_url': api_url,
+ 'boot_option': expected_boot_option,
+ 'boot_mode': expected_boot_mode,
+ 'coreos.configdrive': 0,
+ }
+
+ if expected_root_device:
+ expected_opts['root_device'] = expected_root_device
+
+ opts = iscsi_deploy.build_deploy_ramdisk_options(self.node)
+
+ self.assertEqual(expected_opts, opts)
+ mock_alnum.assert_called_once_with(32)
+ # assert deploy_key was injected in the node
+ self.assertIn('deploy_key', self.node.instance_info)
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ @mock.patch.object(utils, 'random_alnum', autospec=True)
+ def test_build_deploy_ramdisk_options(self, mock_alnum, mock_get_url):
+ fake_api_url = 'http://127.0.0.1:6385'
+ self.config(api_url=fake_api_url, group='conductor')
+ self._test_build_deploy_ramdisk_options(mock_alnum, fake_api_url)
+
+        # Since the Ironic API URL comes from the config file,
+        # assert that Keystone wasn't called
+ self.assertFalse(mock_get_url.called)
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ @mock.patch.object(utils, 'random_alnum', autospec=True)
+ def test_build_deploy_ramdisk_options_keystone(self, mock_alnum,
+ mock_get_url):
+ fake_api_url = 'http://127.0.0.1:6385'
+ mock_get_url.return_value = fake_api_url
+ self._test_build_deploy_ramdisk_options(mock_alnum, fake_api_url)
+
+        # Since the Ironic API URL is not specified in the config file,
+        # assert that it is fetched from Keystone
+ mock_get_url.assert_called_once_with()
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ @mock.patch.object(utils, 'random_alnum', autospec=True)
+ def test_build_deploy_ramdisk_options_root_device(self, mock_alnum,
+ mock_get_url):
+ self.node.properties['root_device'] = {'wwn': 123456}
+ expected = 'wwn=123456'
+ fake_api_url = 'http://127.0.0.1:6385'
+ self.config(api_url=fake_api_url, group='conductor')
+ self._test_build_deploy_ramdisk_options(mock_alnum, fake_api_url,
+ expected_root_device=expected)
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ @mock.patch.object(utils, 'random_alnum', autospec=True)
+ def test_build_deploy_ramdisk_options_boot_option(self, mock_alnum,
+ mock_get_url):
+ self.node.instance_info = {'capabilities': '{"boot_option": "local"}'}
+ expected = 'local'
+ fake_api_url = 'http://127.0.0.1:6385'
+ self.config(api_url=fake_api_url, group='conductor')
+ self._test_build_deploy_ramdisk_options(mock_alnum, fake_api_url,
+ expected_boot_option=expected)
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ @mock.patch.object(utils, 'random_alnum', autospec=True)
+ def test_build_deploy_ramdisk_options_whole_disk_image(self, mock_alnum,
+ mock_get_url):
+        """Tests the boot_option hack for whole disk images.
+
+ This hack is in place to fix bug #1441556.
+ """
+ self.node.instance_info = {'capabilities': '{"boot_option": "local"}'}
+ dii = self.node.driver_internal_info
+ dii['is_whole_disk_image'] = True
+ self.node.driver_internal_info = dii
+ self.node.save()
+ expected = 'netboot'
+ fake_api_url = 'http://127.0.0.1:6385'
+ self.config(api_url=fake_api_url, group='conductor')
+ self._test_build_deploy_ramdisk_options(mock_alnum, fake_api_url,
+ expected_boot_option=expected)
+
+ @mock.patch.object(iscsi_deploy, '_save_disk_layout', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(deploy_utils, 'deploy_partition_image', autospec=True)
+ def test_continue_deploy_fail(self, deploy_mock, power_mock,
+ mock_image_cache, mock_disk_layout):
+ kwargs = {'address': '123456', 'iqn': 'aaa-bbb', 'key': 'fake-56789'}
+ deploy_mock.side_effect = iter([
+ exception.InstanceDeployFailure("test deploy error")])
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ params = iscsi_deploy.get_deploy_info(task.node, **kwargs)
+ self.assertRaises(exception.InstanceDeployFailure,
+ iscsi_deploy.continue_deploy,
+ task, **kwargs)
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ self.assertIsNotNone(task.node.last_error)
+ deploy_mock.assert_called_once_with(**params)
+ power_mock.assert_called_once_with(task, states.POWER_OFF)
+ mock_image_cache.assert_called_once_with()
+ mock_image_cache.return_value.clean_up.assert_called_once_with()
+ self.assertFalse(mock_disk_layout.called)
+
+ @mock.patch.object(iscsi_deploy, '_save_disk_layout', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(deploy_utils, 'deploy_partition_image', autospec=True)
+ def test_continue_deploy_ramdisk_fails(self, deploy_mock, power_mock,
+ mock_image_cache, mock_disk_layout):
+ kwargs = {'address': '123456', 'iqn': 'aaa-bbb', 'key': 'fake-56789',
+ 'error': 'test ramdisk error'}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InstanceDeployFailure,
+ iscsi_deploy.continue_deploy,
+ task, **kwargs)
+ self.assertIsNotNone(task.node.last_error)
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ power_mock.assert_called_once_with(task, states.POWER_OFF)
+ mock_image_cache.assert_called_once_with()
+ mock_image_cache.return_value.clean_up.assert_called_once_with()
+ self.assertFalse(deploy_mock.called)
+ self.assertFalse(mock_disk_layout.called)
+
+ @mock.patch.object(iscsi_deploy, '_save_disk_layout', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(deploy_utils, 'deploy_partition_image', autospec=True)
+ def test_continue_deploy_fail_no_root_uuid_or_disk_id(
+ self, deploy_mock, power_mock, mock_image_cache, mock_disk_layout):
+ kwargs = {'address': '123456', 'iqn': 'aaa-bbb', 'key': 'fake-56789'}
+ deploy_mock.return_value = {}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ params = iscsi_deploy.get_deploy_info(task.node, **kwargs)
+ self.assertRaises(exception.InstanceDeployFailure,
+ iscsi_deploy.continue_deploy,
+ task, **kwargs)
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ self.assertIsNotNone(task.node.last_error)
+ deploy_mock.assert_called_once_with(**params)
+ power_mock.assert_called_once_with(task, states.POWER_OFF)
+ mock_image_cache.assert_called_once_with()
+ mock_image_cache.return_value.clean_up.assert_called_once_with()
+ self.assertFalse(mock_disk_layout.called)
+
+ @mock.patch.object(iscsi_deploy, '_save_disk_layout', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(deploy_utils, 'deploy_partition_image', autospec=True)
+ def test_continue_deploy_fail_empty_root_uuid(
+ self, deploy_mock, power_mock, mock_image_cache, mock_disk_layout):
+ kwargs = {'address': '123456', 'iqn': 'aaa-bbb', 'key': 'fake-56789'}
+ deploy_mock.return_value = {'root uuid': ''}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ params = iscsi_deploy.get_deploy_info(task.node, **kwargs)
+ self.assertRaises(exception.InstanceDeployFailure,
+ iscsi_deploy.continue_deploy,
+ task, **kwargs)
+ self.assertEqual(states.DEPLOYFAIL, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ self.assertIsNotNone(task.node.last_error)
+ deploy_mock.assert_called_once_with(**params)
+ power_mock.assert_called_once_with(task, states.POWER_OFF)
+ mock_image_cache.assert_called_once_with()
+ mock_image_cache.return_value.clean_up.assert_called_once_with()
+ self.assertFalse(mock_disk_layout.called)
+
+ @mock.patch.object(iscsi_deploy, '_save_disk_layout', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'LOG', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'get_deploy_info', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(deploy_utils, 'deploy_partition_image', autospec=True)
+ def test_continue_deploy(self, deploy_mock, power_mock, mock_image_cache,
+ mock_deploy_info, mock_log, mock_disk_layout):
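+        # Happy path: deploy_partition_image returns a root uuid, so
+        # continue_deploy() should succeed, clean up the image cache and
+        # save the disk layout, while masking the configdrive in the log.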
+ kwargs = {'address': '123456', 'iqn': 'aaa-bbb', 'key': 'fake-56789'}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ mock_deploy_info.return_value = {
+ 'address': '123456',
+ 'boot_option': 'netboot',
+ 'configdrive': "I've got the power",
+ 'ephemeral_format': None,
+ 'ephemeral_mb': 0,
+ 'image_path': (u'/var/lib/ironic/images/1be26c0b-03f2-4d2e-ae87-'
+ u'c02d7f33c123/disk'),
+ 'iqn': 'aaa-bbb',
+ 'lun': '1',
+ 'node_uuid': u'1be26c0b-03f2-4d2e-ae87-c02d7f33c123',
+ 'port': '3260',
+ 'preserve_ephemeral': True,
+ 'root_mb': 102400,
+ 'swap_mb': 0,
+ }
+ log_params = mock_deploy_info.return_value.copy()
+ # Make sure we don't log the full content of the configdrive
+ log_params['configdrive'] = '***'
+ expected_dict = {
+ 'node': self.node.uuid,
+ 'params': log_params,
+ }
+ uuid_dict_returned = {'root uuid': '12345678-87654321'}
+ deploy_mock.return_value = uuid_dict_returned
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ mock_log.isEnabledFor.return_value = True
+ retval = iscsi_deploy.continue_deploy(task, **kwargs)
+ mock_log.debug.assert_called_once_with(
+ mock.ANY, expected_dict)
+ self.assertEqual(states.DEPLOYWAIT, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ self.assertIsNone(task.node.last_error)
+ mock_image_cache.assert_called_once_with()
+ mock_image_cache.return_value.clean_up.assert_called_once_with()
+ self.assertEqual(uuid_dict_returned, retval)
+ mock_disk_layout.assert_called_once_with(task.node, mock.ANY)
+
+ @mock.patch.object(iscsi_deploy, 'LOG', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'get_deploy_info', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(deploy_utils, 'deploy_disk_image', autospec=True)
+ def test_continue_deploy_whole_disk_image(
+ self, deploy_mock, power_mock, mock_image_cache, mock_deploy_info,
+ mock_log):
+ kwargs = {'address': '123456', 'iqn': 'aaa-bbb', 'key': 'fake-56789'}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ mock_deploy_info.return_value = {
+ 'address': '123456',
+ 'image_path': (u'/var/lib/ironic/images/1be26c0b-03f2-4d2e-ae87-'
+ u'c02d7f33c123/disk'),
+ 'iqn': 'aaa-bbb',
+ 'lun': '1',
+ 'node_uuid': u'1be26c0b-03f2-4d2e-ae87-c02d7f33c123',
+ 'port': '3260',
+ }
+ log_params = mock_deploy_info.return_value.copy()
+ expected_dict = {
+ 'node': self.node.uuid,
+ 'params': log_params,
+ }
+ uuid_dict_returned = {'disk identifier': '87654321'}
+ deploy_mock.return_value = uuid_dict_returned
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = True
+ mock_log.isEnabledFor.return_value = True
+ retval = iscsi_deploy.continue_deploy(task, **kwargs)
+ mock_log.debug.assert_called_once_with(
+ mock.ANY, expected_dict)
+ self.assertEqual(states.DEPLOYWAIT, task.node.provision_state)
+ self.assertEqual(states.ACTIVE, task.node.target_provision_state)
+ self.assertIsNone(task.node.last_error)
+ mock_image_cache.assert_called_once_with()
+ mock_image_cache.return_value.clean_up.assert_called_once_with()
+ self.assertEqual(uuid_dict_returned, retval)
+
+ def test_get_deploy_info_boot_option_default(self):
+ instance_info = self.node.instance_info
+ instance_info['deploy_key'] = 'key'
+ self.node.instance_info = instance_info
+ kwargs = {'address': '1.1.1.1', 'iqn': 'target-iqn', 'key': 'key'}
+ ret_val = iscsi_deploy.get_deploy_info(self.node, **kwargs)
+ self.assertEqual('1.1.1.1', ret_val['address'])
+ self.assertEqual('target-iqn', ret_val['iqn'])
+ self.assertEqual('netboot', ret_val['boot_option'])
+
+ def test_get_deploy_info_netboot_specified(self):
+ instance_info = self.node.instance_info
+ instance_info['deploy_key'] = 'key'
+ instance_info['capabilities'] = {'boot_option': 'netboot'}
+ self.node.instance_info = instance_info
+ kwargs = {'address': '1.1.1.1', 'iqn': 'target-iqn', 'key': 'key'}
+ ret_val = iscsi_deploy.get_deploy_info(self.node, **kwargs)
+ self.assertEqual('1.1.1.1', ret_val['address'])
+ self.assertEqual('target-iqn', ret_val['iqn'])
+ self.assertEqual('netboot', ret_val['boot_option'])
+
+ def test_get_deploy_info_localboot(self):
+ instance_info = self.node.instance_info
+ instance_info['deploy_key'] = 'key'
+ instance_info['capabilities'] = {'boot_option': 'local'}
+ self.node.instance_info = instance_info
+ kwargs = {'address': '1.1.1.1', 'iqn': 'target-iqn', 'key': 'key'}
+ ret_val = iscsi_deploy.get_deploy_info(self.node, **kwargs)
+ self.assertEqual('1.1.1.1', ret_val['address'])
+ self.assertEqual('target-iqn', ret_val['iqn'])
+ self.assertEqual('local', ret_val['boot_option'])
+
+ @mock.patch.object(iscsi_deploy, 'continue_deploy', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'build_deploy_ramdisk_options',
+ autospec=True)
+ def test_do_agent_iscsi_deploy_okay(self, build_options_mock,
+ continue_deploy_mock):
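+        # Simulate a successful start_iscsi_target call from the agent and
+        # verify that continue_deploy() is invoked with the agent's address.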
+ build_options_mock.return_value = {'deployment_key': 'abcdef',
+ 'iscsi_target_iqn': 'iqn-qweqwe'}
+ agent_client_mock = mock.MagicMock(spec_set=agent_client.AgentClient)
+ agent_client_mock.start_iscsi_target.return_value = {
+ 'command_status': 'SUCCESS', 'command_error': None}
+ driver_internal_info = {'agent_url': 'http://1.2.3.4:1234'}
+ self.node.driver_internal_info = driver_internal_info
+ self.node.save()
+ uuid_dict_returned = {'root uuid': 'some-root-uuid'}
+ continue_deploy_mock.return_value = uuid_dict_returned
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ret_val = iscsi_deploy.do_agent_iscsi_deploy(
+ task, agent_client_mock)
+ build_options_mock.assert_called_once_with(task.node)
+ agent_client_mock.start_iscsi_target.assert_called_once_with(
+ task.node, 'iqn-qweqwe')
+ continue_deploy_mock.assert_called_once_with(
+ task, error=None, iqn='iqn-qweqwe', key='abcdef',
+ address='1.2.3.4')
+ self.assertEqual(
+ 'some-root-uuid',
+ task.node.driver_internal_info['root_uuid_or_disk_id'])
+ self.assertEqual(ret_val, uuid_dict_returned)
+
+ @mock.patch.object(iscsi_deploy, 'build_deploy_ramdisk_options',
+ autospec=True)
+ def test_do_agent_iscsi_deploy_start_iscsi_failure(self,
+ build_options_mock):
+ build_options_mock.return_value = {'deployment_key': 'abcdef',
+ 'iscsi_target_iqn': 'iqn-qweqwe'}
+ agent_client_mock = mock.MagicMock(spec_set=agent_client.AgentClient)
+ agent_client_mock.start_iscsi_target.return_value = {
+ 'command_status': 'FAILED', 'command_error': 'booom'}
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InstanceDeployFailure,
+ iscsi_deploy.do_agent_iscsi_deploy,
+ task, agent_client_mock)
+ build_options_mock.assert_called_once_with(task.node)
+ agent_client_mock.start_iscsi_target.assert_called_once_with(
+ task.node, 'iqn-qweqwe')
+ self.node.refresh()
+ self.assertEqual(states.DEPLOYFAIL, self.node.provision_state)
+ self.assertEqual(states.ACTIVE, self.node.target_provision_state)
+ self.assertIsNotNone(self.node.last_error)
+
+ def test_validate_pass_bootloader_info_input(self):
+ params = {'key': 'some-random-key', 'address': '1.2.3.4',
+ 'error': '', 'status': 'SUCCEEDED'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['deploy_key'] = 'some-random-key'
+ # Assert that the method doesn't raise
+ iscsi_deploy.validate_pass_bootloader_info_input(task, params)
+
+ def test_validate_pass_bootloader_info_missing_status(self):
+ params = {'key': 'some-random-key', 'address': '1.2.3.4'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ iscsi_deploy.validate_pass_bootloader_info_input,
+ task, params)
+
+ def test_validate_pass_bootloader_info_missing_key(self):
+ params = {'status': 'SUCCEEDED', 'address': '1.2.3.4'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ iscsi_deploy.validate_pass_bootloader_info_input,
+ task, params)
+
+ def test_validate_pass_bootloader_info_missing_address(self):
+ params = {'status': 'SUCCEEDED', 'key': 'some-random-key'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ iscsi_deploy.validate_pass_bootloader_info_input,
+ task, params)
+
+ def test_validate_pass_bootloader_info_input_invalid_key(self):
+ params = {'key': 'some-other-key', 'address': '1.2.3.4',
+ 'status': 'SUCCEEDED'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['deploy_key'] = 'some-random-key'
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy.validate_pass_bootloader_info_input,
+ task, params)
+
+ def test_validate_bootloader_install_status(self):
+ kwargs = {'key': 'abcdef', 'status': 'SUCCEEDED', 'error': ''}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.instance_info['deploy_key'] = 'abcdef'
+ # Nothing much to assert except that it shouldn't raise.
+ iscsi_deploy.validate_bootloader_install_status(task, kwargs)
+
+ @mock.patch.object(deploy_utils, 'set_failed_state', autospec=True)
+ def test_validate_bootloader_install_status_install_failed(
+ self, set_fail_state_mock):
+ kwargs = {'key': 'abcdef', 'status': 'FAILED', 'error': 'some-error'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.provision_state = states.DEPLOYING
+ task.node.target_provision_state = states.ACTIVE
+ task.node.instance_info['deploy_key'] = 'abcdef'
+ self.assertRaises(exception.InstanceDeployFailure,
+ iscsi_deploy.validate_bootloader_install_status,
+ task, kwargs)
+ set_fail_state_mock.assert_called_once_with(task, mock.ANY)
+
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ autospec=True)
+ def test_finish_deploy(self, notify_mock):
+ self.node.provision_state = states.DEPLOYING
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ iscsi_deploy.finish_deploy(task, '1.2.3.4')
+ notify_mock.assert_called_once_with('1.2.3.4')
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+
+ @mock.patch.object(deploy_utils, 'set_failed_state', autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ autospec=True)
+ def test_finish_deploy_notify_fails(self, notify_mock,
+ set_fail_state_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ notify_mock.side_effect = RuntimeError()
+ self.assertRaises(exception.InstanceDeployFailure,
+ iscsi_deploy.finish_deploy, task, '1.2.3.4')
+ set_fail_state_mock.assert_called_once_with(task, mock.ANY)
+
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ autospec=True)
+ def test_finish_deploy_ssh_with_local_boot(self, notify_mock,
+ node_power_mock):
+ instance_info = dict(INST_INFO_DICT)
+ instance_info['capabilities'] = {'boot_option': 'local'}
+ n = {
+ 'uuid': uuidutils.generate_uuid(),
+ 'driver': 'fake_ssh',
+ 'instance_info': instance_info,
+ 'provision_state': states.DEPLOYING,
+ 'target_provision_state': states.ACTIVE,
+ }
+ mgr_utils.mock_the_extension_manager(driver="fake_ssh")
+ node = obj_utils.create_test_node(self.context, **n)
+
+ with task_manager.acquire(self.context, node.uuid,
+ shared=False) as task:
+ iscsi_deploy.finish_deploy(task, '1.2.3.4')
+ notify_mock.assert_called_once_with('1.2.3.4')
+ self.assertEqual(states.ACTIVE, task.node.provision_state)
+ self.assertEqual(states.NOSTATE, task.node.target_provision_state)
+ node_power_mock.assert_called_once_with(task, states.REBOOT)
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ def test_validate_good_api_url_from_config_file(self, mock_ks):
+ # not present in the keystone catalog
+ mock_ks.side_effect = exception.KeystoneFailure
+ self.config(group='conductor', api_url='http://foo')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ iscsi_deploy.validate(task)
+ self.assertFalse(mock_ks.called)
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ def test_validate_good_api_url_from_keystone(self, mock_ks):
+ # present in the keystone catalog
+ mock_ks.return_value = 'http://127.0.0.1:1234'
+ # not present in the config file
+ self.config(group='conductor', api_url=None)
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ iscsi_deploy.validate(task)
+ mock_ks.assert_called_once_with()
+
+ @mock.patch.object(keystone, 'get_service_url', autospec=True)
+ def test_validate_fail_no_api_url(self, mock_ks):
+ # not present in the keystone catalog
+ mock_ks.side_effect = exception.KeystoneFailure
+ # not present in the config file
+ self.config(group='conductor', api_url=None)
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy.validate, task)
+ mock_ks.assert_called_once_with()
+
+ def test_validate_invalid_root_device_hints(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.properties['root_device'] = {'size': 'not-int'}
+ self.assertRaises(exception.InvalidParameterValue,
+ iscsi_deploy.validate, task)
+
+
+class ISCSIDeployTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(ISCSIDeployTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_pxe")
+ self.driver = driver_factory.get_driver("fake_pxe")
+ self.driver.vendor = iscsi_deploy.VendorPassthru()
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_pxe',
+ instance_info=INST_INFO_DICT,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.node.driver_internal_info['agent_url'] = 'http://1.2.3.4:1234'
+ self.task = mock.MagicMock(spec=task_manager.TaskManager)
+ self.task.shared = False
+ self.task.node = self.node
+ self.task.driver = self.driver
+ self.task.context = self.context
+
+ def test_get_properties(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual({}, task.driver.deploy.get_properties())
+
+ @mock.patch.object(iscsi_deploy, 'validate', autospec=True)
+ @mock.patch.object(deploy_utils, 'validate_capabilities', autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'validate', autospec=True)
+ def test_validate(self, pxe_validate_mock,
+ validate_capabilities_mock, validate_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+
+ task.driver.deploy.validate(task)
+
+ pxe_validate_mock.assert_called_once_with(task.driver.boot, task)
+ validate_capabilities_mock.assert_called_once_with(task.node)
+ validate_mock.assert_called_once_with(task)
+
+ @mock.patch.object(pxe.PXEBoot, 'prepare_instance', autospec=True)
+ def test_prepare_node_active(self, prepare_instance_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.provision_state = states.ACTIVE
+
+ task.driver.deploy.prepare(task)
+
+ prepare_instance_mock.assert_called_once_with(
+ task.driver.boot, task)
+
+ @mock.patch.object(deploy_utils, 'build_agent_options', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'build_deploy_ramdisk_options',
+ autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'prepare_ramdisk', autospec=True)
+ def test_prepare_node_deploying(self, mock_prepare_ramdisk,
+ mock_iscsi_options, mock_agent_options):
+ mock_iscsi_options.return_value = {'a': 'b'}
+ mock_agent_options.return_value = {'c': 'd'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.provision_state = states.DEPLOYWAIT
+
+ task.driver.deploy.prepare(task)
+
+ mock_iscsi_options.assert_called_once_with(task.node)
+ mock_agent_options.assert_called_once_with(task.node)
+ mock_prepare_ramdisk.assert_called_once_with(
+ task.driver.boot, task, {'a': 'b', 'c': 'd'})
+
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'check_image_size', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'cache_instance_image', autospec=True)
+ def test_deploy(self, mock_cache_instance_image,
+ mock_check_image_size, mock_node_power_action):
+ with task_manager.acquire(self.context,
+ self.node.uuid, shared=False) as task:
+ state = task.driver.deploy.deploy(task)
+ self.assertEqual(state, states.DEPLOYWAIT)
+ mock_cache_instance_image.assert_called_once_with(
+ self.context, task.node)
+ mock_check_image_size.assert_called_once_with(task)
+ mock_node_power_action.assert_called_once_with(task, states.REBOOT)
+
+ @mock.patch.object(manager_utils, 'node_power_action', autospec=True)
+ def test_tear_down(self, node_power_action_mock):
+ with task_manager.acquire(self.context,
+ self.node.uuid, shared=False) as task:
+ state = task.driver.deploy.tear_down(task)
+ self.assertEqual(state, states.DELETED)
+ node_power_action_mock.assert_called_once_with(task,
+ states.POWER_OFF)
+
+ @mock.patch.object(pxe.PXEBoot, 'clean_up_instance', autospec=True)
+ @mock.patch.object(pxe.PXEBoot, 'clean_up_ramdisk', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'destroy_images', autospec=True)
+ def test_clean_up(self, destroy_images_mock, clean_up_ramdisk_mock,
+ clean_up_instance_mock):
+ with task_manager.acquire(self.context,
+ self.node.uuid, shared=False) as task:
+ task.driver.deploy.clean_up(task)
+ destroy_images_mock.assert_called_once_with(task.node.uuid)
+ clean_up_ramdisk_mock.assert_called_once_with(
+ task.driver.boot, task)
+ clean_up_instance_mock.assert_called_once_with(
+ task.driver.boot, task)
+
+
+class TestVendorPassthru(db_base.DbTestCase):
+
+ def setUp(self):
+ super(TestVendorPassthru, self).setUp()
+ mgr_utils.mock_the_extension_manager()
+ self.driver = driver_factory.get_driver("fake")
+ self.driver.vendor = iscsi_deploy.VendorPassthru()
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake',
+ instance_info=INST_INFO_DICT,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.node.driver_internal_info['agent_url'] = 'http://1.2.3.4:1234'
+ self.task = mock.MagicMock(spec=task_manager.TaskManager)
+ self.task.shared = False
+ self.task.node = self.node
+ self.task.driver = self.driver
+ self.task.context = self.context
+
+ def test_validate_good(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.instance_info['deploy_key'] = 'fake-56789'
+ task.driver.vendor.validate(task, method='pass_deploy_info',
+ address='123456', iqn='aaa-bbb',
+ key='fake-56789')
+
+ def test_validate_fail(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.vendor.validate,
+ task, method='pass_deploy_info',
+ key='fake-56789')
+
+ def test_validate_key_notmatch(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.vendor.validate,
+ task, method='pass_deploy_info',
+ address='123456', iqn='aaa-bbb',
+ key='fake-12345')
+
+ @mock.patch.object(fake.FakeBoot, 'prepare_instance', autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ @mock.patch.object(deploy_utils, 'deploy_partition_image', autospec=True)
+ def _test_pass_deploy_info_deploy(self, is_localboot, mock_deploy,
+ mock_image_cache,
+ notify_mock,
+ fakeboot_prepare_instance_mock):
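+        # Helper: runs pass_deploy_info for a partition image and verifies
+        # the ramdisk is notified and the instance image cache is cleaned up.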
+ # set local boot
+ i_info = self.node.instance_info
+ if is_localboot:
+ i_info['capabilities'] = '{"boot_option": "local"}'
+
+ i_info['deploy_key'] = 'fake-56789'
+ self.node.instance_info = i_info
+
+ self.node.power_state = states.POWER_ON
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ root_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
+ mock_deploy.return_value = {'root uuid': root_uuid}
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.vendor.pass_deploy_info(
+ task, address='123456', iqn='aaa-bbb', key='fake-56789')
+
+ self.node.refresh()
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+ self.assertIn('root_uuid_or_disk_id', self.node.driver_internal_info)
+ self.assertIsNone(self.node.last_error)
+ mock_image_cache.assert_called_once_with()
+ mock_image_cache.return_value.clean_up.assert_called_once_with()
+ notify_mock.assert_called_once_with('123456')
+ fakeboot_prepare_instance_mock.assert_called_once_with(mock.ANY, task)
+
+ @mock.patch.object(fake.FakeBoot, 'prepare_instance', autospec=True)
+ @mock.patch.object(deploy_utils, 'notify_ramdisk_to_proceed',
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'InstanceImageCache', autospec=True)
+ @mock.patch.object(deploy_utils, 'deploy_disk_image', autospec=True)
+ def _test_pass_deploy_info_whole_disk_image(self, is_localboot,
+ mock_deploy,
+ mock_image_cache,
+ notify_mock,
+ fakeboot_prep_inst_mock):
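+        # Helper: runs pass_deploy_info with a whole disk image and verifies
+        # the ramdisk is notified and the instance image cache is cleaned up.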
+ i_info = self.node.instance_info
+ # set local boot
+ if is_localboot:
+ i_info['capabilities'] = '{"boot_option": "local"}'
+
+ i_info['deploy_key'] = 'fake-56789'
+ self.node.instance_info = i_info
+
+ self.node.power_state = states.POWER_ON
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+
+ disk_id = '0x12345678'
+ mock_deploy.return_value = {'disk identifier': disk_id}
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = True
+ task.driver.vendor.pass_deploy_info(task, address='123456',
+ iqn='aaa-bbb',
+ key='fake-56789')
+
+ self.node.refresh()
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+ self.assertIsNone(self.node.last_error)
+ mock_image_cache.assert_called_once_with()
+ mock_image_cache.return_value.clean_up.assert_called_once_with()
+ notify_mock.assert_called_once_with('123456')
+ fakeboot_prep_inst_mock.assert_called_once_with(mock.ANY, task)
+
+ def test_pass_deploy_info_deploy(self):
+ self._test_pass_deploy_info_deploy(False)
+ self.assertEqual(states.ACTIVE, self.node.provision_state)
+ self.assertEqual(states.NOSTATE, self.node.target_provision_state)
+
+ def test_pass_deploy_info_localboot(self):
+ self._test_pass_deploy_info_deploy(True)
+ self.assertEqual(states.DEPLOYWAIT, self.node.provision_state)
+ self.assertEqual(states.ACTIVE, self.node.target_provision_state)
+
+ def test_pass_deploy_info_whole_disk_image(self):
+ self._test_pass_deploy_info_whole_disk_image(False)
+ self.assertEqual(states.ACTIVE, self.node.provision_state)
+ self.assertEqual(states.NOSTATE, self.node.target_provision_state)
+
+ def test_pass_deploy_info_whole_disk_image_localboot(self):
+ self._test_pass_deploy_info_whole_disk_image(True)
+ self.assertEqual(states.ACTIVE, self.node.provision_state)
+ self.assertEqual(states.NOSTATE, self.node.target_provision_state)
+
+ def test_pass_deploy_info_invalid(self):
+ self.node.power_state = states.POWER_ON
+ self.node.provision_state = states.AVAILABLE
+ self.node.target_provision_state = states.NOSTATE
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidState,
+ task.driver.vendor.pass_deploy_info,
+ task, address='123456', iqn='aaa-bbb',
+ key='fake-56789', error='test ramdisk error')
+
+ self.node.refresh()
+ self.assertEqual(states.AVAILABLE, self.node.provision_state)
+ self.assertEqual(states.NOSTATE, self.node.target_provision_state)
+ self.assertEqual(states.POWER_ON, self.node.power_state)
+
+ @mock.patch.object(iscsi_deploy.VendorPassthru, 'pass_deploy_info')
+ def test_pass_deploy_info_lock_elevated(self, mock_deploy_info):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.vendor.pass_deploy_info(
+ task, address='123456', iqn='aaa-bbb', key='fake-56789')
+
+ # lock elevated w/o exception
+ self.assertEqual(1, mock_deploy_info.call_count,
+ "pass_deploy_info was not called once.")
+
+ def test_vendor_routes(self):
+ expected = ['heartbeat', 'pass_deploy_info',
+ 'pass_bootloader_install_info']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ vendor_routes = task.driver.vendor.vendor_routes
+ self.assertIsInstance(vendor_routes, dict)
+ self.assertEqual(sorted(expected), sorted(list(vendor_routes)))
+
+ def test_driver_routes(self):
+ expected = ['lookup']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ driver_routes = task.driver.vendor.driver_routes
+ self.assertIsInstance(driver_routes, dict)
+ self.assertEqual(sorted(expected), sorted(list(driver_routes)))
+
+ @mock.patch.object(iscsi_deploy, 'validate_bootloader_install_status',
+ autospec=True)
+ @mock.patch.object(iscsi_deploy, 'finish_deploy', autospec=True)
+ def test_pass_bootloader_install_info(self, finish_deploy_mock,
+ validate_input_mock):
+ kwargs = {'method': 'pass_deploy_info', 'address': '123456'}
+ self.node.provision_state = states.DEPLOYWAIT
+ self.node.target_provision_state = states.ACTIVE
+ self.node.save()
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.vendor.pass_bootloader_install_info(task, **kwargs)
+ finish_deploy_mock.assert_called_once_with(task, '123456')
+ validate_input_mock.assert_called_once_with(task, kwargs)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', autospec=True)
+ def test_continue_deploy_netboot(self, do_agent_iscsi_deploy_mock,
+ reboot_and_finish_deploy_mock):
+
+ uuid_dict_returned = {'root uuid': 'some-root-uuid'}
+ do_agent_iscsi_deploy_mock.return_value = uuid_dict_returned
+ self.driver.vendor.continue_deploy(self.task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(
+ self.task, self.driver.vendor._client)
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ mock.ANY, self.task)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'configure_local_boot', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', autospec=True)
+ def test_continue_deploy_localboot(self, do_agent_iscsi_deploy_mock,
+ configure_local_boot_mock,
+ reboot_and_finish_deploy_mock):
+
+ self.node.instance_info = {
+ 'capabilities': {'boot_option': 'local'}}
+ self.node.save()
+ uuid_dict_returned = {'root uuid': 'some-root-uuid'}
+ do_agent_iscsi_deploy_mock.return_value = uuid_dict_returned
+
+ self.driver.vendor.continue_deploy(self.task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(
+ self.task, self.driver.vendor._client)
+ configure_local_boot_mock.assert_called_once_with(
+ self.task.driver.vendor, self.task, root_uuid='some-root-uuid',
+ efi_system_part_uuid=None)
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ self.task.driver.vendor, self.task)
+
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'reboot_and_finish_deploy', autospec=True)
+ @mock.patch.object(agent_base_vendor.BaseAgentVendor,
+ 'configure_local_boot', autospec=True)
+ @mock.patch.object(iscsi_deploy, 'do_agent_iscsi_deploy', autospec=True)
+ def test_continue_deploy_localboot_uefi(self, do_agent_iscsi_deploy_mock,
+ configure_local_boot_mock,
+ reboot_and_finish_deploy_mock):
+
+ self.node.instance_info = {
+ 'capabilities': {'boot_option': 'local'}}
+ self.node.save()
+ uuid_dict_returned = {'root uuid': 'some-root-uuid',
+ 'efi system partition uuid': 'efi-part-uuid'}
+ do_agent_iscsi_deploy_mock.return_value = uuid_dict_returned
+
+ self.driver.vendor.continue_deploy(self.task)
+ do_agent_iscsi_deploy_mock.assert_called_once_with(
+ self.task, self.driver.vendor._client)
+ configure_local_boot_mock.assert_called_once_with(
+ self.task.driver.vendor, self.task, root_uuid='some-root-uuid',
+ efi_system_part_uuid='efi-part-uuid')
+ reboot_and_finish_deploy_mock.assert_called_once_with(
+ self.task.driver.vendor, self.task)
+
+
+# Cleanup of iscsi_deploy with pxe boot interface
+class CleanUpFullFlowTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(CleanUpFullFlowTestCase, self).setUp()
+ self.config(image_cache_size=0, group='pxe')
+
+ # Configure node
+ mgr_utils.mock_the_extension_manager(driver="fake_pxe")
+ instance_info = INST_INFO_DICT
+ instance_info['deploy_key'] = 'fake-56789'
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_pxe',
+ instance_info=instance_info,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+ self.port = obj_utils.create_test_port(self.context,
+ node_id=self.node.id)
+
+ # Configure temporary directories
+ pxe_temp_dir = tempfile.mkdtemp()
+ self.config(tftp_root=pxe_temp_dir, group='pxe')
+ tftp_master_dir = os.path.join(CONF.pxe.tftp_root,
+ 'tftp_master')
+ self.config(tftp_master_path=tftp_master_dir, group='pxe')
+ os.makedirs(tftp_master_dir)
+
+ instance_temp_dir = tempfile.mkdtemp()
+ self.config(images_path=instance_temp_dir,
+ group='pxe')
+ instance_master_dir = os.path.join(CONF.pxe.images_path,
+ 'instance_master')
+ self.config(instance_master_path=instance_master_dir,
+ group='pxe')
+ os.makedirs(instance_master_dir)
+ self.pxe_config_dir = os.path.join(CONF.pxe.tftp_root, 'pxelinux.cfg')
+ os.makedirs(self.pxe_config_dir)
+
+ # Populate some file names
+ self.master_kernel_path = os.path.join(CONF.pxe.tftp_master_path,
+ 'kernel')
+ self.master_instance_path = os.path.join(CONF.pxe.instance_master_path,
+ 'image_uuid')
+ self.node_tftp_dir = os.path.join(CONF.pxe.tftp_root,
+ self.node.uuid)
+ os.makedirs(self.node_tftp_dir)
+ self.kernel_path = os.path.join(self.node_tftp_dir,
+ 'kernel')
+ self.node_image_dir = iscsi_deploy._get_image_dir_path(self.node.uuid)
+ os.makedirs(self.node_image_dir)
+ self.image_path = iscsi_deploy._get_image_file_path(self.node.uuid)
+ self.config_path = pxe_utils.get_pxe_config_file_path(self.node.uuid)
+ self.mac_path = pxe_utils._get_pxe_mac_path(self.port.address)
+
+ # Create files
+ self.files = [self.config_path, self.master_kernel_path,
+ self.master_instance_path]
+ for fname in self.files:
+ # NOTE(dtantsur): files with 0 size won't be cleaned up
+ with open(fname, 'w') as fp:
+ fp.write('test')
+
+ os.link(self.config_path, self.mac_path)
+ os.link(self.master_kernel_path, self.kernel_path)
+ os.link(self.master_instance_path, self.image_path)
+
+ @mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
+ @mock.patch.object(pxe, '_get_deploy_image_info', autospec=True)
+ def test_clean_up_with_master(self, mock_get_deploy_image_info,
+ mock_get_instance_image_info):
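+        # The per-node images, PXE config and master copies created in setUp
+        # should be gone after clean_up().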
+ image_info = {'kernel': ('kernel_uuid',
+ self.kernel_path)}
+ mock_get_instance_image_info.return_value = image_info
+ mock_get_deploy_image_info.return_value = {}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.deploy.clean_up(task)
+ mock_get_instance_image_info.assert_called_with(task.node,
+ task.context)
+ mock_get_deploy_image_info.assert_called_with(task.node)
+ for path in ([self.kernel_path, self.image_path, self.config_path]
+ + self.files):
+ self.assertFalse(os.path.exists(path),
+ '%s is not expected to exist' % path)
diff --git a/ironic/tests/unit/drivers/test_pxe.py b/ironic/tests/unit/drivers/test_pxe.py
new file mode 100644
index 000000000..178ffb088
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_pxe.py
@@ -0,0 +1,918 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for PXE driver."""
+
+import os
+import shutil
+import tempfile
+
+import mock
+from oslo_config import cfg
+from oslo_serialization import jsonutils as json
+from oslo_utils import fileutils
+
+from ironic.common import boot_devices
+from ironic.common import dhcp_factory
+from ironic.common import exception
+from ironic.common.glance_service import base_image_service
+from ironic.common import pxe_utils
+from ironic.common import states
+from ironic.common import utils
+from ironic.conductor import task_manager
+from ironic.drivers.modules import deploy_utils
+from ironic.drivers.modules import pxe
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+CONF = cfg.CONF
+
+INST_INFO_DICT = db_utils.get_test_pxe_instance_info()
+DRV_INFO_DICT = db_utils.get_test_pxe_driver_info()
+DRV_INTERNAL_INFO_DICT = db_utils.get_test_pxe_driver_internal_info()
+
+
+class PXEValidateParametersTestCase(db_base.DbTestCase):
+
+ def _test__parse_instance_info(
+ self, instance_info=INST_INFO_DICT,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT):
+        # make sure we get back the expected instance info
+ node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_pxe',
+ instance_info=instance_info,
+ driver_info=driver_info,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+
+ info = pxe._parse_instance_info(node)
+ self.assertIsNotNone(info.get('image_source'))
+ return info
+
+ def test__parse_instance_info_good(self):
+ self._test__parse_instance_info()
+
+ def test__parse_instance_info_good_non_glance_image(self):
+ instance_info = INST_INFO_DICT.copy()
+ instance_info['image_source'] = 'http://image'
+ instance_info['kernel'] = 'http://kernel'
+ instance_info['ramdisk'] = 'http://ramdisk'
+
+ info = self._test__parse_instance_info(instance_info=instance_info)
+
+ self.assertIsNotNone(info.get('ramdisk'))
+ self.assertIsNotNone(info.get('kernel'))
+
+ def test__parse_instance_info_non_glance_image_missing_kernel(self):
+ instance_info = INST_INFO_DICT.copy()
+ instance_info['image_source'] = 'http://image'
+ instance_info['ramdisk'] = 'http://ramdisk'
+
+ self.assertRaises(
+ exception.MissingParameterValue,
+ self._test__parse_instance_info,
+ instance_info=instance_info)
+
+ def test__parse_instance_info_non_glance_image_missing_ramdisk(self):
+ instance_info = INST_INFO_DICT.copy()
+ instance_info['image_source'] = 'http://image'
+ instance_info['kernel'] = 'http://kernel'
+
+ self.assertRaises(
+ exception.MissingParameterValue,
+ self._test__parse_instance_info,
+ instance_info=instance_info)
+
+ def test__parse_instance_info_missing_image_source(self):
+ instance_info = INST_INFO_DICT.copy()
+ del instance_info['image_source']
+
+ self.assertRaises(
+ exception.MissingParameterValue,
+ self._test__parse_instance_info,
+ instance_info=instance_info)
+
+ def test__parse_instance_info_whole_disk_image(self):
+ driver_internal_info = DRV_INTERNAL_INFO_DICT.copy()
+ driver_internal_info['is_whole_disk_image'] = True
+
+ self._test__parse_instance_info(
+ driver_internal_info=driver_internal_info)
+
+
+class PXEPrivateMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(PXEPrivateMethodsTestCase, self).setUp()
+ n = {
+ 'driver': 'fake_pxe',
+ 'instance_info': INST_INFO_DICT,
+ 'driver_info': DRV_INFO_DICT,
+ 'driver_internal_info': DRV_INTERNAL_INFO_DICT,
+ }
+ mgr_utils.mock_the_extension_manager(driver="fake_pxe")
+ self.node = obj_utils.create_test_node(self.context, **n)
+
+ def _test_get_pxe_conf_option(self, driver, expected_value):
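+        # Helper: checks which PXE config template value
+        # _get_pxe_conf_option() returns for the given driver.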
+ mgr_utils.mock_the_extension_manager(driver=driver)
+ self.node.driver = driver
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ returned_value = pxe._get_pxe_conf_option(
+ task, 'pxe_config_template')
+
+ self.assertEqual(expected_value, returned_value)
+
+ def test_get_pxe_conf_option_iscsi_deploy(self):
+ self.config(group='pxe', pxe_config_template='my-pxe-config-template')
+ self._test_get_pxe_conf_option('fake_pxe',
+ 'my-pxe-config-template')
+
+ def test_get_pxe_conf_option_agent_deploy_default(self):
+ self.config(group='pxe', pxe_config_template='my-pxe-config-template')
+ self._test_get_pxe_conf_option('fake_agent',
+ 'my-pxe-config-template')
+
+ def test_get_pxe_conf_option_agent_deploy_not_default(self):
+ self.config(group='agent',
+ agent_pxe_config_template='my-agent-config-template')
+ self.config(group='pxe', pxe_config_template='my-pxe-config-template')
+ self._test_get_pxe_conf_option('fake_agent',
+ 'my-agent-config-template')
+
+ def test__parse_driver_info_missing_deploy_kernel(self):
+ del self.node.driver_info['deploy_kernel']
+ self.assertRaises(exception.MissingParameterValue,
+ pxe._parse_driver_info, self.node)
+
+ def test__parse_driver_info_missing_deploy_ramdisk(self):
+ del self.node.driver_info['deploy_ramdisk']
+ self.assertRaises(exception.MissingParameterValue,
+ pxe._parse_driver_info, self.node)
+
+ def test__parse_driver_info(self):
+ expected_info = {'deploy_ramdisk': 'glance://deploy_ramdisk_uuid',
+ 'deploy_kernel': 'glance://deploy_kernel_uuid'}
+ image_info = pxe._parse_driver_info(self.node)
+ self.assertEqual(expected_info, image_info)
+
+ def test__get_deploy_image_info(self):
+ expected_info = {'deploy_ramdisk':
+ (DRV_INFO_DICT['deploy_ramdisk'],
+ os.path.join(CONF.pxe.tftp_root,
+ self.node.uuid,
+ 'deploy_ramdisk')),
+ 'deploy_kernel':
+ (DRV_INFO_DICT['deploy_kernel'],
+ os.path.join(CONF.pxe.tftp_root,
+ self.node.uuid,
+ 'deploy_kernel'))}
+ image_info = pxe._get_deploy_image_info(self.node)
+ self.assertEqual(expected_info, image_info)
+
+ def test__get_deploy_image_info_missing_deploy_kernel(self):
+ del self.node.driver_info['deploy_kernel']
+ self.assertRaises(exception.MissingParameterValue,
+ pxe._get_deploy_image_info, self.node)
+
+ def test__get_deploy_image_info_deploy_ramdisk(self):
+ del self.node.driver_info['deploy_ramdisk']
+ self.assertRaises(exception.MissingParameterValue,
+ pxe._get_deploy_image_info, self.node)
+
+ @mock.patch.object(base_image_service.BaseImageService, '_show',
+ autospec=True)
+ def _test__get_instance_image_info(self, show_mock):
+ properties = {'properties': {u'kernel_id': u'instance_kernel_uuid',
+ u'ramdisk_id': u'instance_ramdisk_uuid'}}
+
+ expected_info = {'ramdisk':
+ ('instance_ramdisk_uuid',
+ os.path.join(CONF.pxe.tftp_root,
+ self.node.uuid,
+ 'ramdisk')),
+ 'kernel':
+ ('instance_kernel_uuid',
+ os.path.join(CONF.pxe.tftp_root,
+ self.node.uuid,
+ 'kernel'))}
+ show_mock.return_value = properties
+ self.context.auth_token = 'fake'
+ image_info = pxe._get_instance_image_info(self.node, self.context)
+ show_mock.assert_called_once_with(mock.ANY, 'glance://image_uuid',
+ method='get')
+ self.assertEqual(expected_info, image_info)
+
+ # test with saved info
+ show_mock.reset_mock()
+ image_info = pxe._get_instance_image_info(self.node, self.context)
+ self.assertEqual(expected_info, image_info)
+ self.assertFalse(show_mock.called)
+ self.assertEqual('instance_kernel_uuid',
+ self.node.instance_info.get('kernel'))
+ self.assertEqual('instance_ramdisk_uuid',
+ self.node.instance_info.get('ramdisk'))
+
+ def test__get_instance_image_info(self):
+ # Tests when 'is_whole_disk_image' exists in driver_internal_info
+ self._test__get_instance_image_info()
+
+ def test__get_instance_image_info_without_is_whole_disk_image(self):
+        # Tests when 'is_whole_disk_image' doesn't exist in
+        # driver_internal_info
+ del self.node.driver_internal_info['is_whole_disk_image']
+ self.node.save()
+ self._test__get_instance_image_info()
+
+ @mock.patch.object(base_image_service.BaseImageService, '_show',
+ autospec=True)
+ def test__get_instance_image_info_whole_disk_image(self, show_mock):
+ properties = {'properties': None}
+ show_mock.return_value = properties
+ self.node.driver_internal_info['is_whole_disk_image'] = True
+ image_info = pxe._get_instance_image_info(self.node, self.context)
+ self.assertEqual({}, image_info)
+
+ @mock.patch.object(pxe_utils, '_build_pxe_config', autospec=True)
+ def _test_build_pxe_config_options(self, build_pxe_mock,
+ whle_dsk_img=False,
+ ipxe_enabled=False):
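+        # Helper: builds the expected kernel/ramdisk paths for TFTP or iPXE
+        # (HTTP) boot and compares them with _build_pxe_config_options().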
+ self.config(pxe_append_params='test_param', group='pxe')
+        # NOTE: any trailing '/' should be removed from the URL string
+ self.config(api_url='http://192.168.122.184:6385', group='conductor')
+ self.config(disk_devices='sda', group='pxe')
+
+ driver_internal_info = self.node.driver_internal_info
+ driver_internal_info['is_whole_disk_image'] = whle_dsk_img
+ self.node.driver_internal_info = driver_internal_info
+ self.node.save()
+
+ tftp_server = CONF.pxe.tftp_server
+
+ if ipxe_enabled:
+ http_url = 'http://192.1.2.3:1234'
+ self.config(ipxe_enabled=True, group='pxe')
+ self.config(http_url=http_url, group='deploy')
+
+ deploy_kernel = os.path.join(http_url, self.node.uuid,
+ 'deploy_kernel')
+ deploy_ramdisk = os.path.join(http_url, self.node.uuid,
+ 'deploy_ramdisk')
+ kernel = os.path.join(http_url, self.node.uuid, 'kernel')
+ ramdisk = os.path.join(http_url, self.node.uuid, 'ramdisk')
+ root_dir = CONF.deploy.http_root
+ else:
+ deploy_kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
+ 'deploy_kernel')
+ deploy_ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
+ 'deploy_ramdisk')
+ kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
+ 'kernel')
+ ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
+ 'ramdisk')
+ root_dir = CONF.pxe.tftp_root
+
+ if whle_dsk_img:
+ ramdisk = 'no_ramdisk'
+ kernel = 'no_kernel'
+
+ expected_options = {
+ 'ari_path': ramdisk,
+ 'deployment_ari_path': deploy_ramdisk,
+ 'pxe_append_params': 'test_param',
+ 'aki_path': kernel,
+ 'deployment_aki_path': deploy_kernel,
+ 'tftp_server': tftp_server,
+ }
+
+ image_info = {'deploy_kernel': ('deploy_kernel',
+ os.path.join(root_dir,
+ self.node.uuid,
+ 'deploy_kernel')),
+ 'deploy_ramdisk': ('deploy_ramdisk',
+ os.path.join(root_dir,
+ self.node.uuid,
+ 'deploy_ramdisk')),
+ 'kernel': ('kernel_id',
+ os.path.join(root_dir,
+ self.node.uuid,
+ 'kernel')),
+ 'ramdisk': ('ramdisk_id',
+ os.path.join(root_dir,
+ self.node.uuid,
+ 'ramdisk'))}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ options = pxe._build_pxe_config_options(task, image_info)
+ self.assertEqual(expected_options, options)
+
+ def test__build_pxe_config_options(self):
+ self._test_build_pxe_config_options(whle_dsk_img=True,
+ ipxe_enabled=False)
+
+ def test__build_pxe_config_options_ipxe(self):
+ self._test_build_pxe_config_options(whle_dsk_img=True,
+ ipxe_enabled=True)
+
+ def test__build_pxe_config_options_without_is_whole_disk_image(self):
+ del self.node.driver_internal_info['is_whole_disk_image']
+ self.node.save()
+ self._test_build_pxe_config_options(whle_dsk_img=False,
+ ipxe_enabled=False)
+
+ @mock.patch.object(pxe_utils, '_build_pxe_config', autospec=True)
+ def test__build_pxe_config_options_whole_disk_image(self,
+ build_pxe_mock,
+ ipxe_enabled=False):
+ self.config(pxe_append_params='test_param', group='pxe')
+        # NOTE: any trailing '/' should be removed from the URL string
+ self.config(api_url='http://192.168.122.184:6385', group='conductor')
+ self.config(disk_devices='sda', group='pxe')
+
+ tftp_server = CONF.pxe.tftp_server
+
+ if ipxe_enabled:
+ http_url = 'http://192.1.2.3:1234'
+ self.config(ipxe_enabled=True, group='pxe')
+ self.config(http_url=http_url, group='deploy')
+
+ deploy_kernel = os.path.join(http_url, self.node.uuid,
+ 'deploy_kernel')
+ deploy_ramdisk = os.path.join(http_url, self.node.uuid,
+ 'deploy_ramdisk')
+ root_dir = CONF.deploy.http_root
+ else:
+ deploy_kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
+ 'deploy_kernel')
+ deploy_ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
+ 'deploy_ramdisk')
+ root_dir = CONF.pxe.tftp_root
+
+ expected_options = {
+ 'deployment_ari_path': deploy_ramdisk,
+ 'pxe_append_params': 'test_param',
+ 'deployment_aki_path': deploy_kernel,
+ 'tftp_server': tftp_server,
+ 'aki_path': 'no_kernel',
+ 'ari_path': 'no_ramdisk',
+ }
+
+ image_info = {'deploy_kernel': ('deploy_kernel',
+ os.path.join(root_dir,
+ self.node.uuid,
+ 'deploy_kernel')),
+ 'deploy_ramdisk': ('deploy_ramdisk',
+ os.path.join(root_dir,
+ self.node.uuid,
+ 'deploy_ramdisk')),
+ }
+ driver_internal_info = self.node.driver_internal_info
+ driver_internal_info['is_whole_disk_image'] = True
+ self.node.driver_internal_info = driver_internal_info
+ self.node.save()
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ options = pxe._build_pxe_config_options(task, image_info)
+ self.assertEqual(expected_options, options)
+
+ def test__build_pxe_config_options_no_kernel_no_ramdisk(self):
+ del self.node.driver_internal_info['is_whole_disk_image']
+ self.node.save()
+ self.config(group='pxe', tftp_server='my-tftp-server')
+ self.config(group='pxe', pxe_append_params='my-pxe-append-params')
+ image_info = {
+ 'deploy_kernel': ('deploy_kernel',
+ 'path-to-deploy_kernel'),
+ 'deploy_ramdisk': ('deploy_ramdisk',
+ 'path-to-deploy_ramdisk')}
+
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ options = pxe._build_pxe_config_options(task, image_info)
+
+ expected_options = {
+ 'deployment_aki_path': 'path-to-deploy_kernel',
+ 'deployment_ari_path': 'path-to-deploy_ramdisk',
+ 'pxe_append_params': 'my-pxe-append-params',
+ 'tftp_server': 'my-tftp-server',
+ 'aki_path': 'no_kernel',
+ 'ari_path': 'no_ramdisk'}
+ self.assertEqual(expected_options, options)
+
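+    # NOTE: across the tests above the image_info labels map onto the PXE
+    # template options asserted in expected_options: deploy_kernel ->
+    # deployment_aki_path, deploy_ramdisk -> deployment_ari_path,
+    # kernel -> aki_path and ramdisk -> ari_path; when no instance
+    # kernel/ramdisk applies (whole disk images) the 'no_kernel' /
+    # 'no_ramdisk' placeholders are expected instead.
+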
+ @mock.patch.object(deploy_utils, 'fetch_images', autospec=True)
+ def test__cache_tftp_images_master_path(self, mock_fetch_image):
+ temp_dir = tempfile.mkdtemp()
+ self.config(tftp_root=temp_dir, group='pxe')
+ self.config(tftp_master_path=os.path.join(temp_dir,
+ 'tftp_master_path'),
+ group='pxe')
+ image_path = os.path.join(temp_dir, self.node.uuid,
+ 'deploy_kernel')
+ image_info = {'deploy_kernel': ('deploy_kernel', image_path)}
+ fileutils.ensure_tree(CONF.pxe.tftp_master_path)
+
+ pxe._cache_ramdisk_kernel(None, self.node, image_info)
+
+ mock_fetch_image.assert_called_once_with(None,
+ mock.ANY,
+ [('deploy_kernel',
+ image_path)],
+ True)
+
+ @mock.patch.object(pxe, 'TFTPImageCache', lambda: None)
+ @mock.patch.object(fileutils, 'ensure_tree', autospec=True)
+ @mock.patch.object(deploy_utils, 'fetch_images', autospec=True)
+ def test__cache_ramdisk_kernel(self, mock_fetch_image, mock_ensure_tree):
+ self.config(ipxe_enabled=False, group='pxe')
+ fake_pxe_info = {'foo': 'bar'}
+ expected_path = os.path.join(CONF.pxe.tftp_root, self.node.uuid)
+
+ pxe._cache_ramdisk_kernel(self.context, self.node, fake_pxe_info)
+ mock_ensure_tree.assert_called_with(expected_path)
+ mock_fetch_image.assert_called_once_with(
+ self.context, mock.ANY, list(fake_pxe_info.values()), True)
+
+ @mock.patch.object(pxe, 'TFTPImageCache', lambda: None)
+ @mock.patch.object(fileutils, 'ensure_tree', autospec=True)
+ @mock.patch.object(deploy_utils, 'fetch_images', autospec=True)
+ def test__cache_ramdisk_kernel_ipxe(self, mock_fetch_image,
+ mock_ensure_tree):
+ self.config(ipxe_enabled=True, group='pxe')
+ fake_pxe_info = {'foo': 'bar'}
+ expected_path = os.path.join(CONF.deploy.http_root,
+ self.node.uuid)
+
+ pxe._cache_ramdisk_kernel(self.context, self.node, fake_pxe_info)
+ mock_ensure_tree.assert_called_with(expected_path)
+ mock_fetch_image.assert_called_once_with(self.context, mock.ANY,
+ list(fake_pxe_info.values()),
+ True)
+
+ @mock.patch.object(pxe.LOG, 'error', autospec=True)
+ def test_validate_boot_option_for_uefi_exc(self, mock_log):
+ properties = {'capabilities': 'boot_mode:uefi'}
+ instance_info = {"boot_option": "netboot"}
+ self.node.properties = properties
+ self.node.instance_info['capabilities'] = instance_info
+ self.node.driver_internal_info['is_whole_disk_image'] = True
+ self.assertRaises(exception.InvalidParameterValue,
+ pxe.validate_boot_option_for_uefi,
+ self.node)
+ self.assertTrue(mock_log.called)
+
+ @mock.patch.object(pxe.LOG, 'error', autospec=True)
+ def test_validate_boot_option_for_uefi_noexc_one(self, mock_log):
+ properties = {'capabilities': 'boot_mode:uefi'}
+ instance_info = {"boot_option": "local"}
+ self.node.properties = properties
+ self.node.instance_info['capabilities'] = instance_info
+ self.node.driver_internal_info['is_whole_disk_image'] = True
+ pxe.validate_boot_option_for_uefi(self.node)
+ self.assertFalse(mock_log.called)
+
+ @mock.patch.object(pxe.LOG, 'error', autospec=True)
+ def test_validate_boot_option_for_uefi_noexc_two(self, mock_log):
+ properties = {'capabilities': 'boot_mode:bios'}
+ instance_info = {"boot_option": "local"}
+ self.node.properties = properties
+ self.node.instance_info['capabilities'] = instance_info
+ self.node.driver_internal_info['is_whole_disk_image'] = True
+ pxe.validate_boot_option_for_uefi(self.node)
+ self.assertFalse(mock_log.called)
+
+ @mock.patch.object(pxe.LOG, 'error', autospec=True)
+ def test_validate_boot_option_for_uefi_noexc_three(self, mock_log):
+ properties = {'capabilities': 'boot_mode:uefi'}
+ instance_info = {"boot_option": "local"}
+ self.node.properties = properties
+ self.node.instance_info['capabilities'] = instance_info
+ self.node.driver_internal_info['is_whole_disk_image'] = False
+ pxe.validate_boot_option_for_uefi(self.node)
+ self.assertFalse(mock_log.called)
+
+ @mock.patch.object(pxe.LOG, 'error', autospec=True)
+ def test_validate_boot_parameters_for_trusted_boot_one(self, mock_log):
+ properties = {'capabilities': 'boot_mode:uefi'}
+ instance_info = {"boot_option": "netboot"}
+ self.node.properties = properties
+ self.node.instance_info['capabilities'] = instance_info
+ self.node.driver_internal_info['is_whole_disk_image'] = False
+ self.assertRaises(exception.InvalidParameterValue,
+ pxe.validate_boot_parameters_for_trusted_boot,
+ self.node)
+ self.assertTrue(mock_log.called)
+
+ @mock.patch.object(pxe.LOG, 'error', autospec=True)
+ def test_validate_boot_parameters_for_trusted_boot_two(self, mock_log):
+ properties = {'capabilities': 'boot_mode:bios'}
+ instance_info = {"boot_option": "local"}
+ self.node.properties = properties
+ self.node.instance_info['capabilities'] = instance_info
+ self.node.driver_internal_info['is_whole_disk_image'] = False
+ self.assertRaises(exception.InvalidParameterValue,
+ pxe.validate_boot_parameters_for_trusted_boot,
+ self.node)
+ self.assertTrue(mock_log.called)
+
+ @mock.patch.object(pxe.LOG, 'error', autospec=True)
+ def test_validate_boot_parameters_for_trusted_boot_three(self, mock_log):
+ properties = {'capabilities': 'boot_mode:bios'}
+ instance_info = {"boot_option": "netboot"}
+ self.node.properties = properties
+ self.node.instance_info['capabilities'] = instance_info
+ self.node.driver_internal_info['is_whole_disk_image'] = True
+ self.assertRaises(exception.InvalidParameterValue,
+ pxe.validate_boot_parameters_for_trusted_boot,
+ self.node)
+ self.assertTrue(mock_log.called)
+
+ @mock.patch.object(pxe.LOG, 'error', autospec=True)
+ def test_validate_boot_parameters_for_trusted_boot_pass(self, mock_log):
+ properties = {'capabilities': 'boot_mode:bios'}
+ instance_info = {"boot_option": "netboot"}
+ self.node.properties = properties
+ self.node.instance_info['capabilities'] = instance_info
+ self.node.driver_internal_info['is_whole_disk_image'] = False
+ pxe.validate_boot_parameters_for_trusted_boot(self.node)
+ self.assertFalse(mock_log.called)
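+
+    # NOTE: taken together, the cases above expect UEFI netboot of a whole
+    # disk image to be rejected while 'local' boot or BIOS boot mode is
+    # accepted, and trusted boot to be valid only for BIOS netboot of a
+    # partition image (not with UEFI, local boot or whole disk images).
+    # This is a summary of the assertions above.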
+
+
+@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
+@mock.patch.object(pxe_utils, 'clean_up_pxe_config', autospec=True)
+@mock.patch.object(pxe, 'TFTPImageCache', autospec=True)
+class CleanUpPxeEnvTestCase(db_base.DbTestCase):
+ def setUp(self):
+ super(CleanUpPxeEnvTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_pxe")
+ instance_info = INST_INFO_DICT
+ instance_info['deploy_key'] = 'fake-56789'
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_pxe',
+ instance_info=instance_info,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT,
+ )
+
+ def test__clean_up_pxe_env(self, mock_cache, mock_pxe_clean,
+ mock_unlink):
+ image_info = {'label': ['', 'deploy_kernel']}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ pxe._clean_up_pxe_env(task, image_info)
+ mock_pxe_clean.assert_called_once_with(task)
+ mock_unlink.assert_any_call('deploy_kernel')
+ mock_cache.return_value.clean_up.assert_called_once_with()
+
+
+class PXEBootTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(PXEBootTestCase, self).setUp()
+ self.context.auth_token = 'fake'
+ self.temp_dir = tempfile.mkdtemp()
+ self.config(tftp_root=self.temp_dir, group='pxe')
+ self.temp_dir = tempfile.mkdtemp()
+ self.config(images_path=self.temp_dir, group='pxe')
+ mgr_utils.mock_the_extension_manager(driver="fake_pxe")
+ instance_info = INST_INFO_DICT
+ instance_info['deploy_key'] = 'fake-56789'
+ self.node = obj_utils.create_test_node(
+ self.context,
+ driver='fake_pxe',
+ instance_info=instance_info,
+ driver_info=DRV_INFO_DICT,
+ driver_internal_info=DRV_INTERNAL_INFO_DICT)
+ self.port = obj_utils.create_test_port(self.context,
+ node_id=self.node.id)
+ self.config(group='conductor', api_url='http://127.0.0.1:1234/')
+
+ def test_get_properties(self):
+ expected = pxe.COMMON_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ @mock.patch.object(base_image_service.BaseImageService, '_show',
+ autospec=True)
+ def test_validate_good(self, mock_glance):
+ mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
+ 'ramdisk_id': 'fake-initr'}}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.driver.boot.validate(task)
+
+ @mock.patch.object(base_image_service.BaseImageService, '_show',
+ autospec=True)
+ def test_validate_good_whole_disk_image(self, mock_glance):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.driver_internal_info['is_whole_disk_image'] = True
+ task.driver.boot.validate(task)
+
+ def test_validate_fail_missing_deploy_kernel(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ del task.node.driver_info['deploy_kernel']
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.boot.validate, task)
+
+ def test_validate_fail_missing_deploy_ramdisk(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ del task.node.driver_info['deploy_ramdisk']
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.boot.validate, task)
+
+ def test_validate_fail_missing_image_source(self):
+ info = dict(INST_INFO_DICT)
+ del info['image_source']
+ self.node.instance_info = json.dumps(info)
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node['instance_info'] = json.dumps(info)
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.boot.validate, task)
+
+ @mock.patch.object(base_image_service.BaseImageService, '_show',
+ autospec=True)
+ def test_validate_fail_invalid_config_uefi_ipxe(self, mock_glance):
+ properties = {'capabilities': 'boot_mode:uefi,cap2:value2'}
+ mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
+ 'ramdisk_id': 'fake-initr'}}
+ self.config(ipxe_enabled=True, group='pxe')
+ self.config(http_url='dummy_url', group='deploy')
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.properties = properties
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.boot.validate, task)
+
+ def test_validate_fail_invalid_config_uefi_whole_disk_image(self):
+ properties = {'capabilities': 'boot_mode:uefi,boot_option:netboot'}
+ instance_info = {"boot_option": "netboot"}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.properties = properties
+ task.node.instance_info['capabilities'] = instance_info
+ task.node.driver_internal_info['is_whole_disk_image'] = True
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.boot.validate, task)
+
+ def test_validate_fail_no_port(self):
+ new_node = obj_utils.create_test_node(
+ self.context,
+ uuid='aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
+ driver='fake_pxe', instance_info=INST_INFO_DICT,
+ driver_info=DRV_INFO_DICT)
+ with task_manager.acquire(self.context, new_node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.boot.validate, task)
+
+ def test_validate_fail_trusted_boot_with_secure_boot(self):
+ instance_info = {"boot_option": "netboot",
+ "secure_boot": "true",
+ "trusted_boot": "true"}
+ properties = {'capabilities': 'trusted_boot:true'}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.instance_info['capabilities'] = instance_info
+ task.node.properties = properties
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.boot.validate, task)
+
+ def test_validate_fail_invalid_trusted_boot_value(self):
+ properties = {'capabilities': 'trusted_boot:value'}
+ instance_info = {"trusted_boot": "value"}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ task.node.properties = properties
+ task.node.instance_info['capabilities'] = instance_info
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.boot.validate, task)
+
+ @mock.patch.object(base_image_service.BaseImageService, '_show',
+ autospec=True)
+ def test_validate_fail_no_image_kernel_ramdisk_props(self, mock_glance):
+ mock_glance.return_value = {'properties': {}}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.boot.validate,
+ task)
+
+ @mock.patch.object(base_image_service.BaseImageService, '_show',
+ autospec=True)
+    def test_validate_fail_glance_image_doesnt_exist(self, mock_glance):
+ mock_glance.side_effect = iter([exception.ImageNotFound('not found')])
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.boot.validate, task)
+
+ @mock.patch.object(base_image_service.BaseImageService, '_show',
+ autospec=True)
+ def test_validate_fail_glance_conn_problem(self, mock_glance):
+ exceptions = (exception.GlanceConnectionFailed('connection fail'),
+ exception.ImageNotAuthorized('not authorized'),
+ exception.Invalid('invalid'))
+ mock_glance.side_effect = iter(exceptions)
+ for exc in exceptions:
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.boot.validate, task)
+
+ @mock.patch.object(dhcp_factory, 'DHCPFactory')
+ @mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
+ @mock.patch.object(pxe, '_get_deploy_image_info', autospec=True)
+ @mock.patch.object(pxe, '_cache_ramdisk_kernel', autospec=True)
+ @mock.patch.object(pxe, '_build_pxe_config_options', autospec=True)
+ @mock.patch.object(pxe_utils, 'create_pxe_config', autospec=True)
+ def _test_prepare_ramdisk(self, mock_pxe_config,
+ mock_build_pxe, mock_cache_r_k,
+ mock_deploy_img_info,
+ mock_instance_img_info,
+ dhcp_factory_mock, uefi=False,
+ cleaning=False):
+ mock_build_pxe.return_value = {}
+ mock_deploy_img_info.return_value = {'deploy_kernel': 'a'}
+ mock_instance_img_info.return_value = {'kernel': 'b'}
+ mock_pxe_config.return_value = None
+ mock_cache_r_k.return_value = None
+ provider_mock = mock.MagicMock()
+ dhcp_factory_mock.return_value = provider_mock
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
+ task.driver.boot.prepare_ramdisk(task, {'foo': 'bar'})
+ mock_deploy_img_info.assert_called_once_with(task.node)
+ provider_mock.update_dhcp.assert_called_once_with(task, dhcp_opts)
+ if cleaning is False:
+ mock_cache_r_k.assert_called_once_with(
+ self.context, task.node,
+ {'deploy_kernel': 'a', 'kernel': 'b'})
+ mock_instance_img_info.assert_called_once_with(task.node,
+ self.context)
+ else:
+ mock_cache_r_k.assert_called_once_with(
+ self.context, task.node,
+ {'deploy_kernel': 'a'})
+ if uefi:
+ mock_pxe_config.assert_called_once_with(
+ task, {'foo': 'bar'}, CONF.pxe.uefi_pxe_config_template)
+ else:
+ mock_pxe_config.assert_called_once_with(
+ task, {'foo': 'bar'}, CONF.pxe.pxe_config_template)
+
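+    # NOTE: the helper above exercises prepare_ramdisk end to end: the
+    # deploy (and, outside of cleaning, the instance) kernel/ramdisk is
+    # cached, DHCP options are pushed through the DHCP provider, and the
+    # PXE config is rendered from the UEFI or BIOS template depending on
+    # the node's boot_mode capability (inferred from the mocks and
+    # assertions used in the helper).
+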
+ def test_prepare_ramdisk(self):
+ self.node.provision_state = states.DEPLOYING
+ self.node.save()
+ self._test_prepare_ramdisk()
+
+ def test_prepare_ramdisk_uefi(self):
+ self.node.provision_state = states.DEPLOYING
+ self.node.save()
+ properties = self.node.properties
+ properties['capabilities'] = 'boot_mode:uefi'
+ self.node.properties = properties
+ self.node.save()
+ self._test_prepare_ramdisk(uefi=True)
+
+ @mock.patch.object(shutil, 'copyfile', autospec=True)
+ def test_prepare_ramdisk_ipxe(self, copyfile_mock):
+ self.node.provision_state = states.DEPLOYING
+ self.node.save()
+ self.config(group='pxe', ipxe_enabled=True)
+ self.config(group='deploy', http_url='http://myserver')
+ self._test_prepare_ramdisk()
+ copyfile_mock.assert_called_once_with(
+ CONF.pxe.ipxe_boot_script,
+ os.path.join(
+ CONF.deploy.http_root,
+ os.path.basename(CONF.pxe.ipxe_boot_script)))
+
+ def test_prepare_ramdisk_cleaning(self):
+ self.node.provision_state = states.CLEANING
+ self.node.save()
+ self._test_prepare_ramdisk(cleaning=True)
+
+ @mock.patch.object(pxe, '_clean_up_pxe_env', autospec=True)
+ @mock.patch.object(pxe, '_get_deploy_image_info', autospec=True)
+ def test_clean_up_ramdisk(self, get_deploy_image_info_mock,
+ clean_up_pxe_env_mock):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ image_info = {'deploy_kernel': ['', '/path/to/deploy_kernel'],
+ 'deploy_ramdisk': ['', '/path/to/deploy_ramdisk']}
+ get_deploy_image_info_mock.return_value = image_info
+ task.driver.boot.clean_up_ramdisk(task)
+ clean_up_pxe_env_mock.assert_called_once_with(task, image_info)
+ get_deploy_image_info_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
+ @mock.patch.object(deploy_utils, 'switch_pxe_config', autospec=True)
+ @mock.patch.object(dhcp_factory, 'DHCPFactory', autospec=True)
+ @mock.patch.object(pxe, '_cache_ramdisk_kernel', autospec=True)
+ @mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
+ def test_prepare_instance_netboot(
+ self, get_image_info_mock, cache_mock,
+ dhcp_factory_mock, switch_pxe_config_mock,
+ set_boot_device_mock):
+ provider_mock = mock.MagicMock()
+ dhcp_factory_mock.return_value = provider_mock
+ image_info = {'kernel': ('', '/path/to/kernel'),
+ 'ramdisk': ('', '/path/to/ramdisk')}
+ get_image_info_mock.return_value = image_info
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
+ pxe_config_path = pxe_utils.get_pxe_config_file_path(
+ task.node.uuid)
+ task.node.properties['capabilities'] = 'boot_mode:bios'
+ task.node.driver_internal_info['root_uuid_or_disk_id'] = (
+ "30212642-09d3-467f-8e09-21685826ab50")
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+
+ task.driver.boot.prepare_instance(task)
+
+ get_image_info_mock.assert_called_once_with(
+ task.node, task.context)
+ cache_mock.assert_called_once_with(
+ task.context, task.node, image_info)
+ provider_mock.update_dhcp.assert_called_once_with(task, dhcp_opts)
+ switch_pxe_config_mock.assert_called_once_with(
+ pxe_config_path, "30212642-09d3-467f-8e09-21685826ab50",
+ 'bios', False, False)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.PXE)
+
+ @mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
+ @mock.patch.object(deploy_utils, 'switch_pxe_config', autospec=True)
+ @mock.patch.object(dhcp_factory, 'DHCPFactory')
+ @mock.patch.object(pxe, '_cache_ramdisk_kernel', autospec=True)
+ @mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
+ def test_prepare_instance_netboot_missing_root_uuid(
+ self, get_image_info_mock, cache_mock,
+ dhcp_factory_mock, switch_pxe_config_mock,
+ set_boot_device_mock):
+ provider_mock = mock.MagicMock()
+ dhcp_factory_mock.return_value = provider_mock
+ image_info = {'kernel': ('', '/path/to/kernel'),
+ 'ramdisk': ('', '/path/to/ramdisk')}
+ get_image_info_mock.return_value = image_info
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
+ task.node.properties['capabilities'] = 'boot_mode:bios'
+ task.node.driver_internal_info['is_whole_disk_image'] = False
+
+ task.driver.boot.prepare_instance(task)
+
+ get_image_info_mock.assert_called_once_with(
+ task.node, task.context)
+ cache_mock.assert_called_once_with(
+ task.context, task.node, image_info)
+ provider_mock.update_dhcp.assert_called_once_with(task, dhcp_opts)
+ self.assertFalse(switch_pxe_config_mock.called)
+ self.assertFalse(set_boot_device_mock.called)
+
+ @mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
+ @mock.patch.object(pxe_utils, 'clean_up_pxe_config', autospec=True)
+ def test_prepare_instance_localboot(self, clean_up_pxe_config_mock,
+ set_boot_device_mock):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node.instance_info['capabilities'] = {'boot_option': 'local'}
+ task.driver.boot.prepare_instance(task)
+ clean_up_pxe_config_mock.assert_called_once_with(task)
+ set_boot_device_mock.assert_called_once_with(task,
+ boot_devices.DISK)
+
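+    # NOTE: for the 'local' boot option the PXE config is expected to be
+    # removed and the boot device switched to disk, whereas the netboot
+    # cases above (when a root UUID is available) switch the PXE config
+    # to the deployed root UUID and set the boot device to PXE (summary
+    # of the assertions above).
+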
+ @mock.patch.object(pxe, '_clean_up_pxe_env', autospec=True)
+ @mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
+ def test_clean_up_instance(self, get_image_info_mock,
+ clean_up_pxe_env_mock):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ image_info = {'kernel': ['', '/path/to/kernel'],
+ 'ramdisk': ['', '/path/to/ramdisk']}
+ get_image_info_mock.return_value = image_info
+ task.driver.boot.clean_up_instance(task)
+ clean_up_pxe_env_mock.assert_called_once_with(task, image_info)
+ get_image_info_mock.assert_called_once_with(
+ task.node, task.context)
diff --git a/ironic/tests/unit/drivers/test_seamicro.py b/ironic/tests/unit/drivers/test_seamicro.py
new file mode 100644
index 000000000..63d816112
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_seamicro.py
@@ -0,0 +1,676 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for Ironic SeaMicro driver."""
+
+import uuid
+
+import mock
+from oslo_utils import uuidutils
+from seamicroclient import client as seamicro_client
+from seamicroclient import exceptions as seamicro_client_exception
+
+from ironic.common import boot_devices
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules import console_utils
+from ironic.drivers.modules import seamicro
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = db_utils.get_test_seamicro_info()
+
+
+class Fake_Server(object):
+ def __init__(self, active=False, *args, **kwargs):
+ self.active = active
+ self.nic = {'0': {'untaggedVlan': ''}}
+
+ def power_on(self):
+ self.active = True
+
+ def power_off(self, force=False):
+ self.active = False
+
+ def reset(self):
+ self.active = True
+
+ def set_untagged_vlan(self, vlan_id):
+ return
+
+ def attach_volume(self, volume_id):
+ return
+
+ def detach_volume(self):
+ return
+
+ def set_boot_order(self, boot_order):
+ return
+
+ def refresh(self, wait=0):
+ return self
+
+
+class Fake_Volume(object):
+ def __init__(self, id=None, *args, **kwargs):
+ if id is None:
+ self.id = "%s/%s/%s" % ("0", "ironic-p6-6", str(uuid.uuid4()))
+ else:
+ self.id = id
+
+
+class Fake_Pool(object):
+ def __init__(self, freeSize=None, *args, **kwargs):
+ self.freeSize = freeSize
+
+
+class SeaMicroValidateParametersTestCase(db_base.DbTestCase):
+
+ def test__parse_driver_info_good(self):
+ # make sure we get back the expected things
+ node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_seamicro',
+ driver_info=INFO_DICT)
+ info = seamicro._parse_driver_info(node)
+ self.assertIsNotNone(info.get('api_endpoint'))
+ self.assertIsNotNone(info.get('username'))
+ self.assertIsNotNone(info.get('password'))
+ self.assertIsNotNone(info.get('server_id'))
+ self.assertIsNotNone(info.get('uuid'))
+
+ def test__parse_driver_info_missing_api_endpoint(self):
+ # make sure error is raised when info is missing
+ info = dict(INFO_DICT)
+ del info['seamicro_api_endpoint']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ seamicro._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_username(self):
+ # make sure error is raised when info is missing
+ info = dict(INFO_DICT)
+ del info['seamicro_username']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ seamicro._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_password(self):
+ # make sure error is raised when info is missing
+ info = dict(INFO_DICT)
+ del info['seamicro_password']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ seamicro._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_server_id(self):
+ # make sure error is raised when info is missing
+ info = dict(INFO_DICT)
+ del info['seamicro_server_id']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ seamicro._parse_driver_info,
+ node)
+
+
+@mock.patch('eventlet.greenthread.sleep', lambda n: None)
+class SeaMicroPrivateMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(SeaMicroPrivateMethodsTestCase, self).setUp()
+ n = {
+ 'driver': 'fake_seamicro',
+ 'driver_info': INFO_DICT
+ }
+ self.node = obj_utils.create_test_node(self.context, **n)
+ self.Server = Fake_Server
+ self.Volume = Fake_Volume
+ self.Pool = Fake_Pool
+ self.config(action_timeout=0, group='seamicro')
+ self.config(max_retry=2, group='seamicro')
+
+ self.info = seamicro._parse_driver_info(self.node)
+
+ @mock.patch.object(seamicro_client, "Client", autospec=True)
+ def test__get_client(self, mock_client):
+ args = {'username': self.info['username'],
+ 'password': self.info['password'],
+ 'auth_url': self.info['api_endpoint']}
+ seamicro._get_client(**self.info)
+ mock_client.assert_called_once_with(self.info['api_version'], **args)
+
+ @mock.patch.object(seamicro_client, "Client", autospec=True)
+ def test__get_client_fail(self, mock_client):
+ args = {'username': self.info['username'],
+ 'password': self.info['password'],
+ 'auth_url': self.info['api_endpoint']}
+ mock_client.side_effect = seamicro_client_exception.UnsupportedVersion
+ self.assertRaises(exception.InvalidParameterValue,
+ seamicro._get_client,
+ **self.info)
+ mock_client.assert_called_once_with(self.info['api_version'], **args)
+
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__get_power_status_on(self, mock_get_server):
+ mock_get_server.return_value = self.Server(active=True)
+ pstate = seamicro._get_power_status(self.node)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__get_power_status_off(self, mock_get_server):
+ mock_get_server.return_value = self.Server(active=False)
+ pstate = seamicro._get_power_status(self.node)
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__get_power_status_error(self, mock_get_server):
+ mock_get_server.return_value = self.Server(active=None)
+ pstate = seamicro._get_power_status(self.node)
+ self.assertEqual(states.ERROR, pstate)
+
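+    # NOTE: the Fake_Server 'active' flag drives the mapping asserted in
+    # the three tests above: True -> POWER_ON, False -> POWER_OFF and
+    # None (unknown) -> ERROR.
+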
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__power_on_good(self, mock_get_server):
+ mock_get_server.return_value = self.Server(active=False)
+ pstate = seamicro._power_on(self.node)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__power_on_fail(self, mock_get_server):
+ def fake_power_on():
+ return
+
+ server = self.Server(active=False)
+ server.power_on = fake_power_on
+ mock_get_server.return_value = server
+ pstate = seamicro._power_on(self.node)
+ self.assertEqual(states.ERROR, pstate)
+
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__power_off_good(self, mock_get_server):
+ mock_get_server.return_value = self.Server(active=True)
+ pstate = seamicro._power_off(self.node)
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__power_off_fail(self, mock_get_server):
+ def fake_power_off():
+ return
+ server = self.Server(active=True)
+ server.power_off = fake_power_off
+ mock_get_server.return_value = server
+ pstate = seamicro._power_off(self.node)
+ self.assertEqual(states.ERROR, pstate)
+
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__reboot_good(self, mock_get_server):
+ mock_get_server.return_value = self.Server(active=True)
+ pstate = seamicro._reboot(self.node)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ @mock.patch.object(seamicro, "_get_server", autospec=True)
+ def test__reboot_fail(self, mock_get_server):
+ def fake_reboot():
+ return
+ server = self.Server(active=False)
+ server.reset = fake_reboot
+ mock_get_server.return_value = server
+ pstate = seamicro._reboot(self.node)
+ self.assertEqual(states.ERROR, pstate)
+
+ @mock.patch.object(seamicro, "_get_volume", autospec=True)
+ def test__validate_fail(self, mock_get_volume):
+ volume_id = "0/p6-6/vol1"
+ volume = self.Volume()
+ volume.id = volume_id
+ mock_get_volume.return_value = volume
+ self.assertRaises(exception.InvalidParameterValue,
+ seamicro._validate_volume, self.info, volume_id)
+
+ @mock.patch.object(seamicro, "_get_volume", autospec=True)
+ def test__validate_good(self, mock_get_volume):
+ volume = self.Volume()
+ mock_get_volume.return_value = volume
+ valid = seamicro._validate_volume(self.info, volume.id)
+        self.assertTrue(valid)
+
+ @mock.patch.object(seamicro, "_get_pools", autospec=True)
+ def test__create_volume_fail(self, mock_get_pools):
+ mock_get_pools.return_value = None
+ self.assertRaises(exception.IronicException,
+ seamicro._create_volume,
+ self.info, 2)
+
+ @mock.patch.object(seamicro, "_get_pools", autospec=True)
+ @mock.patch.object(seamicro, "_get_client", autospec=True)
+ def test__create_volume_good(self, mock_get_client, mock_get_pools):
+ pools = [self.Pool(1), self.Pool(6), self.Pool(5)]
+ mock_seamicro_volumes = mock.MagicMock(spec_set=['create'])
+ mock_get_client.return_value = mock.MagicMock(
+ volumes=mock_seamicro_volumes, spec_set=['volumes'])
+ mock_get_pools.return_value = pools
+ seamicro._create_volume(self.info, 2)
+
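+    # NOTE: the two _create_volume tests above expect volume creation to
+    # fail with IronicException when no storage pools are available and
+    # to succeed through the mocked client's volumes API when pools with
+    # free space exist.
+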
+
+class SeaMicroPowerDriverTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(SeaMicroPowerDriverTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_seamicro')
+ self.driver = driver_factory.get_driver('fake_seamicro')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_seamicro',
+ driver_info=INFO_DICT)
+ self.get_server_patcher = mock.patch.object(seamicro, '_get_server',
+ autospec=True)
+
+ self.get_server_mock = None
+ self.Server = Fake_Server
+ self.Volume = Fake_Volume
+ self.info = seamicro._parse_driver_info(self.node)
+
+ def test_get_properties(self):
+ expected = seamicro.COMMON_PROPERTIES
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.power.get_properties())
+
+ expected = (list(seamicro.COMMON_PROPERTIES) +
+ list(seamicro.CONSOLE_PROPERTIES))
+ console_properties = task.driver.console.get_properties().keys()
+ self.assertEqual(sorted(expected), sorted(console_properties))
+ self.assertEqual(sorted(expected),
+ sorted(task.driver.get_properties().keys()))
+
+ def test_vendor_routes(self):
+ expected = ['set_node_vlan_id', 'attach_volume']
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ vendor_routes = task.driver.vendor.vendor_routes
+ self.assertIsInstance(vendor_routes, dict)
+ self.assertEqual(sorted(expected), sorted(vendor_routes))
+
+ def test_driver_routes(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ driver_routes = task.driver.vendor.driver_routes
+ self.assertIsInstance(driver_routes, dict)
+ self.assertEqual({}, driver_routes)
+
+ @mock.patch.object(seamicro, '_parse_driver_info', autospec=True)
+ def test_power_interface_validate_good(self, parse_drv_info_mock):
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=True) as task:
+ task.driver.power.validate(task)
+ self.assertEqual(1, parse_drv_info_mock.call_count)
+
+ @mock.patch.object(seamicro, '_parse_driver_info', autospec=True)
+ def test_power_interface_validate_fails(self, parse_drv_info_mock):
+ side_effect = iter([exception.InvalidParameterValue("Bad input")])
+ parse_drv_info_mock.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.validate, task)
+ self.assertEqual(1, parse_drv_info_mock.call_count)
+
+ @mock.patch.object(seamicro, '_reboot', autospec=True)
+ def test_reboot(self, mock_reboot):
+ mock_reboot.return_value = states.POWER_ON
+
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ task.driver.power.reboot(task)
+
+ mock_reboot.assert_called_once_with(task.node)
+
+ def test_set_power_state_bad_state(self):
+ self.get_server_mock = self.get_server_patcher.start()
+ self.get_server_mock.return_value = self.Server()
+
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.IronicException,
+ task.driver.power.set_power_state,
+ task, "BAD_PSTATE")
+ self.get_server_patcher.stop()
+
+ @mock.patch.object(seamicro, '_power_on', autospec=True)
+ def test_set_power_state_on_good(self, mock_power_on):
+ mock_power_on.return_value = states.POWER_ON
+
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+
+ mock_power_on.assert_called_once_with(task.node)
+
+ @mock.patch.object(seamicro, '_power_on', autospec=True)
+ def test_set_power_state_on_fail(self, mock_power_on):
+ mock_power_on.return_value = states.POWER_OFF
+
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task, states.POWER_ON)
+
+ mock_power_on.assert_called_once_with(task.node)
+
+ @mock.patch.object(seamicro, '_power_off', autospec=True)
+ def test_set_power_state_off_good(self, mock_power_off):
+ mock_power_off.return_value = states.POWER_OFF
+
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_OFF)
+
+ mock_power_off.assert_called_once_with(task.node)
+
+ @mock.patch.object(seamicro, '_power_off', autospec=True)
+ def test_set_power_state_off_fail(self, mock_power_off):
+ mock_power_off.return_value = states.POWER_ON
+
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task, states.POWER_OFF)
+
+ mock_power_off.assert_called_once_with(task.node)
+
+ @mock.patch.object(seamicro, '_parse_driver_info', autospec=True)
+ def test_vendor_passthru_validate_good(self, mock_info):
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=True) as task:
+ for method in task.driver.vendor.vendor_routes:
+ task.driver.vendor.validate(task, **{'method': method})
+ self.assertEqual(len(task.driver.vendor.vendor_routes),
+ mock_info.call_count)
+
+ @mock.patch.object(seamicro, '_parse_driver_info', autospec=True)
+ def test_vendor_passthru_validate_parse_driver_info_fail(self, mock_info):
+ mock_info.side_effect = iter([exception.InvalidParameterValue("bad")])
+ with task_manager.acquire(self.context, self.node['uuid'],
+ shared=True) as task:
+ method = list(task.driver.vendor.vendor_routes)[0]
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.vendor.validate,
+ task, **{'method': method})
+ mock_info.assert_called_once_with(task.node)
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ def test_set_node_vlan_id_good(self, mock_get_server):
+ vlan_id = "12"
+ mock_get_server.return_value = self.Server(active="true")
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ kwargs = {'vlan_id': vlan_id}
+ task.driver.vendor.set_node_vlan_id(task, **kwargs)
+ mock_get_server.assert_called_once_with(self.info)
+
+ def test_set_node_vlan_id_no_input(self):
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.vendor.set_node_vlan_id,
+ task, **{})
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ def test_set_node_vlan_id_fail(self, mock_get_server):
+ def fake_set_untagged_vlan(self, **kwargs):
+ raise seamicro_client_exception.ClientException(500)
+
+ vlan_id = "12"
+ server = self.Server(active="true")
+ server.set_untagged_vlan = fake_set_untagged_vlan
+ mock_get_server.return_value = server
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ kwargs = {'vlan_id': vlan_id}
+ self.assertRaises(exception.IronicException,
+ task.driver.vendor.set_node_vlan_id,
+ task, **kwargs)
+
+ mock_get_server.assert_called_once_with(self.info)
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ @mock.patch.object(seamicro, '_validate_volume', autospec=True)
+ def test_attach_volume_with_volume_id_good(self, mock_validate_volume,
+ mock_get_server):
+ volume_id = '0/ironic-p6-1/vol1'
+ mock_validate_volume.return_value = True
+ mock_get_server.return_value = self.Server(active="true")
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ kwargs = {'volume_id': volume_id}
+ task.driver.vendor.attach_volume(task, **kwargs)
+ mock_get_server.assert_called_once_with(self.info)
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ @mock.patch.object(seamicro, '_get_volume', autospec=True)
+ def test_attach_volume_with_invalid_volume_id_fail(self,
+ mock_get_volume,
+ mock_get_server):
+ volume_id = '0/p6-1/vol1'
+ mock_get_volume.return_value = self.Volume(volume_id)
+ mock_get_server.return_value = self.Server(active="true")
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ kwargs = {'volume_id': volume_id}
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.vendor.attach_volume,
+ task, **kwargs)
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ @mock.patch.object(seamicro, '_validate_volume', autospec=True)
+ def test_attach_volume_fail(self, mock_validate_volume,
+ mock_get_server):
+ def fake_attach_volume(self, **kwargs):
+ raise seamicro_client_exception.ClientException(500)
+
+ volume_id = '0/p6-1/vol1'
+ mock_validate_volume.return_value = True
+ server = self.Server(active="true")
+ server.attach_volume = fake_attach_volume
+ mock_get_server.return_value = server
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ kwargs = {'volume_id': volume_id}
+ self.assertRaises(exception.IronicException,
+ task.driver.vendor.attach_volume,
+ task, **kwargs)
+
+ mock_get_server.assert_called_once_with(self.info)
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ @mock.patch.object(seamicro, '_validate_volume', autospec=True)
+ @mock.patch.object(seamicro, '_create_volume', autospec=True)
+ def test_attach_volume_with_volume_size_good(self, mock_create_volume,
+ mock_validate_volume,
+ mock_get_server):
+ volume_id = '0/ironic-p6-1/vol1'
+ volume_size = 2
+ mock_create_volume.return_value = volume_id
+ mock_validate_volume.return_value = True
+ mock_get_server.return_value = self.Server(active="true")
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ kwargs = {'volume_size': volume_size}
+ task.driver.vendor.attach_volume(task, **kwargs)
+ mock_get_server.assert_called_once_with(self.info)
+ mock_create_volume.assert_called_once_with(self.info, volume_size)
+
+ def test_attach_volume_with_no_input_fail(self):
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.vendor.attach_volume, task,
+ **{})
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ def test_set_boot_device_good(self, mock_get_server):
+ boot_device = "disk"
+ mock_get_server.return_value = self.Server(active="true")
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ task.driver.management.set_boot_device(task, boot_device)
+ mock_get_server.assert_called_once_with(self.info)
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ def test_set_boot_device_invalid_device_fail(self, mock_get_server):
+ boot_device = "invalid_device"
+ mock_get_server.return_value = self.Server(active="true")
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.management.set_boot_device,
+ task, boot_device)
+
+ @mock.patch.object(seamicro, '_get_server', autospec=True)
+ def test_set_boot_device_fail(self, mock_get_server):
+ def fake_set_boot_order(self, **kwargs):
+ raise seamicro_client_exception.ClientException(500)
+
+ boot_device = "pxe"
+ server = self.Server(active="true")
+ server.set_boot_order = fake_set_boot_order
+ mock_get_server.return_value = server
+ with task_manager.acquire(self.context, self.info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.IronicException,
+ task.driver.management.set_boot_device,
+ task, boot_device)
+
+ mock_get_server.assert_called_once_with(self.info)
+
+ def test_management_interface_get_supported_boot_devices(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ expected = [boot_devices.PXE, boot_devices.DISK]
+            supported = task.driver.management.get_supported_boot_devices(
+                task)
+            self.assertEqual(sorted(expected), sorted(supported))
+
+ def test_management_interface_get_boot_device(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ expected = {'boot_device': None, 'persistent': None}
+ self.assertEqual(expected,
+ task.driver.management.get_boot_device(task))
+
+ def test_management_interface_validate_good(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.management.validate(task)
+
+ def test_management_interface_validate_fail(self):
+        # Missing SeaMicro driver_info information
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake_seamicro')
+ with task_manager.acquire(self.context, node.uuid) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.management.validate, task)
+
+
+class SeaMicroDriverTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(SeaMicroDriverTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_seamicro')
+ self.driver = driver_factory.get_driver('fake_seamicro')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_seamicro',
+ driver_info=INFO_DICT)
+ self.get_server_patcher = mock.patch.object(seamicro, '_get_server',
+ autospec=True)
+
+ self.get_server_mock = None
+ self.Server = Fake_Server
+ self.Volume = Fake_Volume
+ self.info = seamicro._parse_driver_info(self.node)
+
+ @mock.patch.object(console_utils, 'start_shellinabox_console',
+ autospec=True)
+ def test_start_console(self, mock_exec):
+ mock_exec.return_value = None
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.console.start_console(task)
+
+ mock_exec.assert_called_once_with(self.info['uuid'],
+ self.info['port'],
+ mock.ANY)
+
+ @mock.patch.object(console_utils, 'start_shellinabox_console',
+ autospec=True)
+ def test_start_console_fail(self, mock_exec):
+ mock_exec.side_effect = iter(
+ [exception.ConsoleSubprocessFailed(error='error')])
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.ConsoleSubprocessFailed,
+ self.driver.console.start_console,
+ task)
+
+ @mock.patch.object(console_utils, 'stop_shellinabox_console',
+ autospec=True)
+ def test_stop_console(self, mock_exec):
+ mock_exec.return_value = None
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.driver.console.stop_console(task)
+
+ mock_exec.assert_called_once_with(self.info['uuid'])
+
+ @mock.patch.object(console_utils, 'stop_shellinabox_console',
+ autospec=True)
+ def test_stop_console_fail(self, mock_stop):
+ mock_stop.side_effect = iter([exception.ConsoleError()])
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.ConsoleError,
+ self.driver.console.stop_console,
+ task)
+
+ mock_stop.assert_called_once_with(self.node.uuid)
+
+ @mock.patch.object(console_utils, 'start_shellinabox_console',
+ autospec=True)
+ def test_start_console_fail_nodir(self, mock_exec):
+ mock_exec.side_effect = iter([exception.ConsoleError()])
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ self.assertRaises(exception.ConsoleError,
+ self.driver.console.start_console,
+ task)
+ mock_exec.assert_called_once_with(self.node.uuid, mock.ANY, mock.ANY)
+
+ @mock.patch.object(console_utils, 'get_shellinabox_console_url',
+ autospec=True)
+ def test_get_console(self, mock_exec):
+ url = 'http://localhost:4201'
+ mock_exec.return_value = url
+ expected = {'type': 'shellinabox', 'url': url}
+
+ with task_manager.acquire(self.context,
+ self.node.uuid) as task:
+ console_info = self.driver.console.get_console(task)
+
+ self.assertEqual(expected, console_info)
+ mock_exec.assert_called_once_with(self.info['port'])
diff --git a/ironic/tests/unit/drivers/test_snmp.py b/ironic/tests/unit/drivers/test_snmp.py
new file mode 100644
index 000000000..5842739f0
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_snmp.py
@@ -0,0 +1,1263 @@
+# Copyright 2013,2014 Cray Inc
+#
+# Authors: David Hewson <dhewson@cray.com>
+# Stig Telfer <stelfer@cray.com>
+# Mark Goddard <mgoddard@cray.com>
+#
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for SNMP power driver module."""
+
+import mock
+from oslo_config import cfg
+from pysnmp.entity.rfc3413.oneliner import cmdgen
+from pysnmp import error as snmp_error
+
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules import snmp
+from ironic.tests.unit import base
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+CONF = cfg.CONF
+INFO_DICT = db_utils.get_test_snmp_info()
+
+
+@mock.patch.object(cmdgen, 'CommandGenerator', autospec=True)
+class SNMPClientTestCase(base.TestCase):
+ def setUp(self):
+ super(SNMPClientTestCase, self).setUp()
+ self.address = '1.2.3.4'
+ self.port = '6700'
+ self.oid = 'oid'
+ self.value = 'value'
+
+ def test___init__(self, mock_cmdgen):
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V1)
+ mock_cmdgen.assert_called_once_with()
+ self.assertEqual(self.address, client.address)
+ self.assertEqual(self.port, client.port)
+ self.assertEqual(snmp.SNMP_V1, client.version)
+ self.assertIsNone(client.community)
+        self.assertNotIn('security', client.__dict__)
+ self.assertEqual(mock_cmdgen.return_value, client.cmd_gen)
+
+ @mock.patch.object(cmdgen, 'CommunityData', autospec=True)
+ def test__get_auth_v1(self, mock_community, mock_cmdgen):
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V1)
+ client._get_auth()
+ mock_cmdgen.assert_called_once_with()
+ mock_community.assert_called_once_with(client.community, mpModel=0)
+
+ @mock.patch.object(cmdgen, 'UsmUserData', autospec=True)
+ def test__get_auth_v3(self, mock_user, mock_cmdgen):
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ client._get_auth()
+ mock_cmdgen.assert_called_once_with()
+ mock_user.assert_called_once_with(client.security)
+
+ @mock.patch.object(cmdgen, 'UdpTransportTarget', autospec=True)
+ def test__get_transport(self, mock_transport, mock_cmdgen):
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ client._get_transport()
+ mock_cmdgen.assert_called_once_with()
+ mock_transport.assert_called_once_with((client.address, client.port))
+
+ @mock.patch.object(cmdgen, 'UdpTransportTarget', autospec=True)
+ def test__get_transport_err(self, mock_transport, mock_cmdgen):
+ mock_transport.side_effect = snmp_error.PySnmpError
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ self.assertRaises(snmp_error.PySnmpError, client._get_transport)
+ mock_cmdgen.assert_called_once_with()
+ mock_transport.assert_called_once_with((client.address, client.port))
+
+ @mock.patch.object(snmp.SNMPClient, '_get_transport', autospec=True)
+ @mock.patch.object(snmp.SNMPClient, '_get_auth', autospec=True)
+ def test_get(self, mock_auth, mock_transport, mock_cmdgen):
+ var_bind = (self.oid, self.value)
+ mock_cmdgenerator = mock_cmdgen.return_value
+ mock_cmdgenerator.getCmd.return_value = ("", None, 0, [var_bind])
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ val = client.get(self.oid)
+ self.assertEqual(var_bind[1], val)
+ mock_cmdgenerator.getCmd.assert_called_once_with(mock.ANY, mock.ANY,
+ self.oid)
+
+ @mock.patch.object(snmp.SNMPClient, '_get_transport', autospec=True)
+ @mock.patch.object(snmp.SNMPClient, '_get_auth', autospec=True)
+ def test_get_err_transport(self, mock_auth, mock_transport, mock_cmdgen):
+ mock_transport.side_effect = snmp_error.PySnmpError
+ var_bind = (self.oid, self.value)
+ mock_cmdgenerator = mock_cmdgen.return_value
+ mock_cmdgenerator.getCmd.return_value = ("engine error", None, 0,
+ [var_bind])
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ self.assertRaises(exception.SNMPFailure, client.get, self.oid)
+ self.assertFalse(mock_cmdgenerator.getCmd.called)
+
+ @mock.patch.object(snmp.SNMPClient, '_get_transport', autospec=True)
+ @mock.patch.object(snmp.SNMPClient, '_get_auth', autospec=True)
+ def test_get_err_engine(self, mock_auth, mock_transport, mock_cmdgen):
+ var_bind = (self.oid, self.value)
+ mock_cmdgenerator = mock_cmdgen.return_value
+ mock_cmdgenerator.getCmd.return_value = ("engine error", None, 0,
+ [var_bind])
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ self.assertRaises(exception.SNMPFailure, client.get, self.oid)
+ mock_cmdgenerator.getCmd.assert_called_once_with(mock.ANY, mock.ANY,
+ self.oid)
+
+ @mock.patch.object(snmp.SNMPClient, '_get_transport', autospec=True)
+ @mock.patch.object(snmp.SNMPClient, '_get_auth', autospec=True)
+ def test_set(self, mock_auth, mock_transport, mock_cmdgen):
+ var_bind = (self.oid, self.value)
+ mock_cmdgenerator = mock_cmdgen.return_value
+ mock_cmdgenerator.setCmd.return_value = ("", None, 0, [var_bind])
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ client.set(self.oid, self.value)
+ mock_cmdgenerator.setCmd.assert_called_once_with(mock.ANY, mock.ANY,
+ var_bind)
+
+ @mock.patch.object(snmp.SNMPClient, '_get_transport', autospec=True)
+ @mock.patch.object(snmp.SNMPClient, '_get_auth', autospec=True)
+ def test_set_err_transport(self, mock_auth, mock_transport, mock_cmdgen):
+ mock_transport.side_effect = snmp_error.PySnmpError
+ var_bind = (self.oid, self.value)
+ mock_cmdgenerator = mock_cmdgen.return_value
+ mock_cmdgenerator.setCmd.return_value = ("engine error", None, 0,
+ [var_bind])
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ self.assertRaises(exception.SNMPFailure,
+ client.set, self.oid, self.value)
+ self.assertFalse(mock_cmdgenerator.setCmd.called)
+
+ @mock.patch.object(snmp.SNMPClient, '_get_transport', autospec=True)
+ @mock.patch.object(snmp.SNMPClient, '_get_auth', autospec=True)
+ def test_set_err_engine(self, mock_auth, mock_transport, mock_cmdgen):
+ var_bind = (self.oid, self.value)
+ mock_cmdgenerator = mock_cmdgen.return_value
+ mock_cmdgenerator.setCmd.return_value = ("engine error", None, 0,
+ [var_bind])
+ client = snmp.SNMPClient(self.address, self.port, snmp.SNMP_V3)
+ self.assertRaises(exception.SNMPFailure,
+ client.set, self.oid, self.value)
+ mock_cmdgenerator.setCmd.assert_called_once_with(mock.ANY, mock.ANY,
+ var_bind)
+
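+    # NOTE: pysnmp's getCmd/setCmd return an (error_indication,
+    # error_status, error_index, var_binds) tuple; the tests above expect
+    # SNMPClient to raise exception.SNMPFailure when building the
+    # transport fails or when error_indication is non-empty, and
+    # otherwise to use the value from the returned var_bind.
+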
+
+class SNMPValidateParametersTestCase(db_base.DbTestCase):
+
+ def _get_test_node(self, driver_info):
+ return obj_utils.get_test_node(
+ self.context,
+ driver_info=driver_info)
+
+ def test__parse_driver_info_default(self):
+ # Make sure we get back the expected things.
+ node = self._get_test_node(INFO_DICT)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual(INFO_DICT['snmp_driver'], info.get('driver'))
+ self.assertEqual(INFO_DICT['snmp_address'], info.get('address'))
+ self.assertEqual(INFO_DICT['snmp_port'], str(info.get('port')))
+ self.assertEqual(INFO_DICT['snmp_outlet'], info.get('outlet'))
+ self.assertEqual(INFO_DICT['snmp_version'], info.get('version'))
+ self.assertEqual(INFO_DICT.get('snmp_community'),
+ info.get('community'))
+ self.assertEqual(INFO_DICT.get('snmp_security'),
+ info.get('security'))
+
+ def test__parse_driver_info_apc(self):
+ # Make sure the APC driver type is parsed.
+ info = db_utils.get_test_snmp_info(snmp_driver='apc')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('apc', info.get('driver'))
+
+ def test__parse_driver_info_apc_masterswitch(self):
+ # Make sure the APC driver type is parsed.
+ info = db_utils.get_test_snmp_info(snmp_driver='apc_masterswitch')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('apc_masterswitch', info.get('driver'))
+
+ def test__parse_driver_info_apc_masterswitchplus(self):
+ # Make sure the APC driver type is parsed.
+ info = db_utils.get_test_snmp_info(snmp_driver='apc_masterswitchplus')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('apc_masterswitchplus', info.get('driver'))
+
+ def test__parse_driver_info_apc_rackpdu(self):
+ # Make sure the APC driver type is parsed.
+ info = db_utils.get_test_snmp_info(snmp_driver='apc_rackpdu')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('apc_rackpdu', info.get('driver'))
+
+ def test__parse_driver_info_aten(self):
+ # Make sure the Aten driver type is parsed.
+ info = db_utils.get_test_snmp_info(snmp_driver='aten')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('aten', info.get('driver'))
+
+ def test__parse_driver_info_cyberpower(self):
+ # Make sure the CyberPower driver type is parsed.
+ info = db_utils.get_test_snmp_info(snmp_driver='cyberpower')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('cyberpower', info.get('driver'))
+
+ def test__parse_driver_info_eatonpower(self):
+ # Make sure the Eaton Power driver type is parsed.
+ info = db_utils.get_test_snmp_info(snmp_driver='eatonpower')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('eatonpower', info.get('driver'))
+
+ def test__parse_driver_info_teltronix(self):
+ # Make sure the Teltronix driver type is parsed.
+ info = db_utils.get_test_snmp_info(snmp_driver='teltronix')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('teltronix', info.get('driver'))
+
+ def test__parse_driver_info_snmp_v1(self):
+ # Make sure SNMPv1 is parsed with a community string.
+ info = db_utils.get_test_snmp_info(snmp_version='1',
+ snmp_community='public')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('1', info.get('version'))
+ self.assertEqual('public', info.get('community'))
+
+ def test__parse_driver_info_snmp_v2c(self):
+ # Make sure SNMPv2c is parsed with a community string.
+ info = db_utils.get_test_snmp_info(snmp_version='2c',
+ snmp_community='private')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('2c', info.get('version'))
+ self.assertEqual('private', info.get('community'))
+
+ def test__parse_driver_info_snmp_v3(self):
+ # Make sure SNMPv3 is parsed with a security string.
+ info = db_utils.get_test_snmp_info(snmp_version='3',
+ snmp_security='pass')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('3', info.get('version'))
+ self.assertEqual('pass', info.get('security'))
+
+ def test__parse_driver_info_snmp_port_default(self):
+ # Make sure default SNMP UDP port numbers are correct
+ info = dict(INFO_DICT)
+ del info['snmp_port']
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual(161, info.get('port'))
+
+ def test__parse_driver_info_snmp_port(self):
+ # Make sure non-default SNMP UDP port numbers can be configured
+ info = db_utils.get_test_snmp_info(snmp_port='10161')
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual(10161, info.get('port'))
+
+ def test__parse_driver_info_missing_driver(self):
+ # Make sure exception is raised when the driver type is missing.
+ info = dict(INFO_DICT)
+ del info['snmp_driver']
+ node = self._get_test_node(info)
+ self.assertRaises(exception.MissingParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_invalid_driver(self):
+ # Make sure exception is raised when the driver type is invalid.
+ info = db_utils.get_test_snmp_info(snmp_driver='invalidpower')
+ node = self._get_test_node(info)
+ self.assertRaises(exception.InvalidParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_address(self):
+ # Make sure exception is raised when the address is missing.
+ info = dict(INFO_DICT)
+ del info['snmp_address']
+ node = self._get_test_node(info)
+ self.assertRaises(exception.MissingParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_outlet(self):
+ # Make sure exception is raised when the outlet is missing.
+ info = dict(INFO_DICT)
+ del info['snmp_outlet']
+ node = self._get_test_node(info)
+ self.assertRaises(exception.MissingParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_default_version(self):
+ # Make sure version defaults to 1 when it is missing.
+ info = dict(INFO_DICT)
+ del info['snmp_version']
+ node = self._get_test_node(info)
+ info = snmp._parse_driver_info(node)
+ self.assertEqual('1', info.get('version'))
+ self.assertEqual(INFO_DICT['snmp_community'], info.get('community'))
+
+ def test__parse_driver_info_invalid_version(self):
+ # Make sure exception is raised when version is invalid.
+ info = db_utils.get_test_snmp_info(snmp_version='42',
+ snmp_community='public',
+ snmp_security='pass')
+ node = self._get_test_node(info)
+ self.assertRaises(exception.InvalidParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_default_version_and_missing_community(self):
+ # Make sure exception is raised when version and community are missing.
+ info = dict(INFO_DICT)
+ del info['snmp_version']
+ del info['snmp_community']
+ node = self._get_test_node(info)
+ self.assertRaises(exception.MissingParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_community_snmp_v1(self):
+ # Make sure exception is raised when community is missing with SNMPv1.
+ info = dict(INFO_DICT)
+ del info['snmp_community']
+ node = self._get_test_node(info)
+ self.assertRaises(exception.MissingParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_community_snmp_v2c(self):
+ # Make sure exception is raised when community is missing with SNMPv2c.
+ info = db_utils.get_test_snmp_info(snmp_version='2c')
+ del info['snmp_community']
+ node = self._get_test_node(info)
+ self.assertRaises(exception.MissingParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_security(self):
+ # Make sure exception is raised when security is missing with SNMPv3.
+ info = db_utils.get_test_snmp_info(snmp_version='3')
+ del info['snmp_security']
+ node = self._get_test_node(info)
+ self.assertRaises(exception.MissingParameterValue,
+ snmp._parse_driver_info,
+ node)
+
+
+@mock.patch.object(snmp, '_get_client', autospec=True)
+class SNMPDeviceDriverTestCase(db_base.DbTestCase):
+ """Tests for the SNMP device-specific driver classes.
+
+ The SNMP client object is mocked to allow various error cases to be
+ tested.
+ """
+
+ def setUp(self):
+ super(SNMPDeviceDriverTestCase, self).setUp()
+ self.node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_snmp',
+ driver_info=INFO_DICT)
+
+ def _update_driver_info(self, **kwargs):
+ self.node["driver_info"].update(**kwargs)
+
+ def _set_snmp_driver(self, snmp_driver):
+ self._update_driver_info(snmp_driver=snmp_driver)
+
+ def _get_snmp_failure(self):
+ return exception.SNMPFailure(operation='test-operation',
+ error='test-error')
+
+ def test_power_state_on(self, mock_get_client):
+ # Ensure the power on state is queried correctly
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_on
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def test_power_state_off(self, mock_get_client):
+ # Ensure the power off state is queried correctly
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_off
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ def test_power_state_error(self, mock_get_client):
+ # Ensure an unexpected power state returns an error
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = 42
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.ERROR, pstate)
+
+ def test_power_state_snmp_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a query are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = self._get_snmp_failure()
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_state)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+
+ def test_power_on(self, mock_get_client):
+ # Ensure the device is powered on correctly
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_on
+ pstate = driver.power_on()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_on)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def test_power_off(self, mock_get_client):
+ # Ensure the device is powered off correctly
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_off
+ pstate = driver.power_off()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_on_delay(self, mock_sleep, mock_get_client):
+ # Ensure driver waits for the state to change following a power on
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.value_power_off,
+ driver.value_power_on]
+ pstate = driver.power_on()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_on)
+ calls = [mock.call(driver._snmp_oid())] * 2
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_off_delay(self, mock_sleep, mock_get_client):
+ # Ensure driver waits for the state to change following a power off
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.value_power_on,
+ driver.value_power_off]
+ pstate = driver.power_off()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ calls = [mock.call(driver._snmp_oid())] * 2
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_on_invalid_state(self, mock_sleep, mock_get_client):
+ # Ensure driver retries when querying unexpected states following a
+ # power on
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = 42
+ pstate = driver.power_on()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_on)
+ attempts = CONF.snmp.power_timeout // driver.retry_interval
+ calls = [mock.call(driver._snmp_oid())] * attempts
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.ERROR, pstate)
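+
+ # A note on the retry arithmetic used above and in the timeout tests
+ # below: the expected number of polls is just the configured timeout
+ # divided (integer division) by the driver's polling interval. An
+ # illustrative sketch with assumed values (not taken from this patch):
+ #
+ #     power_timeout = 10    # CONF.snmp.power_timeout, in seconds
+ #     retry_interval = 1    # driver polling interval, in seconds
+ #     attempts = power_timeout // retry_interval    # == 10 polls
+ #
+ # i.e. the mocked get() is expected to be called that many times before
+ # the driver gives up and reports states.ERROR.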
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_off_invalid_state(self, mock_sleep, mock_get_client):
+ # Ensure driver retries when querying unexpected states following a
+ # power off
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = 42
+ pstate = driver.power_off()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ attempts = CONF.snmp.power_timeout // driver.retry_interval
+ calls = [mock.call(driver._snmp_oid())] * attempts
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.ERROR, pstate)
+
+ def test_power_on_snmp_set_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a power on set operation
+ # are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.set.side_effect = self._get_snmp_failure()
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_on)
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_on)
+
+ def test_power_off_snmp_set_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a power off set
+ # operation are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.set.side_effect = self._get_snmp_failure()
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_off)
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+
+ def test_power_on_snmp_get_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a power on get operation
+ # are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = self._get_snmp_failure()
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_on)
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_on)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+
+ def test_power_off_snmp_get_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a power off get
+ # operation are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = self._get_snmp_failure()
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_off)
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_on_timeout(self, mock_sleep, mock_get_client):
+ # Ensure that a power on consistency poll timeout causes an error
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_off
+ pstate = driver.power_on()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_on)
+ attempts = CONF.snmp.power_timeout // driver.retry_interval
+ calls = [mock.call(driver._snmp_oid())] * attempts
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.ERROR, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_off_timeout(self, mock_sleep, mock_get_client):
+ # Ensure that a power off consistency poll timeout causes an error
+ mock_client = mock_get_client.return_value
+ self.config(power_timeout=5, group='snmp')
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_on
+ pstate = driver.power_off()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ attempts = CONF.snmp.power_timeout // driver.retry_interval
+ calls = [mock.call(driver._snmp_oid())] * attempts
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.ERROR, pstate)
+
+ def test_power_reset(self, mock_get_client):
+ # Ensure the device is reset correctly
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.value_power_off,
+ driver.value_power_on]
+ pstate = driver.power_reset()
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid())] * 2
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_reset_off_delay(self, mock_sleep, mock_get_client):
+ # Ensure driver waits for the power off state change following a power
+ # reset
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.value_power_on,
+ driver.value_power_off,
+ driver.value_power_on]
+ pstate = driver.power_reset()
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid())] * 3
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_reset_on_delay(self, mock_sleep, mock_get_client):
+ # Ensure driver waits for the power on state change following a power
+ # reset
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.value_power_off,
+ driver.value_power_off,
+ driver.value_power_on]
+ pstate = driver.power_reset()
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid())] * 3
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_reset_off_delay_on_delay(self, mock_sleep, mock_get_client):
+ # Ensure driver waits for both state changes following a power reset
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.value_power_on,
+ driver.value_power_off,
+ driver.value_power_off,
+ driver.value_power_on]
+ pstate = driver.power_reset()
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid())] * 4
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_reset_off_invalid_state(self, mock_sleep, mock_get_client):
+ # Ensure driver retries when querying unexpected states following a
+ # power off during a reset
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = 42
+ pstate = driver.power_reset()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ attempts = CONF.snmp.power_timeout // driver.retry_interval
+ calls = [mock.call(driver._snmp_oid())] * attempts
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.ERROR, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_reset_on_invalid_state(self, mock_sleep, mock_get_client):
+ # Ensure driver retries when querying unexpected states following a
+ # power on during a reset
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ attempts = CONF.snmp.power_timeout // driver.retry_interval
+ mock_client.get.side_effect = ([driver.value_power_off] +
+ [42] * attempts)
+ pstate = driver.power_reset()
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid())] * (1 + attempts)
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.ERROR, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_reset_off_timeout(self, mock_sleep, mock_get_client):
+ # Ensure that a power off consistency poll timeout during a reset
+ # causes an error
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_on
+ pstate = driver.power_reset()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ attempts = CONF.snmp.power_timeout // driver.retry_interval
+ calls = [mock.call(driver._snmp_oid())] * attempts
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.ERROR, pstate)
+
+ @mock.patch("eventlet.greenthread.sleep", autospec=True)
+ def test_power_reset_on_timeout(self, mock_sleep, mock_get_client):
+ # Ensure that a power on consistency poll timeout during a reset
+ # causes an error
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ attempts = CONF.snmp.power_timeout // driver.retry_interval
+ mock_client.get.side_effect = ([driver.value_power_off] *
+ (1 + attempts))
+ pstate = driver.power_reset()
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid())] * (1 + attempts)
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.ERROR, pstate)
+
+ def test_power_reset_off_snmp_set_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a reset power off set
+ # operation are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.set.side_effect = self._get_snmp_failure()
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_reset)
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ self.assertFalse(mock_client.get.called)
+
+ def test_power_reset_off_snmp_get_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a reset power off get
+ # operation are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = self._get_snmp_failure()
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_reset)
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+
+ def test_power_reset_on_snmp_set_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a reset power on set
+ # operation are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.set.side_effect = [None, self._get_snmp_failure()]
+ mock_client.get.return_value = driver.value_power_off
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_reset)
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+
+ def test_power_reset_on_snmp_get_failure(self, mock_get_client):
+ # Ensure SNMP failure exceptions raised during a reset power on get
+ # operation are propagated
+ mock_client = mock_get_client.return_value
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.value_power_off,
+ self._get_snmp_failure()]
+ self.assertRaises(exception.SNMPFailure,
+ driver.power_reset)
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid()), mock.call(driver._snmp_oid())]
+ mock_client.get.assert_has_calls(calls)
+
+ def _test_simple_device_power_state_on(self, snmp_driver, mock_get_client):
+ # Ensure a simple device driver queries power on correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver(snmp_driver)
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_on
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def _test_simple_device_power_state_off(self, snmp_driver,
+ mock_get_client):
+ # Ensure a simple device driver queries power off correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver(snmp_driver)
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_off
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ def _test_simple_device_power_on(self, snmp_driver, mock_get_client):
+ # Ensure a simple device driver powers on correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver(snmp_driver)
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_on
+ pstate = driver.power_on()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_on)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def _test_simple_device_power_off(self, snmp_driver, mock_get_client):
+ # Ensure a simple device driver powers off correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver(snmp_driver)
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.value_power_off
+ pstate = driver.power_off()
+ mock_client.set.assert_called_once_with(driver._snmp_oid(),
+ driver.value_power_off)
+ mock_client.get.assert_called_once_with(driver._snmp_oid())
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ def _test_simple_device_power_reset(self, snmp_driver, mock_get_client):
+ # Ensure a simple device driver resets correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver(snmp_driver)
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.value_power_off,
+ driver.value_power_on]
+ pstate = driver.power_reset()
+ calls = [mock.call(driver._snmp_oid(), driver.value_power_off),
+ mock.call(driver._snmp_oid(), driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid())] * 2
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def test_apc_snmp_objects(self, mock_get_client):
+ # Ensure the correct SNMP object OIDs and values are used by the APC
+ # driver
+ self._update_driver_info(snmp_driver="apc",
+ snmp_outlet="3")
+ driver = snmp._get_driver(self.node)
+ oid = (1, 3, 6, 1, 4, 1, 318, 1, 1, 4, 4, 2, 1, 3, 3)
+ self.assertEqual(oid, driver._snmp_oid())
+ self.assertEqual(1, driver.value_power_on)
+ self.assertEqual(2, driver.value_power_off)
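+
+ # For reference, the OID asserted above is simply the APC outlet-control
+ # base OID with the configured outlet index appended. An illustrative
+ # sketch using plain tuples (not the driver's own code):
+ #
+ #     apc_base = (1, 3, 6, 1, 4, 1, 318, 1, 1, 4, 4, 2, 1, 3)
+ #     outlet = 3
+ #     assert apc_base + (outlet,) == (1, 3, 6, 1, 4, 1, 318, 1, 1,
+ #                                     4, 4, 2, 1, 3, 3)
+ #
+ # The other APC variants below follow the same pattern with their own
+ # base OIDs.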
+
+ def test_apc_power_state_on(self, mock_get_client):
+ self._test_simple_device_power_state_on('apc', mock_get_client)
+
+ def test_apc_power_state_off(self, mock_get_client):
+ self._test_simple_device_power_state_off('apc', mock_get_client)
+
+ def test_apc_power_on(self, mock_get_client):
+ self._test_simple_device_power_on('apc', mock_get_client)
+
+ def test_apc_power_off(self, mock_get_client):
+ self._test_simple_device_power_off('apc', mock_get_client)
+
+ def test_apc_power_reset(self, mock_get_client):
+ self._test_simple_device_power_reset('apc', mock_get_client)
+
+ def test_apc_masterswitch_snmp_objects(self, mock_get_client):
+ # Ensure the correct SNMP object OIDs and values are used by the APC
+ # masterswitch driver
+ self._update_driver_info(snmp_driver="apc_masterswitch",
+ snmp_outlet="6")
+ driver = snmp._get_driver(self.node)
+ oid = (1, 3, 6, 1, 4, 1, 318, 1, 1, 4, 4, 2, 1, 3, 6)
+ self.assertEqual(oid, driver._snmp_oid())
+ self.assertEqual(1, driver.value_power_on)
+ self.assertEqual(2, driver.value_power_off)
+
+ def test_apc_masterswitch_power_state_on(self, mock_get_client):
+ self._test_simple_device_power_state_on('apc_masterswitch',
+ mock_get_client)
+
+ def test_apc_masterswitch_power_state_off(self, mock_get_client):
+ self._test_simple_device_power_state_off('apc_masterswitch',
+ mock_get_client)
+
+ def test_apc_masterswitch_power_on(self, mock_get_client):
+ self._test_simple_device_power_on('apc_masterswitch', mock_get_client)
+
+ def test_apc_masterswitch_power_off(self, mock_get_client):
+ self._test_simple_device_power_off('apc_masterswitch', mock_get_client)
+
+ def test_apc_masterswitch_power_reset(self, mock_get_client):
+ self._test_simple_device_power_reset('apc_masterswitch',
+ mock_get_client)
+
+ def test_apc_masterswitchplus_snmp_objects(self, mock_get_client):
+ # Ensure the correct SNMP object OIDs and values are used by the APC
+ # masterswitchplus driver
+ self._update_driver_info(snmp_driver="apc_masterswitchplus",
+ snmp_outlet="6")
+ driver = snmp._get_driver(self.node)
+ oid = (1, 3, 6, 1, 4, 1, 318, 1, 1, 6, 5, 1, 1, 5, 6)
+ self.assertEqual(oid, driver._snmp_oid())
+ self.assertEqual(1, driver.value_power_on)
+ self.assertEqual(3, driver.value_power_off)
+
+ def test_apc_masterswitchplus_power_state_on(self, mock_get_client):
+ self._test_simple_device_power_state_on('apc_masterswitchplus',
+ mock_get_client)
+
+ def test_apc_masterswitchplus_power_state_off(self, mock_get_client):
+ self._test_simple_device_power_state_off('apc_masterswitchplus',
+ mock_get_client)
+
+ def test_apc_masterswitchplus_power_on(self, mock_get_client):
+ self._test_simple_device_power_on('apc_masterswitchplus',
+ mock_get_client)
+
+ def test_apc_masterswitchplus_power_off(self, mock_get_client):
+ self._test_simple_device_power_off('apc_masterswitchplus',
+ mock_get_client)
+
+ def test_apc_masterswitchplus_power_reset(self, mock_get_client):
+ self._test_simple_device_power_reset('apc_masterswitchplus',
+ mock_get_client)
+
+ def test_apc_rackpdu_snmp_objects(self, mock_get_client):
+ # Ensure the correct SNMP object OIDs and values are used by the APC
+ # rackpdu driver
+ self._update_driver_info(snmp_driver="apc_rackpdu",
+ snmp_outlet="6")
+ driver = snmp._get_driver(self.node)
+ oid = (1, 3, 6, 1, 4, 1, 318, 1, 1, 12, 3, 3, 1, 1, 4, 6)
+ self.assertEqual(oid, driver._snmp_oid())
+ self.assertEqual(1, driver.value_power_on)
+ self.assertEqual(2, driver.value_power_off)
+
+ def test_apc_rackpdu_power_state_on(self, mock_get_client):
+ self._test_simple_device_power_state_on('apc_rackpdu', mock_get_client)
+
+ def test_apc_rackpdu_power_state_off(self, mock_get_client):
+ self._test_simple_device_power_state_off('apc_rackpdu',
+ mock_get_client)
+
+ def test_apc_rackpdu_power_on(self, mock_get_client):
+ self._test_simple_device_power_on('apc_rackpdu', mock_get_client)
+
+ def test_apc_rackpdu_power_off(self, mock_get_client):
+ self._test_simple_device_power_off('apc_rackpdu', mock_get_client)
+
+ def test_apc_rackpdu_power_reset(self, mock_get_client):
+ self._test_simple_device_power_reset('apc_rackpdu', mock_get_client)
+
+ def test_aten_snmp_objects(self, mock_get_client):
+ # Ensure the correct SNMP object OIDs and values are used by the
+ # Aten driver
+ self._update_driver_info(snmp_driver="aten",
+ snmp_outlet="3")
+ driver = snmp._get_driver(self.node)
+ oid = (1, 3, 6, 1, 4, 1, 21317, 1, 3, 2, 2, 2, 2, 3, 0)
+ self.assertEqual(oid, driver._snmp_oid())
+ self.assertEqual(2, driver.value_power_on)
+ self.assertEqual(1, driver.value_power_off)
+
+ def test_aten_power_state_on(self, mock_get_client):
+ self._test_simple_device_power_state_on('aten', mock_get_client)
+
+ def test_aten_power_state_off(self, mock_get_client):
+ self._test_simple_device_power_state_off('aten', mock_get_client)
+
+ def test_aten_power_on(self, mock_get_client):
+ self._test_simple_device_power_on('aten', mock_get_client)
+
+ def test_aten_power_off(self, mock_get_client):
+ self._test_simple_device_power_off('aten', mock_get_client)
+
+ def test_aten_power_reset(self, mock_get_client):
+ self._test_simple_device_power_reset('aten', mock_get_client)
+
+ def test_cyberpower_snmp_objects(self, mock_get_client):
+ # Ensure the correct SNMP object OIDs and values are used by the
+ # CyberPower driver
+ self._update_driver_info(snmp_driver="cyberpower",
+ snmp_outlet="3")
+ driver = snmp._get_driver(self.node)
+ oid = (1, 3, 6, 1, 4, 1, 3808, 1, 1, 3, 3, 3, 1, 1, 4, 3)
+ self.assertEqual(oid, driver._snmp_oid())
+ self.assertEqual(1, driver.value_power_on)
+ self.assertEqual(2, driver.value_power_off)
+
+ def test_cyberpower_power_state_on(self, mock_get_client):
+ self._test_simple_device_power_state_on('cyberpower', mock_get_client)
+
+ def test_cyberpower_power_state_off(self, mock_get_client):
+ self._test_simple_device_power_state_off('cyberpower', mock_get_client)
+
+ def test_cyberpower_power_on(self, mock_get_client):
+ self._test_simple_device_power_on('cyberpower', mock_get_client)
+
+ def test_cyberpower_power_off(self, mock_get_client):
+ self._test_simple_device_power_off('cyberpower', mock_get_client)
+
+ def test_cyberpower_power_reset(self, mock_get_client):
+ self._test_simple_device_power_reset('cyberpower', mock_get_client)
+
+ def test_teltronix_snmp_objects(self, mock_get_client):
+ # Ensure the correct SNMP object OIDs and values are used by the
+ # Teltronix driver
+ self._update_driver_info(snmp_driver="teltronix",
+ snmp_outlet="3")
+ driver = snmp._get_driver(self.node)
+ oid = (1, 3, 6, 1, 4, 1, 23620, 1, 2, 2, 1, 4, 3)
+ self.assertEqual(oid, driver._snmp_oid())
+ self.assertEqual(2, driver.value_power_on)
+ self.assertEqual(1, driver.value_power_off)
+
+ def test_teltronix_power_state_on(self, mock_get_client):
+ self._test_simple_device_power_state_on('teltronix', mock_get_client)
+
+ def test_teltronix_power_state_off(self, mock_get_client):
+ self._test_simple_device_power_state_off('teltronix', mock_get_client)
+
+ def test_teltronix_power_on(self, mock_get_client):
+ self._test_simple_device_power_on('teltronix', mock_get_client)
+
+ def test_teltronix_power_off(self, mock_get_client):
+ self._test_simple_device_power_off('teltronix', mock_get_client)
+
+ def test_teltronix_power_reset(self, mock_get_client):
+ self._test_simple_device_power_reset('teltronix', mock_get_client)
+
+ def test_eaton_power_snmp_objects(self, mock_get_client):
+ # Ensure the correct SNMP object OIDs and values are used by the Eaton
+ # Power driver
+ self._update_driver_info(snmp_driver="eatonpower",
+ snmp_outlet="3")
+ driver = snmp._get_driver(self.node)
+ status_oid = (1, 3, 6, 1, 4, 1, 534, 6, 6, 7, 6, 6, 1, 2, 3)
+ poweron_oid = (1, 3, 6, 1, 4, 1, 534, 6, 6, 7, 6, 6, 1, 3, 3)
+ poweroff_oid = (1, 3, 6, 1, 4, 1, 534, 6, 6, 7, 6, 6, 1, 4, 3)
+ self.assertEqual(status_oid, driver._snmp_oid(driver.oid_status))
+ self.assertEqual(poweron_oid, driver._snmp_oid(driver.oid_poweron))
+ self.assertEqual(poweroff_oid, driver._snmp_oid(driver.oid_poweroff))
+ self.assertEqual(0, driver.status_off)
+ self.assertEqual(1, driver.status_on)
+ self.assertEqual(2, driver.status_pending_off)
+ self.assertEqual(3, driver.status_pending_on)
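+
+ # Unlike the single-OID PDU drivers above, the Eaton Power driver uses
+ # separate status/power-on/power-off OIDs and four status values. The
+ # tests below assert how those values map onto Ironic power states,
+ # summarised informally (a reading of the assertions, not driver code):
+ #
+ #     status_on, status_pending_off -> states.POWER_ON
+ #     status_off, status_pending_on -> states.POWER_OFF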
+
+ def test_eaton_power_power_state_on(self, mock_get_client):
+ # Ensure the Eaton Power driver queries on correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver("eatonpower")
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.status_on
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(
+ driver._snmp_oid(driver.oid_status))
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def test_eaton_power_power_state_off(self, mock_get_client):
+ # Ensure the Eaton Power driver queries off correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver("eatonpower")
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.status_off
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(
+ driver._snmp_oid(driver.oid_status))
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ def test_eaton_power_power_state_pending_off(self, mock_get_client):
+ # Ensure the Eaton Power driver queries pending off correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver("eatonpower")
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.status_pending_off
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(
+ driver._snmp_oid(driver.oid_status))
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def test_eaton_power_power_state_pending_on(self, mock_get_client):
+ # Ensure the Eaton Power driver queries pending on correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver("eatonpower")
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.status_pending_on
+ pstate = driver.power_state()
+ mock_client.get.assert_called_once_with(
+ driver._snmp_oid(driver.oid_status))
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ def test_eaton_power_power_on(self, mock_get_client):
+ # Ensure the Eaton Power driver powers on correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver("eatonpower")
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.status_on
+ pstate = driver.power_on()
+ mock_client.set.assert_called_once_with(
+ driver._snmp_oid(driver.oid_poweron), driver.value_power_on)
+ mock_client.get.assert_called_once_with(
+ driver._snmp_oid(driver.oid_status))
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def test_eaton_power_power_off(self, mock_get_client):
+ # Ensure the Eaton Power driver powers off correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver("eatonpower")
+ driver = snmp._get_driver(self.node)
+ mock_client.get.return_value = driver.status_off
+ pstate = driver.power_off()
+ mock_client.set.assert_called_once_with(
+ driver._snmp_oid(driver.oid_poweroff), driver.value_power_off)
+ mock_client.get.assert_called_once_with(
+ driver._snmp_oid(driver.oid_status))
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ def test_eaton_power_power_reset(self, mock_get_client):
+ # Ensure the Eaton Power driver resets correctly
+ mock_client = mock_get_client.return_value
+ self._set_snmp_driver("eatonpower")
+ driver = snmp._get_driver(self.node)
+ mock_client.get.side_effect = [driver.status_off, driver.status_on]
+ pstate = driver.power_reset()
+ calls = [mock.call(driver._snmp_oid(driver.oid_poweroff),
+ driver.value_power_off),
+ mock.call(driver._snmp_oid(driver.oid_poweron),
+ driver.value_power_on)]
+ mock_client.set.assert_has_calls(calls)
+ calls = [mock.call(driver._snmp_oid(driver.oid_status))] * 2
+ mock_client.get.assert_has_calls(calls)
+ self.assertEqual(states.POWER_ON, pstate)
+
+
+@mock.patch.object(snmp, '_get_driver', autospec=True)
+class SNMPDriverTestCase(db_base.DbTestCase):
+ """SNMP power driver interface tests.
+
+ In this test case, the SNMP power driver interface is exercised. The
+ device-specific SNMP driver is mocked to allow various error cases to be
+ tested.
+ """
+
+ def setUp(self):
+ super(SNMPDriverTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_snmp')
+
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_snmp',
+ driver_info=INFO_DICT)
+
+ def _get_snmp_failure(self):
+ return exception.SNMPFailure(operation='test-operation',
+ error='test-error')
+
+ def test_get_properties(self, mock_get_driver):
+ expected = snmp.COMMON_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ def test_get_power_state_on(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_state.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pstate = task.driver.power.get_power_state(task)
+ mock_driver.power_state.assert_called_once_with()
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def test_get_power_state_off(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_state.return_value = states.POWER_OFF
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pstate = task.driver.power.get_power_state(task)
+ mock_driver.power_state.assert_called_once_with()
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ def test_get_power_state_error(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_state.return_value = states.ERROR
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ pstate = task.driver.power.get_power_state(task)
+ mock_driver.power_state.assert_called_once_with()
+ self.assertEqual(states.ERROR, pstate)
+
+ def test_get_power_state_snmp_failure(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_state.side_effect = self._get_snmp_failure()
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.SNMPFailure,
+ task.driver.power.get_power_state, task)
+ mock_driver.power_state.assert_called_once_with()
+
+ def test_set_power_state_on(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_on.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+ mock_driver.power_on.assert_called_once_with()
+
+ def test_set_power_state_off(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_off.return_value = states.POWER_OFF
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.power.set_power_state(task, states.POWER_OFF)
+ mock_driver.power_off.assert_called_once_with()
+
+ def test_set_power_state_error(self, mock_get_driver):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.set_power_state,
+ task, states.ERROR)
+
+ def test_set_power_state_on_snmp_failure(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_on.side_effect = self._get_snmp_failure()
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.SNMPFailure,
+ task.driver.power.set_power_state,
+ task, states.POWER_ON)
+ mock_driver.power_on.assert_called_once_with()
+
+ def test_set_power_state_off_snmp_failure(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_off.side_effect = self._get_snmp_failure()
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.SNMPFailure,
+ task.driver.power.set_power_state,
+ task, states.POWER_OFF)
+ mock_driver.power_off.assert_called_once_with()
+
+ def test_set_power_state_on_timeout(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_on.return_value = states.ERROR
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task, states.POWER_ON)
+ mock_driver.power_on.assert_called_once_with()
+
+ def test_set_power_state_off_timeout(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_off.return_value = states.ERROR
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task, states.POWER_OFF)
+ mock_driver.power_off.assert_called_once_with()
+
+ def test_reboot(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_reset.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.power.reboot(task)
+ mock_driver.power_reset.assert_called_once_with()
+
+ def test_reboot_snmp_failure(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_reset.side_effect = self._get_snmp_failure()
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.SNMPFailure,
+ task.driver.power.reboot, task)
+ mock_driver.power_reset.assert_called_once_with()
+
+ def test_reboot_timeout(self, mock_get_driver):
+ mock_driver = mock_get_driver.return_value
+ mock_driver.power_reset.return_value = states.ERROR
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.reboot, task)
+ mock_driver.power_reset.assert_called_once_with()
diff --git a/ironic/tests/unit/drivers/test_ssh.py b/ironic/tests/unit/drivers/test_ssh.py
new file mode 100644
index 000000000..a7c25667c
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_ssh.py
@@ -0,0 +1,975 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for Ironic SSH power driver."""
+
+import tempfile
+
+import mock
+from oslo_concurrency import processutils
+from oslo_config import cfg
+from oslo_utils import uuidutils
+import paramiko
+
+from ironic.common import boot_devices
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import states
+from ironic.common import utils
+from ironic.conductor import task_manager
+from ironic.drivers.modules import ssh
+from ironic.drivers import utils as driver_utils
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+CONF = cfg.CONF
+
+
+class SSHValidateParametersTestCase(db_base.DbTestCase):
+
+ def test__parse_driver_info_good_password(self):
+ # make sure we get back the expected things
+ node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ssh',
+ driver_info=db_utils.get_test_ssh_info('password'))
+ info = ssh._parse_driver_info(node)
+ self.assertIsNotNone(info.get('host'))
+ self.assertIsNotNone(info.get('username'))
+ self.assertIsNotNone(info.get('password'))
+ self.assertIsNotNone(info.get('port'))
+ self.assertIsNotNone(info.get('virt_type'))
+ self.assertIsNotNone(info.get('cmd_set'))
+ self.assertIsNotNone(info.get('uuid'))
+
+ def test__parse_driver_info_good_key(self):
+ # make sure we get back the expected things
+ node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ssh',
+ driver_info=db_utils.get_test_ssh_info('key'))
+ info = ssh._parse_driver_info(node)
+ self.assertIsNotNone(info.get('host'))
+ self.assertIsNotNone(info.get('username'))
+ self.assertIsNotNone(info.get('key_contents'))
+ self.assertIsNotNone(info.get('port'))
+ self.assertIsNotNone(info.get('virt_type'))
+ self.assertIsNotNone(info.get('cmd_set'))
+ self.assertIsNotNone(info.get('uuid'))
+
+ def test__parse_driver_info_good_file(self):
+ # make sure we get back the expected things
+ d_info = db_utils.get_test_ssh_info('file')
+ tempdir = tempfile.mkdtemp()
+ key_path = tempdir + '/foo'
+ open(key_path, 'wt').close()
+ d_info['ssh_key_filename'] = key_path
+ node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ssh',
+ driver_info=d_info)
+ info = ssh._parse_driver_info(node)
+ self.assertIsNotNone(info.get('host'))
+ self.assertIsNotNone(info.get('username'))
+ self.assertIsNotNone(info.get('key_filename'))
+ self.assertIsNotNone(info.get('port'))
+ self.assertIsNotNone(info.get('virt_type'))
+ self.assertIsNotNone(info.get('cmd_set'))
+ self.assertIsNotNone(info.get('uuid'))
+
+ def test__parse_driver_info_bad_file(self):
+ # make sure error is raised when the key file doesn't exist
+ info = db_utils.get_test_ssh_info('file')
+ node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ssh',
+ driver_info=info)
+ self.assertRaises(
+ exception.InvalidParameterValue, ssh._parse_driver_info, node)
+
+ def test__parse_driver_info_too_many(self):
+ info = db_utils.get_test_ssh_info('too_many')
+ node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ssh',
+ driver_info=info)
+ self.assertRaises(
+ exception.InvalidParameterValue, ssh._parse_driver_info, node)
+
+ def test__parse_driver_info_missing_host(self):
+ # make sure error is raised when info is missing
+ info = db_utils.get_test_ssh_info()
+ del info['ssh_address']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ ssh._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_user(self):
+ # make sure error is raised when info is missing
+ info = db_utils.get_test_ssh_info()
+ del info['ssh_username']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ ssh._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_invalid_creds(self):
+ # make sure error is raised when no credentials are provided
+ info = db_utils.get_test_ssh_info('no-creds')
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.InvalidParameterValue,
+ ssh._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_missing_virt_type(self):
+ # make sure error is raised when info is missing
+ info = db_utils.get_test_ssh_info()
+ del info['ssh_virt_type']
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.MissingParameterValue,
+ ssh._parse_driver_info,
+ node)
+
+ def test__parse_driver_info_ssh_port_wrong_type(self):
+ # make sure error is raised when ssh_port is not an integer
+ info = db_utils.get_test_ssh_info()
+ info['ssh_port'] = 'wrong_port_value'
+ node = obj_utils.get_test_node(self.context, driver_info=info)
+ self.assertRaises(exception.InvalidParameterValue,
+ ssh._parse_driver_info,
+ node)
+
+ def test__normalize_mac_string(self):
+ mac_raw = "0A:1B-2C-3D:4F"
+ mac_clean = ssh._normalize_mac(mac_raw)
+ self.assertEqual("0a1b2c3d4f", mac_clean)
+
+ def test__normalize_mac_unicode(self):
+ mac_raw = u"0A:1B-2C-3D:4F"
+ mac_clean = ssh._normalize_mac(mac_raw)
+ self.assertEqual("0a1b2c3d4f", mac_clean)
+
+ def test__parse_driver_info_with_custom_libvirt_uri(self):
+ CONF.set_override('libvirt_uri', 'qemu:///foo', 'ssh')
+ expected_base_cmd = "LC_ALL=C /usr/bin/virsh --connect qemu:///foo"
+
+ node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ssh',
+ driver_info=db_utils.get_test_ssh_info())
+ node['driver_info']['ssh_virt_type'] = 'virsh'
+ info = ssh._parse_driver_info(node)
+ self.assertEqual(expected_base_cmd, info['cmd_set']['base_cmd'])
+
+ def test__get_boot_device_map_parallels(self):
+ boot_map = ssh._get_boot_device_map('parallels')
+ self.assertEqual('net0', boot_map[boot_devices.PXE])
+
+ def test__get_boot_device_map_vbox(self):
+ boot_map = ssh._get_boot_device_map('vbox')
+ self.assertEqual('net', boot_map[boot_devices.PXE])
+
+ def test__get_boot_device_map_exception(self):
+ self.assertRaises(exception.InvalidParameterValue,
+ ssh._get_boot_device_map,
+ 'this_doesn_t_exist')
+
+
+class SSHPrivateMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(SSHPrivateMethodsTestCase, self).setUp()
+ self.node = obj_utils.get_test_node(
+ self.context,
+ driver='fake_ssh',
+ driver_info=db_utils.get_test_ssh_info())
+ self.sshclient = paramiko.SSHClient()
+
+ @mock.patch.object(utils, 'ssh_connect', autospec=True)
+ def test__get_connection_client(self, ssh_connect_mock):
+ ssh_connect_mock.return_value = self.sshclient
+ client = ssh._get_connection(self.node)
+ self.assertEqual(self.sshclient, client)
+ driver_info = ssh._parse_driver_info(self.node)
+ ssh_connect_mock.assert_called_once_with(driver_info)
+
+ @mock.patch.object(utils, 'ssh_connect', autospec=True)
+ def test__get_connection_exception(self, ssh_connect_mock):
+ ssh_connect_mock.side_effect = iter(
+ [exception.SSHConnectFailed(host='fake')])
+ self.assertRaises(exception.SSHConnectFailed,
+ ssh._get_connection,
+ self.node)
+ driver_info = ssh._parse_driver_info(self.node)
+ ssh_connect_mock.assert_called_once_with(driver_info)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ def test__ssh_execute(self, exec_ssh_mock):
+ ssh_cmd = "somecmd"
+ expected = ['a', 'b', 'c']
+ exec_ssh_mock.return_value = ('\n'.join(expected), '')
+ lst = ssh._ssh_execute(self.sshclient, ssh_cmd)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
+ self.assertEqual(expected, lst)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ def test__ssh_execute_exception(self, exec_ssh_mock):
+ ssh_cmd = "somecmd"
+ exec_ssh_mock.side_effect = processutils.ProcessExecutionError
+ self.assertRaises(exception.SSHCommandFailed,
+ ssh._ssh_execute,
+ self.sshclient,
+ ssh_cmd)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__get_power_status_on_unquoted(self, get_hosts_name_mock,
+ exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ exec_ssh_mock.return_value = (
+ 'ExactNodeName', '')
+ get_hosts_name_mock.return_value = "ExactNodeName"
+
+ pstate = ssh._get_power_status(self.sshclient, info)
+
+ ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['list_running'])
+ self.assertEqual(states.POWER_ON, pstate)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__get_power_status_on(self, get_hosts_name_mock, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ exec_ssh_mock.return_value = (
+ '"NodeName" {b43c4982-110c-4c29-9325-d5f41b053513}', '')
+ get_hosts_name_mock.return_value = "NodeName"
+
+ pstate = ssh._get_power_status(self.sshclient, info)
+
+ ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['list_running'])
+ self.assertEqual(states.POWER_ON, pstate)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__get_power_status_off(self, get_hosts_name_mock, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ exec_ssh_mock.return_value = (
+ '"NodeName" {b43c4982-110c-4c29-9325-d5f41b053513}', '')
+ get_hosts_name_mock.return_value = "NotNodeName"
+
+ pstate = ssh._get_power_status(self.sshclient, info)
+
+ ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['list_running'])
+ self.assertEqual(states.POWER_OFF, pstate)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, ssh_cmd)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__get_power_status_error(self, get_hosts_name_mock,
+ exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_hosts_name_mock.return_value = None
+ self.assertRaises(exception.NodeNotFound,
+ ssh._get_power_status,
+ self.sshclient,
+ info)
+
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+ self.assertFalse(exec_ssh_mock.called)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ def test__get_power_status_exception(self, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ exec_ssh_mock.side_effect = processutils.ProcessExecutionError
+
+ self.assertRaises(exception.SSHCommandFailed,
+ ssh._get_power_status,
+ self.sshclient,
+ info)
+ ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['list_all'])
+ exec_ssh_mock.assert_called_once_with(
+ self.sshclient, ssh_cmd)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__get_power_status_correct_node(self, get_hosts_name_mock,
+ exec_ssh_mock):
+ # Bug #1397834: test that _get_power_status returns the status of
+ # baremetal_1 (off) and not baremetal_11 (on)
+ info = ssh._parse_driver_info(self.node)
+ exec_ssh_mock.return_value = ('"baremetal_11"\n"seed"\n', '')
+ get_hosts_name_mock.return_value = "baremetal_1"
+
+ pstate = ssh._get_power_status(self.sshclient, info)
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ def test__get_hosts_name_for_node_match(self, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['list_all'])
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['get_node_macs'])
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+ exec_ssh_mock.side_effect = iter([('NodeName', ''),
+ ('52:54:00:cf:2d:31', '')])
+ expected = [mock.call(self.sshclient, ssh_cmd),
+ mock.call(self.sshclient, cmd_to_exec)]
+
+ found_name = ssh._get_hosts_name_for_node(self.sshclient, info)
+
+ self.assertEqual('NodeName', found_name)
+ self.assertEqual(expected, exec_ssh_mock.call_args_list)
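+
+ # As the expected call list above shows, the host-name lookup is a
+ # two-step SSH exchange: the 'list_all' command enumerates the
+ # hypervisor's node names, then 'get_node_macs' (with {_NodeName_}
+ # substituted) is run for a candidate and its MAC addresses are
+ # compared against info['macs']. A match yields the node name; the
+ # next two tests cover the no-match and command-failure paths.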
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ def test__get_hosts_name_for_node_no_match(self, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "22:22:22:22:22:22"]
+ exec_ssh_mock.side_effect = iter([('NodeName', ''),
+ ('52:54:00:cf:2d:31', '')])
+
+ ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['list_all'])
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['get_node_macs'])
+
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+ expected = [mock.call(self.sshclient, ssh_cmd),
+ mock.call(self.sshclient, cmd_to_exec)]
+
+ found_name = ssh._get_hosts_name_for_node(self.sshclient, info)
+
+ self.assertIsNone(found_name)
+ self.assertEqual(expected, exec_ssh_mock.call_args_list)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ def test__get_hosts_name_for_node_exception(self, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ ssh_cmd = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['list_all'])
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['get_node_macs'])
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+
+ exec_ssh_mock.side_effect = iter(
+ [('NodeName', ''), processutils.ProcessExecutionError])
+ expected = [mock.call(self.sshclient, ssh_cmd),
+ mock.call(self.sshclient, cmd_to_exec)]
+
+ self.assertRaises(exception.SSHCommandFailed,
+ ssh._get_hosts_name_for_node,
+ self.sshclient,
+ info)
+ self.assertEqual(expected, exec_ssh_mock.call_args_list)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_power_status', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__power_on_good(self, get_hosts_name_mock, get_power_status_mock,
+ exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+
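+ # _power_on() checks the power status before and after issuing the start
+ # command, hence two mocked return values and two expected calls.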
+ get_power_status_mock.side_effect = iter([states.POWER_OFF,
+ states.POWER_ON])
+ get_hosts_name_mock.return_value = "NodeName"
+ expected = [mock.call(self.sshclient, info),
+ mock.call(self.sshclient, info)]
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['start_cmd'])
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+ current_state = ssh._power_on(self.sshclient, info)
+
+ self.assertEqual(states.POWER_ON, current_state)
+ self.assertEqual(expected, get_power_status_mock.call_args_list)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_power_status', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__power_on_fail(self, get_hosts_name_mock, get_power_status_mock,
+ exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_power_status_mock.side_effect = iter([states.POWER_OFF,
+ states.POWER_OFF])
+ get_hosts_name_mock.return_value = "NodeName"
+ expected = [mock.call(self.sshclient, info),
+ mock.call(self.sshclient, info)]
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['start_cmd'])
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+ current_state = ssh._power_on(self.sshclient, info)
+
+ self.assertEqual(states.ERROR, current_state)
+ self.assertEqual(expected, get_power_status_mock.call_args_list)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_power_status', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__power_on_exception(self, get_hosts_name_mock,
+ get_power_status_mock, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+
+ exec_ssh_mock.side_effect = processutils.ProcessExecutionError
+ get_power_status_mock.side_effect = iter([states.POWER_OFF,
+ states.POWER_ON])
+ get_hosts_name_mock.return_value = "NodeName"
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['start_cmd'])
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+
+ self.assertRaises(exception.SSHCommandFailed,
+ ssh._power_on,
+ self.sshclient,
+ info)
+ get_power_status_mock.assert_called_once_with(self.sshclient, info)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_power_status', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__power_off_good(self, get_hosts_name_mock,
+ get_power_status_mock, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_power_status_mock.side_effect = iter([states.POWER_ON,
+ states.POWER_OFF])
+ get_hosts_name_mock.return_value = "NodeName"
+ expected = [mock.call(self.sshclient, info),
+ mock.call(self.sshclient, info)]
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['stop_cmd'])
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+ current_state = ssh._power_off(self.sshclient, info)
+
+ self.assertEqual(states.POWER_OFF, current_state)
+ self.assertEqual(expected, get_power_status_mock.call_args_list)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_power_status', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__power_off_fail(self, get_hosts_name_mock,
+ get_power_status_mock, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_power_status_mock.side_effect = iter([states.POWER_ON,
+ states.POWER_ON])
+ get_hosts_name_mock.return_value = "NodeName"
+ expected = [mock.call(self.sshclient, info),
+ mock.call(self.sshclient, info)]
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['stop_cmd'])
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+ current_state = ssh._power_off(self.sshclient, info)
+
+ self.assertEqual(states.ERROR, current_state)
+ self.assertEqual(expected, get_power_status_mock.call_args_list)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
+
+ @mock.patch.object(processutils, 'ssh_execute', autospec=True)
+ @mock.patch.object(ssh, '_get_power_status', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test__power_off_exception(self, get_hosts_name_mock,
+ get_power_status_mock, exec_ssh_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ exec_ssh_mock.side_effect = processutils.ProcessExecutionError
+ get_power_status_mock.side_effect = iter([states.POWER_ON,
+ states.POWER_OFF])
+ get_hosts_name_mock.return_value = "NodeName"
+
+ cmd_to_exec = "%s %s" % (info['cmd_set']['base_cmd'],
+ info['cmd_set']['stop_cmd'])
+ cmd_to_exec = cmd_to_exec.replace('{_NodeName_}', 'NodeName')
+
+ self.assertRaises(exception.SSHCommandFailed, ssh._power_off,
+ self.sshclient, info)
+ get_power_status_mock.assert_called_once_with(self.sshclient, info)
+ get_hosts_name_mock.assert_called_once_with(self.sshclient, info)
+ exec_ssh_mock.assert_called_once_with(self.sshclient, cmd_to_exec)
+
+ def test_exec_ssh_command_good(self):
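+ # Channel and Stream mimic what paramiko's SSHClient.exec_command()
+ # returns: three file-like streams whose channel reports the exit status.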
+ class Channel(object):
+ def recv_exit_status(self):
+ return 0
+
+ class Stream(object):
+ def __init__(self, buffer=''):
+ self.buffer = buffer
+ self.channel = Channel()
+
+ def read(self):
+ return self.buffer
+
+ def close(self):
+ pass
+
+ with mock.patch.object(self.sshclient, 'exec_command',
+ autospec=True) as exec_command_mock:
+ exec_command_mock.return_value = (Stream(),
+ Stream('hello'),
+ Stream())
+ stdout, stderr = processutils.ssh_execute(self.sshclient,
+ "command")
+
+ self.assertEqual('hello', stdout)
+ exec_command_mock.assert_called_once_with("command")
+
+ def test_exec_ssh_command_fail(self):
+ class Channel(object):
+ def recv_exit_status(self):
+ return 127
+
+ class Stream(object):
+ def __init__(self, buffer=''):
+ self.buffer = buffer
+ self.channel = Channel()
+
+ def read(self):
+ return self.buffer
+
+ def close(self):
+ pass
+
+ with mock.patch.object(self.sshclient, 'exec_command',
+ autospec=True) as exec_command_mock:
+ exec_command_mock.return_value = (Stream(),
+ Stream('hello'),
+ Stream())
+ self.assertRaises(processutils.ProcessExecutionError,
+ processutils.ssh_execute,
+ self.sshclient,
+ "command")
+ exec_command_mock.assert_called_once_with("command")
+
+
+class SSHDriverTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(SSHDriverTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_ssh")
+ self.driver = driver_factory.get_driver("fake_ssh")
+ self.node = obj_utils.create_test_node(
+ self.context, driver='fake_ssh',
+ driver_info=db_utils.get_test_ssh_info())
+ self.port = obj_utils.create_test_port(self.context,
+ node_id=self.node.id)
+ self.sshclient = paramiko.SSHClient()
+
+ @mock.patch.object(utils, 'ssh_connect', autospec=True)
+ def test__validate_info_ssh_connect_failed(self, ssh_connect_mock):
+ info = ssh._parse_driver_info(self.node)
+
+ ssh_connect_mock.side_effect = iter(
+ [exception.SSHConnectFailed(host='fake')])
+ with task_manager.acquire(self.context, info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.validate, task)
+ driver_info = ssh._parse_driver_info(task.node)
+ ssh_connect_mock.assert_called_once_with(driver_info)
+
+ def test_get_properties(self):
+ expected = ssh.COMMON_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.power.get_properties())
+ self.assertEqual(expected, task.driver.get_properties())
+ self.assertEqual(expected, task.driver.management.get_properties())
+
+ def test_validate_fail_no_port(self):
+ new_node = obj_utils.create_test_node(
+ self.context,
+ uuid='aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
+ driver='fake_ssh',
+ driver_info=db_utils.get_test_ssh_info())
+ with task_manager.acquire(self.context, new_node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.power.validate,
+ task)
+
+ @mock.patch.object(driver_utils, 'get_node_mac_addresses', autospec=True)
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_power_on', autospec=True)
+ def test_reboot_good(self, power_on_mock, get_conn_mock,
+ get_mac_addr_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_mac_addr_mock.return_value = info['macs']
+ get_conn_mock.return_value = self.sshclient
+ power_on_mock.return_value = states.POWER_ON
+ with mock.patch.object(ssh, '_parse_driver_info',
+ autospec=True) as parse_drv_info_mock:
+ parse_drv_info_mock.return_value = info
+ with task_manager.acquire(self.context, info['uuid'],
+ shared=False) as task:
+ task.driver.power.reboot(task)
+
+ parse_drv_info_mock.assert_called_once_with(task.node)
+ get_mac_addr_mock.assert_called_once_with(mock.ANY)
+ get_conn_mock.assert_called_once_with(task.node)
+ power_on_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(driver_utils, 'get_node_mac_addresses', autospec=True)
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_power_on', autospec=True)
+ def test_reboot_fail(self, power_on_mock, get_conn_mock,
+ get_mac_addr_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_mac_addr_mock.return_value = info['macs']
+ get_conn_mock.return_value = self.sshclient
+ power_on_mock.return_value = states.POWER_OFF
+ with mock.patch.object(ssh, '_parse_driver_info',
+ autospec=True) as parse_drv_info_mock:
+ parse_drv_info_mock.return_value = info
+ with task_manager.acquire(self.context, info['uuid'],
+ shared=False) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ task.driver.power.reboot, task)
+ parse_drv_info_mock.assert_called_once_with(task.node)
+ get_mac_addr_mock.assert_called_once_with(mock.ANY)
+ get_conn_mock.assert_called_once_with(task.node)
+ power_on_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(driver_utils, 'get_node_mac_addresses', autospec=True)
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ def test_set_power_state_bad_state(self, get_conn_mock,
+ get_mac_addr_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_mac_addr_mock.return_value = info['macs']
+ get_conn_mock.return_value = self.sshclient
+ with mock.patch.object(ssh, '_parse_driver_info',
+ autospec=True) as parse_drv_info_mock:
+ parse_drv_info_mock.return_value = info
+ with task_manager.acquire(self.context, info['uuid'],
+ shared=False) as task:
+ self.assertRaises(
+ exception.InvalidParameterValue,
+ task.driver.power.set_power_state,
+ task,
+ "BAD_PSTATE")
+
+ parse_drv_info_mock.assert_called_once_with(task.node)
+ get_mac_addr_mock.assert_called_once_with(mock.ANY)
+ get_conn_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(driver_utils, 'get_node_mac_addresses', autospec=True)
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_power_on', autospec=True)
+ def test_set_power_state_on_good(self, power_on_mock, get_conn_mock,
+ get_mac_addr_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_mac_addr_mock.return_value = info['macs']
+ get_conn_mock.return_value = self.sshclient
+ power_on_mock.return_value = states.POWER_ON
+ with mock.patch.object(ssh, '_parse_driver_info',
+ autospec=True) as parse_drv_info_mock:
+ parse_drv_info_mock.return_value = info
+ with task_manager.acquire(self.context, info['uuid'],
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+
+ parse_drv_info_mock.assert_called_once_with(task.node)
+ get_mac_addr_mock.assert_called_once_with(mock.ANY)
+ get_conn_mock.assert_called_once_with(task.node)
+ power_on_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(driver_utils, 'get_node_mac_addresses', autospec=True)
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_power_on', autospec=True)
+ def test_set_power_state_on_fail(self, power_on_mock, get_conn_mock,
+ get_mac_addr_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_mac_addr_mock.return_value = info['macs']
+ get_conn_mock.return_value = self.sshclient
+ power_on_mock.return_value = states.POWER_OFF
+ with mock.patch.object(ssh, '_parse_driver_info',
+ autospec=True) as parse_drv_info_mock:
+ parse_drv_info_mock.return_value = info
+ with task_manager.acquire(self.context, info['uuid'],
+ shared=False) as task:
+ self.assertRaises(
+ exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task,
+ states.POWER_ON)
+
+ parse_drv_info_mock.assert_called_once_with(task.node)
+ get_mac_addr_mock.assert_called_once_with(mock.ANY)
+ get_conn_mock.assert_called_once_with(task.node)
+ power_on_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(driver_utils, 'get_node_mac_addresses', autospec=True)
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_power_off', autospec=True)
+ def test_set_power_state_off_good(self, power_off_mock, get_conn_mock,
+ get_mac_addr_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_mac_addr_mock.return_value = info['macs']
+ get_conn_mock.return_value = self.sshclient
+ power_off_mock.return_value = states.POWER_OFF
+ with mock.patch.object(ssh, '_parse_driver_info',
+ autospec=True) as parse_drv_info_mock:
+ parse_drv_info_mock.return_value = info
+ with task_manager.acquire(self.context, info['uuid'],
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_OFF)
+
+ parse_drv_info_mock.assert_called_once_with(task.node)
+ get_mac_addr_mock.assert_called_once_with(mock.ANY)
+ get_conn_mock.assert_called_once_with(task.node)
+ power_off_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(driver_utils, 'get_node_mac_addresses', autospec=True)
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_power_off', autospec=True)
+ def test_set_power_state_off_fail(self, power_off_mock, get_conn_mock,
+ get_mac_addr_mock):
+ info = ssh._parse_driver_info(self.node)
+ info['macs'] = ["11:11:11:11:11:11", "52:54:00:cf:2d:31"]
+ get_mac_addr_mock.return_value = info['macs']
+ get_conn_mock.return_value = self.sshclient
+ power_off_mock.return_value = states.POWER_ON
+ with mock.patch.object(ssh, '_parse_driver_info',
+ autospec=True) as parse_drv_info_mock:
+ parse_drv_info_mock.return_value = info
+ with task_manager.acquire(self.context, info['uuid'],
+ shared=False) as task:
+ self.assertRaises(
+ exception.PowerStateFailure,
+ task.driver.power.set_power_state,
+ task,
+ states.POWER_OFF)
+
+ parse_drv_info_mock.assert_called_once_with(task.node)
+ get_mac_addr_mock.assert_called_once_with(mock.ANY)
+ get_conn_mock.assert_called_once_with(task.node)
+ power_off_mock.assert_called_once_with(self.sshclient, info)
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ @mock.patch.object(ssh, '_ssh_execute', autospec=True)
+ def test_management_interface_set_boot_device_vbox_ok(self, mock_exc,
+ mock_h,
+ mock_get_conn):
+ fake_name = 'fake-name'
+ mock_h.return_value = fake_name
+ mock_get_conn.return_value = self.sshclient
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node['driver_info']['ssh_virt_type'] = 'vbox'
+ self.driver.management.set_boot_device(task, boot_devices.PXE)
+ expected_cmd = ('LC_ALL=C /usr/bin/VBoxManage modifyvm %s '
+ '--boot1 net') % fake_name
+ mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ @mock.patch.object(ssh, '_ssh_execute', autospec=True)
+ def test_management_interface_set_boot_device_parallels_ok(self, mock_exc,
+ mock_h,
+ mock_get_conn):
+ fake_name = 'fake-name'
+ mock_h.return_value = fake_name
+ mock_get_conn.return_value = self.sshclient
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node['driver_info']['ssh_virt_type'] = 'parallels'
+ self.driver.management.set_boot_device(task, boot_devices.PXE)
+ expected_cmd = ('LC_ALL=C /usr/bin/prlctl set %s '
+ '--device-bootorder "net0"') % fake_name
+ mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ @mock.patch.object(ssh, '_ssh_execute', autospec=True)
+ def test_management_interface_set_boot_device_virsh_ok(self, mock_exc,
+ mock_h,
+ mock_get_conn):
+ fake_name = 'fake-name'
+ mock_h.return_value = fake_name
+ mock_get_conn.return_value = self.sshclient
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node['driver_info']['ssh_virt_type'] = 'virsh'
+ self.driver.management.set_boot_device(task, boot_devices.PXE)
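+ # The virsh virt type sets the boot device by editing the domain XML:
+ # EDITOR is a sed script that drops existing <boot dev=/order=> lines
+ # and inserts <boot dev="network"/> before </os>.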
+ expected_cmd = ('EDITOR="sed -i \'/<boot \\(dev\\|order\\)=*\\>'
+ '/d;/<\\/os>/i\\<boot dev=\\"network\\"/>\'" '
+ 'LC_ALL=C /usr/bin/virsh --connect qemu:///system '
+ 'edit %s') % fake_name
+ mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
+
+ def test_set_boot_device_bad_device(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.management.set_boot_device,
+ task, 'invalid-device')
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test_set_boot_device_not_supported(self, mock_h, mock_get_conn):
+ mock_h.return_value = 'NodeName'
+ mock_get_conn.return_value = self.sshclient
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ # vmware does not support set_boot_device()
+ task.node['driver_info']['ssh_virt_type'] = 'vmware'
+ self.assertRaises(NotImplementedError,
+ self.driver.management.set_boot_device,
+ task, boot_devices.PXE)
+
+ def test_management_interface_get_supported_boot_devices(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ expected = [boot_devices.PXE, boot_devices.DISK,
+ boot_devices.CDROM]
+ devices = task.driver.management.get_supported_boot_devices(task)
+ self.assertEqual(sorted(expected), sorted(devices))
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ @mock.patch.object(ssh, '_ssh_execute', autospec=True)
+ def test_management_interface_get_boot_device_vbox(self, mock_exc,
+ mock_h,
+ mock_get_conn):
+ fake_name = 'fake-name'
+ mock_h.return_value = fake_name
+ mock_exc.return_value = ('net', '')
+ mock_get_conn.return_value = self.sshclient
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node['driver_info']['ssh_virt_type'] = 'vbox'
+ result = self.driver.management.get_boot_device(task)
+ self.assertEqual(boot_devices.PXE, result['boot_device'])
+ expected_cmd = ('LC_ALL=C /usr/bin/VBoxManage showvminfo '
+ '--machinereadable %s '
+ '| awk -F \'"\' \'/boot1/{print $2}\'') % fake_name
+ mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ @mock.patch.object(ssh, '_ssh_execute', autospec=True)
+ def test_management_interface_get_boot_device_parallels(self, mock_exc,
+ mock_h,
+ mock_get_conn):
+ fake_name = 'fake-name'
+ mock_h.return_value = fake_name
+ mock_exc.return_value = ('net0', '')
+ mock_get_conn.return_value = self.sshclient
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node['driver_info']['ssh_virt_type'] = 'parallels'
+ result = self.driver.management.get_boot_device(task)
+ self.assertEqual(boot_devices.PXE, result['boot_device'])
+ expected_cmd = ('LC_ALL=C /usr/bin/prlctl list -i %s '
+ '| awk \'/^Boot order:/ {print $3}\'') % fake_name
+ mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ @mock.patch.object(ssh, '_ssh_execute', autospec=True)
+ def test_management_interface_get_boot_device_virsh(self, mock_exc,
+ mock_h,
+ mock_get_conn):
+ fake_name = 'fake-name'
+ mock_h.return_value = fake_name
+ mock_exc.return_value = ('network', '')
+ mock_get_conn.return_value = self.sshclient
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node['driver_info']['ssh_virt_type'] = 'virsh'
+ result = self.driver.management.get_boot_device(task)
+ self.assertEqual(boot_devices.PXE, result['boot_device'])
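+ # The expected command dumps the domain XML and uses awk to pull the
+ # dev= attribute out of the first <boot .../> element.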
+ expected_cmd = ('LC_ALL=C /usr/bin/virsh --connect '
+ 'qemu:///system dumpxml %s | awk \'/boot dev=/ '
+ '{ gsub( ".*dev=" Q, "" ); gsub( Q ".*", "" ); '
+ 'print; }\' Q="\'" RS="[<>]" | head -1') % fake_name
+ mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ def test_get_boot_device_not_supported(self, mock_h, mock_get_conn):
+ mock_h.return_value = 'NodeName'
+ mock_get_conn.return_value = self.sshclient
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ # vmware does not support get_boot_device()
+ task.node['driver_info']['ssh_virt_type'] = 'vmware'
+ expected = {'boot_device': None, 'persistent': None}
+ self.assertEqual(expected,
+ self.driver.management.get_boot_device(task))
+
+ @mock.patch.object(ssh, '_get_connection', autospec=True)
+ @mock.patch.object(ssh, '_get_hosts_name_for_node', autospec=True)
+ @mock.patch.object(ssh, '_ssh_execute', autospec=True)
+ def test_get_power_state_vmware(self, mock_exc, mock_h, mock_get_conn):
+ # Verify that {_NodeName_} gets replaced in vmware's list_running command.
+ nodename = 'fakevm'
+ mock_h.return_value = nodename
+ mock_get_conn.return_value = self.sshclient
+ # list_running quotes names
+ mock_exc.return_value = ('"%s"' % nodename, '')
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.node['driver_info']['ssh_virt_type'] = 'vmware'
+ power_state = self.driver.power.get_power_state(task)
+ self.assertEqual(states.POWER_ON, power_state)
+ expected_cmd = ("LC_ALL=C /bin/vim-cmd vmsvc/power.getstate "
+ "%(node)s | grep 'Powered on' >/dev/null && "
+ "echo '\"%(node)s\"' || true") % {'node': nodename}
+ mock_exc.assert_called_once_with(mock.ANY, expected_cmd)
+
+ def test_management_interface_validate_good(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.management.validate(task)
+
+ def test_management_interface_validate_fail(self):
+ # Missing SSH driver_info information
+ node = obj_utils.create_test_node(self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake_ssh')
+ with task_manager.acquire(self.context, node.uuid) as task:
+ self.assertRaises(exception.MissingParameterValue,
+ task.driver.management.validate, task)
diff --git a/ironic/tests/unit/drivers/test_utils.py b/ironic/tests/unit/drivers/test_utils.py
new file mode 100644
index 000000000..af49e0e67
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_utils.py
@@ -0,0 +1,165 @@
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.conductor import utils as manager_utils
+from ironic.drivers.modules import fake
+from ironic.drivers import utils as driver_utils
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.objects import utils as obj_utils
+
+
+class UtilsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(UtilsTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager()
+ self.driver = driver_factory.get_driver("fake")
+ self.node = obj_utils.create_test_node(self.context)
+
+ def test_vendor_interface_get_properties(self):
+ expected = {'A1': 'A1 description. Required.',
+ 'A2': 'A2 description. Optional.',
+ 'B1': 'B1 description. Required.',
+ 'B2': 'B2 description. Required.'}
+ props = self.driver.vendor.get_properties()
+ self.assertEqual(expected, props)
+
+ @mock.patch.object(fake.FakeVendorA, 'validate', autospec=True)
+ def test_vendor_interface_validate_valid_methods(self,
+ mock_fakea_validate):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.driver.vendor.validate(task, method='first_method')
+ mock_fakea_validate.assert_called_once_with(
+ self.driver.vendor.mapping['first_method'],
+ task, method='first_method')
+
+ def test_vendor_interface_validate_bad_method(self):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.driver.vendor.validate,
+ task, method='fake_method')
+
+ def test_get_node_mac_addresses(self):
+ ports = []
+ ports.append(
+ obj_utils.create_test_port(
+ self.context,
+ address='aa:bb:cc:dd:ee:ff',
+ uuid='bb43dc0b-03f2-4d2e-ae87-c02d7f33cc53',
+ node_id=self.node.id)
+ )
+ ports.append(
+ obj_utils.create_test_port(
+ self.context,
+ address='dd:ee:ff:aa:bb:cc',
+ uuid='4fc26c0b-03f2-4d2e-ae87-c02d7f33c234',
+ node_id=self.node.id)
+ )
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ node_macs = driver_utils.get_node_mac_addresses(task)
+ self.assertEqual(sorted([p.address for p in ports]), sorted(node_macs))
+
+ def test_get_node_capability(self):
+ properties = {'capabilities': 'cap1:value1, cap2: value2'}
+ self.node.properties = properties
+ expected = 'value1'
+ expected2 = 'value2'
+
+ result = driver_utils.get_node_capability(self.node, 'cap1')
+ result2 = driver_utils.get_node_capability(self.node, 'cap2')
+ self.assertEqual(expected, result)
+ self.assertEqual(expected2, result2)
+
+ def test_get_node_capability_returns_none(self):
+ properties = {'capabilities': 'cap1:value1,cap2:value2'}
+ self.node.properties = properties
+
+ result = driver_utils.get_node_capability(self.node, 'capX')
+ self.assertIsNone(result)
+
+ def test_add_node_capability(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['capabilities'] = ''
+ driver_utils.add_node_capability(task, 'boot_mode', 'bios')
+ self.assertEqual('boot_mode:bios',
+ task.node.properties['capabilities'])
+
+ def test_add_node_capability_append(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['capabilities'] = 'a:b,c:d'
+ driver_utils.add_node_capability(task, 'boot_mode', 'bios')
+ self.assertEqual('a:b,c:d,boot_mode:bios',
+ task.node.properties['capabilities'])
+
+ def test_add_node_capability_append_duplicate(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.properties['capabilities'] = 'a:b,c:d'
+ driver_utils.add_node_capability(task, 'a', 'b')
+ self.assertEqual('a:b,c:d,a:b',
+ task.node.properties['capabilities'])
+
+ @mock.patch.object(manager_utils, 'node_set_boot_device', autospec=True)
+ def test_ensure_next_boot_device(self, node_set_boot_device_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_internal_info['persistent_boot_device'] = 'pxe'
+ driver_utils.ensure_next_boot_device(
+ task,
+ {'force_boot_device': True}
+ )
+ node_set_boot_device_mock.assert_called_once_with(task, 'pxe')
+
+ def test_ensure_next_boot_device_clears_is_next_boot_persistent(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_internal_info['persistent_boot_device'] = 'pxe'
+ task.node.driver_internal_info['is_next_boot_persistent'] = False
+ driver_utils.ensure_next_boot_device(
+ task,
+ {'force_boot_device': True}
+ )
+ task.node.refresh()
+ self.assertNotIn('is_next_boot_persistent',
+ task.node.driver_internal_info)
+
+ def test_force_persistent_boot_true(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.node.driver_info['ipmi_force_boot_device'] = True
+ ret = driver_utils.force_persistent_boot(task, 'pxe', True)
+ self.assertIsNone(ret)
+ task.node.refresh()
+ self.assertIn('persistent_boot_device',
+ task.node.driver_internal_info)
+
+ def test_force_persistent_boot_false(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ret = driver_utils.force_persistent_boot(task, 'pxe', False)
+ self.assertIsNone(ret)
+ task.node.refresh()
+ self.assertEqual(
+ False,
+ task.node.driver_internal_info.get('is_next_boot_persistent')
+ )
diff --git a/ironic/tests/unit/drivers/test_virtualbox.py b/ironic/tests/unit/drivers/test_virtualbox.py
new file mode 100644
index 000000000..a854cb553
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_virtualbox.py
@@ -0,0 +1,374 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for VirtualBox Driver Modules."""
+
+import mock
+from oslo_config import cfg
+from pyremotevbox import exception as pyremotevbox_exc
+from pyremotevbox import vbox as pyremotevbox_vbox
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules import virtualbox
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.objects import utils as obj_utils
+
+INFO_DICT = {
+ 'virtualbox_vmname': 'baremetal1',
+ 'virtualbox_host': '10.0.2.2',
+ 'virtualbox_username': 'username',
+ 'virtualbox_password': 'password',
+ 'virtualbox_port': 12345,
+}
+
+CONF = cfg.CONF
+
+
+class VirtualBoxMethodsTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(VirtualBoxMethodsTestCase, self).setUp()
+ driver_info = INFO_DICT.copy()
+ mgr_utils.mock_the_extension_manager(driver="fake_vbox")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_vbox',
+ driver_info=driver_info)
+
+ def test__parse_driver_info(self):
+ info = virtualbox._parse_driver_info(self.node)
+ self.assertEqual('baremetal1', info['vmname'])
+ self.assertEqual('10.0.2.2', info['host'])
+ self.assertEqual('username', info['username'])
+ self.assertEqual('password', info['password'])
+ self.assertEqual(12345, info['port'])
+
+ def test__parse_driver_info_missing_vmname(self):
+ del self.node.driver_info['virtualbox_vmname']
+ self.assertRaises(exception.MissingParameterValue,
+ virtualbox._parse_driver_info, self.node)
+
+ def test__parse_driver_info_missing_host(self):
+ del self.node.driver_info['virtualbox_host']
+ self.assertRaises(exception.MissingParameterValue,
+ virtualbox._parse_driver_info, self.node)
+
+ def test__parse_driver_info_invalid_port(self):
+ self.node.driver_info['virtualbox_port'] = 'invalid-port'
+ self.assertRaises(exception.InvalidParameterValue,
+ virtualbox._parse_driver_info, self.node)
+
+ def test__parse_driver_info_missing_port(self):
+ del self.node.driver_info['virtualbox_port']
+ info = virtualbox._parse_driver_info(self.node)
+ self.assertEqual(18083, info['port'])
+
+ @mock.patch.object(pyremotevbox_vbox, 'VirtualBoxHost', autospec=True)
+ def test__run_virtualbox_method(self, host_mock):
+ host_object_mock = mock.MagicMock(spec_set=['find_vm'])
+ func_mock = mock.MagicMock(spec_set=[])
+ vm_object_mock = mock.MagicMock(spec_set=['foo'], foo=func_mock)
+ host_mock.return_value = host_object_mock
+ host_object_mock.find_vm.return_value = vm_object_mock
+ func_mock.return_value = 'return-value'
+
+ return_value = virtualbox._run_virtualbox_method(
+ self.node, 'some-ironic-method', 'foo', 'args', kwarg='kwarg')
+
+ host_mock.assert_called_once_with(vmname='baremetal1',
+ host='10.0.2.2',
+ username='username',
+ password='password',
+ port=12345)
+ host_object_mock.find_vm.assert_called_once_with('baremetal1')
+ func_mock.assert_called_once_with('args', kwarg='kwarg')
+ self.assertEqual('return-value', return_value)
+
+ @mock.patch.object(pyremotevbox_vbox, 'VirtualBoxHost', autospec=True)
+ def test__run_virtualbox_method_get_host_fails(self, host_mock):
+ host_mock.side_effect = pyremotevbox_exc.PyRemoteVBoxException
+
+ self.assertRaises(exception.VirtualBoxOperationFailed,
+ virtualbox._run_virtualbox_method,
+ self.node, 'some-ironic-method', 'foo',
+ 'args', kwarg='kwarg')
+
+ @mock.patch.object(pyremotevbox_vbox, 'VirtualBoxHost', autospec=True)
+ def test__run_virtualbox_method_find_vm_fails(self, host_mock):
+ host_object_mock = mock.MagicMock(spec_set=['find_vm'])
+ host_mock.return_value = host_object_mock
+ exc = pyremotevbox_exc.PyRemoteVBoxException
+ host_object_mock.find_vm.side_effect = exc
+
+ self.assertRaises(exception.VirtualBoxOperationFailed,
+ virtualbox._run_virtualbox_method,
+ self.node, 'some-ironic-method', 'foo', 'args',
+ kwarg='kwarg')
+ host_mock.assert_called_once_with(vmname='baremetal1',
+ host='10.0.2.2',
+ username='username',
+ password='password',
+ port=12345)
+ host_object_mock.find_vm.assert_called_once_with('baremetal1')
+
+ @mock.patch.object(pyremotevbox_vbox, 'VirtualBoxHost', autospec=True)
+ def test__run_virtualbox_method_func_fails(self, host_mock):
+ host_object_mock = mock.MagicMock(spec_set=['find_vm'])
+ host_mock.return_value = host_object_mock
+ func_mock = mock.MagicMock()
+ vm_object_mock = mock.MagicMock(spec_set=['foo'], foo=func_mock)
+ host_object_mock.find_vm.return_value = vm_object_mock
+ func_mock.side_effect = pyremotevbox_exc.PyRemoteVBoxException
+
+ self.assertRaises(exception.VirtualBoxOperationFailed,
+ virtualbox._run_virtualbox_method,
+ self.node, 'some-ironic-method', 'foo',
+ 'args', kwarg='kwarg')
+ host_mock.assert_called_once_with(vmname='baremetal1',
+ host='10.0.2.2',
+ username='username',
+ password='password',
+ port=12345)
+ host_object_mock.find_vm.assert_called_once_with('baremetal1')
+ func_mock.assert_called_once_with('args', kwarg='kwarg')
+
+ @mock.patch.object(pyremotevbox_vbox, 'VirtualBoxHost', autospec=True)
+ def test__run_virtualbox_method_invalid_method(self, host_mock):
+ host_object_mock = mock.MagicMock(spec_set=['find_vm'])
+ host_mock.return_value = host_object_mock
+ vm_object_mock = mock.MagicMock(spec_set=[])
+ host_object_mock.find_vm.return_value = vm_object_mock
+ del vm_object_mock.foo
+
+ self.assertRaises(exception.InvalidParameterValue,
+ virtualbox._run_virtualbox_method,
+ self.node, 'some-ironic-method', 'foo',
+ 'args', kwarg='kwarg')
+ host_mock.assert_called_once_with(vmname='baremetal1',
+ host='10.0.2.2',
+ username='username',
+ password='password',
+ port=12345)
+ host_object_mock.find_vm.assert_called_once_with('baremetal1')
+
+ @mock.patch.object(pyremotevbox_vbox, 'VirtualBoxHost', autospec=True)
+ def test__run_virtualbox_method_vm_wrong_power_state(self, host_mock):
+ host_object_mock = mock.MagicMock(spec_set=['find_vm'])
+ host_mock.return_value = host_object_mock
+ func_mock = mock.MagicMock(spec_set=[])
+ vm_object_mock = mock.MagicMock(spec_set=['foo'], foo=func_mock)
+ host_object_mock.find_vm.return_value = vm_object_mock
+ func_mock.side_effect = pyremotevbox_exc.VmInWrongPowerState
+
+ # _run_virtualbox_method() doesn't catch VmInWrongPowerState and
+ # lets the caller handle it.
+ self.assertRaises(pyremotevbox_exc.VmInWrongPowerState,
+ virtualbox._run_virtualbox_method,
+ self.node, 'some-ironic-method', 'foo',
+ 'args', kwarg='kwarg')
+ host_mock.assert_called_once_with(vmname='baremetal1',
+ host='10.0.2.2',
+ username='username',
+ password='password',
+ port=12345)
+ host_object_mock.find_vm.assert_called_once_with('baremetal1')
+ func_mock.assert_called_once_with('args', kwarg='kwarg')
+
+
+class VirtualBoxPowerTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(VirtualBoxPowerTestCase, self).setUp()
+ driver_info = INFO_DICT.copy()
+ mgr_utils.mock_the_extension_manager(driver="fake_vbox")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_vbox',
+ driver_info=driver_info)
+
+ def test_get_properties(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ properties = task.driver.power.get_properties()
+
+ self.assertIn('virtualbox_vmname', properties)
+ self.assertIn('virtualbox_host', properties)
+
+ @mock.patch.object(virtualbox, '_parse_driver_info', autospec=True)
+ def test_validate(self, parse_info_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.validate(task)
+ parse_info_mock.assert_called_once_with(task.node)
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_get_power_state(self, run_method_mock):
+ run_method_mock.return_value = 'PoweredOff'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ power_state = task.driver.power.get_power_state(task)
+ run_method_mock.assert_called_once_with(task.node,
+ 'get_power_state',
+ 'get_power_status')
+ self.assertEqual(states.POWER_OFF, power_state)
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_get_power_state_invalid_state(self, run_method_mock):
+ run_method_mock.return_value = 'invalid-state'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ power_state = task.driver.power.get_power_state(task)
+ run_method_mock.assert_called_once_with(task.node,
+ 'get_power_state',
+ 'get_power_status')
+ self.assertEqual(states.ERROR, power_state)
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_set_power_state_off(self, run_method_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_OFF)
+ run_method_mock.assert_called_once_with(task.node,
+ 'set_power_state',
+ 'stop')
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_set_power_state_on(self, run_method_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+ run_method_mock.assert_called_once_with(task.node,
+ 'set_power_state',
+ 'start')
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_set_power_state_reboot(self, run_method_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.set_power_state(task, states.REBOOT)
+ run_method_mock.assert_any_call(task.node,
+ 'reboot',
+ 'stop')
+ run_method_mock.assert_any_call(task.node,
+ 'reboot',
+ 'start')
+
+ def test_set_power_state_invalid_state(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.set_power_state,
+ task, 'invalid-state')
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_reboot(self, run_method_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.power.reboot(task)
+ run_method_mock.assert_any_call(task.node,
+ 'reboot',
+ 'stop')
+ run_method_mock.assert_any_call(task.node,
+ 'reboot',
+ 'start')
+
+
+class VirtualBoxManagementTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(VirtualBoxManagementTestCase, self).setUp()
+ driver_info = INFO_DICT.copy()
+ mgr_utils.mock_the_extension_manager(driver="fake_vbox")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_vbox',
+ driver_info=driver_info)
+
+ def test_get_properties(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ properties = task.driver.management.get_properties()
+
+ self.assertIn('virtualbox_vmname', properties)
+ self.assertIn('virtualbox_host', properties)
+
+ @mock.patch.object(virtualbox, '_parse_driver_info', autospec=True)
+ def test_validate(self, parse_info_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.validate(task)
+ parse_info_mock.assert_called_once_with(task.node)
+
+ def test_get_supported_boot_devices(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ devices = task.driver.management.get_supported_boot_devices(task)
+ self.assertIn(boot_devices.PXE, devices)
+ self.assertIn(boot_devices.DISK, devices)
+ self.assertIn(boot_devices.CDROM, devices)
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_get_boot_device_ok(self, run_method_mock):
+ run_method_mock.return_value = 'Network'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ret_val = task.driver.management.get_boot_device(task)
+ run_method_mock.assert_called_once_with(task.node,
+ 'get_boot_device',
+ 'get_boot_device')
+ self.assertEqual(boot_devices.PXE, ret_val['boot_device'])
+ self.assertTrue(ret_val['persistent'])
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_get_boot_device_invalid(self, run_method_mock):
+ run_method_mock.return_value = 'invalid-boot-device'
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ ret_val = task.driver.management.get_boot_device(task)
+ self.assertIsNone(ret_val['boot_device'])
+ self.assertIsNone(ret_val['persistent'])
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_set_boot_device_ok(self, run_method_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.set_boot_device(task, boot_devices.PXE)
+ run_method_mock.assert_called_once_with(task.node,
+ 'set_boot_device',
+ 'set_boot_device',
+ 'Network')
+
+ @mock.patch.object(virtualbox, 'LOG', autospec=True)
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_set_boot_device_wrong_power_state(self, run_method_mock,
+ log_mock):
+ run_method_mock.side_effect = pyremotevbox_exc.VmInWrongPowerState
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ task.driver.management.set_boot_device(task, boot_devices.PXE)
+ log_mock.error.assert_called_once_with(mock.ANY, mock.ANY)
+
+ @mock.patch.object(virtualbox, '_run_virtualbox_method', autospec=True)
+ def test_set_boot_device_invalid(self, run_method_mock):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.management.set_boot_device,
+ task, 'invalid-boot-device')
+
+ def test_get_sensors_data(self):
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(NotImplementedError,
+ task.driver.management.get_sensors_data,
+ task)
diff --git a/ironic/tests/unit/drivers/test_wol.py b/ironic/tests/unit/drivers/test_wol.py
new file mode 100644
index 000000000..f8e02e298
--- /dev/null
+++ b/ironic/tests/unit/drivers/test_wol.py
@@ -0,0 +1,194 @@
+# Copyright 2015 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Test class for Wake-On-Lan driver module."""
+
+import socket
+import time
+
+import mock
+from oslo_utils import uuidutils
+
+from ironic.common import driver_factory
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules import wol
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.objects import utils as obj_utils
+
+
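+# time.sleep is patched to a no-op so any sleep calls in the code under test
+# return immediately and do not slow down these tests.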
+@mock.patch.object(time, 'sleep', lambda *_: None)
+class WakeOnLanPrivateMethodTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(WakeOnLanPrivateMethodTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_wol')
+ self.driver = driver_factory.get_driver('fake_wol')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_wol')
+ self.port = obj_utils.create_test_port(self.context,
+ node_id=self.node.id)
+
+ def test__parse_parameters(self):
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ params = wol._parse_parameters(task)
+ self.assertEqual('255.255.255.255', params['host'])
+ self.assertEqual(9, params['port'])
+
+ def test__parse_parameters_non_default_params(self):
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ task.node.driver_info = {'wol_host': '1.2.3.4',
+ 'wol_port': 7}
+ params = wol._parse_parameters(task)
+ self.assertEqual('1.2.3.4', params['host'])
+ self.assertEqual(7, params['port'])
+
+ def test__parse_parameters_no_ports_fail(self):
+ node = obj_utils.create_test_node(
+ self.context,
+ uuid=uuidutils.generate_uuid(),
+ driver='fake_wol')
+ with task_manager.acquire(
+ self.context, node.uuid, shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ wol._parse_parameters, task)
+
+ @mock.patch.object(socket, 'socket', autospec=True, spec_set=True)
+ def test_send_magic_packets(self, mock_socket):
+ fake_socket = mock.Mock(spec=socket, spec_set=True)
+ mock_socket.return_value = fake_socket()
+ obj_utils.create_test_port(self.context,
+ uuid=uuidutils.generate_uuid(),
+ address='aa:bb:cc:dd:ee:ff',
+ node_id=self.node.id)
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ wol._send_magic_packets(task, '255.255.255.255', 9)
+
+ expected_calls = [
+ mock.call(),
+ mock.call().setsockopt(socket.SOL_SOCKET,
+ socket.SO_BROADCAST, 1),
+ mock.call().sendto(mock.ANY, ('255.255.255.255', 9)),
+ mock.call().sendto(mock.ANY, ('255.255.255.255', 9)),
+ mock.call().close()]
+
+ fake_socket.assert_has_calls(expected_calls)
+ self.assertEqual(1, mock_socket.call_count)
+
+ @mock.patch.object(socket, 'socket', autospec=True, spec_set=True)
+ def test_send_magic_packets_network_sendto_error(self, mock_socket):
+ fake_socket = mock.Mock(spec=socket, spec_set=True)
+ fake_socket.return_value.sendto.side_effect = socket.error('boom')
+ mock_socket.return_value = fake_socket()
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ self.assertRaises(exception.WolOperationError,
+ wol._send_magic_packets,
+ task, '255.255.255.255', 9)
+ self.assertEqual(1, mock_socket.call_count)
+ # assert sendto() was invoked
+ fake_socket.return_value.sendto.assert_called_once_with(
+ mock.ANY, ('255.255.255.255', 9))
+
+ @mock.patch.object(socket, 'socket', autospec=True, spec_set=True)
+ def test_magic_packet_format(self, mock_socket):
+ fake_socket = mock.Mock(spec=socket, spec_set=True)
+ mock_socket.return_value = fake_socket()
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ wol._send_magic_packets(task, '255.255.255.255', 9)
+
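+ # A Wake-on-LAN magic packet is six 0xff bytes followed by the target
+ # MAC address repeated 16 times; 52:54:00:cf:2d:31 encodes to
+ # b'RT\x00\xcf-1'.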
+ expct_packet = (b'\xff\xff\xff\xff\xff\xffRT\x00\xcf-1RT\x00'
+ b'\xcf-1RT\x00\xcf-1RT\x00\xcf-1RT\x00\xcf-1RT'
+ b'\x00\xcf-1RT\x00\xcf-1RT\x00\xcf-1RT\x00'
+ b'\xcf-1RT\x00\xcf-1RT\x00\xcf-1RT\x00\xcf-1RT'
+ b'\x00\xcf-1RT\x00\xcf-1RT\x00\xcf-1RT\x00\xcf-1')
+ mock_socket.return_value.sendto.assert_called_once_with(
+ expct_packet, ('255.255.255.255', 9))
+
+
+@mock.patch.object(time, 'sleep', lambda *_: None)
+class WakeOnLanDriverTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(WakeOnLanDriverTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_wol')
+ self.driver = driver_factory.get_driver('fake_wol')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_wol')
+ self.port = obj_utils.create_test_port(self.context,
+ node_id=self.node.id)
+
+ def test_get_properties(self):
+ expected = wol.COMMON_PROPERTIES
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ def test_get_power_state(self):
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ task.node.power_state = states.POWER_ON
+ pstate = task.driver.power.get_power_state(task)
+ self.assertEqual(states.POWER_ON, pstate)
+
+ def test_get_power_state_nostate(self):
+ with task_manager.acquire(
+ self.context, self.node.uuid, shared=True) as task:
+ task.node.power_state = states.NOSTATE
+ pstate = task.driver.power.get_power_state(task)
+ self.assertEqual(states.POWER_OFF, pstate)
+
+ @mock.patch.object(wol, '_send_magic_packets', autospec=True,
+ spec_set=True)
+ def test_set_power_state_power_on(self, mock_magic):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.power.set_power_state(task, states.POWER_ON)
+ mock_magic.assert_called_once_with(task, '255.255.255.255', 9)
+
+ @mock.patch.object(wol.LOG, 'info', autospec=True, spec_set=True)
+ @mock.patch.object(wol, '_send_magic_packets', autospec=True,
+ spec_set=True)
+ def test_set_power_state_power_off(self, mock_magic, mock_log):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.power.set_power_state(task, states.POWER_OFF)
+ mock_log.assert_called_once_with(mock.ANY, self.node.uuid)
+ # assert magic packets weren't sent
+ self.assertFalse(mock_magic.called)
+
+ @mock.patch.object(wol, '_send_magic_packets', autospec=True,
+ spec_set=True)
+ def test_set_power_state_power_fail(self, mock_magic):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ task.driver.power.set_power_state,
+ task, 'wrong-state')
+ # assert magic packets weren't sent
+ self.assertFalse(mock_magic.called)
+
+ @mock.patch.object(wol.LOG, 'info', autospec=True, spec_set=True)
+ @mock.patch.object(wol.WakeOnLanPower, 'set_power_state', autospec=True,
+ spec_set=True)
+ def test_reboot(self, mock_power, mock_log):
+ with task_manager.acquire(self.context, self.node.uuid) as task:
+ task.driver.power.reboot(task)
+ mock_log.assert_called_once_with(mock.ANY, self.node.uuid)
+ mock_power.assert_called_once_with(task.driver.power, task,
+ states.POWER_ON)
diff --git a/ironic/tests/unit/drivers/third_party_driver_mock_specs.py b/ironic/tests/unit/drivers/third_party_driver_mock_specs.py
new file mode 100644
index 000000000..94666d043
--- /dev/null
+++ b/ironic/tests/unit/drivers/third_party_driver_mock_specs.py
@@ -0,0 +1,118 @@
+# Copyright 2015 Intel Corporation
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""This module provides mock 'specs' for third party modules that can be used
+when needing to mock those third party modules"""
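+
+# For example, third_party_driver_mocks.py builds restricted mocks from these
+# tuples, e.g. mock.MagicMock(spec_set=PYGHMI_SPEC), so that each mock exposes
+# only the attribute names listed in its spec.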
+
+# iboot
+IBOOT_SPEC = (
+ 'iBootInterface',
+)
+
+# ironic_inspector
+IRONIC_INSPECTOR_CLIENT_SPEC = (
+ 'introspect',
+ 'get_status',
+)
+
+# proliantutils
+PROLIANTUTILS_SPEC = (
+ 'exception',
+ 'ilo',
+)
+
+# pyghmi
+PYGHMI_SPEC = (
+ 'exceptions',
+ 'ipmi',
+)
+PYGHMI_EXC_SPEC = (
+ 'IpmiException',
+)
+PYGHMI_IPMI_SPEC = (
+ 'command',
+)
+PYGHMI_IPMICMD_SPEC = (
+ 'boot_devices',
+ 'Command',
+)
+
+# pyremotevbox
+PYREMOTEVBOX_SPEC = (
+ 'exception',
+ 'vbox',
+)
+PYREMOTEVBOX_EXC_SPEC = (
+ 'PyRemoteVBoxException',
+ 'VmInWrongPowerState',
+)
+PYREMOTEVBOX_VBOX_SPEC = (
+ 'VirtualBoxHost',
+)
+
+# pywsman
+PYWSMAN_SPEC = (
+ 'Client',
+ 'ClientOptions',
+ 'EndPointReference',
+ 'FLAG_ENUMERATION_OPTIMIZATION',
+ 'Filter',
+ 'XmlDoc',
+ 'wsman_transport_set_verify_host',
+ 'wsman_transport_set_verify_peer',
+)
+
+# pywsnmp
+PYWSNMP_SPEC = (
+ 'entity',
+ 'error',
+ 'proto',
+)
+
+# scciclient
+SCCICLIENT_SPEC = (
+ 'irmc',
+)
+SCCICLIENT_IRMC_SCCI_SPEC = (
+ 'POWER_OFF',
+ 'POWER_ON',
+ 'POWER_RESET',
+ 'MOUNT_CD',
+ 'UNMOUNT_CD',
+ 'MOUNT_FD',
+ 'UNMOUNT_FD',
+ 'SCCIClientError',
+ 'SCCIInvalidInputError',
+ 'get_share_type',
+ 'get_client',
+ 'get_report',
+ 'get_sensor_data',
+ 'get_virtual_cd_set_params_cmd',
+ 'get_virtual_fd_set_params_cmd',
+)
+
+# seamicro
+SEAMICRO_SPEC = (
+ 'client',
+ 'exceptions',
+)
+# seamicro.client module
+SEAMICRO_CLIENT_MOD_SPEC = (
+ 'Client',
+)
+SEAMICRO_EXC_SPEC = (
+ 'ClientException',
+ 'UnsupportedVersion',
+)
diff --git a/ironic/tests/unit/drivers/third_party_driver_mocks.py b/ironic/tests/unit/drivers/third_party_driver_mocks.py
new file mode 100644
index 000000000..e88959aab
--- /dev/null
+++ b/ironic/tests/unit/drivers/third_party_driver_mocks.py
@@ -0,0 +1,244 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""This module detects whether third-party libraries, utilized by third-party
+drivers, are present on the system. If they are not, it mocks them and tinkers
+with sys.modules so that the drivers can be loaded by unit tests, and the unit
+tests can continue to test the functionality of those drivers without the
+respective external libraries' actually being present.
+
+Any external library required by a third-party driver should be mocked here.
+Current list of mocked libraries:
+
+- seamicroclient
+- ipminative
+- proliantutils
+- pysnmp
+- scciclient
+"""
+
+import sys
+
+import mock
+from oslo_utils import importutils
+import six
+
+from ironic.drivers.modules import ipmitool
+from ironic.tests.unit.drivers import third_party_driver_mock_specs \
+ as mock_specs
+
+
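+# The detect/mock/reload pattern used below, sketched for a hypothetical
+# optional library 'fakelib' and driver module 'fake_driver' (both names are
+# illustrative only):
+#
+#     fakelib = importutils.try_import('fakelib')
+#     if not fakelib:
+#         fakelib = mock.MagicMock(spec_set=('Client',))
+#         sys.modules['fakelib'] = fakelib
+#     if 'ironic.drivers.modules.fake_driver' in sys.modules:
+#         six.moves.reload_module(
+#             sys.modules['ironic.drivers.modules.fake_driver'])
+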
+# attempt to load the external 'seamicroclient' library, which is
+# required by the optional drivers.modules.seamicro module
+seamicroclient = importutils.try_import("seamicroclient")
+if not seamicroclient:
+ smc = mock.MagicMock(spec_set=mock_specs.SEAMICRO_SPEC)
+ smc.client = mock.MagicMock(spec_set=mock_specs.SEAMICRO_CLIENT_MOD_SPEC)
+ smc.exceptions = mock.MagicMock(spec_set=mock_specs.SEAMICRO_EXC_SPEC)
+ smc.exceptions.ClientException = Exception
+ smc.exceptions.UnsupportedVersion = Exception
+ sys.modules['seamicroclient'] = smc
+ sys.modules['seamicroclient.client'] = smc.client
+ sys.modules['seamicroclient.exceptions'] = smc.exceptions
+
+# if anything has loaded the seamicro driver yet, reload it now that
+# the external library has been mocked
+if 'ironic.drivers.modules.seamicro' in sys.modules:
+ six.moves.reload_module(sys.modules['ironic.drivers.modules.seamicro'])
+
+# IPMITool driver checks the system for presence of 'ipmitool' binary during
+# __init__. We bypass that check in order to run the unit tests, which do not
+# depend on 'ipmitool' being on the system.
+ipmitool.TIMING_SUPPORT = False
+ipmitool.DUAL_BRIDGE_SUPPORT = False
+ipmitool.SINGLE_BRIDGE_SUPPORT = False
+
+pyghmi = importutils.try_import("pyghmi")
+if not pyghmi:
+ p = mock.MagicMock(spec_set=mock_specs.PYGHMI_SPEC)
+ p.exceptions = mock.MagicMock(spec_set=mock_specs.PYGHMI_EXC_SPEC)
+ p.exceptions.IpmiException = Exception
+ p.ipmi = mock.MagicMock(spec_set=mock_specs.PYGHMI_IPMI_SPEC)
+ p.ipmi.command = mock.MagicMock(spec_set=mock_specs.PYGHMI_IPMICMD_SPEC)
+ p.ipmi.command.Command = mock.MagicMock(spec_set=[])
+ sys.modules['pyghmi'] = p
+ sys.modules['pyghmi.exceptions'] = p.exceptions
+ sys.modules['pyghmi.ipmi'] = p.ipmi
+ sys.modules['pyghmi.ipmi.command'] = p.ipmi.command
+ # FIXME(deva): the next line is a hack, because several unit tests
+ # actually depend on this particular string being present
+ # in pyghmi.ipmi.command.boot_devices
+ p.ipmi.command.boot_devices = {'pxe': 4}
+
+if 'ironic.drivers.modules.ipminative' in sys.modules:
+ six.moves.reload_module(sys.modules['ironic.drivers.modules.ipminative'])
+
+proliantutils = importutils.try_import('proliantutils')
+if not proliantutils:
+ proliantutils = mock.MagicMock(spec_set=mock_specs.PROLIANTUTILS_SPEC)
+ sys.modules['proliantutils'] = proliantutils
+ sys.modules['proliantutils.ilo'] = proliantutils.ilo
+ sys.modules['proliantutils.ilo.client'] = proliantutils.ilo.client
+ sys.modules['proliantutils.exception'] = proliantutils.exception
+ proliantutils.exception.IloError = type('IloError', (Exception,), {})
+ command_exception = type('IloCommandNotSupportedError', (Exception,), {})
+ proliantutils.exception.IloCommandNotSupportedError = command_exception
+ if 'ironic.drivers.ilo' in sys.modules:
+ six.moves.reload_module(sys.modules['ironic.drivers.ilo'])
+
+
+# attempt to load the external 'pywsman' library, which is required by
+# the optional drivers.modules.drac and drivers.modules.amt modules
+pywsman = importutils.try_import('pywsman')
+if not pywsman:
+ pywsman = mock.MagicMock(spec_set=mock_specs.PYWSMAN_SPEC)
+ sys.modules['pywsman'] = pywsman
+ # Now that the external library has been mocked, if anything had already
+ # loaded any of the drivers, reload them.
+ if 'ironic.drivers.modules.drac' in sys.modules:
+ six.moves.reload_module(sys.modules['ironic.drivers.modules.drac'])
+ if 'ironic.drivers.modules.amt' in sys.modules:
+ six.moves.reload_module(sys.modules['ironic.drivers.modules.amt'])
+
+
+# attempt to load the external 'iboot' library, which is required by
+# the optional drivers.modules.iboot module
+iboot = importutils.try_import("iboot")
+if not iboot:
+ ib = mock.MagicMock(spec_set=mock_specs.IBOOT_SPEC)
+ ib.iBootInterface = mock.MagicMock(spec_set=[])
+ sys.modules['iboot'] = ib
+
+# if anything has loaded the iboot driver yet, reload it now that the
+# external library has been mocked
+if 'ironic.drivers.modules.iboot' in sys.modules:
+ six.moves.reload_module(sys.modules['ironic.drivers.modules.iboot'])
+
+
+# attempt to load the external 'pysnmp' library, which is required by
+# the optional drivers.modules.snmp module
+pysnmp = importutils.try_import("pysnmp")
+if not pysnmp:
+ pysnmp = mock.MagicMock(spec_set=mock_specs.PYWSNMP_SPEC)
+ sys.modules["pysnmp"] = pysnmp
+ sys.modules["pysnmp.entity"] = pysnmp.entity
+ sys.modules["pysnmp.entity.rfc3413"] = pysnmp.entity.rfc3413
+ sys.modules["pysnmp.entity.rfc3413.oneliner"] = (
+ pysnmp.entity.rfc3413.oneliner)
+ sys.modules["pysnmp.entity.rfc3413.oneliner.cmdgen"] = (
+ pysnmp.entity.rfc3413.oneliner.cmdgen)
+ sys.modules["pysnmp.error"] = pysnmp.error
+ pysnmp.error.PySnmpError = Exception
+ sys.modules["pysnmp.proto"] = pysnmp.proto
+ sys.modules["pysnmp.proto.rfc1902"] = pysnmp.proto.rfc1902
+ # Patch the RFC1902 integer class with a python int
+ pysnmp.proto.rfc1902.Integer = int
+
+
+# if anything has loaded the snmp driver yet, reload it now that the
+# external library has been mocked
+if 'ironic.drivers.modules.snmp' in sys.modules:
+ six.moves.reload_module(sys.modules['ironic.drivers.modules.snmp'])
+
+
+# attempt to load the external 'scciclient' library, which is required by
+# the optional drivers.modules.irmc module
+scciclient = importutils.try_import('scciclient')
+if not scciclient:
+ mock_scciclient = mock.MagicMock(spec_set=mock_specs.SCCICLIENT_SPEC)
+ sys.modules['scciclient'] = mock_scciclient
+ sys.modules['scciclient.irmc'] = mock_scciclient.irmc
+ sys.modules['scciclient.irmc.scci'] = mock.MagicMock(
+ spec_set=mock_specs.SCCICLIENT_IRMC_SCCI_SPEC,
+ POWER_OFF=mock.sentinel.POWER_OFF,
+ POWER_ON=mock.sentinel.POWER_ON,
+ POWER_RESET=mock.sentinel.POWER_RESET,
+ MOUNT_CD=mock.sentinel.MOUNT_CD,
+ UNMOUNT_CD=mock.sentinel.UNMOUNT_CD,
+ MOUNT_FD=mock.sentinel.MOUNT_FD,
+ UNMOUNT_FD=mock.sentinel.UNMOUNT_FD)
+
+
+# if anything has loaded the iRMC driver yet, reload it now that the
+# external library has been mocked
+if 'ironic.drivers.modules.irmc' in sys.modules:
+ six.moves.reload_module(sys.modules['ironic.drivers.modules.irmc'])
+
+
+# install a mock object to prevent 'iscsi_irmc' and 'agent_irmc' from
+# checking whether the NFS/CIFS share file system is mounted.
+irmc_deploy = importutils.import_module(
+ 'ironic.drivers.modules.irmc.deploy')
+irmc_deploy._check_share_fs_mounted_orig = irmc_deploy._check_share_fs_mounted
+irmc_deploy._check_share_fs_mounted_patcher = mock.patch(
+ 'ironic.drivers.modules.irmc.deploy._check_share_fs_mounted')
+irmc_deploy._check_share_fs_mounted_patcher.return_value = None
+
+
+pyremotevbox = importutils.try_import('pyremotevbox')
+if not pyremotevbox:
+ pyremotevbox = mock.MagicMock(spec_set=mock_specs.PYREMOTEVBOX_SPEC)
+ pyremotevbox.exception = mock.MagicMock(
+ spec_set=mock_specs.PYREMOTEVBOX_EXC_SPEC)
+ pyremotevbox.exception.PyRemoteVBoxException = Exception
+ pyremotevbox.exception.VmInWrongPowerState = Exception
+ pyremotevbox.vbox = mock.MagicMock(
+ spec_set=mock_specs.PYREMOTEVBOX_VBOX_SPEC)
+ sys.modules['pyremotevbox'] = pyremotevbox
+ if 'ironic.drivers.modules.virtualbox' in sys.modules:
+ six.moves.reload_module(
+ sys.modules['ironic.drivers.modules.virtualbox'])
+
+
+ironic_inspector_client = importutils.try_import('ironic_inspector_client')
+if not ironic_inspector_client:
+ ironic_inspector_client = mock.MagicMock(
+ spec_set=mock_specs.IRONIC_INSPECTOR_CLIENT_SPEC)
+ sys.modules['ironic_inspector_client'] = ironic_inspector_client
+ if 'ironic.drivers.modules.inspector' in sys.modules:
+ six.moves.reload_module(
+ sys.modules['ironic.drivers.modules.inspector'])
+
+
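+# The UcsSdk exception stand-ins below are constructed with keyword
+# arguments (operation=..., error=..., node=...) in the UCS tests, so they
+# derive from a base that tolerates arbitrary **kwargs.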
+class MockKwargsException(Exception):
+ def __init__(self, *args, **kwargs):
+ super(MockKwargsException, self).__init__(*args)
+ self.kwargs = kwargs
+
+
+ucssdk = importutils.try_import('UcsSdk')
+if not ucssdk:
+ ucssdk = mock.MagicMock()
+ sys.modules['UcsSdk'] = ucssdk
+ sys.modules['UcsSdk.utils'] = ucssdk.utils
+ sys.modules['UcsSdk.utils.power'] = ucssdk.utils.power
+ sys.modules['UcsSdk.utils.management'] = ucssdk.utils.management
+ sys.modules['UcsSdk.utils.exception'] = ucssdk.utils.exception
+ ucssdk.utils.exception.UcsOperationError = (
+ type('UcsOperationError', (MockKwargsException,), {}))
+ ucssdk.utils.exception.UcsConnectionError = (
+ type('UcsConnectionError', (MockKwargsException,), {}))
+ if 'ironic.drivers.modules.ucs' in sys.modules:
+ six.moves.reload_module(
+ sys.modules['ironic.drivers.modules.ucs'])
+
+imcsdk = importutils.try_import('ImcSdk')
+if not imcsdk:
+ imcsdk = mock.MagicMock()
+ imcsdk.ImcException = Exception
+ sys.modules['ImcSdk'] = imcsdk
+ if 'ironic.drivers.modules.cimc' in sys.modules:
+ six.moves.reload_module(
+ sys.modules['ironic.drivers.modules.cimc'])
diff --git a/ironic/tests/unit/drivers/ucs/__init__.py b/ironic/tests/unit/drivers/ucs/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/drivers/ucs/__init__.py
diff --git a/ironic/tests/unit/drivers/ucs/test_helper.py b/ironic/tests/unit/drivers/ucs/test_helper.py
new file mode 100644
index 000000000..c83e82d46
--- /dev/null
+++ b/ironic/tests/unit/drivers/ucs/test_helper.py
@@ -0,0 +1,161 @@
+# Copyright 2015, Cisco Systems.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test class for common methods used by UCS modules."""
+
+import mock
+from oslo_config import cfg
+from oslo_utils import importutils
+
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.db import api as dbapi
+from ironic.drivers.modules.ucs import helper as ucs_helper
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+ucs_error = importutils.try_import('UcsSdk.utils.exception')
+
+INFO_DICT = db_utils.get_test_ucs_info()
+CONF = cfg.CONF
+
+
+class UcsValidateParametersTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(UcsValidateParametersTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver="fake_ucs")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_ucs',
+ driver_info=INFO_DICT)
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.helper = ucs_helper.CiscoUcsHelper(task)
+
+ def test_parse_driver_info(self):
+ info = ucs_helper.parse_driver_info(self.node)
+
+ self.assertIsNotNone(info.get('ucs_address'))
+ self.assertIsNotNone(info.get('ucs_username'))
+ self.assertIsNotNone(info.get('ucs_password'))
+ self.assertIsNotNone(info.get('ucs_service_profile'))
+
+ def test_parse_driver_info_missing_address(self):
+ del self.node.driver_info['ucs_address']
+ self.assertRaises(exception.MissingParameterValue,
+ ucs_helper.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_username(self):
+ del self.node.driver_info['ucs_username']
+ self.assertRaises(exception.MissingParameterValue,
+ ucs_helper.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_password(self):
+ del self.node.driver_info['ucs_password']
+ self.assertRaises(exception.MissingParameterValue,
+ ucs_helper.parse_driver_info, self.node)
+
+ def test_parse_driver_info_missing_service_profile(self):
+ del self.node.driver_info['ucs_service_profile']
+ self.assertRaises(exception.MissingParameterValue,
+ ucs_helper.parse_driver_info, self.node)
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ def test_connect_ucsm(self, mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.helper.connect_ucsm()
+
+ mock_helper.generate_ucsm_handle.assert_called_once_with(
+ task.node.driver_info['ucs_address'],
+ task.node.driver_info['ucs_username'],
+ task.node.driver_info['ucs_password']
+ )
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ def test_connect_ucsm_fail(self, mock_helper):
+ side_effect = ucs_error.UcsConnectionError(
+ message='connecting to ucsm',
+ error='failed')
+ mock_helper.generate_ucsm_handle.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.UcsConnectionError,
+ self.helper.connect_ucsm
+ )
+ mock_helper.generate_ucsm_handle.assert_called_once_with(
+ task.node.driver_info['ucs_address'],
+ task.node.driver_info['ucs_username'],
+ task.node.driver_info['ucs_password']
+ )
+
+ @mock.patch('ironic.drivers.modules.ucs.helper',
+ autospec=True)
+ def test_logout(self, mock_helper):
+ self.helper.logout()
+
+
+class UcsCommonMethodsTestcase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(UcsCommonMethodsTestcase, self).setUp()
+ self.dbapi = dbapi.get_instance()
+ mgr_utils.mock_the_extension_manager(driver="fake_ucs")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_ucs',
+ driver_info=INFO_DICT.copy())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.helper = ucs_helper.CiscoUcsHelper(task)
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper', autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.helper.CiscoUcsHelper',
+ autospec=True)
+ def test_requires_ucs_client_ok_logout(self, mc_helper, mock_ucs_helper):
+ mock_helper = mc_helper.return_value
+ mock_helper.logout.return_value = None
+ mock_working_function = mock.Mock()
+ mock_working_function.__name__ = "Working"
+ mock_working_function.return_value = "Success"
+ mock_ucs_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ wont_error = ucs_helper.requires_ucs_client(
+ mock_working_function)
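+ # requires_ucs_client wraps methods taking (self, task, ...), so the
+ # decorated function itself is passed in below as a stand-in for 'self'.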
+ wont_error(wont_error, task)
+ mock_helper.logout.assert_called_once_with()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper', autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.helper.CiscoUcsHelper',
+ autospec=True)
+ def test_requires_ucs_client_fail_logout(self, mc_helper, mock_ucs_helper):
+ mock_helper = mc_helper.return_value
+ mock_helper.logout.return_value = None
+ mock_broken_function = mock.Mock()
+ mock_broken_function.__name__ = "Broken"
+ mock_broken_function.side_effect = exception.IronicException()
+ mock_ucs_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+
+ will_error = ucs_helper.requires_ucs_client(mock_broken_function)
+ self.assertRaises(exception.IronicException,
+ will_error, will_error, task)
+ mock_helper.logout.assert_called_once_with()
diff --git a/ironic/tests/unit/drivers/ucs/test_management.py b/ironic/tests/unit/drivers/ucs/test_management.py
new file mode 100644
index 000000000..17101756b
--- /dev/null
+++ b/ironic/tests/unit/drivers/ucs/test_management.py
@@ -0,0 +1,139 @@
+# Copyright 2015, Cisco Systems.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Test class for UCS ManagementInterface
+"""
+
+import mock
+from oslo_config import cfg
+from oslo_utils import importutils
+
+from ironic.common import boot_devices
+from ironic.common import exception
+from ironic.conductor import task_manager
+from ironic.drivers.modules.ucs import helper as ucs_helper
+from ironic.drivers.modules.ucs import management as ucs_mgmt
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+ucs_error = importutils.try_import('UcsSdk.utils.exception')
+
+INFO_DICT = db_utils.get_test_ucs_info()
+CONF = cfg.CONF
+
+
+class UcsManagementTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(UcsManagementTestCase, self).setUp()
+ mgr_utils.mock_the_extension_manager(driver='fake_ucs')
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_ucs',
+ driver_info=INFO_DICT)
+ self.interface = ucs_mgmt.UcsManagement()
+ self.task = mock.Mock()
+ self.task.node = self.node
+
+ def test_get_properties(self):
+ expected = ucs_helper.COMMON_PROPERTIES
+ self.assertEqual(expected, self.interface.get_properties())
+
+ def test_get_supported_boot_devices(self):
+ expected = [boot_devices.PXE, boot_devices.DISK, boot_devices.CDROM]
+ self.assertEqual(sorted(expected),
+ sorted(self.interface.get_supported_boot_devices()))
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch(
+ 'ironic.drivers.modules.ucs.management.ucs_mgmt.BootDeviceHelper',
+ spec_set=True, autospec=True)
+ def test_get_boot_device(self, mock_ucs_mgmt, mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_mgmt = mock_ucs_mgmt.return_value
+ mock_mgmt.get_boot_device.return_value = {
+ 'boot_device': 'disk',
+ 'persistent': False
+ }
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ expected_device = boot_devices.DISK
+ expected_response = {'boot_device': expected_device,
+ 'persistent': False}
+ self.assertEqual(expected_response,
+ self.interface.get_boot_device(task))
+ mock_mgmt.get_boot_device.assert_called_once_with()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch(
+ 'ironic.drivers.modules.ucs.management.ucs_mgmt.BootDeviceHelper',
+ spec_set=True, autospec=True)
+ def test_get_boot_device_fail(self, mock_ucs_mgmt, mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_mgmt = mock_ucs_mgmt.return_value
+ side_effect = ucs_error.UcsOperationError(
+ operation='getting boot device',
+ error='failed',
+ node=self.node.uuid
+ )
+ mock_mgmt.get_boot_device.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.UcsOperationError,
+ self.interface.get_boot_device,
+ task)
+ mock_mgmt.get_boot_device.assert_called_once_with()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch(
+ 'ironic.drivers.modules.ucs.management.ucs_mgmt.BootDeviceHelper',
+ spec_set=True, autospec=True)
+ def test_set_boot_device(self, mock_mgmt, mock_helper):
+ mc_mgmt = mock_mgmt.return_value
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.interface.set_boot_device(task, boot_devices.CDROM)
+
+ mc_mgmt.set_boot_device.assert_called_once_with('cdrom', False)
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch(
+ 'ironic.drivers.modules.ucs.management.ucs_mgmt.BootDeviceHelper',
+ spec_set=True, autospec=True)
+ def test_set_boot_device_fail(self, mock_mgmt, mock_helper):
+ mc_mgmt = mock_mgmt.return_value
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ side_effect = exception.UcsOperationError(
+ operation='setting boot device',
+ error='failed',
+ node=self.node.uuid)
+ mc_mgmt.set_boot_device.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.IronicException,
+ self.interface.set_boot_device,
+ task, boot_devices.PXE)
+ mc_mgmt.set_boot_device.assert_called_once_with(
+ boot_devices.PXE, False)
+
+ def test_get_sensors_data(self):
+ self.assertRaises(NotImplementedError,
+ self.interface.get_sensors_data, self.task)
diff --git a/ironic/tests/unit/drivers/ucs/test_power.py b/ironic/tests/unit/drivers/ucs/test_power.py
new file mode 100644
index 000000000..a01a96bbd
--- /dev/null
+++ b/ironic/tests/unit/drivers/ucs/test_power.py
@@ -0,0 +1,302 @@
+# Copyright 2015, Cisco Systems.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test class for UcsPower module."""
+import mock
+from oslo_config import cfg
+from oslo_utils import importutils
+
+from ironic.common import exception
+from ironic.common import states
+from ironic.conductor import task_manager
+from ironic.drivers.modules.ucs import helper as ucs_helper
+from ironic.drivers.modules.ucs import power as ucs_power
+from ironic.tests.unit.conductor import utils as mgr_utils
+from ironic.tests.unit.db import base as db_base
+from ironic.tests.unit.db import utils as db_utils
+from ironic.tests.unit.objects import utils as obj_utils
+
+ucs_error = importutils.try_import('UcsSdk.utils.exception')
+
+INFO_DICT = db_utils.get_test_ucs_info()
+CONF = cfg.CONF
+
+
+class UcsPowerTestCase(db_base.DbTestCase):
+
+ def setUp(self):
+ super(UcsPowerTestCase, self).setUp()
+ driver_info = INFO_DICT
+ mgr_utils.mock_the_extension_manager(driver="fake_ucs")
+ self.node = obj_utils.create_test_node(self.context,
+ driver='fake_ucs',
+ driver_info=driver_info)
+ CONF.set_override('max_retry', 2, 'cisco_ucs')
+ CONF.set_override('action_interval', 0, 'cisco_ucs')
+ self.interface = ucs_power.Power()
+
+ def test_get_properties(self):
+ expected = ucs_helper.COMMON_PROPERTIES
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertEqual(expected, task.driver.get_properties())
+
+ @mock.patch.object(ucs_helper, 'parse_driver_info',
+ spec_set=True, autospec=True)
+ def test_validate(self, mock_parse_driver_info):
+ mock_parse_driver_info.return_value = {}
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.interface.validate(task)
+ mock_parse_driver_info.assert_called_once_with(task.node)
+
+ @mock.patch.object(ucs_helper, 'parse_driver_info',
+ spec_set=True, autospec=True)
+ def test_validate_fail(self, mock_parse_driver_info):
+ side_effect = iter([exception.InvalidParameterValue('Invalid Input')])
+ mock_parse_driver_info.side_effect = side_effect
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.interface.validate,
+ task)
+ mock_parse_driver_info.assert_called_once_with(task.node)
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_get_power_state_up(self, mock_power_helper, mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_power = mock_power_helper.return_value
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_power.get_power_state.return_value = 'up'
+ self.assertEqual(states.POWER_ON,
+ self.interface.get_power_state(task))
+ mock_power.get_power_state.assert_called_once_with()
+ mock_power.get_power_state.reset_mock()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_get_power_state_down(self, mock_power_helper, mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_power = mock_power_helper.return_value
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_power.get_power_state.return_value = 'down'
+ self.assertEqual(states.POWER_OFF,
+ self.interface.get_power_state(task))
+ mock_power.get_power_state.assert_called_once_with()
+ mock_power.get_power_state.reset_mock()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_get_power_state_error(self, mock_power_helper, mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_power = mock_power_helper.return_value
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ mock_power.get_power_state.return_value = states.ERROR
+ self.assertEqual(states.ERROR,
+ self.interface.get_power_state(task))
+ mock_power.get_power_state.assert_called_once_with()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_get_power_state_fail(self,
+ mock_ucs_power,
+ mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ power = mock_ucs_power.return_value
+ power.get_power_state.side_effect = (
+ ucs_error.UcsOperationError(operation='getting power state',
+ error='failed'))
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=True) as task:
+ self.assertRaises(exception.UcsOperationError,
+ self.interface.get_power_state,
+ task)
+ power.get_power_state.assert_called_with()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_set_power_state(self, mock_power_helper, mock__wait, mock_helper):
+ target_state = states.POWER_ON
+ mock_power = mock_power_helper.return_value
+ mock_power.get_power_state.side_effect = ['down', 'up']
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock__wait.return_value = target_state
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertIsNone(self.interface.set_power_state(task,
+ target_state))
+
+ mock_power.set_power_state.assert_called_once_with('up')
+ mock_power.get_power_state.assert_called_once_with()
+ mock__wait.assert_called_once_with(target_state, mock_power)
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_set_power_state_fail(self, mock_power_helper, mock_helper):
+ mock_power = mock_power_helper.return_value
+ mock_power.set_power_state.side_effect = (
+ ucs_error.UcsOperationError(operation='setting power state',
+ error='failed'))
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.UcsOperationError,
+ self.interface.set_power_state,
+ task, states.POWER_OFF)
+ mock_power.set_power_state.assert_called_once_with('down')
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ def test_set_power_state_invalid_state(self, mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.InvalidParameterValue,
+ self.interface.set_power_state,
+ task, states.ERROR)
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test__wait_for_state_change_already_target_state(
+ self,
+ mock_ucs_power,
+ mock_helper):
+ mock_power = mock_ucs_power.return_value
+ target_state = states.POWER_ON
+ mock_power.get_power_state.return_value = 'up'
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ self.assertEqual(states.POWER_ON,
+ ucs_power._wait_for_state_change(
+ target_state, mock_power))
+ mock_power.get_power_state.assert_called_with()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test__wait_for_state_change_exceed_iterations(
+ self,
+ mock_power_helper,
+ mock_helper):
+ mock_power = mock_power_helper.return_value
+ target_state = states.POWER_ON
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_power.get_power_state.side_effect = (
+ ['down', 'down', 'down', 'down'])
+ self.assertEqual(states.ERROR,
+ ucs_power._wait_for_state_change(
+ target_state, mock_power)
+ )
+ mock_power.get_power_state.assert_called_with()
+ self.assertEqual(4, mock_power.get_power_state.call_count)
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_set_and_wait_for_state_change_fail(
+ self,
+ mock_power_helper,
+ mock__wait,
+ mock_helper):
+ target_state = states.POWER_ON
+ mock_power = mock_power_helper.return_value
+ mock_power.get_power_state.return_value = 'down'
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock__wait.return_value = states.POWER_OFF
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ self.interface.set_power_state,
+ task,
+ target_state)
+
+ mock_power.set_power_state.assert_called_once_with('up')
+ mock_power.get_power_state.assert_called_once_with()
+ mock__wait.assert_called_once_with(target_state, mock_power)
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_reboot(self, mock_power_helper, mock__wait, mock_helper):
+ mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_power = mock_power_helper.return_value
+ mock__wait.return_value = states.POWER_ON
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertIsNone(self.interface.reboot(task))
+ mock_power.reboot.assert_called_once_with()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_reboot_fail(self, mock_power_helper,
+ mock_ucs_helper):
+ mock_ucs_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_power = mock_power_helper.return_value
+ mock_power.reboot.side_effect = (
+ ucs_error.UcsOperationError(operation='rebooting', error='failed'))
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.UcsOperationError,
+ self.interface.reboot,
+ task
+ )
+ mock_power.reboot.assert_called_once_with()
+
+ @mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
+ spec_set=True, autospec=True)
+ @mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
+ spec_set=True, autospec=True)
+ def test_reboot__wait_state_change_fail(self, mock_power_helper,
+ mock__wait,
+ mock_ucs_helper):
+ mock_ucs_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
+ mock_power = mock_power_helper.return_value
+ mock__wait.return_value = states.ERROR
+ with task_manager.acquire(self.context, self.node.uuid,
+ shared=False) as task:
+ self.assertRaises(exception.PowerStateFailure,
+ self.interface.reboot,
+ task)
+ mock_power.reboot.assert_called_once_with()
diff --git a/ironic/tests/unit/fake_policy.py b/ironic/tests/unit/fake_policy.py
new file mode 100644
index 000000000..66f600845
--- /dev/null
+++ b/ironic/tests/unit/fake_policy.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+policy_data = """
+{
+ "admin_api": "role:admin or role:administrator",
+ "public_api": "is_public_api:True",
+ "trusted_call": "rule:admin_api or rule:public_api",
+ "default": "rule:trusted_call",
+ "show_password": "tenant:admin"
+}
+"""
+
+
+policy_data_compat_juno = """
+{
+ "admin": "role:admin or role:administrator",
+ "admin_api": "is_admin:True",
+ "default": "rule:admin_api"
+}
+"""
+
+
+def get_policy_data(compat):
+ if not compat:
+ return policy_data
+ elif compat == 'juno':
+ return policy_data_compat_juno
+ else:
+ raise Exception('Policy data for %s not available' % compat)
diff --git a/ironic/tests/unit/objects/__init__.py b/ironic/tests/unit/objects/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ironic/tests/unit/objects/__init__.py
diff --git a/ironic/tests/unit/objects/test_chassis.py b/ironic/tests/unit/objects/test_chassis.py
new file mode 100644
index 000000000..d4859a571
--- /dev/null
+++ b/ironic/tests/unit/objects/test_chassis.py
@@ -0,0 +1,104 @@
+# coding=utf-8
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import mock
+from oslo_utils import uuidutils
+from testtools.matchers import HasLength
+
+from ironic.common import exception
+from ironic import objects
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils
+
+
+class TestChassisObject(base.DbTestCase):
+
+ def setUp(self):
+ super(TestChassisObject, self).setUp()
+ self.fake_chassis = utils.get_test_chassis()
+
+ def test_get_by_id(self):
+ chassis_id = self.fake_chassis['id']
+ with mock.patch.object(self.dbapi, 'get_chassis_by_id',
+ autospec=True) as mock_get_chassis:
+ mock_get_chassis.return_value = self.fake_chassis
+
+ chassis = objects.Chassis.get(self.context, chassis_id)
+
+ mock_get_chassis.assert_called_once_with(chassis_id)
+ self.assertEqual(self.context, chassis._context)
+
+ def test_get_by_uuid(self):
+ uuid = self.fake_chassis['uuid']
+ with mock.patch.object(self.dbapi, 'get_chassis_by_uuid',
+ autospec=True) as mock_get_chassis:
+ mock_get_chassis.return_value = self.fake_chassis
+
+ chassis = objects.Chassis.get(self.context, uuid)
+
+ mock_get_chassis.assert_called_once_with(uuid)
+ self.assertEqual(self.context, chassis._context)
+
+ def test_get_bad_id_and_uuid(self):
+ self.assertRaises(exception.InvalidIdentity,
+ objects.Chassis.get, self.context, 'not-a-uuid')
+
+ def test_save(self):
+ uuid = self.fake_chassis['uuid']
+ extra = {"test": 123}
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+ with mock.patch.object(self.dbapi, 'get_chassis_by_uuid',
+ autospec=True) as mock_get_chassis:
+ mock_get_chassis.return_value = self.fake_chassis
+ with mock.patch.object(self.dbapi, 'update_chassis',
+ autospec=True) as mock_update_chassis:
+ mock_update_chassis.return_value = (
+ utils.get_test_chassis(extra=extra, updated_at=test_time))
+ c = objects.Chassis.get_by_uuid(self.context, uuid)
+ c.extra = extra
+ c.save()
+
+ mock_get_chassis.assert_called_once_with(uuid)
+ mock_update_chassis.assert_called_once_with(
+ uuid, {'extra': {"test": 123}})
+ self.assertEqual(self.context, c._context)
+ res_updated_at = (c.updated_at).replace(tzinfo=None)
+ self.assertEqual(test_time, res_updated_at)
+
+ def test_refresh(self):
+ uuid = self.fake_chassis['uuid']
+ new_uuid = uuidutils.generate_uuid()
+ returns = [dict(self.fake_chassis, uuid=uuid),
+ dict(self.fake_chassis, uuid=new_uuid)]
+ expected = [mock.call(uuid), mock.call(uuid)]
+ with mock.patch.object(self.dbapi, 'get_chassis_by_uuid',
+ side_effect=returns,
+ autospec=True) as mock_get_chassis:
+ c = objects.Chassis.get_by_uuid(self.context, uuid)
+ self.assertEqual(uuid, c.uuid)
+ c.refresh()
+ self.assertEqual(new_uuid, c.uuid)
+ self.assertEqual(expected, mock_get_chassis.call_args_list)
+ self.assertEqual(self.context, c._context)
+
+ def test_list(self):
+ with mock.patch.object(self.dbapi, 'get_chassis_list',
+ autospec=True) as mock_get_list:
+ mock_get_list.return_value = [self.fake_chassis]
+ chassis = objects.Chassis.list(self.context)
+ self.assertThat(chassis, HasLength(1))
+ self.assertIsInstance(chassis[0], objects.Chassis)
+ self.assertEqual(self.context, chassis[0]._context)
diff --git a/ironic/tests/unit/objects/test_conductor.py b/ironic/tests/unit/objects/test_conductor.py
new file mode 100644
index 000000000..6cb8e6bc0
--- /dev/null
+++ b/ironic/tests/unit/objects/test_conductor.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+#
+# Copyright 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import mock
+from oslo_utils import timeutils
+
+from ironic import objects
+from ironic.objects import fields
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils
+
+
+class TestConductorObject(base.DbTestCase):
+
+ def setUp(self):
+ super(TestConductorObject, self).setUp()
+ self.fake_conductor = (
+ utils.get_test_conductor(updated_at=timeutils.utcnow()))
+
+ def test_load(self):
+ host = self.fake_conductor['hostname']
+ with mock.patch.object(self.dbapi, 'get_conductor',
+ autospec=True) as mock_get_cdr:
+ mock_get_cdr.return_value = self.fake_conductor
+ objects.Conductor.get_by_hostname(self.context, host)
+ mock_get_cdr.assert_called_once_with(host)
+
+ def test_save(self):
+ host = self.fake_conductor['hostname']
+ with mock.patch.object(self.dbapi, 'get_conductor',
+ autospec=True) as mock_get_cdr:
+ mock_get_cdr.return_value = self.fake_conductor
+ c = objects.Conductor.get_by_hostname(self.context, host)
+ c.hostname = 'another-hostname'
+ self.assertRaises(NotImplementedError,
+ c.save, self.context)
+ mock_get_cdr.assert_called_once_with(host)
+
+ def test_touch(self):
+ host = self.fake_conductor['hostname']
+ with mock.patch.object(self.dbapi, 'get_conductor',
+ autospec=True) as mock_get_cdr:
+ with mock.patch.object(self.dbapi, 'touch_conductor',
+ autospec=True) as mock_touch_cdr:
+ mock_get_cdr.return_value = self.fake_conductor
+ c = objects.Conductor.get_by_hostname(self.context, host)
+ c.touch(self.context)
+ mock_get_cdr.assert_called_once_with(host)
+ mock_touch_cdr.assert_called_once_with(host)
+
+ def test_refresh(self):
+ host = self.fake_conductor['hostname']
+ t0 = self.fake_conductor['updated_at']
+ t1 = t0 + datetime.timedelta(seconds=10)
+ returns = [dict(self.fake_conductor, updated_at=t0),
+ dict(self.fake_conductor, updated_at=t1)]
+ expected = [mock.call(host), mock.call(host)]
+ with mock.patch.object(self.dbapi, 'get_conductor',
+ side_effect=returns,
+ autospec=True) as mock_get_cdr:
+ c = objects.Conductor.get_by_hostname(self.context, host)
+ # ensure timestamps have tzinfo
+ datetime_field = fields.DateTimeField()
+ self.assertEqual(
+ datetime_field.coerce(datetime_field, 'updated_at', t0),
+ c.updated_at)
+ c.refresh()
+ self.assertEqual(
+ datetime_field.coerce(datetime_field, 'updated_at', t1),
+ c.updated_at)
+ self.assertEqual(expected, mock_get_cdr.call_args_list)
+ self.assertEqual(self.context, c._context)
diff --git a/ironic/tests/unit/objects/test_fields.py b/ironic/tests/unit/objects/test_fields.py
new file mode 100644
index 000000000..53817c42f
--- /dev/null
+++ b/ironic/tests/unit/objects/test_fields.py
@@ -0,0 +1,63 @@
+# Copyright 2015 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+from ironic.common import exception
+from ironic.objects import fields
+from ironic.tests.unit import base as test_base
+
+
+class TestMacAddressField(test_base.TestCase):
+
+ def setUp(self):
+ super(TestMacAddressField, self).setUp()
+ self.field = fields.MACAddressField()
+
+ def test_coerce(self):
+ values = {'aa:bb:cc:dd:ee:ff': 'aa:bb:cc:dd:ee:ff',
+ 'AA:BB:CC:DD:EE:FF': 'aa:bb:cc:dd:ee:ff',
+ 'AA:bb:cc:11:22:33': 'aa:bb:cc:11:22:33'}
+ for k in values:
+ self.assertEqual(values[k], self.field.coerce('obj', 'attr', k))
+
+ def test_coerce_bad_values(self):
+ for v in ('invalid-mac', 'aa-bb-cc-dd-ee-ff'):
+ self.assertRaises(exception.InvalidMAC,
+ self.field.coerce, 'obj', 'attr', v)
+
+
+class TestFlexibleDictField(test_base.TestCase):
+
+ def setUp(self):
+ super(TestFlexibleDictField, self).setUp()
+ self.field = fields.FlexibleDictField()
+
+ def test_coerce(self):
+ d = {'foo_1': 'bar', 'foo_2': 2, 'foo_3': [], 'foo_4': {}}
+ self.assertEqual(d, self.field.coerce('obj', 'attr', d))
+ self.assertEqual({'foo': 'bar'},
+ self.field.coerce('obj', 'attr', '{"foo": "bar"}'))
+
+ def test_coerce_bad_values(self):
+ self.assertRaises(TypeError, self.field.coerce, 'obj', 'attr', 123)
+ self.assertRaises(TypeError, self.field.coerce, 'obj', 'attr', True)
+
+ def test_coerce_nullable_translation(self):
+ # non-nullable
+ self.assertRaises(ValueError, self.field.coerce, 'obj', 'attr', None)
+
+ # nullable
+ self.field = fields.FlexibleDictField(nullable=True)
+ self.assertEqual({}, self.field.coerce('obj', 'attr', None))
diff --git a/ironic/tests/unit/objects/test_node.py b/ironic/tests/unit/objects/test_node.py
new file mode 100644
index 000000000..09ffc6eff
--- /dev/null
+++ b/ironic/tests/unit/objects/test_node.py
@@ -0,0 +1,151 @@
+# coding=utf-8
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import mock
+from testtools.matchers import HasLength
+
+from ironic.common import exception
+from ironic import objects
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils
+
+
+class TestNodeObject(base.DbTestCase):
+
+ def setUp(self):
+ super(TestNodeObject, self).setUp()
+ self.fake_node = utils.get_test_node()
+
+ def test_get_by_id(self):
+ node_id = self.fake_node['id']
+ with mock.patch.object(self.dbapi, 'get_node_by_id',
+ autospec=True) as mock_get_node:
+ mock_get_node.return_value = self.fake_node
+
+ node = objects.Node.get(self.context, node_id)
+
+ mock_get_node.assert_called_once_with(node_id)
+ self.assertEqual(self.context, node._context)
+
+ def test_get_by_uuid(self):
+ uuid = self.fake_node['uuid']
+ with mock.patch.object(self.dbapi, 'get_node_by_uuid',
+ autospec=True) as mock_get_node:
+ mock_get_node.return_value = self.fake_node
+
+ node = objects.Node.get(self.context, uuid)
+
+ mock_get_node.assert_called_once_with(uuid)
+ self.assertEqual(self.context, node._context)
+
+ def test_get_bad_id_and_uuid(self):
+ self.assertRaises(exception.InvalidIdentity,
+ objects.Node.get, self.context, 'not-a-uuid')
+
+ def test_save(self):
+ uuid = self.fake_node['uuid']
+ with mock.patch.object(self.dbapi, 'get_node_by_uuid',
+ autospec=True) as mock_get_node:
+ mock_get_node.return_value = self.fake_node
+ with mock.patch.object(self.dbapi, 'update_node',
+ autospec=True) as mock_update_node:
+
+ n = objects.Node.get(self.context, uuid)
+ self.assertEqual({"foo": "bar", "fake_password": "fakepass"},
+ n.driver_internal_info)
+ n.properties = {"fake": "property"}
+ n.driver = "fake-driver"
+ n.save()
+
+ mock_get_node.assert_called_once_with(uuid)
+ mock_update_node.assert_called_once_with(
+ uuid, {'properties': {"fake": "property"},
+ 'driver': 'fake-driver',
+ 'driver_internal_info': {}})
+ self.assertEqual(self.context, n._context)
+ self.assertEqual({}, n.driver_internal_info)
+
+ def test_refresh(self):
+ uuid = self.fake_node['uuid']
+ returns = [dict(self.fake_node, properties={"fake": "first"}),
+ dict(self.fake_node, properties={"fake": "second"})]
+ expected = [mock.call(uuid), mock.call(uuid)]
+ with mock.patch.object(self.dbapi, 'get_node_by_uuid',
+ side_effect=returns,
+ autospec=True) as mock_get_node:
+ n = objects.Node.get(self.context, uuid)
+ self.assertEqual({"fake": "first"}, n.properties)
+ n.refresh()
+ self.assertEqual({"fake": "second"}, n.properties)
+ self.assertEqual(expected, mock_get_node.call_args_list)
+ self.assertEqual(self.context, n._context)
+
+ def test_list(self):
+ with mock.patch.object(self.dbapi, 'get_node_list',
+ autospec=True) as mock_get_list:
+ mock_get_list.return_value = [self.fake_node]
+ nodes = objects.Node.list(self.context)
+ self.assertThat(nodes, HasLength(1))
+ self.assertIsInstance(nodes[0], objects.Node)
+ self.assertEqual(self.context, nodes[0]._context)
+
+ def test_reserve(self):
+ with mock.patch.object(self.dbapi, 'reserve_node',
+ autospec=True) as mock_reserve:
+ mock_reserve.return_value = self.fake_node
+ node_id = self.fake_node['id']
+ fake_tag = 'fake-tag'
+ node = objects.Node.reserve(self.context, fake_tag, node_id)
+ self.assertIsInstance(node, objects.Node)
+ mock_reserve.assert_called_once_with(fake_tag, node_id)
+ self.assertEqual(self.context, node._context)
+
+ def test_reserve_node_not_found(self):
+ with mock.patch.object(self.dbapi, 'reserve_node',
+ autospec=True) as mock_reserve:
+ node_id = 'non-existent'
+ mock_reserve.side_effect = iter(
+ [exception.NodeNotFound(node=node_id)])
+ self.assertRaises(exception.NodeNotFound,
+ objects.Node.reserve, self.context, 'fake-tag',
+ node_id)
+
+ def test_release(self):
+ with mock.patch.object(self.dbapi, 'release_node',
+ autospec=True) as mock_release:
+ node_id = self.fake_node['id']
+ fake_tag = 'fake-tag'
+ objects.Node.release(self.context, fake_tag, node_id)
+ mock_release.assert_called_once_with(fake_tag, node_id)
+
+ def test_release_node_not_found(self):
+ with mock.patch.object(self.dbapi, 'release_node',
+ autospec=True) as mock_release:
+ node_id = 'non-existent'
+ mock_release.side_effect = iter(
+ [exception.NodeNotFound(node=node_id)])
+ self.assertRaises(exception.NodeNotFound,
+ objects.Node.release, self.context,
+ 'fake-tag', node_id)
+
+ def test_touch_provisioning(self):
+ with mock.patch.object(self.dbapi, 'get_node_by_uuid',
+ autospec=True) as mock_get_node:
+ mock_get_node.return_value = self.fake_node
+ with mock.patch.object(self.dbapi, 'touch_node_provisioning',
+ autospec=True) as mock_touch:
+ node = objects.Node.get(self.context, self.fake_node['uuid'])
+ node.touch_provisioning()
+ mock_touch.assert_called_once_with(node.id)
diff --git a/ironic/tests/unit/objects/test_objects.py b/ironic/tests/unit/objects/test_objects.py
new file mode 100644
index 000000000..a30e9d959
--- /dev/null
+++ b/ironic/tests/unit/objects/test_objects.py
@@ -0,0 +1,487 @@
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import contextlib
+import datetime
+import gettext
+import iso8601
+
+import mock
+from oslo_context import context
+from oslo_versionedobjects import base as object_base
+from oslo_versionedobjects import exception as object_exception
+import six
+
+from ironic.objects import base
+from ironic.objects import fields
+from ironic.tests.unit import base as test_base
+
+gettext.install('ironic')
+
+
+@base.IronicObjectRegistry.register
+class MyObj(base.IronicObject, object_base.VersionedObjectDictCompat):
+ VERSION = '1.5'
+
+ fields = {'foo': fields.IntegerField(),
+ 'bar': fields.StringField(),
+ 'missing': fields.StringField(),
+ }
+
+ def obj_load_attr(self, attrname):
+ setattr(self, attrname, 'loaded!')
+
+ @object_base.remotable_classmethod
+ def query(cls, context):
+ obj = cls(context)
+ obj.foo = 1
+ obj.bar = 'bar'
+ obj.obj_reset_changes()
+ return obj
+
+ @object_base.remotable
+ def marco(self, context=None):
+ return 'polo'
+
+ @object_base.remotable
+ def update_test(self, context=None):
+ if context and context.tenant == 'alternate':
+ self.bar = 'alternate-context'
+ else:
+ self.bar = 'updated'
+
+ @object_base.remotable
+ def save(self, context=None):
+ self.obj_reset_changes()
+
+ @object_base.remotable
+ def refresh(self, context=None):
+ self.foo = 321
+ self.bar = 'refreshed'
+ self.obj_reset_changes()
+
+ @object_base.remotable
+ def modify_save_modify(self, context=None):
+ self.bar = 'meow'
+ self.save()
+ self.foo = 42
+
+
+class MyObj2(object):
+ @classmethod
+ def obj_name(cls):
+ return 'MyObj'
+
+ @object_base.remotable_classmethod
+ def get(cls, *args, **kwargs):
+ pass
+
+
+@base.IronicObjectRegistry.register_if(False)
+class TestSubclassedObject(MyObj):
+ fields = {'new_field': fields.StringField()}
+
+
+class _BaseTestCase(test_base.TestCase):
+ def setUp(self):
+ super(_BaseTestCase, self).setUp()
+ self.remote_object_calls = list()
+
+
+class _LocalTest(_BaseTestCase):
+ def setUp(self):
+ super(_LocalTest, self).setUp()
+ # Just in case
+ base.IronicObject.indirection_api = None
+
+ def assertRemotes(self):
+ self.assertEqual([], self.remote_object_calls)
+
+
+@contextlib.contextmanager
+def things_temporarily_local():
+ # Temporarily go non-remote so the conductor handles
+ # this request directly
+ _api = base.IronicObject.indirection_api
+ base.IronicObject.indirection_api = None
+ yield
+ base.IronicObject.indirection_api = _api
+
+
+class _TestObject(object):
+ def test_hydration_type_error(self):
+ primitive = {'ironic_object.name': 'MyObj',
+ 'ironic_object.namespace': 'ironic',
+ 'ironic_object.version': '1.5',
+ 'ironic_object.data': {'foo': 'a'}}
+ self.assertRaises(ValueError, MyObj.obj_from_primitive, primitive)
+
+ def test_hydration(self):
+ primitive = {'ironic_object.name': 'MyObj',
+ 'ironic_object.namespace': 'ironic',
+ 'ironic_object.version': '1.5',
+ 'ironic_object.data': {'foo': 1}}
+ obj = MyObj.obj_from_primitive(primitive)
+ self.assertEqual(1, obj.foo)
+
+ def test_hydration_bad_ns(self):
+ primitive = {'ironic_object.name': 'MyObj',
+ 'ironic_object.namespace': 'foo',
+ 'ironic_object.version': '1.5',
+ 'ironic_object.data': {'foo': 1}}
+ self.assertRaises(object_exception.UnsupportedObjectError,
+ MyObj.obj_from_primitive, primitive)
+
+ def test_dehydration(self):
+ expected = {'ironic_object.name': 'MyObj',
+ 'ironic_object.namespace': 'ironic',
+ 'ironic_object.version': '1.5',
+ 'ironic_object.data': {'foo': 1}}
+ obj = MyObj(self.context)
+ obj.foo = 1
+ obj.obj_reset_changes()
+ self.assertEqual(expected, obj.obj_to_primitive())
+
+ def test_get_updates(self):
+ obj = MyObj(self.context)
+ self.assertEqual({}, obj.obj_get_changes())
+ obj.foo = 123
+ self.assertEqual({'foo': 123}, obj.obj_get_changes())
+ obj.bar = 'test'
+ self.assertEqual({'foo': 123, 'bar': 'test'}, obj.obj_get_changes())
+ obj.obj_reset_changes()
+ self.assertEqual({}, obj.obj_get_changes())
+
+ def test_object_property(self):
+ obj = MyObj(self.context, foo=1)
+ self.assertEqual(1, obj.foo)
+
+ def test_object_property_type_error(self):
+ obj = MyObj(self.context)
+
+ def fail():
+ obj.foo = 'a'
+ self.assertRaises(ValueError, fail)
+
+ def test_load(self):
+ obj = MyObj(self.context)
+ self.assertEqual('loaded!', obj.bar)
+
+ def test_load_in_base(self):
+ @base.IronicObjectRegistry.register_if(False)
+ class Foo(base.IronicObject, object_base.VersionedObjectDictCompat):
+ fields = {'foobar': fields.IntegerField()}
+ obj = Foo(self.context)
+
+ self.assertRaisesRegexp(
+ NotImplementedError, "Cannot load 'foobar' in the base class",
+ getattr, obj, 'foobar')
+
+ def test_loaded_in_primitive(self):
+ obj = MyObj(self.context)
+ obj.foo = 1
+ obj.obj_reset_changes()
+ self.assertEqual('loaded!', obj.bar)
+ expected = {'ironic_object.name': 'MyObj',
+ 'ironic_object.namespace': 'ironic',
+ 'ironic_object.version': '1.5',
+ 'ironic_object.changes': ['bar'],
+ 'ironic_object.data': {'foo': 1,
+ 'bar': 'loaded!'}}
+ self.assertEqual(expected, obj.obj_to_primitive())
+
+ def test_changes_in_primitive(self):
+ obj = MyObj(self.context)
+ obj.foo = 123
+ self.assertEqual(set(['foo']), obj.obj_what_changed())
+ primitive = obj.obj_to_primitive()
+        self.assertIn('ironic_object.changes', primitive)
+ obj2 = MyObj.obj_from_primitive(primitive)
+ self.assertEqual(set(['foo']), obj2.obj_what_changed())
+ obj2.obj_reset_changes()
+ self.assertEqual(set(), obj2.obj_what_changed())
+
+ def test_unknown_objtype(self):
+ self.assertRaises(object_exception.UnsupportedObjectError,
+ base.IronicObject.obj_class_from_name, 'foo', '1.0')
+
+ def test_with_alternate_context(self):
+ ctxt1 = context.RequestContext('foo', 'foo')
+ ctxt2 = context.RequestContext('bar', tenant='alternate')
+ obj = MyObj.query(ctxt1)
+ obj.update_test(ctxt2)
+ self.assertEqual('alternate-context', obj.bar)
+ self.assertRemotes()
+
+ def test_orphaned_object(self):
+ obj = MyObj.query(self.context)
+ obj._context = None
+ self.assertRaises(object_exception.OrphanedObjectError,
+ obj.update_test)
+ self.assertRemotes()
+
+ def test_changed_1(self):
+ obj = MyObj.query(self.context)
+ obj.foo = 123
+ self.assertEqual(set(['foo']), obj.obj_what_changed())
+ obj.update_test(self.context)
+ self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed())
+ self.assertEqual(123, obj.foo)
+ self.assertRemotes()
+
+ def test_changed_2(self):
+ obj = MyObj.query(self.context)
+ obj.foo = 123
+ self.assertEqual(set(['foo']), obj.obj_what_changed())
+ obj.save()
+ self.assertEqual(set([]), obj.obj_what_changed())
+ self.assertEqual(123, obj.foo)
+ self.assertRemotes()
+
+ def test_changed_3(self):
+ obj = MyObj.query(self.context)
+ obj.foo = 123
+ self.assertEqual(set(['foo']), obj.obj_what_changed())
+ obj.refresh()
+ self.assertEqual(set([]), obj.obj_what_changed())
+ self.assertEqual(321, obj.foo)
+ self.assertEqual('refreshed', obj.bar)
+ self.assertRemotes()
+
+ def test_changed_4(self):
+ obj = MyObj.query(self.context)
+ obj.bar = 'something'
+ self.assertEqual(set(['bar']), obj.obj_what_changed())
+ obj.modify_save_modify(self.context)
+ self.assertEqual(set(['foo']), obj.obj_what_changed())
+ self.assertEqual(42, obj.foo)
+ self.assertEqual('meow', obj.bar)
+ self.assertRemotes()
+
+ def test_static_result(self):
+ obj = MyObj.query(self.context)
+ self.assertEqual('bar', obj.bar)
+ result = obj.marco()
+ self.assertEqual('polo', result)
+ self.assertRemotes()
+
+ def test_updates(self):
+ obj = MyObj.query(self.context)
+ self.assertEqual(1, obj.foo)
+ obj.update_test()
+ self.assertEqual('updated', obj.bar)
+ self.assertRemotes()
+
+ def test_base_attributes(self):
+ dt = datetime.datetime(1955, 11, 5, 0, 0, tzinfo=iso8601.iso8601.Utc())
+        datetime_field = fields.DateTimeField()
+ obj = MyObj(self.context)
+ obj.created_at = dt
+ obj.updated_at = dt
+ expected = {'ironic_object.name': 'MyObj',
+ 'ironic_object.namespace': 'ironic',
+ 'ironic_object.version': '1.5',
+ 'ironic_object.changes':
+ ['created_at', 'updated_at'],
+ 'ironic_object.data':
+                        {'created_at': datetime_field.stringify(dt),
+                         'updated_at': datetime_field.stringify(dt),
+ }
+ }
+ actual = obj.obj_to_primitive()
+ # ironic_object.changes is built from a set and order is undefined
+ self.assertEqual(sorted(expected['ironic_object.changes']),
+ sorted(actual['ironic_object.changes']))
+ del expected['ironic_object.changes'], actual['ironic_object.changes']
+ self.assertEqual(expected, actual)
+
+ def test_contains(self):
+ obj = MyObj(self.context)
+        self.assertNotIn('foo', obj)
+        obj.foo = 1
+        self.assertIn('foo', obj)
+        self.assertNotIn('does_not_exist', obj)
+
+ def test_obj_attr_is_set(self):
+ obj = MyObj(self.context, foo=1)
+ self.assertTrue(obj.obj_attr_is_set('foo'))
+ self.assertFalse(obj.obj_attr_is_set('bar'))
+ self.assertRaises(AttributeError, obj.obj_attr_is_set, 'bang')
+
+ def test_get(self):
+ obj = MyObj(self.context, foo=1)
+ # Foo has value, should not get the default
+        self.assertEqual(1, obj.get('foo', 2))
+        # Foo has value, should return the value without error
+        self.assertEqual(1, obj.get('foo'))
+        # Bar is not loaded, so we should get the default
+        self.assertEqual('not-loaded', obj.get('bar', 'not-loaded'))
+        # Bar without a default should lazy-load
+        self.assertEqual('loaded!', obj.get('bar'))
+        # Bar now has a default, but loaded value should be returned
+        self.assertEqual('loaded!', obj.get('bar', 'not-loaded'))
+ # Invalid attribute should raise AttributeError
+ self.assertRaises(AttributeError, obj.get, 'nothing')
+ # ...even with a default
+ self.assertRaises(AttributeError, obj.get, 'nothing', 3)
+
+ def test_object_inheritance(self):
+ base_fields = list(base.IronicObject.fields)
+ myobj_fields = ['foo', 'bar', 'missing'] + base_fields
+ myobj3_fields = ['new_field']
+ self.assertTrue(issubclass(TestSubclassedObject, MyObj))
+ self.assertEqual(len(myobj_fields), len(MyObj.fields))
+ self.assertEqual(set(myobj_fields), set(MyObj.fields.keys()))
+ self.assertEqual(len(myobj_fields) + len(myobj3_fields),
+ len(TestSubclassedObject.fields))
+ self.assertEqual(set(myobj_fields) | set(myobj3_fields),
+ set(TestSubclassedObject.fields.keys()))
+
+ def test_get_changes(self):
+ obj = MyObj(self.context)
+ self.assertEqual({}, obj.obj_get_changes())
+ obj.foo = 123
+ self.assertEqual({'foo': 123}, obj.obj_get_changes())
+ obj.bar = 'test'
+ self.assertEqual({'foo': 123, 'bar': 'test'}, obj.obj_get_changes())
+ obj.obj_reset_changes()
+ self.assertEqual({}, obj.obj_get_changes())
+
+ def test_obj_fields(self):
+ @base.IronicObjectRegistry.register_if(False)
+ class TestObj(base.IronicObject,
+ object_base.VersionedObjectDictCompat):
+ fields = {'foo': fields.IntegerField()}
+ obj_extra_fields = ['bar']
+
+ @property
+ def bar(self):
+ return 'this is bar'
+
+ obj = TestObj(self.context)
+ self.assertEqual(set(['created_at', 'updated_at', 'foo', 'bar']),
+ set(obj.obj_fields))
+
+ def test_refresh_object(self):
+ @base.IronicObjectRegistry.register_if(False)
+ class TestObj(base.IronicObject,
+ object_base.VersionedObjectDictCompat):
+ fields = {'foo': fields.IntegerField(),
+ 'bar': fields.StringField()}
+
+ obj = TestObj(self.context)
+ current_obj = TestObj(self.context)
+ obj.foo = 10
+ obj.bar = 'obj.bar'
+ current_obj.foo = 2
+ current_obj.bar = 'current.bar'
+ obj.obj_refresh(current_obj)
+        self.assertEqual(2, obj.foo)
+        self.assertEqual('current.bar', obj.bar)
+
+ def test_obj_constructor(self):
+ obj = MyObj(self.context, foo=123, bar='abc')
+ self.assertEqual(123, obj.foo)
+ self.assertEqual('abc', obj.bar)
+ self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed())
+
+ def test_assign_value_without_DictCompat(self):
+ class TestObj(base.IronicObject):
+ fields = {'foo': fields.IntegerField(),
+ 'bar': fields.StringField()}
+ obj = TestObj(self.context)
+ obj.foo = 10
+ err_message = ''
+ try:
+ obj['bar'] = 'value'
+ except TypeError as e:
+ err_message = six.text_type(e)
+ finally:
+ self.assertIn("'TestObj' object does not support item assignment",
+ err_message)
+
+
+class TestObject(_LocalTest, _TestObject):
+ pass
+
+
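+# Round-trips objects through IronicObjectSerializer and checks the version
+# backport path.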
+class TestObjectSerializer(test_base.TestCase):
+
+ def test_object_serialization(self):
+ ser = base.IronicObjectSerializer()
+ obj = MyObj(self.context)
+ primitive = ser.serialize_entity(self.context, obj)
+        self.assertIn('ironic_object.name', primitive)
+ obj2 = ser.deserialize_entity(self.context, primitive)
+ self.assertIsInstance(obj2, MyObj)
+ self.assertEqual(self.context, obj2._context)
+
+ def test_object_serialization_iterables(self):
+ ser = base.IronicObjectSerializer()
+ obj = MyObj(self.context)
+ for iterable in (list, tuple, set):
+ thing = iterable([obj])
+ primitive = ser.serialize_entity(self.context, thing)
+ self.assertEqual(1, len(primitive))
+ for item in primitive:
+                self.assertNotIsInstance(item, base.IronicObject)
+ thing2 = ser.deserialize_entity(self.context, primitive)
+ self.assertEqual(1, len(thing2))
+ for item in thing2:
+ self.assertIsInstance(item, MyObj)
+
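+    # Helper: serialize an object whose advertised version differs from the
+    # locally registered one and check whether deserialization asks the
+    # conductor (via indirection_api) to backport it.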
+ @mock.patch('ironic.objects.base.IronicObject.indirection_api')
+ def _test_deserialize_entity_newer(self, obj_version, backported_to,
+ mock_indirection_api,
+ my_version='1.6'):
+ ser = base.IronicObjectSerializer()
+        mock_indirection_api.object_backport_versions.return_value = (
+            'backported')
+
+ @base.IronicObjectRegistry.register
+ class MyTestObj(MyObj):
+ VERSION = my_version
+
+ obj = MyTestObj(self.context)
+ obj.VERSION = obj_version
+ primitive = obj.obj_to_primitive()
+ result = ser.deserialize_entity(self.context, primitive)
+ if backported_to is None:
+ self.assertFalse(
+ mock_indirection_api.object_backport_versions.called)
+ else:
+ self.assertEqual('backported', result)
+ versions = object_base.obj_tree_get_versions('MyTestObj')
+ mock_indirection_api.object_backport_versions.assert_called_with(
+ self.context, primitive, versions)
+
+ def test_deserialize_entity_newer_version_backports(self):
+ "Test object with unsupported (newer) version"
+ self._test_deserialize_entity_newer('1.25', '1.6')
+
+ def test_deserialize_entity_same_revision_does_not_backport(self):
+ "Test object with supported revision"
+ self._test_deserialize_entity_newer('1.6', None)
+
+ def test_deserialize_entity_newer_revision_does_not_backport_zero(self):
+ "Test object with supported revision"
+ self._test_deserialize_entity_newer('1.6.0', None)
+
+ def test_deserialize_entity_newer_revision_does_not_backport(self):
+ "Test object with supported (newer) revision"
+ self._test_deserialize_entity_newer('1.6.1', None)
+
+ def test_deserialize_entity_newer_version_passes_revision(self):
+ "Test object with unsupported (newer) version and revision"
+ self._test_deserialize_entity_newer('1.7', '1.6.1', my_version='1.6.1')
diff --git a/ironic/tests/unit/objects/test_port.py b/ironic/tests/unit/objects/test_port.py
new file mode 100644
index 000000000..445ca84b7
--- /dev/null
+++ b/ironic/tests/unit/objects/test_port.py
@@ -0,0 +1,114 @@
+# coding=utf-8
+#
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+
+import mock
+from testtools.matchers import HasLength
+
+from ironic.common import exception
+from ironic import objects
+from ironic.tests.unit.db import base
+from ironic.tests.unit.db import utils
+
+
+class TestPortObject(base.DbTestCase):
+
+ def setUp(self):
+ super(TestPortObject, self).setUp()
+ self.fake_port = utils.get_test_port()
+
+ def test_get_by_id(self):
+ port_id = self.fake_port['id']
+ with mock.patch.object(self.dbapi, 'get_port_by_id',
+ autospec=True) as mock_get_port:
+ mock_get_port.return_value = self.fake_port
+
+ port = objects.Port.get(self.context, port_id)
+
+ mock_get_port.assert_called_once_with(port_id)
+ self.assertEqual(self.context, port._context)
+
+ def test_get_by_uuid(self):
+ uuid = self.fake_port['uuid']
+ with mock.patch.object(self.dbapi, 'get_port_by_uuid',
+ autospec=True) as mock_get_port:
+ mock_get_port.return_value = self.fake_port
+
+ port = objects.Port.get(self.context, uuid)
+
+ mock_get_port.assert_called_once_with(uuid)
+ self.assertEqual(self.context, port._context)
+
+ def test_get_by_address(self):
+ address = self.fake_port['address']
+ with mock.patch.object(self.dbapi, 'get_port_by_address',
+ autospec=True) as mock_get_port:
+ mock_get_port.return_value = self.fake_port
+
+ port = objects.Port.get(self.context, address)
+
+ mock_get_port.assert_called_once_with(address)
+ self.assertEqual(self.context, port._context)
+
+ def test_get_bad_id_and_uuid_and_address(self):
+ self.assertRaises(exception.InvalidIdentity,
+ objects.Port.get, self.context, 'not-a-uuid')
+
+ def test_save(self):
+ uuid = self.fake_port['uuid']
+ address = "b2:54:00:cf:2d:40"
+ test_time = datetime.datetime(2000, 1, 1, 0, 0)
+ with mock.patch.object(self.dbapi, 'get_port_by_uuid',
+ autospec=True) as mock_get_port:
+ mock_get_port.return_value = self.fake_port
+ with mock.patch.object(self.dbapi, 'update_port',
+ autospec=True) as mock_update_port:
+ mock_update_port.return_value = (
+ utils.get_test_port(address=address, updated_at=test_time))
+ p = objects.Port.get_by_uuid(self.context, uuid)
+ p.address = address
+ p.save()
+
+ mock_get_port.assert_called_once_with(uuid)
+ mock_update_port.assert_called_once_with(
+ uuid, {'address': "b2:54:00:cf:2d:40"})
+ self.assertEqual(self.context, p._context)
+ res_updated_at = (p.updated_at).replace(tzinfo=None)
+ self.assertEqual(test_time, res_updated_at)
+
+ def test_refresh(self):
+ uuid = self.fake_port['uuid']
+ returns = [self.fake_port,
+ utils.get_test_port(address="c3:54:00:cf:2d:40")]
+ expected = [mock.call(uuid), mock.call(uuid)]
+ with mock.patch.object(self.dbapi, 'get_port_by_uuid',
+ side_effect=returns,
+ autospec=True) as mock_get_port:
+ p = objects.Port.get_by_uuid(self.context, uuid)
+ self.assertEqual("52:54:00:cf:2d:31", p.address)
+ p.refresh()
+ self.assertEqual("c3:54:00:cf:2d:40", p.address)
+
+ self.assertEqual(expected, mock_get_port.call_args_list)
+ self.assertEqual(self.context, p._context)
+
+ def test_list(self):
+ with mock.patch.object(self.dbapi, 'get_port_list',
+ autospec=True) as mock_get_list:
+ mock_get_list.return_value = [self.fake_port]
+ ports = objects.Port.list(self.context)
+ self.assertThat(ports, HasLength(1))
+ self.assertIsInstance(ports[0], objects.Port)
+ self.assertEqual(self.context, ports[0]._context)
diff --git a/ironic/tests/unit/objects/utils.py b/ironic/tests/unit/objects/utils.py
new file mode 100644
index 000000000..79577f924
--- /dev/null
+++ b/ironic/tests/unit/objects/utils.py
@@ -0,0 +1,99 @@
+# Copyright 2014 Rackspace Hosting
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""Ironic object test utilities."""
+
+from ironic import objects
+from ironic.tests.unit.db import utils as db_utils
+
+
+def get_test_node(ctxt, **kw):
+ """Return a Node object with appropriate attributes.
+
+ NOTE: The object leaves the attributes marked as changed, such
+ that a create() could be used to commit it to the DB.
+ """
+ db_node = db_utils.get_test_node(**kw)
+ # Let DB generate ID if it isn't specified explicitly
+ if 'id' not in kw:
+ del db_node['id']
+ node = objects.Node(ctxt)
+ for key in db_node:
+ setattr(node, key, db_node[key])
+ return node
+
+
+def create_test_node(ctxt, **kw):
+ """Create and return a test node object.
+
+ Create a node in the DB and return a Node object with appropriate
+ attributes.
+ """
+ node = get_test_node(ctxt, **kw)
+ node.create()
+ return node
+
+
+def get_test_port(ctxt, **kw):
+ """Return a Port object with appropriate attributes.
+
+ NOTE: The object leaves the attributes marked as changed, such
+ that a create() could be used to commit it to the DB.
+ """
+ db_port = db_utils.get_test_port(**kw)
+ # Let DB generate ID if it isn't specified explicitly
+ if 'id' not in kw:
+ del db_port['id']
+ port = objects.Port(ctxt)
+ for key in db_port:
+ setattr(port, key, db_port[key])
+ return port
+
+
+def create_test_port(ctxt, **kw):
+ """Create and return a test port object.
+
+ Create a port in the DB and return a Port object with appropriate
+ attributes.
+ """
+ port = get_test_port(ctxt, **kw)
+ port.create()
+ return port
+
+
+def get_test_chassis(ctxt, **kw):
+ """Return a Chassis object with appropriate attributes.
+
+ NOTE: The object leaves the attributes marked as changed, such
+ that a create() could be used to commit it to the DB.
+ """
+ db_chassis = db_utils.get_test_chassis(**kw)
+ # Let DB generate ID if it isn't specified explicitly
+ if 'id' not in kw:
+ del db_chassis['id']
+ chassis = objects.Chassis(ctxt)
+ for key in db_chassis:
+ setattr(chassis, key, db_chassis[key])
+ return chassis
+
+
+def create_test_chassis(ctxt, **kw):
+ """Create and return a test chassis object.
+
+ Create a chassis in the DB and return a Chassis object with appropriate
+ attributes.
+ """
+ chassis = get_test_chassis(ctxt, **kw)
+ chassis.create()
+ return chassis
diff --git a/ironic/tests/unit/policy_fixture.py b/ironic/tests/unit/policy_fixture.py
new file mode 100644
index 000000000..7f3f48ac9
--- /dev/null
+++ b/ironic/tests/unit/policy_fixture.py
@@ -0,0 +1,41 @@
+# Copyright 2012 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+import fixtures
+from oslo_config import cfg
+from oslo_policy import opts as policy_opts
+
+from ironic.common import policy as ironic_policy
+from ironic.tests.unit import fake_policy
+
+CONF = cfg.CONF
+
+
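+# Writes the fake policy rules to a temporary policy.json and points
+# oslo.policy at it for the duration of a test.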
+class PolicyFixture(fixtures.Fixture):
+ def __init__(self, compat=None):
+ self.compat = compat
+
+ def setUp(self):
+ super(PolicyFixture, self).setUp()
+ self.policy_dir = self.useFixture(fixtures.TempDir())
+ self.policy_file_name = os.path.join(self.policy_dir.path,
+ 'policy.json')
+ with open(self.policy_file_name, 'w') as policy_file:
+ policy_file.write(fake_policy.get_policy_data(self.compat))
+ policy_opts.set_defaults(CONF)
+ CONF.set_override('policy_file', self.policy_file_name, 'oslo_policy')
+ ironic_policy._ENFORCER = None
+ self.addCleanup(ironic_policy.get_enforcer().clear)
diff --git a/ironic/tests/unit/raid_constants.py b/ironic/tests/unit/raid_constants.py
new file mode 100644
index 000000000..c3db74f4d
--- /dev/null
+++ b/ironic/tests/unit/raid_constants.py
@@ -0,0 +1,298 @@
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# Different RAID configurations for unit tests in test_raid.py
+
+RAID_CONFIG_OKAY = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "volume_name": "my-volume",
+ "is_root_volume": true,
+ "share_physical_disks": false,
+ "disk_type": "ssd",
+ "interface_type": "sas",
+ "number_of_physical_disks": 2,
+ "controller": "Smart Array P822 in Slot 2",
+ "physical_disks": [
+ "5I:1:1",
+ "5I:1:2"
+ ]
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_NO_LOGICAL_DISKS = '''
+{
+ "logical_disks": []
+}
+'''
+
+RAID_CONFIG_NO_RAID_LEVEL = '''
+{
+ "logical_disks": [
+ {
+ "size_gb": 100
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_INVALID_RAID_LEVEL = '''
+{
+ "logical_disks": [
+ {
+ "size_gb": 100,
+ "raid_level": "foo"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_NO_SIZE_GB = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_INVALID_SIZE_GB = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": "abcd"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_MAX_SIZE_GB = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": "MAX"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_INVALID_IS_ROOT_VOL = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "is_root_volume": "True"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_MULTIPLE_IS_ROOT_VOL = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "is_root_volume": true
+ },
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "is_root_volume": true
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_INVALID_SHARE_PHY_DISKS = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "share_physical_disks": "True"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_INVALID_DISK_TYPE = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "disk_type": "foo"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_INVALID_INT_TYPE = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "interface_type": "foo"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_INVALID_NUM_PHY_DISKS = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "number_of_physical_disks": "a"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_INVALID_PHY_DISKS = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "controller": "Smart Array P822 in Slot 2",
+ "physical_disks": "5I:1:1"
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_ADDITIONAL_PROP = '''
+{
+ "logical_disks": [
+ {
+ "raid_levelllllll": "1",
+ "size_gb": 100
+ }
+ ]
+}
+'''
+
+CUSTOM_SCHEMA_RAID_CONFIG = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "foo": "bar"
+ }
+ ]
+}
+'''
+
+CUSTOM_RAID_SCHEMA = '''
+{
+ "type": "object",
+ "properties": {
+ "logical_disks": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "raid_level": {
+ "type": "string",
+ "enum": [ "0", "1", "2", "5", "6", "1+0" ],
+ "description": "RAID level for the logical disk."
+ },
+ "size_gb": {
+ "type": "integer",
+ "minimum": 0,
+ "exclusiveMinimum": true,
+ "description": "Size (Integer) for the logical disk."
+ },
+ "foo": {
+ "type": "string",
+ "description": "property foo"
+ }
+ },
+ "required": ["raid_level", "size_gb"],
+ "additionalProperties": false
+ },
+ "minItems": 1
+ }
+ },
+ "required": ["logical_disks"],
+ "additionalProperties": false
+}
+'''
+
+CURRENT_RAID_CONFIG = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "controller": "Smart Array P822 in Slot 2",
+ "is_root_volume": true,
+ "physical_disks": [
+ "5I:1:1",
+ "5I:1:2"
+ ],
+ "root_device_hint": {
+ "wwn": "600508B100"
+ }
+ }
+ ]
+}
+'''
+
+RAID_CONFIG_MULTIPLE_ROOT = '''
+{
+ "logical_disks": [
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "controller": "Smart Array P822 in Slot 2",
+ "is_root_volume": true,
+ "physical_disks": [
+ "5I:1:1",
+ "5I:1:2"
+ ],
+ "root_device_hint": {
+ "wwn": "600508B100"
+ }
+ },
+ {
+ "raid_level": "1",
+ "size_gb": 100,
+ "controller": "Smart Array P822 in Slot 2",
+ "is_root_volume": true,
+ "physical_disks": [
+ "5I:1:1",
+ "5I:1:2"
+ ],
+ "root_device_hint": {
+ "wwn": "600508B100"
+ }
+ }
+ ]
+}
+'''
diff --git a/ironic/tests/unit/stubs.py b/ironic/tests/unit/stubs.py
new file mode 100644
index 000000000..d20c1fd8a
--- /dev/null
+++ b/ironic/tests/unit/stubs.py
@@ -0,0 +1,116 @@
+# Copyright (c) 2011 Citrix Systems, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from glanceclient import exc as glance_exc
+
+
+NOW_GLANCE_FORMAT = "2010-10-11T10:30:22"
+
+
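+# In-memory stand-in for a glance client: keeps FakeImage objects in a list
+# and mimics the images API (list/get/data/create/update/delete).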
+class StubGlanceClient(object):
+
+ def __init__(self, images=None):
+        self._images = []
+        _images = images or []
+        # Use an explicit loop rather than map(): map() is lazy on Python 3,
+        # so the images would never actually be created.
+        for image in _images:
+            self.create(**image)
+
+ # NOTE(bcwaldon): HACK to get client.images.* to work
+ self.images = lambda: None
+ for fn in ('list', 'get', 'data', 'create', 'update', 'delete'):
+ setattr(self.images, fn, getattr(self, fn))
+
+ # TODO(bcwaldon): implement filters
+ def list(self, filters=None, marker=None, limit=30):
+ if marker is None:
+ index = 0
+ else:
+ for index, image in enumerate(self._images):
+ if image.id == str(marker):
+ index += 1
+ break
+ else:
+ raise glance_exc.BadRequest('Marker not found')
+
+ return self._images[index:index + limit]
+
+ def get(self, image_id):
+ for image in self._images:
+ if image.id == str(image_id):
+ return image
+ raise glance_exc.NotFound(image_id)
+
+ def data(self, image_id):
+ self.get(image_id)
+ return []
+
+ def create(self, **metadata):
+ metadata['created_at'] = NOW_GLANCE_FORMAT
+ metadata['updated_at'] = NOW_GLANCE_FORMAT
+
+ self._images.append(FakeImage(metadata))
+
+ try:
+ image_id = str(metadata['id'])
+ except KeyError:
+ # auto-generate an id if one wasn't provided
+ image_id = str(len(self._images))
+
+ self._images[-1].id = image_id
+
+ return self._images[-1]
+
+ def update(self, image_id, **metadata):
+ for i, image in enumerate(self._images):
+ if image.id == str(image_id):
+ for k, v in metadata.items():
+ setattr(self._images[i], k, v)
+ return self._images[i]
+ raise glance_exc.NotFound(image_id)
+
+ def delete(self, image_id):
+ for i, image in enumerate(self._images):
+            if image.id == str(image_id):
+ # When you delete an image from glance, it sets the status to
+ # DELETED. If you try to delete a DELETED image, it raises
+ # HTTPForbidden.
+ image_data = self._images[i]
+ if image_data.deleted:
+ raise glance_exc.Forbidden()
+ image_data.deleted = True
+ return
+ raise glance_exc.NotFound(image_id)
+
+
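+# Minimal stand-in for a glanceclient image: all attributes live in a plain
+# dict and are exposed through attribute access.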
+class FakeImage(object):
+ def __init__(self, metadata):
+ IMAGE_ATTRIBUTES = ['size', 'disk_format', 'owner',
+ 'container_format', 'checksum', 'id',
+ 'name', 'created_at', 'updated_at',
+ 'deleted', 'status',
+ 'min_disk', 'min_ram', 'is_public']
+ raw = dict.fromkeys(IMAGE_ATTRIBUTES)
+ raw.update(metadata)
+ self.__dict__['raw'] = raw
+
+ def __getattr__(self, key):
+ try:
+ return self.__dict__['raw'][key]
+ except KeyError:
+ raise AttributeError(key)
+
+    def __setattr__(self, key, value):
+        # A plain dict assignment cannot raise KeyError, so store the value
+        # directly in the backing dict.
+        self.__dict__['raw'][key] = value