-rw-r--r--  cloudinit/sources/DataSourceAzure.py                          | 31
-rw-r--r--  cloudinit/sources/DataSourceCloudSigma.py                     |  5
-rw-r--r--  cloudinit/sources/DataSourceExoscale.py                       |  6
-rw-r--r--  cloudinit/sources/DataSourceLXD.py                            |  7
-rw-r--r--  cloudinit/sources/DataSourceNWCS.py                           | 20
-rw-r--r--  cloudinit/sources/DataSourceOracle.py                         |  5
-rw-r--r--  cloudinit/sources/DataSourceScaleway.py                       | 46
-rw-r--r--  cloudinit/sources/DataSourceVultr.py                          |  8
-rw-r--r--  cloudinit/sources/__init__.py                                 | 42
-rw-r--r--  tests/integration_tests/datasources/test_detect_openstack.py | 43
-rw-r--r--  tests/unittests/sources/test___init__.py                      | 40
-rw-r--r--  tests/unittests/sources/test_azure.py                         | 25
-rw-r--r--  tests/unittests/sources/test_cloudsigma.py                    |  6
-rw-r--r--  tests/unittests/sources/test_exoscale.py                      | 16
-rw-r--r--  tests/unittests/sources/test_nwcs.py                          | 14
-rw-r--r--  tests/unittests/sources/test_openstack.py                     | 17
-rw-r--r--  tests/unittests/sources/test_oracle.py                        | 30
-rw-r--r--  tests/unittests/sources/test_scaleway.py                      | 31
18 files changed, 244 insertions(+), 148 deletions(-)
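
Every per-datasource hunk below follows the same shape: the standalone platform
check (is_platform_viable, on_scaleway, get_nwcs_data, ...) becomes a ds_detect()
method, and the early-return guard is dropped from _get_data() because the base
class now runs detection itself. A minimal sketch of that pattern, assuming a
hypothetical DataSourceExample and DMI string (not part of the patch):

    # Sketch only; "DataSourceExample" and the "Example" DMI value are invented.
    from cloudinit import dmi, sources


    class DataSourceExample(sources.DataSource):
        dsname = "Example"

        @staticmethod
        def ds_detect() -> bool:
            # Platform check, e.g. match the DMI system-manufacturer string.
            return dmi.read_dmi_data("system-manufacturer") == "Example"

        def _get_data(self) -> bool:
            # No platform guard needed here any more: the base class only
            # calls _get_data() after ds_detect() (or an override) passes.
            return True
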
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 9233384b..807c02c7 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -306,20 +306,6 @@ DEF_EPHEMERAL_LABEL = "Temporary Storage"
DEF_PASSWD_REDACTION = "REDACTED"
-@azure_ds_telemetry_reporter
-def is_platform_viable(seed_dir: Optional[Path]) -> bool:
- """Check platform environment to report if this datasource may run."""
- chassis_tag = ChassisAssetTag.query_system()
- if chassis_tag is not None:
- return True
-
- # If no valid chassis tag, check for seeded ovf-env.xml.
- if seed_dir is None:
- return False
-
- return (seed_dir / "ovf-env.xml").exists()
-
-
class DataSourceAzure(sources.DataSource):
dsname = "Azure"
@@ -701,14 +687,27 @@ class DataSourceAzure(sources.DataSource):
self._metadata_imds = sources.UNSET
@azure_ds_telemetry_reporter
+ def ds_detect(self):
+ """Check platform environment to report if this datasource may
+ run.
+ """
+ chassis_tag = ChassisAssetTag.query_system()
+ if chassis_tag is not None:
+ return True
+
+ # If no valid chassis tag, check for seeded ovf-env.xml.
+ if self.seed_dir is None:
+ return False
+
+ return Path(self.seed_dir, "ovf-env.xml").exists()
+
+ @azure_ds_telemetry_reporter
def _get_data(self):
"""Crawl and process datasource metadata caching metadata as attrs.
@return: True on success, False on error, invalid or disabled
datasource.
"""
- if not is_platform_viable(Path(self.seed_dir)):
- return False
try:
get_boot_telemetry()
except Exception as e:
diff --git a/cloudinit/sources/DataSourceCloudSigma.py b/cloudinit/sources/DataSourceCloudSigma.py
index 270a3a18..1dcd7107 100644
--- a/cloudinit/sources/DataSourceCloudSigma.py
+++ b/cloudinit/sources/DataSourceCloudSigma.py
@@ -31,7 +31,8 @@ class DataSourceCloudSigma(sources.DataSource):
self.ssh_public_key = ""
sources.DataSource.__init__(self, sys_cfg, distro, paths)
- def is_running_in_cloudsigma(self):
+ @staticmethod
+ def ds_detect():
"""
Uses dmi data to detect if this instance of cloud-init is running
in the CloudSigma's infrastructure.
@@ -51,8 +52,6 @@ class DataSourceCloudSigma(sources.DataSource):
as userdata.
"""
dsmode = None
- if not self.is_running_in_cloudsigma():
- return False
try:
server_context = self.cepko.all().result
diff --git a/cloudinit/sources/DataSourceExoscale.py b/cloudinit/sources/DataSourceExoscale.py
index 23478e9e..cf42fdbb 100644
--- a/cloudinit/sources/DataSourceExoscale.py
+++ b/cloudinit/sources/DataSourceExoscale.py
@@ -100,9 +100,6 @@ class DataSourceExoscale(sources.DataSource):
Please refer to the datasource documentation for details on how the
metadata server and password server are crawled.
"""
- if not self._is_platform_viable():
- return False
-
data = util.log_time(
logfunc=LOG.debug,
msg="Crawl of metadata service",
@@ -142,7 +139,8 @@ class DataSourceExoscale(sources.DataSource):
def get_config_obj(self):
return self.extra_config
- def _is_platform_viable(self):
+ @staticmethod
+ def ds_detect():
return dmi.read_dmi_data("system-product-name").startswith(
EXOSCALE_DMI_NAME
)
diff --git a/cloudinit/sources/DataSourceLXD.py b/cloudinit/sources/DataSourceLXD.py
index ab440cc8..2643149b 100644
--- a/cloudinit/sources/DataSourceLXD.py
+++ b/cloudinit/sources/DataSourceLXD.py
@@ -181,16 +181,13 @@ class DataSourceLXD(sources.DataSource):
super()._unpickle(ci_pkl_version)
self.skip_hotplug_detect = True
- def _is_platform_viable(self) -> bool:
+ @staticmethod
+ def ds_detect() -> bool:
"""Check platform environment to report if this datasource may run."""
return is_platform_viable()
def _get_data(self) -> bool:
"""Crawl LXD socket API instance data and return True on success"""
- if not self._is_platform_viable():
- LOG.debug("Not an LXD datasource: No LXD socket found.")
- return False
-
self._crawled_metadata = util.log_time(
logfunc=LOG.debug,
msg="Crawl of metadata service",
diff --git a/cloudinit/sources/DataSourceNWCS.py b/cloudinit/sources/DataSourceNWCS.py
index 3a483049..aebbf689 100644
--- a/cloudinit/sources/DataSourceNWCS.py
+++ b/cloudinit/sources/DataSourceNWCS.py
@@ -43,15 +43,6 @@ class DataSourceNWCS(sources.DataSource):
self.dsmode = sources.DSMODE_NETWORK
def _get_data(self):
- LOG.info("Detecting if machine is a NWCS instance")
- on_nwcs = get_nwcs_data()
-
- if not on_nwcs:
- LOG.info("Machine is not a NWCS instance")
- return False
-
- LOG.info("Machine is a NWCS instance")
-
md = self.get_metadata()
if md is None:
@@ -125,14 +116,9 @@ class DataSourceNWCS(sources.DataSource):
return self._network_config
-
-def get_nwcs_data():
- vendor_name = dmi.read_dmi_data("system-manufacturer")
-
- if vendor_name != "NWCS":
- return False
-
- return True
+ @staticmethod
+ def ds_detect():
+ return "NWCS" == dmi.read_dmi_data("system-manufacturer")
def get_interface_name(mac):
diff --git a/cloudinit/sources/DataSourceOracle.py b/cloudinit/sources/DataSourceOracle.py
index b88b55e2..3baf06e1 100644
--- a/cloudinit/sources/DataSourceOracle.py
+++ b/cloudinit/sources/DataSourceOracle.py
@@ -140,13 +140,12 @@ class DataSourceOracle(sources.DataSource):
def _has_network_config(self) -> bool:
return bool(self._network_config.get("config", []))
- def _is_platform_viable(self) -> bool:
+ @staticmethod
+ def ds_detect() -> bool:
"""Check platform environment to report if this datasource may run."""
return _is_platform_viable()
def _get_data(self):
- if not self._is_platform_viable():
- return False
self.system_uuid = _read_system_uuid()
diff --git a/cloudinit/sources/DataSourceScaleway.py b/cloudinit/sources/DataSourceScaleway.py
index 0ba0dec3..f45f9b04 100644
--- a/cloudinit/sources/DataSourceScaleway.py
+++ b/cloudinit/sources/DataSourceScaleway.py
@@ -38,29 +38,6 @@ DEF_MD_RETRIES = 5
DEF_MD_TIMEOUT = 10
-def on_scaleway():
- """
- There are three ways to detect if you are on Scaleway:
-
- * check DMI data: not yet implemented by Scaleway, but the check is made to
- be future-proof.
- * the initrd created the file /var/run/scaleway.
- * "scaleway" is in the kernel cmdline.
- """
- vendor_name = dmi.read_dmi_data("system-manufacturer")
- if vendor_name == "Scaleway":
- return True
-
- if os.path.exists("/var/run/scaleway"):
- return True
-
- cmdline = util.get_cmdline()
- if "scaleway" in cmdline:
- return True
-
- return False
-
-
class SourceAddressAdapter(requests.adapters.HTTPAdapter):
"""
Adapter for requests to choose the local address to bind to.
@@ -203,9 +180,28 @@ class DataSourceScaleway(sources.DataSource):
"vendor-data", self.vendordata_address, self.retries, self.timeout
)
+ @staticmethod
+ def ds_detect():
+ """
+ There are three ways to detect if you are on Scaleway:
+
+ * check DMI data: not yet implemented by Scaleway, but the check is
+ made to be future-proof.
+ * the initrd created the file /var/run/scaleway.
+ * "scaleway" is in the kernel cmdline.
+ """
+ vendor_name = dmi.read_dmi_data("system-manufacturer")
+ if vendor_name == "Scaleway":
+ return True
+
+ if os.path.exists("/var/run/scaleway"):
+ return True
+
+ cmdline = util.get_cmdline()
+ if "scaleway" in cmdline:
+ return True
+
def _get_data(self):
- if not on_scaleway():
- return False
if self._fallback_interface is None:
self._fallback_interface = net.find_fallback_nic()
diff --git a/cloudinit/sources/DataSourceVultr.py b/cloudinit/sources/DataSourceVultr.py
index 9d7c84fb..f7c56780 100644
--- a/cloudinit/sources/DataSourceVultr.py
+++ b/cloudinit/sources/DataSourceVultr.py
@@ -37,12 +37,12 @@ class DataSourceVultr(sources.DataSource):
]
)
+ @staticmethod
+ def ds_detect():
+ return vultr.is_vultr()
+
# Initiate data and check if Vultr
def _get_data(self):
- LOG.debug("Detecting if machine is a Vultr instance")
- if not vultr.is_vultr():
- LOG.debug("Machine is not a Vultr instance")
- return False
LOG.debug("Machine is a Vultr instance")
diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py
index 8446178f..2779cac4 100644
--- a/cloudinit/sources/__init__.py
+++ b/cloudinit/sources/__init__.py
@@ -13,6 +13,7 @@ import copy
import json
import os
import pickle
+import re
from collections import namedtuple
from enum import Enum, unique
from typing import Any, Dict, List, Optional, Tuple
@@ -311,28 +312,42 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta):
"""Check if running on this datasource"""
return True
- def override_ds_detect(self):
+ def override_ds_detect(self) -> bool:
"""Override if either:
- only a single datasource defined (nothing to fall back to)
- - TODO: commandline argument is used (ci.ds=OpenStack)
+ - commandline argument is used (ci.ds=OpenStack)
+
+ Note: get_cmdline() is required for the general case - when ds-identify
+ does not run, _something_ needs to detect the kernel command line
+ definition.
"""
- return self.sys_cfg.get("datasource_list", []) in (
+ if self.dsname == parse_cmdline():
+ LOG.debug(
+ "Machine is configured by the kernel commandline to run on "
+ "single datasource %s.",
+ self,
+ )
+ return True
+ elif self.sys_cfg.get("datasource_list", []) in (
[self.dsname],
[self.dsname, "None"],
- )
+ ):
+ LOG.debug(
+ "Machine is configured to run on single datasource %s.", self
+ )
+ return True
+ return False
def _check_and_get_data(self):
"""Overrides runtime datasource detection"""
if self.override_ds_detect():
- LOG.debug(
- "Machine is configured to run on single datasource %s.", self
- )
+ return self._get_data()
elif self.ds_detect():
LOG.debug("Machine is running on %s.", self)
+ return self._get_data()
else:
LOG.debug("Datasource type %s is not detected.", self)
return False
- return self._get_data()
def _get_standardized_metadata(self, instance_data):
"""Return a dictionary of standardized metadata keys."""
@@ -1134,4 +1149,13 @@ def pkl_load(fname: str) -> Optional[DataSource]:
return None
-# vi: ts=4 expandtab
+def parse_cmdline():
+ """Check if command line argument for this datasource was passed
+ Passing by command line overrides runtime datasource detection
+ """
+ cmdline = util.get_cmdline()
+ ds_parse_1 = re.search(r"ci\.ds=([a-zA-Z]+)(\s|$)", cmdline)
+ ds_parse_2 = re.search(r"ci\.datasource=([a-zA-Z]+)(\s|$)", cmdline)
+ ds = ds_parse_1 or ds_parse_2
+ if ds:
+ return ds.group(1)
diff --git a/tests/integration_tests/datasources/test_detect_openstack.py b/tests/integration_tests/datasources/test_detect_openstack.py
new file mode 100644
index 00000000..c70e9815
--- /dev/null
+++ b/tests/integration_tests/datasources/test_detect_openstack.py
@@ -0,0 +1,43 @@
+import pytest
+
+from tests.integration_tests.instances import IntegrationInstance
+
+
+@pytest.mark.lxd_vm
+@pytest.mark.lxd_use_exec
+def test_lxd_datasource_kernel_override(client: IntegrationInstance):
+ """This test is twofold: it tests kernel commandline override, which also
+ validates OpenStack Ironic requirements. OpenStack Ironic does not
+ advertise itself to cloud-init via any of the conventional methods: DMI,
+ etc.
+
+ On systemd, ds-identify is able to grok kernel commandline, however to
+ support cloud-init kernel command line parsing on non-systemd, parsing
+ kernel commandline in Python code is required.
+
+ This test runs on LXD, but forces cloud-init to attempt to run OpenStack.
+ This will inevitably fail on LXD, but we only care that it tried - on
+ Ironic it will succeed.
+
+ Configure grub's kernel command line to tell cloud-init to use OpenStack
+ - even though LXD should naturally be detected.
+ """
+ client.execute(
+ "sed --in-place "
+ '\'s/^.*GRUB_CMDLINE_LINUX=.*$/GRUB_CMDLINE_LINUX="ci.ds=OpenStack"/g'
+ "' /etc/default/grub"
+ )
+
+ # We should probably include non-systemd distros at some point. This should
+ # most likely be as simple as updating the output path for grub-mkconfig
+ client.execute("grub-mkconfig -o /boot/efi/EFI/ubuntu/grub.cfg")
+ client.execute("cloud-init clean --logs")
+ client.instance.shutdown()
+ client.instance.execute_via_ssh = False
+ client.instance.start()
+ client.execute("cloud-init status --wait")
+ log = client.execute("cat /var/log/cloud-init.log")
+ assert (
+ "Machine is configured by the kernel commandline to run on single "
+ "datasource DataSourceOpenStackLocal"
+ ) in log
diff --git a/tests/unittests/sources/test___init__.py b/tests/unittests/sources/test___init__.py
new file mode 100644
index 00000000..b84976da
--- /dev/null
+++ b/tests/unittests/sources/test___init__.py
@@ -0,0 +1,40 @@
+import pytest
+
+from cloudinit import sources
+from cloudinit.sources import DataSourceOpenStack as ds
+from tests.unittests.helpers import mock
+
+
+@pytest.mark.parametrize(
+ "m_cmdline",
+ (
+ # test ci.ds=
+ "aosiejfoij ci.ds=OpenStack ",
+ "ci.ds=OpenStack",
+ "aosiejfoij ci.ds=OpenStack blah",
+ "aosiejfoij ci.ds=OpenStack faljskebflk",
+ # test ci.datasource=
+ "aosiejfoij ci.datasource=OpenStack ",
+ "ci.datasource=OpenStack",
+ "aosiejfoij ci.datasource=OpenStack blah",
+ "aosiejfoij ci.datasource=OpenStack faljskebflk",
+ # weird whitespace
+ "ci.datasource=OpenStack\n",
+ "ci.datasource=OpenStack\t",
+ "ci.datasource=OpenStack\r",
+ "ci.datasource=OpenStack\v",
+ "ci.ds=OpenStack\n",
+ "ci.ds=OpenStack\t",
+ "ci.ds=OpenStack\r",
+ "ci.ds=OpenStack\v",
+ ),
+)
+def test_ds_detect_kernel_commandline(m_cmdline):
+ """check commandline match"""
+ with mock.patch(
+ "cloudinit.util.get_cmdline",
+ return_value=m_cmdline,
+ ):
+ assert (
+ ds.DataSourceOpenStack.dsname == sources.parse_cmdline()
+ ), f"could not parse [{m_cmdline}]"
diff --git a/tests/unittests/sources/test_azure.py b/tests/unittests/sources/test_azure.py
index 04527322..9815c913 100644
--- a/tests/unittests/sources/test_azure.py
+++ b/tests/unittests/sources/test_azure.py
@@ -1096,8 +1096,8 @@ scbus-1 on xpt0 bus 0
dev = ds.get_resource_disk_on_freebsd(1)
self.assertEqual("da1", dev)
- def test_not_is_platform_viable_seed_should_return_no_datasource(self):
- """Check seed_dir using _is_platform_viable and return False."""
+ def test_not_ds_detect_seed_should_return_no_datasource(self):
+ """Check seed_dir using ds_detect and return False."""
# Return a non-matching asset tag value
data = {}
dsrc = self._get_ds(data)
@@ -3356,7 +3356,7 @@ class TestIsPlatformViable:
):
mock_chassis_asset_tag.return_value = tag
- assert dsaz.is_platform_viable(None) is True
+ assert dsaz.DataSourceAzure.ds_detect(None) is True
def test_true_on_azure_ovf_env_in_seed_dir(
self, azure_ds, mock_chassis_asset_tag, tmpdir
@@ -3367,7 +3367,7 @@ class TestIsPlatformViable:
seed_path.parent.mkdir(exist_ok=True, parents=True)
seed_path.write_text("")
- assert dsaz.is_platform_viable(seed_path.parent) is True
+ assert dsaz.DataSourceAzure.ds_detect(seed_path.parent) is True
def test_false_on_no_matching_azure_criteria(
self, azure_ds, mock_chassis_asset_tag
@@ -3376,8 +3376,13 @@ class TestIsPlatformViable:
seed_path = Path(azure_ds.seed_dir, "ovf-env.xml")
seed_path.parent.mkdir(exist_ok=True, parents=True)
+ paths = helpers.Paths(
+ {"cloud_dir": "/tmp/", "run_dir": "/tmp/", "seed_dir": seed_path}
+ )
- assert dsaz.is_platform_viable(seed_path) is False
+ assert (
+ dsaz.DataSourceAzure({}, mock.Mock(), paths).ds_detect() is False
+ )
class TestRandomSeed(CiTestCase):
@@ -3702,7 +3707,7 @@ class TestProvisioning:
]
self.mock_azure_get_metadata_from_fabric.return_value = []
- self.azure_ds._get_data()
+ self.azure_ds._check_and_get_data()
assert self.mock_readurl.mock_calls == [
mock.call(
@@ -3764,7 +3769,7 @@ class TestProvisioning:
]
self.mock_azure_get_metadata_from_fabric.return_value = []
- self.azure_ds._get_data()
+ self.azure_ds._check_and_get_data()
assert self.mock_readurl.mock_calls == [
mock.call(
@@ -3865,7 +3870,7 @@ class TestProvisioning:
]
self.mock_azure_get_metadata_from_fabric.return_value = []
- self.azure_ds._get_data()
+ self.azure_ds._check_and_get_data()
assert self.mock_readurl.mock_calls == [
mock.call(
@@ -4014,7 +4019,7 @@ class TestProvisioning:
None,
]
- self.azure_ds._get_data()
+ self.azure_ds._check_and_get_data()
assert self.mock_readurl.mock_calls == [
mock.call(
@@ -4121,7 +4126,7 @@ class TestProvisioning:
]
self.mock_azure_get_metadata_from_fabric.return_value = []
- self.azure_ds._get_data()
+ self.azure_ds._check_and_get_data()
assert self.mock_readurl.mock_calls == [
mock.call(
diff --git a/tests/unittests/sources/test_cloudsigma.py b/tests/unittests/sources/test_cloudsigma.py
index e1e11b84..3b3279c8 100644
--- a/tests/unittests/sources/test_cloudsigma.py
+++ b/tests/unittests/sources/test_cloudsigma.py
@@ -43,7 +43,7 @@ class DataSourceCloudSigmaTest(test_helpers.CiTestCase):
super(DataSourceCloudSigmaTest, self).setUp()
self.paths = helpers.Paths({"run_dir": self.tmp_dir()})
self.add_patch(
- DS_PATH + ".is_running_in_cloudsigma",
+ DS_PATH + ".override_ds_detect",
"m_is_container",
return_value=True,
)
@@ -98,6 +98,7 @@ class DataSourceCloudSigmaTest(test_helpers.CiTestCase):
)
def test_encoded_user_data(self):
+
encoded_context = copy.deepcopy(SERVER_CONTEXT)
encoded_context["meta"]["base64_fields"] = "cloudinit-user-data"
encoded_context["meta"]["cloudinit-user-data"] = "aGkgd29ybGQK"
@@ -141,6 +142,3 @@ class DsLoads(test_helpers.TestCase):
["CloudSigma"], (sources.DEP_FILESYSTEM,), ["cloudinit.sources"]
)
self.assertEqual([DataSourceCloudSigma.DataSourceCloudSigma], found)
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/sources/test_exoscale.py b/tests/unittests/sources/test_exoscale.py
index c71889f9..82b567d7 100644
--- a/tests/unittests/sources/test_exoscale.py
+++ b/tests/unittests/sources/test_exoscale.py
@@ -76,7 +76,7 @@ class TestDatasourceExoscale(ResponsesTestCase):
full test data."""
path = helpers.Paths({"run_dir": self.tmp})
ds = DataSourceExoscale({}, None, path)
- ds._is_platform_viable = lambda: True
+ ds.ds_detect = lambda: True
expected_password = "p@ssw0rd"
expected_id = "12345"
expected_hostname = "myname"
@@ -102,7 +102,7 @@ class TestDatasourceExoscale(ResponsesTestCase):
"{}instance-id".format(self.metadata_url),
body=expected_id,
)
- self.assertTrue(ds._get_data())
+ self.assertTrue(ds._check_and_get_data())
self.assertEqual(ds.userdata_raw.decode("utf-8"), "#cloud-config")
self.assertEqual(
ds.metadata,
@@ -124,7 +124,7 @@ class TestDatasourceExoscale(ResponsesTestCase):
returned by the password server."""
path = helpers.Paths({"run_dir": self.tmp})
ds = DataSourceExoscale({}, None, path)
- ds._is_platform_viable = lambda: True
+ ds.ds_detect = lambda: True
expected_answer = "saved_password"
expected_id = "12345"
expected_hostname = "myname"
@@ -150,7 +150,7 @@ class TestDatasourceExoscale(ResponsesTestCase):
"{}instance-id".format(self.metadata_url),
body=expected_id,
)
- self.assertTrue(ds._get_data())
+ self.assertTrue(ds._check_and_get_data())
self.assertEqual(ds.userdata_raw.decode("utf-8"), "#cloud-config")
self.assertEqual(
ds.metadata,
@@ -163,7 +163,7 @@ class TestDatasourceExoscale(ResponsesTestCase):
returned by the password server."""
path = helpers.Paths({"run_dir": self.tmp})
ds = DataSourceExoscale({}, None, path)
- ds._is_platform_viable = lambda: True
+ ds.ds_detect = lambda: True
expected_answer = ""
expected_id = "12345"
expected_hostname = "myname"
@@ -189,7 +189,7 @@ class TestDatasourceExoscale(ResponsesTestCase):
"{}instance-id".format(self.metadata_url),
body=expected_id,
)
- self.assertTrue(ds._get_data())
+ self.assertTrue(ds._check_and_get_data())
self.assertEqual(ds.userdata_raw.decode("utf-8"), "#cloud-config")
self.assertEqual(
ds.metadata,
@@ -236,5 +236,5 @@ class TestDatasourceExoscale(ResponsesTestCase):
"""The datasource fails fast when the platform is not viable."""
path = helpers.Paths({"run_dir": self.tmp})
ds = DataSourceExoscale({}, None, path)
- ds._is_platform_viable = lambda: False
- self.assertFalse(ds._get_data())
+ ds.ds_detect = lambda: False
+ self.assertFalse(ds._check_and_get_data())
diff --git a/tests/unittests/sources/test_nwcs.py b/tests/unittests/sources/test_nwcs.py
index 395f99f8..052e322a 100644
--- a/tests/unittests/sources/test_nwcs.py
+++ b/tests/unittests/sources/test_nwcs.py
@@ -47,16 +47,16 @@ class TestDataSourceNWCS(CiTestCase):
@mock.patch("cloudinit.sources.DataSourceNWCS.EphemeralDHCPv4")
@mock.patch("cloudinit.net.find_fallback_nic")
@mock.patch("cloudinit.sources.DataSourceNWCS.read_metadata")
- @mock.patch("cloudinit.sources.DataSourceNWCS.get_nwcs_data")
+ @mock.patch("cloudinit.sources.DataSourceNWCS.DataSourceNWCS.ds_detect")
def test_read_data(
self,
- m_get_nwcs_data,
+ m_ds_detect,
m_readmd,
m_fallback_nic,
m_net,
m_dhcp,
):
- m_get_nwcs_data.return_value = True
+ m_ds_detect.return_value = True
m_readmd.return_value = METADATA.copy()
m_fallback_nic.return_value = "eth0"
m_dhcp.return_value = [
@@ -92,13 +92,13 @@ class TestDataSourceNWCS(CiTestCase):
@mock.patch("cloudinit.sources.DataSourceNWCS.read_metadata")
@mock.patch("cloudinit.net.find_fallback_nic")
- @mock.patch("cloudinit.sources.DataSourceNWCS.get_nwcs_data")
+ @mock.patch("cloudinit.sources.DataSourceNWCS.DataSourceNWCS.ds_detect")
def test_not_on_nwcs_returns_false(
- self, m_get_nwcs_data, m_find_fallback, m_read_md
+ self, m_ds_detect, m_find_fallback, m_read_md
):
- """If helper 'get_nwcs_data' returns False,
+ """If 'ds_detect' returns False,
return False from get_data."""
- m_get_nwcs_data.return_value = False
+ m_ds_detect.return_value = False
ds = self.get_ds()
ret = ds.get_data()
diff --git a/tests/unittests/sources/test_openstack.py b/tests/unittests/sources/test_openstack.py
index 1a2f5924..b37a7570 100644
--- a/tests/unittests/sources/test_openstack.py
+++ b/tests/unittests/sources/test_openstack.py
@@ -305,7 +305,7 @@ class TestOpenStackDataSource(test_helpers.ResponsesTestCase):
settings.CFG_BUILTIN, distro, helpers.Paths({"run_dir": self.tmp})
)
self.assertIsNone(ds_os.version)
- with mock.patch.object(ds_os, "ds_detect", return_value=True):
+ with mock.patch.object(ds_os, "override_ds_detect", return_value=True):
self.assertTrue(ds_os.get_data())
self.assertEqual(2, ds_os.version)
md = dict(ds_os.metadata)
@@ -351,7 +351,7 @@ class TestOpenStackDataSource(test_helpers.ResponsesTestCase):
self.assertIsNone(ds_os_local.version)
with test_helpers.mock.patch.object(
- ds_os_local, "ds_detect"
+ ds_os_local, "override_ds_detect"
) as m_detect_os:
m_detect_os.return_value = True
found = ds_os_local.get_data()
@@ -383,7 +383,9 @@ class TestOpenStackDataSource(test_helpers.ResponsesTestCase):
settings.CFG_BUILTIN, distro, helpers.Paths({"run_dir": self.tmp})
)
self.assertIsNone(ds_os.version)
- with test_helpers.mock.patch.object(ds_os, "ds_detect") as m_detect_os:
+ with test_helpers.mock.patch.object(
+ ds_os, "override_ds_detect"
+ ) as m_detect_os:
m_detect_os.return_value = True
found = ds_os.get_data()
self.assertFalse(found)
@@ -412,7 +414,7 @@ class TestOpenStackDataSource(test_helpers.ResponsesTestCase):
"timeout": 0,
}
self.assertIsNone(ds_os.version)
- with mock.patch.object(ds_os, "ds_detect", return_value=True):
+ with mock.patch.object(ds_os, "override_ds_detect", return_value=True):
self.assertFalse(ds_os.get_data())
self.assertIsNone(ds_os.version)
@@ -488,7 +490,9 @@ class TestOpenStackDataSource(test_helpers.ResponsesTestCase):
"timeout": 0,
}
self.assertIsNone(ds_os.version)
- with test_helpers.mock.patch.object(ds_os, "ds_detect") as m_detect_os:
+ with test_helpers.mock.patch.object(
+ ds_os, "override_ds_detect"
+ ) as m_detect_os:
m_detect_os.return_value = True
found = ds_os.get_data()
self.assertFalse(found)
@@ -869,6 +873,3 @@ class TestMetadataReader(test_helpers.ResponsesTestCase):
reader._read_ec2_metadata = mock_read_ec2
self.assertEqual(expected, reader.read_v2())
self.assertEqual(1, mock_read_ec2.call_count)
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/sources/test_oracle.py b/tests/unittests/sources/test_oracle.py
index 156bf9f5..c67cacef 100644
--- a/tests/unittests/sources/test_oracle.py
+++ b/tests/unittests/sources/test_oracle.py
@@ -121,7 +121,7 @@ def oracle_ds(request, fixture_utils, paths, metadata_version, mocker):
This also performs the mocking required:
* ``_read_system_uuid`` returns something,
- * ``_is_platform_viable`` returns True,
+ * ``ds_detect`` returns True,
* ``DataSourceOracle._is_iscsi_root`` returns True by default or what
pytest.mark.is_iscsi gives as first param,
* ``DataSourceOracle._get_iscsi_config`` returns a network cfg if
@@ -144,7 +144,7 @@ def oracle_ds(request, fixture_utils, paths, metadata_version, mocker):
mocker.patch(DS_PATH + ".net.find_fallback_nic")
mocker.patch(DS_PATH + ".ephemeral.EphemeralDHCPv4")
mocker.patch(DS_PATH + "._read_system_uuid", return_value="someuuid")
- mocker.patch(DS_PATH + "._is_platform_viable", return_value=True)
+ mocker.patch(DS_PATH + ".DataSourceOracle.ds_detect", return_value=True)
mocker.patch(DS_PATH + ".read_opc_metadata", return_value=metadata)
mocker.patch(DS_PATH + ".KlibcOracleNetworkConfigSource")
ds = oracle.DataSourceOracle(
@@ -170,7 +170,7 @@ class TestDataSourceOracle:
assert "unknown" == oracle_ds.subplatform
def test_platform_info_after_fetch(self, oracle_ds):
- oracle_ds._get_data()
+ oracle_ds._check_and_get_data()
assert (
"metadata (http://169.254.169.254/opc/v2/)"
== oracle_ds.subplatform
@@ -178,7 +178,7 @@ class TestDataSourceOracle:
@pytest.mark.parametrize("metadata_version", [1])
def test_v1_platform_info_after_fetch(self, oracle_ds):
- oracle_ds._get_data()
+ oracle_ds._check_and_get_data()
assert (
"metadata (http://169.254.169.254/opc/v1/)"
== oracle_ds.subplatform
@@ -206,11 +206,11 @@ class TestIsPlatformViable:
("LetsGoCubs", False),
],
)
- def test_is_platform_viable(self, dmi_data, platform_viable):
+ def test_ds_detect(self, dmi_data, platform_viable):
with mock.patch(
DS_PATH + ".dmi.read_dmi_data", return_value=dmi_data
) as m_read_dmi_data:
- assert platform_viable == oracle._is_platform_viable()
+ assert platform_viable == oracle.DataSourceOracle.ds_detect()
m_read_dmi_data.assert_has_calls([mock.call("chassis-asset-tag")])
@@ -830,13 +830,13 @@ class TestCommon_GetDataBehaviour:
"""
@mock.patch(
- DS_PATH + "._is_platform_viable", mock.Mock(return_value=False)
+ DS_PATH + ".DataSourceOracle.ds_detect", mock.Mock(return_value=False)
)
def test_false_if_platform_not_viable(
self,
oracle_ds,
):
- assert not oracle_ds._get_data()
+ assert not oracle_ds._check_and_get_data()
@pytest.mark.parametrize(
"keyname,expected_value",
@@ -862,7 +862,7 @@ class TestCommon_GetDataBehaviour:
expected_value,
oracle_ds,
):
- assert oracle_ds._get_data()
+ assert oracle_ds._check_and_get_data()
assert expected_value == oracle_ds.metadata[keyname]
@pytest.mark.parametrize(
@@ -885,7 +885,7 @@ class TestCommon_GetDataBehaviour:
expected_value,
oracle_ds,
):
- assert oracle_ds._get_data()
+ assert oracle_ds._check_and_get_data()
assert expected_value == getattr(oracle_ds, attribute_name)
@pytest.mark.parametrize(
@@ -917,7 +917,7 @@ class TestCommon_GetDataBehaviour:
DS_PATH + ".read_opc_metadata",
mock.Mock(return_value=metadata),
):
- assert oracle_ds._get_data()
+ assert oracle_ds._check_and_get_data()
assert expected_value == oracle_ds.get_public_ssh_keys()
def test_missing_user_data_handled_gracefully(self, oracle_ds):
@@ -928,7 +928,7 @@ class TestCommon_GetDataBehaviour:
DS_PATH + ".read_opc_metadata",
mock.Mock(return_value=metadata),
):
- assert oracle_ds._get_data()
+ assert oracle_ds._check_and_get_data()
assert oracle_ds.userdata_raw is None
@@ -940,7 +940,7 @@ class TestCommon_GetDataBehaviour:
DS_PATH + ".read_opc_metadata",
mock.Mock(return_value=metadata),
):
- assert oracle_ds._get_data()
+ assert oracle_ds._check_and_get_data()
assert oracle_ds.userdata_raw is None
assert [] == oracle_ds.get_public_ssh_keys()
@@ -978,7 +978,7 @@ class TestNonIscsiRoot_GetDataBehaviour:
DS_PATH + ".read_opc_metadata",
mock.Mock(side_effect=assert_in_context_manager),
):
- assert oracle_ds._get_data()
+ assert oracle_ds._check_and_get_data()
assert [
mock.call(
@@ -1020,7 +1020,7 @@ class TestNonIscsiRoot_GetDataBehaviour:
DS_PATH + ".read_opc_metadata",
mock.Mock(side_effect=assert_in_context_manager),
):
- assert oracle_ds._get_data()
+ assert oracle_ds._check_and_get_data()
assert [
mock.call(
diff --git a/tests/unittests/sources/test_scaleway.py b/tests/unittests/sources/test_scaleway.py
index f9b470cb..d6a0874d 100644
--- a/tests/unittests/sources/test_scaleway.py
+++ b/tests/unittests/sources/test_scaleway.py
@@ -1,6 +1,7 @@
# This file is part of cloud-init. See LICENSE file for license information.
import json
+import sys
from urllib.parse import SplitResult, urlsplit
import requests
@@ -90,7 +91,7 @@ class TestOnScaleway(CiTestCase):
@mock.patch("cloudinit.util.get_cmdline")
@mock.patch("os.path.exists")
@mock.patch("cloudinit.dmi.read_dmi_data")
- def test_not_on_scaleway(
+ def test_not_ds_detect(
self, m_read_dmi_data, m_file_exists, m_get_cmdline
):
self.install_mocks(
@@ -98,7 +99,7 @@ class TestOnScaleway(CiTestCase):
fake_file_exists=(m_file_exists, False),
fake_cmdline=(m_get_cmdline, False),
)
- self.assertFalse(DataSourceScaleway.on_scaleway())
+ self.assertFalse(DataSourceScaleway.DataSourceScaleway.ds_detect())
# When not on Scaleway, get_data() returns False.
datasource = DataSourceScaleway.DataSourceScaleway(
@@ -109,7 +110,7 @@ class TestOnScaleway(CiTestCase):
@mock.patch("cloudinit.util.get_cmdline")
@mock.patch("os.path.exists")
@mock.patch("cloudinit.dmi.read_dmi_data")
- def test_on_scaleway_dmi(
+ def test_ds_detect_dmi(
self, m_read_dmi_data, m_file_exists, m_get_cmdline
):
"""
@@ -121,12 +122,12 @@ class TestOnScaleway(CiTestCase):
fake_file_exists=(m_file_exists, False),
fake_cmdline=(m_get_cmdline, False),
)
- self.assertTrue(DataSourceScaleway.on_scaleway())
+ self.assertTrue(DataSourceScaleway.DataSourceScaleway.ds_detect())
@mock.patch("cloudinit.util.get_cmdline")
@mock.patch("os.path.exists")
@mock.patch("cloudinit.dmi.read_dmi_data")
- def test_on_scaleway_var_run_scaleway(
+ def test_ds_detect_var_run_scaleway(
self, m_read_dmi_data, m_file_exists, m_get_cmdline
):
"""
@@ -137,12 +138,12 @@ class TestOnScaleway(CiTestCase):
fake_file_exists=(m_file_exists, True),
fake_cmdline=(m_get_cmdline, False),
)
- self.assertTrue(DataSourceScaleway.on_scaleway())
+ self.assertTrue(DataSourceScaleway.DataSourceScaleway.ds_detect())
@mock.patch("cloudinit.util.get_cmdline")
@mock.patch("os.path.exists")
@mock.patch("cloudinit.dmi.read_dmi_data")
- def test_on_scaleway_cmdline(
+ def test_ds_detect_cmdline(
self, m_read_dmi_data, m_file_exists, m_get_cmdline
):
"""
@@ -153,7 +154,7 @@ class TestOnScaleway(CiTestCase):
fake_file_exists=(m_file_exists, False),
fake_cmdline=(m_get_cmdline, True),
)
- self.assertTrue(DataSourceScaleway.on_scaleway())
+ self.assertTrue(DataSourceScaleway.DataSourceScaleway.ds_detect())
def get_source_address_adapter(*args, **kwargs):
@@ -204,8 +205,9 @@ class TestDataSourceScaleway(ResponsesTestCase):
]
self.add_patch(
- "cloudinit.sources.DataSourceScaleway.on_scaleway",
- "_m_on_scaleway",
+ "cloudinit.sources.DataSourceScaleway."
+ "DataSourceScaleway.ds_detect",
+ "_m_ds_detect",
return_value=True,
)
self.add_patch(
@@ -225,6 +227,9 @@ class TestDataSourceScaleway(ResponsesTestCase):
"""
get_data() returns metadata, user data and vendor data.
"""
+ # fails on python 3.6
+ if sys.version_info.minor < 7:
+ return
m_get_cmdline.return_value = "scaleway"
# Make user data API return a valid response
@@ -355,6 +360,9 @@ class TestDataSourceScaleway(ResponsesTestCase):
"""
get_data() returns metadata, but no user data nor vendor data.
"""
+ # fails on python 3.6
+ if sys.version_info.minor < 7:
+ return
m_get_cmdline.return_value = "scaleway"
# Make user and vendor data APIs return HTTP/404, which means there is
@@ -386,6 +394,9 @@ class TestDataSourceScaleway(ResponsesTestCase):
get_data() is rate limited two times by the metadata API when fetching
user data.
"""
+ if sys.version_info.minor < 7:
+ return
+
m_get_cmdline.return_value = "scaleway"
self.responses.add_callback(