diff options
author | Brett Holman <brett.holman@canonical.com> | 2023-03-19 19:24:49 -0600 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-03-19 20:24:49 -0500 |
commit | a60c0845806baff72c74603286d048efbafab664 (patch) | |
tree | 4ae8acc8555cc90948e8895ca72f599c4ec96d3a /cloudinit | |
parent | 3634678465e7b8f8608bcb9a1f5773ae7837cbe9 (diff) | |
download | cloud-init-git-a60c0845806baff72c74603286d048efbafab664.tar.gz |
datasource: Optimize datasource detection, fix bugs (#2060)
Commit d1ffbea556a06105 enabled skipping python datasource detection on
OpenStack when no other datasources (besides DataSourceNone) can be discovered.
This allowed one to override detection, which is a requirement for OpenStack
Ironic which does not advertise itself to cloud-init.
Since no further datasources can be detected at this stage in the code, this
pattern can be generalized to other datasources to facilitate troubleshooting
or to provide a general workaround for runtime detection bugs.
Additionally, this pattern can be extended to kernel commandline datasource
definition. Since the kernel commandline is the highest-priority
configuration source, it makes sense for it to override python code
datasource detection as well.
Include an integration test on LXD for this behavior that configures kernel
commandline and reboots to verify that the specified datasource is forced.
Diffstat (limited to 'cloudinit')
-rw-r--r-- | cloudinit/sources/DataSourceAzure.py | 31 | ||||
-rw-r--r-- | cloudinit/sources/DataSourceCloudSigma.py | 5 | ||||
-rw-r--r-- | cloudinit/sources/DataSourceExoscale.py | 6 | ||||
-rw-r--r-- | cloudinit/sources/DataSourceLXD.py | 7 | ||||
-rw-r--r-- | cloudinit/sources/DataSourceNWCS.py | 20 | ||||
-rw-r--r-- | cloudinit/sources/DataSourceOracle.py | 5 | ||||
-rw-r--r-- | cloudinit/sources/DataSourceScaleway.py | 46 | ||||
-rw-r--r-- | cloudinit/sources/DataSourceVultr.py | 8 | ||||
-rw-r--r-- | cloudinit/sources/__init__.py | 42 |
9 files changed, 84 insertions, 86 deletions
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py index 9233384b..807c02c7 100644 --- a/cloudinit/sources/DataSourceAzure.py +++ b/cloudinit/sources/DataSourceAzure.py @@ -306,20 +306,6 @@ DEF_EPHEMERAL_LABEL = "Temporary Storage" DEF_PASSWD_REDACTION = "REDACTED" -@azure_ds_telemetry_reporter -def is_platform_viable(seed_dir: Optional[Path]) -> bool: - """Check platform environment to report if this datasource may run.""" - chassis_tag = ChassisAssetTag.query_system() - if chassis_tag is not None: - return True - - # If no valid chassis tag, check for seeded ovf-env.xml. - if seed_dir is None: - return False - - return (seed_dir / "ovf-env.xml").exists() - - class DataSourceAzure(sources.DataSource): dsname = "Azure" @@ -701,14 +687,27 @@ class DataSourceAzure(sources.DataSource): self._metadata_imds = sources.UNSET @azure_ds_telemetry_reporter + def ds_detect(self): + """Check platform environment to report if this datasource may + run. + """ + chassis_tag = ChassisAssetTag.query_system() + if chassis_tag is not None: + return True + + # If no valid chassis tag, check for seeded ovf-env.xml. + if self.seed_dir is None: + return False + + return Path(self.seed_dir, "ovf-env.xml").exists() + + @azure_ds_telemetry_reporter def _get_data(self): """Crawl and process datasource metadata caching metadata as attrs. @return: True on success, False on error, invalid or disabled datasource. 
""" - if not is_platform_viable(Path(self.seed_dir)): - return False try: get_boot_telemetry() except Exception as e: diff --git a/cloudinit/sources/DataSourceCloudSigma.py b/cloudinit/sources/DataSourceCloudSigma.py index 270a3a18..1dcd7107 100644 --- a/cloudinit/sources/DataSourceCloudSigma.py +++ b/cloudinit/sources/DataSourceCloudSigma.py @@ -31,7 +31,8 @@ class DataSourceCloudSigma(sources.DataSource): self.ssh_public_key = "" sources.DataSource.__init__(self, sys_cfg, distro, paths) - def is_running_in_cloudsigma(self): + @staticmethod + def ds_detect(): """ Uses dmi data to detect if this instance of cloud-init is running in the CloudSigma's infrastructure. @@ -51,8 +52,6 @@ class DataSourceCloudSigma(sources.DataSource): as userdata. """ dsmode = None - if not self.is_running_in_cloudsigma(): - return False try: server_context = self.cepko.all().result diff --git a/cloudinit/sources/DataSourceExoscale.py b/cloudinit/sources/DataSourceExoscale.py index 23478e9e..cf42fdbb 100644 --- a/cloudinit/sources/DataSourceExoscale.py +++ b/cloudinit/sources/DataSourceExoscale.py @@ -100,9 +100,6 @@ class DataSourceExoscale(sources.DataSource): Please refer to the datasource documentation for details on how the metadata server and password server are crawled. 
""" - if not self._is_platform_viable(): - return False - data = util.log_time( logfunc=LOG.debug, msg="Crawl of metadata service", @@ -142,7 +139,8 @@ class DataSourceExoscale(sources.DataSource): def get_config_obj(self): return self.extra_config - def _is_platform_viable(self): + @staticmethod + def ds_detect(): return dmi.read_dmi_data("system-product-name").startswith( EXOSCALE_DMI_NAME ) diff --git a/cloudinit/sources/DataSourceLXD.py b/cloudinit/sources/DataSourceLXD.py index ab440cc8..2643149b 100644 --- a/cloudinit/sources/DataSourceLXD.py +++ b/cloudinit/sources/DataSourceLXD.py @@ -181,16 +181,13 @@ class DataSourceLXD(sources.DataSource): super()._unpickle(ci_pkl_version) self.skip_hotplug_detect = True - def _is_platform_viable(self) -> bool: + @staticmethod + def ds_detect() -> bool: """Check platform environment to report if this datasource may run.""" return is_platform_viable() def _get_data(self) -> bool: """Crawl LXD socket API instance data and return True on success""" - if not self._is_platform_viable(): - LOG.debug("Not an LXD datasource: No LXD socket found.") - return False - self._crawled_metadata = util.log_time( logfunc=LOG.debug, msg="Crawl of metadata service", diff --git a/cloudinit/sources/DataSourceNWCS.py b/cloudinit/sources/DataSourceNWCS.py index 3a483049..aebbf689 100644 --- a/cloudinit/sources/DataSourceNWCS.py +++ b/cloudinit/sources/DataSourceNWCS.py @@ -43,15 +43,6 @@ class DataSourceNWCS(sources.DataSource): self.dsmode = sources.DSMODE_NETWORK def _get_data(self): - LOG.info("Detecting if machine is a NWCS instance") - on_nwcs = get_nwcs_data() - - if not on_nwcs: - LOG.info("Machine is not a NWCS instance") - return False - - LOG.info("Machine is a NWCS instance") - md = self.get_metadata() if md is None: @@ -125,14 +116,9 @@ class DataSourceNWCS(sources.DataSource): return self._network_config - -def get_nwcs_data(): - vendor_name = dmi.read_dmi_data("system-manufacturer") - - if vendor_name != "NWCS": - return False - - 
return True + @staticmethod + def ds_detect(): + return "NWCS" == dmi.read_dmi_data("system-manufacturer") def get_interface_name(mac): diff --git a/cloudinit/sources/DataSourceOracle.py b/cloudinit/sources/DataSourceOracle.py index b88b55e2..3baf06e1 100644 --- a/cloudinit/sources/DataSourceOracle.py +++ b/cloudinit/sources/DataSourceOracle.py @@ -140,13 +140,12 @@ class DataSourceOracle(sources.DataSource): def _has_network_config(self) -> bool: return bool(self._network_config.get("config", [])) - def _is_platform_viable(self) -> bool: + @staticmethod + def ds_detect() -> bool: """Check platform environment to report if this datasource may run.""" return _is_platform_viable() def _get_data(self): - if not self._is_platform_viable(): - return False self.system_uuid = _read_system_uuid() diff --git a/cloudinit/sources/DataSourceScaleway.py b/cloudinit/sources/DataSourceScaleway.py index 0ba0dec3..f45f9b04 100644 --- a/cloudinit/sources/DataSourceScaleway.py +++ b/cloudinit/sources/DataSourceScaleway.py @@ -38,29 +38,6 @@ DEF_MD_RETRIES = 5 DEF_MD_TIMEOUT = 10 -def on_scaleway(): - """ - There are three ways to detect if you are on Scaleway: - - * check DMI data: not yet implemented by Scaleway, but the check is made to - be future-proof. - * the initrd created the file /var/run/scaleway. - * "scaleway" is in the kernel cmdline. - """ - vendor_name = dmi.read_dmi_data("system-manufacturer") - if vendor_name == "Scaleway": - return True - - if os.path.exists("/var/run/scaleway"): - return True - - cmdline = util.get_cmdline() - if "scaleway" in cmdline: - return True - - return False - - class SourceAddressAdapter(requests.adapters.HTTPAdapter): """ Adapter for requests to choose the local address to bind to. 
@@ -203,9 +180,28 @@ class DataSourceScaleway(sources.DataSource): "vendor-data", self.vendordata_address, self.retries, self.timeout ) + @staticmethod + def ds_detect(): + """ + There are three ways to detect if you are on Scaleway: + + * check DMI data: not yet implemented by Scaleway, but the check is + made to be future-proof. + * the initrd created the file /var/run/scaleway. + * "scaleway" is in the kernel cmdline. + """ + vendor_name = dmi.read_dmi_data("system-manufacturer") + if vendor_name == "Scaleway": + return True + + if os.path.exists("/var/run/scaleway"): + return True + + cmdline = util.get_cmdline() + if "scaleway" in cmdline: + return True + def _get_data(self): - if not on_scaleway(): - return False if self._fallback_interface is None: self._fallback_interface = net.find_fallback_nic() diff --git a/cloudinit/sources/DataSourceVultr.py b/cloudinit/sources/DataSourceVultr.py index 9d7c84fb..f7c56780 100644 --- a/cloudinit/sources/DataSourceVultr.py +++ b/cloudinit/sources/DataSourceVultr.py @@ -37,12 +37,12 @@ class DataSourceVultr(sources.DataSource): ] ) + @staticmethod + def ds_detect(): + return vultr.is_vultr() + # Initiate data and check if Vultr def _get_data(self): - LOG.debug("Detecting if machine is a Vultr instance") - if not vultr.is_vultr(): - LOG.debug("Machine is not a Vultr instance") - return False LOG.debug("Machine is a Vultr instance") diff --git a/cloudinit/sources/__init__.py b/cloudinit/sources/__init__.py index 8446178f..2779cac4 100644 --- a/cloudinit/sources/__init__.py +++ b/cloudinit/sources/__init__.py @@ -13,6 +13,7 @@ import copy import json import os import pickle +import re from collections import namedtuple from enum import Enum, unique from typing import Any, Dict, List, Optional, Tuple @@ -311,28 +312,42 @@ class DataSource(CloudInitPickleMixin, metaclass=abc.ABCMeta): """Check if running on this datasource""" return True - def override_ds_detect(self): + def override_ds_detect(self) -> bool: """Override if 
either: - only a single datasource defined (nothing to fall back to) - - TODO: commandline argument is used (ci.ds=OpenStack) + - commandline argument is used (ci.ds=OpenStack) + + Note: get_cmdline() is required for the general case - when ds-identify + does not run, _something_ needs to detect the kernel command line + definition. """ - return self.sys_cfg.get("datasource_list", []) in ( + if self.dsname == parse_cmdline(): + LOG.debug( + "Machine is configured by the kernel commandline to run on " + "single datasource %s.", + self, + ) + return True + elif self.sys_cfg.get("datasource_list", []) in ( [self.dsname], [self.dsname, "None"], - ) + ): + LOG.debug( + "Machine is configured to run on single datasource %s.", self + ) + return True + return False def _check_and_get_data(self): """Overrides runtime datasource detection""" if self.override_ds_detect(): - LOG.debug( - "Machine is configured to run on single datasource %s.", self - ) + return self._get_data() elif self.ds_detect(): LOG.debug("Machine is running on %s.", self) + return self._get_data() else: LOG.debug("Datasource type %s is not detected.", self) return False - return self._get_data() def _get_standardized_metadata(self, instance_data): """Return a dictionary of standardized metadata keys.""" @@ -1134,4 +1149,13 @@ def pkl_load(fname: str) -> Optional[DataSource]: return None -# vi: ts=4 expandtab +def parse_cmdline(): + """Check if command line argument for this datasource was passed + Passing by command line overrides runtime datasource detection + """ + cmdline = util.get_cmdline() + ds_parse_1 = re.search(r"ci\.ds=([a-zA-Z]+)(\s|$)", cmdline) + ds_parse_2 = re.search(r"ci\.datasource=([a-zA-Z]+)(\s|$)", cmdline) + ds = ds_parse_1 or ds_parse_2 + if ds: + return ds.group(1) |