summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAlberto Contreras <alberto.contreras@canonical.com>2022-08-10 22:23:47 +0200
committergit-ubuntu importer <ubuntu-devel-discuss@lists.ubuntu.com>2022-08-15 17:19:11 +0000
commita3b61fe9453bb6f4a9354c977c288aac4457fd27 (patch)
tree130cf29925e5ad2bce14d0fd16c1c8b30747347f
parent941cd498e204632c63917801c22f2411826cf7da (diff)
downloadcloud-init-git-a3b61fe9453bb6f4a9354c977c288aac4457fd27.tar.gz
22.2-139-gb64ba456-0ubuntu1~22.10.1 (patches unapplied)
Imported using git-ubuntu import.
-rw-r--r--.travis.yml2
-rwxr-xr-xcloudinit/cmd/main.py32
-rw-r--r--cloudinit/config/cc_ansible.py188
-rw-r--r--cloudinit/config/cc_debug.py114
-rw-r--r--cloudinit/config/cc_set_passwords.py2
-rw-r--r--cloudinit/config/cc_update_etc_hosts.py2
-rw-r--r--cloudinit/config/cc_users_groups.py2
-rw-r--r--cloudinit/config/cc_wireguard.py295
-rw-r--r--cloudinit/config/schema.py101
-rw-r--r--cloudinit/config/schemas/schema-cloud-config-v1.json237
-rw-r--r--cloudinit/distros/__init__.py12
-rw-r--r--cloudinit/distros/parsers/sys_conf.py2
-rw-r--r--cloudinit/sources/DataSourceAzure.py23
-rw-r--r--cloudinit/sources/helpers/azure.py10
-rw-r--r--cloudinit/ssh_util.py17
-rw-r--r--cloudinit/url_helper.py28
-rw-r--r--cloudinit/util.py62
-rw-r--r--config/cloud.cfg.tmpl4
-rw-r--r--debian/changelog34
-rw-r--r--doc/examples/cloud-config-ansible.txt16
-rw-r--r--doc/examples/cloud-config-wireguard.txt29
-rw-r--r--doc/rtd/topics/boot.rst2
-rw-r--r--doc/rtd/topics/examples.rst9
-rw-r--r--doc/rtd/topics/format.rst2
-rw-r--r--doc/rtd/topics/instancedata.rst7
-rw-r--r--doc/rtd/topics/kernel-cmdline.rst71
-rw-r--r--doc/rtd/topics/modules.rst3
-rw-r--r--doc/sources/kernel-cmdline.txt48
-rw-r--r--integration-requirements.txt2
-rw-r--r--packages/debian/cloud-init.postrm11
-rw-r--r--tests/integration_tests/bugs/test_lp1835584.py11
-rw-r--r--tests/integration_tests/clouds.py33
-rw-r--r--tests/integration_tests/cmd/test_schema.py12
-rw-r--r--tests/integration_tests/conftest.py83
-rw-r--r--tests/integration_tests/modules/test_ansible.py115
-rw-r--r--tests/integration_tests/modules/test_ca_certs.py26
-rw-r--r--tests/integration_tests/modules/test_set_password.py17
-rw-r--r--tests/integration_tests/modules/test_ssh_keys_provided.py17
-rw-r--r--tests/integration_tests/modules/test_users_groups.py10
-rw-r--r--tests/integration_tests/modules/test_wireguard.py117
-rw-r--r--tests/unittests/config/test_apt_configure_sources_list_v1.py2
-rw-r--r--tests/unittests/config/test_cc_ansible.py362
-rw-r--r--tests/unittests/config/test_cc_debug.py112
-rw-r--r--tests/unittests/config/test_cc_disk_setup.py8
-rw-r--r--tests/unittests/config/test_cc_growpart.py64
-rw-r--r--tests/unittests/config/test_cc_power_state_change.py10
-rw-r--r--tests/unittests/config/test_cc_scripts_vendor.py26
-rw-r--r--tests/unittests/config/test_cc_set_passwords.py102
-rw-r--r--tests/unittests/config/test_cc_ssh.py31
-rw-r--r--tests/unittests/config/test_cc_update_etc_hosts.py33
-rw-r--r--tests/unittests/config/test_cc_users_groups.py165
-rw-r--r--tests/unittests/config/test_cc_wireguard.py266
-rw-r--r--tests/unittests/config/test_schema.py149
-rw-r--r--tests/unittests/conftest.py57
-rw-r--r--tests/unittests/distros/test_create_users.py20
-rw-r--r--tests/unittests/sources/test_azure.py65
-rw-r--r--tests/unittests/test__init__.py108
-rw-r--r--tests/unittests/test_ssh_util.py (renamed from tests/unittests/test_sshutil.py)1181
-rw-r--r--tests/unittests/test_url_helper.py16
-rw-r--r--tests/unittests/test_util.py98
-rw-r--r--tools/.github-cla-signers1
-rw-r--r--tox.ini3
62 files changed, 3492 insertions, 1195 deletions
diff --git a/.travis.yml b/.travis.yml
index fbb0b3ef..253295dd 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -61,7 +61,7 @@ matrix:
sudo find /var/snap/lxd/common/lxd/images/ -name $latest_file* -print -exec cp {} "$TRAVIS_BUILD_DIR/lxd_images/" \;
install:
- git fetch --unshallow
- - sudo apt-get install -y --install-recommends sbuild ubuntu-dev-tools fakeroot tox debhelper
+ - sudo apt-get install -y --install-recommends sbuild ubuntu-dev-tools fakeroot tox debhelper wireguard
- pip install .
- pip install tox
# bionic has lxd from deb installed, remove it first to ensure
diff --git a/cloudinit/cmd/main.py b/cloudinit/cmd/main.py
index a6fb7088..2860126a 100755
--- a/cloudinit/cmd/main.py
+++ b/cloudinit/cmd/main.py
@@ -21,6 +21,7 @@ import os
import sys
import time
import traceback
+from typing import Tuple
from cloudinit import patcher
from cloudinit.config.modules import Modules
@@ -143,7 +144,7 @@ def parse_cmdline_url(cmdline, names=("cloud-config-url", "url")):
raise KeyError("No keys (%s) found in string '%s'" % (cmdline, names))
-def attempt_cmdline_url(path, network=True, cmdline=None):
+def attempt_cmdline_url(path, network=True, cmdline=None) -> Tuple[int, str]:
"""Write data from url referenced in command line to path.
path: a file to write content to if downloaded.
@@ -190,7 +191,7 @@ def attempt_cmdline_url(path, network=True, cmdline=None):
return (level, m)
- kwargs = {"url": url, "timeout": 10, "retries": 2}
+ kwargs = {"url": url, "timeout": 10, "retries": 2, "stream": True}
if network or path_is_local:
level = logging.WARN
kwargs["sec_between"] = 1
@@ -202,22 +203,43 @@ def attempt_cmdline_url(path, network=True, cmdline=None):
header = b"#cloud-config"
try:
resp = url_helper.read_file_or_url(**kwargs)
+ sniffed_content = b""
if resp.ok():
- data = resp.contents
- if not resp.contents.startswith(header):
+ is_cloud_cfg = True
+ if isinstance(resp, url_helper.UrlResponse):
+ try:
+ sniffed_content += next(
+ resp.iter_content(chunk_size=len(header))
+ )
+ except StopIteration:
+ pass
+ if not sniffed_content.startswith(header):
+ is_cloud_cfg = False
+ elif not resp.contents.startswith(header):
+ is_cloud_cfg = False
+ if is_cloud_cfg:
+ if cmdline_name == "url":
+ LOG.warning(
+ "DEPRECATED: `url` kernel command line key is"
+ " deprecated for providing cloud-config via URL."
+ " Please use `cloud-config-url` kernel command line"
+ " parameter instead"
+ )
+ else:
if cmdline_name == "cloud-config-url":
level = logging.WARN
else:
level = logging.INFO
return (
level,
- "contents of '%s' did not start with %s" % (url, header),
+ f"contents of '{url}' did not start with {str(header)}",
)
else:
return (
level,
"url '%s' returned code %s. Ignoring." % (url, resp.code),
)
+ data = sniffed_content + resp.contents
except url_helper.UrlError as e:
return (level, "retrieving url '%s' failed: %s" % (url, e))
diff --git a/cloudinit/config/cc_ansible.py b/cloudinit/config/cc_ansible.py
new file mode 100644
index 00000000..92309272
--- /dev/null
+++ b/cloudinit/config/cc_ansible.py
@@ -0,0 +1,188 @@
+"""ansible enables running on first boot either ansible-pull"""
+import abc
+import logging
+import os
+import re
+import sys
+from copy import deepcopy
+from textwrap import dedent
+from typing import Optional
+
+from cloudinit.cloud import Cloud
+from cloudinit.config.schema import MetaSchema, get_meta_doc
+from cloudinit.distros import ALL_DISTROS
+from cloudinit.settings import PER_INSTANCE
+from cloudinit.subp import subp, which
+from cloudinit.util import Version, get_cfg_by_path
+
+meta: MetaSchema = {
+ "id": "cc_ansible",
+ "name": "Ansible",
+ "title": "Configure ansible for instance",
+ "frequency": PER_INSTANCE,
+ "distros": [ALL_DISTROS],
+ "activate_by_schema_keys": ["ansible"],
+ "description": dedent(
+ """\
+ This module provides ``ansible`` integration for
+ augmenting cloud-init's configuration of the local
+ node.
+
+
+ This module installs ansible during boot and
+ then uses ``ansible-pull`` to run the playbook
+ repository at the remote URL.
+ """
+ ),
+ "examples": [
+ dedent(
+ """\
+ #cloud-config
+ ansible:
+ install-method: distro
+ pull:
+ url: "https://github.com/holmanb/vmboot.git"
+ playbook-name: ubuntu.yml
+ """
+ ),
+ dedent(
+ """\
+ #cloud-config
+ ansible:
+ package-name: ansible-core
+ install-method: pip
+ pull:
+ url: "https://github.com/holmanb/vmboot.git"
+ playbook-name: ubuntu.yml
+ """
+ ),
+ ],
+}
+
+__doc__ = get_meta_doc(meta)
+LOG = logging.getLogger(__name__)
+
+
+class AnsiblePull(abc.ABC):
+ cmd_version: list = []
+ cmd_pull: list = []
+ env: dict = os.environ.copy()
+
+ def get_version(self) -> Optional[Version]:
+ stdout, _ = subp(self.cmd_version, env=self.env)
+ first_line = stdout.splitlines().pop(0)
+ matches = re.search(r"([\d\.]+)", first_line)
+ if matches:
+ version = matches.group(0)
+ return Version.from_str(version)
+ return None
+
+ def pull(self, *args) -> str:
+ stdout, _ = subp([*self.cmd_pull, *args], env=self.env)
+ return stdout
+
+ def check_deps(self):
+ if not self.is_installed():
+ raise ValueError("command: ansible is not installed")
+
+ @abc.abstractmethod
+ def is_installed(self):
+ pass
+
+ @abc.abstractmethod
+ def install(self, pkg_name: str):
+ pass
+
+
+class AnsiblePullPip(AnsiblePull):
+ def __init__(self):
+ self.cmd_pull = ["ansible-pull"]
+ self.cmd_version = ["ansible-pull", "--version"]
+ self.env["PATH"] = ":".join([self.env["PATH"], "/root/.local/bin/"])
+
+ def install(self, pkg_name: str):
+ """should cloud-init grow an interface for non-distro package
+ managers? this seems reusable
+ """
+ if not self.is_installed():
+ subp(["python3", "-m", "pip", "install", "--user", pkg_name])
+
+ def is_installed(self) -> bool:
+ stdout, _ = subp(["python3", "-m", "pip", "list"])
+ return "ansible" in stdout
+
+
+class AnsiblePullDistro(AnsiblePull):
+ def __init__(self, distro):
+ self.cmd_pull = ["ansible-pull"]
+ self.cmd_version = ["ansible-pull", "--version"]
+ self.distro = distro
+
+ def install(self, pkg_name: str):
+ if not self.is_installed():
+ self.distro.install_packages(pkg_name)
+
+ def is_installed(self) -> bool:
+ return bool(which("ansible"))
+
+
+def handle(name: str, cfg: dict, cloud: Cloud, _, __):
+ ansible_cfg: dict = cfg.get("ansible", {})
+ if ansible_cfg:
+ validate_config(ansible_cfg)
+ install = ansible_cfg["install-method"]
+ pull_cfg = ansible_cfg.get("pull")
+ if pull_cfg:
+ ansible: AnsiblePull
+ if install == "pip":
+ ansible = AnsiblePullPip()
+ else:
+ ansible = AnsiblePullDistro(cloud.distro)
+ ansible.install(ansible_cfg["package-name"])
+ ansible.check_deps()
+ run_ansible_pull(ansible, deepcopy(pull_cfg))
+
+
+def validate_config(cfg: dict):
+ required_keys = {
+ "install-method",
+ "package-name",
+ "pull/url",
+ "pull/playbook-name",
+ }
+ for key in required_keys:
+ if not get_cfg_by_path(cfg, key):
+ raise ValueError(f"Invalid value config key: '{key}'")
+
+ install = cfg["install-method"]
+ if install not in ("pip", "distro"):
+ raise ValueError("Invalid install method {install}")
+
+
+def filter_args(cfg: dict) -> dict:
+ """remove boolean false values"""
+ return {key: value for (key, value) in cfg.items() if value is not False}
+
+
+def run_ansible_pull(pull: AnsiblePull, cfg: dict):
+ playbook_name: str = cfg.pop("playbook-name")
+
+ v = pull.get_version()
+ if not v:
+ LOG.warning("Cannot parse ansible version")
+ elif v < Version(2, 7, 0):
+ # diff was added in commit edaa0b52450ade9b86b5f63097ce18ebb147f46f
+ if cfg.get("diff"):
+ raise ValueError(
+ f"Ansible version {v.major}.{v.minor}.{v.patch}"
+ "doesn't support --diff flag, exiting."
+ )
+ stdout = pull.pull(
+ *[
+ f"--{key}={value}" if value is not True else f"--{key}"
+ for key, value in filter_args(cfg).items()
+ ],
+ playbook_name,
+ )
+ if stdout:
+ sys.stdout.write(f"{stdout}")
diff --git a/cloudinit/config/cc_debug.py b/cloudinit/config/cc_debug.py
deleted file mode 100644
index bb5f5062..00000000
--- a/cloudinit/config/cc_debug.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright (C) 2013 Yahoo! Inc.
-#
-# This file is part of cloud-init. See LICENSE file for license information.
-
-"""Debug: Helper to debug cloud-init *internal* datastructures."""
-
-import copy
-from io import StringIO
-from textwrap import dedent
-
-from cloudinit import safeyaml, type_utils, util
-from cloudinit.config.schema import MetaSchema, get_meta_doc
-from cloudinit.distros import ALL_DISTROS
-from cloudinit.settings import PER_INSTANCE
-
-SKIP_KEYS = frozenset(["log_cfgs"])
-
-MODULE_DESCRIPTION = """\
-This module will enable for outputting various internal information that
-cloud-init sources provide to either a file or to the output console/log
-location that this cloud-init has been configured with when running.
-
-.. note::
- Log configurations are not output.
-"""
-
-meta: MetaSchema = {
- "id": "cc_debug",
- "name": "Debug",
- "title": "Helper to debug cloud-init *internal* datastructures",
- "description": MODULE_DESCRIPTION,
- "distros": [ALL_DISTROS],
- "frequency": PER_INSTANCE,
- "examples": [
- dedent(
- """\
- debug:
- verbose: true
- output: /tmp/my_debug.log
- """
- )
- ],
- "activate_by_schema_keys": [],
-}
-
-__doc__ = get_meta_doc(meta)
-
-
-def _make_header(text):
- header = StringIO()
- header.write("-" * 80)
- header.write("\n")
- header.write(text.center(80, " "))
- header.write("\n")
- header.write("-" * 80)
- header.write("\n")
- return header.getvalue()
-
-
-def _dumps(obj):
- text = safeyaml.dumps(obj, explicit_start=False, explicit_end=False)
- return text.rstrip()
-
-
-def handle(name, cfg, cloud, log, args):
- """Handler method activated by cloud-init."""
- verbose = util.get_cfg_by_path(cfg, ("debug", "verbose"), default=True)
- if args:
- # if args are provided (from cmdline) then explicitly set verbose
- out_file = args[0]
- verbose = True
- else:
- out_file = util.get_cfg_by_path(cfg, ("debug", "output"))
-
- if not verbose:
- log.debug("Skipping module named %s, verbose printing disabled", name)
- return
- # Clean out some keys that we just don't care about showing...
- dump_cfg = copy.deepcopy(cfg)
- for k in SKIP_KEYS:
- dump_cfg.pop(k, None)
- all_keys = list(dump_cfg)
- for k in all_keys:
- if k.startswith("_"):
- dump_cfg.pop(k, None)
- # Now dump it...
- to_print = StringIO()
- to_print.write(_make_header("Config"))
- to_print.write(_dumps(dump_cfg))
- to_print.write("\n")
- to_print.write(_make_header("MetaData"))
- to_print.write(_dumps(cloud.datasource.metadata))
- to_print.write("\n")
- to_print.write(_make_header("Misc"))
- to_print.write(
- "Datasource: %s\n" % (type_utils.obj_name(cloud.datasource))
- )
- to_print.write("Distro: %s\n" % (type_utils.obj_name(cloud.distro)))
- to_print.write("Hostname: %s\n" % (cloud.get_hostname(True).hostname))
- to_print.write("Instance ID: %s\n" % (cloud.get_instance_id()))
- to_print.write("Locale: %s\n" % (cloud.get_locale()))
- to_print.write("Launch IDX: %s\n" % (cloud.launch_index))
- contents = to_print.getvalue()
- content_to_file = []
- for line in contents.splitlines():
- line = "ci-info: %s\n" % (line)
- content_to_file.append(line)
- if out_file:
- util.write_file(out_file, "".join(content_to_file), 0o644, "w")
- else:
- util.multi_log("".join(content_to_file), console=True, stderr=False)
-
-
-# vi: ts=4 expandtab
diff --git a/cloudinit/config/cc_set_passwords.py b/cloudinit/config/cc_set_passwords.py
index 6acbb69e..fa7de944 100644
--- a/cloudinit/config/cc_set_passwords.py
+++ b/cloudinit/config/cc_set_passwords.py
@@ -171,7 +171,7 @@ def handle_ssh_pwauth(pw_auth, distro: Distro):
elif util.is_false(pw_auth):
cfg_val = "no"
else:
- bmsg = "Leaving SSH config '%s' unchanged." % cfg_name
+ bmsg = f"Leaving SSH config '{cfg_name}' unchanged."
if pw_auth is None or pw_auth.lower() == "unchanged":
LOG.debug("%s ssh_pwauth=%s", bmsg, pw_auth)
else:
diff --git a/cloudinit/config/cc_update_etc_hosts.py b/cloudinit/config/cc_update_etc_hosts.py
index 606b7860..56c52fe4 100644
--- a/cloudinit/config/cc_update_etc_hosts.py
+++ b/cloudinit/config/cc_update_etc_hosts.py
@@ -32,7 +32,7 @@ fqdn with a distribution dependent ip is present (i.e. ``ping <hostname>`` will
ping ``127.0.0.1`` or ``127.0.1.1`` or other ip).
.. note::
- if ``manage_etc_hosts`` is set ``true`` or ``template``, the contents
+ if ``manage_etc_hosts`` is set ``true``, the contents
of the hosts file will be updated every boot. To make any changes to
the hosts file persistent they must be made in
``/etc/cloud/templates/hosts.tmpl``
diff --git a/cloudinit/config/cc_users_groups.py b/cloudinit/config/cc_users_groups.py
index a84a6183..612f172b 100644
--- a/cloudinit/config/cc_users_groups.py
+++ b/cloudinit/config/cc_users_groups.py
@@ -141,7 +141,7 @@ meta: MetaSchema = {
ssh_import_id: [chad.smith]
user:
name: mynewdefault
- sudo: false
+ sudo: null
"""
),
],
diff --git a/cloudinit/config/cc_wireguard.py b/cloudinit/config/cc_wireguard.py
new file mode 100644
index 00000000..8cfbf6f1
--- /dev/null
+++ b/cloudinit/config/cc_wireguard.py
@@ -0,0 +1,295 @@
+# Author: Fabian Lichtenegger-Lukas <fabian.lichtenegger-lukas@nts.eu>
+# Author: Josef Tschiggerl <josef.tschiggerl@nts.eu>
+# This file is part of cloud-init. See LICENSE file for license information.
+
+"""Wireguard"""
+import re
+from textwrap import dedent
+
+from cloudinit import log as logging
+from cloudinit import subp, util
+from cloudinit.cloud import Cloud
+from cloudinit.config.schema import MetaSchema, get_meta_doc
+from cloudinit.settings import PER_INSTANCE
+
+MODULE_DESCRIPTION = dedent(
+ """\
+Wireguard module provides a dynamic interface for configuring
+Wireguard (as a peer or server) in an easy way.
+
+This module takes care of:
+ - writing interface configuration files
+ - enabling and starting interfaces
+ - installing wireguard-tools package
+ - loading wireguard kernel module
+ - executing readiness probes
+
+What's a readiness probe?\n
+The idea behind readiness probes is to ensure Wireguard connectivity
+before continuing the cloud-init process. This could be useful if you
+need access to specific services like an internal APT Repository Server
+(e.g Landscape) to install/update packages.
+
+Example:\n
+An edge device can't access the internet but uses cloud-init modules which
+will install packages (e.g landscape, packages, ubuntu_advantage). Those
+modules will fail due to missing internet connection. The "wireguard" module
+fixes that problem as it waits until all readinessprobes (which can be
+arbitrary commands - e.g. checking if a proxy server is reachable over
+Wireguard network) are finished before continuing the cloud-init
+"config" stage.
+
+.. note::
+ In order to use DNS with Wireguard you have to install ``resolvconf``
+ package or symlink it to systemd's ``resolvectl``, otherwise ``wg-quick``
+ commands will throw an error message that executable ``resolvconf`` is
+ missing which leads wireguard module to fail.
+"""
+)
+
+meta: MetaSchema = {
+ "id": "cc_wireguard",
+ "name": "Wireguard",
+ "title": "Module to configure Wireguard tunnel",
+ "description": MODULE_DESCRIPTION,
+ "distros": ["ubuntu"],
+ "frequency": PER_INSTANCE,
+ "activate_by_schema_keys": ["wireguard"],
+ "examples": [
+ dedent(
+ """\
+ # Configure one or more WG interfaces and provide optional readinessprobes
+ wireguard:
+ interfaces:
+ - name: wg0
+ config_path: /etc/wireguard/wg0.conf
+ content: |
+ [Interface]
+ PrivateKey = <private_key>
+ Address = <address>
+ [Peer]
+ PublicKey = <public_key>
+ Endpoint = <endpoint_ip>:<endpoint_ip_port>
+ AllowedIPs = <allowedip1>, <allowedip2>, ...
+ - name: wg1
+ config_path: /etc/wireguard/wg1.conf
+ content: |
+ [Interface]
+ PrivateKey = <private_key>
+ Address = <address>
+ [Peer]
+ PublicKey = <public_key>
+ Endpoint = <endpoint_ip>:<endpoint_ip_port>
+ AllowedIPs = <allowedip1>
+ readinessprobe:
+ - 'systemctl restart service'
+ - 'curl https://webhook.endpoint/example'
+ - 'nc -zv some-service-fqdn 443'
+ """
+ ),
+ ],
+}
+
+__doc__ = get_meta_doc(meta)
+
+LOG = logging.getLogger(__name__)
+
+REQUIRED_WG_INT_KEYS = frozenset(["name", "config_path", "content"])
+WG_CONFIG_FILE_MODE = 0o600
+NL = "\n"
+MIN_KERNEL_VERSION = (5, 6)
+
+
+def supplemental_schema_validation(wg_int: dict):
+ """Validate user-provided wg:interfaces option values.
+
+ This function supplements flexible jsonschema validation with specific
+ value checks to aid in triage of invalid user-provided configuration.
+
+ @param wg_int: Dict of configuration value under 'wg:interfaces'.
+
+ @raises: ValueError describing invalid values provided.
+ """
+ errors = []
+ missing = REQUIRED_WG_INT_KEYS.difference(set(wg_int.keys()))
+ if missing:
+ keys = ", ".join(sorted(missing))
+ errors.append(f"Missing required wg:interfaces keys: {keys}")
+
+ for key, value in sorted(wg_int.items()):
+ if key == "name" or key == "config_path" or key == "content":
+ if not isinstance(value, str):
+ errors.append(
+ f"Expected a string for wg:interfaces:{key}. Found {value}"
+ )
+
+ if errors:
+ raise ValueError(
+ f"Invalid wireguard interface configuration:{NL}{NL.join(errors)}"
+ )
+
+
+def write_config(wg_int: dict):
+ """Writing user-provided configuration into Wireguard
+ interface configuration file.
+
+ @param wg_int: Dict of configuration value under 'wg:interfaces'.
+
+ @raises: RuntimeError for issues writing of configuration file.
+ """
+ LOG.debug("Configuring Wireguard interface %s", wg_int["name"])
+ try:
+ LOG.debug("Writing wireguard config to file %s", wg_int["config_path"])
+ util.write_file(
+ wg_int["config_path"], wg_int["content"], mode=WG_CONFIG_FILE_MODE
+ )
+ except Exception as e:
+ raise RuntimeError(
+ "Failure writing Wireguard configuration file"
+ f' {wg_int["config_path"]}:{NL}{str(e)}'
+ ) from e
+
+
+def enable_wg(wg_int: dict, cloud: Cloud):
+ """Enable and start Wireguard interface
+
+ @param wg_int: Dict of configuration value under 'wg:interfaces'.
+
+ @raises: RuntimeError for issues enabling WG interface.
+ """
+ try:
+ LOG.debug("Enabling wg-quick@%s at boot", wg_int["name"])
+ cloud.distro.manage_service("enable", f'wg-quick@{wg_int["name"]}')
+ LOG.debug("Bringing up interface wg-quick@%s", wg_int["name"])
+ cloud.distro.manage_service("start", f'wg-quick@{wg_int["name"]}')
+ except subp.ProcessExecutionError as e:
+ raise RuntimeError(
+ f"Failed enabling/starting Wireguard interface(s):{NL}{str(e)}"
+ ) from e
+
+
+def readinessprobe_command_validation(wg_readinessprobes: list):
+ """Basic validation of user-provided probes
+
+ @param wg_readinessprobes: List of readinessprobe probe(s).
+
+ @raises: ValueError of wrong datatype provided for probes.
+ """
+ errors = []
+ pos = 0
+ for c in wg_readinessprobes:
+ if not isinstance(c, str):
+ errors.append(
+ f"Expected a string for readinessprobe at {pos}. Found {c}"
+ )
+ pos += 1
+
+ if errors:
+ raise ValueError(
+ f"Invalid readinessProbe commands:{NL}{NL.join(errors)}"
+ )
+
+
+def readinessprobe(wg_readinessprobes: list):
+ """Execute provided readiness probe(s)
+
+ @param wg_readinessprobes: List of readinessprobe probe(s).
+
+ @raises: ProcessExecutionError for issues during execution of probes.
+ """
+ errors = []
+ for c in wg_readinessprobes:
+ try:
+ LOG.debug("Running readinessprobe: '%s'", str(c))
+ subp.subp(c, capture=True, shell=True)
+ except subp.ProcessExecutionError as e:
+ errors.append(f"{c}: {e}")
+
+ if errors:
+ raise RuntimeError(
+ f"Failed running readinessprobe command:{NL}{NL.join(errors)}"
+ )
+
+
+def maybe_install_wireguard_packages(cloud: Cloud):
+ """Install wireguard packages and tools
+
+ @param cloud: Cloud object
+
+ @raises: Exception for issues during package
+ installation.
+ """
+
+ packages = ["wireguard-tools"]
+
+ if subp.which("wg"):
+ return
+
+ # Install DKMS when Kernel Verison lower 5.6
+ if util.kernel_version() < MIN_KERNEL_VERSION:
+ packages.append("wireguard")
+
+ try:
+ cloud.distro.update_package_sources()
+ except Exception:
+ util.logexc(LOG, "Package update failed")
+ raise
+ try:
+ cloud.distro.install_packages(packages)
+ except Exception:
+ util.logexc(LOG, "Failed to install wireguard-tools")
+ raise
+
+
+def load_wireguard_kernel_module():
+ """Load wireguard kernel module
+
+ @raises: ProcessExecutionError for issues modprobe
+ """
+ try:
+ out = subp.subp("lsmod", capture=True, shell=True)
+ if not re.search("wireguard", out.stdout.strip()):
+ LOG.debug("Loading wireguard kernel module")
+ subp.subp("modprobe wireguard", capture=True, shell=True)
+ except subp.ProcessExecutionError as e:
+ util.logexc(LOG, f"Could not load wireguard module:{NL}{str(e)}")
+ raise
+
+
+def handle(name: str, cfg: dict, cloud: Cloud, log, args: list):
+ wg_section = None
+
+ if "wireguard" in cfg:
+ LOG.debug("Found Wireguard section in config")
+ wg_section = cfg["wireguard"]
+ else:
+ LOG.debug(
+ "Skipping module named %s," " no 'wireguard' configuration found",
+ name,
+ )
+ return
+
+ # install wireguard tools, enable kernel module
+ maybe_install_wireguard_packages(cloud)
+ load_wireguard_kernel_module()
+
+ for wg_int in wg_section["interfaces"]:
+ # check schema
+ supplemental_schema_validation(wg_int)
+
+ # write wg config files
+ write_config(wg_int)
+
+ # enable wg interfaces
+ enable_wg(wg_int, cloud)
+
+ # parse and run readinessprobe parameters
+ if (
+ "readinessprobe" in wg_section
+ and wg_section["readinessprobe"] is not None
+ ):
+ wg_readinessprobes = wg_section["readinessprobe"]
+ readinessprobe_command_validation(wg_readinessprobes)
+ readinessprobe(wg_readinessprobes)
+ else:
+ LOG.debug("Skipping readinessprobe - no checks defined")
diff --git a/cloudinit/config/schema.py b/cloudinit/config/schema.py
index 1d95b858..d62073d0 100644
--- a/cloudinit/config/schema.py
+++ b/cloudinit/config/schema.py
@@ -9,6 +9,7 @@ import re
import sys
import textwrap
from collections import defaultdict
+from collections.abc import Iterable
from copy import deepcopy
from functools import partial
from itertools import chain
@@ -63,6 +64,7 @@ SCHEMA_LIST_ITEM_TMPL = (
SCHEMA_EXAMPLES_HEADER = "**Examples**::\n\n"
SCHEMA_EXAMPLES_SPACER_TEMPLATE = "\n # --- Example{0} ---"
DEPRECATED_KEY = "deprecated"
+DEPRECATED_PREFIX = "DEPRECATED: "
# type-annotate only if type-checking.
@@ -162,11 +164,10 @@ def is_schema_byte_string(checker, instance):
) or isinstance(instance, (bytes,))
-def _add_deprecation_msg(description: Optional[str] = None):
- msg = "DEPRECATED."
+def _add_deprecation_msg(description: Optional[str] = None) -> str:
if description:
- msg += f" {description}"
- return msg
+ return f"{DEPRECATED_PREFIX}{description}"
+ return DEPRECATED_PREFIX.replace(":", ".").strip()
def _validator_deprecated(
@@ -715,6 +716,31 @@ def _sort_property_order(value):
return 0
+def _flatten(xs):
+ for x in xs:
+ if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
+ yield from _flatten(x)
+ else:
+ yield x
+
+
+def _collect_subschema_types(property_dict: dict, multi_key: str) -> List[str]:
+ property_types = []
+ for subschema in property_dict.get(multi_key, {}):
+ if subschema.get(DEPRECATED_KEY): # don't document deprecated types
+ continue
+ if subschema.get("enum"):
+ property_types.extend(
+ [
+ f"``{_YAML_MAP.get(enum_value, enum_value)}``"
+ for enum_value in subschema.get("enum", [])
+ ]
+ )
+ elif subschema.get("type"):
+ property_types.append(subschema["type"])
+ return list(_flatten(property_types))
+
+
def _get_property_type(property_dict: dict, defs: dict) -> str:
"""Return a string representing a property type from a given
jsonschema.
@@ -723,18 +749,15 @@ def _get_property_type(property_dict: dict, defs: dict) -> str:
property_types = property_dict.get("type", [])
if not isinstance(property_types, list):
property_types = [property_types]
+ # A property_dict cannot have simultaneously more than one of these props
if property_dict.get("enum"):
property_types = [
f"``{_YAML_MAP.get(k, k)}``" for k in property_dict["enum"]
]
elif property_dict.get("oneOf"):
- property_types.extend(
- [
- subschema["type"]
- for subschema in property_dict.get("oneOf", {})
- if subschema.get("type")
- ]
- )
+ property_types.extend(_collect_subschema_types(property_dict, "oneOf"))
+ elif property_dict.get("anyOf"):
+ property_types.extend(_collect_subschema_types(property_dict, "anyOf"))
if len(property_types) == 1:
property_type = property_types[0]
else:
@@ -745,8 +768,14 @@ def _get_property_type(property_dict: dict, defs: dict) -> str:
if not isinstance(sub_property_types, list):
sub_property_types = [sub_property_types]
# Collect each item type
- for sub_item in items.get("oneOf", {}):
- sub_property_types.append(_get_property_type(sub_item, defs))
+ prune_undefined = bool(sub_property_types)
+ for sub_item in chain(items.get("oneOf", {}), items.get("anyOf", {})):
+ sub_type = _get_property_type(sub_item, defs)
+ if prune_undefined and sub_type == "UNDEFINED":
+ # If the main object has a type, then sub-schemas are allowed to
+ # omit the type. Prune subschema undefined types.
+ continue
+ sub_property_types.append(sub_type)
if sub_property_types:
if len(sub_property_types) == 1:
return f"{property_type} of {sub_property_types[0]}"
@@ -817,6 +846,45 @@ def _flatten_schema_all_of(src_cfg: dict):
src_cfg.update(sub_schema)
+def _get_property_description(prop_config: dict) -> str:
+ """Return accumulated property description.
+
+ Account for the following keys:
+ - top-level description key
+ - any description key present in each subitem under anyOf or allOf
+
+ Order and deprecated property description after active descriptions.
+ Add a trailing stop "." to any description not ending with ":".
+ """
+ prop_descr = prop_config.get("description", "")
+ oneOf = prop_config.get("oneOf", {})
+ anyOf = prop_config.get("anyOf", {})
+ descriptions = []
+ deprecated_descriptions = []
+ if prop_descr:
+ prop_descr = prop_descr.rstrip(".")
+ if not prop_config.get(DEPRECATED_KEY):
+ descriptions.append(prop_descr)
+ else:
+ deprecated_descriptions.append(_add_deprecation_msg(prop_descr))
+ for sub_item in chain(oneOf, anyOf):
+ if not sub_item.get("description"):
+ continue
+ if not sub_item.get(DEPRECATED_KEY):
+ descriptions.append(sub_item["description"].rstrip("."))
+ else:
+ deprecated_descriptions.append(
+ f"{DEPRECATED_PREFIX}{sub_item['description'].rstrip('.')}"
+ )
+ # order deprecated descrs last
+ description = ". ".join(chain(descriptions, deprecated_descriptions))
+ if description:
+ description = f" {description}"
+ if description[-1] != ":":
+ description += "."
+ return description
+
+
def _get_property_doc(schema: dict, defs: dict, prefix=" ") -> str:
"""Return restructured text describing the supported schema properties."""
new_prefix = prefix + " "
@@ -837,12 +905,7 @@ def _get_property_doc(schema: dict, defs: dict, prefix=" ") -> str:
if prop_config.get("hidden") is True:
continue # document nothing for this property
- deprecated = bool(prop_config.get(DEPRECATED_KEY))
- description = prop_config.get("description", "")
- if deprecated:
- description = _add_deprecation_msg(description)
- if description:
- description = " " + description
+ description = _get_property_description(prop_config)
# Define prop_name and description for SCHEMA_PROPERTY_TMPL
label = prop_config.get("label", prop_key)
diff --git a/cloudinit/config/schemas/schema-cloud-config-v1.json b/cloudinit/config/schemas/schema-cloud-config-v1.json
index 598d1c3c..b7124cb7 100644
--- a/cloudinit/config/schemas/schema-cloud-config-v1.json
+++ b/cloudinit/config/schemas/schema-cloud-config-v1.json
@@ -29,7 +29,8 @@
"expiredate": {
"default": null,
"description": "Optional. Date on which the user's account will be disabled. Default: ``null``",
- "type": "string"
+ "type": "string",
+ "format": "date"
},
"gecos": {
"description": "Optional comment about the user, usually a comma-separated string of real name and contact information",
@@ -153,12 +154,28 @@
"default": false
},
"sudo": {
- "type": ["boolean", "string"],
- "description": "Sudo rule to use or false. Absence of a sudo value or ``false`` will result in no sudo rules added for this user. DEPRECATED: the value ``false`` will be deprecated in the future release. Use ``null`` or no ``sudo`` key instead."
+ "oneOf": [
+ {
+ "type": ["string", "null"],
+ "description": "Sudo rule to use or false. Absence of a sudo value or ``null`` will result in no sudo rules added for this user."
+ },
+ {
+ "type": "boolean",
+ "deprecated": true,
+ "description": "The value ``false`` will be dropped after April 2027. Use ``null`` or no ``sudo`` key instead."
+ }
+ ]
},
"uid": {
"description": "The user's ID. Default is next available value.",
- "type": "integer"
+ "oneOf": [
+ {"type": "integer"},
+ {
+ "type": "string",
+ "description": "The use of ``string`` type will be dropped after April 2027. Use an ``integer`` instead.",
+ "deprecated": true
+ }
+ ]
}
}
},
@@ -230,6 +247,101 @@
},
"additionalProperties": true
},
+ "cc_ansible": {
+ "type": "object",
+ "properties": {
+ "ansible": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "install-method": {
+ "type": "string",
+ "default": "distro",
+ "enum": [
+ "distro",
+ "pip"
+ ],
+ "description": "The type of installation for ansible. It can be one of the following values:\n\n - ``distro``\n - ``pip``"
+ },
+ "package-name": {
+ "type": "string",
+ "default": "ansible"
+ },
+ "pull": {
+ "required": ["url", "playbook-name"],
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "accept-host-key": {
+ "type": "boolean",
+ "default": false
+ },
+ "clean": {
+ "type": "boolean",
+ "default": false
+ },
+ "full": {
+ "type": "boolean",
+ "default": false
+ },
+ "diff": {
+ "type": "boolean",
+ "default": false
+ },
+ "ssh-common-args": {
+ "type": "string"
+ },
+ "scp-extra-args": {
+ "type": "string"
+ },
+ "sftp-extra-args": {
+ "type": "string"
+ },
+ "private-key": {
+ "type": "string"
+ },
+ "checkout": {
+ "type": "string"
+ },
+ "module-path": {
+ "type": "string"
+ },
+ "timeout": {
+ "type": "string"
+ },
+ "url": {
+ "type": "string"
+ },
+ "connection": {
+ "type": "string"
+ },
+ "vault-id": {
+ "type": "string"
+ },
+ "vault-password-file": {
+ "type": "string"
+ },
+ "module-name": {
+ "type": "string"
+ },
+ "sleep": {
+ "type": "string"
+ },
+ "tags": {
+ "type": "string"
+ },
+ "skip-tags": {
+ "type": "string"
+ },
+ "playbook-name": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
"cc_apk_configure": {
"type": "object",
"properties": {
@@ -589,26 +701,6 @@
}
}
},
- "cc_debug": {
- "type": "object",
- "properties": {
- "debug": {
- "minProperties": 1,
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "verbose": {
- "description": "Should always be true for this module",
- "type": "boolean"
- },
- "output": {
- "description": "Location to write output. Defaults to console + log",
- "type": "string"
- }
- }
- }
- }
- },
"cc_disable_ec2_metadata": {
"type": "object",
"properties": {
@@ -649,7 +741,6 @@
"description": "Specifies the partition table type, either ``mbr`` or ``gpt``. Default: ``mbr``."
},
"layout": {
- "type": ["string", "boolean", "array"],
"default": false,
"oneOf": [
{"type": "string", "enum": ["remove"]},
@@ -769,9 +860,18 @@
"additionalProperties": false,
"properties": {
"mode": {
- "enum": [false, "auto", "growpart", "gpart", "off"],
"default": "auto",
- "description": "The utility to use for resizing. Default: ``auto``\n\nPossible options:\n\n* ``auto`` - Use any available utility\n\n* ``growpart`` - Use growpart utility\n\n* ``gpart`` - Use BSD gpart utility\n\n* ``off`` - Take no action\n\nSpecifying a boolean ``false`` value for this key is deprecated. Use ``off`` instead."
+ "description": "The utility to use for resizing. Default: ``auto``\n\nPossible options:\n\n* ``auto`` - Use any available utility\n\n* ``growpart`` - Use growpart utility\n\n* ``gpart`` - Use BSD gpart utility\n\n* ``off`` - Take no action.",
+ "oneOf": [
+ {
+ "enum": ["auto", "growpart", "gpart", "off"]
+ },
+ {
+ "enum": [false],
+ "description": "Specifying a boolean ``false`` value for this key is deprecated. Use ``off`` instead.",
+ "deprecated": true
+ }
+ ]
},
"devices": {
"type": "array",
@@ -807,7 +907,7 @@
"description": "Device to use as target for grub installation. If unspecified, ``grub-probe`` of ``/boot`` will be used to find the device"
},
"grub-pc/install_devices_empty": {
- "description": "Sets values for ``grub-pc/install_devices_empty``. If unspecified, will be set to ``true`` if ``grub-pc/install_devices`` is empty, otherwise ``false``. Using a non-boolean value for this field is deprecated.",
+ "description": "Sets values for ``grub-pc/install_devices_empty``. If unspecified, will be set to ``true`` if ``grub-pc/install_devices`` is empty, otherwise ``false``.",
"oneOf": [
{
"type": "boolean"
@@ -1385,7 +1485,7 @@
"type": "string",
"pattern": "^\\+?[0-9]+$",
"deprecated": true,
- "description": "Dropped after April 2027. Use ``now`` or integer type."
+ "description": "Use of string for this value will be dropped after April 2027. Use ``now`` or integer type."
},
{"enum": ["now"]}
]
@@ -1752,8 +1852,18 @@
"additionalProperties": false,
"properties": {
"enabled": {
- "type": ["boolean", "string"],
- "description": "Whether vendor data is enabled or not. Use of string for this value is DEPRECATED. Default: ``true``"
+ "description": "Whether vendor data is enabled or not. Default: ``true``",
+ "oneOf": [
+ {
+ "type": "boolean",
+ "default": true
+ },
+ {
+ "type": "string",
+ "description": "Use of string for this value is DEPRECATED. Use a boolean value instead.",
+ "deprecated": true
+ }
+ ]
},
"prefix": {
"type": ["array", "string"],
@@ -1828,9 +1938,13 @@
"ssh_pwauth": {
"oneOf": [
{"type": "boolean"},
- {"type": "string"}
+ {
+ "type": "string",
+ "description": "Use of non-boolean values for this field is DEPRECATED and will result in an error in a future version of cloud-init.",
+ "deprecated": true
+ }
],
- "description": "Sets whether or not to accept password authentication. ``true`` will enable password auth. ``false`` will disable. Default is to leave the value unchanged. Use of non-boolean values for this field is DEPRECATED and will result in an error in a future version of cloud-init."
+ "description": "Sets whether or not to accept password authentication. ``true`` will enable password auth. ``false`` will disable. Default is to leave the value unchanged."
},
"chpasswd": {
"type": "object",
@@ -2170,8 +2284,15 @@
"properties": {
"manage_etc_hosts": {
"default": false,
- "description": "Whether to manage ``/etc/hosts`` on the system. If ``true``, render the hosts file using ``/etc/cloud/templates/hosts.tmpl`` replacing ``$hostname`` and ``$fdqn``. If ``localhost``, append a ``127.0.1.1`` entry that resolves from FQDN and hostname every boot. Default: ``false``. DEPRECATED value ``template`` will be dropped, use ``true`` instead.",
- "enum": [true, false, "template", "localhost"]
+ "description": "Whether to manage ``/etc/hosts`` on the system. If ``true``, render the hosts file using ``/etc/cloud/templates/hosts.tmpl`` replacing ``$hostname`` and ``$fdqn``. If ``localhost``, append a ``127.0.1.1`` entry that resolves from FQDN and hostname every boot. Default: ``false``.",
+ "oneOf": [
+ {"enum": [true, false, "localhost"]},
+ {
+ "enum": ["template"],
+ "description": "Value ``template`` will be dropped after April 2027. Use ``true`` instead.",
+ "deprecated": true
+ }
+ ]
},
"fqdn": {
"type": "string",
@@ -2231,6 +2352,49 @@
}
}
},
+ "cc_wireguard": {
+ "type": "object",
+ "properties": {
+ "wireguard": {
+ "type": ["null", "object"],
+ "properties": {
+ "interfaces": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Name of the interface. Typically wgx (example: wg0)"
+ },
+ "config_path": {
+ "type": "string",
+ "description": "Path to configuration file of Wireguard interface"
+ },
+ "content": {
+ "type": "string",
+ "description": "Wireguard interface configuration. Contains key, peer, ..."
+ }
+ },
+ "additionalProperties": false
+ },
+ "minItems": 1
+ },
+ "readinessprobe": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "uniqueItems": true,
+ "description": "List of shell commands to be executed as probes."
+ }
+ },
+ "required": ["interfaces"],
+ "minProperties": 1,
+ "additionalProperties": false
+ }
+ }
+ },
"cc_write_files": {
"type": "object",
"properties": {
@@ -2478,6 +2642,7 @@
}
},
"allOf": [
+ { "$ref": "#/$defs/cc_ansible" },
{ "$ref": "#/$defs/cc_apk_configure" },
{ "$ref": "#/$defs/cc_apt_configure" },
{ "$ref": "#/$defs/cc_apt_pipelining" },
@@ -2486,7 +2651,6 @@
{ "$ref": "#/$defs/cc_byobu" },
{ "$ref": "#/$defs/cc_ca_certs" },
{ "$ref": "#/$defs/cc_chef" },
- { "$ref": "#/$defs/cc_debug" },
{ "$ref": "#/$defs/cc_disable_ec2_metadata" },
{ "$ref": "#/$defs/cc_disk_setup" },
{ "$ref": "#/$defs/cc_fan" },
@@ -2528,6 +2692,7 @@
{ "$ref": "#/$defs/cc_update_etc_hosts"},
{ "$ref": "#/$defs/cc_update_hostname"},
{ "$ref": "#/$defs/cc_users_groups"},
+ { "$ref": "#/$defs/cc_wireguard"},
{ "$ref": "#/$defs/cc_write_files"},
{ "$ref": "#/$defs/cc_yum_add_repo"},
{ "$ref": "#/$defs/cc_zypper_add_repo"},
diff --git a/cloudinit/distros/__init__.py b/cloudinit/distros/__init__.py
index ffa41093..7aafaa78 100644
--- a/cloudinit/distros/__init__.py
+++ b/cloudinit/distros/__init__.py
@@ -651,8 +651,16 @@ class Distro(persistence.CloudInitPickleMixin, metaclass=abc.ABCMeta):
self.lock_passwd(name)
# Configure sudo access
- if "sudo" in kwargs and kwargs["sudo"] is not False:
- self.write_sudo_rules(name, kwargs["sudo"])
+ if "sudo" in kwargs:
+ if kwargs["sudo"]:
+ self.write_sudo_rules(name, kwargs["sudo"])
+ elif kwargs["sudo"] is False:
+ LOG.warning(
+ "DEPRECATED: The user %s has a 'sudo' config value of"
+ " 'false' which will be dropped after April 2027."
+ " Use 'null' instead.",
+ name,
+ )
# Import SSH keys
if "ssh_authorized_keys" in kwargs:
diff --git a/cloudinit/distros/parsers/sys_conf.py b/cloudinit/distros/parsers/sys_conf.py
index 4132734c..cb6e583e 100644
--- a/cloudinit/distros/parsers/sys_conf.py
+++ b/cloudinit/distros/parsers/sys_conf.py
@@ -107,7 +107,7 @@ class SysConf(configobj.ConfigObj):
return "%s%s%s%s%s" % (
indent_string,
key,
- self._a_to_u("="),
+ "=",
val,
cmnt,
)
diff --git a/cloudinit/sources/DataSourceAzure.py b/cloudinit/sources/DataSourceAzure.py
index 5aea0c5c..865238cf 100644
--- a/cloudinit/sources/DataSourceAzure.py
+++ b/cloudinit/sources/DataSourceAzure.py
@@ -93,6 +93,7 @@ class MetadataType(Enum):
class PPSType(Enum):
NONE = "None"
+ OS_DISK = "PreprovisionedOSDisk"
RUNNING = "Running"
SAVABLE = "Savable"
UNKNOWN = "Unknown"
@@ -577,6 +578,9 @@ class DataSourceAzure(sources.DataSource):
if pps_type == PPSType.SAVABLE:
self._wait_for_all_nics_ready()
+ elif pps_type == PPSType.OS_DISK:
+ self._report_ready_for_pps(create_marker=False)
+ self._wait_for_pps_os_disk_shutdown()
md, userdata_raw, cfg, files = self._reprovision()
# fetch metadata again as it has changed after reprovisioning
@@ -970,7 +974,7 @@ class DataSourceAzure(sources.DataSource):
)
@azure_ds_telemetry_reporter
- def _report_ready_for_pps(self) -> None:
+ def _report_ready_for_pps(self, *, create_marker: bool = True) -> None:
"""Report ready for PPS, creating the marker file upon completion.
:raises sources.InvalidMetaDataException: On error reporting ready.
@@ -982,7 +986,17 @@ class DataSourceAzure(sources.DataSource):
report_diagnostic_event(msg, logger_func=LOG.error)
raise sources.InvalidMetaDataException(msg) from error
- self._create_report_ready_marker()
+ if create_marker:
+ self._create_report_ready_marker()
+
+ @azure_ds_telemetry_reporter
+ def _wait_for_pps_os_disk_shutdown(self):
+ report_diagnostic_event(
+ "Waiting for host to shutdown VM...",
+ logger_func=LOG.info,
+ )
+ sleep(31536000)
+ raise BrokenAzureDataSource("Shutdown failure for PPS disk.")
@azure_ds_telemetry_reporter
def _check_if_nic_is_primary(self, ifname):
@@ -1403,6 +1417,11 @@ class DataSourceAzure(sources.DataSource):
):
pps_type = PPSType.SAVABLE
elif (
+ ovf_cfg.get("PreprovisionedVMType", None) == PPSType.OS_DISK.value
+ or self._ppstype_from_imds(imds_md) == PPSType.OS_DISK.value
+ ):
+ pps_type = PPSType.OS_DISK
+ elif (
ovf_cfg.get("PreprovisionedVm") is True
or ovf_cfg.get("PreprovisionedVMType", None)
== PPSType.RUNNING.value
diff --git a/cloudinit/sources/helpers/azure.py b/cloudinit/sources/helpers/azure.py
index a2afbaad..19584a61 100644
--- a/cloudinit/sources/helpers/azure.py
+++ b/cloudinit/sources/helpers/azure.py
@@ -13,7 +13,7 @@ from contextlib import contextmanager
from datetime import datetime
from errno import ENOENT
from time import sleep, time
-from typing import List, Optional
+from typing import List, Optional, Union
from xml.etree import ElementTree
from xml.sax.saxutils import escape
@@ -448,7 +448,7 @@ class InvalidGoalStateXMLException(Exception):
class GoalState:
def __init__(
self,
- unparsed_xml: str,
+ unparsed_xml: Union[str, bytes],
azure_endpoint_client: AzureEndpointHttpClient,
need_certificate: bool = True,
) -> None:
@@ -888,7 +888,7 @@ class WALinuxAgentShim:
)
@azure_ds_telemetry_reporter
- def _get_raw_goal_state_xml_from_azure(self) -> str:
+ def _get_raw_goal_state_xml_from_azure(self) -> bytes:
"""Fetches the GoalState XML from the Azure endpoint and returns
the XML as a string.
@@ -916,7 +916,9 @@ class WALinuxAgentShim:
@azure_ds_telemetry_reporter
def _parse_raw_goal_state_xml(
- self, unparsed_goal_state_xml: str, need_certificate: bool
+ self,
+ unparsed_goal_state_xml: Union[str, bytes],
+ need_certificate: bool,
) -> GoalState:
"""Parses a GoalState XML string and returns a GoalState object.
diff --git a/cloudinit/ssh_util.py b/cloudinit/ssh_util.py
index ab4c63aa..5bbbc724 100644
--- a/cloudinit/ssh_util.py
+++ b/cloudinit/ssh_util.py
@@ -544,11 +544,28 @@ def parse_ssh_config_map(fname):
return ret
+def _includes_dconf(fname: str) -> bool:
+ if not os.path.isfile(fname):
+ return False
+ with open(fname, "r") as f:
+ for line in f:
+ if line.startswith(f"Include {fname}.d/*.conf"):
+ return True
+ return False
+
+
def update_ssh_config(updates, fname=DEF_SSHD_CFG):
"""Read fname, and update if changes are necessary.
@param updates: dictionary of desired values {Option: value}
@return: boolean indicating if an update was done."""
+ if _includes_dconf(fname):
+ if not os.path.isdir(f"{fname}.d"):
+ util.ensure_dir(f"{fname}.d", mode=0o755)
+ fname = os.path.join(f"{fname}.d", "50-cloud-init.conf")
+ if not os.path.isfile(fname):
+ # Ensure root read-only:
+ util.ensure_file(fname, 0o600)
lines = parse_ssh_config(fname)
changed = update_ssh_config_lines(lines=lines, updates=updates)
if changed:
diff --git a/cloudinit/url_helper.py b/cloudinit/url_helper.py
index 7dd98d95..291b8d4d 100644
--- a/cloudinit/url_helper.py
+++ b/cloudinit/url_helper.py
@@ -19,7 +19,7 @@ from errno import ENOENT
from functools import partial
from http.client import NOT_FOUND
from itertools import count
-from typing import Any, Callable, List, Tuple
+from typing import Any, Callable, Iterator, List, Optional, Tuple, Union
from urllib.parse import quote, urlparse, urlunparse
import requests
@@ -59,7 +59,7 @@ def combine_url(base, *add_ons):
return url
-def read_file_or_url(url, **kwargs):
+def read_file_or_url(url, **kwargs) -> Union["FileResponse", "UrlResponse"]:
"""Wrapper function around readurl to allow passing a file path as url.
When url is not a local file path, passthrough any kwargs to readurl.
@@ -113,11 +113,13 @@ class FileResponse(StringResponse):
class UrlResponse(object):
- def __init__(self, response):
+ def __init__(self, response: requests.Response):
self._response = response
@property
- def contents(self):
+ def contents(self) -> bytes:
+ if self._response.content is None:
+ return b""
return self._response.content
@property
@@ -144,6 +146,20 @@ class UrlResponse(object):
def __str__(self):
return self._response.text
+ def iter_content(
+ self, chunk_size: Optional[int] = 1, decode_unicode: bool = False
+ ) -> Iterator[bytes]:
+ """Iterates over the response data.
+
+ When stream=True is set on the request, this avoids reading the content
+ at once into memory for large responses.
+
+ :param chunk_size: Number of bytes it should read into memory.
+ :param decode_unicode: If True, content will be decoded using the best
+ available encoding based on the response.
+ """
+ yield from self._response.iter_content(chunk_size, decode_unicode)
+
class UrlError(IOError):
def __init__(self, cause, code=None, headers=None, url=None):
@@ -191,6 +207,7 @@ def readurl(
infinite=False,
log_req_resp=True,
request_method="",
+ stream: bool = False,
) -> UrlResponse:
"""Wrapper around requests.Session to read the url and retry if necessary
@@ -222,10 +239,13 @@ def readurl(
:param request_method: String passed as 'method' to Session.request.
Typically GET, or POST. Default: POST if data is provided, GET
otherwise.
+ :param stream: if False, the response content will be immediately
+ downloaded.
"""
url = _cleanurl(url)
req_args = {
"url": url,
+ "stream": stream,
}
req_args.update(_get_ssl_args(url, ssl_details))
req_args["allow_redirects"] = allow_redirects
diff --git a/cloudinit/util.py b/cloudinit/util.py
index e3a891e4..77e7f66b 100644
--- a/cloudinit/util.py
+++ b/cloudinit/util.py
@@ -34,7 +34,7 @@ import time
from base64 import b64decode, b64encode
from collections import deque, namedtuple
from errno import EACCES, ENOENT
-from functools import lru_cache
+from functools import lru_cache, total_ordering
from typing import Callable, List, TypeVar
from urllib import parse
@@ -2894,13 +2894,19 @@ def get_proc_ppid(pid):
ppid = 0
try:
contents = load_file("/proc/%s/stat" % pid, quiet=True)
+ if contents:
+ # see proc.5 for format
+ m = re.search(r"^\d+ \(.+\) [RSDZTtWXxKPI] (\d+)", str(contents))
+ if m:
+ ppid = int(m.group(1))
+ else:
+ LOG.warning(
+ "Unable to match parent pid of process pid=%s input: %s",
+ pid,
+ contents,
+ )
except IOError as e:
LOG.warning("Failed to load /proc/%s/stat. %s", pid, e)
- if contents:
- parts = contents.split(" ", 4)
- # man proc says
- # ppid %d (4) The PID of the parent.
- ppid = int(parts[3])
return ppid
@@ -2919,4 +2925,46 @@ def error(msg, rc=1, fmt="Error:\n{}", sys_exit=False):
return rc
-# vi: ts=4 expandtab
+@total_ordering
+class Version(namedtuple("Version", ["major", "minor", "patch", "rev"])):
+ def __new__(cls, major=-1, minor=-1, patch=-1, rev=-1):
+ """Default of -1 allows us to tiebreak in favor of the most specific
+ number"""
+ return super(Version, cls).__new__(cls, major, minor, patch, rev)
+
+ @classmethod
+ def from_str(cls, version: str):
+ return cls(*(list(map(int, version.split(".")))))
+
+ def __gt__(self, other):
+ return 1 == self._compare_version(other)
+
+ def __eq__(self, other):
+ return (
+ self.major == other.major
+ and self.minor == other.minor
+ and self.patch == other.patch
+ and self.rev == other.rev
+ )
+
+ def _compare_version(self, other) -> int:
+ """
+ return values:
+ 1: self > v2
+ -1: self < v2
+ 0: self == v2
+
+ to break a tie between 3.1.N and 3.1, always treat the more
+ specific number as larger
+ """
+ if self == other:
+ return 0
+ if self.major > other.major:
+ return 1
+ if self.minor > other.minor:
+ return 1
+ if self.patch > other.patch:
+ return 1
+ if self.rev > other.rev:
+ return 1
+ return -1
diff --git a/config/cloud.cfg.tmpl b/config/cloud.cfg.tmpl
index 5be80f53..a6096f47 100644
--- a/config/cloud.cfg.tmpl
+++ b/config/cloud.cfg.tmpl
@@ -111,6 +111,9 @@ cloud_init_modules:
# The modules that run in the 'config' stage
cloud_config_modules:
+{% if variant in ["ubuntu"] %}
+ - wireguard
+{% endif %}
{% if variant in ["ubuntu", "unknown", "debian"] %}
- snap
{% endif %}
@@ -172,6 +175,7 @@ cloud_final_modules:
- write-files-deferred
- puppet
- chef
+ - ansible
- mcollective
- salt-minion
- reset_rmc
diff --git a/debian/changelog b/debian/changelog
index 5cccfb9f..b19ae3d6 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,37 @@
+cloud-init (22.2-139-gb64ba456-0ubuntu1~22.10.1) kinetic; urgency=medium
+
+ * New upstream snapshot.
+ + test: use fake filesystem to avoid file removal (#1647)
+ + tox: Fix tip-flake8 and tip-mypy (#1635)
+ + config: Add wireguard config module (#1570)
+ [Fabian Lichtenegger-Lukas]
+ + tests: can run without azure-cli, tests expect inactive ansible
+ (#1643)
+ + typing: Type UrlResponse.contents (#1633)
+ + testing: fix references to `DEPRECATED.` (#1641)
+ + ssh_util: Handle sshd_config.d folder (LP: #1968873)
+ + schema: Enable deprecations in cc_update_etc_hosts (#1631)
+ + Add Ansible Config Module (#1579)
+ + util: Support Idle process state in get_proc_ppid() (#1637)
+ + schema: Enable deprecations in cc_growpart (#1628)
+ + schema: Enable deprecations in cc_users_groups (#1627)
+ + util: Fix error path and parsing in get_proc_ppid()
+ + main: avoid downloading full contents cmdline urls (#1606)
+ (LP: #1937319)
+ + schema: Enable deprecations in cc_scripts_vendor (#1629)
+ + schema: Enable deprecations in cc_set_passwords (#1630)
+ + sources/azure: add experimental support for preprovisioned os disks
+ (#1622) [Chris Patterson]
+ + Remove configobj a_to_u calls (#1632) [Stefano Rivera]
+ + cc_debug: Drop this module (#1614)
+ + schema: add aggregate descriptions in anyOf/oneOf (#1636)
+ + testing: migrate test_sshutil to pytest (#1617)
+ + testing: Fix test_ca_certs integration test (#1626)
+ + testing: add support for pycloudlib's pro images (#1604)
+ + testing: migrate test_cc_set_passwords to pytest (#1615)
+
+ -- Alberto Contreras <alberto.contreras@canonical.com> Wed, 10 Aug 2022 22:23:47 +0200
+
cloud-init (22.2-115-g6e498773-0ubuntu1~22.10.1) kinetic; urgency=medium
* d/control: lintian fixes:
diff --git a/doc/examples/cloud-config-ansible.txt b/doc/examples/cloud-config-ansible.txt
new file mode 100644
index 00000000..a3e7c273
--- /dev/null
+++ b/doc/examples/cloud-config-ansible.txt
@@ -0,0 +1,16 @@
+#cloud-config
+version: v1
+packages_update: true
+packages_upgrade: true
+
+# if you're already installing other packages, you may
+# wish to manually install ansible to avoid multiple calls
+# to your package manager
+packages:
+ - ansible
+ - git
+ansible:
+ install-method: pip
+ pull:
+ url: "https://github.com/holmanb/vmboot.git"
+ playbook-name: ubuntu.yml
diff --git a/doc/examples/cloud-config-wireguard.txt b/doc/examples/cloud-config-wireguard.txt
new file mode 100644
index 00000000..11920f24
--- /dev/null
+++ b/doc/examples/cloud-config-wireguard.txt
@@ -0,0 +1,29 @@
+#cloud-config
+# vim: syntax=yaml
+#
+# This is the configuration syntax that the wireguard module
+# will know how to understand.
+#
+#
+wireguard:
+ # All wireguard interfaces that should be created. Every interface will be named
+  # after the `name` parameter, and its config will be written to a file under `config_path`.
+ # `content` parameter should be set with a valid Wireguard configuration.
+ interfaces:
+ - name: wg0
+ config_path: /etc/wireguard/wg0.conf
+ content: |
+ [Interface]
+ PrivateKey = <private_key>
+ Address = <address>
+ [Peer]
+ PublicKey = <public_key>
+ Endpoint = <endpoint_ip>:<endpoint_ip_port>
+ AllowedIPs = <allowedip1>, <allowedip2>, ...
+ # The idea behind readiness probes is to ensure Wireguard connectivity before continuing
+ # the cloud-init process. This could be useful if you need access to specific services like
+ # an internal APT Repository Server (e.g Landscape) to install/update packages.
+ readinessprobe:
+ - 'systemctl restart service'
+ - 'curl https://webhook.endpoint/example'
+ - 'nc -zv apt-server-fqdn 443'
diff --git a/doc/rtd/topics/boot.rst b/doc/rtd/topics/boot.rst
index e0663760..db6621a7 100644
--- a/doc/rtd/topics/boot.rst
+++ b/doc/rtd/topics/boot.rst
@@ -149,7 +149,7 @@ accustomed to running after logging into a system should run correctly here.
Things that run here include:
* package installations
- * configuration management plugins (puppet, chef, salt-minion)
+ * configuration management plugins (ansible, puppet, chef, salt-minion)
* user-defined scripts (i.e. shell scripts passed as user-data)
For scripts external to cloud-init looking to wait until cloud-init is
diff --git a/doc/rtd/topics/examples.rst b/doc/rtd/topics/examples.rst
index 8ec8d8ab..353e22d8 100644
--- a/doc/rtd/topics/examples.rst
+++ b/doc/rtd/topics/examples.rst
@@ -41,6 +41,13 @@ Install and run `chef`_ recipes
:language: yaml
:linenos:
+Install and run `ansible`_
+==========================
+
+.. literalinclude:: ../../examples/cloud-config-ansible.txt
+ :language: yaml
+ :linenos:
+
Add primary apt repositories
============================
@@ -124,4 +131,4 @@ Create partitions and filesystems
.. _chef: http://www.chef.io/chef/
.. _puppet: http://puppetlabs.com/
-.. vi: textwidth=79
+.. _ansible: https://docs.ansible.com/ansible/latest/
diff --git a/doc/rtd/topics/format.rst b/doc/rtd/topics/format.rst
index a4b772a2..7d75d168 100644
--- a/doc/rtd/topics/format.rst
+++ b/doc/rtd/topics/format.rst
@@ -66,7 +66,7 @@ Kernel Command Line
When using the :ref:`datasource_nocloud` datasource, users can pass user data
via the kernel command line parameters. See the :ref:`datasource_nocloud`
-datasource documentation for more details.
+datasource and :ref:`kernel_cmdline` documentations for more details.
Gzip Compressed Content
=======================
diff --git a/doc/rtd/topics/instancedata.rst b/doc/rtd/topics/instancedata.rst
index 0c44d04e..cf2f7e10 100644
--- a/doc/rtd/topics/instancedata.rst
+++ b/doc/rtd/topics/instancedata.rst
@@ -4,6 +4,12 @@
Instance Metadata
*****************
+.. toctree::
+ :maxdepth: 1
+ :hidden:
+
+ kernel-cmdline.rst
+
What is instance data?
========================
@@ -16,6 +22,7 @@ comes from any number of sources:
* cloud-config seed files in the booted cloud image or distribution
* vendordata provided from files or cloud metadata services
* userdata provided at instance creation
+* :ref:`kernel_cmdline`
Each cloud provider presents unique configuration metadata in different
formats to the instance. Cloud-init provides a cache of any crawled metadata
diff --git a/doc/rtd/topics/kernel-cmdline.rst b/doc/rtd/topics/kernel-cmdline.rst
new file mode 100644
index 00000000..4aa02855
--- /dev/null
+++ b/doc/rtd/topics/kernel-cmdline.rst
@@ -0,0 +1,71 @@
+.. _kernel_cmdline:
+
+*******************
+Kernel Command Line
+*******************
+
+In order to allow an ephemeral, or otherwise pristine image to
+receive some configuration, cloud-init will read a url directed by
+the kernel command line and proceed as if its data had previously existed.
+
+This allows for configuring a meta-data service, or providing some other data.
+
+.. note::
+
+   Usage of the kernel command line is somewhat of a last resort,
+ as it requires knowing in advance the correct command line or modifying
+ the boot loader to append data.
+
+For example, when ``cloud-init init --local`` runs, it will check to
+see if ``cloud-config-url`` appears in key/value fashion
+in the kernel command line as in:
+
+.. code-block:: text
+
+ root=/dev/sda ro cloud-config-url=http://foo.bar.zee/abcde
+
+Cloud-init will then read the contents of the given url.
+If the content starts with ``#cloud-config``, it will store
+that data to the local filesystem in a static filename
+``/etc/cloud/cloud.cfg.d/91_kernel_cmdline_url.cfg``, and consider it as
+part of the config from that point forward.
+
+If that file exists already, it will not be overwritten, and the
+`cloud-config-url` parameter is completely ignored.
+
+Then, when the DataSource runs, it will find that config already available.
+
+So, in order to be able to configure the MAAS DataSource by controlling the
+kernel command line from outside the image, you can append:
+
+ * ``cloud-config-url=http://your.url.here/abcdefg``
+
+Then, have the following content at that url:
+
+.. code-block:: yaml
+
+ #cloud-config
+ datasource:
+ MAAS:
+ metadata_url: http://mass-host.localdomain/source
+ consumer_key: Xh234sdkljf
+ token_key: kjfhgb3n
+ token_secret: 24uysdfx1w4
+
+.. warning::
+
+   The `url` kernel command line key is deprecated.
+   Please use the `cloud-config-url` parameter instead.
+
+.. note::
+
+   Because ``cloud-config-url=`` is so generic, cloud-init requires the
+   fetched content to start with ``#cloud-config`` for it to be
+   considered valid; this avoids acting on false positives from
+   unrelated values on the kernel command line.
+
+.. note::
+
+   The ``cloud-config-url=`` request is an unauthenticated HTTP GET, and the URL may contain credentials.
+ It could be set up to be randomly generated and also check source
+ address in order to be more secure.
diff --git a/doc/rtd/topics/modules.rst b/doc/rtd/topics/modules.rst
index 4bfb27cf..8ffb984d 100644
--- a/doc/rtd/topics/modules.rst
+++ b/doc/rtd/topics/modules.rst
@@ -5,6 +5,7 @@ Module Reference
****************
.. contents:: Table of Contents
+.. automodule:: cloudinit.config.cc_ansible
.. automodule:: cloudinit.config.cc_apk_configure
.. automodule:: cloudinit.config.cc_apt_configure
.. automodule:: cloudinit.config.cc_apt_pipelining
@@ -12,7 +13,6 @@ Module Reference
.. automodule:: cloudinit.config.cc_byobu
.. automodule:: cloudinit.config.cc_ca_certs
.. automodule:: cloudinit.config.cc_chef
-.. automodule:: cloudinit.config.cc_debug
.. automodule:: cloudinit.config.cc_disable_ec2_metadata
.. automodule:: cloudinit.config.cc_disk_setup
.. automodule:: cloudinit.config.cc_fan
@@ -59,6 +59,7 @@ Module Reference
.. automodule:: cloudinit.config.cc_update_etc_hosts
.. automodule:: cloudinit.config.cc_update_hostname
.. automodule:: cloudinit.config.cc_users_groups
+.. automodule:: cloudinit.config.cc_wireguard
.. automodule:: cloudinit.config.cc_write_files
.. automodule:: cloudinit.config.cc_yum_add_repo
.. automodule:: cloudinit.config.cc_zypper_add_repo
diff --git a/doc/sources/kernel-cmdline.txt b/doc/sources/kernel-cmdline.txt
deleted file mode 100644
index 4cbfd217..00000000
--- a/doc/sources/kernel-cmdline.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-In order to allow an ephemeral, or otherwise pristine image to
-receive some configuration, cloud-init will read a url directed by
-the kernel command line and proceed as if its data had previously existed.
-
-This allows for configuring a meta-data service, or some other data.
-
-Note, that usage of the kernel command line is somewhat of a last resort,
-as it requires knowing in advance the correct command line or modifying
-the boot loader to append data.
-
-For example, when 'cloud-init start' runs, it will check to
-see if one of 'cloud-config-url' or 'url' appear in key/value fashion
-in the kernel command line as in:
- root=/dev/sda ro url=http://foo.bar.zee/abcde
-
-Cloud-init will then read the contents of the given url.
-If the content starts with '#cloud-config', it will store
-that data to the local filesystem in a static filename
-'/etc/cloud/cloud.cfg.d/91_kernel_cmdline_url.cfg', and consider it as
-part of the config from that point forward.
-
-If that file exists already, it will not be overwritten, and the url parameters
-completely ignored.
-
-Then, when the DataSource runs, it will find that config already available.
-
-So, in able to configure the MAAS DataSource by controlling the kernel
-command line from outside the image, you can append:
- url=http://your.url.here/abcdefg
-or
- cloud-config-url=http://your.url.here/abcdefg
-
-Then, have the following content at that url:
- #cloud-config
- datasource:
- MAAS:
- metadata_url: http://mass-host.localdomain/source
- consumer_key: Xh234sdkljf
- token_key: kjfhgb3n
- token_secret: 24uysdfx1w4
-
-Notes:
- * Because 'url=' is so very generic, in order to avoid false positives,
- cloud-init requires the content to start with '#cloud-config' in order
- for it to be considered.
- * The url= is un-authed http GET, and contains credentials
- It could be set up to be randomly generated and also check source
- address in order to be more secure
diff --git a/integration-requirements.txt b/integration-requirements.txt
index cd10c540..8b9d0db7 100644
--- a/integration-requirements.txt
+++ b/integration-requirements.txt
@@ -1,5 +1,5 @@
# PyPI requirements for cloud-init integration testing
# https://cloudinit.readthedocs.io/en/latest/topics/integration_tests.html
#
-pycloudlib @ git+https://github.com/canonical/pycloudlib.git@6eee33c9c4f630bc9c13b6e48f9ab36e7fb79ca6
+pycloudlib @ git+https://github.com/canonical/pycloudlib.git@68fe052baf6f32415b727d02ba2ba48b7a995bf2
pytest
diff --git a/packages/debian/cloud-init.postrm b/packages/debian/cloud-init.postrm
new file mode 100644
index 00000000..6cb9f54e
--- /dev/null
+++ b/packages/debian/cloud-init.postrm
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -e
+
+cleanup_sshd_config() {
+ rm -f "/etc/ssh/sshd_config.d/50-cloud-init.conf"
+}
+
+if [ "$1" = "purge" ]; then
+ cleanup_sshd_config
+fi
diff --git a/tests/integration_tests/bugs/test_lp1835584.py b/tests/integration_tests/bugs/test_lp1835584.py
index 8ecb1246..4d669ee2 100644
--- a/tests/integration_tests/bugs/test_lp1835584.py
+++ b/tests/integration_tests/bugs/test_lp1835584.py
@@ -28,15 +28,12 @@ https://bugs.launchpad.net/cloud-init/+bug/1835584
import re
import pytest
+from pycloudlib.cloud import ImageType
from tests.integration_tests.clouds import ImageSpecification, IntegrationCloud
from tests.integration_tests.conftest import get_validated_source
from tests.integration_tests.instances import IntegrationInstance
-IMG_AZURE_UBUNTU_PRO_FIPS_BIONIC = (
- "Canonical:0001-com-ubuntu-pro-bionic-fips:pro-fips-18_04:18.04.202010201"
-)
-
def _check_iid_insensitive_across_kernel_upgrade(
instance: IntegrationInstance,
@@ -71,6 +68,7 @@ def _check_iid_insensitive_across_kernel_upgrade(
@pytest.mark.azure
+@pytest.mark.integration_cloud_args(image_type=ImageType.PRO_FIPS)
def test_azure_kernel_upgrade_case_insensitive_uuid(
session_cloud: IntegrationCloud,
):
@@ -86,10 +84,7 @@ def test_azure_kernel_upgrade_case_insensitive_uuid(
pytest.skip(
"Provide CLOUD_INIT_SOURCE to install expected working cloud-init"
)
- image_id = IMG_AZURE_UBUNTU_PRO_FIPS_BIONIC
- with session_cloud.launch(
- launch_kwargs={"image_id": image_id}
- ) as instance:
+ with session_cloud.launch() as instance:
# We can't use setup_image fixture here because we want to avoid
# taking a snapshot or cleaning the booted machine after cloud-init
# upgrade.
diff --git a/tests/integration_tests/clouds.py b/tests/integration_tests/clouds.py
index eb9bd9cc..6b959ade 100644
--- a/tests/integration_tests/clouds.py
+++ b/tests/integration_tests/clouds.py
@@ -18,7 +18,7 @@ from pycloudlib import (
LXDVirtualMachine,
Openstack,
)
-from pycloudlib.cloud import BaseCloud
+from pycloudlib.cloud import BaseCloud, ImageType
from pycloudlib.lxd.cloud import _BaseLXD
from pycloudlib.lxd.instance import BaseInstance, LXDInstance
@@ -94,7 +94,12 @@ class IntegrationCloud(ABC):
datasource: str
cloud_instance: BaseCloud
- def __init__(self, settings=integration_settings):
+ def __init__(
+ self,
+ image_type: ImageType = ImageType.GENERIC,
+ settings=integration_settings,
+ ):
+ self._image_type = image_type
self.settings = settings
self.cloud_instance: BaseCloud = self._get_cloud_instance()
self.initial_image_id = self._get_initial_image()
@@ -119,11 +124,12 @@ class IntegrationCloud(ABC):
def _get_cloud_instance(self):
raise NotImplementedError
- def _get_initial_image(self):
+ def _get_initial_image(self, **kwargs) -> str:
image = ImageSpecification.from_os_image()
try:
- return self.cloud_instance.daily_image(image.image_id)
- except (ValueError, IndexError):
+ return self.cloud_instance.daily_image(image.image_id, **kwargs)
+ except (ValueError, IndexError) as ex:
+ log.debug("Exception while executing `daily_image`: %s", ex)
return image.image_id
def _perform_launch(self, launch_kwargs, **kwargs) -> BaseInstance:
@@ -208,6 +214,11 @@ class Ec2Cloud(IntegrationCloud):
def _get_cloud_instance(self):
return EC2(tag="ec2-integration-test")
+ def _get_initial_image(self, **kwargs) -> str:
+ return super()._get_initial_image(
+ image_type=self._image_type, **kwargs
+ )
+
def _perform_launch(self, launch_kwargs, **kwargs):
"""Use a dual-stack VPC for cloud-init integration testing."""
if "vpc" not in launch_kwargs:
@@ -234,6 +245,11 @@ class GceCloud(IntegrationCloud):
tag="gce-integration-test",
)
+ def _get_initial_image(self, **kwargs) -> str:
+ return super()._get_initial_image(
+ image_type=self._image_type, **kwargs
+ )
+
class AzureCloud(IntegrationCloud):
datasource = "azure"
@@ -242,6 +258,11 @@ class AzureCloud(IntegrationCloud):
def _get_cloud_instance(self):
return Azure(tag="azure-integration-test")
+ def _get_initial_image(self, **kwargs) -> str:
+ return super()._get_initial_image(
+ image_type=self._image_type, **kwargs
+ )
+
def destroy(self):
if self.settings.KEEP_INSTANCE:
log.info(
@@ -366,7 +387,7 @@ class OpenstackCloud(IntegrationCloud):
tag="openstack-integration-test",
)
- def _get_initial_image(self):
+ def _get_initial_image(self, **kwargs):
image = ImageSpecification.from_os_image()
try:
UUID(image.image_id)
diff --git a/tests/integration_tests/cmd/test_schema.py b/tests/integration_tests/cmd/test_schema.py
index 73adc2ac..0d92f146 100644
--- a/tests/integration_tests/cmd/test_schema.py
+++ b/tests/integration_tests/cmd/test_schema.py
@@ -20,9 +20,9 @@ class TestSchemaDeprecations:
log = class_client.read_from_file("/var/log/cloud-init.log")
verify_clean_log(log, ignore_deprecations=True)
assert "WARNING]: Deprecated cloud-config provided:" in log
- assert "apt_reboot_if_required: DEPRECATED." in log
- assert "apt_update: DEPRECATED." in log
- assert "apt_upgrade: DEPRECATED." in log
+ assert "apt_reboot_if_required: DEPRECATED" in log
+ assert "apt_update: DEPRECATED" in log
+ assert "apt_upgrade: DEPRECATED" in log
def test_schema_deprecations(self, class_client: IntegrationInstance):
"""Test schema behavior with deprecated configs."""
@@ -56,9 +56,9 @@ class TestSchemaDeprecations:
apt_reboot_if_required: false\t\t# D3
# Deprecations: -------------
- # D1: DEPRECATED. Dropped after April 2027. Use ``package_update``. Default: ``false``
- # D2: DEPRECATED. Dropped after April 2027. Use ``package_upgrade``. Default: ``false``
- # D3: DEPRECATED. Dropped after April 2027. Use ``package_reboot_if_required``. Default: ``false``
+ # D1: DEPRECATED: Dropped after April 2027. Use ``package_update``. Default: ``false``
+ # D2: DEPRECATED: Dropped after April 2027. Use ``package_upgrade``. Default: ``false``
+ # D3: DEPRECATED: Dropped after April 2027. Use ``package_reboot_if_required``. Default: ``false``
Valid cloud-config: /root/user-data""" # noqa: E501
diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py
index a90a5d49..580fd6ad 100644
--- a/tests/integration_tests/conftest.py
+++ b/tests/integration_tests/conftest.py
@@ -7,7 +7,7 @@ import sys
from contextlib import contextmanager
from pathlib import Path
from tarfile import TarFile
-from typing import Dict, Type
+from typing import Dict, Iterator, Type
import pytest
from pycloudlib.lxd.instance import LXDInstance
@@ -92,8 +92,10 @@ def disable_subp_usage(request):
pass
-@pytest.fixture(scope="session")
-def session_cloud():
+@contextmanager
+def _session_cloud(
+ request: pytest.FixtureRequest,
+) -> Iterator[IntegrationCloud]:
if integration_settings.PLATFORM not in platforms.keys():
raise ValueError(
"{} is an invalid PLATFORM specified in settings. "
@@ -102,7 +104,17 @@ def session_cloud():
)
)
- cloud = platforms[integration_settings.PLATFORM]()
+ integration_cloud_marker = request.node.get_closest_marker(
+ "integration_cloud_args"
+ )
+ cloud_args = []
+ cloud_kwargs = {}
+ if integration_cloud_marker:
+ cloud_args = integration_cloud_marker.args
+ cloud_kwargs = integration_cloud_marker.kwargs
+ cloud = platforms[integration_settings.PLATFORM](
+ *cloud_args, **cloud_kwargs
+ )
cloud.emit_settings_to_log()
yield cloud
@@ -110,6 +122,30 @@ def session_cloud():
cloud.destroy()
+@pytest.fixture
+def session_cloud(
+ request: pytest.FixtureRequest,
+) -> Iterator[IntegrationCloud]:
+ with _session_cloud(request) as cloud:
+ yield cloud
+
+
+@pytest.fixture(scope="module")
+def module_session_cloud(
+ request: pytest.FixtureRequest,
+) -> Iterator[IntegrationCloud]:
+ with _session_cloud(request) as cloud:
+ yield cloud
+
+
+@pytest.fixture(scope="class")
+def class_session_cloud(
+ request: pytest.FixtureRequest,
+) -> Iterator[IntegrationCloud]:
+ with _session_cloud(request) as cloud:
+ yield cloud
+
+
def get_validated_source(
session_cloud: IntegrationCloud,
source=integration_settings.CLOUD_INIT_SOURCE,
@@ -135,13 +171,11 @@ def get_validated_source(
)
-@pytest.fixture(scope="session")
-def setup_image(session_cloud: IntegrationCloud, request):
+def _setup_image(session_cloud: IntegrationCloud, request):
"""Setup the target environment with the correct version of cloud-init.
So we can launch instances / run tests with the correct image
"""
-
source = get_validated_source(session_cloud)
if not source.installs_new_version():
return
@@ -159,6 +193,21 @@ def setup_image(session_cloud: IntegrationCloud, request):
request.addfinalizer(session_cloud.delete_snapshot)
+@pytest.fixture
+def setup_image(session_cloud: IntegrationCloud, request):
+ _setup_image(session_cloud, request)
+
+
+@pytest.fixture(scope="module")
+def module_setup_image(module_session_cloud: IntegrationCloud, request):
+ _setup_image(module_session_cloud, request)
+
+
+@pytest.fixture(scope="class")
+def class_setup_image(class_session_cloud: IntegrationCloud, request):
+ _setup_image(class_session_cloud, request)
+
+
def _collect_logs(
instance: IntegrationInstance, node_id: str, test_failed: bool
):
@@ -218,7 +267,9 @@ def _collect_logs(
@contextmanager
-def _client(request, fixture_utils, session_cloud: IntegrationCloud):
+def _client(
+ request, fixture_utils, session_cloud: IntegrationCloud
+) -> Iterator[IntegrationInstance]:
"""Fixture implementation for the client fixtures.
Launch the dynamic IntegrationClient instance using any provided
@@ -268,23 +319,29 @@ def _client(request, fixture_utils, session_cloud: IntegrationCloud):
@pytest.fixture
-def client(request, fixture_utils, session_cloud, setup_image):
+def client(
+ request, fixture_utils, session_cloud, setup_image
+) -> Iterator[IntegrationInstance]:
"""Provide a client that runs for every test."""
with _client(request, fixture_utils, session_cloud) as client:
yield client
@pytest.fixture(scope="module")
-def module_client(request, fixture_utils, session_cloud, setup_image):
+def module_client(
+ request, fixture_utils, module_session_cloud, module_setup_image
+) -> Iterator[IntegrationInstance]:
"""Provide a client that runs once per module."""
- with _client(request, fixture_utils, session_cloud) as client:
+ with _client(request, fixture_utils, module_session_cloud) as client:
yield client
@pytest.fixture(scope="class")
-def class_client(request, fixture_utils, session_cloud, setup_image):
+def class_client(
+ request, fixture_utils, class_session_cloud, class_setup_image
+) -> Iterator[IntegrationInstance]:
"""Provide a client that runs once per class."""
- with _client(request, fixture_utils, session_cloud) as client:
+ with _client(request, fixture_utils, class_session_cloud) as client:
yield client
diff --git a/tests/integration_tests/modules/test_ansible.py b/tests/integration_tests/modules/test_ansible.py
new file mode 100644
index 00000000..0328781e
--- /dev/null
+++ b/tests/integration_tests/modules/test_ansible.py
@@ -0,0 +1,115 @@
+import pytest
+
+from tests.integration_tests.util import verify_clean_log
+
+# This works by setting up a local repository and web server
+# daemon on the first boot. Second boot should succeed
+# with the running web service and git repo configured.
+# This instrumentation allows the test to run self-contained
+# without network access or external git repos.
+
+REPO_D = "/root/playbooks"
+USER_DATA = """\
+#cloud-config
+version: v1
+package_update: true
+package_upgrade: true
+packages:
+ - git
+ - python3-pip
+write_files:
+ - path: /etc/systemd/system/repo_server.service
+ content: |
+ [Unit]
+ Description=Serve a local git repo
+
+ [Service]
+ ExecStart=/usr/bin/env python3 -m http.server --directory \
+/root/playbooks/.git
+ Restart=on-failure
+
+ [Install]
+ WantedBy=cloud-final.service
+
+ - path: /root/playbooks/ubuntu.yml
+ content: |
+ ---
+ - hosts: 127.0.0.1
+ connection: local
+ become: true
+ vars:
+ packages:
+ - git
+ - python3-pip
+ roles:
+ - apt
+ - path: /root/playbooks/roles/apt/tasks/main.yml
+ content: |
+ ---
+ - name: "install packages"
+ apt:
+ name: "*"
+ update_cache: yes
+ cache_valid_time: 3600
+ - name: "install packages"
+ apt:
+ name:
+ - "{{ item }}"
+ state: latest
+ loop: "{{ packages }}"
+
+"""
+INSTALL_METHOD = """
+ansible:
+ install-method: {method}
+ package-name: {package}
+ pull:
+ url: "http://0.0.0.0:8000/"
+ playbook-name: ubuntu.yml
+ full: true
+runcmd:
+ - "systemctl enable repo_server.service"
+"""
+
+SETUP_REPO = f"cd {REPO_D} &&\
+git init {REPO_D} &&\
+git add {REPO_D}/roles/apt/tasks/main.yml {REPO_D}/ubuntu.yml &&\
+git commit -m auto &&\
+git update-server-info"
+
+
+def _test_ansible_pull_from_local_server(my_client):
+
+ assert my_client.execute(SETUP_REPO).ok
+ my_client.execute("cloud-init clean --logs")
+ my_client.restart()
+ log = my_client.read_from_file("/var/log/cloud-init.log")
+
+ # These ensure the repo used for ansible-pull works as expected
+ assert my_client.execute("wget http://0.0.0.0:8000").ok
+ assert my_client.execute("git clone http://0.0.0.0:8000/").ok
+ assert "(dead)" not in my_client.execute(
+ "systemctl status repo_server.service"
+ )
+
+ # Following assertions verify ansible behavior itself
+ verify_clean_log(log)
+ output_log = my_client.read_from_file("/var/log/cloud-init-output.log")
+ assert "ok=3" in output_log
+ assert "SUCCESS: config-ansible ran successfully" in log
+
+
+@pytest.mark.user_data(
+ USER_DATA + INSTALL_METHOD.format(package="ansible-core", method="pip")
+)
+class TestAnsiblePullPip:
+ def test_ansible_pull_pip(self, class_client):
+ _test_ansible_pull_from_local_server(class_client)
+
+
+@pytest.mark.user_data(
+ USER_DATA + INSTALL_METHOD.format(package="ansible", method="distro")
+)
+class TestAnsiblePullDistro:
+ def test_ansible_pull_distro(self, class_client):
+ _test_ansible_pull_from_local_server(class_client)
diff --git a/tests/integration_tests/modules/test_ca_certs.py b/tests/integration_tests/modules/test_ca_certs.py
index 37e5910b..8d18fb76 100644
--- a/tests/integration_tests/modules/test_ca_certs.py
+++ b/tests/integration_tests/modules/test_ca_certs.py
@@ -92,11 +92,16 @@ class TestCaCerts:
in checksum
)
- def test_clean_logs(self, class_client: IntegrationInstance):
+ def test_clean_log(self, class_client: IntegrationInstance):
+ """Verify no errors, no deprecations and correct inactive modules in
+ log.
+ """
log = class_client.read_from_file("/var/log/cloud-init.log")
verify_clean_log(log, ignore_deprecations=False)
- diff = {
+
+ expected_inactive = {
"apt-pipelining",
+ "ansible",
"bootcmd",
"chef",
"disable-ec2-metadata",
@@ -120,9 +125,24 @@ class TestCaCerts:
"ubuntu-advantage",
"ubuntu-drivers",
"update_etc_hosts",
+ "wireguard",
"write-files",
"write-files-deferred",
- }.symmetric_difference(get_inactive_modules(log))
+ }
+
+ # Remove modules that run independent from user-data
+ if class_client.settings.PLATFORM == "azure":
+ expected_inactive.discard("disk_setup")
+ elif class_client.settings.PLATFORM == "gce":
+ expected_inactive.discard("ntp")
+ elif class_client.settings.PLATFORM == "lxd_vm":
+ if class_client.settings.OS_IMAGE == "bionic":
+ expected_inactive.discard("write-files")
+ expected_inactive.discard("write-files-deferred")
+
+ diff = expected_inactive.symmetric_difference(
+ get_inactive_modules(log)
+ )
assert (
not diff
), f"Expected inactive modules do not match, diff: {diff}"
diff --git a/tests/integration_tests/modules/test_set_password.py b/tests/integration_tests/modules/test_set_password.py
index 1ac131d9..4e0ee122 100644
--- a/tests/integration_tests/modules/test_set_password.py
+++ b/tests/integration_tests/modules/test_set_password.py
@@ -11,6 +11,7 @@ only specify one user-data per instance.
import pytest
import yaml
+from tests.integration_tests.clouds import ImageSpecification
from tests.integration_tests.decorators import retry
from tests.integration_tests.util import get_console_log
@@ -179,12 +180,22 @@ class Mixin:
if "name" in user_dict:
assert f'{user_dict["name"]}:' in shadow
- def test_sshd_config(self, class_client):
- """Test that SSH password auth is enabled."""
- sshd_config = class_client.read_from_file("/etc/ssh/sshd_config")
+ def test_sshd_config_file(self, class_client):
+ """Test that SSH config is written in the correct file."""
+ if ImageSpecification.from_os_image().release in {"bionic"}:
+ sshd_file_target = "/etc/ssh/sshd_config"
+ else:
+ sshd_file_target = "/etc/ssh/sshd_config.d/50-cloud-init.conf"
+ assert class_client.execute(f"ls {sshd_file_target}").ok
+ sshd_config = class_client.read_from_file(sshd_file_target)
# We look for the exact line match, to avoid a commented line matching
assert "PasswordAuthentication yes" in sshd_config.splitlines()
+ def test_sshd_config(self, class_client):
+ """Test that SSH password auth is enabled."""
+ sshd_config = class_client.execute("sshd -T").stdout
+ assert "passwordauthentication yes" in sshd_config
+
@pytest.mark.user_data(LIST_USER_DATA)
class TestPasswordList(Mixin):
diff --git a/tests/integration_tests/modules/test_ssh_keys_provided.py b/tests/integration_tests/modules/test_ssh_keys_provided.py
index b79f18eb..8e73267a 100644
--- a/tests/integration_tests/modules/test_ssh_keys_provided.py
+++ b/tests/integration_tests/modules/test_ssh_keys_provided.py
@@ -9,6 +9,8 @@ system.
import pytest
+from tests.integration_tests.clouds import ImageSpecification
+
USER_DATA = """\
#cloud-config
disable_root: false
@@ -110,10 +112,6 @@ class TestSshKeysProvided:
"BP4Phn3L8I7Vqh7lmHKcOfIokEvSEbHDw83Y3JloAAAAD",
),
(
- "/etc/ssh/sshd_config",
- "HostCertificate /etc/ssh/ssh_host_rsa_key-cert.pub",
- ),
- (
"/etc/ssh/ssh_host_ecdsa_key.pub",
"AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAAB"
"BBFsS5Tvky/IC/dXhE/afxxU",
@@ -138,3 +136,14 @@ class TestSshKeysProvided:
def test_ssh_provided_keys(self, config_path, expected_out, class_client):
out = class_client.read_from_file(config_path).strip()
assert expected_out in out
+
+ @pytest.mark.parametrize(
+ "expected_out", ["HostCertificate /etc/ssh/ssh_host_rsa_key-cert.pub"]
+ )
+ def test_sshd_config(self, expected_out, class_client):
+ if ImageSpecification.from_os_image().release in {"bionic"}:
+ sshd_config_path = "/etc/ssh/sshd_config"
+ else:
+ sshd_config_path = "/etc/ssh/sshd_config.d/50-cloud-init.conf"
+ sshd_config = class_client.read_from_file(sshd_config_path).strip()
+ assert expected_out in sshd_config
diff --git a/tests/integration_tests/modules/test_users_groups.py b/tests/integration_tests/modules/test_users_groups.py
index 8fa37bb4..91eca345 100644
--- a/tests/integration_tests/modules/test_users_groups.py
+++ b/tests/integration_tests/modules/test_users_groups.py
@@ -10,6 +10,7 @@ import pytest
from tests.integration_tests.clouds import ImageSpecification
from tests.integration_tests.instances import IntegrationInstance
+from tests.integration_tests.util import verify_clean_log
USER_DATA = """\
#cloud-config
@@ -25,7 +26,7 @@ users:
gecos: Foo B. Bar
primary_group: foobar
groups: users
- expiredate: 2038-01-19
+ expiredate: '2038-01-19'
lock_passwd: false
passwd: $6$j212wezy$7H/1LT4f9/N3wpgNunhsIqtMj62OKiS3nyNwuizouQc3u7MbYCarYe\
AHWYPYb2FT.lbioDm2RrkJPb9BZMN1O/
@@ -36,12 +37,13 @@ AHWYPYb2FT.lbioDm2RrkJPb9BZMN1O/
lock_passwd: true
- name: cloudy
gecos: Magic Cloud App Daemon User
- inactive: true
+ inactive: '0'
system: true
- name: eric
+ sudo: null
uid: 1742
- name: archivist
- uid: '1743'
+ uid: 1743
"""
@@ -97,6 +99,8 @@ class TestUsersGroups:
def test_user_root_in_secret(self, class_client):
"""Test root user is in 'secret' group."""
+ log = class_client.read_from_file("/var/log/cloud-init.log")
+ verify_clean_log(log)
output = class_client.execute("groups root").stdout
_, groups_str = output.split(":", maxsplit=1)
groups = groups_str.split()
diff --git a/tests/integration_tests/modules/test_wireguard.py b/tests/integration_tests/modules/test_wireguard.py
new file mode 100644
index 00000000..2e97c1fb
--- /dev/null
+++ b/tests/integration_tests/modules/test_wireguard.py
@@ -0,0 +1,117 @@
+"""Integration test for the wireguard module."""
+import pytest
+from pycloudlib.lxd.instance import LXDInstance
+
+from cloudinit.subp import subp
+from tests.integration_tests.instances import IntegrationInstance
+
+ASCII_TEXT = "ASCII text"
+
+USER_DATA = """\
+#cloud-config
+wireguard:
+ interfaces:
+ - name: wg0
+ config_path: /etc/wireguard/wg0.conf
+ content: |
+ [Interface]
+ Address = 192.168.254.1/32
+ ListenPort = 51820
+ PrivateKey = iNlmgtGo6yiFhD9TuVnx/qJSp+C5Cwg4wwPmOJwlZXI=
+
+ [Peer]
+ PublicKey = 6PewunPjxlUq/0xvbVxklN2p73YIytfjxpoIEohCukY=
+ AllowedIPs = 192.168.254.2/32
+ - name: wg1
+ config_path: /etc/wireguard/wg1.conf
+ content: |
+ [Interface]
+ PrivateKey = GGLU4+5vIcK9lGyfz4AJn9fR5/FN/6sf4Fd5chZ16Vc=
+ Address = 192.168.254.2/24
+
+ [Peer]
+ PublicKey = 2as8z3EDjSsfFEkvOQGVnJ1Hv+h1jRAh2BKJg+DHvGk=
+ Endpoint = 127.0.0.1:51820
+ AllowedIPs = 0.0.0.0/0
+ readinessprobe:
+ - ping -qc 5 192.168.254.1 > /dev/null 2>&1
+ - echo $? > /tmp/ping
+"""
+
+
+def load_wireguard_kernel_module_lxd(instance: LXDInstance):
+ subp(
+ "lxc config set {} linux.kernel_modules wireguard".format(
+ instance.name
+ ).split()
+ )
+
+
+@pytest.mark.ci
+@pytest.mark.user_data(USER_DATA)
+@pytest.mark.lxd_vm
+@pytest.mark.gce
+@pytest.mark.ec2
+@pytest.mark.azure
+@pytest.mark.openstack
+@pytest.mark.oci
+@pytest.mark.ubuntu
+class TestWireguard:
+ @pytest.mark.parametrize(
+ "cmd,expected_out",
+ (
+ # check if wireguard module is loaded
+ ("lsmod | grep '^wireguard' | awk '{print $1}'", "wireguard"),
+ # test if file was written for wg0
+ (
+ "stat -c '%N' /etc/wireguard/wg0.conf",
+ r"'/etc/wireguard/wg0.conf'",
+ ),
+ # check permissions for wg0
+ ("stat -c '%U %a' /etc/wireguard/wg0.conf", r"root 600"),
+ # ASCII check wg1
+ ("file /etc/wireguard/wg1.conf", ASCII_TEXT),
+ # md5sum check wg1
+ (
+ "md5sum </etc/wireguard/wg1.conf",
+ "cff31c9879da0967313d3f561aed766b",
+ ),
+ # sha256sum check
+ (
+ "sha256sum </etc/wireguard/wg1.conf",
+ "8443055d1442d051588beb03f7895b58"
+ "269196eb9916617969dc5220c1a90d54",
+ ),
+ # check if systemd started wg0
+ ("systemctl is-active wg-quick@wg0", "active"),
+ # check if systemd started wg1
+ ("systemctl is-active wg-quick@wg1", "active"),
+ # check readiness probe (ping wg0)
+ ("cat /tmp/ping", "0"),
+ ),
+ )
+ def test_wireguard(
+ self, cmd, expected_out, class_client: IntegrationInstance
+ ):
+ result = class_client.execute(cmd)
+ assert result.ok
+ assert expected_out in result.stdout
+
+ def test_wireguard_tools_installed(
+ self, class_client: IntegrationInstance
+ ):
+ """Test that 'wg version' succeeds, indicating installation."""
+ assert class_client.execute("wg version").ok
+
+
+@pytest.mark.ci
+@pytest.mark.user_data(USER_DATA)
+@pytest.mark.lxd_setup.with_args(load_wireguard_kernel_module_lxd)
+@pytest.mark.lxd_container
+@pytest.mark.ubuntu
+class TestWireguardWithoutKmod:
+ def test_wireguard_tools_installed(
+ self, class_client: IntegrationInstance
+ ):
+ """Test that 'wg version' succeeds, indicating installation."""
+ assert class_client.execute("wg version").ok
diff --git a/tests/unittests/config/test_apt_configure_sources_list_v1.py b/tests/unittests/config/test_apt_configure_sources_list_v1.py
index d4ade106..52964e10 100644
--- a/tests/unittests/config/test_apt_configure_sources_list_v1.py
+++ b/tests/unittests/config/test_apt_configure_sources_list_v1.py
@@ -49,7 +49,7 @@ EXPECTED_CONVERTED_CONTENT = """## Note, this file is written by cloud-init on f
deb http://archive.ubuntu.com/ubuntu/ fakerelease main restricted
deb-src http://archive.ubuntu.com/ubuntu/ fakerelease main restricted
# FIND_SOMETHING_SPECIAL
-"""
+""" # noqa: E501
class TestAptSourceConfigSourceList(t_help.FilesystemMockingTestCase):
diff --git a/tests/unittests/config/test_cc_ansible.py b/tests/unittests/config/test_cc_ansible.py
new file mode 100644
index 00000000..6f71add3
--- /dev/null
+++ b/tests/unittests/config/test_cc_ansible.py
@@ -0,0 +1,362 @@
+import re
+from copy import deepcopy
+from textwrap import dedent
+from unittest import mock
+from unittest.mock import call
+
+from pytest import mark, param, raises
+
+from cloudinit import util
+from cloudinit.config import cc_ansible
+from cloudinit.config.schema import (
+ SchemaValidationError,
+ get_schema,
+ validate_cloudconfig_schema,
+)
+from tests.unittests.helpers import skipUnlessJsonSchema
+from tests.unittests.util import get_cloud
+
+distro_version = dedent(
+ """ansible 2.10.8
+ config file = None
+ configured module search path = ['/home/holmanb/.ansible/plugins/modules', \
+ '/usr/share/ansible/plugins/modules']
+ ansible python module location = /usr/lib/python3/dist-packages/ansible
+ executable location = /usr/bin/ansible
+ python version = 3.10.4 (main, Jun 29 2022, 12:14:53) [GCC 11.2.0]"""
+)
+pip_version = dedent(
+ """ansible-pull [core 2.13.2]
+ config file = None
+ configured module search path = ['/root/.ansible/plugins/modules', \
+ '/usr/share/ansible/plugins/modules']
+ ansible python module location = /root/.local/lib/python3.8/site-\
+ packages/ansible
+ ansible collection location = /root/.ansible/collections:\
+ /usr/share/ansible/collections
+ executable location = /root/.local/lib/python3.8/site-packages/\
+ ansible/__main__.py
+ python version = 3.8.10 (default, Jun 22 2022, 20:18:18) [GCC 9.4.0]
+ jinja version = 3.1.2
+ libyaml = True """
+)
+
+CFG_FULL = {
+ "ansible": {
+ "install-method": "distro",
+ "package-name": "ansible-core",
+ "pull": {
+ "url": "https://github/holmanb/vmboot",
+ "playbook-name": "arch.yml",
+ "accept-host-key": True,
+ "clean": True,
+ "full": True,
+ "diff": False,
+ "ssh-common-args": "-y",
+ "scp-extra-args": "-l",
+ "sftp-extra-args": "-f",
+ "checkout": "tree",
+ "module-path": "~/.ansible/plugins/modules:"
+ "/usr/share/ansible/plugins/modules",
+ "timeout": "10",
+ "vault-id": "me",
+ "connection": "smart",
+ "vault-password-file": "/path/to/file",
+ "module-name": "git",
+ "sleep": "1",
+ "tags": "cumulus",
+ "skip-tags": "cisco",
+ "private-key": "{nope}",
+ },
+ }
+}
+CFG_MINIMAL = {
+ "ansible": {
+ "install-method": "pip",
+ "package-name": "ansible",
+ "pull": {
+ "url": "https://github/holmanb/vmboot",
+ "playbook-name": "ubuntu.yml",
+ },
+ }
+}
+
+
+class TestAnsibleSchema:
+ @mark.parametrize(
+ ("config", "error_msg"),
+ (
+ param(
+ CFG_MINIMAL,
+ None,
+ id="essentials",
+ ),
+ param(
+ {
+ "ansible": {
+ "install-method": "distro",
+ "pull": {
+ "url": "https://github/holmanb/vmboot",
+ "playbook-name": "centos.yml",
+ "dance": "bossa nova",
+ },
+ }
+ },
+ "Additional properties are not allowed ",
+ id="additional-properties",
+ ),
+ param(
+ CFG_FULL,
+ None,
+ id="all-keys",
+ ),
+ param(
+ {
+ "ansible": {
+ "install-method": "true",
+ "pull": {
+ "url": "https://github/holmanb/vmboot",
+ "playbook-name": "debian.yml",
+ },
+ }
+ },
+ "'true' is not one of ['distro', 'pip']",
+ id="install-type",
+ ),
+ param(
+ {
+ "ansible": {
+ "install-method": "pip",
+ "pull": {
+ "playbook-name": "fedora.yml",
+ },
+ }
+ },
+ "'url' is a required property",
+ id="require-url",
+ ),
+ param(
+ {
+ "ansible": {
+ "install-method": "pip",
+ "pull": {
+ "url": "gophers://encrypted-gophers/",
+ },
+ }
+ },
+ "'playbook-name' is a required property",
+ id="require-playbook-name",
+ ),
+ ),
+ )
+ @skipUnlessJsonSchema()
+ def test_schema_validation(self, config, error_msg):
+ if error_msg is None:
+ validate_cloudconfig_schema(config, get_schema(), strict=True)
+ else:
+ with raises(SchemaValidationError, match=re.escape(error_msg)):
+ validate_cloudconfig_schema(config, get_schema(), strict=True)
+
+
+class TestAnsible:
+ def test_filter_args(self):
+ """only diff should be removed"""
+ out = cc_ansible.filter_args(
+ CFG_FULL.get("ansible", {}).get("pull", {})
+ )
+ assert out == {
+ "url": "https://github/holmanb/vmboot",
+ "playbook-name": "arch.yml",
+ "accept-host-key": True,
+ "clean": True,
+ "full": True,
+ "ssh-common-args": "-y",
+ "scp-extra-args": "-l",
+ "sftp-extra-args": "-f",
+ "checkout": "tree",
+ "module-path": "~/.ansible/plugins/modules:"
+ "/usr/share/ansible/plugins/modules",
+ "timeout": "10",
+ "vault-id": "me",
+ "connection": "smart",
+ "vault-password-file": "/path/to/file",
+ "module-name": "git",
+ "sleep": "1",
+ "tags": "cumulus",
+ "skip-tags": "cisco",
+ "private-key": "{nope}",
+ }
+
+ @mark.parametrize(
+ ("cfg", "exception"),
+ (
+ (CFG_FULL, None),
+ (CFG_MINIMAL, None),
+ (
+ {
+ "ansible": {
+ "package-name": "ansible-core",
+ "install-method": "distro",
+ "pull": {
+ "playbook-name": "ubuntu.yml",
+ },
+ }
+ },
+ ValueError,
+ ),
+ (
+ {
+ "ansible": {
+ "install-method": "pip",
+ "pull": {
+ "url": "https://github/holmanb/vmboot",
+ },
+ }
+ },
+ ValueError,
+ ),
+ ),
+ )
+ def test_required_keys(self, cfg, exception, mocker):
+ m_subp = mocker.patch(
+ "cloudinit.config.cc_ansible.subp", return_value=("", "")
+ )
+ mocker.patch("cloudinit.config.cc_ansible.which", return_value=True)
+ mocker.patch(
+ "cloudinit.config.cc_ansible.AnsiblePull.get_version",
+ return_value=cc_ansible.Version(2, 7, 1),
+ )
+ mocker.patch("cloudinit.config.cc_ansible.AnsiblePull.check_deps")
+ mocker.patch(
+ "cloudinit.config.cc_ansible.AnsiblePullDistro.is_installed",
+ return_value=False,
+ )
+ if exception:
+ with raises(exception):
+ cc_ansible.handle("", cfg, get_cloud(), None, None)
+ else:
+ cloud = get_cloud(mocked_distro=True)
+ print(cfg)
+ install = cfg["ansible"]["install-method"]
+ cc_ansible.handle("", cfg, cloud, None, None)
+ if install == "distro":
+ cloud.distro.install_packages.assert_called_once()
+ cloud.distro.install_packages.assert_called_with(
+ "ansible-core"
+ )
+ elif install == "pip":
+ m_subp.assert_has_calls(
+ [
+ call(["python3", "-m", "pip", "list"]),
+ call(
+ [
+ "python3",
+ "-m",
+ "pip",
+ "install",
+ "--user",
+ "ansible",
+ ]
+ ),
+ ]
+ )
+ assert m_subp.call_args[0][0] == [
+ "ansible-pull",
+ "--url=https://github/holmanb/vmboot",
+ "ubuntu.yml",
+ ]
+
+ @mock.patch("cloudinit.config.cc_ansible.which", return_value=False)
+ def test_deps_not_installed(self, m_which):
+ with raises(ValueError):
+ cc_ansible.AnsiblePullDistro(get_cloud().distro).check_deps()
+
+ @mock.patch("cloudinit.config.cc_ansible.which", return_value=True)
+ def test_deps(self, m_which):
+ cc_ansible.AnsiblePullDistro(get_cloud().distro).check_deps()
+
+ @mock.patch("cloudinit.config.cc_ansible.which", return_value=True)
+ @mock.patch(
+ "cloudinit.config.cc_ansible.subp", return_value=("stdout", "stderr")
+ )
+ @mark.parametrize(
+ ("cfg", "expected"),
+ (
+ (
+ CFG_FULL,
+ [
+ "ansible-pull",
+ "--url=https://github/holmanb/vmboot",
+ "--accept-host-key",
+ "--clean",
+ "--full",
+ "--ssh-common-args=-y",
+ "--scp-extra-args=-l",
+ "--sftp-extra-args=-f",
+ "--checkout=tree",
+ "--module-path=~/.ansible/plugins/modules"
+ ":/usr/share/ansible/plugins/modules",
+ "--timeout=10",
+ "--vault-id=me",
+ "--connection=smart",
+ "--vault-password-file=/path/to/file",
+ "--module-name=git",
+ "--sleep=1",
+ "--tags=cumulus",
+ "--skip-tags=cisco",
+ "--private-key={nope}",
+ "arch.yml",
+ ],
+ ),
+ (
+ CFG_MINIMAL,
+ [
+ "ansible-pull",
+ "--url=https://github/holmanb/vmboot",
+ "ubuntu.yml",
+ ],
+ ),
+ ),
+ )
+ def test_ansible_pull(self, m_subp, m_which, cfg, expected):
+ pull_type = cfg["ansible"]["install-method"]
+ ansible_pull = (
+ cc_ansible.AnsiblePullPip()
+ if pull_type == "pip"
+ else cc_ansible.AnsiblePullDistro(get_cloud().distro)
+ )
+ cc_ansible.run_ansible_pull(
+ ansible_pull, deepcopy(cfg["ansible"]["pull"])
+ )
+ assert m_subp.call_args[0][0] == expected
+
+ @mock.patch("cloudinit.config.cc_ansible.validate_config")
+ def test_do_not_run(self, m_validate):
+ cc_ansible.handle("", {}, None, None, None) # pyright: ignore
+ assert not m_validate.called
+
+ @mock.patch(
+ "cloudinit.config.cc_ansible.subp",
+ side_effect=[
+ (distro_version, ""),
+ (pip_version, ""),
+ (" ansible 2.1.0", ""),
+ (" ansible 2.1.0", ""),
+ ],
+ )
+ def test_parse_version(self, m_subp):
+ assert cc_ansible.AnsiblePullDistro(
+ get_cloud().distro
+ ).get_version() == cc_ansible.Version(2, 10, 8)
+ assert cc_ansible.AnsiblePullPip().get_version() == cc_ansible.Version(
+ 2, 13, 2
+ )
+
+ assert (
+ util.Version(2, 1, 0, -1)
+ == cc_ansible.AnsiblePullPip().get_version()
+ )
+ assert (
+ util.Version(2, 1, 0, -1)
+ == cc_ansible.AnsiblePullDistro(get_cloud().distro).get_version()
+ )
diff --git a/tests/unittests/config/test_cc_debug.py b/tests/unittests/config/test_cc_debug.py
deleted file mode 100644
index fc8d43dc..00000000
--- a/tests/unittests/config/test_cc_debug.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright (C) 2014 Yahoo! Inc.
-#
-# This file is part of cloud-init. See LICENSE file for license information.
-import logging
-import re
-import shutil
-import tempfile
-
-import pytest
-
-from cloudinit import util
-from cloudinit.config import cc_debug
-from cloudinit.config.schema import (
- SchemaValidationError,
- get_schema,
- validate_cloudconfig_schema,
-)
-from tests.unittests.helpers import (
- FilesystemMockingTestCase,
- mock,
- skipUnlessJsonSchema,
-)
-from tests.unittests.util import get_cloud
-
-LOG = logging.getLogger(__name__)
-
-
-@mock.patch("cloudinit.distros.debian.read_system_locale")
-class TestDebug(FilesystemMockingTestCase):
- def setUp(self):
- super(TestDebug, self).setUp()
- self.new_root = tempfile.mkdtemp()
- self.addCleanup(shutil.rmtree, self.new_root)
- self.patchUtils(self.new_root)
-
- def test_debug_write(self, m_locale):
- m_locale.return_value = "en_US.UTF-8"
- cfg = {
- "abc": "123",
- "c": "\u20a0",
- "debug": {
- "verbose": True,
- # Does not actually write here due to mocking...
- "output": "/var/log/cloud-init-debug.log",
- },
- }
- cc = get_cloud()
- cc_debug.handle("cc_debug", cfg, cc, LOG, [])
- contents = util.load_file("/var/log/cloud-init-debug.log")
- # Some basic sanity tests...
- self.assertNotEqual(0, len(contents))
- for k in cfg.keys():
- self.assertIn(k, contents)
-
- def test_debug_no_write(self, m_locale):
- m_locale.return_value = "en_US.UTF-8"
- cfg = {
- "abc": "123",
- "debug": {
- "verbose": False,
- # Does not actually write here due to mocking...
- "output": "/var/log/cloud-init-debug.log",
- },
- }
- cc = get_cloud()
- cc_debug.handle("cc_debug", cfg, cc, LOG, [])
- self.assertRaises(
- IOError, util.load_file, "/var/log/cloud-init-debug.log"
- )
-
-
-@skipUnlessJsonSchema()
-class TestDebugSchema:
- """Directly test schema rather than through handle."""
-
- @pytest.mark.parametrize(
- "config, error_msg",
- (
- # Valid schemas tested by meta.examples in test_schema
- # Invalid schemas
- ({"debug": 1}, "debug: 1 is not of type 'object'"),
- (
- {"debug": {}},
- re.escape("debug: {} does not have enough properties"),
- ),
- (
- {"debug": {"boguskey": True}},
- re.escape(
- "Additional properties are not allowed ('boguskey' was"
- " unexpected)"
- ),
- ),
- (
- {"debug": {"verbose": 1}},
- "debug.verbose: 1 is not of type 'boolean'",
- ),
- (
- {"debug": {"output": 1}},
- "debug.output: 1 is not of type 'string'",
- ),
- ),
- )
- @skipUnlessJsonSchema()
- def test_schema_validation(self, config, error_msg):
- """Assert expected schema validation and error messages."""
- # New-style schema $defs exist in config/cloud-init-schema*.json
- schema = get_schema()
- with pytest.raises(SchemaValidationError, match=error_msg):
- validate_cloudconfig_schema(config, schema, strict=True)
-
-
-# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_disk_setup.py b/tests/unittests/config/test_cc_disk_setup.py
index f2796e83..c61a26f3 100644
--- a/tests/unittests/config/test_cc_disk_setup.py
+++ b/tests/unittests/config/test_cc_disk_setup.py
@@ -1,7 +1,6 @@
# This file is part of cloud-init. See LICENSE file for license information.
import random
-import re
import pytest
@@ -312,13 +311,6 @@ class TestDebugSchema:
{"device_aliases": 1},
"device_aliases: 1 is not of type 'object'",
),
- (
- {"debug": {"boguskey": True}},
- re.escape(
- "Additional properties are not allowed ('boguskey' was"
- " unexpected)"
- ),
- ),
),
)
@skipUnlessJsonSchema()
diff --git a/tests/unittests/config/test_cc_growpart.py b/tests/unittests/config/test_cc_growpart.py
index 24e92c88..f4d4e579 100644
--- a/tests/unittests/config/test_cc_growpart.py
+++ b/tests/unittests/config/test_cc_growpart.py
@@ -20,7 +20,11 @@ from cloudinit.config.schema import (
get_schema,
validate_cloudconfig_schema,
)
-from tests.unittests.helpers import TestCase, skipUnlessJsonSchema
+from tests.unittests.helpers import (
+ TestCase,
+ does_not_raise,
+ skipUnlessJsonSchema,
+)
# growpart:
# mode: auto # off, on, auto, 'growpart'
@@ -591,37 +595,65 @@ class Bunch(object):
class TestGrowpartSchema:
@pytest.mark.parametrize(
- "config, error_msg",
+ "config, expectation",
(
- ({"growpart": {"mode": "off"}}, None),
- ({"growpart": {"mode": False}}, None),
+ ({"growpart": {"mode": "off"}}, does_not_raise()),
+ (
+ {"growpart": {"mode": False}},
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "deprecations: growpart.mode: DEPRECATED. Specifying"
+ " a boolean ``false`` value for this key is"
+ " deprecated. Use ``off`` instead."
+ ),
+ ),
+ ),
(
{"growpart": {"mode": "false"}},
- "'false' is not one of "
- r"\[False, 'auto', 'growpart', 'gpart', 'off'\]",
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "growpart.mode: 'false' is not valid under any of the"
+ " given schemas"
+ ),
+ ),
),
(
{"growpart": {"mode": "a"}},
- "'a' is not one of "
- r"\[False, 'auto', 'growpart', 'gpart', 'off'\]",
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "growpart.mode: 'a' is not valid under any of the"
+ " given schemas"
+ ),
+ ),
+ ),
+ (
+ {"growpart": {"devices": "/"}},
+ pytest.raises(
+ SchemaValidationError, match="'/' is not of type 'array'"
+ ),
),
- ({"growpart": {"devices": "/"}}, "'/' is not of type 'array'"),
(
{"growpart": {"ignore_growroot_disabled": "off"}},
- "'off' is not of type 'boolean'",
+ pytest.raises(
+ SchemaValidationError,
+ match="'off' is not of type 'boolean'",
+ ),
),
(
{"growpart": {"a": "b"}},
- "Additional properties are not allowed",
+ pytest.raises(
+ SchemaValidationError,
+ match="Additional properties are not allowed",
+ ),
),
),
)
@skipUnlessJsonSchema()
- def test_schema_validation(self, config, error_msg):
+ def test_schema_validation(self, config, expectation):
"""Assert expected schema validation and error messages."""
schema = get_schema()
- if error_msg is None:
+ with expectation:
validate_cloudconfig_schema(config, schema, strict=True)
- else:
- with pytest.raises(SchemaValidationError, match=error_msg):
- validate_cloudconfig_schema(config, schema, strict=True)
diff --git a/tests/unittests/config/test_cc_power_state_change.py b/tests/unittests/config/test_cc_power_state_change.py
index 5b970002..82824306 100644
--- a/tests/unittests/config/test_cc_power_state_change.py
+++ b/tests/unittests/config/test_cc_power_state_change.py
@@ -176,16 +176,18 @@ class TestPowerStateChangeSchema:
(
{"power_state": {"mode": "halt", "delay": "5"}},
(
- "power_state.delay: DEPRECATED."
- " Dropped after April 2027. Use ``now`` or integer type."
+ "power_state.delay: DEPRECATED:"
+ " Use of string for this value will be dropped after"
+ " April 2027. Use ``now`` or integer type."
),
),
({"power_state": {"mode": "halt", "delay": "now"}}, None),
(
{"power_state": {"mode": "halt", "delay": "+5"}},
(
- "power_state.delay: DEPRECATED."
- " Dropped after April 2027. Use ``now`` or integer type."
+ "power_state.delay: DEPRECATED:"
+ " Use of string for this value will be dropped after"
+ " April 2027. Use ``now`` or integer type."
),
),
({"power_state": {"mode": "halt", "delay": "+"}}, ""),
diff --git a/tests/unittests/config/test_cc_scripts_vendor.py b/tests/unittests/config/test_cc_scripts_vendor.py
index a8cbfb4f..1dcd0573 100644
--- a/tests/unittests/config/test_cc_scripts_vendor.py
+++ b/tests/unittests/config/test_cc_scripts_vendor.py
@@ -5,24 +5,32 @@ from cloudinit.config.schema import (
get_schema,
validate_cloudconfig_schema,
)
-from tests.unittests.helpers import skipUnlessJsonSchema
+from tests.unittests.helpers import does_not_raise, skipUnlessJsonSchema
class TestScriptsVendorSchema:
@pytest.mark.parametrize(
- "config, error_msg",
+ "config, expectation",
(
- ({"vendor_data": {"enabled": True}}, None),
- ({"vendor_data": {"enabled": "yes"}}, None),
+ ({"vendor_data": {"enabled": True}}, does_not_raise()),
+ ({"vendor_data": {"enabled": False}}, does_not_raise()),
+ (
+ {"vendor_data": {"enabled": "yes"}},
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "deprecations: vendor_data.enabled: DEPRECATED."
+ " Use of string for this value is DEPRECATED."
+ " Use a boolean value instead."
+ ),
+ ),
+ ),
),
)
@skipUnlessJsonSchema()
- def test_schema_validation(self, config, error_msg):
+ def test_schema_validation(self, config, expectation):
"""Assert expected schema validation and error messages."""
# New-style schema $defs exist in config/cloud-init-schema*.json
schema = get_schema()
- if error_msg is None:
+ with expectation:
validate_cloudconfig_schema(config, schema, strict=True)
- else:
- with pytest.raises(SchemaValidationError, match=error_msg):
- validate_cloudconfig_schema(config, schema, strict=True)
diff --git a/tests/unittests/config/test_cc_set_passwords.py b/tests/unittests/config/test_cc_set_passwords.py
index 1482162a..10473c3b 100644
--- a/tests/unittests/config/test_cc_set_passwords.py
+++ b/tests/unittests/config/test_cc_set_passwords.py
@@ -12,7 +12,7 @@ from cloudinit.config.schema import (
get_schema,
validate_cloudconfig_schema,
)
-from tests.unittests.helpers import skipUnlessJsonSchema
+from tests.unittests.helpers import does_not_raise, skipUnlessJsonSchema
from tests.unittests.util import get_cloud
MODPATH = "cloudinit.config.cc_set_passwords."
@@ -259,6 +259,7 @@ def get_chpasswd_calls(cfg, cloud, log):
log=log,
args=[],
)
+ assert chpasswd.call_count > 0
return chpasswd.call_args[0], subp.call_args
@@ -694,13 +695,39 @@ class TestExpire:
class TestSetPasswordsSchema:
@pytest.mark.parametrize(
- "config, error_msg",
+ "config, expectation",
[
# Test both formats still work
- ({"ssh_pwauth": True}, None),
- ({"ssh_pwauth": "yes"}, None),
- ({"ssh_pwauth": "unchanged"}, None),
- ({"chpasswd": {"list": "blah"}}, "DEPRECATED"),
+ ({"ssh_pwauth": True}, does_not_raise()),
+ ({"ssh_pwauth": False}, does_not_raise()),
+ (
+ {"ssh_pwauth": "yes"},
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "deprecations: ssh_pwauth: DEPRECATED. Use of"
+ " non-boolean values for this field is DEPRECATED and"
+ " will result in an error in a future version of"
+ " cloud-init."
+ ),
+ ),
+ ),
+ (
+ {"ssh_pwauth": "unchanged"},
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "deprecations: ssh_pwauth: DEPRECATED. Use of"
+ " non-boolean values for this field is DEPRECATED and"
+ " will result in an error in a future version of"
+ " cloud-init."
+ ),
+ ),
+ ),
+ (
+ {"chpasswd": {"list": "blah"}},
+ pytest.raises(SchemaValidationError, match="DEPRECATED"),
+ ),
# Valid combinations
(
{
@@ -728,7 +755,7 @@ class TestSetPasswordsSchema:
]
}
},
- None,
+ does_not_raise(),
),
(
{
@@ -743,7 +770,10 @@ class TestSetPasswordsSchema:
]
}
},
- "is not valid under any of the given schemas",
+ pytest.raises(
+ SchemaValidationError,
+ match="is not valid under any of the given schemas",
+ ),
),
(
{
@@ -757,11 +787,17 @@ class TestSetPasswordsSchema:
]
}
},
- "is not valid under any of the given schemas",
+ pytest.raises(
+ SchemaValidationError,
+ match="is not valid under any of the given schemas",
+ ),
),
(
{"chpasswd": {"users": [{"password": "."}]}},
- "is not valid under any of the given schemas",
+ pytest.raises(
+ SchemaValidationError,
+ match="is not valid under any of the given schemas",
+ ),
),
# when type != RANDOM, password is a required key
(
@@ -770,7 +806,10 @@ class TestSetPasswordsSchema:
"users": [{"name": "what-if-1", "type": "hash"}]
}
},
- "is not valid under any of the given schemas",
+ pytest.raises(
+ SchemaValidationError,
+ match="is not valid under any of the given schemas",
+ ),
),
pytest.param(
{
@@ -784,33 +823,50 @@ class TestSetPasswordsSchema:
]
}
},
- "is not valid under any of the given schemas",
+ pytest.raises(
+ SchemaValidationError,
+ match="is not valid under any of the given schemas",
+ ),
id="dat_is_an_additional_property",
),
(
{"chpasswd": {"users": [{"name": "."}]}},
- "is not valid under any of the given schemas",
+ pytest.raises(
+ SchemaValidationError,
+ match="is not valid under any of the given schemas",
+ ),
),
# Test regex
- ({"chpasswd": {"list": ["user:pass"]}}, "DEPRECATED"),
+ (
+ {"chpasswd": {"list": ["user:pass"]}},
+ pytest.raises(SchemaValidationError, match="DEPRECATED"),
+ ),
# Test valid
- ({"password": "pass"}, None),
+ ({"password": "pass"}, does_not_raise()),
# Test invalid values
(
{"chpasswd": {"expire": "yes"}},
- "'yes' is not of type 'boolean'",
+ pytest.raises(
+ SchemaValidationError,
+ match="'yes' is not of type 'boolean'",
+ ),
+ ),
+ (
+ {"chpasswd": {"list": ["user"]}},
+ pytest.raises(SchemaValidationError),
+ ),
+ (
+ {"chpasswd": {"list": []}},
+ pytest.raises(
+ SchemaValidationError, match=r"\[\] is too short"
+ ),
),
- ({"chpasswd": {"list": ["user"]}}, ""),
- ({"chpasswd": {"list": []}}, r"\[\] is too short"),
],
)
@skipUnlessJsonSchema()
- def test_schema_validation(self, config, error_msg):
- if error_msg is None:
+ def test_schema_validation(self, config, expectation):
+ with expectation:
validate_cloudconfig_schema(config, get_schema(), strict=True)
- else:
- with pytest.raises(SchemaValidationError, match=error_msg):
- validate_cloudconfig_schema(config, get_schema(), strict=True)
# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_cc_ssh.py b/tests/unittests/config/test_cc_ssh.py
index 47c0c777..8f2ca8bf 100644
--- a/tests/unittests/config/test_cc_ssh.py
+++ b/tests/unittests/config/test_cc_ssh.py
@@ -57,6 +57,7 @@ def _replace_options(user: Optional[str] = None) -> str:
return options
+@pytest.mark.usefixtures("fake_filesystem")
@mock.patch(MODPATH + "ssh_util.setup_user_keys")
class TestHandleSsh:
"""Test cc_ssh handling of ssh config."""
@@ -283,12 +284,30 @@ class TestHandleSsh:
expected_calls == cloud.datasource.publish_host_keys.call_args_list
)
+ @pytest.mark.parametrize("with_sshd_dconf", [False, True])
+ @mock.patch(MODPATH + "util.ensure_dir")
@mock.patch(MODPATH + "ug_util.normalize_users_groups")
@mock.patch(MODPATH + "util.write_file")
- def test_handle_ssh_keys_in_cfg(self, m_write_file, m_nug, m_setup_keys):
+ def test_handle_ssh_keys_in_cfg(
+ self,
+ m_write_file,
+ m_nug,
+ m_ensure_dir,
+ m_setup_keys,
+ with_sshd_dconf,
+ mocker,
+ ):
"""Test handle with ssh keys and certificate."""
# Populate a config dictionary to pass to handle() as well
# as the expected file-writing calls.
+ mocker.patch(
+ MODPATH + "ssh_util._includes_dconf", return_value=with_sshd_dconf
+ )
+ if with_sshd_dconf:
+ sshd_conf_fname = "/etc/ssh/sshd_config.d/50-cloud-init.conf"
+ else:
+ sshd_conf_fname = "/etc/ssh/sshd_config"
+
cfg = {"ssh_keys": {}}
expected_calls = []
@@ -324,7 +343,7 @@ class TestHandleSsh:
384,
),
mock.call(
- "/etc/ssh/sshd_config",
+ sshd_conf_fname,
"HostCertificate /etc/ssh/ssh_host_{}_key-cert.pub"
"\n".format(key_type),
preserve_mode=True,
@@ -343,6 +362,14 @@ class TestHandleSsh:
for call_ in expected_calls:
assert call_ in m_write_file.call_args_list
+ if with_sshd_dconf:
+ assert (
+ mock.call("/etc/ssh/sshd_config.d", mode=0o755)
+ in m_ensure_dir.call_args_list
+ )
+ else:
+ assert [] == m_ensure_dir.call_args_list
+
@pytest.mark.parametrize(
"key_type,reason",
[
diff --git a/tests/unittests/config/test_cc_update_etc_hosts.py b/tests/unittests/config/test_cc_update_etc_hosts.py
index f7aafe46..d48656f7 100644
--- a/tests/unittests/config/test_cc_update_etc_hosts.py
+++ b/tests/unittests/config/test_cc_update_etc_hosts.py
@@ -2,7 +2,6 @@
import logging
import os
-import re
import shutil
import pytest
@@ -78,21 +77,35 @@ class TestHostsFile(t_help.FilesystemMockingTestCase):
class TestUpdateEtcHosts:
@pytest.mark.parametrize(
- "config, error_msg",
+ "config, expectation",
[
+ ({"manage_etc_hosts": True}, t_help.does_not_raise()),
+ ({"manage_etc_hosts": False}, t_help.does_not_raise()),
+ ({"manage_etc_hosts": "localhost"}, t_help.does_not_raise()),
+ (
+ {"manage_etc_hosts": "template"},
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "deprecations: manage_etc_hosts: DEPRECATED. Value"
+ " ``template`` will be dropped after April 2027."
+ " Use ``true`` instead"
+ ),
+ ),
+ ),
(
{"manage_etc_hosts": "templatey"},
- re.escape(
- "manage_etc_hosts: 'templatey' is not one of"
- " [True, False, 'template', 'localhost']"
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "manage_etc_hosts: 'templatey' is not valid under any"
+ " of the given schemas"
+ ),
),
),
],
)
@t_help.skipUnlessJsonSchema()
- def test_schema_validation(self, config, error_msg):
- if error_msg is None:
+ def test_schema_validation(self, config, expectation):
+ with expectation:
validate_cloudconfig_schema(config, get_schema(), strict=True)
- else:
- with pytest.raises(SchemaValidationError, match=error_msg):
- validate_cloudconfig_schema(config, get_schema(), strict=True)
diff --git a/tests/unittests/config/test_cc_users_groups.py b/tests/unittests/config/test_cc_users_groups.py
index bbb8bec4..12cdaa19 100644
--- a/tests/unittests/config/test_cc_users_groups.py
+++ b/tests/unittests/config/test_cc_users_groups.py
@@ -9,7 +9,12 @@ from cloudinit.config.schema import (
get_schema,
validate_cloudconfig_schema,
)
-from tests.unittests.helpers import CiTestCase, mock, skipUnlessJsonSchema
+from tests.unittests.helpers import (
+ CiTestCase,
+ does_not_raise,
+ mock,
+ skipUnlessJsonSchema,
+)
MODPATH = "cloudinit.config.cc_users_groups"
@@ -298,54 +303,74 @@ class TestHandleUsersGroups(CiTestCase):
class TestUsersGroupsSchema:
@pytest.mark.parametrize(
- "config, problem_msg, has_errors",
+ "config, expectation, has_errors",
[
# Validate default settings not covered by examples
- ({"groups": ["anygrp"]}, None, False),
- ({"groups": "anygrp,anyothergroup"}, None, False), # DEPRECATED
+ ({"groups": ["anygrp"]}, does_not_raise(), None),
+ (
+ {"groups": "anygrp,anyothergroup"},
+ does_not_raise(),
+ None,
+ ), # DEPRECATED
# Create anygrp with user1 as member
- ({"groups": [{"anygrp": "user1"}]}, None, False),
+ ({"groups": [{"anygrp": "user1"}]}, does_not_raise(), None),
# Create anygrp with user1 as member using object/string syntax
- ({"groups": {"anygrp": "user1"}}, None, False),
+ ({"groups": {"anygrp": "user1"}}, does_not_raise(), None),
# Create anygrp with user1 as member using object/list syntax
- ({"groups": {"anygrp": ["user1"]}}, None, False),
- ({"groups": [{"anygrp": ["user1", "user2"]}]}, None, False),
+ ({"groups": {"anygrp": ["user1"]}}, does_not_raise(), None),
+ (
+ {"groups": [{"anygrp": ["user1", "user2"]}]},
+ does_not_raise(),
+ None,
+ ),
# Make default username "olddefault": DEPRECATED
- ({"user": "olddefault"}, None, False),
+ ({"user": "olddefault"}, does_not_raise(), None),
# Create multiple users, and include default user. DEPRECATED
- ({"users": [{"name": "bbsw"}]}, None, False),
+ ({"users": [{"name": "bbsw"}]}, does_not_raise(), None),
(
{"users": [{"name": "bbsw", "garbage-key": None}]},
- "is not valid under any of the given schemas",
+ pytest.raises(
+ SchemaValidationError,
+ match="is not valid under any of the given schemas",
+ ),
True,
),
(
{"groups": {"": "bbsw"}},
- "does not match any of the regexes",
+ pytest.raises(
+ SchemaValidationError,
+ match="does not match any of the regexes",
+ ),
True,
),
(
{"users": [{"name": "bbsw", "groups": ["anygrp"]}]},
+ does_not_raise(),
None,
- False,
), # user with a list of groups
- ({"groups": [{"yep": ["user1"]}]}, None, False),
- ({"users": "oldstyle,default"}, None, False),
- ({"users": ["default"]}, None, False),
- ({"users": ["default", ["aaa", "bbb"]]}, None, False),
- ({"users": ["foobar"]}, None, False), # no default user creation
+ ({"groups": [{"yep": ["user1"]}]}, does_not_raise(), None),
+ ({"users": "oldstyle,default"}, does_not_raise(), None),
+ ({"users": ["default"]}, does_not_raise(), None),
+ ({"users": ["default", ["aaa", "bbb"]]}, does_not_raise(), None),
+ # no default user creation
+ ({"users": ["foobar"]}, does_not_raise(), None),
(
{"users": [{"name": "bbsw", "lock-passwd": True}]},
- "users.0.lock-passwd: DEPRECATED."
- " Dropped after April 2027. Use ``lock_passwd``."
- " Default: ``true``",
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "users.0.lock-passwd: DEPRECATED."
+ " Dropped after April 2027. Use ``lock_passwd``."
+ " Default: ``true``"
+ ),
+ ),
False,
),
# users.groups supports comma-delimited str, list and object type
(
{"users": [{"name": "bbsw", "groups": "adm, sudo"}]},
+ does_not_raise(),
None,
- False,
),
(
{
@@ -353,28 +378,42 @@ class TestUsersGroupsSchema:
{"name": "bbsw", "groups": {"adm": None, "sudo": None}}
]
},
- "Cloud config schema deprecations: users.0.groups.adm:"
- " DEPRECATED. When providing an object for"
- " users.groups the ``<group_name>`` keys are the groups to"
- " add this user to,",
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "Cloud config schema deprecations: users.0.groups.adm:"
+ " DEPRECATED. When providing an object for"
+ " users.groups the ``<group_name>`` keys are the"
+ " groups to add this user to,"
+ ),
+ ),
False,
),
- ({"groups": [{"yep": ["user1"]}]}, None, False),
+ ({"groups": [{"yep": ["user1"]}]}, does_not_raise(), None),
(
{"user": ["no_list_allowed"]},
- re.escape("user: ['no_list_allowed'] is not valid "),
+ pytest.raises(
+ SchemaValidationError,
+ match=re.escape("user: ['no_list_allowed'] is not valid "),
+ ),
True,
),
(
{"groups": {"anygrp": 1}},
- "groups.anygrp: 1 is not of type 'string', 'array'",
+ pytest.raises(
+ SchemaValidationError,
+ match="groups.anygrp: 1 is not of type 'string', 'array'",
+ ),
True,
),
(
{
"users": [{"inactive": True, "name": "cloudy"}],
},
- "errors: users.0: {'inactive': True",
+ pytest.raises(
+ SchemaValidationError,
+ match="errors: users.0: {'inactive': True",
+ ),
True,
),
(
@@ -387,27 +426,67 @@ class TestUsersGroupsSchema:
}
]
},
+ does_not_raise(),
None,
- False,
),
(
{"user": {"name": "aciba", "groups": {"sbuild": None}}},
- (
- "deprecations: user.groups.sbuild: DEPRECATED. "
- "When providing an object for users.groups the "
- "``<group_name>`` keys are the groups to add this user to"
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "deprecations: user.groups.sbuild: DEPRECATED. "
+ "When providing an object for users.groups the "
+ "``<group_name>`` keys are the groups to add this "
+ "user to"
+ ),
),
False,
),
+ (
+ {"user": {"name": "mynewdefault", "sudo": False}},
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "deprecations: user.sudo: DEPRECATED. The value"
+ " ``false`` will be dropped after April 2027."
+ " Use ``null`` or no ``sudo`` key instead."
+ ),
+ ),
+ False,
+ ),
+ (
+ {"user": {"name": "mynewdefault", "sudo": None}},
+ does_not_raise(),
+ None,
+ ),
+ (
+ {"users": [{"name": "a", "uid": "1743"}]},
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "users.0.uid: DEPRECATED. The use of ``string`` type"
+ " will be dropped after April 2027. Use an ``integer``"
+ " instead."
+ ),
+ ),
+ False,
+ ),
+ (
+ {"users": [{"name": "a", "expiredate": "2038,1,19"}]},
+ pytest.raises(
+ SchemaValidationError,
+ match=(
+ "users.0: {'name': 'a', 'expiredate': '2038,1,19'}"
+ " is not valid under any of the given schemas"
+ ),
+ ),
+ True,
+ ),
],
)
@skipUnlessJsonSchema()
- def test_schema_validation(self, config, problem_msg, has_errors):
- if problem_msg is None:
+ def test_schema_validation(self, config, expectation, has_errors):
+ with expectation as exc_info:
validate_cloudconfig_schema(config, get_schema(), strict=True)
- else:
- with pytest.raises(
- SchemaValidationError, match=problem_msg
- ) as exc_info:
- validate_cloudconfig_schema(config, get_schema(), strict=True)
- assert has_errors == exc_info.value.has_errors()
+ if has_errors is not None:
+ assert has_errors == exc_info.value.has_errors()
diff --git a/tests/unittests/config/test_cc_wireguard.py b/tests/unittests/config/test_cc_wireguard.py
new file mode 100644
index 00000000..59a5223b
--- /dev/null
+++ b/tests/unittests/config/test_cc_wireguard.py
@@ -0,0 +1,266 @@
+# This file is part of cloud-init. See LICENSE file for license information.
+import pytest
+
+from cloudinit import subp, util
+from cloudinit.config import cc_wireguard
+from cloudinit.config.schema import (
+ SchemaValidationError,
+ get_schema,
+ validate_cloudconfig_schema,
+)
+from tests.unittests.helpers import CiTestCase, mock, skipUnlessJsonSchema
+
+NL = "\n"
+# Module path used in mocks
+MPATH = "cloudinit.config.cc_wireguard"
+MIN_KERNEL_VERSION = (5, 6)
+
+
class FakeCloud(object):
    """Minimal stand-in for a Cloud object: carries only ``distro``."""

    def __init__(self, distro):
        # Hold the (usually mocked) distro for the code under test to use.
        self.distro = distro
+
+
class TestWireGuard(CiTestCase):
    """Unit tests for cc_wireguard helper functions.

    Exercises supplemental/readinessprobe validation, config writing,
    service enabling, package installation and kernel-module loading,
    with subp/which mocked where shelling out would otherwise occur.
    """

    # Capture log output into self.logs for assertions below.
    with_logs = True
    allowed_subp = [CiTestCase.SUBP_SHELL_TRUE]

    def setUp(self):
        super(TestWireGuard, self).setUp()
        self.tmp = self.tmp_dir()

    def test_readiness_probe_schema_non_string_values(self):
        """ValueError raised for any values expected as string type."""
        wg_readinessprobes = [1, ["not-a-valid-command"]]
        errors = [
            "Expected a string for readinessprobe at 0. Found 1",
            "Expected a string for readinessprobe at 1."
            " Found ['not-a-valid-command']",
        ]
        with self.assertRaises(ValueError) as context_mgr:
            cc_wireguard.readinessprobe_command_validation(wg_readinessprobes)
        error_msg = str(context_mgr.exception)
        # One aggregated ValueError reports every offending entry.
        for error in errors:
            self.assertIn(error, error_msg)

    def test_suppl_schema_error_on_missing_keys(self):
        """ValueError raised reporting any missing required keys."""
        cfg = {}
        match = (
            f"Invalid wireguard interface configuration:{NL}"
            "Missing required wg:interfaces keys: config_path, content, name"
        )
        with self.assertRaisesRegex(ValueError, match):
            cc_wireguard.supplemental_schema_validation(cfg)

    def test_suppl_schema_error_on_non_string_values(self):
        """ValueError raised for any values expected as string type."""
        cfg = {"name": 1, "config_path": 2, "content": 3}
        errors = [
            "Expected a string for wg:interfaces:config_path. Found 2",
            "Expected a string for wg:interfaces:content. Found 3",
            "Expected a string for wg:interfaces:name. Found 1",
        ]
        with self.assertRaises(ValueError) as context_mgr:
            cc_wireguard.supplemental_schema_validation(cfg)
        error_msg = str(context_mgr.exception)
        for error in errors:
            self.assertIn(error, error_msg)

    def test_write_config_failed(self):
        """Errors when writing config are raised."""
        # Unwritable path forces the underlying write to fail.
        wg_int = {"name": "wg0", "config_path": "/no/valid/path"}

        with self.assertRaises(RuntimeError) as context_mgr:
            cc_wireguard.write_config(wg_int)
        self.assertIn(
            "Failure writing Wireguard configuration file /no/valid/path:\n",
            str(context_mgr.exception),
        )

    @mock.patch("%s.subp.subp" % MPATH)
    def test_readiness_probe_invalid_command(self, m_subp):
        """Errors when executing readinessprobes are raised."""
        wg_readinessprobes = ["not-a-valid-command"]

        # Fail only when called the way readinessprobe() is expected to
        # call subp (capture=True, shell=True) with the failing command.
        def fake_subp(cmd, capture=None, shell=None):
            fail_cmds = ["not-a-valid-command"]
            if cmd in fail_cmds and capture and shell:
                raise subp.ProcessExecutionError(
                    "not-a-valid-command: command not found"
                )

        m_subp.side_effect = fake_subp

        with self.assertRaises(RuntimeError) as context_mgr:
            cc_wireguard.readinessprobe(wg_readinessprobes)
        self.assertIn(
            "Failed running readinessprobe command:\n"
            "not-a-valid-command: Unexpected error while"
            " running command.\n"
            "Command: -\nExit code: -\nReason: -\n"
            "Stdout: not-a-valid-command: command not found\nStderr: -",
            str(context_mgr.exception),
        )

    @mock.patch("%s.subp.subp" % MPATH)
    def test_enable_wg_on_error(self, m_subp):
        """Errors when enabling wireguard interfaces are raised."""
        wg_int = {"name": "wg0"}
        distro = mock.MagicMock()  # No errors raised
        # NOTE(review): "wg-quik" looks like a typo for "wg-quick"; it is
        # harmless here since the same fixture string is asserted below,
        # but confirm against upstream intent.
        distro.manage_service.side_effect = subp.ProcessExecutionError(
            "systemctl start wg-quik@wg0 failed: exit code 1"
        )
        mycloud = FakeCloud(distro)
        with self.assertRaises(RuntimeError) as context_mgr:
            cc_wireguard.enable_wg(wg_int, mycloud)
        self.assertEqual(
            "Failed enabling/starting Wireguard interface(s):\n"
            "Unexpected error while running command.\n"
            "Command: -\nExit code: -\nReason: -\n"
            "Stdout: systemctl start wg-quik@wg0 failed: exit code 1\n"
            "Stderr: -",
            str(context_mgr.exception),
        )

    @mock.patch("%s.subp.which" % MPATH)
    def test_maybe_install_wg_packages_noop_when_wg_tools_present(
        self, m_which
    ):
        """Do nothing if wireguard-tools already exists."""
        m_which.return_value = "/usr/bin/wg"  # already installed
        distro = mock.MagicMock()
        # Would raise if install path were (wrongly) taken.
        distro.update_package_sources.side_effect = RuntimeError(
            "Some apt error"
        )
        cc_wireguard.maybe_install_wireguard_packages(cloud=FakeCloud(distro))

    @mock.patch("%s.subp.which" % MPATH)
    def test_maybe_install_wf_tools_raises_update_errors(self, m_which):
        """maybe_install_wireguard_packages logs and raises
        apt update errors.

        NOTE(review): "wf" in this test name appears to be a typo for
        "wg"; left unchanged since renaming tests alters discovery IDs.
        """
        m_which.return_value = None
        distro = mock.MagicMock()
        distro.update_package_sources.side_effect = RuntimeError(
            "Some apt error"
        )
        with self.assertRaises(RuntimeError) as context_manager:
            cc_wireguard.maybe_install_wireguard_packages(
                cloud=FakeCloud(distro)
            )
        self.assertEqual("Some apt error", str(context_manager.exception))
        self.assertIn("Package update failed\nTraceback", self.logs.getvalue())

    @mock.patch("%s.subp.which" % MPATH)
    def test_maybe_install_wg_raises_install_errors(self, m_which):
        """maybe_install_wireguard_packages logs and raises package
        install errors."""
        m_which.return_value = None
        distro = mock.MagicMock()
        distro.update_package_sources.return_value = None
        distro.install_packages.side_effect = RuntimeError(
            "Some install error"
        )
        with self.assertRaises(RuntimeError) as context_manager:
            cc_wireguard.maybe_install_wireguard_packages(
                cloud=FakeCloud(distro)
            )
        self.assertEqual("Some install error", str(context_manager.exception))
        self.assertIn(
            "Failed to install wireguard-tools\n", self.logs.getvalue()
        )

    @mock.patch("%s.subp.subp" % MPATH)
    def test_load_wg_module_failed(self, m_subp):
        """load_wireguard_kernel_module logs and raises
        kernel modules loading error."""
        m_subp.side_effect = subp.ProcessExecutionError(
            "Some kernel module load error"
        )
        with self.assertRaises(subp.ProcessExecutionError) as context_manager:
            cc_wireguard.load_wireguard_kernel_module()
        self.assertEqual(
            "Unexpected error while running command.\n"
            "Command: -\nExit code: -\nReason: -\n"
            "Stdout: Some kernel module load error\n"
            "Stderr: -",
            str(context_manager.exception),
        )
        # The error is also logged as a warning before being re-raised.
        self.assertIn(
            "WARNING: Could not load wireguard module:\n", self.logs.getvalue()
        )

    @mock.patch("%s.subp.which" % MPATH)
    def test_maybe_install_wg_packages_happy_path(self, m_which):
        """maybe_install_wireguard_packages installs wireguard-tools."""
        packages = ["wireguard-tools"]

        # Kernels older than 5.6 lack in-tree wireguard; the module
        # package is then expected in the install list as well.
        if util.kernel_version() < MIN_KERNEL_VERSION:
            packages.append("wireguard")

        m_which.return_value = None
        distro = mock.MagicMock()  # No errors raised
        cc_wireguard.maybe_install_wireguard_packages(cloud=FakeCloud(distro))
        distro.update_package_sources.assert_called_once_with()
        distro.install_packages.assert_called_once_with(packages)

    @mock.patch("%s.maybe_install_wireguard_packages" % MPATH)
    def test_handle_no_config(self, m_maybe_install_wireguard_packages):
        """When no wireguard configuration is provided, nothing happens."""
        cfg = {}
        cc_wireguard.handle(
            "wg", cfg=cfg, cloud=None, log=self.logger, args=None
        )
        self.assertIn(
            "DEBUG: Skipping module named wg, no 'wireguard'"
            " configuration found",
            self.logs.getvalue(),
        )
        self.assertEqual(m_maybe_install_wireguard_packages.call_count, 0)

    def test_readiness_probe_with_non_string_values(self):
        """ValueError raised for any values expected as string type."""
        cfg = [1, 2]
        errors = [
            "Expected a string for readinessprobe at 0. Found 1",
            "Expected a string for readinessprobe at 1. Found 2",
        ]
        with self.assertRaises(ValueError) as context_manager:
            cc_wireguard.readinessprobe_command_validation(cfg)
        error_msg = str(context_manager.exception)
        for error in errors:
            self.assertIn(error, error_msg)
+
+
class TestWireguardSchema:
    """Validate cc_wireguard cloud-config against the published schema."""

    @pytest.mark.parametrize(
        "config, error_msg",
        [
            # Valid schemas
            (
                {
                    "wireguard": {
                        "interfaces": [
                            {
                                "name": "wg0",
                                "config_path": "/etc/wireguard/wg0.conf",
                                "content": "test",
                            }
                        ]
                    }
                },
                None,
            ),
        ],
    )
    @skipUnlessJsonSchema()
    def test_schema_validation(self, config, error_msg):
        """Assert config validates cleanly, or fails matching error_msg."""
        schema = get_schema()
        if error_msg is None:
            # Valid configuration: strict validation must not raise.
            validate_cloudconfig_schema(config, schema, strict=True)
        else:
            with pytest.raises(SchemaValidationError, match=error_msg):
                validate_cloudconfig_schema(config, schema, strict=True)
+
+
+# vi: ts=4 expandtab
diff --git a/tests/unittests/config/test_schema.py b/tests/unittests/config/test_schema.py
index 76640436..b556c4b5 100644
--- a/tests/unittests/config/test_schema.py
+++ b/tests/unittests/config/test_schema.py
@@ -162,6 +162,7 @@ class TestGetSchema:
assert ["$defs", "$schema", "allOf"] == sorted(list(schema.keys()))
# New style schema should be defined in static schema file in $defs
expected_subschema_defs = [
+ {"$ref": "#/$defs/cc_ansible"},
{"$ref": "#/$defs/cc_apk_configure"},
{"$ref": "#/$defs/cc_apt_configure"},
{"$ref": "#/$defs/cc_apt_pipelining"},
@@ -170,7 +171,6 @@ class TestGetSchema:
{"$ref": "#/$defs/cc_byobu"},
{"$ref": "#/$defs/cc_ca_certs"},
{"$ref": "#/$defs/cc_chef"},
- {"$ref": "#/$defs/cc_debug"},
{"$ref": "#/$defs/cc_disable_ec2_metadata"},
{"$ref": "#/$defs/cc_disk_setup"},
{"$ref": "#/$defs/cc_fan"},
@@ -212,6 +212,7 @@ class TestGetSchema:
{"$ref": "#/$defs/cc_update_etc_hosts"},
{"$ref": "#/$defs/cc_update_hostname"},
{"$ref": "#/$defs/cc_users_groups"},
+ {"$ref": "#/$defs/cc_wireguard"},
{"$ref": "#/$defs/cc_write_files"},
{"$ref": "#/$defs/cc_yum_add_repo"},
{"$ref": "#/$defs/cc_zypper_add_repo"},
@@ -404,7 +405,7 @@ class TestValidateCloudConfigSchema:
},
},
{"a-b": "asdf"},
- "Deprecated cloud-config provided:\na-b: DEPRECATED. <desc>",
+ "Deprecated cloud-config provided:\na-b: DEPRECATED: <desc>",
),
(
{
@@ -423,7 +424,7 @@ class TestValidateCloudConfigSchema:
},
},
{"x": "+5"},
- "Deprecated cloud-config provided:\nx: DEPRECATED. <desc>",
+ "Deprecated cloud-config provided:\nx: DEPRECATED: <desc>",
),
(
{
@@ -441,7 +442,7 @@ class TestValidateCloudConfigSchema:
},
},
{"x": "5"},
- "Deprecated cloud-config provided:\nx: DEPRECATED. <desc>",
+ "Deprecated cloud-config provided:\nx: DEPRECATED: <desc>",
),
(
{
@@ -460,7 +461,7 @@ class TestValidateCloudConfigSchema:
},
},
{"x": "5"},
- "Deprecated cloud-config provided:\nx: DEPRECATED. <desc>",
+ "Deprecated cloud-config provided:\nx: DEPRECATED: <desc>",
),
(
{
@@ -474,7 +475,7 @@ class TestValidateCloudConfigSchema:
},
},
{"x": "+5"},
- "Deprecated cloud-config provided:\nx: DEPRECATED. <desc>",
+ "Deprecated cloud-config provided:\nx: DEPRECATED: <desc>",
),
(
{
@@ -509,7 +510,7 @@ class TestValidateCloudConfigSchema:
},
},
{"x": "+5"},
- "Deprecated cloud-config provided:\nx: DEPRECATED. <desc>",
+ "Deprecated cloud-config provided:\nx: DEPRECATED: <desc>",
),
(
{
@@ -546,7 +547,7 @@ class TestValidateCloudConfigSchema:
},
},
{"a-b": "asdf"},
- "Deprecated cloud-config provided:\na-b: DEPRECATED. <desc>",
+ "Deprecated cloud-config provided:\na-b: DEPRECATED: <desc>",
),
pytest.param(
{
@@ -769,7 +770,7 @@ class TestSchemaDocMarkdown:
**Supported distros:** debian, rhel
**Config schema**:
- **prop1:** (array of integer) prop-description
+ **prop1:** (array of integer) prop-description.
**Examples**::
@@ -822,9 +823,9 @@ class TestSchemaDocMarkdown:
**Activate only on keys:** ``prop1``, ``prop2``
**Config schema**:
- **prop1:** (array of string) prop-description
+ **prop1:** (array of string) prop-description.
- **prop2:** (boolean) prop2-description
+ **prop2:** (boolean) prop2-description.
**Examples**::
@@ -842,6 +843,23 @@ class TestSchemaDocMarkdown:
schema = {"properties": {"prop1": {"type": ["string", "integer"]}}}
assert "**prop1:** (string/integer)" in get_meta_doc(self.meta, schema)
+ @pytest.mark.parametrize("multi_key", ["oneOf", "anyOf"])
+ def test_get_meta_doc_handles_multiple_types_recursive(self, multi_key):
+ """get_meta_doc delimits multiple property types with a '/'."""
+ schema = {
+ "properties": {
+ "prop1": {
+ multi_key: [
+ {"type": ["string", "null"]},
+ {"type": "integer"},
+ ]
+ }
+ }
+ }
+ assert "**prop1:** (string/null/integer)" in get_meta_doc(
+ self.meta, schema
+ )
+
def test_references_are_flattened_in_schema_docs(self):
"""get_meta_doc flattens and renders full schema definitions."""
schema = {
@@ -867,7 +885,7 @@ class TestSchemaDocMarkdown:
"""\
**prop1:** (string/object) Objects support the following keys:
- **<opaque_label>:** (array of string) List of cool strings
+ **<opaque_label>:** (array of string) List of cool strings.
"""
)
in get_meta_doc(self.meta, schema)
@@ -941,14 +959,17 @@ class TestSchemaDocMarkdown:
"""
assert expected in get_meta_doc(self.meta, schema)
- def test_get_meta_doc_handles_nested_oneof_property_types(self):
+ @pytest.mark.parametrize("multi_key", ["oneOf", "anyOf"])
+ def test_get_meta_doc_handles_nested_multi_schema_property_types(
+ self, multi_key
+ ):
"""get_meta_doc describes array items oneOf declarations in type."""
schema = {
"properties": {
"prop1": {
"type": "array",
"items": {
- "oneOf": [{"type": "string"}, {"type": "integer"}]
+ multi_key: [{"type": "string"}, {"type": "integer"}]
},
}
}
@@ -957,14 +978,15 @@ class TestSchemaDocMarkdown:
self.meta, schema
)
- def test_get_meta_doc_handles_types_as_list(self):
+ @pytest.mark.parametrize("multi_key", ["oneOf", "anyOf"])
+ def test_get_meta_doc_handles_types_as_list(self, multi_key):
"""get_meta_doc renders types which have a list value."""
schema = {
"properties": {
"prop1": {
"type": ["boolean", "array"],
"items": {
- "oneOf": [{"type": "string"}, {"type": "integer"}]
+ multi_key: [{"type": "string"}, {"type": "integer"}]
},
}
}
@@ -1012,7 +1034,7 @@ class TestSchemaDocMarkdown:
dedent(
"""
**Config schema**:
- **prop1:** (array of integer) prop-description
+ **prop1:** (array of integer) prop-description.
**Examples**::
@@ -1058,7 +1080,7 @@ class TestSchemaDocMarkdown:
- option2
- option3
- The default value is option1
+ The default value is option1.
"""
)
@@ -1169,7 +1191,7 @@ class TestSchemaDocMarkdown:
}
}
},
- "**prop1:** (string/integer) DEPRECATED. <description>",
+ "**prop1:** (string/integer) DEPRECATED: <description>",
),
(
{
@@ -1182,7 +1204,7 @@ class TestSchemaDocMarkdown:
},
},
},
- "**prop1:** (string/integer) DEPRECATED. <description>",
+ "**prop1:** (string/integer) DEPRECATED: <description>",
),
(
{
@@ -1200,7 +1222,7 @@ class TestSchemaDocMarkdown:
}
},
},
- "**prop1:** (string/integer) DEPRECATED. <description>",
+ "**prop1:** (string/integer) DEPRECATED: <description>",
),
(
{
@@ -1220,7 +1242,7 @@ class TestSchemaDocMarkdown:
}
},
},
- "**prop1:** (string/integer) DEPRECATED. <description>",
+ "**prop1:** (string/integer) DEPRECATED: <description>",
),
(
{
@@ -1238,7 +1260,7 @@ class TestSchemaDocMarkdown:
},
},
},
- "**prop1:** (UNDEFINED) <description>\n",
+ "**prop1:** (UNDEFINED) <description>. DEPRECATED: <deprecat",
),
(
{
@@ -1259,7 +1281,74 @@ class TestSchemaDocMarkdown:
},
},
},
- "**prop1:** (UNDEFINED)\n",
+ "**prop1:** (number) <description>. DEPRECATED:"
+ " <deprecated_description>",
+ ),
+ (
+ {
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "properties": {
+ "prop1": {
+ "anyOf": [
+ {
+ "type": ["string", "integer"],
+ "description": "<deprecated_description>",
+ "deprecated": True,
+ },
+ {
+ "type": "string",
+ "enum": ["none", "unchanged", "os"],
+ "description": "<description>",
+ },
+ ]
+ },
+ },
+ },
+ "**prop1:** (``none``/``unchanged``/``os``) <description>."
+ " DEPRECATED: <deprecated_description>.",
+ ),
+ (
+ {
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "properties": {
+ "prop1": {
+ "anyOf": [
+ {
+ "type": ["string", "integer"],
+ "description": "<description_1>",
+ },
+ {
+ "type": "string",
+ "enum": ["none", "unchanged", "os"],
+ "description": "<description>_2",
+ },
+ ]
+ },
+ },
+ },
+ "**prop1:** (string/integer/``none``/``unchanged``/``os``)"
+ " <description_1>. <description>_2.\n",
+ ),
+ (
+ {
+ "properties": {
+ "prop1": {
+ "description": "<desc_1>",
+ "type": "array",
+ "items": {
+ "type": "object",
+ "anyOf": [
+ {
+ "properties": {
+ "sub_prop1": {"type": "string"},
+ },
+ },
+ ],
+ },
+ },
+ },
+ },
+ "**prop1:** (array of object) <desc_1>.\n",
),
],
)
@@ -1623,9 +1712,9 @@ class TestHandleSchemaArgs:
apt_reboot_if_required: true # D3
# Deprecations: -------------
- # D1: DEPRECATED. Dropped after April 2027. Use ``package_update``. Default: ``false``
- # D2: DEPRECATED. Dropped after April 2027. Use ``package_upgrade``. Default: ``false``
- # D3: DEPRECATED. Dropped after April 2027. Use ``package_reboot_if_required``. Default: ``false``
+ # D1: DEPRECATED: Dropped after April 2027. Use ``package_update``. Default: ``false``
+ # D2: DEPRECATED: Dropped after April 2027. Use ``package_upgrade``. Default: ``false``
+ # D3: DEPRECATED: Dropped after April 2027. Use ``package_reboot_if_required``. Default: ``false``
Valid cloud-config: {}
@@ -1637,9 +1726,9 @@ class TestHandleSchemaArgs:
dedent(
"""\
Cloud config schema deprecations: \
-apt_reboot_if_required: DEPRECATED. Dropped after April 2027. Use ``package_reboot_if_required``. Default: ``false``, \
-apt_update: DEPRECATED. Dropped after April 2027. Use ``package_update``. Default: ``false``, \
-apt_upgrade: DEPRECATED. Dropped after April 2027. Use ``package_upgrade``. Default: ``false``
+apt_reboot_if_required: DEPRECATED: Dropped after April 2027. Use ``package_reboot_if_required``. Default: ``false``, \
+apt_update: DEPRECATED: Dropped after April 2027. Use ``package_update``. Default: ``false``, \
+apt_upgrade: DEPRECATED: Dropped after April 2027. Use ``package_upgrade``. Default: ``false``
Valid cloud-config: {}
""" # noqa: E501
),
diff --git a/tests/unittests/conftest.py b/tests/unittests/conftest.py
new file mode 100644
index 00000000..e265a285
--- /dev/null
+++ b/tests/unittests/conftest.py
@@ -0,0 +1,57 @@
+import builtins
+import glob
+import os
+
+import pytest
+
+from cloudinit import atomic_helper, util
+from tests.unittests.helpers import retarget_many_wrapper
+
+FS_FUNCS = {
+ os.path: [
+ ("isfile", 1),
+ ("exists", 1),
+ ("islink", 1),
+ ("isdir", 1),
+ ("lexists", 1),
+ ("relpath", 1),
+ ],
+ os: [
+ ("listdir", 1),
+ ("mkdir", 1),
+ ("lstat", 1),
+ ("symlink", 2),
+ ("stat", 1),
+ ("scandir", 1),
+ ],
+ util: [
+ ("write_file", 1),
+ ("append_file", 1),
+ ("load_file", 1),
+ ("ensure_dir", 1),
+ ("chmod", 1),
+ ("delete_dir_contents", 1),
+ ("del_file", 1),
+ ("sym_link", -1),
+ ("copy", -1),
+ ],
+ glob: [
+ ("glob", 1),
+ ],
+ builtins: [
+ ("open", 1),
+ ],
+ atomic_helper: [
+ ("write_file", 1),
+ ],
+}
+
+
+@pytest.fixture
+def fake_filesystem(mocker, tmpdir):
+ """Mocks fs functions to operate under `tmpdir`"""
+ for (mod, funcs) in FS_FUNCS.items():
+ for f, nargs in funcs:
+ func = getattr(mod, f)
+ trap_func = retarget_many_wrapper(str(tmpdir), nargs, func)
+ mocker.patch.object(mod, f, trap_func)
diff --git a/tests/unittests/distros/test_create_users.py b/tests/unittests/distros/test_create_users.py
index f57bfd75..edc152e1 100644
--- a/tests/unittests/distros/test_create_users.py
+++ b/tests/unittests/distros/test_create_users.py
@@ -169,7 +169,7 @@ class TestCreateUser(CiTestCase):
mock.call(["passwd", "-l", user]),
]
self.assertEqual(m_subp.call_args_list, expected)
- self.assertNotIn("WARNING: DEPRECATION: ", self.logs.getvalue())
+ self.assertNotIn("WARNING: DEPRECATED: ", self.logs.getvalue())
def test_explicit_sudo_false(self, m_subp, m_is_snappy):
user = "foouser"
@@ -181,6 +181,24 @@ class TestCreateUser(CiTestCase):
mock.call(["passwd", "-l", user]),
],
)
+ self.assertIn(
+ "WARNING: DEPRECATED: The user foouser has a 'sudo' config value"
+ " of 'false' which will be dropped after April 2027. Use 'null'"
+ " instead.",
+ self.logs.getvalue(),
+ )
+
+ def test_explicit_sudo_none(self, m_subp, m_is_snappy):
+ user = "foouser"
+ self.dist.create_user(user, sudo=None)
+ self.assertEqual(
+ m_subp.call_args_list,
+ [
+ self._useradd2call([user, "-m"]),
+ mock.call(["passwd", "-l", user]),
+ ],
+ )
+ self.assertNotIn("WARNING: DEPRECATED: ", self.logs.getvalue())
@mock.patch("cloudinit.ssh_util.setup_user_keys")
def test_setup_ssh_authorized_keys_with_string(
diff --git a/tests/unittests/sources/test_azure.py b/tests/unittests/sources/test_azure.py
index 9ddb1f56..1c6d1eb1 100644
--- a/tests/unittests/sources/test_azure.py
+++ b/tests/unittests/sources/test_azure.py
@@ -3439,6 +3439,7 @@ class TestAzureDataSourcePreprovisioning(CiTestCase):
method="GET",
timeout=dsaz.IMDS_TIMEOUT_IN_SECONDS,
url=full_url,
+ stream=False,
)
],
)
@@ -3490,6 +3491,7 @@ class TestAzureDataSourcePreprovisioning(CiTestCase):
method="GET",
timeout=dsaz.IMDS_TIMEOUT_IN_SECONDS,
url=full_url,
+ stream=False,
),
m_request.call_args_list,
)
@@ -4384,6 +4386,69 @@ class TestProvisioning:
# Verify no netlink operations for recovering PPS.
assert self.mock_netlink.mock_calls == []
+ @pytest.mark.parametrize(
+ "subp_side_effect",
+ [
+ subp.SubpResult("okie dokie", ""),
+ subp.ProcessExecutionError(
+ cmd=["failed", "cmd"],
+ stdout="test_stdout",
+ stderr="test_stderr",
+ exit_code=4,
+ ),
+ ],
+ )
+ def test_os_disk_pps(self, mock_sleep, subp_side_effect):
+ self.imds_md["extended"]["compute"]["ppsType"] = "PreprovisionedOSDisk"
+
+ self.mock_subp_subp.side_effect = [subp_side_effect]
+ self.mock_readurl.side_effect = [
+ mock.MagicMock(contents=json.dumps(self.imds_md).encode()),
+ ]
+
+ self.azure_ds._get_data()
+
+ assert self.mock_readurl.mock_calls == [
+ mock.call(
+ "http://169.254.169.254/metadata/instance?"
+ "api-version=2021-08-01&extended=true",
+ timeout=2,
+ headers={"Metadata": "true"},
+ retries=10,
+ exception_cb=dsaz.imds_readurl_exception_callback,
+ infinite=False,
+ )
+ ]
+
+ assert self.mock_subp_subp.mock_calls == []
+ assert mock_sleep.mock_calls == [mock.call(31536000)]
+
+ # Verify DHCP is setup once.
+ assert self.mock_wrapping_setup_ephemeral_networking.mock_calls == [
+ mock.call(timeout_minutes=20)
+ ]
+ assert self.mock_net_dhcp_maybe_perform_dhcp_discovery.mock_calls == [
+ mock.call(None, dsaz.dhcp_log_cb)
+ ]
+ assert self.azure_ds._wireserver_endpoint == "10.11.12.13"
+ assert self.azure_ds._is_ephemeral_networking_up() is False
+
+ # Verify reported ready once.
+ assert self.mock_azure_get_metadata_from_fabric.mock_calls == [
+ mock.call(
+ endpoint="10.11.12.13",
+ iso_dev="/dev/sr0",
+ pubkey_info=None,
+ )
+ ]
+
+ # Verify no netlink operations for os disk PPS.
+ assert self.mock_netlink.mock_calls == []
+
+ # Ensure no reported ready marker is left behind as the VM's next
+ # boot will behave like a typical provisioning boot.
+ assert self.patched_reported_ready_marker_path.exists() is False
+
class TestValidateIMDSMetadata:
@pytest.mark.parametrize(
diff --git a/tests/unittests/test__init__.py b/tests/unittests/test__init__.py
index 0ed8a120..44a06b2c 100644
--- a/tests/unittests/test__init__.py
+++ b/tests/unittests/test__init__.py
@@ -5,9 +5,11 @@ import os
import shutil
import tempfile
+import pytest
+
from cloudinit import handlers, helpers, settings, url_helper, util
from cloudinit.cmd import main
-from tests.unittests.helpers import CiTestCase, ExitStack, TestCase, mock
+from tests.unittests.helpers import ExitStack, TestCase, mock
class FakeModule(handlers.Handler):
@@ -218,65 +220,117 @@ class TestHandlerHandlePart(TestCase):
)
-class TestCmdlineUrl(CiTestCase):
+class FakeResponse:
+ def __init__(self, content, status_code=200):
+ self._content = content
+ self._remaining_content = content
+ self.status_code = status_code
+ self.encoding = None
+
+ @property
+ def content(self):
+ return self._remaining_content
+
+ def iter_content(self, chunk_size, *_, **__):
+ iterators = [iter(self._content)] * chunk_size
+ for chunk in zip(*iterators):
+ self._remaining_content = self._remaining_content[chunk_size:]
+ yield bytes(chunk)
+
+
+class TestCmdlineUrl:
def test_parse_cmdline_url_nokey_raises_keyerror(self):
- self.assertRaises(
- KeyError, main.parse_cmdline_url, "root=foo bar single"
- )
+ with pytest.raises(KeyError):
+ main.parse_cmdline_url("root=foo bar single")
def test_parse_cmdline_url_found(self):
cmdline = "root=foo bar single url=http://example.com arg1 -v"
- self.assertEqual(
- ("url", "http://example.com"), main.parse_cmdline_url(cmdline)
- )
+ assert ("url", "http://example.com") == main.parse_cmdline_url(cmdline)
@mock.patch("cloudinit.cmd.main.url_helper.read_file_or_url")
- def test_invalid_content(self, m_read):
+ def test_invalid_content(self, m_read, tmpdir):
key = "cloud-config-url"
url = "http://example.com/foo"
cmdline = "ro %s=%s bar=1" % (key, url)
m_read.return_value = url_helper.StringResponse(b"unexpected blob")
- fpath = self.tmp_path("ccfile")
+ fpath = tmpdir.join("ccfile")
lvl, msg = main.attempt_cmdline_url(
fpath, network=True, cmdline=cmdline
)
- self.assertEqual(logging.WARN, lvl)
- self.assertIn(url, msg)
- self.assertFalse(os.path.exists(fpath))
+ assert logging.WARN == lvl
+ assert url in msg
+ assert False is os.path.exists(fpath)
@mock.patch("cloudinit.cmd.main.url_helper.read_file_or_url")
- def test_valid_content(self, m_read):
+ def test_invalid_content_url(self, m_read, tmpdir):
+ key = "cloud-config-url"
+ url = "http://example.com/foo"
+ cmdline = "ro %s=%s bar=1" % (key, url)
+ response = mock.Mock()
+ response.iter_content.return_value = iter(
+ (b"unexpected blob", StopIteration)
+ )
+ response.status_code = 200
+ m_read.return_value = url_helper.UrlResponse(response)
+
+ fpath = tmpdir.join("ccfile")
+ lvl, msg = main.attempt_cmdline_url(
+ fpath, network=True, cmdline=cmdline
+ )
+ assert logging.WARN == lvl
+ assert url in msg
+ assert False is os.path.exists(fpath)
+
+ @mock.patch("cloudinit.cmd.main.url_helper.read_file_or_url")
+ def test_valid_content(self, m_read, tmpdir):
url = "http://example.com/foo"
payload = b"#cloud-config\nmydata: foo\nbar: wark\n"
cmdline = "ro %s=%s bar=1" % ("cloud-config-url", url)
m_read.return_value = url_helper.StringResponse(payload)
- fpath = self.tmp_path("ccfile")
+ fpath = tmpdir.join("ccfile")
+ lvl, msg = main.attempt_cmdline_url(
+ fpath, network=True, cmdline=cmdline
+ )
+ assert util.load_file(fpath, decode=False) == payload
+ assert logging.INFO == lvl
+ assert url in msg
+
+ @mock.patch("cloudinit.cmd.main.url_helper.read_file_or_url")
+ def test_valid_content_url(self, m_read, tmpdir):
+ url = "http://example.com/foo"
+ payload = b"#cloud-config\nmydata: foo\nbar: wark\n"
+ cmdline = "ro %s=%s bar=1" % ("cloud-config-url", url)
+
+ response = FakeResponse(payload)
+ m_read.return_value = url_helper.UrlResponse(response)
+
+ fpath = tmpdir.join("ccfile")
lvl, msg = main.attempt_cmdline_url(
fpath, network=True, cmdline=cmdline
)
- self.assertEqual(util.load_file(fpath, decode=False), payload)
- self.assertEqual(logging.INFO, lvl)
- self.assertIn(url, msg)
+ assert util.load_file(fpath, decode=False) == payload
+ assert logging.INFO == lvl
+ assert url in msg
@mock.patch("cloudinit.cmd.main.url_helper.read_file_or_url")
- def test_no_key_found(self, m_read):
+ def test_no_key_found(self, m_read, tmpdir):
cmdline = "ro mykey=http://example.com/foo root=foo"
- fpath = self.tmp_path("ccpath")
+ fpath = tmpdir.join("ccfile")
lvl, _msg = main.attempt_cmdline_url(
fpath, network=True, cmdline=cmdline
)
m_read.assert_not_called()
- self.assertFalse(os.path.exists(fpath))
- self.assertEqual(logging.DEBUG, lvl)
+ assert False is os.path.exists(fpath)
+ assert logging.DEBUG == lvl
@mock.patch("cloudinit.cmd.main.url_helper.read_file_or_url")
- def test_exception_warns(self, m_read):
+ def test_exception_warns(self, m_read, tmpdir):
url = "http://example.com/foo"
cmdline = "ro cloud-config-url=%s root=LABEL=bar" % url
- fpath = self.tmp_path("ccfile")
+ fpath = tmpdir.join("ccfile")
m_read.side_effect = url_helper.UrlError(
cause="Unexpected Error", url="http://example.com/foo"
)
@@ -284,9 +338,9 @@ class TestCmdlineUrl(CiTestCase):
lvl, msg = main.attempt_cmdline_url(
fpath, network=True, cmdline=cmdline
)
- self.assertEqual(logging.WARN, lvl)
- self.assertIn(url, msg)
- self.assertFalse(os.path.exists(fpath))
+ assert logging.WARN == lvl
+ assert url in msg
+ assert False is os.path.exists(fpath)
# vi: ts=4 expandtab
diff --git a/tests/unittests/test_sshutil.py b/tests/unittests/test_ssh_util.py
index 3328b8f4..d6a72dc1 100644
--- a/tests/unittests/test_sshutil.py
+++ b/tests/unittests/test_ssh_util.py
@@ -1,30 +1,27 @@
# This file is part of cloud-init. See LICENSE file for license information.
import os
-from collections import namedtuple
+import stat
from functools import partial
+from typing import NamedTuple
+from unittest import mock
from unittest.mock import patch
+import pytest
+
from cloudinit import ssh_util, util
-from cloudinit.temp_utils import mkdtemp
-from tests.unittests import helpers as test_helpers
-
-# https://stackoverflow.com/questions/11351032/
-FakePwEnt = namedtuple(
- "FakePwEnt",
- [
- "pw_name",
- "pw_passwd",
- "pw_uid",
- "pw_gid",
- "pw_gecos",
- "pw_dir",
- "pw_shell",
- ],
-)
-FakePwEnt.__new__.__defaults__ = tuple(
- "UNSET_%s" % n for n in FakePwEnt._fields
-)
+
+M_PATH = "cloudinit.ssh_util."
+
+
+class FakePwEnt(NamedTuple):
+ pw_name: str = "UNSET_pw_name"
+    pw_passwd: str = "UNSET_pw_passwd"
+ pw_uid: str = "UNSET_pw_uid"
+ pw_gid: str = "UNSET_pw_gid"
+ pw_gecos: str = "UNSET_pw_gecos"
+ pw_dir: str = "UNSET_pw_dir"
+ pw_shell: str = "UNSET_pw_shell"
def mock_get_owner(updated_permissions, value):
@@ -322,66 +319,40 @@ TEST_OPTIONS = (
)
-class TestAuthKeyLineParser(test_helpers.CiTestCase):
- def test_simple_parse(self):
- # test key line with common 3 fields (keytype, base64, comment)
- parser = ssh_util.AuthKeyLineParser()
- for ktype in KEY_TYPES:
- content = VALID_CONTENT[ktype]
- comment = "user-%s@host" % ktype
- line = " ".join(
- (
- ktype,
- content,
- comment,
- )
- )
- key = parser.parse(line)
-
- self.assertEqual(key.base64, content)
- self.assertFalse(key.options)
- self.assertEqual(key.comment, comment)
- self.assertEqual(key.keytype, ktype)
-
- def test_parse_no_comment(self):
- # test key line with key type and base64 only
- parser = ssh_util.AuthKeyLineParser()
- for ktype in KEY_TYPES:
- content = VALID_CONTENT[ktype]
- line = " ".join(
- (
- ktype,
- content,
- )
- )
- key = parser.parse(line)
-
- self.assertEqual(key.base64, content)
- self.assertFalse(key.options)
- self.assertFalse(key.comment)
- self.assertEqual(key.keytype, ktype)
-
- def test_parse_with_keyoptions(self):
- # test key line with options in it
- parser = ssh_util.AuthKeyLineParser()
+class TestAuthKeyLineParser:
+ @pytest.mark.parametrize("with_options", [True, False])
+ @pytest.mark.parametrize("with_comment", [True, False])
+ @pytest.mark.parametrize("ktype", KEY_TYPES)
+ def test_parse(self, ktype, with_comment, with_options):
+ content = VALID_CONTENT[ktype]
+ comment = "user-%s@host" % ktype
options = TEST_OPTIONS
- for ktype in KEY_TYPES:
- content = VALID_CONTENT[ktype]
- comment = "user-%s@host" % ktype
- line = " ".join(
- (
- options,
- ktype,
- content,
- comment,
- )
- )
- key = parser.parse(line)
-
- self.assertEqual(key.base64, content)
- self.assertEqual(key.options, options)
- self.assertEqual(key.comment, comment)
- self.assertEqual(key.keytype, ktype)
+
+ line_args = []
+ if with_options:
+ line_args.append(options)
+ line_args.extend(
+ [
+ ktype,
+ content,
+ ]
+ )
+ if with_comment:
+ line_args.append(comment)
+ line = " ".join(line_args)
+
+ key = ssh_util.AuthKeyLineParser().parse(line)
+
+ assert key.base64 == content
+ assert key.keytype == ktype
+ if with_options:
+ assert key.options == options
+ else:
+ assert key.options is None
+ if with_comment:
+ assert key.comment == comment
+ else:
+ assert key.comment == ""
def test_parse_with_options_passed_in(self):
# test key line with key type and base64 only
@@ -391,30 +362,44 @@ class TestAuthKeyLineParser(test_helpers.CiTestCase):
myopts = "no-port-forwarding,no-agent-forwarding"
key = parser.parse("allowedopt" + " " + baseline)
- self.assertEqual(key.options, "allowedopt")
+ assert key.options == "allowedopt"
key = parser.parse("overridden_opt " + baseline, options=myopts)
- self.assertEqual(key.options, myopts)
+ assert key.options == myopts
def test_parse_invalid_keytype(self):
parser = ssh_util.AuthKeyLineParser()
key = parser.parse(" ".join(["badkeytype", VALID_CONTENT["rsa"]]))
- self.assertFalse(key.valid())
+ assert not key.valid()
-class TestUpdateAuthorizedKeys(test_helpers.CiTestCase):
- def test_new_keys_replace(self):
+class TestUpdateAuthorizedKeys:
+ @pytest.mark.parametrize(
+ "new_entries",
+ [
+ (
+ [
+ " ".join(("rsa", VALID_CONTENT["rsa"], "new_comment1")),
+ ]
+ ),
+ pytest.param(
+ [
+ " ".join(("rsa", VALID_CONTENT["rsa"], "new_comment1")),
+ "xxx-invalid-thing1",
+ "xxx-invalid-blob2",
+ ],
+ id="skip-invalid-entries",
+ ),
+ ],
+ )
+ def test_new_keys_replace(self, new_entries):
"""new entries with the same base64 should replace old."""
orig_entries = [
" ".join(("rsa", VALID_CONTENT["rsa"], "orig_comment1")),
" ".join(("dsa", VALID_CONTENT["dsa"], "orig_comment2")),
]
- new_entries = [
- " ".join(("rsa", VALID_CONTENT["rsa"], "new_comment1")),
- ]
-
expected = "\n".join([new_entries[0], orig_entries[1]]) + "\n"
parser = ssh_util.AuthKeyLineParser()
@@ -423,100 +408,77 @@ class TestUpdateAuthorizedKeys(test_helpers.CiTestCase):
[parser.parse(p) for p in new_entries],
)
- self.assertEqual(expected, found)
-
- def test_new_invalid_keys_are_ignored(self):
- """new entries that are invalid should be skipped."""
- orig_entries = [
- " ".join(("rsa", VALID_CONTENT["rsa"], "orig_comment1")),
- " ".join(("dsa", VALID_CONTENT["dsa"], "orig_comment2")),
- ]
-
- new_entries = [
- " ".join(("rsa", VALID_CONTENT["rsa"], "new_comment1")),
- "xxx-invalid-thing1",
- "xxx-invalid-blob2",
- ]
-
- expected = "\n".join([new_entries[0], orig_entries[1]]) + "\n"
-
- parser = ssh_util.AuthKeyLineParser()
- found = ssh_util.update_authorized_keys(
- [parser.parse(p) for p in orig_entries],
- [parser.parse(p) for p in new_entries],
- )
-
- self.assertEqual(expected, found)
-
-
-class TestParseSSHConfig(test_helpers.CiTestCase):
- def setUp(self):
- self.load_file_patch = patch("cloudinit.ssh_util.util.load_file")
- self.load_file = self.load_file_patch.start()
- self.isfile_patch = patch("cloudinit.ssh_util.os.path.isfile")
- self.isfile = self.isfile_patch.start()
- self.isfile.return_value = True
-
- def tearDown(self):
- self.load_file_patch.stop()
- self.isfile_patch.stop()
-
- def test_not_a_file(self):
- self.isfile.return_value = False
- self.load_file.side_effect = IOError
- ret = ssh_util.parse_ssh_config("not a real file")
- self.assertEqual([], ret)
-
- def test_empty_file(self):
- self.load_file.return_value = ""
- ret = ssh_util.parse_ssh_config("some real file")
- self.assertEqual([], ret)
-
- def test_comment_line(self):
- comment_line = "# This is a comment"
- self.load_file.return_value = comment_line
+ assert expected == found
+
+
+@mock.patch(M_PATH + "util.load_file")
+@mock.patch(M_PATH + "os.path.isfile")
+class TestParseSSHConfig:
+ @pytest.mark.parametrize(
+ "is_file, file_content",
+ [
+ pytest.param(True, ("",), id="empty-file"),
+ pytest.param(False, IOError, id="not-a-file"),
+ ],
+ )
+ def test_dummy_file(self, m_is_file, m_load_file, is_file, file_content):
+ m_is_file.return_value = is_file
+ m_load_file.side_effect = file_content
+ ret = ssh_util.parse_ssh_config("notmatter")
+ assert [] == ret
+
+ @pytest.mark.parametrize(
+ "file_content",
+ [
+ pytest.param(["# This is a comment"], id="comment_line"),
+ pytest.param(
+ ["# This is a comment", "# This is another comment"],
+ id="two-comment_lines",
+ ),
+ ],
+ )
+ def test_comment_line(self, m_is_file, m_load_file, file_content):
+ m_is_file.return_value = True
+ m_load_file.return_value = "\n".join(file_content)
ret = ssh_util.parse_ssh_config("some real file")
- self.assertEqual(1, len(ret))
- self.assertEqual(comment_line, ret[0].line)
+ assert len(file_content) == len(ret)
+ assert file_content[0] == ret[0].line
- def test_blank_lines(self):
+ def test_blank_lines(self, m_is_file, m_load_file):
+ m_is_file.return_value = True
lines = ["", "\t", " "]
- self.load_file.return_value = "\n".join(lines)
+ m_load_file.return_value = "\n".join(lines)
ret = ssh_util.parse_ssh_config("some real file")
- self.assertEqual(len(lines), len(ret))
+ assert len(lines) == len(ret)
for line in ret:
- self.assertEqual("", line.line)
-
- def test_lower_case_config(self):
- self.load_file.return_value = "foo bar"
- ret = ssh_util.parse_ssh_config("some real file")
- self.assertEqual(1, len(ret))
- self.assertEqual("foo", ret[0].key)
- self.assertEqual("bar", ret[0].value)
-
- def test_upper_case_config(self):
- self.load_file.return_value = "Foo Bar"
- ret = ssh_util.parse_ssh_config("some real file")
- self.assertEqual(1, len(ret))
- self.assertEqual("foo", ret[0].key)
- self.assertEqual("Bar", ret[0].value)
-
- def test_lower_case_with_equals(self):
- self.load_file.return_value = "foo=bar"
- ret = ssh_util.parse_ssh_config("some real file")
- self.assertEqual(1, len(ret))
- self.assertEqual("foo", ret[0].key)
- self.assertEqual("bar", ret[0].value)
-
- def test_upper_case_with_equals(self):
- self.load_file.return_value = "Foo=bar"
+ assert "" == line.line
+
+ @pytest.mark.parametrize(
+ "file_content, expected_key, expected_value",
+ [
+ pytest.param("foo bar", "foo", "bar", id="lower-case"),
+ pytest.param("Foo Bar", "foo", "Bar", id="upper-case"),
+ pytest.param("foo=bar", "foo", "bar", id="lower-case-with-equals"),
+ pytest.param("Foo=bar", "foo", "bar", id="upper-case-with-equals"),
+ ],
+ )
+ def test_case_config(
+ self,
+ m_is_file,
+ m_load_file,
+ file_content,
+ expected_key,
+ expected_value,
+ ):
+ m_is_file.return_value = True
+ m_load_file.return_value = file_content
ret = ssh_util.parse_ssh_config("some real file")
- self.assertEqual(1, len(ret))
- self.assertEqual("foo", ret[0].key)
- self.assertEqual("bar", ret[0].value)
+ assert 1 == len(ret)
+ assert expected_key == ret[0].key
+ assert expected_value == ret[0].value
-class TestUpdateSshConfigLines(test_helpers.CiTestCase):
+class TestUpdateSshConfigLines:
"""Test the update_ssh_config_lines method."""
exlines = [
@@ -529,24 +491,25 @@ class TestUpdateSshConfigLines(test_helpers.CiTestCase):
pwauth = "PasswordAuthentication"
def check_line(self, line, opt, val):
- self.assertEqual(line.key, opt.lower())
- self.assertEqual(line.value, val)
- self.assertIn(opt, str(line))
- self.assertIn(val, str(line))
-
- def test_new_option_added(self):
- """A single update of non-existing option."""
- lines = ssh_util.parse_ssh_config_lines(list(self.exlines))
- result = ssh_util.update_ssh_config_lines(lines, {"MyKey": "MyVal"})
- self.assertEqual(["MyKey"], result)
- self.check_line(lines[-1], "MyKey", "MyVal")
-
- def test_commented_out_not_updated_but_appended(self):
- """Implementation does not un-comment and update lines."""
+ assert line.key == opt.lower()
+ assert line.value == val
+ assert opt in str(line)
+ assert val in str(line)
+
+ @pytest.mark.parametrize(
+ "key, value",
+ [
+ pytest.param("MyKey", "MyVal", id="new_option_added"),
+ pytest.param(
+ pwauth, "no", id="commented_out_not_updated_but_appended"
+ ),
+ ],
+ )
+ def test_update_ssh_config_lines(self, key, value):
lines = ssh_util.parse_ssh_config_lines(list(self.exlines))
- result = ssh_util.update_ssh_config_lines(lines, {self.pwauth: "no"})
- self.assertEqual([self.pwauth], result)
- self.check_line(lines[-1], self.pwauth, "no")
+ result = ssh_util.update_ssh_config_lines(lines, {key: value})
+ assert [key] == result
+ self.check_line(lines[-1], key, value)
def test_option_without_value(self):
"""Implementation only accepts key-value pairs."""
@@ -554,14 +517,14 @@ class TestUpdateSshConfigLines(test_helpers.CiTestCase):
denyusers_opt = "DenyUsers"
extended_exlines.append(denyusers_opt)
lines = ssh_util.parse_ssh_config_lines(list(extended_exlines))
- self.assertNotIn(denyusers_opt, str(lines))
+ assert denyusers_opt not in str(lines)
def test_single_option_updated(self):
"""A single update should have change made and line updated."""
opt, val = ("UsePAM", "no")
lines = ssh_util.parse_ssh_config_lines(list(self.exlines))
result = ssh_util.update_ssh_config_lines(lines, {opt: val})
- self.assertEqual([opt], result)
+ assert [opt] == result
self.check_line(lines[1], opt, val)
def test_multiple_updates_with_add(self):
@@ -574,7 +537,7 @@ class TestUpdateSshConfigLines(test_helpers.CiTestCase):
}
lines = ssh_util.parse_ssh_config_lines(list(self.exlines))
result = ssh_util.update_ssh_config_lines(lines, updates)
- self.assertEqual(set(["UsePAM", "NewOpt", "AcceptEnv"]), set(result))
+ assert set(["UsePAM", "NewOpt", "AcceptEnv"]) == set(result)
self.check_line(lines[3], "AcceptEnv", updates["AcceptEnv"])
def test_return_empty_if_no_changes(self):
@@ -582,8 +545,8 @@ class TestUpdateSshConfigLines(test_helpers.CiTestCase):
updates = {"UsePAM": "yes"}
lines = ssh_util.parse_ssh_config_lines(list(self.exlines))
result = ssh_util.update_ssh_config_lines(lines, updates)
- self.assertEqual([], result)
- self.assertEqual(self.exlines, [str(line) for line in lines])
+ assert [] == result
+ assert self.exlines == [str(line) for line in lines]
def test_keycase_not_modified(self):
"""Original case of key should not be changed on update.
@@ -591,109 +554,150 @@ class TestUpdateSshConfigLines(test_helpers.CiTestCase):
updates = {"usepam": "no"}
lines = ssh_util.parse_ssh_config_lines(list(self.exlines))
result = ssh_util.update_ssh_config_lines(lines, updates)
- self.assertEqual(["usepam"], result)
- self.assertEqual("UsePAM no", str(lines[1]))
+ assert ["usepam"] == result
+ assert "UsePAM no" == str(lines[1])
-class TestUpdateSshConfig(test_helpers.CiTestCase):
+class TestUpdateSshConfig:
cfgdata = "\n".join(["#Option val", "MyKey ORIG_VAL", ""])
- def test_modified(self):
- mycfg = self.tmp_path("ssh_config_1")
+ def test_modified(self, tmpdir):
+ mycfg = tmpdir.join("ssh_config_1")
util.write_file(mycfg, self.cfgdata)
ret = ssh_util.update_ssh_config({"MyKey": "NEW_VAL"}, mycfg)
- self.assertTrue(ret)
+ assert True is ret
found = util.load_file(mycfg)
- self.assertEqual(self.cfgdata.replace("ORIG_VAL", "NEW_VAL"), found)
+ assert self.cfgdata.replace("ORIG_VAL", "NEW_VAL") == found
# assert there is a newline at end of file (LP: #1677205)
- self.assertEqual("\n", found[-1])
+ assert "\n" == found[-1]
- def test_not_modified(self):
- mycfg = self.tmp_path("ssh_config_2")
+ def test_not_modified(self, tmpdir):
+ mycfg = tmpdir.join("ssh_config_2")
util.write_file(mycfg, self.cfgdata)
with patch("cloudinit.ssh_util.util.write_file") as m_write_file:
ret = ssh_util.update_ssh_config({"MyKey": "ORIG_VAL"}, mycfg)
- self.assertFalse(ret)
- self.assertEqual(self.cfgdata, util.load_file(mycfg))
+ assert False is ret
+ assert self.cfgdata == util.load_file(mycfg)
m_write_file.assert_not_called()
-
-class TestBasicAuthorizedKeyParse(test_helpers.CiTestCase):
- def test_user(self):
- self.assertEqual(
- ["/opt/bobby/keys"],
- ssh_util.render_authorizedkeysfile_paths(
- "/opt/%u/keys", "/home/bobby", "bobby"
+ def test_without_include(self, tmpdir):
+ mycfg = tmpdir.join("sshd_config")
+ cfg = "X Y"
+ util.write_file(mycfg, cfg)
+ assert ssh_util.update_ssh_config({"key": "value"}, mycfg)
+ assert "X Y\nkey value\n" == util.load_file(mycfg)
+ expected_conf_file = f"{mycfg}.d/50-cloud-init.conf"
+ assert not os.path.isfile(expected_conf_file)
+
+ @pytest.mark.parametrize(
+ "cfg",
+ ["Include {mycfg}.d/*.conf", "Include {mycfg}.d/*.conf # comment"],
+ )
+ def test_with_include(self, cfg, tmpdir):
+ mycfg = tmpdir.join("sshd_config")
+ util.write_file(mycfg, cfg.format(mycfg=mycfg))
+ assert ssh_util.update_ssh_config({"key": "value"}, mycfg)
+ expected_conf_file = f"{mycfg}.d/50-cloud-init.conf"
+ assert os.path.isfile(expected_conf_file)
+ assert 0o600 == stat.S_IMODE(os.stat(expected_conf_file).st_mode)
+ assert "key value\n" == util.load_file(expected_conf_file)
+
+ def test_with_commented_include(self, tmpdir):
+ mycfg = tmpdir.join("sshd_config")
+ cfg = f"# Include {mycfg}.d/*.conf"
+ util.write_file(mycfg, cfg)
+ assert ssh_util.update_ssh_config({"key": "value"}, mycfg)
+ assert f"{cfg}\nkey value\n" == util.load_file(mycfg)
+ expected_conf_file = f"{mycfg}.d/50-cloud-init.conf"
+ assert not os.path.isfile(expected_conf_file)
+
+ def test_with_other_include(self, tmpdir):
+ mycfg = tmpdir.join("sshd_config")
+ cfg = f"Include other_{mycfg}.d/*.conf"
+ util.write_file(mycfg, cfg)
+ assert ssh_util.update_ssh_config({"key": "value"}, mycfg)
+ assert f"{cfg}\nkey value\n" == util.load_file(mycfg)
+ expected_conf_file = f"{mycfg}.d/50-cloud-init.conf"
+ assert not os.path.isfile(expected_conf_file)
+ assert not os.path.isfile(f"other_{mycfg}.d/50-cloud-init.conf")
+
+
+class TestBasicAuthorizedKeyParse:
+ @pytest.mark.parametrize(
+ "value, homedir, username, expected_rendered",
+ [
+ pytest.param(
+ "/opt/%u/keys",
+ "/home/bobby",
+ "bobby",
+ ["/opt/bobby/keys"],
+ id="user",
),
- )
-
- def test_user_file(self):
- self.assertEqual(
- ["/opt/bobby"],
- ssh_util.render_authorizedkeysfile_paths(
- "/opt/%u", "/home/bobby", "bobby"
+ pytest.param(
+ "/opt/%u",
+ "/home/bobby",
+ "bobby",
+ ["/opt/bobby"],
+ id="user_file",
),
- )
-
- def test_user_file2(self):
- self.assertEqual(
- ["/opt/bobby/bobby"],
- ssh_util.render_authorizedkeysfile_paths(
- "/opt/%u/%u", "/home/bobby", "bobby"
+ pytest.param(
+ "/opt/%u/%u",
+ "/home/bobby",
+ "bobby",
+ ["/opt/bobby/bobby"],
+ id="user_file_2",
),
- )
-
- def test_multiple(self):
- self.assertEqual(
- ["/keys/path1", "/keys/path2"],
- ssh_util.render_authorizedkeysfile_paths(
- "/keys/path1 /keys/path2", "/home/bobby", "bobby"
+ pytest.param(
+ "/keys/path1 /keys/path2",
+ "/home/bobby",
+ "bobby",
+ ["/keys/path1", "/keys/path2"],
+ id="multiple",
),
- )
-
- def test_multiple2(self):
- self.assertEqual(
- ["/keys/path1", "/keys/bobby"],
- ssh_util.render_authorizedkeysfile_paths(
- "/keys/path1 /keys/%u", "/home/bobby", "bobby"
+ pytest.param(
+ "/keys/path1 /keys/%u",
+ "/home/bobby",
+ "bobby",
+ ["/keys/path1", "/keys/bobby"],
+ id="multiple_2",
),
- )
-
- def test_relative(self):
- self.assertEqual(
- ["/home/bobby/.secret/keys"],
- ssh_util.render_authorizedkeysfile_paths(
- ".secret/keys", "/home/bobby", "bobby"
+ pytest.param(
+ ".secret/keys",
+ "/home/bobby",
+ "bobby",
+ ["/home/bobby/.secret/keys"],
+ id="relative",
),
- )
-
- def test_home(self):
- self.assertEqual(
- ["/homedirs/bobby/.keys"],
- ssh_util.render_authorizedkeysfile_paths(
- "%h/.keys", "/homedirs/bobby", "bobby"
+ pytest.param(
+ "%h/.keys",
+ "/homedirs/bobby",
+ "bobby",
+ ["/homedirs/bobby/.keys"],
+ id="home",
),
- )
-
- def test_all(self):
- self.assertEqual(
- [
- "/homedirs/bobby/.keys",
- "/homedirs/bobby/.secret/keys",
- "/keys/path1",
- "/opt/bobby/keys",
- ],
- ssh_util.render_authorizedkeysfile_paths(
+ pytest.param(
"%h/.keys .secret/keys /keys/path1 /opt/%u/keys",
"/homedirs/bobby",
"bobby",
+ [
+ "/homedirs/bobby/.keys",
+ "/homedirs/bobby/.secret/keys",
+ "/keys/path1",
+ "/opt/bobby/keys",
+ ],
+ id="all",
),
+ ],
+ )
+ def test_render_authorizedkeysfile_paths(
+ self, value, homedir, username, expected_rendered
+ ):
+ assert expected_rendered == ssh_util.render_authorizedkeysfile_paths(
+ value, homedir, username
)
-class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
- tmp_d = mkdtemp()
-
+class TestMultipleSshAuthorizedKeysFile:
def create_fake_users(
self,
names,
@@ -703,15 +707,16 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
):
homes = []
- root = self.tmp_d + "/root"
+ root = str(tmpdir.join("root"))
fpw = FakePwEnt(pw_name="root", pw_dir=root)
users["root"] = fpw
for name in names:
- home = self.tmp_d + "/home/" + name
+ home = str(tmpdir.join("home", name))
fpw = FakePwEnt(pw_name=name, pw_dir=home)
users[name] = fpw
homes.append(home)
@@ -725,61 +730,71 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
return homes
def create_user_authorized_file(self, home, filename, content_key, keys):
- user_ssh_folder = "%s/.ssh" % home
+ user_ssh_folder = os.path.join(home, ".ssh")
# /tmp/home/<user>/.ssh/authorized_keys = content_key
- authorized_keys = self.tmp_path(filename, dir=user_ssh_folder)
+ authorized_keys = str(os.path.join(user_ssh_folder, filename))
util.write_file(authorized_keys, VALID_CONTENT[content_key])
keys[authorized_keys] = content_key
return authorized_keys
- def create_global_authorized_file(self, filename, content_key, keys):
- authorized_keys = self.tmp_path(filename, dir=self.tmp_d)
+ def create_global_authorized_file(
+ self, filename, content_key, keys, tmpdir
+ ):
+ authorized_keys = str(tmpdir.join(filename))
util.write_file(authorized_keys, VALID_CONTENT[content_key])
keys[authorized_keys] = content_key
return authorized_keys
- def create_sshd_config(self, authorized_keys_files):
- sshd_config = self.tmp_path("sshd_config", dir=self.tmp_d)
+ def create_sshd_config(self, authorized_keys_files, tmpdir):
+ sshd_config = str(tmpdir.join("sshd_config"))
util.write_file(
sshd_config, "AuthorizedKeysFile " + authorized_keys_files
)
return sshd_config
- def execute_and_check(
- self, user, sshd_config, solution, keys, delete_keys=True
- ):
+ def execute_and_check(self, user, sshd_config, solution, keys):
(auth_key_fn, auth_key_entries) = ssh_util.extract_authorized_keys(
user, sshd_config
)
content = ssh_util.update_authorized_keys(auth_key_entries, [])
- self.assertEqual(auth_key_fn, solution)
+ assert auth_key_fn == solution
for path, key in keys.items():
if path == solution:
- self.assertTrue(VALID_CONTENT[key] in content)
+ assert VALID_CONTENT[key] in content
else:
- self.assertFalse(VALID_CONTENT[key] in content)
-
- if delete_keys and os.path.isdir(self.tmp_d + "/home/"):
- util.delete_dir_contents(self.tmp_d + "/home/")
+ assert VALID_CONTENT[key] not in content
+ @pytest.mark.parametrize("inverted", [False, True])
@patch("cloudinit.ssh_util.pwd.getpwnam")
@patch("cloudinit.util.get_permissions")
@patch("cloudinit.util.get_owner")
@patch("cloudinit.util.get_group")
def test_single_user_two_local_files(
- self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam
+ self,
+ m_get_group,
+ m_get_owner,
+ m_get_permissions,
+ m_getpwnam,
+ inverted,
+ tmpdir,
):
user_bobby = "bobby"
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/user_keys": ("bobby", "bobby", 0o600),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "user_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
}
homes = self.create_fake_users(
@@ -790,6 +805,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
home = homes[0]
@@ -804,74 +820,49 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
)
# /tmp/sshd_config
- options = "%s %s" % (authorized_keys, user_keys)
- sshd_config = self.create_sshd_config(options)
+ if not inverted:
+ options = f"{authorized_keys} {user_keys}"
+ else:
+ options = f"{user_keys} {authorized_keys}"
+ sshd_config = self.create_sshd_config(options, tmpdir)
- self.execute_and_check(user_bobby, sshd_config, authorized_keys, keys)
+ if not inverted:
+ exec_args = (user_bobby, sshd_config, authorized_keys, keys)
+ else:
+ exec_args = (user_bobby, sshd_config, user_keys, keys)
- @patch("cloudinit.ssh_util.pwd.getpwnam")
- @patch("cloudinit.util.get_permissions")
- @patch("cloudinit.util.get_owner")
- @patch("cloudinit.util.get_group")
- def test_single_user_two_local_files_inverted(
- self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam
- ):
- user_bobby = "bobby"
- keys = {}
- users = {}
- mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/user_keys": ("bobby", "bobby", 0o600),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
- }
-
- homes = self.create_fake_users(
- [user_bobby],
- mock_permissions,
- m_get_group,
- m_get_owner,
- m_get_permissions,
- m_getpwnam,
- users,
- )
- home = homes[0]
-
- # /tmp/home/bobby/.ssh/authorized_keys = rsa
- authorized_keys = self.create_user_authorized_file(
- home, "authorized_keys", "rsa", keys
- )
-
- # /tmp/home/bobby/.ssh/user_keys = dsa
- user_keys = self.create_user_authorized_file(
- home, "user_keys", "dsa", keys
- )
-
- # /tmp/sshd_config
- options = "%s %s" % (user_keys, authorized_keys)
- sshd_config = self.create_sshd_config(options)
-
- self.execute_and_check(user_bobby, sshd_config, user_keys, keys)
+ self.execute_and_check(*exec_args)
+ @pytest.mark.parametrize("inverted", [False, True])
@patch("cloudinit.ssh_util.pwd.getpwnam")
@patch("cloudinit.util.get_permissions")
@patch("cloudinit.util.get_owner")
@patch("cloudinit.util.get_group")
def test_single_user_local_global_files(
- self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam
+ self,
+ m_get_group,
+ m_get_owner,
+ m_get_permissions,
+ m_getpwnam,
+ inverted,
+ tmpdir,
):
user_bobby = "bobby"
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/user_keys": ("bobby", "bobby", 0o600),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "user_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
}
homes = self.create_fake_users(
@@ -882,6 +873,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
home = homes[0]
@@ -896,86 +888,39 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
)
authorized_keys_global = self.create_global_authorized_file(
- "etc/ssh/authorized_keys", "ecdsa", keys
+ "etc/ssh/authorized_keys", "ecdsa", keys, tmpdir
)
- options = "%s %s %s" % (
- authorized_keys_global,
- user_keys,
- authorized_keys,
- )
- sshd_config = self.create_sshd_config(options)
+ if not inverted:
+ options = f"{authorized_keys_global} {user_keys} {authorized_keys}"
+ else:
+ options = f"{authorized_keys_global} {authorized_keys} {user_keys}"
+ sshd_config = self.create_sshd_config(options, tmpdir)
- self.execute_and_check(user_bobby, sshd_config, user_keys, keys)
-
- @patch("cloudinit.ssh_util.pwd.getpwnam")
- @patch("cloudinit.util.get_permissions")
- @patch("cloudinit.util.get_owner")
- @patch("cloudinit.util.get_group")
- def test_single_user_local_global_files_inverted(
- self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam
- ):
- user_bobby = "bobby"
- keys = {}
- users = {}
- mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/user_keys3": ("bobby", "bobby", 0o600),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys2": ("bobby", "bobby", 0o600),
- }
-
- homes = self.create_fake_users(
- [user_bobby],
- mock_permissions,
- m_get_group,
- m_get_owner,
- m_get_permissions,
- m_getpwnam,
- users,
- )
- home = homes[0]
-
- # /tmp/home/bobby/.ssh/authorized_keys = rsa
- authorized_keys = self.create_user_authorized_file(
- home, "authorized_keys2", "rsa", keys
- )
-
- # /tmp/home/bobby/.ssh/user_keys = dsa
- user_keys = self.create_user_authorized_file(
- home, "user_keys3", "dsa", keys
- )
-
- authorized_keys_global = self.create_global_authorized_file(
- "etc/ssh/authorized_keys", "ecdsa", keys
- )
-
- options = "%s %s %s" % (
- authorized_keys_global,
- authorized_keys,
- user_keys,
- )
- sshd_config = self.create_sshd_config(options)
-
- self.execute_and_check(user_bobby, sshd_config, authorized_keys, keys)
+ if not inverted:
+ exec_args = (user_bobby, sshd_config, user_keys, keys)
+ else:
+ exec_args = (user_bobby, sshd_config, authorized_keys, keys)
+ self.execute_and_check(*exec_args)
@patch("cloudinit.ssh_util.pwd.getpwnam")
@patch("cloudinit.util.get_permissions")
@patch("cloudinit.util.get_owner")
@patch("cloudinit.util.get_group")
def test_single_user_global_file(
- self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam
+ self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam, tmpdir
):
user_bobby = "bobby"
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
}
homes = self.create_fake_users(
@@ -986,16 +931,17 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
home = homes[0]
# /tmp/etc/ssh/authorized_keys = rsa
authorized_keys_global = self.create_global_authorized_file(
- "etc/ssh/authorized_keys", "rsa", keys
+ "etc/ssh/authorized_keys", "rsa", keys, tmpdir
)
options = "%s" % authorized_keys_global
- sshd_config = self.create_sshd_config(options)
+ sshd_config = self.create_sshd_config(options, tmpdir)
default = "%s/.ssh/authorized_keys" % home
self.execute_and_check(user_bobby, sshd_config, default, keys)
@@ -1005,19 +951,25 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
@patch("cloudinit.util.get_owner")
@patch("cloudinit.util.get_group")
def test_two_users_local_file_standard(
- self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam
+ self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam, tmpdir
):
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
- self.tmp_d + "/home/suzie": ("suzie", "suzie", 0o700),
- self.tmp_d + "/home/suzie/.ssh": ("suzie", "suzie", 0o700),
- self.tmp_d
- + "/home/suzie/.ssh/authorized_keys": ("suzie", "suzie", 0o600),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "suzie"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh", "authorized_keys"): (
+ "suzie",
+ "suzie",
+ 0o600,
+ ),
}
user_bobby = "bobby"
@@ -1030,6 +982,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
home_bobby = homes[0]
home_suzie = homes[1]
@@ -1045,11 +998,9 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
)
options = ".ssh/authorized_keys"
- sshd_config = self.create_sshd_config(options)
+ sshd_config = self.create_sshd_config(options, tmpdir)
- self.execute_and_check(
- user_bobby, sshd_config, authorized_keys, keys, delete_keys=False
- )
+ self.execute_and_check(user_bobby, sshd_config, authorized_keys, keys)
self.execute_and_check(user_suzie, sshd_config, authorized_keys2, keys)
@patch("cloudinit.ssh_util.pwd.getpwnam")
@@ -1057,19 +1008,25 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
@patch("cloudinit.util.get_owner")
@patch("cloudinit.util.get_group")
def test_two_users_local_file_custom(
- self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam
+ self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam, tmpdir
):
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys2": ("bobby", "bobby", 0o600),
- self.tmp_d + "/home/suzie": ("suzie", "suzie", 0o700),
- self.tmp_d + "/home/suzie/.ssh": ("suzie", "suzie", 0o700),
- self.tmp_d
- + "/home/suzie/.ssh/authorized_keys2": ("suzie", "suzie", 0o600),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys2"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "suzie"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh", "authorized_keys2"): (
+ "suzie",
+ "suzie",
+ 0o600,
+ ),
}
user_bobby = "bobby"
@@ -1082,6 +1039,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
home_bobby = homes[0]
home_suzie = homes[1]
@@ -1097,11 +1055,9 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
)
options = ".ssh/authorized_keys2"
- sshd_config = self.create_sshd_config(options)
+ sshd_config = self.create_sshd_config(options, tmpdir)
- self.execute_and_check(
- user_bobby, sshd_config, authorized_keys, keys, delete_keys=False
- )
+ self.execute_and_check(user_bobby, sshd_config, authorized_keys, keys)
self.execute_and_check(user_suzie, sshd_config, authorized_keys2, keys)
@patch("cloudinit.ssh_util.pwd.getpwnam")
@@ -1109,23 +1065,35 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
@patch("cloudinit.util.get_owner")
@patch("cloudinit.util.get_group")
def test_two_users_local_global_files(
- self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam
+ self, m_get_group, m_get_owner, m_get_permissions, m_getpwnam, tmpdir
):
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys2": ("bobby", "bobby", 0o600),
- self.tmp_d
- + "/home/bobby/.ssh/user_keys3": ("bobby", "bobby", 0o600),
- self.tmp_d + "/home/suzie": ("suzie", "suzie", 0o700),
- self.tmp_d + "/home/suzie/.ssh": ("suzie", "suzie", 0o700),
- self.tmp_d
- + "/home/suzie/.ssh/authorized_keys2": ("suzie", "suzie", 0o600),
- self.tmp_d
- + "/home/suzie/.ssh/user_keys3": ("suzie", "suzie", 0o600),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys2"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "bobby", ".ssh", "user_keys3"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "suzie"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh", "authorized_keys2"): (
+ "suzie",
+ "suzie",
+ 0o600,
+ ),
+ tmpdir.join("home", "suzie", ".ssh", "user_keys3"): (
+ "suzie",
+ "suzie",
+ 0o600,
+ ),
}
user_bobby = "bobby"
@@ -1138,6 +1106,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
home_bobby = homes[0]
home_suzie = homes[1]
@@ -1158,18 +1127,16 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
# /tmp/etc/ssh/authorized_keys = ecdsa
authorized_keys_global = self.create_global_authorized_file(
- "etc/ssh/authorized_keys2", "ecdsa", keys
+ "etc/ssh/authorized_keys2", "ecdsa", keys, tmpdir
)
options = "%s %s %%h/.ssh/authorized_keys2" % (
authorized_keys_global,
user_keys,
)
- sshd_config = self.create_sshd_config(options)
+ sshd_config = self.create_sshd_config(options, tmpdir)
- self.execute_and_check(
- user_bobby, sshd_config, user_keys, keys, delete_keys=False
- )
+ self.execute_and_check(user_bobby, sshd_config, user_keys, keys)
self.execute_and_check(user_suzie, sshd_config, authorized_keys2, keys)
@patch("cloudinit.util.get_user_groups")
@@ -1184,19 +1151,30 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
m_get_user_groups,
+ tmpdir,
):
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys2": ("bobby", "bobby", 0o600),
- self.tmp_d
- + "/home/bobby/.ssh/user_keys3": ("bobby", "bobby", 0o600),
- self.tmp_d + "/home/badguy": ("root", "root", 0o755),
- self.tmp_d + "/home/badguy/home": ("root", "root", 0o755),
- self.tmp_d + "/home/badguy/home/bobby": ("root", "root", 0o655),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys2"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "bobby", ".ssh", "user_keys3"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "badguy"): ("root", "root", 0o755),
+ tmpdir.join("home", "badguy", "home"): ("root", "root", 0o755),
+ tmpdir.join("home", "badguy", "home", "bobby"): (
+ "root",
+ "root",
+ 0o655,
+ ),
}
user_bobby = "bobby"
@@ -1209,6 +1187,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
m_get_user_groups.side_effect = mock_get_user_groups
@@ -1222,14 +1201,12 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
)
# /tmp/home/badguy/home/bobby = ""
- authorized_keys2 = self.tmp_path(
- "home/bobby", dir=self.tmp_d + "/home/badguy"
- )
+ authorized_keys2 = str(tmpdir.join("home", "badguy", "home", "bobby"))
util.write_file(authorized_keys2, "")
# /tmp/etc/ssh/authorized_keys = ecdsa
authorized_keys_global = self.create_global_authorized_file(
- "etc/ssh/authorized_keys2", "ecdsa", keys
+ "etc/ssh/authorized_keys2", "ecdsa", keys, tmpdir
)
# /tmp/sshd_config
@@ -1238,11 +1215,9 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
authorized_keys_global,
user_keys,
)
- sshd_config = self.create_sshd_config(options)
+ sshd_config = self.create_sshd_config(options, tmpdir)
- self.execute_and_check(
- user_bobby, sshd_config, authorized_keys, keys, delete_keys=False
- )
+ self.execute_and_check(user_bobby, sshd_config, authorized_keys, keys)
self.execute_and_check(
user_badguy, sshd_config, authorized_keys2, keys
)
@@ -1259,24 +1234,34 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
m_get_user_groups,
+ tmpdir,
):
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
- self.tmp_d + "/etc": ("root", "root", 0o755),
- self.tmp_d + "/etc/ssh": ("root", "root", 0o755),
- self.tmp_d + "/etc/ssh/userkeys": ("root", "root", 0o700),
- self.tmp_d + "/etc/ssh/userkeys/bobby": ("bobby", "bobby", 0o600),
- self.tmp_d
- + "/etc/ssh/userkeys/badguy": ("badguy", "badguy", 0o600),
- self.tmp_d + "/home/badguy": ("badguy", "badguy", 0o700),
- self.tmp_d + "/home/badguy/.ssh": ("badguy", "badguy", 0o700),
- self.tmp_d
- + "/home/badguy/.ssh/authorized_keys": (
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("etc"): ("root", "root", 0o755),
+ tmpdir.join("etc", "ssh"): ("root", "root", 0o755),
+ tmpdir.join("etc", "ssh", "userkeys"): ("root", "root", 0o700),
+ tmpdir.join("etc", "ssh", "userkeys", "bobby"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("etc", "ssh", "userkeys", "badguy"): (
+ "badguy",
+ "badguy",
+ 0o600,
+ ),
+ tmpdir.join("home", "badguy"): ("badguy", "badguy", 0o700),
+ tmpdir.join("home", "badguy", ".ssh"): ("badguy", "badguy", 0o700),
+ tmpdir.join("home", "badguy", ".ssh", "authorized_keys"): (
"badguy",
"badguy",
0o600,
@@ -1293,6 +1278,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
m_get_user_groups.side_effect = mock_get_user_groups
home_bobby = homes[0]
@@ -1305,7 +1291,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
# /tmp/etc/ssh/userkeys/bobby = dsa
# assume here that we can bypass userkeys, despite permissions
self.create_global_authorized_file(
- "etc/ssh/userkeys/bobby", "dsa", keys
+ "etc/ssh/userkeys/bobby", "dsa", keys, tmpdir
)
# /tmp/home/badguy/.ssh/authorized_keys = ssh-xmss@openssh.com
@@ -1315,16 +1301,16 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
# /tmp/etc/ssh/userkeys/badguy = ecdsa
self.create_global_authorized_file(
- "etc/ssh/userkeys/badguy", "ecdsa", keys
+ "etc/ssh/userkeys/badguy", "ecdsa", keys, tmpdir
)
# /tmp/sshd_config
- options = self.tmp_d + "/etc/ssh/userkeys/%u .ssh/authorized_keys"
- sshd_config = self.create_sshd_config(options)
-
- self.execute_and_check(
- user_bobby, sshd_config, authorized_keys, keys, delete_keys=False
+ options = str(
+ tmpdir.join("etc", "ssh", "userkeys", "%u .ssh", "authorized_keys")
)
+ sshd_config = self.create_sshd_config(options, tmpdir)
+
+ self.execute_and_check(user_bobby, sshd_config, authorized_keys, keys)
self.execute_and_check(
user_badguy, sshd_config, authorized_keys2, keys
)
@@ -1341,24 +1327,34 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
m_get_user_groups,
+ tmpdir,
):
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
- self.tmp_d + "/etc": ("root", "root", 0o755),
- self.tmp_d + "/etc/ssh": ("root", "root", 0o755),
- self.tmp_d + "/etc/ssh/userkeys": ("root", "root", 0o755),
- self.tmp_d + "/etc/ssh/userkeys/bobby": ("bobby", "bobby", 0o600),
- self.tmp_d
- + "/etc/ssh/userkeys/badguy": ("badguy", "badguy", 0o600),
- self.tmp_d + "/home/badguy": ("badguy", "badguy", 0o700),
- self.tmp_d + "/home/badguy/.ssh": ("badguy", "badguy", 0o700),
- self.tmp_d
- + "/home/badguy/.ssh/authorized_keys": (
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("etc"): ("root", "root", 0o755),
+ tmpdir.join("etc", "ssh"): ("root", "root", 0o755),
+ tmpdir.join("etc", "ssh", "userkeys"): ("root", "root", 0o755),
+ tmpdir.join("etc", "ssh", "userkeys", "bobby"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("etc", "ssh", "userkeys", "badguy"): (
+ "badguy",
+ "badguy",
+ 0o600,
+ ),
+ tmpdir.join("home", "badguy"): ("badguy", "badguy", 0o700),
+ tmpdir.join("home", "badguy", ".ssh"): ("badguy", "badguy", 0o700),
+ tmpdir.join("home", "badguy", ".ssh", "authorized_keys"): (
"badguy",
"badguy",
0o600,
@@ -1375,6 +1371,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
m_get_user_groups.side_effect = mock_get_user_groups
home_bobby = homes[0]
@@ -1387,7 +1384,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
# /tmp/etc/ssh/userkeys/bobby = dsa
# assume here that we can bypass userkeys, despite permissions
authorized_keys = self.create_global_authorized_file(
- "etc/ssh/userkeys/bobby", "dsa", keys
+ "etc/ssh/userkeys/bobby", "dsa", keys, tmpdir
)
# /tmp/home/badguy/.ssh/authorized_keys = ssh-xmss@openssh.com
@@ -1397,20 +1394,21 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
# /tmp/etc/ssh/userkeys/badguy = ecdsa
authorized_keys2 = self.create_global_authorized_file(
- "etc/ssh/userkeys/badguy", "ecdsa", keys
+ "etc/ssh/userkeys/badguy", "ecdsa", keys, tmpdir
)
# /tmp/sshd_config
- options = self.tmp_d + "/etc/ssh/userkeys/%u .ssh/authorized_keys"
- sshd_config = self.create_sshd_config(options)
-
- self.execute_and_check(
- user_bobby, sshd_config, authorized_keys, keys, delete_keys=False
+ options = str(
+ tmpdir.join("etc", "ssh", "userkeys", "%u .ssh", "authorized_keys")
)
+ sshd_config = self.create_sshd_config(options, tmpdir)
+
+ self.execute_and_check(user_bobby, sshd_config, authorized_keys, keys)
self.execute_and_check(
user_badguy, sshd_config, authorized_keys2, keys
)
+ @pytest.mark.parametrize("inverted", [False, True])
@patch("cloudinit.util.get_user_groups")
@patch("cloudinit.ssh_util.pwd.getpwnam")
@patch("cloudinit.util.get_permissions")
@@ -1423,18 +1421,26 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
m_get_user_groups,
+ inverted,
+ tmpdir,
):
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
- self.tmp_d + "/home/suzie": ("suzie", "suzie", 0o700),
- self.tmp_d + "/home/suzie/.ssh": ("suzie", "suzie", 0o700),
- self.tmp_d
- + "/home/suzie/.ssh/authorized_keys": ("suzie", "suzie", 0o600),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "suzie"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh", "authorized_keys"): (
+ "suzie",
+ "suzie",
+ 0o600,
+ ),
}
user_bobby = "bobby"
@@ -1447,6 +1453,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
home_bobby = homes[0]
home_suzie = homes[1]
@@ -1458,80 +1465,26 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
)
# /tmp/home/suzie/.ssh/authorized_keys = ssh-xmss@openssh.com
- self.create_user_authorized_file(
- home_suzie, "authorized_keys", "ssh-xmss@openssh.com", keys
- )
-
- # /tmp/sshd_config
- options = "%s" % (authorized_keys)
- sshd_config = self.create_sshd_config(options)
-
- self.execute_and_check(
- user_bobby, sshd_config, authorized_keys, keys, delete_keys=False
- )
- default = "%s/.ssh/authorized_keys" % home_suzie
- self.execute_and_check(user_suzie, sshd_config, default, keys)
-
- @patch("cloudinit.util.get_user_groups")
- @patch("cloudinit.ssh_util.pwd.getpwnam")
- @patch("cloudinit.util.get_permissions")
- @patch("cloudinit.util.get_owner")
- @patch("cloudinit.util.get_group")
- def test_two_users_hardcoded_single_user_file_inverted(
- self,
- m_get_group,
- m_get_owner,
- m_get_permissions,
- m_getpwnam,
- m_get_user_groups,
- ):
- keys = {}
- users = {}
- mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
- self.tmp_d + "/home/suzie": ("suzie", "suzie", 0o700),
- self.tmp_d + "/home/suzie/.ssh": ("suzie", "suzie", 0o700),
- self.tmp_d
- + "/home/suzie/.ssh/authorized_keys": ("suzie", "suzie", 0o600),
- }
-
- user_bobby = "bobby"
- user_suzie = "suzie"
- homes = self.create_fake_users(
- [user_bobby, user_suzie],
- mock_permissions,
- m_get_group,
- m_get_owner,
- m_get_permissions,
- m_getpwnam,
- users,
- )
- home_bobby = homes[0]
- home_suzie = homes[1]
- m_get_user_groups.side_effect = mock_get_user_groups
-
- # /tmp/home/bobby/.ssh/authorized_keys = rsa
- self.create_user_authorized_file(
- home_bobby, "authorized_keys", "rsa", keys
- )
-
- # /tmp/home/suzie/.ssh/authorized_keys = ssh-xmss@openssh.com
authorized_keys2 = self.create_user_authorized_file(
home_suzie, "authorized_keys", "ssh-xmss@openssh.com", keys
)
# /tmp/sshd_config
- options = "%s" % (authorized_keys2)
- sshd_config = self.create_sshd_config(options)
-
- default = "%s/.ssh/authorized_keys" % home_bobby
- self.execute_and_check(
- user_bobby, sshd_config, default, keys, delete_keys=False
- )
- self.execute_and_check(user_suzie, sshd_config, authorized_keys2, keys)
+ if not inverted:
+ expected_keys = authorized_keys
+ else:
+ expected_keys = authorized_keys2
+ options = "%s" % (expected_keys)
+ sshd_config = self.create_sshd_config(options, tmpdir)
+
+ if not inverted:
+ expected_bobby = expected_keys
+ expected_suzie = "%s/.ssh/authorized_keys" % home_suzie
+ else:
+ expected_bobby = "%s/.ssh/authorized_keys" % home_bobby
+ expected_suzie = expected_keys
+ self.execute_and_check(user_bobby, sshd_config, expected_bobby, keys)
+ self.execute_and_check(user_suzie, sshd_config, expected_suzie, keys)
@patch("cloudinit.util.get_user_groups")
@patch("cloudinit.ssh_util.pwd.getpwnam")
@@ -1545,18 +1498,25 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
m_get_user_groups,
+ tmpdir,
):
keys = {}
users = {}
mock_permissions = {
- self.tmp_d + "/home/bobby": ("bobby", "bobby", 0o700),
- self.tmp_d + "/home/bobby/.ssh": ("bobby", "bobby", 0o700),
- self.tmp_d
- + "/home/bobby/.ssh/authorized_keys": ("bobby", "bobby", 0o600),
- self.tmp_d + "/home/suzie": ("suzie", "suzie", 0o700),
- self.tmp_d + "/home/suzie/.ssh": ("suzie", "suzie", 0o700),
- self.tmp_d
- + "/home/suzie/.ssh/authorized_keys": ("suzie", "suzie", 0o600),
+ tmpdir.join("home", "bobby"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh"): ("bobby", "bobby", 0o700),
+ tmpdir.join("home", "bobby", ".ssh", "authorized_keys"): (
+ "bobby",
+ "bobby",
+ 0o600,
+ ),
+ tmpdir.join("home", "suzie"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh"): ("suzie", "suzie", 0o700),
+ tmpdir.join("home", "suzie", ".ssh", "authorized_keys"): (
+ "suzie",
+ "suzie",
+ 0o600,
+ ),
}
user_bobby = "bobby"
@@ -1569,6 +1529,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
m_get_permissions,
m_getpwnam,
users,
+ tmpdir,
)
home_bobby = homes[0]
home_suzie = homes[1]
@@ -1586,7 +1547,7 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
# /tmp/etc/ssh/authorized_keys = ecdsa
authorized_keys_global = self.create_global_authorized_file(
- "etc/ssh/authorized_keys", "ecdsa", keys
+ "etc/ssh/authorized_keys", "ecdsa", keys, tmpdir
)
# /tmp/sshd_config
@@ -1595,11 +1556,9 @@ class TestMultipleSshAuthorizedKeysFile(test_helpers.CiTestCase):
authorized_keys,
authorized_keys2,
)
- sshd_config = self.create_sshd_config(options)
+ sshd_config = self.create_sshd_config(options, tmpdir)
- self.execute_and_check(
- user_bobby, sshd_config, authorized_keys, keys, delete_keys=False
- )
+ self.execute_and_check(user_bobby, sshd_config, authorized_keys, keys)
self.execute_and_check(user_suzie, sshd_config, authorized_keys2, keys)
diff --git a/tests/unittests/test_url_helper.py b/tests/unittests/test_url_helper.py
index f756a838..214e5727 100644
--- a/tests/unittests/test_url_helper.py
+++ b/tests/unittests/test_url_helper.py
@@ -15,6 +15,7 @@ from cloudinit.url_helper import (
NOT_FOUND,
REDACTED,
UrlError,
+ UrlResponse,
dual_stack,
oauth_headers,
read_file_or_url,
@@ -93,6 +94,18 @@ class TestReadFileOrUrl(CiTestCase):
self.assertEqual(str(result), data.decode("utf-8"))
@httpretty.activate
+ def test_read_file_or_url_str_from_url_streamed(self):
+ """Test that str(result.contents) on url is text version of contents.
+ It should not be "b'data'", but just "'data'" """
+ url = "http://hostname/path"
+ data = b"This is my url content\n"
+ httpretty.register_uri(httpretty.GET, url, data)
+ result = read_file_or_url(url, stream=True)
+ assert isinstance(result, UrlResponse)
+ self.assertEqual(result.contents, data)
+ self.assertEqual(str(result), data.decode("utf-8"))
+
+ @httpretty.activate
def test_read_file_or_url_str_from_url_redacting_headers_from_logs(self):
"""Headers are redacted from logs but unredacted in requests."""
url = "http://hostname/path"
@@ -146,6 +159,7 @@ class TestReadFileOrUrl(CiTestCase):
"User-Agent": "Cloud-Init/%s"
% (version.version_string())
},
+ "stream": False,
},
kwargs,
)
@@ -186,6 +200,7 @@ class TestReadFileOrUrlParameters:
"ssl_details": {"cert_file": "/path/cert.pem"},
"headers_cb": "headers_cb",
"exception_cb": "exception_cb",
+ "stream": True,
}
assert response == read_file_or_url(**params)
@@ -222,6 +237,7 @@ class TestReadFileOrUrlParameters:
% (version.version_string())
},
"timeout": request_timeout,
+ "stream": False,
}
if request_timeout is None:
expected_kwargs.pop("timeout")
diff --git a/tests/unittests/test_util.py b/tests/unittests/test_util.py
index 9722ddd5..0b297ef1 100644
--- a/tests/unittests/test_util.py
+++ b/tests/unittests/test_util.py
@@ -2464,6 +2464,47 @@ class TestGetProcEnv(helpers.TestCase):
my_ppid = os.getppid()
self.assertEqual(my_ppid, util.get_proc_ppid(my_pid))
+ def test_get_proc_ppid_mocked(self):
+ for ppid, proc_data in (
+ (
+ 0,
+ "1 (systemd) S 0 1 1 0 -1 4194560 112664 14612195 153 18014"
+ "274 237 756828 152754 20 0 1 0 3 173809664 3736"
+ "18446744073709551615 1 1 0 0 0 0 671173123 4096 1260 0 0 0 17"
+ "8 0 0 0 0 123974 0 0 0 0 0 0 0 0",
+ ),
+ (
+ 180771,
+ "180781 ([pytest-xdist r) R 180771 180598 167240 34825 "
+ "180598 4194304 128712 7570 0 0 1061 34 8 1 20 0 2 0 6551540 "
+ "351993856 25173 18446744073709551615 93907896635392 "
+ "93907899455533 140725724279536 0 0 0 0 16781312 17642 0 0 0 "
+ "17 1 0 0 0 0 0 93907901810800 93907902095288 93907928788992 "
+ "140725724288007 140725724288074 140725724288074 "
+ "140725724291047 0",
+ ),
+ (
+ 5620,
+ "8723 (Utility Process) S 5620 5191 5191 0 -1 4194304 3219 "
+ "0 50 0 1045 431 0 0 20 0 3 0 9007 220585984 8758 "
+ "18446744073709551615 94469734690816 94469735319392 "
+ "140728350183632 0 0 0 0 69634 1073745144 0 0 0 17 10 0 0 0 0 "
+ "0 94469735327152 94469735331056 94469763170304 "
+ "140728350189012 140728350189221 140728350189221 "
+ "140728350195661 0",
+ ),
+ (
+ 4946,
+ "4947 ((sd-pam)) S 4946 4946 4946 0 -1 1077936448 54 0 0 0 "
+ "0 0 0 0 20 0 1 0 4136 175616000 1394 18446744073709551615 1 1"
+ "0 0 0 0 0 4096 0 0 0 0 17 8 0 0 0 0 0 0 0 0 0 0 0 0 0",
+ ),
+ ):
+ with mock.patch(
+ "cloudinit.util.load_file", return_value=proc_data
+ ):
+ assert ppid == util.get_proc_ppid("mocked")
+
class TestKernelVersion:
"""test kernel version function"""
@@ -2616,4 +2657,59 @@ class TestFindDevs:
assert devlist == expected_devlist
-# vi: ts=4 expandtab
+class TestVersion:
+ @pytest.mark.parametrize(
+ ("v1", "v2", "eq"),
+ (
+ ("3.1.0", "3.1.0", True),
+ ("3.1.0", "3.1.1", False),
+ ("3.1", "3.1.0.0", False),
+ ),
+ )
+ def test_eq(self, v1, v2, eq):
+ if eq:
+ assert util.Version.from_str(v1) == util.Version.from_str(v2)
+ if not eq:
+ assert util.Version.from_str(v1) != util.Version.from_str(v2)
+
+ @pytest.mark.parametrize(
+ ("v1", "v2", "gt"),
+ (
+ ("3.1.0", "3.1.0", False),
+ ("3.1.0", "3.1.1", False),
+ ("3.1", "3.1.0.0", False),
+ ("3.1.0.0", "3.1", True),
+ ("3.1.1", "3.1.0", True),
+ ),
+ )
+ def test_gt(self, v1, v2, gt):
+ if gt:
+ assert util.Version.from_str(v1) > util.Version.from_str(v2)
+ if not gt:
+ assert util.Version.from_str(v1) < util.Version.from_str(
+ v2
+ ) or util.Version.from_str(v1) == util.Version.from_str(v2)
+
+ @pytest.mark.parametrize(
+ ("str_ver", "cls_ver"),
+ (
+ (
+ "0.0.0.0",
+ util.Version(0, 0, 0, 0),
+ ),
+ (
+ "1.0.0.0",
+ util.Version(1, 0, 0, 0),
+ ),
+ (
+ "1.0.2.0",
+ util.Version(1, 0, 2, 0),
+ ),
+ (
+ "9.8.2.0",
+ util.Version(9, 8, 2, 0),
+ ),
+ ),
+ )
+ def test_from_str(self, str_ver, cls_ver):
+ assert util.Version.from_str(str_ver) == cls_ver
diff --git a/tools/.github-cla-signers b/tools/.github-cla-signers
index b53dc757..6d037c67 100644
--- a/tools/.github-cla-signers
+++ b/tools/.github-cla-signers
@@ -96,6 +96,7 @@ smoser
sshedi
sstallion
stappersg
+stefanor
steverweber
t-8ch
taoyama
diff --git a/tox.ini b/tox.ini
index 1fcd26bc..4765d3bd 100644
--- a/tox.ini
+++ b/tox.ini
@@ -234,6 +234,8 @@ commands = {[testenv:flake8]commands}
[testenv:tip-mypy]
deps =
+ hypothesis
+ hypothesis_jsonschema
mypy
pytest
types-jsonschema
@@ -308,6 +310,7 @@ markers =
gce: test will only run on GCE platform
hypothesis_slow: hypothesis test too slow to run as unit test
instance_name: the name to be used for the test instance
+ integration_cloud_args: args for IntegrationCloud customization
is_iscsi: whether is an instance has iscsi net cfg or not
lxd_config_dict: set the config_dict passed on LXD instance creation
lxd_container: test will only run in LXD container