author     Chad Smith <chad.smith@canonical.com>    2022-08-17 22:30:57 -0600
committer  GitHub <noreply@github.com>              2022-08-17 22:30:57 -0600
commit     923e140d4443a3732fab1cc0229a13caed5d929a (patch)
tree       82053b07a542b9423292d2c043827004ecd5a647 /cloudinit/stages.py
parent     66d4095c8260a73209a98a2cc9b52623b69f1fb7 (diff)
download   cloud-init-git-923e140d4443a3732fab1cc0229a13caed5d929a.tar.gz
sources: obj.pkl cache should be written anytime get_data is run (#1669)
When metadata update events trigger a new datasource.get_data run,
ensure we sync the cached obj.pkl to disk so subsequent
boot stages can leverage the updated metadata.
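
As a rough, self-contained sketch of that intent (a hypothetical stand-in class; the real change lives in cloudinit/sources.py, which this stages.py-only diff does not show):

# Sketch only: a stand-in datasource, not cloud-init's real DataSource class.
import pickle


class ExampleDataSource:
    def __init__(self, cache_path):
        self.cache_path = cache_path
        self.metadata = {}

    def _get_data(self):
        # Stand-in for the real per-cloud metadata crawl.
        self.metadata = {"network": "per-boot network config"}
        return True

    def get_data(self):
        if not self._get_data():
            return False
        # The fix described above: rewrite the pickled obj.pkl cache on every
        # successful get_data run so later boot stages read updated metadata.
        with open(self.cache_path, "wb") as f:
            f.write(pickle.dumps(self))
        return True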
Add a write_cache param to persist_instance_data to avoid
persisting instance data when the datasource was restored from
cache (init.ds_restored).
This avoids a race on clouds where network config is updated
per boot during the init-local timeframe, but init-network would
otherwise use stale network metadata from the cache because the
updated metadata was not persisted.
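
A minimal sketch of how such a flag avoids that race, assuming a persist_instance_data(..., write_cache=...) shape as described above (a hypothetical standalone function; the real code sits on the DataSource class in cloudinit/sources.py):

import pickle


def persist_instance_data(datasource, cache_path, write_cache=True):
    # write_cache=False is meant for the case where the datasource was just
    # restored *from* obj.pkl: re-writing it would only copy stale data back
    # and could overwrite fresher metadata written earlier in boot.
    if write_cache:
        with open(cache_path, "wb") as f:
            f.write(pickle.dumps(datasource))
    # cloud-init also writes run/instance-data.json here; omitted in sketch.
    return True


# init-local fetched fresh data, so the cache is (re)written ...
persist_instance_data({"network": "fresh"}, "/tmp/obj.pkl", write_cache=True)
# ... while a stage that merely restored from cache skips the write.
persist_instance_data({"network": "fresh"}, "/tmp/obj.pkl", write_cache=False)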
Migrate _pkl_load and _pkl_store out of the stages module and into
sources, as it is really only applicable to datasource serialization.
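
The relocated helpers themselves are not shown in this stages.py-limited diff, but the new call sites below imply public sources.pkl_store and sources.pkl_load functions with the same behaviour as the removed _pkl_store/_pkl_load. An illustrative round trip (the "ds" object stands for an already-initialized datasource and the path is only the conventional obj.pkl location, both placeholders):

from cloudinit import sources

cache_path = "/var/lib/cloud/instance/obj.pkl"  # conventional obj.pkl location
if sources.pkl_store(ds, cache_path):         # False (with a logged error) on failure
    restored = sources.pkl_load(cache_path)   # None (with a logged error) on failure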
Diffstat (limited to 'cloudinit/stages.py')
-rw-r--r--  cloudinit/stages.py  41
1 file changed, 4 insertions(+), 37 deletions(-)
diff --git a/cloudinit/stages.py b/cloudinit/stages.py
index 66e12eed..132dd83b 100644
--- a/cloudinit/stages.py
+++ b/cloudinit/stages.py
@@ -6,7 +6,6 @@
 import copy
 import os
-import pickle
 import sys
 from collections import namedtuple
 from typing import Dict, Iterable, List, Optional, Set

@@ -247,7 +246,7 @@ class Init(object):
         # We try to restore from a current link and static path
         # by using the instance link, if purge_cache was called
         # the file wont exist.
-        return _pkl_load(self.paths.get_ipath_cur("obj_pkl"))
+        return sources.pkl_load(self.paths.get_ipath_cur("obj_pkl"))

     def _write_to_cache(self):
         if self.datasource is None:
@@ -260,7 +259,9 @@ class Init(object):
             omode="w",
             content="",
         )
-        return _pkl_store(self.datasource, self.paths.get_ipath_cur("obj_pkl"))
+        return sources.pkl_store(
+            self.datasource, self.paths.get_ipath_cur("obj_pkl")
+        )

     def _get_datasources(self):
         # Any config provided???
@@ -973,38 +974,4 @@ def fetch_base_config():
     )


-def _pkl_store(obj, fname):
-    try:
-        pk_contents = pickle.dumps(obj)
-    except Exception:
-        util.logexc(LOG, "Failed pickling datasource %s", obj)
-        return False
-    try:
-        util.write_file(fname, pk_contents, omode="wb", mode=0o400)
-    except Exception:
-        util.logexc(LOG, "Failed pickling datasource to %s", fname)
-        return False
-    return True
-
-
-def _pkl_load(fname):
-    pickle_contents = None
-    try:
-        pickle_contents = util.load_file(fname, decode=False)
-    except Exception as e:
-        if os.path.isfile(fname):
-            LOG.warning("failed loading pickle in %s: %s", fname, e)
-
-    # This is allowed so just return nothing successfully loaded...
-    if not pickle_contents:
-        return None
-    try:
-        return pickle.loads(pickle_contents)
-    except sources.DatasourceUnpickleUserDataError:
-        return None
-    except Exception:
-        util.logexc(LOG, "Failed loading pickled blob from %s", fname)
-        return None
-
-
 # vi: ts=4 expandtab