summary | refs | log | tree | commit | diff
path: root/cloudinit/sources/azure/imds.py
diff options
context:
space:
mode:
Diffstat (limited to 'cloudinit/sources/azure/imds.py')
-rw-r--r--  cloudinit/sources/azure/imds.py  54
1 file changed, 41 insertions, 13 deletions
diff --git a/cloudinit/sources/azure/imds.py b/cloudinit/sources/azure/imds.py
index 1f5cf008..f0b6911c 100644
--- a/cloudinit/sources/azure/imds.py
+++ b/cloudinit/sources/azure/imds.py
@@ -2,7 +2,8 @@
#
# This file is part of cloud-init. See LICENSE file for license information.
-from typing import Dict
+from time import time
+from typing import Dict, Optional
import requests
@@ -17,21 +18,31 @@ IMDS_URL = "http://169.254.169.254/metadata"
class ReadUrlRetryHandler:
+ """Manager for readurl retry behavior using exception_callback().
+
+ :param logging_backoff: Backoff to limit logging.
+ :param max_connection_errors: Number of connection errors to retry on.
+ :param retry_codes: Set of http codes to retry on.
+ :param retry_deadline: Optional time()-based deadline to retry until.
+ """
+
def __init__(
self,
*,
+ logging_backoff: float = 1.0,
+ max_connection_errors: int = 10,
retry_codes=(
404, # not found (yet)
410, # gone / unavailable (yet)
429, # rate-limited/throttled
500, # server error
),
- max_connection_errors: int = 10,
- logging_backoff: float = 1.0,
+ retry_deadline: Optional[float] = None,
) -> None:
self.logging_backoff = logging_backoff
self.max_connection_errors = max_connection_errors
self.retry_codes = retry_codes
+ self.retry_deadline = retry_deadline
self._logging_threshold = 1.0
self._request_count = 0
@@ -46,7 +57,10 @@ class ReadUrlRetryHandler:
return False
log = True
- retry = True
+ if self.retry_deadline is not None and time() >= self.retry_deadline:
+ retry = False
+ else:
+ retry = True
# Check for connection errors which may occur early boot, but
# are otherwise indicative that we are not connecting with the
@@ -76,22 +90,30 @@ class ReadUrlRetryHandler:
def _fetch_url(
- url: str, *, log_response: bool = True, retries: int = 10, timeout: int = 2
+ url: str,
+ *,
+ retry_deadline: float,
+ log_response: bool = True,
+ timeout: int = 2,
) -> bytes:
"""Fetch URL from IMDS.
+ :param url: url to fetch.
+ :param log_response: log responses in readurl().
+ :param retry_deadline: time()-based deadline to retry until.
+ :param timeout: Read/connection timeout in seconds for readurl().
+
:raises UrlError: on error fetching metadata.
"""
- handler = ReadUrlRetryHandler()
+ handler = ReadUrlRetryHandler(retry_deadline=retry_deadline)
try:
response = readurl(
url,
exception_cb=handler.exception_callback,
headers={"Metadata": "true"},
- infinite=False,
+ infinite=True,
log_req_resp=log_response,
- retries=retries,
timeout=timeout,
)
except UrlError as error:
@@ -106,14 +128,17 @@ def _fetch_url(
def _fetch_metadata(
url: str,
- retries: int = 10,
+ retry_deadline: float,
) -> Dict:
"""Fetch IMDS metadata.
+ :param url: url to fetch.
+ :param retry_deadline: time()-based deadline to retry until.
+
:raises UrlError: on error fetching metadata.
:raises ValueError: on error parsing metadata.
"""
- metadata = _fetch_url(url, retries=retries)
+ metadata = _fetch_url(url, retry_deadline=retry_deadline)
try:
return util.load_json(metadata)
@@ -125,15 +150,17 @@ def _fetch_metadata(
raise
-def fetch_metadata_with_api_fallback(retries: int = 10) -> Dict:
+def fetch_metadata_with_api_fallback(retry_deadline: float) -> Dict:
"""Fetch extended metadata, falling back to non-extended as required.
+ :param retry_deadline: time()-based deadline to retry until.
+
:raises UrlError: on error fetching metadata.
:raises ValueError: on error parsing metadata.
"""
try:
url = IMDS_URL + "/instance?api-version=2021-08-01&extended=true"
- return _fetch_metadata(url, retries=retries)
+ return _fetch_metadata(url, retry_deadline=retry_deadline)
except UrlError as error:
if error.code == 400:
report_diagnostic_event(
@@ -141,7 +168,7 @@ def fetch_metadata_with_api_fallback(retries: int = 10) -> Dict:
logger_func=LOG.warning,
)
url = IMDS_URL + "/instance?api-version=2019-06-01"
- return _fetch_metadata(url, retries=retries)
+ return _fetch_metadata(url, retry_deadline=retry_deadline)
raise
@@ -159,6 +186,7 @@ def fetch_reprovision_data() -> bytes:
404,
410,
),
+ retry_deadline=None,
)
response = readurl(
url,