author     Ade Lee <alee@redhat.com>    2020-10-01 18:26:08 -0400
committer  Ade Lee <alee@redhat.com>    2020-11-11 13:40:46 -0500
commit     a34419aecdf3db58426fe8fb7e1849ebdff6a5f1 (patch)
tree       3644f222f3ab63ed150956456734cb9159a49a1c
parent     488d3e2fe31e4fc7a461e9de8b20df481d78daa2 (diff)
download   glance_store-a34419aecdf3db58426fe8fb7e1849ebdff6a5f1.tar.gz
Replace md5 with oslo version
md5 is not an approved algorithm in FIPS mode, and trying to instantiate a
hashlib.md5() will fail when the system is running in FIPS mode. md5 is
allowed when used in a non-security context. There is a plan to add a
keyword parameter (usedforsecurity) to hashlib.md5() to annotate whether or
not the instance is being used in a security context. In the case where it
is not, the instantiation of md5 will be allowed. See
https://bugs.python.org/issue9216 for more details.

Some downstream python versions already support this parameter. To support
these versions, a new encapsulation of md5() has been added to oslo_utils.
See https://review.opendev.org/#/c/750031/

This patch replaces the instances of hashlib.md5() with this new
encapsulation, adding an annotation indicating whether or not the usage is
in a security context. The uses of md5 here are primarily for checksums and
the generation of etags.

With this patch, all the unit and functional tests pass on a FIPS enabled
system.

Change-Id: I0603ba217d6dc19f5c9f73c60c7b365efd28d30b
Depends-On: https://review.opendev.org/#/c/760160
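As a minimal sketch of the pattern this change adopts (illustration only, not part of the commit; the names mirror the hunks below), the oslo_utils wrapper accepts a usedforsecurity keyword so that checksum and etag calculations can be marked as non-security uses and keep working under FIPS:

    # Minimal sketch of the before/after usage; assumes oslo.utils >= 4.7.0.
    import hashlib

    from oslo_utils.secretutils import md5

    data = b"image contents"

    # Before: instantiating hashlib.md5() fails on a FIPS-enabled system
    # because md5 is not an approved algorithm there.
    # checksum = hashlib.md5(data).hexdigest()

    # After: the call is annotated as a non-security use (checksums, etags),
    # which is permitted even in FIPS mode on Python builds that support the
    # usedforsecurity parameter.
    checksum = md5(data, usedforsecurity=False).hexdigest()

    # Approved algorithms such as sha256 still come straight from hashlib.
    multihash = hashlib.sha256(data).hexdigest()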
-rw-r--r--  glance_store/_drivers/cinder.py                          |  4
-rw-r--r--  glance_store/_drivers/filesystem.py                      |  5
-rw-r--r--  glance_store/_drivers/rbd.py                             |  5
-rw-r--r--  glance_store/_drivers/s3.py                              |  9
-rw-r--r--  glance_store/_drivers/swift/store.py                     |  8
-rw-r--r--  glance_store/_drivers/vmware_datastore.py                |  5
-rw-r--r--  glance_store/common/utils.py                             | 17
-rw-r--r--  glance_store/tests/unit/test_cinder_store.py             |  4
-rw-r--r--  glance_store/tests/unit/test_driver.py                   | 12
-rw-r--r--  glance_store/tests/unit/test_filesystem_store.py         | 16
-rw-r--r--  glance_store/tests/unit/test_multistore_cinder.py        |  5
-rw-r--r--  glance_store/tests/unit/test_multistore_filesystem.py    | 20
-rw-r--r--  glance_store/tests/unit/test_multistore_s3.py            | 10
-rw-r--r--  glance_store/tests/unit/test_multistore_vmware.py        | 11
-rw-r--r--  glance_store/tests/unit/test_rbd_store.py                |  7
-rw-r--r--  glance_store/tests/unit/test_s3_store.py                 |  7
-rw-r--r--  glance_store/tests/unit/test_swift_store.py              | 50
-rw-r--r--  glance_store/tests/unit/test_swift_store_multibackend.py | 34
-rw-r--r--  glance_store/tests/unit/test_vmware_store.py             | 11
-rw-r--r--  lower-constraints.txt                                    |  2
-rw-r--r--  requirements.txt                                         |  2
21 files changed, 155 insertions(+), 89 deletions(-)
diff --git a/glance_store/_drivers/cinder.py b/glance_store/_drivers/cinder.py
index 10d4727..3160a75 100644
--- a/glance_store/_drivers/cinder.py
+++ b/glance_store/_drivers/cinder.py
@@ -829,8 +829,8 @@ class Store(glance_store.driver.Store):
self._check_context(context, require_tenant=True)
client = self.get_cinderclient(context)
- os_hash_value = hashlib.new(str(hashing_algo))
- checksum = hashlib.md5()
+ os_hash_value = utils.get_hasher(hashing_algo, False)
+ checksum = utils.get_hasher('md5', False)
bytes_written = 0
size_gb = int(math.ceil(float(image_size) / units.Gi))
if size_gb == 0:
diff --git a/glance_store/_drivers/filesystem.py b/glance_store/_drivers/filesystem.py
index ff633f1..20f5b87 100644
--- a/glance_store/_drivers/filesystem.py
+++ b/glance_store/_drivers/filesystem.py
@@ -19,7 +19,6 @@ A simple filesystem-backed store
"""
import errno
-import hashlib
import logging
import os
import stat
@@ -739,8 +738,8 @@ class Store(glance_store.driver.Store):
if os.path.exists(filepath):
raise exceptions.Duplicate(image=filepath)
- os_hash_value = hashlib.new(str(hashing_algo))
- checksum = hashlib.md5()
+ os_hash_value = utils.get_hasher(hashing_algo, False)
+ checksum = utils.get_hasher('md5', False)
bytes_written = 0
try:
with open(filepath, 'wb') as f:
diff --git a/glance_store/_drivers/rbd.py b/glance_store/_drivers/rbd.py
index 9eb9324..4951990 100644
--- a/glance_store/_drivers/rbd.py
+++ b/glance_store/_drivers/rbd.py
@@ -18,7 +18,6 @@
(RADOS (Reliable Autonomic Distributed Object Store) Block Device)"""
import contextlib
-import hashlib
import logging
import math
@@ -531,8 +530,8 @@ class Store(driver.Store):
:raises: `glance_store.exceptions.Duplicate` if the image already
exists
"""
- checksum = hashlib.md5()
- os_hash_value = hashlib.new(str(hashing_algo))
+ os_hash_value = utils.get_hasher(hashing_algo, False)
+ checksum = utils.get_hasher('md5', False)
image_name = str(image_id)
with self.get_connection(conffile=self.conf_file,
rados_id=self.user) as conn:
diff --git a/glance_store/_drivers/s3.py b/glance_store/_drivers/s3.py
index 9c74831..1c18531 100644
--- a/glance_store/_drivers/s3.py
+++ b/glance_store/_drivers/s3.py
@@ -15,7 +15,6 @@
"""Storage backend for S3 or Storage Servers that follow the S3 Protocol"""
-import hashlib
import logging
import math
import re
@@ -631,8 +630,8 @@ class Store(glance_store.driver.Store):
(3) checksum, (4) multihash value, and (5) a dictionary
with storage system specific information
"""
- os_hash_value = hashlib.new(str(hashing_algo))
- checksum = hashlib.md5()
+ os_hash_value = utils.get_hasher(hashing_algo, False)
+ checksum = utils.get_hasher('md5', False)
image_data = b''
image_size = 0
for chunk in utils.chunkreadable(image_file, self.WRITE_CHUNKSIZE):
@@ -676,8 +675,8 @@ class Store(glance_store.driver.Store):
(3) checksum, (4) multihash value, and (5) a dictionary
with storage system specific information
"""
- os_hash_value = hashlib.new(str(hashing_algo))
- checksum = hashlib.md5()
+ os_hash_value = utils.get_hasher(hashing_algo, False)
+ checksum = utils.get_hasher('md5', False)
pool_size = self.s3_store_thread_pools
pool = eventlet.greenpool.GreenPool(size=pool_size)
mpu = s3_client.create_multipart_upload(Bucket=bucket, Key=key)
diff --git a/glance_store/_drivers/swift/store.py b/glance_store/_drivers/swift/store.py
index f177602..9a531e4 100644
--- a/glance_store/_drivers/swift/store.py
+++ b/glance_store/_drivers/swift/store.py
@@ -15,7 +15,6 @@
"""Storage backend for SWIFT"""
-import hashlib
import logging
import math
@@ -40,6 +39,7 @@ from glance_store._drivers.swift import buffered
from glance_store._drivers.swift import connection_manager
from glance_store._drivers.swift import utils as sutils
from glance_store import capabilities
+from glance_store.common import utils as gutils
from glance_store import driver
from glance_store import exceptions
from glance_store.i18n import _, _LE, _LI
@@ -931,7 +931,7 @@ class BaseStore(driver.Store):
:raises: `glance_store.exceptions.Duplicate` if something already
exists at this location
"""
- os_hash_value = hashlib.new(str(hashing_algo))
+ os_hash_value = gutils.get_hasher(hashing_algo, False)
location = self.create_location(image_id, context=context)
# initialize a manager with re-auth if image need to be splitted
need_chunks = (image_size == 0) or (
@@ -948,7 +948,7 @@ class BaseStore(driver.Store):
if not need_chunks:
# Image size is known, and is less than large_object_size.
# Send to Swift with regular PUT.
- checksum = hashlib.md5()
+ checksum = gutils.get_hasher('md5', False)
reader = ChunkReader(image_file, checksum,
os_hash_value, image_size,
verifier=verifier)
@@ -973,7 +973,7 @@ class BaseStore(driver.Store):
"Swift.")
total_chunks = '?'
- checksum = hashlib.md5()
+ checksum = gutils.get_hasher('md5', False)
written_chunks = []
combined_chunks_size = 0
while True:
diff --git a/glance_store/_drivers/vmware_datastore.py b/glance_store/_drivers/vmware_datastore.py
index 07a3abb..52ec77a 100644
--- a/glance_store/_drivers/vmware_datastore.py
+++ b/glance_store/_drivers/vmware_datastore.py
@@ -15,7 +15,6 @@
"""Storage backend for VMware Datastore"""
-import hashlib
import logging
import os
@@ -261,8 +260,8 @@ class _Reader(object):
def __init__(self, data, hashing_algo, verifier=None):
self._size = 0
self.data = data
- self.checksum = hashlib.md5()
- self.os_hash_value = hashlib.new(str(hashing_algo))
+ self.os_hash_value = utils.get_hasher(hashing_algo, False)
+ self.checksum = utils.get_hasher('md5', False)
self.verifier = verifier
def read(self, size=None):
diff --git a/glance_store/common/utils.py b/glance_store/common/utils.py
index 0ee40f7..b85b5d7 100644
--- a/glance_store/common/utils.py
+++ b/glance_store/common/utils.py
@@ -18,10 +18,12 @@
System-level utilities and helper functions.
"""
+import hashlib
import logging
import uuid
from oslo_concurrency import lockutils
+from oslo_utils.secretutils import md5
try:
from eventlet import sleep
@@ -106,6 +108,21 @@ def cooperative_read(fd):
return readfn
+def get_hasher(hash_algo, usedforsecurity=True):
+ """
+ Returns the required hasher, given the hashing algorithm.
+ This is primarily to ensure that the hash algorithm is correctly
+ chosen when executed on a FIPS enabled system
+
+ :param hash_algo: hash algorithm requested
+ :param usedforsecurity: whether the hashes are used in a security context
+ """
+ if str(hash_algo) == 'md5':
+ return md5(usedforsecurity=usedforsecurity)
+ else:
+ return hashlib.new(str(hash_algo))
+
+
class CooperativeReader(object):
"""
An eventlet thread friendly class for reading in image data.
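A usage sketch for the new helper (hypothetical call site; the driver hunks below invoke it the same way): get_hasher returns an ordinary hashlib-style object, so streaming update() calls and hexdigest() work unchanged.

    # Sketch only: how a driver obtains FIPS-safe hashers for checksums.
    from glance_store.common import utils

    checksum = utils.get_hasher('md5', usedforsecurity=False)
    os_hash_value = utils.get_hasher('sha256', usedforsecurity=False)

    for chunk in (b'first chunk', b'second chunk'):  # stand-in image data
        checksum.update(chunk)
        os_hash_value.update(chunk)

    etag = checksum.hexdigest()
    multihash = os_hash_value.hexdigest()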
diff --git a/glance_store/tests/unit/test_cinder_store.py b/glance_store/tests/unit/test_cinder_store.py
index 2137f42..858bcbd 100644
--- a/glance_store/tests/unit/test_cinder_store.py
+++ b/glance_store/tests/unit/test_cinder_store.py
@@ -29,6 +29,7 @@ import uuid
from cinderclient.v3 import client as cinderclient
from os_brick.initiator import connector
from oslo_concurrency import processutils
+from oslo_utils.secretutils import md5
from oslo_utils import units
from glance_store import exceptions
@@ -333,7 +334,8 @@ class TestCinderStore(base.StoreBaseTest,
expected_size = size_kb * units.Ki
expected_file_contents = b"*" * expected_size
image_file = six.BytesIO(expected_file_contents)
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
expected_location = 'cinder://%s' % fake_volume.id
fake_client = FakeObject(auth_token=None, management_url=None)
diff --git a/glance_store/tests/unit/test_driver.py b/glance_store/tests/unit/test_driver.py
index 1f3c7e3..103ba8b 100644
--- a/glance_store/tests/unit/test_driver.py
+++ b/glance_store/tests/unit/test_driver.py
@@ -15,6 +15,7 @@
import hashlib
+from oslo_utils.secretutils import md5
from oslotest import base
import glance_store.driver as driver
@@ -26,12 +27,16 @@ class _FakeStore(object):
def add(self, image_id, image_file, image_size, hashing_algo,
context=None, verifier=None):
"""This is a 0.26.0+ add, returns a 5-tuple"""
- hasher = hashlib.new(hashing_algo)
+ if hashing_algo == 'md5':
+ hasher = md5(usedforsecurity=False)
+ else:
+ hasher = hashlib.new(str(hashing_algo))
+
# assume 'image_file' will be bytes for these tests
hasher.update(image_file)
backend_url = "backend://%s" % image_id
bytes_written = len(image_file)
- checksum = hashlib.md5(image_file).hexdigest()
+ checksum = md5(image_file, usedforsecurity=False).hexdigest()
multihash = hasher.hexdigest()
metadata_dict = {"verifier_obj":
verifier.name if verifier else None,
@@ -58,7 +63,8 @@ class TestBackCompatWrapper(base.BaseTestCase):
self.img_id = '1234'
self.img_file = b'0123456789'
self.img_size = 10
- self.img_checksum = hashlib.md5(self.img_file).hexdigest()
+ self.img_checksum = md5(self.img_file,
+ usedforsecurity=False).hexdigest()
self.hashing_algo = 'sha256'
self.img_sha256 = hashlib.sha256(self.img_file).hexdigest()
diff --git a/glance_store/tests/unit/test_filesystem_store.py b/glance_store/tests/unit/test_filesystem_store.py
index bb98f41..db61e89 100644
--- a/glance_store/tests/unit/test_filesystem_store.py
+++ b/glance_store/tests/unit/test_filesystem_store.py
@@ -24,6 +24,7 @@ from unittest import mock
import uuid
import fixtures
+from oslo_utils.secretutils import md5
from oslo_utils import units
import six
from six.moves import builtins
@@ -154,7 +155,8 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (self.test_dir,
expected_image_id)
@@ -604,7 +606,8 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (store_map[1],
expected_image_id)
@@ -651,7 +654,8 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (store_map[1],
expected_image_id)
@@ -761,7 +765,8 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (store,
expected_image_id)
@@ -803,7 +808,8 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (store,
expected_image_id)
diff --git a/glance_store/tests/unit/test_multistore_cinder.py b/glance_store/tests/unit/test_multistore_cinder.py
index d35dfc9..4a9a76f 100644
--- a/glance_store/tests/unit/test_multistore_cinder.py
+++ b/glance_store/tests/unit/test_multistore_cinder.py
@@ -15,7 +15,6 @@
import contextlib
import errno
-import hashlib
import os
from unittest import mock
@@ -30,6 +29,7 @@ import fixtures
from os_brick.initiator import connector
from oslo_concurrency import processutils
from oslo_config import cfg
+from oslo_utils.secretutils import md5
from oslo_utils import units
import glance_store as store
@@ -403,7 +403,8 @@ class TestMultiCinderStore(base.MultiStoreBaseTest,
expected_size = size_kb * units.Ki
expected_file_contents = b"*" * expected_size
image_file = six.BytesIO(expected_file_contents)
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_location = 'cinder://%s/%s' % (backend, fake_volume.id)
fake_client = FakeObject(auth_token=None, management_url=None)
fake_volume.manager.get.return_value = fake_volume
diff --git a/glance_store/tests/unit/test_multistore_filesystem.py b/glance_store/tests/unit/test_multistore_filesystem.py
index 820d293..5f8d4ea 100644
--- a/glance_store/tests/unit/test_multistore_filesystem.py
+++ b/glance_store/tests/unit/test_multistore_filesystem.py
@@ -16,7 +16,6 @@
"""Tests the filesystem backend store"""
import errno
-import hashlib
import json
import os
import stat
@@ -25,6 +24,7 @@ import uuid
import fixtures
from oslo_config import cfg
+from oslo_utils.secretutils import md5
from oslo_utils import units
import six
from six.moves import builtins
@@ -189,7 +189,8 @@ class TestMultiStore(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_location = "file://%s/%s" % (self.test_dir,
expected_image_id)
image_file = six.BytesIO(expected_file_contents)
@@ -229,7 +230,8 @@ class TestMultiStore(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_location = "file://%s/%s" % (self.test_dir,
expected_image_id)
image_file = six.BytesIO(expected_file_contents)
@@ -583,7 +585,8 @@ class TestMultiStore(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_location = "file://%s/%s" % (store_map[1],
expected_image_id)
image_file = six.BytesIO(expected_file_contents)
@@ -630,7 +633,8 @@ class TestMultiStore(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_location = "file://%s/%s" % (store_map[1],
expected_image_id)
image_file = six.BytesIO(expected_file_contents)
@@ -741,7 +745,8 @@ class TestMultiStore(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_location = "file://%s/%s" % (store,
expected_image_id)
image_file = six.BytesIO(expected_file_contents)
@@ -785,7 +790,8 @@ class TestMultiStore(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = b"*" * expected_file_size
- expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+ expected_checksum = md5(expected_file_contents,
+ usedforsecurity=False).hexdigest()
expected_location = "file://%s/%s" % (store,
expected_image_id)
image_file = six.BytesIO(expected_file_contents)
diff --git a/glance_store/tests/unit/test_multistore_s3.py b/glance_store/tests/unit/test_multistore_s3.py
index 3c50a85..d242b8f 100644
--- a/glance_store/tests/unit/test_multistore_s3.py
+++ b/glance_store/tests/unit/test_multistore_s3.py
@@ -24,6 +24,7 @@ import botocore
from botocore import exceptions as boto_exceptions
from botocore import stub
from oslo_config import cfg
+from oslo_utils.secretutils import md5
from oslo_utils import units
import six
@@ -205,7 +206,8 @@ class TestMultiS3Store(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_s3_size = FIVE_KB
expected_s3_contents = b"*" * expected_s3_size
- expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
+ expected_checksum = md5(expected_s3_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_CONF['s3_store_access_key'],
@@ -258,7 +260,8 @@ class TestMultiS3Store(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_s3_size = FIVE_KB
expected_s3_contents = b"*" * expected_s3_size
- expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
+ expected_checksum = md5(expected_s3_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_CONF['s3_store_access_key'],
@@ -331,7 +334,8 @@ class TestMultiS3Store(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_s3_size = 16 * units.Mi
expected_s3_contents = b"*" * expected_s3_size
- expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
+ expected_checksum = md5(expected_s3_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_CONF['s3_store_access_key'],
diff --git a/glance_store/tests/unit/test_multistore_vmware.py b/glance_store/tests/unit/test_multistore_vmware.py
index e060dd1..4a5df7a 100644
--- a/glance_store/tests/unit/test_multistore_vmware.py
+++ b/glance_store/tests/unit/test_multistore_vmware.py
@@ -20,6 +20,7 @@ from unittest import mock
import uuid
from oslo_config import cfg
+from oslo_utils import secretutils
from oslo_utils import units
from oslo_vmware import api
from oslo_vmware import exceptions as vmware_exceptions
@@ -175,7 +176,7 @@ class TestMultiStore(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_size = FIVE_KB
expected_contents = b"*" * expected_size
- hash_code = hashlib.md5(expected_contents)
+ hash_code = secretutils.md5(expected_contents, usedforsecurity=False)
expected_checksum = hash_code.hexdigest()
fake_size.__get__ = mock.Mock(return_value=expected_size)
expected_cookie = 'vmware_soap_session=fake-uuid'
@@ -216,7 +217,7 @@ class TestMultiStore(base.MultiStoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_size = FIVE_KB
expected_contents = b"*" * expected_size
- hash_code = hashlib.md5(expected_contents)
+ hash_code = secretutils.md5(expected_contents, usedforsecurity=False)
expected_checksum = hash_code.hexdigest()
fake_size.__get__ = mock.Mock(return_value=expected_size)
with mock.patch('hashlib.md5') as md5:
@@ -330,7 +331,8 @@ class TestMultiStore(base.MultiStoreBaseTest,
def test_reader_full(self):
content = b'XXX'
image = six.BytesIO(content)
- expected_checksum = hashlib.md5(content).hexdigest()
+ expected_checksum = secretutils.md5(content,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(content).hexdigest()
reader = vm_store._Reader(image, self.hash_algo)
ret = reader.read()
@@ -342,7 +344,8 @@ class TestMultiStore(base.MultiStoreBaseTest,
def test_reader_partial(self):
content = b'XXX'
image = six.BytesIO(content)
- expected_checksum = hashlib.md5(b'X').hexdigest()
+ expected_checksum = secretutils.md5(b'X',
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(b'X').hexdigest()
reader = vm_store._Reader(image, self.hash_algo)
ret = reader.read(1)
diff --git a/glance_store/tests/unit/test_rbd_store.py b/glance_store/tests/unit/test_rbd_store.py
index c6a5f27..2d1cae6 100644
--- a/glance_store/tests/unit/test_rbd_store.py
+++ b/glance_store/tests/unit/test_rbd_store.py
@@ -16,6 +16,7 @@
import hashlib
from unittest import mock
+from oslo_utils.secretutils import md5
from oslo_utils import units
import six
@@ -416,7 +417,8 @@ class TestStore(base.StoreBaseTest,
file_size = 5 * units.Ki # 5K
file_contents = b"*" * file_size
image_file = six.BytesIO(file_contents)
- expected_checksum = hashlib.md5(file_contents).hexdigest()
+ expected_checksum = md5(file_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(file_contents).hexdigest()
with mock.patch.object(rbd_store.rbd.Image, 'write'):
@@ -489,7 +491,8 @@ class TestStore(base.StoreBaseTest,
image_id = 'fake_image_id'
image_file = six.BytesIO(content)
- expected_checksum = hashlib.md5(content).hexdigest()
+ expected_checksum = md5(content,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(content).hexdigest()
with mock.patch.object(rbd_store.rbd.Image, 'write') as mock_write:
diff --git a/glance_store/tests/unit/test_s3_store.py b/glance_store/tests/unit/test_s3_store.py
index a101e36..2b95bfa 100644
--- a/glance_store/tests/unit/test_s3_store.py
+++ b/glance_store/tests/unit/test_s3_store.py
@@ -23,6 +23,7 @@ import boto3
import botocore
from botocore import exceptions as boto_exceptions
from botocore import stub
+from oslo_utils.secretutils import md5
from oslo_utils import units
import six
@@ -158,7 +159,8 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_s3_size = FIVE_KB
expected_s3_contents = b"*" * expected_s3_size
- expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
+ expected_checksum = md5(expected_s3_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_CONF['s3_store_access_key'],
@@ -230,7 +232,8 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_s3_size = 16 * units.Mi
expected_s3_contents = b"*" * expected_s3_size
- expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
+ expected_checksum = md5(expected_s3_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_CONF['s3_store_access_key'],
diff --git a/glance_store/tests/unit/test_swift_store.py b/glance_store/tests/unit/test_swift_store.py
index 6df8e13..809d50e 100644
--- a/glance_store/tests/unit/test_swift_store.py
+++ b/glance_store/tests/unit/test_swift_store.py
@@ -25,6 +25,7 @@ import uuid
from oslo_config import cfg
from oslo_utils import encodeutils
+from oslo_utils.secretutils import md5
from oslo_utils import units
import requests_mock
import six
@@ -121,7 +122,8 @@ class SwiftTests(object):
if kwargs.get('headers'):
manifest = kwargs.get('headers').get('X-Object-Manifest')
etag = kwargs.get('headers') \
- .get('ETag', hashlib.md5(b'').hexdigest())
+ .get('ETag', md5(
+ b'', usedforsecurity=False).hexdigest())
fixture_headers[fixture_key] = {
'manifest': True,
'etag': etag,
@@ -133,7 +135,7 @@ class SwiftTests(object):
fixture_object = six.BytesIO()
read_len = 0
chunk = contents.read(CHUNKSIZE)
- checksum = hashlib.md5()
+ checksum = md5(usedforsecurity=False)
while chunk:
fixture_object.write(chunk)
read_len += len(chunk)
@@ -143,7 +145,8 @@ class SwiftTests(object):
else:
fixture_object = six.BytesIO(contents)
read_len = len(contents)
- etag = hashlib.md5(fixture_object.getvalue()).hexdigest()
+ etag = md5(fixture_object.getvalue(),
+ usedforsecurity=False).hexdigest()
if read_len > MAX_SWIFT_OBJECT_SIZE:
msg = ('Image size:%d exceeds Swift max:%d' %
(read_len, MAX_SWIFT_OBJECT_SIZE))
@@ -421,7 +424,8 @@ class SwiftTests(object):
self.store.configure()
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(
expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
@@ -542,7 +546,7 @@ class SwiftTests(object):
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
expected_checksum = \
- hashlib.md5(expected_swift_contents).hexdigest()
+ md5(expected_swift_contents, usedforsecurity=False).hexdigest()
expected_multihash = \
hashlib.sha256(expected_swift_contents).hexdigest()
@@ -618,7 +622,8 @@ class SwiftTests(object):
"""
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = \
hashlib.sha256(expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
@@ -664,7 +669,8 @@ class SwiftTests(object):
"""
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = \
hashlib.sha256(expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
@@ -885,7 +891,8 @@ class SwiftTests(object):
"""
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = \
hashlib.sha256(expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
@@ -939,7 +946,8 @@ class SwiftTests(object):
# Set up a 'large' image of 5KB
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_multihash = \
hashlib.sha256(expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
@@ -1922,14 +1930,14 @@ class TestChunkReader(base.StoreBaseTest):
"""
CHUNKSIZE = 100
data = b'*' * units.Ki
- expected_checksum = hashlib.md5(data).hexdigest()
+ expected_checksum = md5(data, usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(data).hexdigest()
data_file = tempfile.NamedTemporaryFile()
data_file.write(data)
data_file.flush()
infile = open(data_file.name, 'rb')
bytes_read = 0
- checksum = hashlib.md5()
+ checksum = md5(usedforsecurity=False)
os_hash_value = hashlib.sha256()
while True:
cr = swift.ChunkReader(infile, checksum, os_hash_value, CHUNKSIZE)
@@ -1951,10 +1959,10 @@ class TestChunkReader(base.StoreBaseTest):
Replicate what goes on in the Swift driver with the
repeated creation of the ChunkReader object
"""
- expected_checksum = hashlib.md5(b'').hexdigest()
+ expected_checksum = md5(b'', usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(b'').hexdigest()
CHUNKSIZE = 100
- checksum = hashlib.md5()
+ checksum = md5(usedforsecurity=False)
os_hash_value = hashlib.sha256()
data_file = tempfile.NamedTemporaryFile()
infile = open(data_file.name, 'rb')
@@ -2050,7 +2058,7 @@ class TestBufferedReader(base.StoreBaseTest):
self.infile = six.BytesIO(s)
self.infile.seek(0)
- self.checksum = hashlib.md5()
+ self.checksum = md5(usedforsecurity=False)
self.hash_algo = HASH_ALGO
self.os_hash_value = hashlib.sha256()
self.verifier = mock.MagicMock(name='mock_verifier')
@@ -2110,7 +2118,7 @@ class TestBufferedReader(base.StoreBaseTest):
def test_checksums(self):
# checksums are updated only once on a full segment read
- expected_csum = hashlib.md5()
+ expected_csum = md5(usedforsecurity=False)
expected_csum.update(b'1234567')
expected_multihash = hashlib.sha256()
expected_multihash.update(b'1234567')
@@ -2122,7 +2130,7 @@ class TestBufferedReader(base.StoreBaseTest):
def test_checksum_updated_only_once_w_full_segment_read(self):
# Test that checksums are updated only once when a full segment read
# is followed by a seek and partial reads.
- expected_csum = hashlib.md5()
+ expected_csum = md5(usedforsecurity=False)
expected_csum.update(b'1234567')
expected_multihash = hashlib.sha256()
expected_multihash.update(b'1234567')
@@ -2137,7 +2145,7 @@ class TestBufferedReader(base.StoreBaseTest):
def test_checksum_updates_during_partial_segment_reads(self):
# Test to check that checksums are updated with only the bytes
# not seen when the number of bytes being read is changed
- expected_csum = hashlib.md5()
+ expected_csum = md5(usedforsecurity=False)
expected_multihash = hashlib.sha256()
self.reader.read(4)
expected_csum.update(b'1234')
@@ -2160,7 +2168,7 @@ class TestBufferedReader(base.StoreBaseTest):
def test_checksum_rolling_calls(self):
# Test that the checksum continues on to the next segment
- expected_csum = hashlib.md5()
+ expected_csum = md5(usedforsecurity=False)
expected_multihash = hashlib.sha256()
self.reader.read(7)
expected_csum.update(b'1234567')
@@ -2229,7 +2237,7 @@ class TestBufferedReader(base.StoreBaseTest):
infile = six.BytesIO(s)
infile.seek(0)
total = 7
- checksum = hashlib.md5()
+ checksum = md5(usedforsecurity=False)
os_hash_value = hashlib.sha256()
self.reader = buffered.BufferedReader(
infile, checksum, os_hash_value, total)
@@ -2254,14 +2262,14 @@ class TestBufferedReader(base.StoreBaseTest):
"""
CHUNKSIZE = 100
data = b'*' * units.Ki
- expected_checksum = hashlib.md5(data).hexdigest()
+ expected_checksum = md5(data, usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(data).hexdigest()
data_file = tempfile.NamedTemporaryFile()
data_file.write(data)
data_file.flush()
infile = open(data_file.name, 'rb')
bytes_read = 0
- checksum = hashlib.md5()
+ checksum = md5(usedforsecurity=False)
os_hash_value = hashlib.sha256()
while True:
cr = buffered.BufferedReader(infile, checksum, os_hash_value,
diff --git a/glance_store/tests/unit/test_swift_store_multibackend.py b/glance_store/tests/unit/test_swift_store_multibackend.py
index 5e76e03..dba5bda 100644
--- a/glance_store/tests/unit/test_swift_store_multibackend.py
+++ b/glance_store/tests/unit/test_swift_store_multibackend.py
@@ -25,6 +25,7 @@ import uuid
from oslo_config import cfg
from oslo_utils import encodeutils
+from oslo_utils.secretutils import md5
from oslo_utils import units
import requests_mock
import six
@@ -112,7 +113,8 @@ class SwiftTests(object):
if kwargs.get('headers'):
manifest = kwargs.get('headers').get('X-Object-Manifest')
etag = kwargs.get('headers') \
- .get('ETag', hashlib.md5(b'').hexdigest())
+ .get('ETag', md5(
+ b'', usedforsecurity=False).hexdigest())
fixture_headers[fixture_key] = {
'manifest': True,
'etag': etag,
@@ -124,7 +126,7 @@ class SwiftTests(object):
fixture_object = six.BytesIO()
read_len = 0
chunk = contents.read(CHUNKSIZE)
- checksum = hashlib.md5()
+ checksum = md5(usedforsecurity=False)
while chunk:
fixture_object.write(chunk)
read_len += len(chunk)
@@ -134,7 +136,8 @@ class SwiftTests(object):
else:
fixture_object = six.BytesIO(contents)
read_len = len(contents)
- etag = hashlib.md5(fixture_object.getvalue()).hexdigest()
+ etag = md5(fixture_object.getvalue(),
+ usedforsecurity=False).hexdigest()
if read_len > MAX_SWIFT_OBJECT_SIZE:
msg = ('Image size:%d exceeds Swift max:%d' %
(read_len, MAX_SWIFT_OBJECT_SIZE))
@@ -396,7 +399,8 @@ class SwiftTests(object):
self.store.configure()
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_image_id = str(uuid.uuid4())
loc = "swift+https://tenant%%3Auser1:key@localhost:8080/glance/%s"
expected_location = loc % (expected_image_id)
@@ -520,7 +524,7 @@ class SwiftTests(object):
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
expected_checksum = \
- hashlib.md5(expected_swift_contents).hexdigest()
+ md5(expected_swift_contents, usedforsecurity=False).hexdigest()
image_swift = six.BytesIO(expected_swift_contents)
@@ -595,7 +599,8 @@ class SwiftTests(object):
"""
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_image_id = str(uuid.uuid4())
loc = 'swift+config://ref1/noexist/%s'
expected_location = loc % (expected_image_id)
@@ -641,7 +646,8 @@ class SwiftTests(object):
"""
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_image_id = str(uuid.uuid4())
container = 'randomname_' + expected_image_id[:2]
loc = 'swift+config://ref1/%s/%s'
@@ -865,7 +871,8 @@ class SwiftTests(object):
"""
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_image_id = str(uuid.uuid4())
loc = 'swift+config://ref1/glance/%s'
expected_location = loc % (expected_image_id)
@@ -919,7 +926,8 @@ class SwiftTests(object):
# Set up a 'large' image of 5KB
expected_swift_size = FIVE_KB
expected_swift_contents = b"*" * expected_swift_size
- expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+ expected_checksum = md5(expected_swift_contents,
+ usedforsecurity=False).hexdigest()
expected_image_id = str(uuid.uuid4())
loc = 'swift+config://ref1/glance/%s'
expected_location = loc % (expected_image_id)
@@ -2076,14 +2084,14 @@ class TestChunkReader(base.MultiStoreBaseTest):
"""
CHUNKSIZE = 100
data = b'*' * units.Ki
- expected_checksum = hashlib.md5(data).hexdigest()
+ expected_checksum = md5(data, usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(data).hexdigest()
data_file = tempfile.NamedTemporaryFile()
data_file.write(data)
data_file.flush()
infile = open(data_file.name, 'rb')
bytes_read = 0
- checksum = hashlib.md5()
+ checksum = md5(usedforsecurity=False)
os_hash_value = hashlib.sha256()
while True:
cr = swift.ChunkReader(infile, checksum, os_hash_value, CHUNKSIZE)
@@ -2105,10 +2113,10 @@ class TestChunkReader(base.MultiStoreBaseTest):
Replicate what goes on in the Swift driver with the
repeated creation of the ChunkReader object
"""
- expected_checksum = hashlib.md5(b'').hexdigest()
+ expected_checksum = md5(b'', usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(b'').hexdigest()
CHUNKSIZE = 100
- checksum = hashlib.md5()
+ checksum = md5(usedforsecurity=False)
os_hash_value = hashlib.sha256()
data_file = tempfile.NamedTemporaryFile()
infile = open(data_file.name, 'rb')
diff --git a/glance_store/tests/unit/test_vmware_store.py b/glance_store/tests/unit/test_vmware_store.py
index 935c08f..c74aab8 100644
--- a/glance_store/tests/unit/test_vmware_store.py
+++ b/glance_store/tests/unit/test_vmware_store.py
@@ -19,6 +19,7 @@ import hashlib
from unittest import mock
import uuid
+from oslo_utils import secretutils
from oslo_utils import units
from oslo_vmware import api
from oslo_vmware import exceptions as vmware_exceptions
@@ -145,7 +146,7 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_size = FIVE_KB
expected_contents = b"*" * expected_size
- hash_code = hashlib.md5(expected_contents)
+ hash_code = secretutils.md5(expected_contents, usedforsecurity=False)
expected_checksum = hash_code.hexdigest()
sha256_code = hashlib.sha256(expected_contents)
expected_multihash = sha256_code.hexdigest()
@@ -190,7 +191,7 @@ class TestStore(base.StoreBaseTest,
expected_image_id = str(uuid.uuid4())
expected_size = FIVE_KB
expected_contents = b"*" * expected_size
- hash_code = hashlib.md5(expected_contents)
+ hash_code = secretutils.md5(expected_contents, usedforsecurity=False)
expected_checksum = hash_code.hexdigest()
sha256_code = hashlib.sha256(expected_contents)
expected_multihash = sha256_code.hexdigest()
@@ -303,7 +304,8 @@ class TestStore(base.StoreBaseTest,
def test_reader_full(self):
content = b'XXX'
image = six.BytesIO(content)
- expected_checksum = hashlib.md5(content).hexdigest()
+ expected_checksum = secretutils.md5(content,
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(content).hexdigest()
reader = vm_store._Reader(image, self.hash_algo)
ret = reader.read()
@@ -315,7 +317,8 @@ class TestStore(base.StoreBaseTest,
def test_reader_partial(self):
content = b'XXX'
image = six.BytesIO(content)
- expected_checksum = hashlib.md5(b'X').hexdigest()
+ expected_checksum = secretutils.md5(b'X',
+ usedforsecurity=False).hexdigest()
expected_multihash = hashlib.sha256(b'X').hexdigest()
reader = vm_store._Reader(image, self.hash_algo)
ret = reader.read(1)
diff --git a/lower-constraints.txt b/lower-constraints.txt
index 67923e6..5b478d2 100644
--- a/lower-constraints.txt
+++ b/lower-constraints.txt
@@ -44,7 +44,7 @@ oslo.privsep==1.23.0
oslo.rootwrap==5.8.0
oslo.serialization==2.18.0
oslotest==3.2.0
-oslo.utils==3.33.0
+oslo.utils==4.7.0
oslo.vmware==2.17.0
packaging==17.1
Parsley==1.3
diff --git a/requirements.txt b/requirements.txt
index e537c3a..ff68486 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@
oslo.config>=5.2.0 # Apache-2.0
oslo.i18n>=3.15.3 # Apache-2.0
oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0
-oslo.utils>=3.33.0 # Apache-2.0
+oslo.utils>=4.7.0 # Apache-2.0
oslo.concurrency>=3.26.0 # Apache-2.0
stevedore>=1.20.0 # Apache-2.0
eventlet!=0.18.3,!=0.20.1,>=0.18.2 # MIT