author    Arnaud Morin <arnaud.morin@ovhcloud.com>  2022-08-12 20:45:27 +0200
committer Arnaud Morin <arnaud.morin@ovhcloud.com>  2022-12-02 11:46:46 +0100
commit    2ad4a73e7f4278fe961e8c75f7f9bdbf51382b64 (patch)
tree      689e08e092e3f3bb6482841cf31b35b5a768d29f
parent    052c5cc4b4ed494600612018da4482213f4edc5f (diff)
download  glance_store-2ad4a73e7f4278fe961e8c75f7f9bdbf51382b64.tar.gz
Add region_name option to s3 store
Add an option to let the operator choose a proper region_name. Some operators are not using S3 from Amazon but from another provider. In such a case, get_s3_location cannot return a correct region_name. Adding an option to the configuration seems a better approach than relying on values hardcoded in the code.

Signed-off-by: Arnaud Morin <arnaud.morin@ovhcloud.com>
Change-Id: I71352e75f6415494709d8fb62c8b13f3ea7c6016
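For illustration, here is a minimal sketch (not the driver code itself; the function name is hypothetical) of the precedence the patch introduces when deciding which region_name and endpoint_url to hand to boto: an explicitly configured s3_store_region_name wins and keeps the custom endpoint, otherwise a region derived from the host is used, otherwise only the endpoint URL is passed.

def resolve_region_and_endpoint(configured_region, s3_host, location):
    """Mirror of the patched selection logic in the s3 driver (sketch only).

    configured_region -- value of s3_store_region_name, may be ''
    s3_host           -- value of s3_store_host
    location          -- region guessed from the host (get_s3_location), may be ''
    """
    region_name, endpoint_url = None, None
    if configured_region:
        # Operator-provided region takes priority; keep the custom endpoint.
        region_name = configured_region
        endpoint_url = s3_host
    elif location:
        # Known Amazon host: let boto derive the endpoint from the region.
        region_name = location
    else:
        # Unknown / S3-compatible host: talk to it directly.
        endpoint_url = s3_host
    return region_name, endpoint_url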
-rw-r--r--  glance_store/_drivers/s3.py                     37
-rw-r--r--  glance_store/tests/unit/test_multistore_s3.py   17
-rw-r--r--  glance_store/tests/unit/test_opts.py             1
-rw-r--r--  glance_store/tests/unit/test_s3_store.py        24
4 files changed, 75 insertions, 4 deletions
diff --git a/glance_store/_drivers/s3.py b/glance_store/_drivers/s3.py
index 1a21ddd..2988bae 100644
--- a/glance_store/_drivers/s3.py
+++ b/glance_store/_drivers/s3.py
@@ -65,6 +65,22 @@ Related Options:
* s3_store_secret_key
"""),
+ cfg.StrOpt('s3_store_region_name',
+ default='',
+ help="""
+The S3 region name.
+
+This parameter will set the region_name used by boto.
+If this parameter is not set, we will try to compute it from the
+s3_store_host.
+
+Possible values:
+ * A valid region name
+
+Related Options:
+ * s3_store_host
+
+"""),
cfg.StrOpt('s3_store_access_key',
secret=True,
help="""
@@ -375,6 +391,7 @@ class Store(glance_store.driver.Store):
itself, it should raise `exceptions.BadStoreConfiguration`
"""
self.s3_host = self._option_get('s3_store_host')
+ self.region_name = self._option_get('s3_store_region_name')
self.access_key = self._option_get('s3_store_access_key')
self.secret_key = self._option_get('s3_store_secret_key')
self.bucket = self._option_get('s3_store_bucket')
@@ -432,6 +449,8 @@ class Store(glance_store.driver.Store):
if not result:
if param == 's3_store_create_bucket_on_put':
return result
+ if param == 's3_store_region_name':
+ return result
reason = _("Could not find %s in configuration options.") % param
LOG.error(reason)
raise exceptions.BadStoreConfiguration(store_name="s3",
@@ -460,7 +479,10 @@ class Store(glance_store.driver.Store):
raise boto_exceptions.InvalidDNSNameError(bucket_name=bucket_name)
region_name, endpoint_url = None, None
- if location:
+ if self.region_name:
+ region_name = self.region_name
+ endpoint_url = s3_host
+ elif location:
region_name = location
else:
endpoint_url = s3_host
@@ -578,7 +600,8 @@ class Store(glance_store.driver.Store):
if self._option_get('s3_store_create_bucket_on_put'):
self._create_bucket(s3_client,
self._option_get('s3_store_host'),
- bucket)
+ bucket,
+ self._option_get('s3_store_region_name'))
else:
msg = (_("The bucket %s does not exist in "
"S3. Please set the "
@@ -850,15 +873,20 @@ class Store(glance_store.driver.Store):
return True
@staticmethod
- def _create_bucket(s3_client, s3_host, bucket):
+ def _create_bucket(s3_client, s3_host, bucket, region_name=None):
"""Create bucket into the S3.
:param s3_client: An object with credentials to connect to S3
:param s3_host: S3 endpoint url
:param bucket: S3 bucket name
+ :param region_name: An optional region_name. If not provided, will try
+ to compute it from s3_host
:raises: BadStoreConfiguration if cannot connect to S3 successfully
"""
- region = get_s3_location(s3_host)
+ if region_name:
+ region = region_name
+ else:
+ region = get_s3_location(s3_host)
try:
s3_client.create_bucket(
Bucket=bucket,
@@ -901,6 +929,7 @@ def get_s3_location(s3_host):
Amazon S3, and if user wants to use S3 compatible storage,
returns ''
"""
+ # NOTE(arnaud): maybe get rid of hardcoded amazon stuff here?
locations = {
's3.amazonaws.com': '',
's3-us-east-1.amazonaws.com': 'us-east-1',
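As context for why endpoint_url is kept alongside the configured region, the values resolved in the driver changes above end up in a boto3 client call roughly like the sketch below (variable names and the signature_version detail are assumptions for illustration; the unit tests further down only assert on service_name, region_name, endpoint_url, use_ssl and config).

import boto3
from botocore.client import Config

def make_s3_client(access_key, secret_key, s3_host, region_name, endpoint_url):
    """Sketch of an S3 client built with an explicit region plus the custom
    endpoint of a non-Amazon provider, as the patched driver allows."""
    session = boto3.session.Session(
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
    )
    return session.client(
        service_name='s3',
        region_name=region_name,    # e.g. the configured s3_store_region_name
        endpoint_url=endpoint_url,  # e.g. the configured s3_store_host
        use_ssl=s3_host.startswith('https://'),
        config=Config(signature_version='s3v4'),
    )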
diff --git a/glance_store/tests/unit/test_multistore_s3.py b/glance_store/tests/unit/test_multistore_s3.py
index 60c5e80..b35dca5 100644
--- a/glance_store/tests/unit/test_multistore_s3.py
+++ b/glance_store/tests/unit/test_multistore_s3.py
@@ -90,6 +90,7 @@ class TestMultiS3Store(base.MultiStoreBaseTest,
s3_store_access_key='user',
s3_store_secret_key='key',
s3_store_host='https://s3-region1.com',
+ s3_store_region_name='custom_region_name',
s3_store_bucket='glance',
s3_store_large_object_size=S3_CONF[
's3_store_large_object_size'
@@ -132,6 +133,22 @@ class TestMultiS3Store(base.MultiStoreBaseTest,
self.assertRaises(boto_exceptions.InvalidDNSNameError,
self.store.get, loc)
+ @mock.patch('glance_store.location.Location')
+ @mock.patch.object(boto3.session.Session, "client")
+ def test_client_custom_region_name(self, mock_client, mock_loc):
+ """Test a custom s3_store_region_name in config"""
+ mock_loc.accesskey = 'abcd'
+ mock_loc.secretkey = 'efgh'
+ mock_loc.bucket = 'bucket1'
+ self.store._create_s3_client(mock_loc)
+ mock_client.assert_called_with(
+ config=mock.ANY,
+ endpoint_url='https://s3-region1.com',
+ region_name='custom_region_name',
+ service_name='s3',
+ use_ssl=False,
+ )
+
@mock.patch.object(boto3.session.Session, "client")
def test_get(self, mock_client):
"""Test a "normal" retrieval of an image in chunks."""
diff --git a/glance_store/tests/unit/test_opts.py b/glance_store/tests/unit/test_opts.py
index 5928d4f..bfb9bce 100644
--- a/glance_store/tests/unit/test_opts.py
+++ b/glance_store/tests/unit/test_opts.py
@@ -106,6 +106,7 @@ class OptsTestCase(base.StoreBaseTest):
's3_store_bucket_url_format',
's3_store_create_bucket_on_put',
's3_store_host',
+ 's3_store_region_name',
's3_store_secret_key',
's3_store_large_object_size',
's3_store_large_object_chunk_size',
diff --git a/glance_store/tests/unit/test_s3_store.py b/glance_store/tests/unit/test_s3_store.py
index 7fa7a13..583535f 100644
--- a/glance_store/tests/unit/test_s3_store.py
+++ b/glance_store/tests/unit/test_s3_store.py
@@ -41,6 +41,7 @@ FIVE_KB = 5 * units.Ki
S3_CONF = {
's3_store_access_key': 'user',
's3_store_secret_key': 'key',
+ 's3_store_region_name': '',
's3_store_host': 'localhost',
's3_store_bucket': 'glance',
's3_store_large_object_size': 9, # over 9MB is large
@@ -84,6 +85,29 @@ class TestStore(base.StoreBaseTest,
self.assertRaises(boto_exceptions.InvalidDNSNameError,
self.store.get, loc)
+ @mock.patch('glance_store.location.Location')
+ @mock.patch.object(boto3.session.Session, "client")
+ def test_client_custom_region_name(self, mock_client, mock_loc):
+ """Test a custom s3_store_region_name in config"""
+ self.config(s3_store_host='http://example.com')
+ self.config(s3_store_region_name='regionOne')
+ self.config(s3_store_bucket_url_format='path')
+ self.store.configure()
+
+ mock_loc.accesskey = 'abcd'
+ mock_loc.secretkey = 'efgh'
+ mock_loc.bucket = 'bucket1'
+
+ self.store._create_s3_client(mock_loc)
+
+ mock_client.assert_called_with(
+ config=mock.ANY,
+ endpoint_url='http://example.com',
+ region_name='regionOne',
+ service_name='s3',
+ use_ssl=False,
+ )
+
@mock.patch.object(boto3.session.Session, "client")
def test_get(self, mock_client):
"""Test a "normal" retrieval of an image in chunks."""