Diffstat (limited to 'test')
-rw-r--r--  test/functional/__init__.py                        |  16
-rw-r--r--  test/functional/s3api/__init__.py                  |   9
-rw-r--r--  test/functional/s3api/s3_test_client.py            |  40
-rw-r--r--  test/functional/s3api/test_acl.py                  |   6
-rw-r--r--  test/functional/s3api/test_bucket.py               |  16
-rw-r--r--  test/functional/s3api/test_multi_delete.py         |   2
-rw-r--r--  test/functional/s3api/test_multi_upload.py         |  19
-rw-r--r--  test/functional/s3api/test_object.py               |  10
-rw-r--r--  test/functional/s3api/test_service.py              |   2
-rw-r--r--  test/functional/test_object.py                     |   9
-rw-r--r--  test/functional/test_object_versioning.py          | 106
-rw-r--r--  test/functional/test_slo.py                        |  31
-rwxr-xr-x  test/functional/test_symlink.py                    |  79
-rw-r--r--  test/functional/test_versioned_writes.py           |  11
-rw-r--r--  test/functional/tests.py                           |  75
-rw-r--r--  test/probe/test_object_async_update.py             |   7
-rw-r--r--  test/probe/test_object_metadata_replication.py     |   3
-rw-r--r--  test/sample.conf                                   |   4
-rw-r--r--  test/unit/account/test_backend.py                  |  66
-rw-r--r--  test/unit/cli/test_info.py                         | 148
-rw-r--r--  test/unit/cli/test_manage_shard_ranges.py          |   2
-rw-r--r--  test/unit/cli/test_relinker.py                     | 218
-rw-r--r--  test/unit/common/middleware/s3api/test_obj.py      |  91
-rw-r--r--  test/unit/common/middleware/s3api/test_s3_acl.py   |  16
-rw-r--r--  test/unit/common/middleware/test_ratelimit.py      | 114
-rw-r--r--  test/unit/common/middleware/test_symlink.py        |  50
-rw-r--r--  test/unit/common/test_db.py                        |   2
-rw-r--r--  test/unit/common/test_memcached.py                 |  79
-rw-r--r--  test/unit/common/test_swob.py                      |  16
-rw-r--r--  test/unit/common/test_utils.py                     | 154
-rw-r--r--  test/unit/container/test_backend.py                | 259
-rw-r--r--  test/unit/container/test_server.py                 | 110
-rw-r--r--  test/unit/container/test_sharder.py                |  28
-rw-r--r--  test/unit/proxy/controllers/test_obj.py            |   6
-rw-r--r--  test/unit/proxy/test_server.py                     |   2
35 files changed, 1426 insertions, 380 deletions
diff --git a/test/functional/__init__.py b/test/functional/__init__.py
index 707f2239a..a4f4c5a6e 100644
--- a/test/functional/__init__.py
+++ b/test/functional/__init__.py
@@ -322,12 +322,16 @@ def _load_encryption(proxy_conf_file, swift_conf_file, **kwargs):
pipeline = pipeline.replace(
"proxy-logging proxy-server",
"keymaster encryption proxy-logging proxy-server")
+ pipeline = pipeline.replace(
+ "cache listing_formats",
+ "cache etag-quoter listing_formats")
conf.set(section, 'pipeline', pipeline)
root_secret = base64.b64encode(os.urandom(32))
if not six.PY2:
root_secret = root_secret.decode('ascii')
conf.set('filter:keymaster', 'encryption_root_secret', root_secret)
conf.set('filter:versioned_writes', 'allow_object_versioning', 'true')
+ conf.set('filter:etag-quoter', 'enable_by_default', 'true')
except NoSectionError as err:
msg = 'Error problem with proxy conf file %s: %s' % \
(proxy_conf_file, err)
@@ -512,8 +516,6 @@ def _load_losf_as_default_policy(proxy_conf_file, swift_conf_file, **kwargs):
conf_loaders = {
'encryption': _load_encryption,
'ec': _load_ec_as_default_policy,
- 'domain_remap_staticweb': _load_domain_remap_staticweb,
- 's3api': _load_s3api,
'losf': _load_losf_as_default_policy,
}
@@ -552,6 +554,11 @@ def in_process_setup(the_object_server=object_server):
swift_conf = _in_process_setup_swift_conf(swift_conf_src, _testdir)
_info('prepared swift.conf: %s' % swift_conf)
+ # load s3api and staticweb configs
+ proxy_conf, swift_conf = _load_s3api(proxy_conf, swift_conf)
+ proxy_conf, swift_conf = _load_domain_remap_staticweb(proxy_conf,
+ swift_conf)
+
# Call the associated method for the value of
# 'SWIFT_TEST_IN_PROCESS_CONF_LOADER', if one exists
conf_loader_label = os.environ.get(
@@ -621,6 +628,7 @@ def in_process_setup(the_object_server=object_server):
# Below are values used by the functional test framework, as well as
# by the various in-process swift servers
'auth_uri': 'http://127.0.0.1:%d/auth/v1.0/' % prolis.getsockname()[1],
+ 's3_storage_url': 'http://%s:%d/' % prolis.getsockname(),
# Primary functional test account (needs admin access to the
# account)
'account': 'test',
@@ -902,6 +910,8 @@ def setup_package():
443 if parsed.scheme == 'https' else 80),
'auth_prefix': parsed.path,
})
+ config.setdefault('s3_storage_url',
+ urlunsplit(parsed[:2] + ('', None, None)))
elif 'auth_host' in config:
scheme = 'http'
if config_true_value(config.get('auth_ssl', 'no')):
@@ -914,6 +924,8 @@ def setup_package():
auth_prefix += 'v1.0'
config['auth_uri'] = swift_test_auth = urlunsplit(
(scheme, netloc, auth_prefix, None, None))
+ config.setdefault('s3_storage_url', urlunsplit(
+ (scheme, netloc, '', None, None)))
# else, neither auth_uri nor auth_host; swift_test_auth will be unset
# and we'll skip everything later
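For reference, the urlunsplit(parsed[:2] + ('', None, None)) idiom used above derives the S3 endpoint from the auth URI by keeping only the scheme and netloc and blanking the rest. A minimal sketch, with an illustrative auth URI rather than one taken from a real config:

    from six.moves.urllib.parse import urlsplit, urlunsplit

    auth_uri = 'http://127.0.0.1:8080/auth/v1.0'  # illustrative value
    parsed = urlsplit(auth_uri)
    # keep (scheme, netloc); drop path, query and fragment
    s3_storage_url = urlunsplit(parsed[:2] + ('', None, None))
    assert s3_storage_url == 'http://127.0.0.1:8080'
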
diff --git a/test/functional/s3api/__init__.py b/test/functional/s3api/__init__.py
index db443db80..7ad2c077b 100644
--- a/test/functional/s3api/__init__.py
+++ b/test/functional/s3api/__init__.py
@@ -37,7 +37,11 @@ class S3ApiBase(unittest.TestCase):
if 's3api' not in tf.cluster_info:
raise tf.SkipTest('s3api middleware is not enabled')
try:
- self.conn = Connection()
+ self.conn = Connection(
+ tf.config['s3_access_key'], tf.config['s3_secret_key'],
+ user_id='%s:%s' % (tf.config['account'],
+ tf.config['username']))
+
self.conn.reset()
except Exception:
message = '%s got an error during initialize process.\n\n%s' % \
@@ -67,7 +71,8 @@ class S3ApiBaseBoto3(S3ApiBase):
if 's3api' not in tf.cluster_info:
raise tf.SkipTest('s3api middleware is not enabled')
try:
- self.conn = get_boto3_conn()
+ self.conn = get_boto3_conn(
+ tf.config['s3_access_key'], tf.config['s3_secret_key'])
self.endpoint_url = self.conn._endpoint.host
self.access_key = self.conn._request_signer._credentials.access_key
self.region = self.conn._client_config.region_name
diff --git a/test/functional/s3api/s3_test_client.py b/test/functional/s3api/s3_test_client.py
index 6eea9dc69..a5d22bce4 100644
--- a/test/functional/s3api/s3_test_client.py
+++ b/test/functional/s3api/s3_test_client.py
@@ -15,6 +15,7 @@
import logging
import os
+from six.moves.urllib.parse import urlparse
import test.functional as tf
import boto3
from botocore.exceptions import ClientError
@@ -46,9 +47,9 @@ class Connection(object):
"""
Connection class used for S3 functional testing.
"""
- def __init__(self, aws_access_key='test:tester',
- aws_secret_key='testing',
- user_id='test:tester'):
+ def __init__(self, aws_access_key,
+ aws_secret_key,
+ user_id=None):
"""
Initialize method.
@@ -64,15 +65,16 @@ class Connection(object):
"""
self.aws_access_key = aws_access_key
self.aws_secret_key = aws_secret_key
- self.user_id = user_id
- # NOTE: auth_host and auth_port can be different from storage location
- self.host = tf.config['auth_host']
- self.port = int(tf.config['auth_port'])
+ self.user_id = user_id or aws_access_key
+ parsed = urlparse(tf.config['s3_storage_url'])
+ self.host = parsed.hostname
+ self.port = parsed.port
self.conn = \
- S3Connection(aws_access_key, aws_secret_key, is_secure=False,
+ S3Connection(aws_access_key, aws_secret_key,
+ is_secure=(parsed.scheme == 'https'),
host=self.host, port=self.port,
calling_format=OrdinaryCallingFormat())
- self.conn.auth_region_name = 'us-east-1'
+ self.conn.auth_region_name = tf.config.get('s3_region', 'us-east-1')
def reset(self):
"""
@@ -140,22 +142,26 @@ class Connection(object):
url = self.conn.generate_url(expires_in, method, bucket, obj)
if os.environ.get('S3_USE_SIGV4') == "True":
# V4 signatures are known-broken in boto, but we can work around it
- if url.startswith('https://'):
+ if url.startswith('https://') and not tf.config[
+ 's3_storage_url'].startswith('https://'):
url = 'http://' + url[8:]
- return url, {'Host': '%(host)s:%(port)d:%(port)d' % {
- 'host': self.host, 'port': self.port}}
+ if self.port is None:
+ return url, {}
+ else:
+ return url, {'Host': '%(host)s:%(port)d:%(port)d' % {
+ 'host': self.host, 'port': self.port}}
return url, {}
-def get_boto3_conn(aws_access_key='test:tester', aws_secret_key='testing'):
- host = tf.config['auth_host']
- port = int(tf.config['auth_port'])
+def get_boto3_conn(aws_access_key, aws_secret_key):
+ endpoint_url = tf.config['s3_storage_url']
config = boto3.session.Config(s3={'addressing_style': 'path'})
return boto3.client(
's3', aws_access_key_id=aws_access_key,
aws_secret_access_key=aws_secret_key,
- config=config, region_name='us-east-1', use_ssl=False,
- endpoint_url='http://{}:{}'.format(host, port))
+ config=config, region_name=tf.config.get('s3_region', 'us-east-1'),
+ use_ssl=endpoint_url.startswith('https:'),
+ endpoint_url=endpoint_url)
def tear_down_s3(conn):
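The Connection rework above leans on urlparse to split s3_storage_url into scheme, host, and an optional port. A small sketch of the stdlib behavior it relies on (the URLs here are illustrative):

    from six.moves.urllib.parse import urlparse

    for url in ('http://127.0.0.1:8080/', 'https://s3.example.com/'):
        parsed = urlparse(url)
        is_secure = (parsed.scheme == 'https')
        # parsed.port is None when the URL carries no explicit port,
        # which is why generate_url() above only adds a Host header
        # when a port is configured
        print(parsed.hostname, parsed.port, is_secure)
    # 127.0.0.1 8080 False
    # s3.example.com None True
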
diff --git a/test/functional/s3api/test_acl.py b/test/functional/s3api/test_acl.py
index 610efe0a9..7a3d4f98d 100644
--- a/test/functional/s3api/test_acl.py
+++ b/test/functional/s3api/test_acl.py
@@ -93,7 +93,7 @@ class TestS3Acl(S3ApiBase):
def test_put_bucket_acl_error(self):
req_headers = {'x-amz-acl': 'public-read'}
- aws_error_conn = Connection(aws_secret_key='invalid')
+ aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
aws_error_conn.make_request('PUT', self.bucket,
headers=req_headers, query='acl')
@@ -110,7 +110,7 @@ class TestS3Acl(S3ApiBase):
self.assertEqual(get_error_code(body), 'AccessDenied')
def test_get_bucket_acl_error(self):
- aws_error_conn = Connection(aws_secret_key='invalid')
+ aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
aws_error_conn.make_request('GET', self.bucket, query='acl')
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@@ -126,7 +126,7 @@ class TestS3Acl(S3ApiBase):
def test_get_object_acl_error(self):
self.conn.make_request('PUT', self.bucket, self.obj)
- aws_error_conn = Connection(aws_secret_key='invalid')
+ aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
aws_error_conn.make_request('GET', self.bucket, self.obj,
query='acl')
diff --git a/test/functional/s3api/test_bucket.py b/test/functional/s3api/test_bucket.py
index 2197ce823..1e427434d 100644
--- a/test/functional/s3api/test_bucket.py
+++ b/test/functional/s3api/test_bucket.py
@@ -42,11 +42,15 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
self.assertIn('ETag', obj)
self.assertIn('Size', obj)
self.assertEqual(obj['StorageClass'], 'STANDARD')
- if expect_owner:
+ if not expect_owner:
+ self.assertNotIn('Owner', obj)
+ elif tf.cluster_info['s3api'].get('s3_acl'):
self.assertEqual(obj['Owner']['ID'], self.access_key)
self.assertEqual(obj['Owner']['DisplayName'], self.access_key)
else:
- self.assertNotIn('Owner', obj)
+ self.assertIn('Owner', obj)
+ self.assertIn('ID', obj['Owner'])
+ self.assertIn('DisplayName', obj['Owner'])
def test_bucket(self):
bucket = 'bucket'
@@ -128,7 +132,7 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
- auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
+ auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.create_bucket(Bucket='bucket')
self.assertEqual(
@@ -201,7 +205,7 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
- auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
+ auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.list_objects(Bucket='bucket')
self.assertEqual(
@@ -388,7 +392,7 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
ctx.exception.response[
'ResponseMetadata']['HTTPHeaders']['content-length'], '0')
- auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
+ auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.head_bucket(Bucket='bucket')
self.assertEqual(
@@ -419,7 +423,7 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
- auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
+ auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.delete_bucket(Bucket='bucket')
self.assertEqual(
diff --git a/test/functional/s3api/test_multi_delete.py b/test/functional/s3api/test_multi_delete.py
index 31e18bb5f..1489d5477 100644
--- a/test/functional/s3api/test_multi_delete.py
+++ b/test/functional/s3api/test_multi_delete.py
@@ -134,7 +134,7 @@ class TestS3ApiMultiDelete(S3ApiBase):
content_md5 = calculate_md5(xml)
query = 'delete'
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('POST', bucket, body=xml,
headers={
diff --git a/test/functional/s3api/test_multi_upload.py b/test/functional/s3api/test_multi_upload.py
index de61551e0..c2e1c0f93 100644
--- a/test/functional/s3api/test_multi_upload.py
+++ b/test/functional/s3api/test_multi_upload.py
@@ -304,9 +304,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.assertTrue(lines[0].startswith(b'<?xml'), body)
self.assertTrue(lines[0].endswith(b'?>'), body)
elem = fromstring(body, 'CompleteMultipartUploadResult')
- # TODO: use tf.config value
self.assertEqual(
- 'http://%s:%s/bucket/obj1' % (self.conn.host, self.conn.port),
+ '%s/bucket/obj1' % tf.config['s3_storage_url'].rstrip('/'),
elem.find('Location').text)
self.assertEqual(elem.find('Bucket').text, bucket)
self.assertEqual(elem.find('Key').text, key)
@@ -428,7 +427,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.conn.make_request('PUT', bucket)
query = 'uploads'
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('POST', bucket, key, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@@ -442,7 +441,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.conn.make_request('PUT', bucket)
query = 'uploads'
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('GET', bucket, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@@ -462,7 +461,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
upload_id = elem.find('UploadId').text
query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('PUT', bucket, key, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@@ -500,7 +499,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
upload_id = elem.find('UploadId').text
query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('PUT', bucket, key,
headers={
@@ -541,7 +540,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
upload_id = elem.find('UploadId').text
query = 'uploadId=%s' % upload_id
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('GET', bucket, key, query=query)
@@ -568,7 +567,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self._upload_part(bucket, key, upload_id)
query = 'uploadId=%s' % upload_id
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('DELETE', bucket, key, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@@ -612,7 +611,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.assertEqual(get_error_code(body), 'EntityTooSmall')
# invalid credentials
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('POST', bucket, keys[0], body=xml,
query=query)
@@ -881,6 +880,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.assertEqual(headers['content-length'], '0')
def test_object_multi_upload_part_copy_version(self):
+ if 'object_versioning' not in tf.cluster_info:
+ self.skipTest('Object Versioning not enabled')
bucket = 'bucket'
keys = ['obj1']
uploads = []
diff --git a/test/functional/s3api/test_object.py b/test/functional/s3api/test_object.py
index 5b518eaa8..8079c157b 100644
--- a/test/functional/s3api/test_object.py
+++ b/test/functional/s3api/test_object.py
@@ -147,7 +147,7 @@ class TestS3ApiObject(S3ApiBase):
self.assertCommonResponseHeaders(headers)
def test_put_object_error(self):
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('PUT', self.bucket, 'object')
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@@ -166,7 +166,7 @@ class TestS3ApiObject(S3ApiBase):
dst_obj = 'dst_object'
headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('PUT', dst_bucket, dst_obj, headers)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@@ -197,7 +197,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('GET', self.bucket, obj)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@@ -216,7 +216,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('HEAD', self.bucket, obj)
self.assertEqual(status, 403)
@@ -239,7 +239,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('DELETE', self.bucket, obj)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
diff --git a/test/functional/s3api/test_service.py b/test/functional/s3api/test_service.py
index 3eb75dff5..77779cba0 100644
--- a/test/functional/s3api/test_service.py
+++ b/test/functional/s3api/test_service.py
@@ -69,7 +69,7 @@ class TestS3ApiService(S3ApiBase):
self.assertTrue(b.find('CreationDate') is not None)
def test_service_error_signature_not_match(self):
- auth_error_conn = Connection(aws_secret_key='invalid')
+ auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = auth_error_conn.make_request('GET')
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
self.assertEqual(headers['content-type'], 'application/xml')
diff --git a/test/functional/test_object.py b/test/functional/test_object.py
index dbc72acef..6145d4a98 100644
--- a/test/functional/test_object.py
+++ b/test/functional/test_object.py
@@ -1726,7 +1726,7 @@ class TestObject(unittest.TestCase):
if 'etag_quoter' not in tf.cluster_info:
raise SkipTest("etag-quoter middleware is not enabled")
- def do_head(expect_quoted=False):
+ def do_head(expect_quoted=None):
def head(url, token, parsed, conn):
conn.request('HEAD', '%s/%s/%s' % (
parsed.path, self.container, self.obj), '',
@@ -1736,6 +1736,11 @@ class TestObject(unittest.TestCase):
resp = retry(head)
resp.read()
self.assertEqual(resp.status, 200)
+
+ if expect_quoted is None:
+ expect_quoted = tf.cluster_info.get('etag_quoter', {}).get(
+ 'enable_by_default', False)
+
expected_etag = hashlib.md5(b'test').hexdigest()
if expect_quoted:
expected_etag = '"%s"' % expected_etag
@@ -1771,7 +1776,7 @@ class TestObject(unittest.TestCase):
post_container('')
do_head(expect_quoted=True)
post_container('f')
- do_head()
+ do_head(expect_quoted=False)
finally:
# Don't leave a dirty account
post_account('')
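As a quick worked example of what do_head() now checks (values computed here, not taken from the patch): the MD5 of b'test' is 098f6bcd4621d373cade4e832627b4f6, so with etag-quoter active the ETag header arrives RFC-compliant (quoted), while posting the container flag 'f' flips it back to the bare form:

    import hashlib

    etag = hashlib.md5(b'test').hexdigest()
    assert etag == '098f6bcd4621d373cade4e832627b4f6'
    quoted = '"%s"' % etag  # what do_head(expect_quoted=True) looks for
    bare = etag             # what do_head(expect_quoted=False) looks for
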
diff --git a/test/functional/test_object_versioning.py b/test/functional/test_object_versioning.py
index ebfca68f6..d7db187c0 100644
--- a/test/functional/test_object_versioning.py
+++ b/test/functional/test_object_versioning.py
@@ -26,6 +26,7 @@ from six.moves.urllib.parse import quote, unquote
import test.functional as tf
+from swift.common.swob import normalize_etag
from swift.common.utils import MD5_OF_EMPTY_STRING, config_true_value
from swift.common.middleware.versioned_writes.object_versioning import \
DELETE_MARKER_CONTENT_TYPE
@@ -331,6 +332,80 @@ class TestObjectVersioning(TestObjectVersioningBase):
# listing, though, we'll only ever have the two entries.
self.assertTotalVersions(container, 2)
+ def test_get_if_match(self):
+ body = b'data'
+ oname = Utils.create_name()
+ obj = self.env.unversioned_container.file(oname)
+ resp = obj.write(body, return_resp=True)
+ etag = resp.getheader('etag')
+ self.assertEqual(md5(body).hexdigest(), normalize_etag(etag))
+
+ # un-versioned object is cool with if-match
+ self.assertEqual(body, obj.read(hdrs={'if-match': etag}))
+ with self.assertRaises(ResponseError) as cm:
+ obj.read(hdrs={'if-match': 'not-the-etag'})
+ self.assertEqual(412, cm.exception.status)
+
+ v_obj = self.env.container.file(oname)
+ resp = v_obj.write(body, return_resp=True)
+ self.assertEqual(resp.getheader('etag'), etag)
+
+ # versioned object works with if-match, too
+ self.assertEqual(body, v_obj.read(hdrs={
+ 'if-match': normalize_etag(etag)}))
+ # works quoted, too
+ self.assertEqual(body, v_obj.read(hdrs={
+ 'if-match': '"%s"' % normalize_etag(etag)}))
+ with self.assertRaises(ResponseError) as cm:
+ v_obj.read(hdrs={'if-match': 'not-the-etag'})
+ self.assertEqual(412, cm.exception.status)
+
+ def test_container_acls(self):
+ if tf.skip3:
+ raise SkipTest('Username3 not set')
+
+ obj = self.env.container.file(Utils.create_name())
+ resp = obj.write(b"data", return_resp=True)
+ version_id = resp.getheader('x-object-version-id')
+ self.assertIsNotNone(version_id)
+
+ with self.assertRaises(ResponseError) as cm:
+ obj.read(hdrs={'X-Auth-Token': self.env.conn3.storage_token})
+ self.assertEqual(403, cm.exception.status)
+
+ # Container ACLs work more or less like they always have
+ self.env.container.update_metadata(
+ hdrs={'X-Container-Read': self.env.conn3.user_acl})
+ self.assertEqual(b"data", obj.read(hdrs={
+ 'X-Auth-Token': self.env.conn3.storage_token}))
+
+ # But the version-specific GET still requires a swift owner
+ with self.assertRaises(ResponseError) as cm:
+ obj.read(hdrs={'X-Auth-Token': self.env.conn3.storage_token},
+ parms={'version-id': version_id})
+ self.assertEqual(403, cm.exception.status)
+
+ # If it's pointing to a symlink that points elsewhere, that still needs
+ # to be authed
+ tgt_name = Utils.create_name()
+ self.env.unversioned_container.file(tgt_name).write(b'link')
+ sym_tgt_header = quote(unquote('%s/%s' % (
+ self.env.unversioned_container.name, tgt_name)))
+ obj.write(hdrs={'X-Symlink-Target': sym_tgt_header})
+
+ # So, user1's good...
+ self.assertEqual(b'link', obj.read())
+ # ...but user3 can't
+ with self.assertRaises(ResponseError) as cm:
+ obj.read(hdrs={'X-Auth-Token': self.env.conn3.storage_token})
+ self.assertEqual(403, cm.exception.status)
+
+ # unless we add the acl to the unversioned_container
+ self.env.unversioned_container.update_metadata(
+ hdrs={'X-Container-Read': self.env.conn3.user_acl})
+ self.assertEqual(b'link', obj.read(
+ hdrs={'X-Auth-Token': self.env.conn3.storage_token}))
+
def _test_overwriting_setup(self, obj_name=None):
# sanity
container = self.env.container
@@ -919,13 +994,13 @@ class TestObjectVersioning(TestObjectVersioningBase):
'Content-Type': 'text/jibberish32'
}, return_resp=True)
v1_version_id = resp.getheader('x-object-version-id')
- v1_etag = resp.getheader('etag')
+ v1_etag = normalize_etag(resp.getheader('etag'))
resp = obj.write(b'version2', hdrs={
'Content-Type': 'text/jibberish33'
}, return_resp=True)
v2_version_id = resp.getheader('x-object-version-id')
- v2_etag = resp.getheader('etag')
+ v2_etag = normalize_etag(resp.getheader('etag'))
# sanity
self.assertEqual(b'version2', obj.read())
@@ -992,7 +1067,7 @@ class TestObjectVersioning(TestObjectVersioningBase):
self.assertEqual(b'version1', obj.read())
obj_info = obj.info()
self.assertEqual('text/jibberish32', obj_info['content_type'])
- self.assertEqual(v1_etag, obj_info['etag'])
+ self.assertEqual(v1_etag, normalize_etag(obj_info['etag']))
def test_delete_with_version_api_old_object(self):
versioned_obj_name = Utils.create_name()
@@ -2308,7 +2383,7 @@ class TestSloWithVersioning(TestObjectVersioningBase):
expected = {
'bytes': file_info['content_length'],
'content_type': 'application/octet-stream',
- 'hash': manifest_info['etag'],
+ 'hash': normalize_etag(manifest_info['etag']),
'name': 'my-slo-manifest',
'slo_etag': file_info['etag'],
'version_symlink': True,
@@ -2340,7 +2415,7 @@ class TestSloWithVersioning(TestObjectVersioningBase):
expected = {
'bytes': file_info['content_length'],
'content_type': 'application/octet-stream',
- 'hash': manifest_info['etag'],
+ 'hash': normalize_etag(manifest_info['etag']),
'name': 'my-slo-manifest',
'slo_etag': file_info['etag'],
'version_symlink': True,
@@ -2688,16 +2763,11 @@ class TestVersioningContainerTempurl(TestObjectVersioningBase):
obj.write(b"version2")
# get v2 object (reading from versions container)
- # cross container tempurl does not work for container tempurl key
- try:
- obj.read(parms=get_parms, cfg={'no_auth_token': True})
- except ResponseError as e:
- self.assertEqual(e.status, 401)
- else:
- self.fail('request did not error')
- try:
- obj.info(parms=get_parms, cfg={'no_auth_token': True})
- except ResponseError as e:
- self.assertEqual(e.status, 401)
- else:
- self.fail('request did not error')
+ # versioning symlink allows us to bypass the normal
+ # container-tempurl-key scoping
+ contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
+ self.assert_status([200])
+ self.assertEqual(contents, b"version2")
+ # HEAD works, too
+ obj.info(parms=get_parms, cfg={'no_auth_token': True})
+ self.assert_status([200])
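Many of the hunks below switch comparisons to normalize_etag() from swift.common.swob, which lets quoted and unquoted ETags compare equal by stripping one pair of surrounding double-quotes. A simplified sketch of that helper, not the exact upstream implementation:

    def normalize_etag(tag):
        # strip a single pair of surrounding double-quotes, if present
        if tag and tag.startswith('"') and tag.endswith('"'):
            return tag[1:-1]
        return tag

    assert normalize_etag('"d41d8cd98f00b204e9800998ecf8427e"') == \
        'd41d8cd98f00b204e9800998ecf8427e'
    assert normalize_etag('d41d8cd98f00b204e9800998ecf8427e') == \
        'd41d8cd98f00b204e9800998ecf8427e'
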
diff --git a/test/functional/test_slo.py b/test/functional/test_slo.py
index c055f7bbd..8003a2d70 100644
--- a/test/functional/test_slo.py
+++ b/test/functional/test_slo.py
@@ -23,6 +23,8 @@ from copy import deepcopy
import six
+from swift.common.swob import normalize_etag
+
import test.functional as tf
from test.functional import cluster_info, SkipTest
from test.functional.tests import Utils, Base, Base2, BaseEnv
@@ -299,8 +301,14 @@ class TestSlo(Base):
# a POST.
file_item.initialize(parms={'multipart-manifest': 'get'})
manifest_etag = file_item.etag
- self.assertFalse(manifest_etag.startswith('"'))
- self.assertFalse(manifest_etag.endswith('"'))
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ self.assertTrue(manifest_etag.startswith('"'))
+ self.assertTrue(manifest_etag.endswith('"'))
+ # ...but in the listing, it'll be stripped
+ manifest_etag = manifest_etag[1:-1]
+ else:
+ self.assertFalse(manifest_etag.startswith('"'))
+ self.assertFalse(manifest_etag.endswith('"'))
file_item.initialize()
slo_etag = file_item.etag
@@ -715,6 +723,8 @@ class TestSlo(Base):
source_contents = source.read(parms={'multipart-manifest': 'get'})
source_json = json.loads(source_contents)
manifest_etag = hashlib.md5(source_contents).hexdigest()
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ manifest_etag = '"%s"' % manifest_etag
self.assertEqual(manifest_etag, source.etag)
source.initialize()
@@ -752,14 +762,14 @@ class TestSlo(Base):
actual = names['manifest-abcde']
self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
self.assertEqual('application/octet-stream', actual['content_type'])
- self.assertEqual(manifest_etag, actual['hash'])
+ self.assertEqual(normalize_etag(manifest_etag), actual['hash'])
self.assertEqual(slo_etag, actual['slo_etag'])
self.assertIn('copied-abcde-manifest-only', names)
actual = names['copied-abcde-manifest-only']
self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
self.assertEqual('application/octet-stream', actual['content_type'])
- self.assertEqual(manifest_etag, actual['hash'])
+ self.assertEqual(normalize_etag(manifest_etag), actual['hash'])
self.assertEqual(slo_etag, actual['slo_etag'])
# Test copy manifest including data segments
@@ -789,6 +799,8 @@ class TestSlo(Base):
source_contents = source.read(parms={'multipart-manifest': 'get'})
source_json = json.loads(source_contents)
manifest_etag = hashlib.md5(source_contents).hexdigest()
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ manifest_etag = '"%s"' % manifest_etag
self.assertEqual(manifest_etag, source.etag)
source.initialize()
@@ -831,14 +843,14 @@ class TestSlo(Base):
self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
self.assertEqual('application/octet-stream', actual['content_type'])
# the container listing should have the etag of the manifest contents
- self.assertEqual(manifest_etag, actual['hash'])
+ self.assertEqual(normalize_etag(manifest_etag), actual['hash'])
self.assertEqual(slo_etag, actual['slo_etag'])
self.assertIn('copied-abcde-manifest-only', names)
actual = names['copied-abcde-manifest-only']
self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
self.assertEqual('image/jpeg', actual['content_type'])
- self.assertEqual(manifest_etag, actual['hash'])
+ self.assertEqual(normalize_etag(manifest_etag), actual['hash'])
self.assertEqual(slo_etag, actual['slo_etag'])
def test_slo_copy_the_manifest_account(self):
@@ -1098,12 +1110,7 @@ class TestSlo(Base):
manifest = self.env.container.file("manifest-db")
got_body = manifest.read(parms={'multipart-manifest': 'get',
'format': 'raw'})
- body_md5 = hashlib.md5(got_body).hexdigest()
- headers = dict(
- (h.lower(), v)
- for h, v in manifest.conn.response.getheaders())
- self.assertIn('etag', headers)
- self.assertEqual(headers['etag'], body_md5)
+ self.assert_etag(hashlib.md5(got_body).hexdigest())
# raw format should have the actual manifest object content-type
self.assertEqual('application/octet-stream', manifest.content_type)
diff --git a/test/functional/test_symlink.py b/test/functional/test_symlink.py
index 5cd66d510..1b6ec820f 100755
--- a/test/functional/test_symlink.py
+++ b/test/functional/test_symlink.py
@@ -25,6 +25,7 @@ from six.moves import urllib
from uuid import uuid4
from swift.common.http import is_success
+from swift.common.swob import normalize_etag
from swift.common.utils import json, MD5_OF_EMPTY_STRING
from swift.common.middleware.slo import SloGetContext
from test.functional import check_response, retry, requires_acls, \
@@ -1135,7 +1136,7 @@ class TestSymlink(Base):
etag=self.env.tgt_etag)
# overwrite tgt object
- old_tgt_etag = self.env.tgt_etag
+ old_tgt_etag = normalize_etag(self.env.tgt_etag)
self.env._create_tgt_object(body='updated target body')
# sanity
@@ -1380,7 +1381,7 @@ class TestSymlink(Base):
object_list[0]['symlink_path'])
obj_info = object_list[0]
self.assertIn('symlink_etag', obj_info)
- self.assertEqual(self.env.tgt_etag,
+ self.assertEqual(normalize_etag(self.env.tgt_etag),
obj_info['symlink_etag'])
self.assertEqual(int(self.env.tgt_length),
obj_info['symlink_bytes'])
@@ -1550,7 +1551,7 @@ class TestSymlinkSlo(Base):
'symlink_path': '/v1/%s/%s/manifest-abcde' % (
self.account_name, self.env.container2.name),
'symlink_bytes': 4 * 2 ** 20 + 1,
- 'symlink_etag': manifest_etag,
+ 'symlink_etag': normalize_etag(manifest_etag),
})
def test_static_link_target_slo_manifest_wrong_etag(self):
@@ -1740,7 +1741,11 @@ class TestSymlinkToSloSegments(Base):
self.assertEqual(1024 * 1024, f_dict['bytes'])
self.assertEqual('application/octet-stream',
f_dict['content_type'])
- self.assertEqual(manifest_etag, f_dict['hash'])
+ if tf.cluster_info.get('etag_quoter', {}).get(
+ 'enable_by_default'):
+ self.assertEqual(manifest_etag, '"%s"' % f_dict['hash'])
+ else:
+ self.assertEqual(manifest_etag, f_dict['hash'])
self.assertEqual(slo_etag, f_dict['slo_etag'])
break
else:
@@ -1759,7 +1764,11 @@ class TestSymlinkToSloSegments(Base):
self.assertEqual(1024 * 1024, f_dict['bytes'])
self.assertEqual(file_item.content_type,
f_dict['content_type'])
- self.assertEqual(manifest_etag, f_dict['hash'])
+ if tf.cluster_info.get('etag_quoter', {}).get(
+ 'enable_by_default'):
+ self.assertEqual(manifest_etag, '"%s"' % f_dict['hash'])
+ else:
+ self.assertEqual(manifest_etag, f_dict['hash'])
self.assertEqual(slo_etag, f_dict['slo_etag'])
break
else:
@@ -1778,7 +1787,11 @@ class TestSymlinkToSloSegments(Base):
self.assertEqual(1024 * 1024, f_dict['bytes'])
self.assertEqual(file_item.content_type,
f_dict['content_type'])
- self.assertEqual(manifest_etag, f_dict['hash'])
+ if tf.cluster_info.get('etag_quoter', {}).get(
+ 'enable_by_default'):
+ self.assertEqual(manifest_etag, '"%s"' % f_dict['hash'])
+ else:
+ self.assertEqual(manifest_etag, f_dict['hash'])
self.assertEqual(slo_etag, f_dict['slo_etag'])
break
else:
@@ -1811,6 +1824,8 @@ class TestSymlinkToSloSegments(Base):
source_contents = source.read(parms={'multipart-manifest': 'get'})
source_json = json.loads(source_contents)
manifest_etag = hashlib.md5(source_contents).hexdigest()
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ manifest_etag = '"%s"' % manifest_etag
source.initialize()
slo_etag = source.etag
@@ -1857,14 +1872,20 @@ class TestSymlinkToSloSegments(Base):
actual = names['manifest-linkto-ab']
self.assertEqual(2 * 1024 * 1024, actual['bytes'])
self.assertEqual('application/octet-stream', actual['content_type'])
- self.assertEqual(manifest_etag, actual['hash'])
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ self.assertEqual(manifest_etag, '"%s"' % actual['hash'])
+ else:
+ self.assertEqual(manifest_etag, actual['hash'])
self.assertEqual(slo_etag, actual['slo_etag'])
self.assertIn('copied-ab-manifest-only', names)
actual = names['copied-ab-manifest-only']
self.assertEqual(2 * 1024 * 1024, actual['bytes'])
self.assertEqual('application/octet-stream', actual['content_type'])
- self.assertEqual(manifest_etag, actual['hash'])
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ self.assertEqual(manifest_etag, '"%s"' % actual['hash'])
+ else:
+ self.assertEqual(manifest_etag, actual['hash'])
self.assertEqual(slo_etag, actual['slo_etag'])
@@ -2000,13 +2021,13 @@ class TestSymlinkTargetObjectComparison(Base):
else:
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
hdrs = {'If-Match': 'bogus'}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(412)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
def testIfMatchMultipleEtags(self):
for file_item in self.env.files:
@@ -2022,13 +2043,13 @@ class TestSymlinkTargetObjectComparison(Base):
else:
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
hdrs = {'If-Match': '"bogus1", "bogus2", "bogus3"'}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(412)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
def testIfNoneMatch(self):
for file_item in self.env.files:
@@ -2044,13 +2065,13 @@ class TestSymlinkTargetObjectComparison(Base):
else:
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
hdrs = {'If-None-Match': md5}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(304)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assert_header('accept-ranges', 'bytes')
def testIfNoneMatchMultipleEtags(self):
@@ -2067,14 +2088,14 @@ class TestSymlinkTargetObjectComparison(Base):
else:
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
hdrs = {'If-None-Match':
'"bogus1", "bogus2", "%s"' % md5}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(304)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assert_header('accept-ranges', 'bytes')
def testIfModifiedSince(self):
@@ -2091,19 +2112,19 @@ class TestSymlinkTargetObjectComparison(Base):
else:
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assertTrue(file_symlink.info(hdrs=hdrs, parms=self.env.parms))
hdrs = {'If-Modified-Since': self.env.time_new}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(304)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assert_header('accept-ranges', 'bytes')
self.assertRaises(ResponseError, file_symlink.info, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(304)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assert_header('accept-ranges', 'bytes')
def testIfUnmodifiedSince(self):
@@ -2120,18 +2141,18 @@ class TestSymlinkTargetObjectComparison(Base):
else:
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assertTrue(file_symlink.info(hdrs=hdrs, parms=self.env.parms))
hdrs = {'If-Unmodified-Since': self.env.time_old_f2}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(412)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assertRaises(ResponseError, file_symlink.info, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(412)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
def testIfMatchAndUnmodified(self):
for file_item in self.env.files:
@@ -2148,21 +2169,21 @@ class TestSymlinkTargetObjectComparison(Base):
else:
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
hdrs = {'If-Match': 'bogus',
'If-Unmodified-Since': self.env.time_new}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(412)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
hdrs = {'If-Match': md5,
'If-Unmodified-Since': self.env.time_old_f3}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(412)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
def testLastModified(self):
file_item = self.env.container.file(Utils.create_name())
@@ -2186,7 +2207,7 @@ class TestSymlinkTargetObjectComparison(Base):
hdrs = {'If-Modified-Since': last_modified}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs)
self.assert_status(304)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assert_header('accept-ranges', 'bytes')
hdrs = {'If-Unmodified-Since': last_modified}
@@ -2227,20 +2248,20 @@ class TestSymlinkComparison(TestSymlinkTargetObjectComparison):
body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
hdrs = {'If-Modified-Since': last_modified}
self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
parms=self.env.parms)
self.assert_status(304)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
self.assert_header('accept-ranges', 'bytes')
hdrs = {'If-Unmodified-Since': last_modified}
body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
self.assertEqual(b'', body)
self.assert_status(200)
- self.assert_header('etag', md5)
+ self.assert_etag(md5)
class TestSymlinkAccountTempurl(Base):
diff --git a/test/functional/test_versioned_writes.py b/test/functional/test_versioned_writes.py
index d58da88e6..7521825f2 100644
--- a/test/functional/test_versioned_writes.py
+++ b/test/functional/test_versioned_writes.py
@@ -684,7 +684,11 @@ class TestObjectVersioning(Base):
prev_version = versions_container.file(versioned_obj_name)
prev_version_info = prev_version.info(parms={'symlink': 'get'})
self.assertEqual(b"aaaaa", prev_version.read())
- self.assertEqual(MD5_OF_EMPTY_STRING, prev_version_info['etag'])
+ symlink_etag = prev_version_info['etag']
+ if symlink_etag.startswith('"') and symlink_etag.endswith('"') and \
+ symlink_etag[1:-1]:
+ symlink_etag = symlink_etag[1:-1]
+ self.assertEqual(MD5_OF_EMPTY_STRING, symlink_etag)
self.assertEqual(sym_tgt_header,
prev_version_info['x_symlink_target'])
return symlink, tgt_a
@@ -698,7 +702,10 @@ class TestObjectVersioning(Base):
symlink.delete()
sym_info = symlink.info(parms={'symlink': 'get'})
self.assertEqual(b"aaaaa", symlink.read())
- self.assertEqual(MD5_OF_EMPTY_STRING, sym_info['etag'])
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ self.assertEqual('"%s"' % MD5_OF_EMPTY_STRING, sym_info['etag'])
+ else:
+ self.assertEqual(MD5_OF_EMPTY_STRING, sym_info['etag'])
self.assertEqual(
quote(unquote('%s/%s' % (self.env.container.name, target.name))),
sym_info['x_symlink_target'])
diff --git a/test/functional/tests.py b/test/functional/tests.py
index dc149cffa..51b4c663f 100644
--- a/test/functional/tests.py
+++ b/test/functional/tests.py
@@ -27,6 +27,7 @@ import uuid
from copy import deepcopy
import eventlet
from swift.common.http import is_success, is_client_error
+from swift.common.swob import normalize_etag
from email.utils import parsedate
if six.PY2:
@@ -131,6 +132,13 @@ class Base(unittest.TestCase):
'Expected header name %r not found in response.' % header_name)
self.assertEqual(expected_value, actual_value)
+ def assert_etag(self, unquoted_value):
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ expected = '"%s"' % unquoted_value
+ else:
+ expected = unquoted_value
+ self.assert_header('etag', expected)
+
class Base2(object):
@classmethod
@@ -874,7 +882,11 @@ class TestContainer(Base):
for actual in file_list:
name = actual['name']
self.assertIn(name, expected)
- self.assertEqual(expected[name]['etag'], actual['hash'])
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ self.assertEqual(expected[name]['etag'],
+ '"%s"' % actual['hash'])
+ else:
+ self.assertEqual(expected[name]['etag'], actual['hash'])
self.assertEqual(
expected[name]['content_type'], actual['content_type'])
self.assertEqual(
@@ -1365,6 +1377,8 @@ class TestFile(Base):
'x-delete-at': mock.ANY,
'x-trans-id': mock.ANY,
'x-openstack-request-id': mock.ANY}
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ expected_headers['etag'] = '"%s"' % expected_headers['etag']
unexpected_headers = ['connection', 'x-delete-after']
do_test(put_headers, {}, expected_headers, unexpected_headers)
@@ -1420,7 +1434,7 @@ class TestFile(Base):
self.fail('Failed to find %s in listing' % dest_filename)
self.assertEqual(file_item.size, obj['bytes'])
- self.assertEqual(file_item.etag, obj['hash'])
+ self.assertEqual(normalize_etag(file_item.etag), obj['hash'])
self.assertEqual(file_item.content_type, obj['content_type'])
file_copy = cont.file(dest_filename)
@@ -1470,7 +1484,7 @@ class TestFile(Base):
self.fail('Failed to find %s in listing' % dest_filename)
self.assertEqual(file_item.size, obj['bytes'])
- self.assertEqual(file_item.etag, obj['hash'])
+ self.assertEqual(normalize_etag(file_item.etag), obj['hash'])
self.assertEqual(
'application/test-changed', obj['content_type'])
@@ -1505,7 +1519,7 @@ class TestFile(Base):
self.fail('Failed to find %s in listing' % dest_filename)
self.assertEqual(file_item.size, obj['bytes'])
- self.assertEqual(file_item.etag, obj['hash'])
+ self.assertEqual(normalize_etag(file_item.etag), obj['hash'])
self.assertEqual(
'application/test-updated', obj['content_type'])
@@ -2088,7 +2102,7 @@ class TestFile(Base):
self.assertEqual(file_item.read(hdrs=hdrs), data[-i:])
self.assert_header('content-range', 'bytes %d-%d/%d' % (
file_length - i, file_length - 1, file_length))
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
self.assert_header('accept-ranges', 'bytes')
range_string = 'bytes=%d-' % (i)
@@ -2102,7 +2116,7 @@ class TestFile(Base):
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(416)
self.assert_header('content-range', 'bytes */%d' % file_length)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
self.assert_header('accept-ranges', 'bytes')
range_string = 'bytes=%d-%d' % (file_length - 1000, file_length + 2000)
@@ -2416,14 +2430,16 @@ class TestFile(Base):
file_item.content_type = content_type
file_item.write_random(self.env.file_size)
- md5 = file_item.md5
+ expected_etag = file_item.md5
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ expected_etag = '"%s"' % expected_etag
file_item = self.env.container.file(file_name)
info = file_item.info()
self.assert_status(200)
self.assertEqual(info['content_length'], self.env.file_size)
- self.assertEqual(info['etag'], md5)
+ self.assertEqual(info['etag'], expected_etag)
self.assertEqual(info['content_type'], content_type)
self.assertIn('last_modified', info)
@@ -2612,14 +2628,7 @@ class TestFile(Base):
file_item = self.env.container.file(Utils.create_name())
data = io.BytesIO(file_item.write_random(512))
- etag = File.compute_md5sum(data)
-
- headers = dict((h.lower(), v)
- for h, v in self.env.conn.response.getheaders())
- self.assertIn('etag', headers.keys())
-
- header_etag = headers['etag'].strip('"')
- self.assertEqual(etag, header_etag)
+ self.assert_etag(File.compute_md5sum(data))
def testChunkedPut(self):
if (tf.web_front_end == 'apache2'):
@@ -2645,7 +2654,7 @@ class TestFile(Base):
self.assertEqual(data, file_item.read())
info = file_item.info()
- self.assertEqual(etag, info['etag'])
+ self.assertEqual(normalize_etag(info['etag']), etag)
def test_POST(self):
# verify consistency between object and container listing metadata
@@ -2670,7 +2679,10 @@ class TestFile(Base):
self.fail('Failed to find file %r in listing' % file_name)
self.assertEqual(1024, f_dict['bytes'])
self.assertEqual('text/foobar', f_dict['content_type'])
- self.assertEqual(etag, f_dict['hash'])
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ self.assertEqual(etag, '"%s"' % f_dict['hash'])
+ else:
+ self.assertEqual(etag, f_dict['hash'])
put_last_modified = f_dict['last_modified']
# now POST updated content-type to each file
@@ -2697,7 +2709,10 @@ class TestFile(Base):
self.assertEqual(1024, f_dict['bytes'])
self.assertEqual('image/foobarbaz', f_dict['content_type'])
self.assertLess(put_last_modified, f_dict['last_modified'])
- self.assertEqual(etag, f_dict['hash'])
+ if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
+ self.assertEqual(etag, '"%s"' % f_dict['hash'])
+ else:
+ self.assertEqual(etag, f_dict['hash'])
class TestFileUTF8(Base2, TestFile):
@@ -2742,7 +2757,7 @@ class TestFileComparison(Base):
hdrs = {'If-Match': 'bogus'}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
def testIfMatchMultipleEtags(self):
for file_item in self.env.files:
@@ -2752,7 +2767,7 @@ class TestFileComparison(Base):
hdrs = {'If-Match': '"bogus1", "bogus2", "bogus3"'}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
def testIfNoneMatch(self):
for file_item in self.env.files:
@@ -2762,7 +2777,7 @@ class TestFileComparison(Base):
hdrs = {'If-None-Match': file_item.md5}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(304)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
self.assert_header('accept-ranges', 'bytes')
def testIfNoneMatchMultipleEtags(self):
@@ -2774,7 +2789,7 @@ class TestFileComparison(Base):
'"bogus1", "bogus2", "%s"' % file_item.md5}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(304)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
self.assert_header('accept-ranges', 'bytes')
def testIfModifiedSince(self):
@@ -2786,11 +2801,11 @@ class TestFileComparison(Base):
hdrs = {'If-Modified-Since': self.env.time_new}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(304)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
self.assert_header('accept-ranges', 'bytes')
self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
self.assert_status(304)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
self.assert_header('accept-ranges', 'bytes')
def testIfUnmodifiedSince(self):
@@ -2802,10 +2817,10 @@ class TestFileComparison(Base):
hdrs = {'If-Unmodified-Since': self.env.time_old_f2}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
self.assert_status(412)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
def testIfMatchAndUnmodified(self):
for file_item in self.env.files:
@@ -2817,13 +2832,13 @@ class TestFileComparison(Base):
'If-Unmodified-Since': self.env.time_new}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
hdrs = {'If-Match': file_item.md5,
'If-Unmodified-Since': self.env.time_old_f3}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(412)
- self.assert_header('etag', file_item.md5)
+ self.assert_etag(file_item.md5)
def testLastModified(self):
file_name = Utils.create_name()
@@ -2844,7 +2859,7 @@ class TestFileComparison(Base):
hdrs = {'If-Modified-Since': last_modified}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(304)
- self.assert_header('etag', etag)
+ self.assert_etag(etag)
self.assert_header('accept-ranges', 'bytes')
hdrs = {'If-Unmodified-Since': last_modified}
diff --git a/test/probe/test_object_async_update.py b/test/probe/test_object_async_update.py
index 44a25eb14..00aefcccb 100644
--- a/test/probe/test_object_async_update.py
+++ b/test/probe/test_object_async_update.py
@@ -23,6 +23,7 @@ from swiftclient.exceptions import ClientException
from swift.common import direct_client
from swift.common.manager import Manager
+from swift.common.swob import normalize_etag
from test.probe.common import kill_nonprimary_server, \
kill_server, ReplProbeTest, start_server, ECProbeTest
@@ -210,7 +211,7 @@ class TestUpdateOverridesEC(ECProbeTest):
self.assertEqual(1, len(listing))
self.assertEqual('o1', listing[0]['name'])
self.assertEqual(len(content), listing[0]['bytes'])
- self.assertEqual(meta['etag'], listing[0]['hash'])
+ self.assertEqual(normalize_etag(meta['etag']), listing[0]['hash'])
self.assertEqual('test/ctype', listing[0]['content_type'])
def test_update_during_POST_only(self):
@@ -261,7 +262,7 @@ class TestUpdateOverridesEC(ECProbeTest):
self.assertEqual(1, len(listing))
self.assertEqual('o1', listing[0]['name'])
self.assertEqual(len(content), listing[0]['bytes'])
- self.assertEqual(meta['etag'], listing[0]['hash'])
+ self.assertEqual(normalize_etag(meta['etag']), listing[0]['hash'])
self.assertEqual('test/ctype', listing[0]['content_type'])
# Run the object-updaters to send the async pending from the PUT
@@ -328,7 +329,7 @@ class TestUpdateOverridesEC(ECProbeTest):
self.assertEqual(1, len(listing))
self.assertEqual('o1', listing[0]['name'])
self.assertEqual(len(content), listing[0]['bytes'])
- self.assertEqual(meta['etag'], listing[0]['hash'])
+ self.assertEqual(normalize_etag(meta['etag']), listing[0]['hash'])
self.assertEqual('test/ctype', listing[0]['content_type'])
diff --git a/test/probe/test_object_metadata_replication.py b/test/probe/test_object_metadata_replication.py
index c5cb93f98..4b5b0d448 100644
--- a/test/probe/test_object_metadata_replication.py
+++ b/test/probe/test_object_metadata_replication.py
@@ -22,6 +22,7 @@ import uuid
from swift.common.direct_client import direct_get_suffix_hashes
from swift.common.exceptions import DiskFileDeleted
from swift.common.internal_client import UnexpectedResponse
+from swift.common.swob import normalize_etag
from swift.container.backend import ContainerBroker
from swift.common import utils
from swiftclient import client
@@ -129,7 +130,7 @@ class Test(ReplProbeTest):
def _assert_object_metadata_matches_listing(self, listing, metadata):
self.assertEqual(listing['bytes'], int(metadata['content-length']))
- self.assertEqual(listing['hash'], metadata['etag'])
+ self.assertEqual(listing['hash'], normalize_etag(metadata['etag']))
self.assertEqual(listing['content_type'], metadata['content-type'])
modified = Timestamp(metadata['x-timestamp']).isoformat
self.assertEqual(listing['last_modified'], modified)
diff --git a/test/sample.conf b/test/sample.conf
index 6b6a7ac4d..b54398835 100644
--- a/test/sample.conf
+++ b/test/sample.conf
@@ -24,6 +24,10 @@ auth_uri = http://127.0.0.1:8080/auth/v1.0
#auth_version = 3
#auth_uri = http://localhost:5000/v3/
+# Used by s3api functional tests, which don't contact auth directly
+#s3_storage_url = http://127.0.0.1:8080/
+#s3_region = us-east-1
+
# Primary functional test account (needs admin access to the account)
account = test
username = tester
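The functional tests added in this patch consume these settings roughly as follows; s3_storage_url is required for the s3api tests, while s3_region falls back to us-east-1:

    import test.functional as tf

    endpoint_url = tf.config['s3_storage_url']
    region = tf.config.get('s3_region', 'us-east-1')
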
diff --git a/test/unit/account/test_backend.py b/test/unit/account/test_backend.py
index 15422bd13..3556a1ad0 100644
--- a/test/unit/account/test_backend.py
+++ b/test/unit/account/test_backend.py
@@ -180,6 +180,72 @@ class TestAccountBroker(unittest.TestCase):
broker.delete_db(Timestamp.now().internal)
broker.reclaim(Timestamp.now().internal, time())
+ def test_batched_reclaim(self):
+ num_of_containers = 60
+ container_specs = []
+ now = time()
+ top_of_the_minute = now - (now % 60)
+ c = itertools.cycle([True, False])
+ for m, is_deleted in six.moves.zip(range(num_of_containers), c):
+ offset = top_of_the_minute - (m * 60)
+ container_specs.append((Timestamp(offset), is_deleted))
+ random.seed(now)
+ random.shuffle(container_specs)
+ policy_indexes = list(p.idx for p in POLICIES)
+ broker = AccountBroker(':memory:', account='test_account')
+ broker.initialize(Timestamp('1').internal)
+ for i, container_spec in enumerate(container_specs):
+ # with container12 before container2 and shuffled ts.internal we
+ # shouldn't be able to accidentally rely on any implicit ordering
+ name = 'container%s' % i
+ pidx = random.choice(policy_indexes)
+ ts, is_deleted = container_spec
+ if is_deleted:
+ broker.put_container(name, 0, ts.internal, 0, 0, pidx)
+ else:
+ broker.put_container(name, ts.internal, 0, 0, 0, pidx)
+
+ def count_reclaimable(conn, reclaim_age):
+ return conn.execute(
+ "SELECT count(*) FROM container "
+ "WHERE deleted = 1 AND delete_timestamp < ?", (reclaim_age,)
+ ).fetchone()[0]
+
+ # This is intended to divide the set of timestamps exactly in half
+ # regardless of the value of now
+ reclaim_age = top_of_the_minute + 1 - (num_of_containers / 2 * 60)
+ with broker.get() as conn:
+ self.assertEqual(count_reclaimable(conn, reclaim_age),
+ num_of_containers / 4)
+
+ orig__reclaim = broker._reclaim
+ trace = []
+
+ def tracing_reclaim(conn, age_timestamp, marker):
+ trace.append((age_timestamp, marker,
+ count_reclaimable(conn, age_timestamp)))
+ return orig__reclaim(conn, age_timestamp, marker)
+
+ with mock.patch.object(broker, '_reclaim', new=tracing_reclaim), \
+ mock.patch('swift.common.db.RECLAIM_PAGE_SIZE', 10):
+ broker.reclaim(reclaim_age, reclaim_age)
+ with broker.get() as conn:
+ self.assertEqual(count_reclaimable(conn, reclaim_age), 0)
+ self.assertEqual(3, len(trace), trace)
+ self.assertEqual([age for age, marker, reclaimable in trace],
+ [reclaim_age] * 3)
+ # markers are in-order
+ self.assertLess(trace[0][1], trace[1][1])
+ self.assertLess(trace[1][1], trace[2][1])
+ # reclaimable count gradually decreases
+ # generally, count1 > count2 > count3, but because of the randomness
+ # we may occasionally have count1 == count2 or count2 == count3
+ self.assertGreaterEqual(trace[0][2], trace[1][2])
+ self.assertGreaterEqual(trace[1][2], trace[2][2])
+ # technically, this might happen occasionally, but *really* rarely
+ self.assertTrue(trace[0][2] > trace[1][2] or
+ trace[1][2] > trace[2][2])
+
def test_delete_db_status(self):
start = next(self.ts)
broker = AccountBroker(':memory:', account='a')
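
The new test pins down reclaim paging: _reclaim is invoked repeatedly with
the same age_timestamp and an advancing marker, deleting at most
RECLAIM_PAGE_SIZE reclaimable rows per pass so a large backlog never turns
into one giant transaction. A rough sketch of that pattern, assuming a
container table shaped like the broker's (the broker pages on row name;
ROWID is used here only to keep the sketch short):

    import sqlite3

    RECLAIM_PAGE_SIZE = 10  # matches the value mocked in the test

    def reclaim_in_pages(conn, age_timestamp):
        marker = -1
        while True:
            rows = conn.execute(
                "SELECT ROWID FROM container"
                " WHERE deleted = 1 AND delete_timestamp < ? AND ROWID > ?"
                " ORDER BY ROWID LIMIT ?",
                (age_timestamp, marker, RECLAIM_PAGE_SIZE)).fetchall()
            if not rows:
                break  # nothing reclaimable past the marker
            marker = rows[-1][0]
            conn.execute(
                "DELETE FROM container WHERE ROWID IN (%s)"
                % ','.join('?' * len(rows)), [r[0] for r in rows])
            conn.commit()
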
diff --git a/test/unit/cli/test_info.py b/test/unit/cli/test_info.py
index ffe10dc69..f1a77d0b1 100644
--- a/test/unit/cli/test_info.py
+++ b/test/unit/cli/test_info.py
@@ -497,13 +497,10 @@ Shard Ranges (3):
print_item_locations(None, partition=part, policy_name='zero',
swift_dir=self.testdir)
exp_part_msg = 'Partition\t%s' % part
- exp_acct_msg = 'Account \tNone'
- exp_cont_msg = 'Container\tNone'
- exp_obj_msg = 'Object \tNone'
self.assertIn(exp_part_msg, out.getvalue())
- self.assertIn(exp_acct_msg, out.getvalue())
- self.assertIn(exp_cont_msg, out.getvalue())
- self.assertIn(exp_obj_msg, out.getvalue())
+ self.assertNotIn('Account', out.getvalue())
+ self.assertNotIn('Container', out.getvalue())
+ self.assertNotIn('Object', out.getvalue())
def test_print_item_locations_dashed_ring_name_partition(self):
out = StringIO()
@@ -513,13 +510,10 @@ Shard Ranges (3):
ring_name='foo-bar', partition=part,
swift_dir=self.testdir)
exp_part_msg = 'Partition\t%s' % part
- exp_acct_msg = 'Account \tNone'
- exp_cont_msg = 'Container\tNone'
- exp_obj_msg = 'Object \tNone'
self.assertIn(exp_part_msg, out.getvalue())
- self.assertIn(exp_acct_msg, out.getvalue())
- self.assertIn(exp_cont_msg, out.getvalue())
- self.assertIn(exp_obj_msg, out.getvalue())
+ self.assertNotIn('Account', out.getvalue())
+ self.assertNotIn('Container', out.getvalue())
+ self.assertNotIn('Object', out.getvalue())
def test_print_item_locations_account_with_ring(self):
out = StringIO()
@@ -533,11 +527,9 @@ Shard Ranges (3):
'but ring not named "account"'
self.assertIn(exp_warning, out.getvalue())
exp_acct_msg = 'Account \t%s' % account
- exp_cont_msg = 'Container\tNone'
- exp_obj_msg = 'Object \tNone'
self.assertIn(exp_acct_msg, out.getvalue())
- self.assertIn(exp_cont_msg, out.getvalue())
- self.assertIn(exp_obj_msg, out.getvalue())
+ self.assertNotIn('Container', out.getvalue())
+ self.assertNotIn('Object', out.getvalue())
def test_print_item_locations_account_no_ring(self):
out = StringIO()
@@ -546,11 +538,9 @@ Shard Ranges (3):
print_item_locations(None, account=account,
swift_dir=self.testdir)
exp_acct_msg = 'Account \t%s' % account
- exp_cont_msg = 'Container\tNone'
- exp_obj_msg = 'Object \tNone'
self.assertIn(exp_acct_msg, out.getvalue())
- self.assertIn(exp_cont_msg, out.getvalue())
- self.assertIn(exp_obj_msg, out.getvalue())
+ self.assertNotIn('Container', out.getvalue())
+ self.assertNotIn('Object', out.getvalue())
def test_print_item_locations_account_container_ring(self):
out = StringIO()
@@ -562,10 +552,9 @@ Shard Ranges (3):
container=container)
exp_acct_msg = 'Account \t%s' % account
exp_cont_msg = 'Container\t%s' % container
- exp_obj_msg = 'Object \tNone'
self.assertIn(exp_acct_msg, out.getvalue())
self.assertIn(exp_cont_msg, out.getvalue())
- self.assertIn(exp_obj_msg, out.getvalue())
+ self.assertNotIn('Object', out.getvalue())
def test_print_item_locations_account_container_no_ring(self):
out = StringIO()
@@ -576,10 +565,9 @@ Shard Ranges (3):
container=container, swift_dir=self.testdir)
exp_acct_msg = 'Account \t%s' % account
exp_cont_msg = 'Container\t%s' % container
- exp_obj_msg = 'Object \tNone'
self.assertIn(exp_acct_msg, out.getvalue())
self.assertIn(exp_cont_msg, out.getvalue())
- self.assertIn(exp_obj_msg, out.getvalue())
+ self.assertNotIn('Object', out.getvalue())
def test_print_item_locations_account_container_object_ring(self):
out = StringIO()
@@ -691,59 +679,59 @@ Shard Ranges (3):
def test_parse_get_node_args(self):
# Capture error messages
# (without any parameters)
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = ''
self.assertRaisesMessage(InfoSystemExit,
'Need to specify policy_name or <ring.gz>',
parse_get_node_args, options, args.split())
# a
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'a'
self.assertRaisesMessage(InfoSystemExit,
'Need to specify policy_name or <ring.gz>',
parse_get_node_args, options, args.split())
# a c
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'a c'
self.assertRaisesMessage(InfoSystemExit,
'Need to specify policy_name or <ring.gz>',
parse_get_node_args, options, args.split())
# a c o
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'a c o'
self.assertRaisesMessage(InfoSystemExit,
'Need to specify policy_name or <ring.gz>',
parse_get_node_args, options, args.split())
# a/c
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'a/c'
self.assertRaisesMessage(InfoSystemExit,
'Need to specify policy_name or <ring.gz>',
parse_get_node_args, options, args.split())
# a/c/o
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'a/c/o'
self.assertRaisesMessage(InfoSystemExit,
'Need to specify policy_name or <ring.gz>',
parse_get_node_args, options, args.split())
# account container junk/test.ring.gz
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'account container junk/test.ring.gz'
self.assertRaisesMessage(InfoSystemExit,
'Need to specify policy_name or <ring.gz>',
parse_get_node_args, options, args.split())
# account container object junk/test.ring.gz
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'account container object junk/test.ring.gz'
self.assertRaisesMessage(InfoSystemExit,
'Need to specify policy_name or <ring.gz>',
parse_get_node_args, options, args.split())
# object.ring.gz(without any arguments i.e. a c o)
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'object.ring.gz'
self.assertRaisesMessage(InfoSystemExit,
'Ring file does not exist',
@@ -751,55 +739,55 @@ Shard Ranges (3):
# Valid policy
# -P zero
- options = Namespace(policy_name='zero', partition=None)
+ options = Namespace(policy_name='zero', partition=None, quoted=None)
args = ''
self.assertRaisesMessage(InfoSystemExit,
'No target specified',
parse_get_node_args, options, args.split())
# -P one a/c/o
- options = Namespace(policy_name='one', partition=None)
+ options = Namespace(policy_name='one', partition=None, quoted=None)
args = 'a/c/o'
ring_path, args = parse_get_node_args(options, args.split())
self.assertIsNone(ring_path)
self.assertEqual(args, ['a', 'c', 'o'])
# -P one account container photos/cat.jpg
- options = Namespace(policy_name='one', partition=None)
+ options = Namespace(policy_name='one', partition=None, quoted=None)
args = 'account container photos/cat.jpg'
ring_path, args = parse_get_node_args(options, args.split())
self.assertIsNone(ring_path)
self.assertEqual(args, ['account', 'container', 'photos/cat.jpg'])
# -P one account/container/photos/cat.jpg
- options = Namespace(policy_name='one', partition=None)
+ options = Namespace(policy_name='one', partition=None, quoted=None)
args = 'account/container/photos/cat.jpg'
ring_path, args = parse_get_node_args(options, args.split())
self.assertIsNone(ring_path)
self.assertEqual(args, ['account', 'container', 'photos/cat.jpg'])
# -P one account/container/junk/test.ring.gz(object endswith 'ring.gz')
- options = Namespace(policy_name='one', partition=None)
+ options = Namespace(policy_name='one', partition=None, quoted=None)
args = 'account/container/junk/test.ring.gz'
ring_path, args = parse_get_node_args(options, args.split())
self.assertIsNone(ring_path)
self.assertEqual(args, ['account', 'container', 'junk/test.ring.gz'])
# -P two a c o hooya
- options = Namespace(policy_name='two', partition=None)
+ options = Namespace(policy_name='two', partition=None, quoted=None)
args = 'a c o hooya'
self.assertRaisesMessage(InfoSystemExit,
'Invalid arguments',
parse_get_node_args, options, args.split())
# -P zero -p 1
- options = Namespace(policy_name='zero', partition='1')
+ options = Namespace(policy_name='zero', partition='1', quoted=None)
args = ''
ring_path, args = parse_get_node_args(options, args.split())
self.assertIsNone(ring_path)
self.assertFalse(args)
# -P one -p 1 a/c/o
- options = Namespace(policy_name='one', partition='1')
+ options = Namespace(policy_name='one', partition='1', quoted=None)
args = 'a/c/o'
ring_path, args = parse_get_node_args(options, args.split())
self.assertIsNone(ring_path)
self.assertEqual(args, ['a', 'c', 'o'])
# -P two -p 1 a c o hooya
- options = Namespace(policy_name='two', partition='1')
+ options = Namespace(policy_name='two', partition='1', quoted=None)
args = 'a c o hooya'
self.assertRaisesMessage(InfoSystemExit,
'Invalid arguments',
@@ -853,7 +841,7 @@ Shard Ranges (3):
# Mock tests
# /etc/swift/object.ring.gz(without any arguments i.e. a c o)
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = '/etc/swift/object.ring.gz'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -863,7 +851,7 @@ Shard Ranges (3):
parse_get_node_args, options, args.split())
# Similar ring_path and arguments
# /etc/swift/object.ring.gz /etc/swift/object.ring.gz
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = '/etc/swift/object.ring.gz /etc/swift/object.ring.gz'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -871,7 +859,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, '/etc/swift/object.ring.gz')
self.assertEqual(args, ['etc', 'swift', 'object.ring.gz'])
# /etc/swift/object.ring.gz a/c/etc/swift/object.ring.gz
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = '/etc/swift/object.ring.gz a/c/etc/swift/object.ring.gz'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -880,7 +868,7 @@ Shard Ranges (3):
self.assertEqual(args, ['a', 'c', 'etc/swift/object.ring.gz'])
# Invalid path as mentioned in BUG#1539275
# /etc/swift/object.tar.gz account container object
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = '/etc/swift/object.tar.gz account container object'
self.assertRaisesMessage(
InfoSystemExit,
@@ -888,7 +876,7 @@ Shard Ranges (3):
parse_get_node_args, options, args.split())
# object.ring.gz a/
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a/'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -896,7 +884,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a'])
# object.ring.gz a/c
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a/c'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -904,7 +892,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c'])
# object.ring.gz a/c/o
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a/c/o'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -912,7 +900,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c', 'o'])
# object.ring.gz a/c/o/junk/test.ring.gz
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a/c/o/junk/test.ring.gz'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -920,7 +908,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c', 'o/junk/test.ring.gz'])
# object.ring.gz a
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -928,7 +916,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a'])
# object.ring.gz a c
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a c'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -936,7 +924,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c'])
# object.ring.gz a c o
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a c o'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -944,7 +932,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c', 'o'])
# object.ring.gz a c o blah blah
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a c o blah blah'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -953,7 +941,7 @@ Shard Ranges (3):
'Invalid arguments',
parse_get_node_args, options, args.split())
# object.ring.gz a/c/o/blah/blah
- options = Namespace(policy_name=None)
+ options = Namespace(policy_name=None, quoted=None)
args = 'object.ring.gz a/c/o/blah/blah'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -962,7 +950,7 @@ Shard Ranges (3):
self.assertEqual(args, ['a', 'c', 'o/blah/blah'])
# object.ring.gz -p 1
- options = Namespace(policy_name=None, partition='1')
+ options = Namespace(policy_name=None, partition='1', quoted=None)
args = 'object.ring.gz'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -970,7 +958,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertFalse(args)
# object.ring.gz -p 1 a c o
- options = Namespace(policy_name=None, partition='1')
+ options = Namespace(policy_name=None, partition='1', quoted=None)
args = 'object.ring.gz a c o'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -978,7 +966,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c', 'o'])
# object.ring.gz -p 1 a c o forth_arg
- options = Namespace(policy_name=None, partition='1')
+ options = Namespace(policy_name=None, partition='1', quoted=None)
args = 'object.ring.gz a c o forth_arg'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -987,7 +975,7 @@ Shard Ranges (3):
'Invalid arguments',
parse_get_node_args, options, args.split())
# object.ring.gz -p 1 a/c/o
- options = Namespace(policy_name=None, partition='1')
+ options = Namespace(policy_name=None, partition='1', quoted=None)
args = 'object.ring.gz a/c/o'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -995,7 +983,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c', 'o'])
# object.ring.gz -p 1 a/c/junk/test.ring.gz
- options = Namespace(policy_name=None, partition='1')
+ options = Namespace(policy_name=None, partition='1', quoted=None)
args = 'object.ring.gz a/c/junk/test.ring.gz'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -1003,7 +991,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c', 'junk/test.ring.gz'])
# object.ring.gz -p 1 a/c/photos/cat.jpg
- options = Namespace(policy_name=None, partition='1')
+ options = Namespace(policy_name=None, partition='1', quoted=None)
args = 'object.ring.gz a/c/photos/cat.jpg'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -1012,7 +1000,7 @@ Shard Ranges (3):
self.assertEqual(args, ['a', 'c', 'photos/cat.jpg'])
# --all object.ring.gz a
- options = Namespace(all=True, policy_name=None)
+ options = Namespace(all=True, policy_name=None, quoted=None)
args = 'object.ring.gz a'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -1020,7 +1008,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a'])
# --all object.ring.gz a c
- options = Namespace(all=True, policy_name=None)
+ options = Namespace(all=True, policy_name=None, quoted=None)
args = 'object.ring.gz a c'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -1028,7 +1016,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c'])
# --all object.ring.gz a c o
- options = Namespace(all=True, policy_name=None)
+ options = Namespace(all=True, policy_name=None, quoted=None)
args = 'object.ring.gz a c o'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -1036,7 +1024,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['a', 'c', 'o'])
# object.ring.gz account container photos/cat.jpg
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'object.ring.gz account container photos/cat.jpg'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -1044,7 +1032,7 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['account', 'container', 'photos/cat.jpg'])
# object.ring.gz /account/container/photos/cat.jpg
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'object.ring.gz account/container/photos/cat.jpg'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -1053,7 +1041,7 @@ Shard Ranges (3):
self.assertEqual(args, ['account', 'container', 'photos/cat.jpg'])
# Object name ends with 'ring.gz'
# object.ring.gz /account/container/junk/test.ring.gz
- options = Namespace(policy_name=None, partition=None)
+ options = Namespace(policy_name=None, partition=None, quoted=None)
args = 'object.ring.gz account/container/junk/test.ring.gz'
with mock.patch('swift.cli.info.os.path.exists') as exists:
exists.return_value = True
@@ -1061,6 +1049,32 @@ Shard Ranges (3):
self.assertEqual(ring_path, 'object.ring.gz')
self.assertEqual(args, ['account', 'container', 'junk/test.ring.gz'])
+ # Object name has special characters
+ # object.ring.gz /account/container/obj\nwith%0anewline
+ options = Namespace(policy_name=None, partition=None, quoted=None)
+ args = ['object.ring.gz', 'account/container/obj\nwith%0anewline']
+ with mock.patch('swift.cli.info.os.path.exists') as exists:
+ exists.return_value = True
+ ring_path, args = parse_get_node_args(options, args)
+ self.assertEqual(ring_path, 'object.ring.gz')
+ self.assertEqual(args, ['account', 'container', 'obj\nwith%0anewline'])
+
+ options = Namespace(policy_name=None, partition=None, quoted=True)
+ args = ['object.ring.gz', 'account/container/obj\nwith%0anewline']
+ with mock.patch('swift.cli.info.os.path.exists') as exists:
+ exists.return_value = True
+ ring_path, args = parse_get_node_args(options, args)
+ self.assertEqual(ring_path, 'object.ring.gz')
+ self.assertEqual(args, ['account', 'container', 'obj\nwith\nnewline'])
+
+ options = Namespace(policy_name=None, partition=None, quoted=False)
+ args = ['object.ring.gz', 'account/container/obj\nwith%0anewline']
+ with mock.patch('swift.cli.info.os.path.exists') as exists:
+ exists.return_value = True
+ ring_path, args = parse_get_node_args(options, args)
+ self.assertEqual(ring_path, 'object.ring.gz')
+ self.assertEqual(args, ['account', 'container', 'obj\nwith%0anewline'])
+
class TestPrintObj(TestCliInfoBase):
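
The added cases cover the new quoted option of swift-get-nodes: with
quoted=True a %0a in the path is unquoted to a real newline before the
account/container/object split, while quoted=None or False leaves the raw
bytes alone. A sketch of just the unquoting step (the full parsing lives in
swift.cli.info.parse_get_node_args):

    from six.moves.urllib.parse import unquote

    def maybe_unquote(path, quoted):
        # Only unquote on request: a literal '%0a' in an object name must
        # otherwise survive untouched.
        return unquote(path) if quoted else path

    assert maybe_unquote('a/c/obj%0aname', True) == 'a/c/obj\nname'
    assert maybe_unquote('a/c/obj%0aname', False) == 'a/c/obj%0aname'
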
diff --git a/test/unit/cli/test_manage_shard_ranges.py b/test/unit/cli/test_manage_shard_ranges.py
index 65bcd0dd6..7f0aa8857 100644
--- a/test/unit/cli/test_manage_shard_ranges.py
+++ b/test/unit/cli/test_manage_shard_ranges.py
@@ -189,6 +189,7 @@ class TestManageShardRanges(unittest.TestCase):
' "meta_timestamp": "%s",' % now.internal,
' "name": "a/c",',
' "object_count": 0,',
+ ' "reported": 0,',
' "state": "sharding",',
' "state_timestamp": "%s",' % now.internal,
' "timestamp": "%s",' % now.internal,
@@ -230,6 +231,7 @@ class TestManageShardRanges(unittest.TestCase):
' "meta_timestamp": "%s",' % now.internal,
' "name": "a/c",',
' "object_count": 0,',
+ ' "reported": 0,',
' "state": "sharding",',
' "state_timestamp": "%s",' % now.internal,
' "timestamp": "%s",' % now.internal,
diff --git a/test/unit/cli/test_relinker.py b/test/unit/cli/test_relinker.py
index 8daddb13f..571f1c2d7 100644
--- a/test/unit/cli/test_relinker.py
+++ b/test/unit/cli/test_relinker.py
@@ -12,6 +12,9 @@
# limitations under the License.
import binascii
+import errno
+import fcntl
+import json
import os
import shutil
import struct
@@ -30,6 +33,9 @@ from test.unit import FakeLogger, skip_if_no_xattrs, DEFAULT_TEST_EC_TYPE, \
patch_policies
+PART_POWER = 8
+
+
class TestRelinker(unittest.TestCase):
def setUp(self):
skip_if_no_xattrs()
@@ -40,7 +46,7 @@ class TestRelinker(unittest.TestCase):
os.mkdir(self.testdir)
os.mkdir(self.devices)
- self.rb = ring.RingBuilder(8, 6.0, 1)
+ self.rb = ring.RingBuilder(PART_POWER, 6.0, 1)
for i in range(6):
ip = "127.0.0.%s" % i
@@ -55,10 +61,10 @@ class TestRelinker(unittest.TestCase):
os.mkdir(self.objects)
self._hash = utils.hash_path('a/c/o')
digest = binascii.unhexlify(self._hash)
- part = struct.unpack_from('>I', digest)[0] >> 24
+ self.part = struct.unpack_from('>I', digest)[0] >> 24
self.next_part = struct.unpack_from('>I', digest)[0] >> 23
self.objdir = os.path.join(
- self.objects, str(part), self._hash[-3:], self._hash)
+ self.objects, str(self.part), self._hash[-3:], self._hash)
os.makedirs(self.objdir)
self.object_fname = "1278553064.00000.data"
self.objname = os.path.join(self.objdir, self.object_fname)
@@ -97,6 +103,27 @@ class TestRelinker(unittest.TestCase):
stat_new = os.stat(self.expected_file)
self.assertEqual(stat_old.st_ino, stat_new.st_ino)
+ def test_relink_device_filter(self):
+ self.rb.prepare_increase_partition_power()
+ self._save_ring()
+ relinker.relink(self.testdir, self.devices, True,
+ device=self.existing_device)
+
+ self.assertTrue(os.path.isdir(self.expected_dir))
+ self.assertTrue(os.path.isfile(self.expected_file))
+
+ stat_old = os.stat(os.path.join(self.objdir, self.object_fname))
+ stat_new = os.stat(self.expected_file)
+ self.assertEqual(stat_old.st_ino, stat_new.st_ino)
+
+ def test_relink_device_filter_invalid(self):
+ self.rb.prepare_increase_partition_power()
+ self._save_ring()
+ relinker.relink(self.testdir, self.devices, True, device='none')
+
+ self.assertFalse(os.path.isdir(self.expected_dir))
+ self.assertFalse(os.path.isfile(self.expected_file))
+
def _common_test_cleanup(self, relink=True):
# Create a ring that has prev_part_power set
self.rb.prepare_increase_partition_power()
@@ -121,6 +148,187 @@ class TestRelinker(unittest.TestCase):
self.assertFalse(os.path.isfile(
os.path.join(self.objdir, self.object_fname)))
+ def test_cleanup_device_filter(self):
+ self._common_test_cleanup()
+ self.assertEqual(0, relinker.cleanup(self.testdir, self.devices, True,
+ device=self.existing_device))
+
+        # Old object name should be removed; the new one should still exist
+ self.assertTrue(os.path.isdir(self.expected_dir))
+ self.assertTrue(os.path.isfile(self.expected_file))
+ self.assertFalse(os.path.isfile(
+ os.path.join(self.objdir, self.object_fname)))
+
+ def test_cleanup_device_filter_invalid(self):
+ self._common_test_cleanup()
+ self.assertEqual(0, relinker.cleanup(self.testdir, self.devices, True,
+ device='none'))
+
+        # Old object name should still exist, and so should the new one
+ self.assertTrue(os.path.isdir(self.expected_dir))
+ self.assertTrue(os.path.isfile(self.expected_file))
+ self.assertTrue(os.path.isfile(
+ os.path.join(self.objdir, self.object_fname)))
+
+ def test_relink_cleanup(self):
+ state_file = os.path.join(self.devices, self.existing_device,
+ 'relink.objects.json')
+
+ self.rb.prepare_increase_partition_power()
+ self._save_ring()
+ relinker.relink(self.testdir, self.devices, True)
+ with open(state_file, 'rt') as f:
+ self.assertEqual(json.load(f), {str(self.part): [True, False]})
+
+ self.rb.increase_partition_power()
+ self.rb._ring = None # Force builder to reload ring
+ self._save_ring()
+ relinker.cleanup(self.testdir, self.devices, True)
+ with open(state_file, 'rt') as f:
+ self.assertEqual(json.load(f),
+ {str(self.part): [True, True],
+ str(self.next_part): [True, True]})
+
+ def test_devices_filter_filtering(self):
+ # With no filtering, returns all devices
+ devices = relinker.devices_filter(None, "", [self.existing_device])
+ self.assertEqual(set([self.existing_device]), devices)
+
+        # With a matching filter, returns only the matching devices
+ devices = relinker.devices_filter(self.existing_device, "",
+ [self.existing_device, 'sda2'])
+ self.assertEqual(set([self.existing_device]), devices)
+
+        # With a non-matching filter, returns nothing
+ devices = relinker.devices_filter('none', "", [self.existing_device])
+ self.assertEqual(set(), devices)
+
+ def test_hook_pre_post_device_locking(self):
+ locks = [None]
+ device_path = os.path.join(self.devices, self.existing_device)
+ datadir = 'object'
+ lock_file = os.path.join(device_path, '.relink.%s.lock' % datadir)
+
+ # The first run gets the lock
+ relinker.hook_pre_device(locks, {}, datadir, device_path)
+ self.assertNotEqual([None], locks)
+
+ # A following run would block
+ with self.assertRaises(IOError) as raised:
+ with open(lock_file, 'a') as f:
+ fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+ self.assertEqual(errno.EAGAIN, raised.exception.errno)
+
+        # hook_post_device releases the lock and resets the locks list
+ relinker.hook_post_device(locks, "")
+ self.assertEqual([None], locks)
+
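+        # the lock was released above, so taking it again now succeeds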
+ with open(lock_file, 'a') as f:
+ fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+
+ def test_state_file(self):
+ device_path = os.path.join(self.devices, self.existing_device)
+ datadir = 'objects'
+ datadir_path = os.path.join(device_path, datadir)
+ state_file = os.path.join(device_path, 'relink.%s.json' % datadir)
+
+ def call_partition_filter(step, parts):
+ # Partition 312 will be ignored because it must have been created
+ # by the relinker
+ return relinker.partitions_filter(states, step,
+ PART_POWER, PART_POWER + 1,
+ datadir_path, parts)
+
+ # Start relinking
+ states = {}
+
+        # Load the states: at the start of relinking they must be empty
+ locks = [None]
+ relinker.hook_pre_device(locks, states, datadir, device_path)
+ self.assertEqual({}, states)
+ os.close(locks[0]) # Release the lock
+
+ # Partition 312 is ignored because it must have been created with the
+ # next_part_power, so it does not need to be relinked
+ # 96 and 227 are reverse ordered
+        # auditor_status.json is ignored because it's not a partition
+ self.assertEqual(['227', '96'],
+ call_partition_filter(relinker.STEP_RELINK,
+ ['96', '227', '312',
+ 'auditor_status.json']))
+ self.assertEqual(states, {'96': [False, False], '227': [False, False]})
+
+ # Ack partition 96
+ relinker.hook_post_partition(states, relinker.STEP_RELINK,
+ os.path.join(datadir_path, '96'))
+ self.assertEqual(states, {'96': [True, False], '227': [False, False]})
+ with open(state_file, 'rt') as f:
+ self.assertEqual(json.load(f), {'96': [True, False],
+ '227': [False, False]})
+
+ # Restart relinking after only part 96 was done
+ self.assertEqual(['227'],
+ call_partition_filter(relinker.STEP_RELINK,
+ ['96', '227', '312']))
+ self.assertEqual(states, {'96': [True, False], '227': [False, False]})
+
+ # Ack partition 227
+ relinker.hook_post_partition(states, relinker.STEP_RELINK,
+ os.path.join(datadir_path, '227'))
+ self.assertEqual(states, {'96': [True, False], '227': [True, False]})
+ with open(state_file, 'rt') as f:
+ self.assertEqual(json.load(f), {'96': [True, False],
+ '227': [True, False]})
+
+        # If the process restarts, it reloads the state
+ locks = [None]
+ states = {}
+ relinker.hook_pre_device(locks, states, datadir, device_path)
+ self.assertEqual(states, {'96': [True, False], '227': [True, False]})
+ os.close(locks[0]) # Release the lock
+
+ # Start cleanup
+ self.assertEqual(['227', '96'],
+ call_partition_filter(relinker.STEP_CLEANUP,
+ ['96', '227', '312']))
+ # Ack partition 227
+ relinker.hook_post_partition(states, relinker.STEP_CLEANUP,
+ os.path.join(datadir_path, '227'))
+ self.assertEqual(states, {'96': [True, False], '227': [True, True]})
+ with open(state_file, 'rt') as f:
+ self.assertEqual(json.load(f), {'96': [True, False],
+ '227': [True, True]})
+
+ # Restart cleanup after only part 227 was done
+ self.assertEqual(['96'],
+ call_partition_filter(relinker.STEP_CLEANUP,
+ ['96', '227', '312']))
+ self.assertEqual(states, {'96': [True, False], '227': [True, True]})
+
+ # Ack partition 96
+ relinker.hook_post_partition(states, relinker.STEP_CLEANUP,
+ os.path.join(datadir_path, '96'))
+ self.assertEqual(states, {'96': [True, True], '227': [True, True]})
+ with open(state_file, 'rt') as f:
+ self.assertEqual(json.load(f), {'96': [True, True],
+ '227': [True, True]})
+
+ # At the end, the state is still accurate
+ locks = [None]
+ states = {}
+ relinker.hook_pre_device(locks, states, datadir, device_path)
+ self.assertEqual(states, {'96': [True, True], '227': [True, True]})
+ os.close(locks[0]) # Release the lock
+
+ # If the file gets corrupted, restart from scratch
+ with open(state_file, 'wt') as f:
+ f.write('NOT JSON')
+ locks = [None]
+ states = {}
+ relinker.hook_pre_device(locks, states, datadir, device_path)
+ self.assertEqual(states, {})
+ os.close(locks[0]) # Release the lock
+
def test_cleanup_not_yet_relinked(self):
self._common_test_cleanup(relink=False)
self.assertEqual(1, relinker.cleanup(self.testdir, self.devices, True))
@@ -176,3 +384,7 @@ class TestRelinker(unittest.TestCase):
self.assertIn('failed audit and was quarantined',
self.logger.get_lines_for_level('warning')[0])
+
+
+if __name__ == '__main__':
+ unittest.main()
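
Taken together, these tests describe the relinker's resume machinery: each
device carries a relink.<datadir>.json file mapping partition number to a
[relinked, cleaned] pair, hook_post_partition persists progress after every
partition, and partitions_filter drops partitions already acked for the
current step or only creatable under the next partition power. A simplified
sketch of the filtering rule, under the same state shape (illustrative, not
the actual relinker code):

    STEP_RELINK, STEP_CLEANUP = 'relink', 'cleanup'

    def filter_parts(states, step, part_power, parts):
        idx = 0 if step == STEP_RELINK else 1
        todo = []
        for part in parts:
            if not part.isdigit():
                continue  # e.g. auditor status files
            if int(part) >= 2 ** part_power:
                continue  # only creatable under next_part_power
            if not states.setdefault(part, [False, False])[idx]:
                todo.append(part)
        return sorted(todo, key=int, reverse=True)

    states = {}
    assert filter_parts(states, STEP_RELINK, 8,
                        ['96', '227', '312',
                         'auditor_status.json']) == ['227', '96']
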
diff --git a/test/unit/common/middleware/s3api/test_obj.py b/test/unit/common/middleware/s3api/test_obj.py
index 363a1b2cb..3b80cc355 100644
--- a/test/unit/common/middleware/s3api/test_obj.py
+++ b/test/unit/common/middleware/s3api/test_obj.py
@@ -34,7 +34,7 @@ from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, \
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import mktime, S3Timestamp
from swift.common.middleware.versioned_writes.object_versioning import \
- DELETE_MARKER_CONTENT_TYPE
+ DELETE_MARKER_CONTENT_TYPE, SYSMETA_VERSIONS_CONT, SYSMETA_VERSIONS_ENABLED
class TestS3ApiObj(S3ApiTestCase):
@@ -402,6 +402,10 @@ class TestS3ApiObj(S3ApiTestCase):
@s3acl
def test_object_GET_version_id(self):
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
+ {SYSMETA_VERSIONS_CONT: '\x00versions\x00bucket'}, None)
+
# GET current version
req = Request.blank('/bucket/object?versionId=null',
environ={'REQUEST_METHOD': 'GET'},
@@ -452,6 +456,28 @@ class TestS3ApiObj(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '404')
+ @s3acl(versioning_enabled=False)
+ def test_object_GET_with_version_id_but_not_enabled(self):
+ # Version not found
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, {}, None)
+ req = Request.blank('/bucket/object?versionId=A',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '404')
+ elem = fromstring(body, 'Error')
+ self.assertEqual(elem.find('Code').text, 'NoSuchVersion')
+ self.assertEqual(elem.find('Key').text, 'object')
+ self.assertEqual(elem.find('VersionId').text, 'A')
+ expected_calls = []
+ if not self.swift.s3_acl:
+ expected_calls.append(('HEAD', '/v1/AUTH_test/bucket'))
+ # NB: No actual backend GET!
+ self.assertEqual(expected_calls, self.swift.calls)
+
@s3acl
def test_object_PUT_error(self):
code = self._test_method_error('PUT', '/bucket/object',
@@ -1100,6 +1126,9 @@ class TestS3ApiObj(S3ApiTestCase):
def test_object_DELETE_old_version_id(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers, None)
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
+ {SYSMETA_VERSIONS_CONT: '\x00versions\x00bucket'}, None)
resp_headers = {'X-Object-Current-Version-Id': '1574360804.34906'}
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574358170.12293',
@@ -1111,6 +1140,7 @@ class TestS3ApiObj(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual([
+ ('HEAD', '/v1/AUTH_test/bucket'),
('HEAD', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574358170.12293'),
('DELETE', '/v1/AUTH_test/bucket/object'
@@ -1118,6 +1148,11 @@ class TestS3ApiObj(S3ApiTestCase):
], self.swift.calls)
def test_object_DELETE_current_version_id(self):
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {
+ SYSMETA_VERSIONS_CONT: '\x00versions\x00bucket',
+ SYSMETA_VERSIONS_ENABLED: True},
+ None)
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers, None)
resp_headers = {'X-Object-Current-Version-Id': 'null'}
@@ -1142,6 +1177,7 @@ class TestS3ApiObj(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual([
+ ('HEAD', '/v1/AUTH_test/bucket'),
('HEAD', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574358170.12293'),
('DELETE', '/v1/AUTH_test/bucket/object'
@@ -1152,6 +1188,22 @@ class TestS3ApiObj(S3ApiTestCase):
'?version-id=1574341899.21751'),
], self.swift.calls)
+ @s3acl(versioning_enabled=False)
+ def test_object_DELETE_with_version_id_but_not_enabled(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, {}, None)
+ req = Request.blank('/bucket/object?versionId=1574358170.12293',
+ method='DELETE', headers={
+ 'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '204')
+ expected_calls = []
+ if not self.swift.s3_acl:
+ expected_calls.append(('HEAD', '/v1/AUTH_test/bucket'))
+ # NB: No actual backend DELETE!
+ self.assertEqual(expected_calls, self.swift.calls)
+
def test_object_DELETE_version_id_not_implemented(self):
req = Request.blank('/bucket/object?versionId=1574358170.12293',
method='DELETE', headers={
@@ -1164,6 +1216,11 @@ class TestS3ApiObj(S3ApiTestCase):
self.assertEqual(status.split()[0], '501', body)
def test_object_DELETE_current_version_id_is_delete_marker(self):
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {
+ SYSMETA_VERSIONS_CONT: '\x00versions\x00bucket',
+ SYSMETA_VERSIONS_ENABLED: True},
+ None)
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers, None)
resp_headers = {'X-Object-Current-Version-Id': 'null'}
@@ -1184,6 +1241,7 @@ class TestS3ApiObj(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual([
+ ('HEAD', '/v1/AUTH_test/bucket'),
('HEAD', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574358170.12293'),
('DELETE', '/v1/AUTH_test/bucket/object'
@@ -1193,6 +1251,11 @@ class TestS3ApiObj(S3ApiTestCase):
], self.swift.calls)
def test_object_DELETE_current_version_id_is_missing(self):
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {
+ SYSMETA_VERSIONS_CONT: '\x00versions\x00bucket',
+ SYSMETA_VERSIONS_ENABLED: True},
+ None)
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers, None)
resp_headers = {'X-Object-Current-Version-Id': 'null'}
@@ -1223,6 +1286,7 @@ class TestS3ApiObj(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual([
+ ('HEAD', '/v1/AUTH_test/bucket'),
('HEAD', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574358170.12293'),
('DELETE', '/v1/AUTH_test/bucket/object'
@@ -1236,6 +1300,11 @@ class TestS3ApiObj(S3ApiTestCase):
], self.swift.calls)
def test_object_DELETE_current_version_id_GET_error(self):
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {
+ SYSMETA_VERSIONS_CONT: '\x00versions\x00bucket',
+ SYSMETA_VERSIONS_ENABLED: True},
+ None)
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers, None)
resp_headers = {'X-Object-Current-Version-Id': 'null'}
@@ -1251,6 +1320,7 @@ class TestS3ApiObj(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '500')
self.assertEqual([
+ ('HEAD', '/v1/AUTH_test/bucket'),
('HEAD', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574358170.12293'),
('DELETE', '/v1/AUTH_test/bucket/object'
@@ -1260,6 +1330,11 @@ class TestS3ApiObj(S3ApiTestCase):
], self.swift.calls)
def test_object_DELETE_current_version_id_PUT_error(self):
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {
+ SYSMETA_VERSIONS_CONT: '\x00versions\x00bucket',
+ SYSMETA_VERSIONS_ENABLED: True},
+ None)
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers, None)
resp_headers = {'X-Object-Current-Version-Id': 'null'}
@@ -1283,6 +1358,7 @@ class TestS3ApiObj(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '500')
self.assertEqual([
+ ('HEAD', '/v1/AUTH_test/bucket'),
('HEAD', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574358170.12293'),
('DELETE', '/v1/AUTH_test/bucket/object'
@@ -1325,10 +1401,13 @@ class TestS3ApiObj(S3ApiTestCase):
'X-Object-Version-Id': '1574701081.61553'}
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
swob.HTTPNoContent, resp_headers, None)
- self.swift.register('HEAD', '/v1/AUTH_test/bucket',
- swob.HTTPNoContent, {
- 'X-Container-Sysmeta-Versions-Enabled': True},
- None)
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {
+ SYSMETA_VERSIONS_CONT: '\x00versions\x00bucket',
+ SYSMETA_VERSIONS_ENABLED: True},
+ None)
+ self.swift.register('HEAD', '/v1/AUTH_test/\x00versions\x00bucket',
+ swob.HTTPNoContent, {}, None)
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPNotFound, self.response_headers, None)
req = Request.blank('/bucket/object?versionId=1574701081.61553',
@@ -1338,10 +1417,12 @@ class TestS3ApiObj(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual([
+ ('HEAD', '/v1/AUTH_test/bucket'),
('HEAD', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574701081.61553'),
('HEAD', '/v1/AUTH_test'),
('HEAD', '/v1/AUTH_test/bucket'),
+ ('HEAD', '/v1/AUTH_test/\x00versions\x00bucket'),
('DELETE', '/v1/AUTH_test/bucket/object'
'?symlink=get&version-id=1574701081.61553'),
], self.swift.calls)
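
Several of these DELETE tests now register a bucket HEAD whose sysmeta names
the versions container, because s3api consults the container info before
issuing any version-id request to the backend. The '\x00versions\x00bucket'
value follows Swift's reserved-name convention: components prefixed and
joined with NUL bytes, which ordinary client requests cannot spell. The real
helper is swift.common.request_helpers.get_reserved_name; minus its input
validation, its core is essentially:

    RESERVED = '\x00'

    def reserved_name(*parts):
        # '\x00' cannot appear in a client-supplied name, so joining with
        # it yields names only internal requests can reach.
        return RESERVED + RESERVED.join(parts)

    assert reserved_name('versions', 'bucket') == '\x00versions\x00bucket'
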
diff --git a/test/unit/common/middleware/s3api/test_s3_acl.py b/test/unit/common/middleware/s3api/test_s3_acl.py
index 48f543916..da0472a66 100644
--- a/test/unit/common/middleware/s3api/test_s3_acl.py
+++ b/test/unit/common/middleware/s3api/test_s3_acl.py
@@ -34,13 +34,16 @@ from test.unit.common.middleware.s3api import FakeSwift
XMLNS_XSI = 'http://www.w3.org/2001/XMLSchema-instance'
-def s3acl(func=None, s3acl_only=False):
+def s3acl(func=None, s3acl_only=False, versioning_enabled=True):
"""
NOTE: s3acl decorator needs an instance of s3api testing framework.
(i.e. An instance for first argument is necessary)
"""
if func is None:
- return functools.partial(s3acl, s3acl_only=s3acl_only)
+ return functools.partial(
+ s3acl,
+ s3acl_only=s3acl_only,
+ versioning_enabled=versioning_enabled)
@functools.wraps(func)
def s3acl_decorator(*args, **kwargs):
@@ -57,9 +60,14 @@ def s3acl(func=None, s3acl_only=False):
# @patch(xxx)
# def test_xxxx(self)
+ fake_info = {'status': 204}
+ if versioning_enabled:
+ fake_info['sysmeta'] = {
+ 'versions-container': '\x00versions\x00bucket',
+ }
+
with patch('swift.common.middleware.s3api.s3request.'
- 'get_container_info',
- return_value={'status': 204}):
+ 'get_container_info', return_value=fake_info):
func(*args, **kwargs)
except AssertionError:
# Make traceback message to clarify the assertion
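
With the extra keyword, a test can opt out of the decorator's default of a
versioning-enabled bucket, as the new GET and DELETE cases above do:

    # Usage: fake container info without the versions-container sysmeta.
    @s3acl(versioning_enabled=False)
    def test_object_GET_with_version_id_but_not_enabled(self):
        ...
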
diff --git a/test/unit/common/middleware/test_ratelimit.py b/test/unit/common/middleware/test_ratelimit.py
index 2070af2d6..10042d351 100644
--- a/test/unit/common/middleware/test_ratelimit.py
+++ b/test/unit/common/middleware/test_ratelimit.py
@@ -72,12 +72,20 @@ class FakeMemcache(object):
class FakeApp(object):
+ skip_handled_check = False
def __call__(self, env, start_response):
+ assert self.skip_handled_check or env.get('swift.ratelimit.handled')
start_response('200 OK', [])
return [b'Some Content']
+class FakeReq(object):
+ def __init__(self, method, env=None):
+ self.method = method
+ self.environ = env or {}
+
+
def start_response(*args):
pass
@@ -160,36 +168,29 @@ class TestRateLimit(unittest.TestCase):
{'object_count': '5'}
the_app = ratelimit.filter_factory(conf_dict)(FakeApp())
the_app.memcache_client = fake_memcache
- req = lambda: None
- req.environ = {'swift.cache': fake_memcache, 'PATH_INFO': '/v1/a/c/o'}
+ environ = {'swift.cache': fake_memcache, 'PATH_INFO': '/v1/a/c/o'}
with mock.patch('swift.common.middleware.ratelimit.get_account_info',
lambda *args, **kwargs: {}):
- req.method = 'DELETE'
self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
- req, 'a', None, None)), 0)
- req.method = 'PUT'
+ FakeReq('DELETE', environ), 'a', None, None)), 0)
self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
- req, 'a', 'c', None)), 1)
- req.method = 'DELETE'
+ FakeReq('PUT', environ), 'a', 'c', None)), 1)
self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
- req, 'a', 'c', None)), 1)
- req.method = 'GET'
+ FakeReq('DELETE', environ), 'a', 'c', None)), 1)
self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
- req, 'a', 'c', 'o')), 0)
- req.method = 'PUT'
+ FakeReq('GET', environ), 'a', 'c', 'o')), 0)
self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
- req, 'a', 'c', 'o')), 1)
+ FakeReq('PUT', environ), 'a', 'c', 'o')), 1)
- req.method = 'PUT'
self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
- req, 'a', 'c', None, global_ratelimit=10)), 2)
+ FakeReq('PUT', environ), 'a', 'c', None, global_ratelimit=10)), 2)
self.assertEqual(the_app.get_ratelimitable_key_tuples(
- req, 'a', 'c', None, global_ratelimit=10)[1],
+ FakeReq('PUT', environ), 'a', 'c', None, global_ratelimit=10)[1],
('ratelimit/global-write/a', 10))
- req.method = 'PUT'
self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
- req, 'a', 'c', None, global_ratelimit='notafloat')), 1)
+ FakeReq('PUT', environ), 'a', 'c', None,
+ global_ratelimit='notafloat')), 1)
def test_memcached_container_info_dict(self):
mdict = headers_to_container_info({'x-container-object-count': '45'})
@@ -204,9 +205,8 @@ class TestRateLimit(unittest.TestCase):
{'container_size': 5}
the_app = ratelimit.filter_factory(conf_dict)(FakeApp())
the_app.memcache_client = fake_memcache
- req = lambda: None
- req.method = 'PUT'
- req.environ = {'PATH_INFO': '/v1/a/c/o', 'swift.cache': fake_memcache}
+ req = FakeReq('PUT', {
+ 'PATH_INFO': '/v1/a/c/o', 'swift.cache': fake_memcache})
with mock.patch('swift.common.middleware.ratelimit.get_account_info',
lambda *args, **kwargs: {}):
tuples = the_app.get_ratelimitable_key_tuples(req, 'a', 'c', 'o')
@@ -227,8 +227,8 @@ class TestRateLimit(unittest.TestCase):
req = Request.blank('/v1/a%s/c' % meth)
req.method = meth
req.environ['swift.cache'] = FakeMemcache()
- make_app_call = lambda: self.test_ratelimit(req.environ,
- start_response)
+ make_app_call = lambda: self.test_ratelimit(
+ req.environ.copy(), start_response)
begin = time.time()
self._run(make_app_call, num_calls, current_rate,
check_time=bool(exp_time))
@@ -244,7 +244,7 @@ class TestRateLimit(unittest.TestCase):
req.method = 'PUT'
req.environ['swift.cache'] = FakeMemcache()
req.environ['swift.cache'].init_incr_return_neg = True
- make_app_call = lambda: self.test_ratelimit(req.environ,
+ make_app_call = lambda: self.test_ratelimit(req.environ.copy(),
start_response)
begin = time.time()
with mock.patch('swift.common.middleware.ratelimit.get_account_info',
@@ -260,15 +260,15 @@ class TestRateLimit(unittest.TestCase):
'account_whitelist': 'a',
'account_blacklist': 'b'}
self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
- req = Request.blank('/')
with mock.patch.object(self.test_ratelimit,
'memcache_client', FakeMemcache()):
self.assertEqual(
- self.test_ratelimit.handle_ratelimit(req, 'a', 'c', 'o'),
+ self.test_ratelimit.handle_ratelimit(
+ Request.blank('/'), 'a', 'c', 'o'),
None)
self.assertEqual(
self.test_ratelimit.handle_ratelimit(
- req, 'b', 'c', 'o').status_int,
+ Request.blank('/'), 'b', 'c', 'o').status_int,
497)
def test_ratelimit_whitelist_sysmeta(self):
@@ -331,7 +331,7 @@ class TestRateLimit(unittest.TestCase):
self.parent = parent
def run(self):
- self.result = self.parent.test_ratelimit(req.environ,
+ self.result = self.parent.test_ratelimit(req.environ.copy(),
start_response)
def get_fake_ratelimit(*args, **kwargs):
@@ -370,18 +370,17 @@ class TestRateLimit(unittest.TestCase):
# simulates 4 requests coming in at same time, then sleeping
with mock.patch('swift.common.middleware.ratelimit.get_account_info',
lambda *args, **kwargs: {}):
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Slow down')
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Slow down')
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
- print(repr(r))
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Some Content')
def test_ratelimit_max_rate_double_container(self):
@@ -404,17 +403,17 @@ class TestRateLimit(unittest.TestCase):
# simulates 4 requests coming in at same time, then sleeping
with mock.patch('swift.common.middleware.ratelimit.get_account_info',
lambda *args, **kwargs: {}):
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Slow down')
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Slow down')
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Some Content')
def test_ratelimit_max_rate_double_container_listing(self):
@@ -437,17 +436,17 @@ class TestRateLimit(unittest.TestCase):
lambda *args, **kwargs: {}):
time_override = [0, 0, 0, 0, None]
# simulates 4 requests coming in at same time, then sleeping
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Slow down')
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Slow down')
mock_sleep(.1)
- r = self.test_ratelimit(req.environ, start_response)
+ r = self.test_ratelimit(req.environ.copy(), start_response)
self.assertEqual(r[0], b'Some Content')
mc = self.test_ratelimit.memcache_client
try:
@@ -466,9 +465,6 @@ class TestRateLimit(unittest.TestCase):
the_app = ratelimit.filter_factory(conf_dict)(FakeApp())
the_app.memcache_client = fake_memcache
- req = lambda: None
- req.method = 'PUT'
- req.environ = {}
class rate_caller(threading.Thread):
@@ -478,8 +474,8 @@ class TestRateLimit(unittest.TestCase):
def run(self):
for j in range(num_calls):
- self.result = the_app.handle_ratelimit(req, self.myname,
- 'c', None)
+ self.result = the_app.handle_ratelimit(
+ FakeReq('PUT'), self.myname, 'c', None)
with mock.patch('swift.common.middleware.ratelimit.get_account_info',
lambda *args, **kwargs: {}):
@@ -541,7 +537,9 @@ class TestRateLimit(unittest.TestCase):
current_rate = 13
num_calls = 5
conf_dict = {'account_ratelimit': current_rate}
- self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
+ fake_app = FakeApp()
+ fake_app.skip_handled_check = True
+ self.test_ratelimit = ratelimit.filter_factory(conf_dict)(fake_app)
req = Request.blank('/v1/a')
req.environ['swift.cache'] = None
make_app_call = lambda: self.test_ratelimit(req.environ,
@@ -551,6 +549,24 @@ class TestRateLimit(unittest.TestCase):
time_took = time.time() - begin
self.assertEqual(round(time_took, 1), 0) # no memcache, no limiting
+ def test_already_handled(self):
+ current_rate = 13
+ num_calls = 5
+ conf_dict = {'container_listing_ratelimit_0': current_rate}
+ self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
+ fake_cache = FakeMemcache()
+ fake_cache.set(
+ get_cache_key('a', 'c'),
+ {'object_count': 1})
+ req = Request.blank('/v1/a/c', environ={'swift.cache': fake_cache})
+ req.environ['swift.ratelimit.handled'] = True
+ make_app_call = lambda: self.test_ratelimit(req.environ,
+ start_response)
+ begin = time.time()
+ self._run(make_app_call, num_calls, current_rate, check_time=False)
+ time_took = time.time() - begin
+        self.assertEqual(round(time_took, 1), 0)  # already handled, no limiting
+
def test_restarting_memcache(self):
current_rate = 2
num_calls = 5
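
FakeApp's new assertion and the test_already_handled case pin down the
swift.ratelimit.handled contract: the middleware stamps the environment the
first time it handles a request and passes straight through when the stamp
is already present, so re-entrant pipelines are limited at most once. A
minimal sketch of the guard (illustrative; the real logic lives in
swift.common.middleware.ratelimit):

    class RateLimitOnce(object):
        def __init__(self, app):
            self.app = app

        def __call__(self, env, start_response):
            if env.get('swift.ratelimit.handled'):
                # Some earlier hop already applied limiting; skip it.
                return self.app(env, start_response)
            env['swift.ratelimit.handled'] = True
            # ... rate-limit accounting would go here ...
            return self.app(env, start_response)
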
diff --git a/test/unit/common/middleware/test_symlink.py b/test/unit/common/middleware/test_symlink.py
index 01875ba48..a5e6cbab4 100644
--- a/test/unit/common/middleware/test_symlink.py
+++ b/test/unit/common/middleware/test_symlink.py
@@ -24,6 +24,7 @@ from swift.common import swob
from swift.common.middleware import symlink, copy, versioned_writes, \
listing_formats
from swift.common.swob import Request
+from swift.common.request_helpers import get_reserved_name
from swift.common.utils import MD5_OF_EMPTY_STRING, get_swift_info
from test.unit.common.middleware.helpers import FakeSwift
from test.unit.common.middleware.test_versioned_writes import FakeCache
@@ -618,6 +619,55 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
self.assertEqual(req_headers, calls[1].headers)
self.assertFalse(calls[2:])
+ def test_get_symlink_to_reserved_object(self):
+ cont = get_reserved_name('versioned')
+ obj = get_reserved_name('symlink', '9999998765.99999')
+ symlink_target = "%s/%s" % (cont, obj)
+ version_path = '/v1/a/%s' % symlink_target
+ self.app.register('GET', '/v1/a/versioned/symlink', swob.HTTPOk, {
+ symlink.TGT_OBJ_SYSMETA_SYMLINK_HDR: symlink_target,
+ symlink.ALLOW_RESERVED_NAMES: 'true',
+ 'x-object-sysmeta-symlink-target-etag': MD5_OF_EMPTY_STRING,
+ 'x-object-sysmeta-symlink-target-bytes': '0',
+ })
+ self.app.register('GET', version_path, swob.HTTPOk, {})
+ req = Request.blank('/v1/a/versioned/symlink', headers={
+ 'Range': 'foo', 'If-Match': 'bar'})
+ status, headers, body = self.call_sym(req)
+ self.assertEqual(status, '200 OK')
+ self.assertIn(('Content-Location', version_path), headers)
+ self.assertEqual(len(self.authorized), 1)
+ self.assertNotIn('X-Backend-Allow-Reserved-Names',
+ self.app.calls_with_headers[0])
+ call_headers = self.app.calls_with_headers[1].headers
+ self.assertEqual('true', call_headers[
+ 'X-Backend-Allow-Reserved-Names'])
+ self.assertEqual('foo', call_headers['Range'])
+ self.assertEqual('bar', call_headers['If-Match'])
+
+ def test_get_symlink_to_reserved_symlink(self):
+ cont = get_reserved_name('versioned')
+ obj = get_reserved_name('symlink', '9999998765.99999')
+ symlink_target = "%s/%s" % (cont, obj)
+ version_path = '/v1/a/%s' % symlink_target
+ self.app.register('GET', '/v1/a/versioned/symlink', swob.HTTPOk, {
+ symlink.TGT_OBJ_SYSMETA_SYMLINK_HDR: symlink_target,
+ symlink.ALLOW_RESERVED_NAMES: 'true',
+ 'x-object-sysmeta-symlink-target-etag': MD5_OF_EMPTY_STRING,
+ 'x-object-sysmeta-symlink-target-bytes': '0',
+ })
+ self.app.register('GET', version_path, swob.HTTPOk, {
+ symlink.TGT_OBJ_SYSMETA_SYMLINK_HDR: 'unversioned/obj',
+ 'ETag': MD5_OF_EMPTY_STRING,
+ })
+ self.app.register('GET', '/v1/a/unversioned/obj', swob.HTTPOk, {
+ })
+ req = Request.blank('/v1/a/versioned/symlink')
+ status, headers, body = self.call_sym(req)
+ self.assertEqual(status, '200 OK')
+ self.assertIn(('Content-Location', '/v1/a/unversioned/obj'), headers)
+ self.assertEqual(len(self.authorized), 2)
+
def test_symlink_too_deep(self):
self.app.register('GET', '/v1/a/c/symlink', swob.HTTPOk,
{'X-Object-Sysmeta-Symlink-Target': 'c/sym1'})
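
The two reserved-name tests check that the middleware sends
X-Backend-Allow-Reserved-Names: true only on the follow-up request to the
symlink's target, never on the client-facing request, and that conditional
and range headers ride along. Shaped roughly like this hypothetical helper
(header names are the ones asserted above):

    def follow_target(make_request, account, target, allow_reserved,
                      client_headers):
        # Second-hop GET to the symlink target; only this hop may name
        # reserved (NUL-prefixed) containers.
        headers = dict(client_headers)  # e.g. Range, If-Match pass through
        if allow_reserved:
            headers['X-Backend-Allow-Reserved-Names'] = 'true'
        return make_request('GET', '/v1/%s/%s' % (account, target), headers)
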
diff --git a/test/unit/common/test_db.py b/test/unit/common/test_db.py
index eac9f2394..0f3a308dc 100644
--- a/test/unit/common/test_db.py
+++ b/test/unit/common/test_db.py
@@ -1154,7 +1154,7 @@ class TestDatabaseBroker(unittest.TestCase):
return broker
# only testing _reclaim_metadata here
- @patch.object(DatabaseBroker, '_reclaim')
+ @patch.object(DatabaseBroker, '_reclaim', return_value='')
def test_metadata(self, mock_reclaim):
# Initializes a good broker for us
broker = self.get_replication_info_tester(metadata=True)
diff --git a/test/unit/common/test_memcached.py b/test/unit/common/test_memcached.py
index 26cfab555..61ad6082d 100644
--- a/test/unit/common/test_memcached.py
+++ b/test/unit/common/test_memcached.py
@@ -20,6 +20,7 @@ from collections import defaultdict
import errno
from hashlib import md5
import io
+import logging
import six
import socket
import time
@@ -184,9 +185,14 @@ class TestMemcached(unittest.TestCase):
def setUp(self):
self.logger = debug_logger()
- patcher = mock.patch('swift.common.memcached.logging', self.logger)
- self.addCleanup(patcher.stop)
- patcher.start()
+
+ def test_logger_kwarg(self):
+ server_socket = '%s:%s' % ('[::1]', 11211)
+ client = memcached.MemcacheRing([server_socket])
+ self.assertIs(client.logger, logging.getLogger())
+
+ client = memcached.MemcacheRing([server_socket], logger=self.logger)
+ self.assertIs(client.logger, self.logger)
def test_get_conns(self):
sock1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@@ -202,7 +208,8 @@ class TestMemcached(unittest.TestCase):
sock2ipport = '%s:%s' % (sock2ip, memcached.DEFAULT_MEMCACHED_PORT)
# We're deliberately using sock2ip (no port) here to test that the
# default port is used.
- memcache_client = memcached.MemcacheRing([sock1ipport, sock2ip])
+ memcache_client = memcached.MemcacheRing([sock1ipport, sock2ip],
+ logger=self.logger)
one = two = True
while one or two: # Run until we match hosts one and two
key = uuid4().hex.encode('ascii')
@@ -230,7 +237,8 @@ class TestMemcached(unittest.TestCase):
sock.listen(1)
sock_addr = sock.getsockname()
server_socket = '[%s]:%s' % (sock_addr[0], sock_addr[1])
- memcache_client = memcached.MemcacheRing([server_socket])
+ memcache_client = memcached.MemcacheRing([server_socket],
+ logger=self.logger)
key = uuid4().hex.encode('ascii')
for conn in memcache_client._get_conns(key):
peer_sockaddr = conn[2].getpeername()
@@ -251,7 +259,8 @@ class TestMemcached(unittest.TestCase):
server_socket = '[%s]:%s' % (sock_addr[0], sock_addr[1])
server_host = '[%s]' % sock_addr[0]
memcached.DEFAULT_MEMCACHED_PORT = sock_addr[1]
- memcache_client = memcached.MemcacheRing([server_host])
+ memcache_client = memcached.MemcacheRing([server_host],
+ logger=self.logger)
key = uuid4().hex.encode('ascii')
for conn in memcache_client._get_conns(key):
peer_sockaddr = conn[2].getpeername()
@@ -265,7 +274,7 @@ class TestMemcached(unittest.TestCase):
with self.assertRaises(ValueError):
# IPv6 address with missing [] is invalid
server_socket = '%s:%s' % ('::1', 11211)
- memcached.MemcacheRing([server_socket])
+ memcached.MemcacheRing([server_socket], logger=self.logger)
def test_get_conns_hostname(self):
with patch('swift.common.memcached.socket.getaddrinfo') as addrinfo:
@@ -279,7 +288,8 @@ class TestMemcached(unittest.TestCase):
addrinfo.return_value = [(socket.AF_INET,
socket.SOCK_STREAM, 0, '',
('127.0.0.1', sock_addr[1]))]
- memcache_client = memcached.MemcacheRing([server_socket])
+ memcache_client = memcached.MemcacheRing([server_socket],
+ logger=self.logger)
key = uuid4().hex.encode('ascii')
for conn in memcache_client._get_conns(key):
peer_sockaddr = conn[2].getpeername()
@@ -304,7 +314,8 @@ class TestMemcached(unittest.TestCase):
addrinfo.return_value = [(socket.AF_INET6,
socket.SOCK_STREAM, 0, '',
('::1', sock_addr[1]))]
- memcache_client = memcached.MemcacheRing([server_socket])
+ memcache_client = memcached.MemcacheRing([server_socket],
+ logger=self.logger)
key = uuid4().hex.encode('ascii')
for conn in memcache_client._get_conns(key):
peer_sockaddr = conn[2].getpeername()
@@ -317,7 +328,8 @@ class TestMemcached(unittest.TestCase):
sock.close()
def test_set_get_json(self):
- memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
+ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -350,7 +362,8 @@ class TestMemcached(unittest.TestCase):
self.assertAlmostEqual(float(cache_timeout), esttimeout, delta=1)
def test_get_failed_connection_mid_request(self):
- memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
+ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -365,18 +378,17 @@ class TestMemcached(unittest.TestCase):
# force the logging through the DebugLogger instead of the nose
# handler. This will use stdout, so we can assert that no stack trace
# is logged.
- logger = debug_logger()
- with patch("sys.stdout", fake_stdout),\
- patch('swift.common.memcached.logging', logger):
+ with patch("sys.stdout", fake_stdout):
mock.read_return_empty_str = True
self.assertIsNone(memcache_client.get('some_key'))
- log_lines = logger.get_lines_for_level('error')
+ log_lines = self.logger.get_lines_for_level('error')
self.assertIn('Error talking to memcached', log_lines[0])
self.assertFalse(log_lines[1:])
self.assertNotIn("Traceback", fake_stdout.getvalue())
def test_incr(self):
- memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
+ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -396,7 +408,8 @@ class TestMemcached(unittest.TestCase):
self.assertTrue(mock.close_called)
def test_incr_failed_connection_mid_request(self):
- memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
+ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -411,19 +424,18 @@ class TestMemcached(unittest.TestCase):
# force the logging through the DebugLogger instead of the nose
# handler. This will use stdout, so we can assert that no stack trace
# is logged.
- logger = debug_logger()
- with patch("sys.stdout", fake_stdout), \
- patch('swift.common.memcached.logging', logger):
+ with patch("sys.stdout", fake_stdout):
mock.read_return_empty_str = True
self.assertRaises(memcached.MemcacheConnectionError,
memcache_client.incr, 'some_key', delta=1)
- log_lines = logger.get_lines_for_level('error')
+ log_lines = self.logger.get_lines_for_level('error')
self.assertIn('Error talking to memcached', log_lines[0])
self.assertFalse(log_lines[1:])
self.assertNotIn('Traceback', fake_stdout.getvalue())
def test_incr_w_timeout(self):
- memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
+ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -455,7 +467,8 @@ class TestMemcached(unittest.TestCase):
self.assertEqual(mock.cache, {cache_key: (b'0', b'0', b'10')})
def test_decr(self):
- memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
+ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -473,7 +486,7 @@ class TestMemcached(unittest.TestCase):
def test_retry(self):
memcache_client = memcached.MemcacheRing(
- ['1.2.3.4:11211', '1.2.3.5:11211'])
+ ['1.2.3.4:11211', '1.2.3.5:11211'], logger=self.logger)
mock1 = ExplodingMockMemcached()
mock2 = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
@@ -500,7 +513,8 @@ class TestMemcached(unittest.TestCase):
[])
def test_delete(self):
- memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
+ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -510,7 +524,8 @@ class TestMemcached(unittest.TestCase):
self.assertIsNone(memcache_client.get('some_key'))
def test_multi(self):
- memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'])
+ memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -560,7 +575,8 @@ class TestMemcached(unittest.TestCase):
def test_multi_delete(self):
memcache_client = memcached.MemcacheRing(['1.2.3.4:11211',
- '1.2.3.5:11211'])
+ '1.2.3.5:11211'],
+ logger=self.logger)
mock1 = MockMemcached()
mock2 = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
@@ -598,7 +614,8 @@ class TestMemcached(unittest.TestCase):
def test_serialization(self):
memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
- allow_pickle=True)
+ allow_pickle=True,
+ logger=self.logger)
mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2)
@@ -643,7 +660,8 @@ class TestMemcached(unittest.TestCase):
mock_sock.connect = wait_connect
memcache_client = memcached.MemcacheRing(['1.2.3.4:11211'],
- connect_timeout=10)
+ connect_timeout=10,
+ logger=self.logger)
# sanity
self.assertEqual(1, len(memcache_client._client_cache))
for server, pool in memcache_client._client_cache.items():
@@ -702,7 +720,8 @@ class TestMemcached(unittest.TestCase):
memcache_client = memcached.MemcacheRing(['1.2.3.4:11211',
'1.2.3.5:11211'],
io_timeout=0.5,
- pool_timeout=0.1)
+ pool_timeout=0.1,
+ logger=self.logger)
# Hand out a couple slow connections to 1.2.3.5, leaving 1.2.3.4
# fast. All ten (10) clients should try to talk to .5 first, and
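All of the constructor changes above follow one pattern: MemcacheRing now takes its logger as a keyword argument, so the tests inject self.logger instead of patching the module-level swift.common.memcached.logging. A sketch of the two call styles that test_logger_kwarg pins down (the address is illustrative):

    import logging
    from swift.common import memcached

    # no kwarg: falls back to the root logger
    client = memcached.MemcacheRing(['127.0.0.1:11211'])
    assert client.logger is logging.getLogger()

    # injected logger: errors such as 'Error talking to memcached' land on
    # the supplied logger, so tests can assert on them directly
    test_logger = logging.getLogger('test.memcached')
    client = memcached.MemcacheRing(['127.0.0.1:11211'], logger=test_logger)
    assert client.logger is test_logger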
diff --git a/test/unit/common/test_swob.py b/test/unit/common/test_swob.py
index 2d645234a..ccbe6d2f9 100644
--- a/test/unit/common/test_swob.py
+++ b/test/unit/common/test_swob.py
@@ -914,6 +914,22 @@ class TestRequest(unittest.TestCase):
self.assertEqual(used_req[0].path, '/hi/there')
self.assertEqual(resp.status_int, 200)
+ def test_wsgify_method(self):
+ class _wsgi_class(object):
+ def __init__(self):
+ self.used_req = []
+
+ @swob.wsgify
+ def __call__(self, req):
+ self.used_req.append(req)
+ return swob.Response(b'200 OK')
+
+ req = swob.Request.blank('/hi/there')
+ handler = _wsgi_class()
+ resp = req.get_response(handler)
+ self.assertIs(handler.used_req[0].environ, req.environ)
+ self.assertEqual(resp.status_int, 200)
+
def test_wsgify_raise(self):
used_req = []
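test_wsgify_method above extends @swob.wsgify coverage to bound methods: the wrapper must pass the parsed Request after self rather than as the sole argument. A condensed usage sketch (the class name is hypothetical):

    from swift.common import swob

    class EchoApp(object):
        @swob.wsgify
        def __call__(self, req):
            # req is a swob.Request built from the WSGI environ
            return swob.Response(body=req.path.encode('ascii'))

    resp = swob.Request.blank('/hi/there').get_response(EchoApp())
    assert resp.status_int == 200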
diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py
index b91be1329..26f70656b 100644
--- a/test/unit/common/test_utils.py
+++ b/test/unit/common/test_utils.py
@@ -6080,6 +6080,136 @@ class TestAuditLocationGenerator(unittest.TestCase):
self.assertEqual(list(locations),
[(obj_path, "drive", "partition2")])
+ def test_hooks(self):
+ with temptree([]) as tmpdir:
+ logger = FakeLogger()
+ data = os.path.join(tmpdir, "drive", "data")
+ os.makedirs(data)
+ partition = os.path.join(data, "partition1")
+ os.makedirs(partition)
+ suffix = os.path.join(partition, "suffix1")
+ os.makedirs(suffix)
+ hash_path = os.path.join(suffix, "hash1")
+ os.makedirs(hash_path)
+ obj_path = os.path.join(hash_path, "obj1.dat")
+ with open(obj_path, "w"):
+ pass
+ meta_path = os.path.join(hash_path, "obj1.meta")
+ with open(meta_path, "w"):
+ pass
+ hook_pre_device = MagicMock()
+ hook_post_device = MagicMock()
+ hook_pre_partition = MagicMock()
+ hook_post_partition = MagicMock()
+ hook_pre_suffix = MagicMock()
+ hook_post_suffix = MagicMock()
+ hook_pre_hash = MagicMock()
+ hook_post_hash = MagicMock()
+ locations = utils.audit_location_generator(
+ tmpdir, "data", ".dat", mount_check=False, logger=logger,
+ hook_pre_device=hook_pre_device,
+ hook_post_device=hook_post_device,
+ hook_pre_partition=hook_pre_partition,
+ hook_post_partition=hook_post_partition,
+ hook_pre_suffix=hook_pre_suffix,
+ hook_post_suffix=hook_post_suffix,
+ hook_pre_hash=hook_pre_hash,
+ hook_post_hash=hook_post_hash
+ )
+ list(locations)
+ hook_pre_device.assert_called_once_with(os.path.join(tmpdir,
+ "drive"))
+ hook_post_device.assert_called_once_with(os.path.join(tmpdir,
+ "drive"))
+ hook_pre_partition.assert_called_once_with(partition)
+ hook_post_partition.assert_called_once_with(partition)
+ hook_pre_suffix.assert_called_once_with(suffix)
+ hook_post_suffix.assert_called_once_with(suffix)
+ hook_pre_hash.assert_called_once_with(hash_path)
+ hook_post_hash.assert_called_once_with(hash_path)
+
+ def test_filters(self):
+ with temptree([]) as tmpdir:
+ logger = FakeLogger()
+ data = os.path.join(tmpdir, "drive", "data")
+ os.makedirs(data)
+ partition = os.path.join(data, "partition1")
+ os.makedirs(partition)
+ suffix = os.path.join(partition, "suffix1")
+ os.makedirs(suffix)
+ hash_path = os.path.join(suffix, "hash1")
+ os.makedirs(hash_path)
+ obj_path = os.path.join(hash_path, "obj1.dat")
+ with open(obj_path, "w"):
+ pass
+ meta_path = os.path.join(hash_path, "obj1.meta")
+ with open(meta_path, "w"):
+ pass
+
+ def audit_location_generator(**kwargs):
+ return utils.audit_location_generator(
+ tmpdir, "data", ".dat", mount_check=False, logger=logger,
+ **kwargs)
+
+            # patch os.listdir with a pass-through side_effect so we can
+            # assert which directories were (or were not) actually listed
+
+ with patch('os.listdir', side_effect=os.listdir) as m_listdir:
+ # devices_filter
+ m_listdir.reset_mock()
+ devices_filter = MagicMock(return_value=["drive"])
+ list(audit_location_generator(devices_filter=devices_filter))
+ devices_filter.assert_called_once_with(tmpdir, ["drive"])
+ self.assertIn(((data,),), m_listdir.call_args_list)
+
+ m_listdir.reset_mock()
+ devices_filter = MagicMock(return_value=[])
+ list(audit_location_generator(devices_filter=devices_filter))
+ devices_filter.assert_called_once_with(tmpdir, ["drive"])
+ self.assertNotIn(((data,),), m_listdir.call_args_list)
+
+ # partitions_filter
+ m_listdir.reset_mock()
+ partitions_filter = MagicMock(return_value=["partition1"])
+ list(audit_location_generator(
+ partitions_filter=partitions_filter))
+ partitions_filter.assert_called_once_with(data,
+ ["partition1"])
+ self.assertIn(((partition,),), m_listdir.call_args_list)
+
+ m_listdir.reset_mock()
+ partitions_filter = MagicMock(return_value=[])
+ list(audit_location_generator(
+ partitions_filter=partitions_filter))
+ partitions_filter.assert_called_once_with(data,
+ ["partition1"])
+ self.assertNotIn(((partition,),), m_listdir.call_args_list)
+
+ # suffixes_filter
+ m_listdir.reset_mock()
+ suffixes_filter = MagicMock(return_value=["suffix1"])
+ list(audit_location_generator(suffixes_filter=suffixes_filter))
+ suffixes_filter.assert_called_once_with(partition, ["suffix1"])
+ self.assertIn(((suffix,),), m_listdir.call_args_list)
+
+ m_listdir.reset_mock()
+ suffixes_filter = MagicMock(return_value=[])
+ list(audit_location_generator(suffixes_filter=suffixes_filter))
+ suffixes_filter.assert_called_once_with(partition, ["suffix1"])
+ self.assertNotIn(((suffix,),), m_listdir.call_args_list)
+
+ # hashes_filter
+ m_listdir.reset_mock()
+ hashes_filter = MagicMock(return_value=["hash1"])
+ list(audit_location_generator(hashes_filter=hashes_filter))
+ hashes_filter.assert_called_once_with(suffix, ["hash1"])
+ self.assertIn(((hash_path,),), m_listdir.call_args_list)
+
+ m_listdir.reset_mock()
+ hashes_filter = MagicMock(return_value=[])
+ list(audit_location_generator(hashes_filter=hashes_filter))
+ hashes_filter.assert_called_once_with(suffix, ["hash1"])
+ self.assertNotIn(((hash_path,),), m_listdir.call_args_list)
+
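Taken together, test_hooks and test_filters describe the new audit_location_generator extension points: hook_pre_*/hook_post_* callbacks fire around each level of the device/partition/suffix/hash walk, while *_filter callbacks receive the parent path plus the candidate entries and return the subset to descend into. Roughly (the paths and datadir name here are illustrative):

    from swift.common import utils

    def partitions_filter(datadir_path, partitions):
        # descend only into the partitions this worker owns, say
        return [p for p in partitions if p.endswith('1')]

    locations = utils.audit_location_generator(
        '/srv/node', 'objects', '.data', mount_check=False,
        hook_pre_partition=lambda path: print('auditing', path),
        partitions_filter=partitions_filter)
    for obj_path, device, partition in locations:
        pass  # each yield is (file path, device name, partition name)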
class TestGreenAsyncPile(unittest.TestCase):
@@ -7224,7 +7354,8 @@ class TestShardRange(unittest.TestCase):
upper='', object_count=0, bytes_used=0,
meta_timestamp=ts_1.internal, deleted=0,
state=utils.ShardRange.FOUND,
- state_timestamp=ts_1.internal, epoch=None)
+ state_timestamp=ts_1.internal, epoch=None,
+ reported=0)
assert_initialisation_ok(dict(empty_run, name='a/c', timestamp=ts_1),
expect)
assert_initialisation_ok(dict(name='a/c', timestamp=ts_1), expect)
@@ -7233,11 +7364,13 @@ class TestShardRange(unittest.TestCase):
upper='u', object_count=2, bytes_used=10,
meta_timestamp=ts_2, deleted=0,
state=utils.ShardRange.CREATED,
- state_timestamp=ts_3.internal, epoch=ts_4)
+ state_timestamp=ts_3.internal, epoch=ts_4,
+ reported=0)
expect.update({'lower': 'l', 'upper': 'u', 'object_count': 2,
'bytes_used': 10, 'meta_timestamp': ts_2.internal,
'state': utils.ShardRange.CREATED,
- 'state_timestamp': ts_3.internal, 'epoch': ts_4})
+ 'state_timestamp': ts_3.internal, 'epoch': ts_4,
+ 'reported': 0})
assert_initialisation_ok(good_run.copy(), expect)
# obj count and bytes used as int strings
@@ -7255,6 +7388,11 @@ class TestShardRange(unittest.TestCase):
assert_initialisation_ok(good_deleted,
dict(expect, deleted=1))
+ good_reported = good_run.copy()
+ good_reported['reported'] = 1
+ assert_initialisation_ok(good_reported,
+ dict(expect, reported=1))
+
assert_initialisation_fails(dict(good_run, timestamp='water balloon'))
assert_initialisation_fails(
@@ -7293,7 +7431,7 @@ class TestShardRange(unittest.TestCase):
'upper': upper, 'object_count': 10, 'bytes_used': 100,
'meta_timestamp': ts_2.internal, 'deleted': 0,
'state': utils.ShardRange.FOUND, 'state_timestamp': ts_3.internal,
- 'epoch': ts_4}
+ 'epoch': ts_4, 'reported': 0}
self.assertEqual(expected, sr_dict)
self.assertIsInstance(sr_dict['lower'], six.string_types)
self.assertIsInstance(sr_dict['upper'], six.string_types)
@@ -7308,6 +7446,14 @@ class TestShardRange(unittest.TestCase):
for key in sr_dict:
bad_dict = dict(sr_dict)
bad_dict.pop(key)
+ if key == 'reported':
+ # This was added after the fact, and we need to be able to eat
+ # data from old servers
+ utils.ShardRange.from_dict(bad_dict)
+ utils.ShardRange(**bad_dict)
+ continue
+
+ # The rest were present from the beginning
with self.assertRaises(KeyError):
utils.ShardRange.from_dict(bad_dict)
# But __init__ still (generally) works!
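The key == 'reported' special case above encodes the compatibility rule: dicts serialized by old servers lack the key, and both from_dict() and __init__ must default it rather than raise KeyError. A minimal sketch:

    from swift.common import utils

    ts = utils.Timestamp.now().internal
    old_style = {'name': 'a/c', 'timestamp': ts, 'lower': '', 'upper': '',
                 'object_count': 0, 'bytes_used': 0, 'meta_timestamp': ts,
                 'deleted': 0, 'state': utils.ShardRange.FOUND,
                 'state_timestamp': ts, 'epoch': None}  # no 'reported' key
    sr = utils.ShardRange.from_dict(old_style)
    assert not sr.reported  # defaults to unreported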
diff --git a/test/unit/container/test_backend.py b/test/unit/container/test_backend.py
index 33fd5298e..37308c154 100644
--- a/test/unit/container/test_backend.py
+++ b/test/unit/container/test_backend.py
@@ -28,6 +28,7 @@ from contextlib import contextmanager
import sqlite3
import pickle
import json
+import itertools
import six
@@ -558,6 +559,98 @@ class TestContainerBroker(unittest.TestCase):
broker.reclaim(Timestamp.now().internal, time())
broker.delete_db(Timestamp.now().internal)
+ def test_batch_reclaim(self):
+ num_of_objects = 60
+ obj_specs = []
+ now = time()
+ top_of_the_minute = now - (now % 60)
+ c = itertools.cycle([True, False])
+ for m, is_deleted in six.moves.zip(range(num_of_objects), c):
+ offset = top_of_the_minute - (m * 60)
+ obj_specs.append((Timestamp(offset), is_deleted))
+ random.seed(now)
+ random.shuffle(obj_specs)
+ policy_indexes = list(p.idx for p in POLICIES)
+ broker = ContainerBroker(':memory:', account='test_account',
+ container='test_container')
+ broker.initialize(Timestamp('1').internal, 0)
+ for i, obj_spec in enumerate(obj_specs):
+ # with object12 before object2 and shuffled ts.internal we
+            # shouldn't be able to accidentally rely on any implicit ordering
+ obj_name = 'object%s' % i
+ pidx = random.choice(policy_indexes)
+ ts, is_deleted = obj_spec
+ if is_deleted:
+ broker.delete_object(obj_name, ts.internal, pidx)
+ else:
+ broker.put_object(obj_name, ts.internal, 0, 'text/plain',
+ 'etag', storage_policy_index=pidx)
+
+ def count_reclaimable(conn, reclaim_age):
+ return conn.execute(
+ "SELECT count(*) FROM object "
+ "WHERE deleted = 1 AND created_at < ?", (reclaim_age,)
+ ).fetchone()[0]
+
+ # This is intended to divide the set of timestamps exactly in half
+ # regardless of the value of now
+ reclaim_age = top_of_the_minute + 1 - (num_of_objects / 2 * 60)
+ with broker.get() as conn:
+ self.assertEqual(count_reclaimable(conn, reclaim_age),
+ num_of_objects / 4)
+
+ orig__reclaim = broker._reclaim
+ trace = []
+
+ def tracing_reclaim(conn, age_timestamp, marker):
+ trace.append((age_timestamp, marker,
+ count_reclaimable(conn, age_timestamp)))
+ return orig__reclaim(conn, age_timestamp, marker)
+
+ with mock.patch.object(broker, '_reclaim', new=tracing_reclaim), \
+ mock.patch('swift.common.db.RECLAIM_PAGE_SIZE', 10):
+ broker.reclaim(reclaim_age, reclaim_age)
+
+ with broker.get() as conn:
+ self.assertEqual(count_reclaimable(conn, reclaim_age), 0)
+ self.assertEqual(3, len(trace), trace)
+ self.assertEqual([age for age, marker, reclaimable in trace],
+ [reclaim_age] * 3)
+ # markers are in-order
+ self.assertLess(trace[0][1], trace[1][1])
+ self.assertLess(trace[1][1], trace[2][1])
+ # reclaimable count gradually decreases
+ # generally, count1 > count2 > count3, but because of the randomness
+        # we may occasionally have count1 == count2 or count2 == count3
+ self.assertGreaterEqual(trace[0][2], trace[1][2])
+ self.assertGreaterEqual(trace[1][2], trace[2][2])
+ # technically, this might happen occasionally, but *really* rarely
+ self.assertTrue(trace[0][2] > trace[1][2] or
+ trace[1][2] > trace[2][2])
+
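The trace assertions above pin down the paging contract: reclaim() calls _reclaim(conn, age_timestamp, marker) repeatedly, each pass covering a RECLAIM_PAGE_SIZE window of tombstone names, until an empty marker comes back. Thirty deleted rows at a page size of 10 is exactly three passes, each deleting only the rows in its window older than reclaim_age. One plausible shape of the driver loop, inferred from those assertions rather than copied from the implementation:

    marker = ''
    while True:
        marker = broker._reclaim(conn, age_timestamp, marker)
        if not marker:  # empty string marks the final page
            break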
+ def test_reclaim_with_duplicate_names(self):
+ broker = ContainerBroker(':memory:', account='test_account',
+ container='test_container')
+ broker.initialize(Timestamp('1').internal, 0)
+ now = time()
+ ages_ago = Timestamp(now - (3 * 7 * 24 * 60 * 60))
+ for i in range(10):
+ for spidx in range(10):
+ obj_name = 'object%s' % i
+ broker.delete_object(obj_name, ages_ago.internal, spidx)
+ reclaim_age = now - (2 * 7 * 24 * 60 * 60)
+ with broker.get() as conn:
+ self.assertEqual(conn.execute(
+ "SELECT count(*) FROM object "
+ "WHERE created_at < ?", (reclaim_age,)
+ ).fetchone()[0], 100)
+ with mock.patch('swift.common.db.RECLAIM_PAGE_SIZE', 10):
+ broker.reclaim(reclaim_age, reclaim_age)
+ with broker.get() as conn:
+ self.assertEqual(conn.execute(
+ "SELECT count(*) FROM object "
+ ).fetchone()[0], 0)
+
@with_tempdir
def test_reclaim_deadlock(self, tempdir):
db_path = os.path.join(
@@ -642,10 +735,12 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(info['put_timestamp'], start.internal)
self.assertTrue(Timestamp(info['created_at']) >= start)
self.assertEqual(info['delete_timestamp'], '0')
- if self.__class__ in (TestContainerBrokerBeforeMetadata,
- TestContainerBrokerBeforeXSync,
- TestContainerBrokerBeforeSPI,
- TestContainerBrokerBeforeShardRanges):
+ if self.__class__ in (
+ TestContainerBrokerBeforeMetadata,
+ TestContainerBrokerBeforeXSync,
+ TestContainerBrokerBeforeSPI,
+ TestContainerBrokerBeforeShardRanges,
+ TestContainerBrokerBeforeShardRangeReportedColumn):
self.assertEqual(info['status_changed_at'], '0')
else:
self.assertEqual(info['status_changed_at'],
@@ -932,6 +1027,8 @@ class TestContainerBroker(unittest.TestCase):
"SELECT object_count FROM shard_range").fetchone()[0], 0)
self.assertEqual(conn.execute(
"SELECT bytes_used FROM shard_range").fetchone()[0], 0)
+ self.assertEqual(conn.execute(
+ "SELECT reported FROM shard_range").fetchone()[0], 0)
# Reput same event
broker.merge_shard_ranges(
@@ -957,6 +1054,64 @@ class TestContainerBroker(unittest.TestCase):
"SELECT object_count FROM shard_range").fetchone()[0], 0)
self.assertEqual(conn.execute(
"SELECT bytes_used FROM shard_range").fetchone()[0], 0)
+ self.assertEqual(conn.execute(
+ "SELECT reported FROM shard_range").fetchone()[0], 0)
+
+ # Mark it as reported
+ broker.merge_shard_ranges(
+ ShardRange('"a/{<shardrange \'&\' name>}"', timestamp,
+ 'low', 'up', meta_timestamp=meta_timestamp,
+ reported=True))
+ with broker.get() as conn:
+ self.assertEqual(conn.execute(
+ "SELECT name FROM shard_range").fetchone()[0],
+ '"a/{<shardrange \'&\' name>}"')
+ self.assertEqual(conn.execute(
+ "SELECT timestamp FROM shard_range").fetchone()[0],
+ timestamp)
+ self.assertEqual(conn.execute(
+ "SELECT meta_timestamp FROM shard_range").fetchone()[0],
+ meta_timestamp)
+ self.assertEqual(conn.execute(
+ "SELECT lower FROM shard_range").fetchone()[0], 'low')
+ self.assertEqual(conn.execute(
+ "SELECT upper FROM shard_range").fetchone()[0], 'up')
+ self.assertEqual(conn.execute(
+ "SELECT deleted FROM shard_range").fetchone()[0], 0)
+ self.assertEqual(conn.execute(
+ "SELECT object_count FROM shard_range").fetchone()[0], 0)
+ self.assertEqual(conn.execute(
+ "SELECT bytes_used FROM shard_range").fetchone()[0], 0)
+ self.assertEqual(conn.execute(
+ "SELECT reported FROM shard_range").fetchone()[0], 1)
+
+ # Reporting latches it
+ broker.merge_shard_ranges(
+ ShardRange('"a/{<shardrange \'&\' name>}"', timestamp,
+ 'low', 'up', meta_timestamp=meta_timestamp,
+ reported=False))
+ with broker.get() as conn:
+ self.assertEqual(conn.execute(
+ "SELECT name FROM shard_range").fetchone()[0],
+ '"a/{<shardrange \'&\' name>}"')
+ self.assertEqual(conn.execute(
+ "SELECT timestamp FROM shard_range").fetchone()[0],
+ timestamp)
+ self.assertEqual(conn.execute(
+ "SELECT meta_timestamp FROM shard_range").fetchone()[0],
+ meta_timestamp)
+ self.assertEqual(conn.execute(
+ "SELECT lower FROM shard_range").fetchone()[0], 'low')
+ self.assertEqual(conn.execute(
+ "SELECT upper FROM shard_range").fetchone()[0], 'up')
+ self.assertEqual(conn.execute(
+ "SELECT deleted FROM shard_range").fetchone()[0], 0)
+ self.assertEqual(conn.execute(
+ "SELECT object_count FROM shard_range").fetchone()[0], 0)
+ self.assertEqual(conn.execute(
+ "SELECT bytes_used FROM shard_range").fetchone()[0], 0)
+ self.assertEqual(conn.execute(
+ "SELECT reported FROM shard_range").fetchone()[0], 1)
# Put new event
timestamp = next(self.ts).internal
@@ -984,11 +1139,14 @@ class TestContainerBroker(unittest.TestCase):
"SELECT object_count FROM shard_range").fetchone()[0], 1)
self.assertEqual(conn.execute(
"SELECT bytes_used FROM shard_range").fetchone()[0], 2)
+ self.assertEqual(conn.execute(
+ "SELECT reported FROM shard_range").fetchone()[0], 0)
# Put old event
broker.merge_shard_ranges(
ShardRange('"a/{<shardrange \'&\' name>}"', old_put_timestamp,
- 'lower', 'upper', 1, 2, meta_timestamp=meta_timestamp))
+ 'lower', 'upper', 1, 2, meta_timestamp=meta_timestamp,
+ reported=True))
with broker.get() as conn:
self.assertEqual(conn.execute(
"SELECT name FROM shard_range").fetchone()[0],
@@ -1009,6 +1167,8 @@ class TestContainerBroker(unittest.TestCase):
"SELECT object_count FROM shard_range").fetchone()[0], 1)
self.assertEqual(conn.execute(
"SELECT bytes_used FROM shard_range").fetchone()[0], 2)
+ self.assertEqual(conn.execute(
+ "SELECT reported FROM shard_range").fetchone()[0], 0)
# Put old delete event
broker.merge_shard_ranges(
@@ -1885,10 +2045,12 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(info['hash'], '00000000000000000000000000000000')
self.assertEqual(info['put_timestamp'], Timestamp(1).internal)
self.assertEqual(info['delete_timestamp'], '0')
- if self.__class__ in (TestContainerBrokerBeforeMetadata,
- TestContainerBrokerBeforeXSync,
- TestContainerBrokerBeforeSPI,
- TestContainerBrokerBeforeShardRanges):
+ if self.__class__ in (
+ TestContainerBrokerBeforeMetadata,
+ TestContainerBrokerBeforeXSync,
+ TestContainerBrokerBeforeSPI,
+ TestContainerBrokerBeforeShardRanges,
+ TestContainerBrokerBeforeShardRangeReportedColumn):
self.assertEqual(info['status_changed_at'], '0')
else:
self.assertEqual(info['status_changed_at'],
@@ -3182,10 +3344,12 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(0, info['storage_policy_index']) # sanity check
self.assertEqual(0, info['object_count'])
self.assertEqual(0, info['bytes_used'])
- if self.__class__ in (TestContainerBrokerBeforeMetadata,
- TestContainerBrokerBeforeXSync,
- TestContainerBrokerBeforeSPI,
- TestContainerBrokerBeforeShardRanges):
+ if self.__class__ in (
+ TestContainerBrokerBeforeMetadata,
+ TestContainerBrokerBeforeXSync,
+ TestContainerBrokerBeforeSPI,
+ TestContainerBrokerBeforeShardRanges,
+ TestContainerBrokerBeforeShardRangeReportedColumn):
self.assertEqual(info['status_changed_at'], '0')
else:
self.assertEqual(timestamp.internal, info['status_changed_at'])
@@ -5222,6 +5386,75 @@ class TestContainerBrokerBeforeShardRanges(ContainerBrokerMigrationMixin,
FROM shard_range''')
+def pre_reported_create_shard_range_table(self, conn):
+ """
+ Copied from ContainerBroker before the
+ reported column was added; used for testing with
+ TestContainerBrokerBeforeShardRangeReportedColumn.
+
+ Create a shard_range table with no 'reported' column.
+
+ :param conn: DB connection object
+ """
+ conn.execute("""
+ CREATE TABLE shard_range (
+ ROWID INTEGER PRIMARY KEY AUTOINCREMENT,
+ name TEXT,
+ timestamp TEXT,
+ lower TEXT,
+ upper TEXT,
+ object_count INTEGER DEFAULT 0,
+ bytes_used INTEGER DEFAULT 0,
+ meta_timestamp TEXT,
+ deleted INTEGER DEFAULT 0,
+ state INTEGER,
+ state_timestamp TEXT,
+ epoch TEXT
+ );
+ """)
+
+ conn.execute("""
+ CREATE TRIGGER shard_range_update BEFORE UPDATE ON shard_range
+ BEGIN
+ SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT');
+ END;
+ """)
+
+
+class TestContainerBrokerBeforeShardRangeReportedColumn(
+ ContainerBrokerMigrationMixin, TestContainerBroker):
+ """
+    Tests for ContainerBroker against databases created
+    before the 'reported' column was added to the shard_range table.
+ """
+ # *grumble grumble* This should include container_info/policy_stat :-/
+ expected_db_tables = {'outgoing_sync', 'incoming_sync', 'object',
+ 'sqlite_sequence', 'container_stat', 'shard_range'}
+
+ def setUp(self):
+ super(TestContainerBrokerBeforeShardRangeReportedColumn,
+ self).setUp()
+ ContainerBroker.create_shard_range_table = \
+ pre_reported_create_shard_range_table
+
+ broker = ContainerBroker(':memory:', account='a', container='c')
+ broker.initialize(Timestamp('1').internal, 0)
+ with self.assertRaises(sqlite3.DatabaseError) as raised, \
+ broker.get() as conn:
+ conn.execute('''SELECT reported
+ FROM shard_range''')
+ self.assertIn('no such column: reported', str(raised.exception))
+
+ def tearDown(self):
+ super(TestContainerBrokerBeforeShardRangeReportedColumn,
+ self).tearDown()
+ broker = ContainerBroker(':memory:', account='a', container='c')
+ broker.initialize(Timestamp('1').internal, 0)
+ with broker.get() as conn:
+ conn.execute('''SELECT reported
+ FROM shard_range''')
+
+
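The 'Reporting latches it' hunk earlier in this file's diff is the subtle part of the new column: merging a same-timestamp shard range with reported=False does not clear a row already marked reported=1; only a genuinely newer event resets the flag. Condensed, assuming a broker and names as in the test:

    broker.merge_shard_ranges(
        ShardRange(name, timestamp, 'low', 'up', reported=True))
    broker.merge_shard_ranges(
        ShardRange(name, timestamp, 'low', 'up', reported=False))
    [sr] = broker.get_shard_ranges()  # same-timestamp merge cannot un-report
    assert sr.reported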
class TestUpdateNewItemFromExisting(unittest.TestCase):
# TODO: add test scenarios that have swift_bytes in content_type
t0 = '1234567890.00000'
diff --git a/test/unit/container/test_server.py b/test/unit/container/test_server.py
index fc55ff05d..4fd1fcf2e 100644
--- a/test/unit/container/test_server.py
+++ b/test/unit/container/test_server.py
@@ -2380,15 +2380,17 @@ class TestContainerController(unittest.TestCase):
'X-Container-Sysmeta-Test': 'set',
'X-Container-Meta-Test': 'persisted'}
- # PUT shard range to non-existent container with non-autocreate prefix
- req = Request.blank('/sda1/p/a/c', method='PUT', headers=headers,
- body=json.dumps([dict(shard_range)]))
+ # PUT shard range to non-existent container without autocreate flag
+ req = Request.blank(
+ '/sda1/p/.shards_a/shard_c', method='PUT', headers=headers,
+ body=json.dumps([dict(shard_range)]))
resp = req.get_response(self.controller)
self.assertEqual(404, resp.status_int)
- # PUT shard range to non-existent container with autocreate prefix,
+ # PUT shard range to non-existent container with autocreate flag,
# missing storage policy
headers['X-Timestamp'] = next(ts_iter).internal
+ headers['X-Backend-Auto-Create'] = 't'
req = Request.blank(
'/sda1/p/.shards_a/shard_c', method='PUT', headers=headers,
body=json.dumps([dict(shard_range)]))
@@ -2397,7 +2399,7 @@ class TestContainerController(unittest.TestCase):
self.assertIn(b'X-Backend-Storage-Policy-Index header is required',
resp.body)
- # PUT shard range to non-existent container with autocreate prefix
+ # PUT shard range to non-existent container with autocreate flag
headers['X-Timestamp'] = next(ts_iter).internal
policy_index = random.choice(POLICIES).idx
headers['X-Backend-Storage-Policy-Index'] = str(policy_index)
@@ -2407,7 +2409,7 @@ class TestContainerController(unittest.TestCase):
resp = req.get_response(self.controller)
self.assertEqual(201, resp.status_int)
- # repeat PUT of shard range to autocreated container - 204 response
+ # repeat PUT of shard range to autocreated container - 202 response
headers['X-Timestamp'] = next(ts_iter).internal
headers.pop('X-Backend-Storage-Policy-Index') # no longer required
req = Request.blank(
@@ -2416,7 +2418,7 @@ class TestContainerController(unittest.TestCase):
resp = req.get_response(self.controller)
self.assertEqual(202, resp.status_int)
- # regular PUT to autocreated container - 204 response
+ # regular PUT to autocreated container - 202 response
headers['X-Timestamp'] = next(ts_iter).internal
req = Request.blank(
'/sda1/p/.shards_a/shard_c', method='PUT',
@@ -4649,61 +4651,53 @@ class TestContainerController(unittest.TestCase):
"%d on param %s" % (resp.status_int, param))
def test_put_auto_create(self):
- headers = {'x-timestamp': Timestamp(1).internal,
- 'x-size': '0',
- 'x-content-type': 'text/plain',
- 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e'}
-
- req = Request.blank('/sda1/p/a/c/o',
- environ={'REQUEST_METHOD': 'PUT'},
- headers=dict(headers))
- resp = req.get_response(self.controller)
- self.assertEqual(resp.status_int, 404)
-
- req = Request.blank('/sda1/p/.a/c/o',
- environ={'REQUEST_METHOD': 'PUT'},
- headers=dict(headers))
- resp = req.get_response(self.controller)
- self.assertEqual(resp.status_int, 201)
-
- req = Request.blank('/sda1/p/a/.c/o',
- environ={'REQUEST_METHOD': 'PUT'},
- headers=dict(headers))
- resp = req.get_response(self.controller)
- self.assertEqual(resp.status_int, 404)
+ def do_test(expected_status, path, extra_headers=None, body=None):
+ headers = {'x-timestamp': Timestamp(1).internal,
+ 'x-size': '0',
+ 'x-content-type': 'text/plain',
+ 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e'}
+ if extra_headers:
+ headers.update(extra_headers)
+ req = Request.blank('/sda1/p/' + path,
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers=headers, body=body)
+ resp = req.get_response(self.controller)
+ self.assertEqual(resp.status_int, expected_status)
- req = Request.blank('/sda1/p/a/c/.o',
- environ={'REQUEST_METHOD': 'PUT'},
- headers=dict(headers))
- resp = req.get_response(self.controller)
- self.assertEqual(resp.status_int, 404)
+ do_test(404, 'a/c/o')
+ do_test(404, '.a/c/o', {'X-Backend-Auto-Create': 'no'})
+ do_test(201, '.a/c/o')
+ do_test(404, 'a/.c/o')
+ do_test(404, 'a/c/.o')
+ do_test(201, 'a/c/o', {'X-Backend-Auto-Create': 'yes'})
+
+ do_test(404, '.shards_a/c/o')
+ create_shard_headers = {
+ 'X-Backend-Record-Type': 'shard',
+ 'X-Backend-Storage-Policy-Index': '0'}
+ do_test(404, '.shards_a/c', create_shard_headers, '[]')
+ create_shard_headers['X-Backend-Auto-Create'] = 't'
+ do_test(201, '.shards_a/c', create_shard_headers, '[]')
def test_delete_auto_create(self):
- headers = {'x-timestamp': Timestamp(1).internal}
-
- req = Request.blank('/sda1/p/a/c/o',
- environ={'REQUEST_METHOD': 'DELETE'},
- headers=dict(headers))
- resp = req.get_response(self.controller)
- self.assertEqual(resp.status_int, 404)
-
- req = Request.blank('/sda1/p/.a/c/o',
- environ={'REQUEST_METHOD': 'DELETE'},
- headers=dict(headers))
- resp = req.get_response(self.controller)
- self.assertEqual(resp.status_int, 204)
-
- req = Request.blank('/sda1/p/a/.c/o',
- environ={'REQUEST_METHOD': 'DELETE'},
- headers=dict(headers))
- resp = req.get_response(self.controller)
- self.assertEqual(resp.status_int, 404)
+ def do_test(expected_status, path, extra_headers=None):
+ headers = {'x-timestamp': Timestamp(1).internal}
+ if extra_headers:
+ headers.update(extra_headers)
+ req = Request.blank('/sda1/p/' + path,
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers=headers)
+ resp = req.get_response(self.controller)
+ self.assertEqual(resp.status_int, expected_status)
- req = Request.blank('/sda1/p/a/.c/.o',
- environ={'REQUEST_METHOD': 'DELETE'},
- headers=dict(headers))
- resp = req.get_response(self.controller)
- self.assertEqual(resp.status_int, 404)
+ do_test(404, 'a/c/o')
+ do_test(404, '.a/c/o', {'X-Backend-Auto-Create': 'false'})
+ do_test(204, '.a/c/o')
+ do_test(404, 'a/.c/o')
+ do_test(404, 'a/.c/.o')
+ do_test(404, '.shards_a/c/o')
+ do_test(204, 'a/c/o', {'X-Backend-Auto-Create': 'true'})
+ do_test(204, '.shards_a/c/o', {'X-Backend-Auto-Create': 'true'})
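Both rewritten tests capture the behavioural change: autocreation is now requested explicitly via the X-Backend-Auto-Create header (any of the usual truthy values), not inferred only from a leading '.' in the account. Per the expectations above, the reserved '.a' account still autocreates by default, while '.shards_a' paths need the flag. A sketch of a flagged object PUT, reusing the metadata headers from do_test:

    headers = {'x-timestamp': Timestamp(1).internal,
               'x-size': '0',
               'x-content-type': 'text/plain',
               'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
               'X-Backend-Auto-Create': 'yes'}
    req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                        headers=headers)
    resp = req.get_response(self.controller)  # 201 even without a '.' prefix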
def test_content_type_on_HEAD(self):
Request.blank('/sda1/p/a/o',
diff --git a/test/unit/container/test_sharder.py b/test/unit/container/test_sharder.py
index 43730a5d9..a54ddb652 100644
--- a/test/unit/container/test_sharder.py
+++ b/test/unit/container/test_sharder.py
@@ -4189,6 +4189,7 @@ class TestSharder(BaseTestSharder):
def capture_send(conn, data):
bodies.append(data)
+ self.assertFalse(broker.get_own_shard_range().reported) # sanity
with self._mock_sharder() as sharder:
with mocked_http_conn(204, 204, 204,
give_send=capture_send) as mock_conn:
@@ -4198,6 +4199,7 @@ class TestSharder(BaseTestSharder):
self.assertEqual('PUT', req['method'])
self.assertEqual([expected_sent] * 3,
[json.loads(b) for b in bodies])
+ self.assertTrue(broker.get_own_shard_range().reported)
def test_update_root_container_own_range(self):
broker = self._make_broker()
@@ -4230,6 +4232,32 @@ class TestSharder(BaseTestSharder):
with annotate_failure(state):
check_only_own_shard_range_sent(state)
+ def test_update_root_container_already_reported(self):
+ broker = self._make_broker()
+
+ def check_already_reported_not_sent(state):
+ own_shard_range = broker.get_own_shard_range()
+
+ own_shard_range.reported = True
+ self.assertTrue(own_shard_range.update_state(
+ state, state_timestamp=next(self.ts_iter)))
+ # Check that updating state clears the flag
+ self.assertFalse(own_shard_range.reported)
+
+ # If we claim to have already updated...
+ own_shard_range.reported = True
+ broker.merge_shard_ranges([own_shard_range])
+
+ # ... then there's nothing to send
+ with self._mock_sharder() as sharder:
+ with mocked_http_conn() as mock_conn:
+ sharder._update_root_container(broker)
+ self.assertFalse(mock_conn.requests)
+
+ for state in ShardRange.STATES:
+ with annotate_failure(state):
+ check_already_reported_not_sent(state)
+
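The new test documents both halves of the reported flag's lifecycle on the shard side: update_state() clears it, since a state change makes the last report stale, and a range still marked reported produces no root-container update at all. In outline, with names as in the test:

    own = broker.get_own_shard_range()
    own.reported = True
    own.update_state(state, state_timestamp=next(self.ts_iter))
    assert not own.reported  # a state change invalidates the last report

    own.reported = True      # pretend the root has already been told
    broker.merge_shard_ranges([own])
    sharder._update_root_container(broker)  # sends no requests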
def test_update_root_container_all_ranges(self):
broker = self._make_broker()
other_shard_ranges = self._make_shard_ranges((('', 'h'), ('h', '')))
diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py
index 12849962b..9b862f5d3 100644
--- a/test/unit/proxy/controllers/test_obj.py
+++ b/test/unit/proxy/controllers/test_obj.py
@@ -51,7 +51,8 @@ from swift.common.storage_policy import POLICIES, ECDriverError, \
from test.unit import FakeRing, FakeMemcache, fake_http_connect, \
debug_logger, patch_policies, SlowBody, FakeStatus, \
DEFAULT_TEST_EC_TYPE, encode_frag_archive_bodies, make_ec_object_stub, \
- fake_ec_node_response, StubResponse, mocked_http_conn
+ fake_ec_node_response, StubResponse, mocked_http_conn, \
+ quiet_eventlet_exceptions
from test.unit.proxy.test_server import node_error_count
@@ -1617,7 +1618,8 @@ class TestReplicatedObjController(CommonObjectControllerMixin,
# to the next node rather than hang the request
headers = [{'X-Backend-Timestamp': 'not-a-timestamp'}, {}]
codes = [200, 200]
- with set_http_connect(*codes, headers=headers):
+ with quiet_eventlet_exceptions(), set_http_connect(
+ *codes, headers=headers):
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py
index 1d3257e34..0a4d9de77 100644
--- a/test/unit/proxy/test_server.py
+++ b/test/unit/proxy/test_server.py
@@ -7982,7 +7982,7 @@ class TestObjectDisconnectCleanup(unittest.TestCase):
continue
device_path = os.path.join(_testdir, dev)
for datadir in os.listdir(device_path):
- if 'object' not in datadir:
+ if any(p in datadir for p in ('account', 'container')):
continue
data_path = os.path.join(device_path, datadir)
rmtree(data_path, ignore_errors=True)