summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorKota Tsuyuzaki <tsuyuzaki.kota@lab.ntt.co.jp>2017-10-16 21:39:12 +0900
committerKota Tsuyuzaki <tsuyuzaki.kota@lab.ntt.co.jp>2018-04-27 15:53:57 +0900
commit636b922f3b2882f7dd6c656983d7862b274dcf98 (patch)
tree3b9a06a6e15b21ebe61a6514285842b2b3841289
parent260bd2601b231c86830daaedc72a979d77a14498 (diff)
downloadswift-636b922f3b2882f7dd6c656983d7862b274dcf98.tar.gz
Import swift3 into swift repo as s3api middleware
This attempts to import openstack/swift3 package into swift upstream repository, namespace. This is almost simple porting except following items. 1. Rename swift3 namespace to swift.common.middleware.s3api 1.1 Rename also some conflicted class names (e.g. Request/Response) 2. Port unittests to test/unit/s3api dir to be able to run on the gate. 3. Port functests to test/functional/s3api and setup in-process testing 4. Port docs to doc dir, then address the namespace change. 5. Use get_logger() instead of global logger instance 6. Avoid global conf instance Ex. fix various minor issue on those steps (e.g. packages, dependencies, deprecated things) The details and patch references in the work on feature/s3api are listed at https://trello.com/b/ZloaZ23t/s3api (completed board) Note that, because this is just a porting, no new feature is developed since the last swift3 release, and in the future work, Swift upstream may continue to work on remaining items for further improvements and the best compatibility of Amazon S3. Please read the new docs for your deployment and keep track to know what would be changed in the future releases. Change-Id: Ib803ea89cfee9a53c429606149159dd136c036fd Co-Authored-By: Thiago da Silva <thiago@redhat.com> Co-Authored-By: Tim Burke <tim.burke@gmail.com>
-rw-r--r--.zuul.yaml14
-rw-r--r--AUTHORS7
-rw-r--r--bindep.txt4
-rw-r--r--doc/s3api/conf/ceph-known-failures-keystone.yaml209
-rw-r--r--doc/s3api/conf/ceph-known-failures-tempauth.yaml187
-rw-r--r--doc/s3api/conf/ceph-s3.conf.in18
-rw-r--r--doc/s3api/conf/object-server.conf.in17
-rw-r--r--doc/s3api/rnc/access_control_policy.rnc7
-rw-r--r--doc/s3api/rnc/bucket_logging_status.rnc10
-rw-r--r--doc/s3api/rnc/common.rnc26
-rw-r--r--doc/s3api/rnc/complete_multipart_upload.rnc7
-rw-r--r--doc/s3api/rnc/complete_multipart_upload_result.rnc7
-rw-r--r--doc/s3api/rnc/copy_object_result.rnc5
-rw-r--r--doc/s3api/rnc/copy_part_result.rnc5
-rw-r--r--doc/s3api/rnc/create_bucket_configuration.rnc4
-rw-r--r--doc/s3api/rnc/delete.rnc8
-rw-r--r--doc/s3api/rnc/delete_result.rnc17
-rw-r--r--doc/s3api/rnc/error.rnc11
-rw-r--r--doc/s3api/rnc/initiate_multipart_upload_result.rnc6
-rw-r--r--doc/s3api/rnc/lifecycle_configuration.rnc20
-rw-r--r--doc/s3api/rnc/list_all_my_buckets_result.rnc12
-rw-r--r--doc/s3api/rnc/list_bucket_result.rnc33
-rw-r--r--doc/s3api/rnc/list_multipart_uploads_result.rnc26
-rw-r--r--doc/s3api/rnc/list_parts_result.rnc22
-rw-r--r--doc/s3api/rnc/list_versions_result.rnc37
-rw-r--r--doc/s3api/rnc/location_constraint.rnc1
-rw-r--r--doc/s3api/rnc/versioning_configuration.rnc5
-rw-r--r--doc/source/associated_projects.rst2
-rw-r--r--doc/source/middleware.rst89
-rw-r--r--etc/object-server.conf-sample2
-rw-r--r--etc/proxy-server.conf-sample139
-rw-r--r--requirements.txt2
-rw-r--r--setup.cfg2
-rw-r--r--swift/common/middleware/s3api/__init__.py0
-rw-r--r--swift/common/middleware/s3api/acl_handlers.py479
-rw-r--r--swift/common/middleware/s3api/acl_utils.py95
-rw-r--r--swift/common/middleware/s3api/controllers/__init__.py52
-rw-r--r--swift/common/middleware/s3api/controllers/acl.py130
-rw-r--r--swift/common/middleware/s3api/controllers/base.py100
-rw-r--r--swift/common/middleware/s3api/controllers/bucket.py251
-rw-r--r--swift/common/middleware/s3api/controllers/location.py42
-rw-r--r--swift/common/middleware/s3api/controllers/logging.py54
-rw-r--r--swift/common/middleware/s3api/controllers/multi_delete.py126
-rw-r--r--swift/common/middleware/s3api/controllers/multi_upload.py671
-rw-r--r--swift/common/middleware/s3api/controllers/obj.py150
-rw-r--r--swift/common/middleware/s3api/controllers/s3_acl.py67
-rw-r--r--swift/common/middleware/s3api/controllers/service.py68
-rw-r--r--swift/common/middleware/s3api/controllers/versioning.py53
-rw-r--r--swift/common/middleware/s3api/etree.py146
-rw-r--r--swift/common/middleware/s3api/exception.py36
-rw-r--r--swift/common/middleware/s3api/s3api.py280
-rw-r--r--swift/common/middleware/s3api/s3request.py1402
-rw-r--r--swift/common/middleware/s3api/s3response.py684
-rw-r--r--swift/common/middleware/s3api/s3token.py324
-rw-r--r--swift/common/middleware/s3api/schema/access_control_policy.rng16
-rw-r--r--swift/common/middleware/s3api/schema/bucket_logging_status.rng25
-rw-r--r--swift/common/middleware/s3api/schema/common.rng66
-rw-r--r--swift/common/middleware/s3api/schema/complete_multipart_upload.rng19
-rw-r--r--swift/common/middleware/s3api/schema/complete_multipart_upload_result.rng19
-rw-r--r--swift/common/middleware/s3api/schema/copy_object_result.rng13
-rw-r--r--swift/common/middleware/s3api/schema/copy_part_result.rng13
-rw-r--r--swift/common/middleware/s3api/schema/create_bucket_configuration.rng11
-rw-r--r--swift/common/middleware/s3api/schema/delete.rng28
-rw-r--r--swift/common/middleware/s3api/schema/delete_result.rng47
-rw-r--r--swift/common/middleware/s3api/schema/error.rng30
-rw-r--r--swift/common/middleware/s3api/schema/initiate_multipart_upload_result.rng16
-rw-r--r--swift/common/middleware/s3api/schema/lifecycle_configuration.rng56
-rw-r--r--swift/common/middleware/s3api/schema/list_all_my_buckets_result.rng23
-rw-r--r--swift/common/middleware/s3api/schema/list_bucket_result.rng93
-rw-r--r--swift/common/middleware/s3api/schema/list_multipart_uploads_result.rng73
-rw-r--r--swift/common/middleware/s3api/schema/list_parts_result.rng59
-rw-r--r--swift/common/middleware/s3api/schema/list_versions_result.rng104
-rw-r--r--swift/common/middleware/s3api/schema/location_constraint.rng8
-rw-r--r--swift/common/middleware/s3api/schema/versioning_configuration.rng25
-rw-r--r--swift/common/middleware/s3api/subresource.py563
-rw-r--r--swift/common/middleware/s3api/utils.py190
-rw-r--r--swift/common/middleware/tempauth.py4
-rw-r--r--test-requirements.txt6
-rw-r--r--test/functional/__init__.py51
-rw-r--r--test/functional/s3api/__init__.py61
-rw-r--r--test/functional/s3api/s3_test_client.py139
-rw-r--r--test/functional/s3api/test_acl.py156
-rw-r--r--test/functional/s3api/test_bucket.py487
-rw-r--r--test/functional/s3api/test_multi_delete.py248
-rw-r--r--test/functional/s3api/test_multi_upload.py849
-rw-r--r--test/functional/s3api/test_object.py873
-rw-r--r--test/functional/s3api/test_presigned.py237
-rw-r--r--test/functional/s3api/test_service.py100
-rw-r--r--test/functional/s3api/utils.py31
-rw-r--r--test/sample.conf5
-rw-r--r--test/unit/common/middleware/s3api/__init__.py163
-rw-r--r--test/unit/common/middleware/s3api/exceptions.py18
-rw-r--r--test/unit/common/middleware/s3api/helpers.py185
-rw-r--r--test/unit/common/middleware/s3api/test_acl.py230
-rw-r--r--test/unit/common/middleware/s3api/test_acl_handlers.py42
-rw-r--r--test/unit/common/middleware/s3api/test_acl_utils.py49
-rw-r--r--test/unit/common/middleware/s3api/test_bucket.py755
-rw-r--r--test/unit/common/middleware/s3api/test_cfg.py44
-rw-r--r--test/unit/common/middleware/s3api/test_etree.py73
-rw-r--r--test/unit/common/middleware/s3api/test_helpers.py69
-rw-r--r--test/unit/common/middleware/s3api/test_location.py51
-rw-r--r--test/unit/common/middleware/s3api/test_logging.py66
-rw-r--r--test/unit/common/middleware/s3api/test_multi_delete.py284
-rw-r--r--test/unit/common/middleware/s3api/test_multi_upload.py1742
-rw-r--r--test/unit/common/middleware/s3api/test_obj.py1010
-rw-r--r--test/unit/common/middleware/s3api/test_s3_acl.py540
-rw-r--r--test/unit/common/middleware/s3api/test_s3api.py1049
-rw-r--r--test/unit/common/middleware/s3api/test_s3request.py765
-rw-r--r--test/unit/common/middleware/s3api/test_s3response.py80
-rw-r--r--test/unit/common/middleware/s3api/test_s3token.py821
-rw-r--r--test/unit/common/middleware/s3api/test_service.py235
-rw-r--r--test/unit/common/middleware/s3api/test_subresource.py367
-rw-r--r--test/unit/common/middleware/s3api/test_utils.py133
-rw-r--r--test/unit/common/middleware/s3api/test_versioning.py56
-rw-r--r--test/unit/common/middleware/test_tempauth.py6
-rw-r--r--tox.ini5
116 files changed, 19747 insertions, 6 deletions
diff --git a/.zuul.yaml b/.zuul.yaml
index 121e2b5f3..7ccfa574d 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -106,6 +106,18 @@
tox_envlist: func-domain-remap-staticweb
- job:
+ name: swift-tox-func-s3api
+ parent: swift-tox-base
+ description: |
+ Run functional tests for swift under cPython version 2.7.
+
+ Uses tox with the ``func-s3api`` environment.
+ It sets TMPDIR to an XFS mount point created via
+ tools/test-setup.sh.
+ vars:
+ tox_envlist: func-s3api
+
+- job:
name: swift-probetests-centos-7
parent: unittests
nodeset: centos-7
@@ -128,6 +140,7 @@
- swift-tox-func-encryption
- swift-tox-func-domain-remap-staticweb
- swift-tox-func-ec
+ - swift-tox-func-s3api
- swift-probetests-centos-7
gate:
jobs:
@@ -137,6 +150,7 @@
- swift-tox-func-encryption
- swift-tox-func-domain-remap-staticweb
- swift-tox-func-ec
+ - swift-tox-func-s3api
experimental:
jobs:
- swift-tox-py27-centos-7
diff --git a/AUTHORS b/AUTHORS
index 0c1b0d87e..ca9c59c79 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -72,6 +72,7 @@ Brian Ober (bober@us.ibm.com)
Brian Reitz (brian.reitz@oracle.com)
Bryan Keller (kellerbr@us.ibm.com)
Béla Vancsics (vancsics@inf.u-szeged.hu)
+Виль Суркин (vills@vills-pro.local)
Caleb Tennis (caleb.tennis@gmail.com)
Cao Xuan Hoang (hoangcx@vn.fujitsu.com)
Carlos Cavanna (ccavanna@ca.ibm.com)
@@ -111,6 +112,7 @@ Dan Prince (dprince@redhat.com)
dangming (dangming@unitedstack.com)
Daniele Valeriani (daniele@dvaleriani.net)
Darrell Bishop (darrell@swiftstack.com)
+Darryl Tam (dtam@swiftstack.com)
David Goetz (david.goetz@rackspace.com)
David Hadas (davidh@il.ibm.com)
David Liu (david.liu@cn.ibm.com)
@@ -253,6 +255,7 @@ Martin Geisler (martin@geisler.net)
Martin Kletzander (mkletzan@redhat.com)
Maru Newby (mnewby@internap.com)
Mathias Bjoerkqvist (mbj@zurich.ibm.com)
+Masaki Tsukuda (tsukuda.masaki@po.ntts.co.jp)
Matt Kassawara (mkassawara@gmail.com)
Matt Riedemann (mriedem@us.ibm.com)
Matthew Oliver (matt@oliver.net.au)
@@ -274,6 +277,8 @@ Nakagawa Masaaki (nakagawamsa@nttdata.co.jp)
Nakul Dahiwade (nakul.dahiwade@intel.com)
Nam Nguyen Hoai (namnh@vn.fujitsu.com)
Nandini Tata (nandini.tata@intel.com)
+Naoto Nishizono (nishizono.naoto@po.ntts.co.jp)
+Nassim Babaci (nassim.babaci@cloudwatt.com)
Nathan Kinder (nkinder@redhat.com)
Nelson Almeida (nelsonmarcos@gmail.com)
Newptone (xingchao@unitedstack.com)
@@ -365,11 +370,13 @@ Victor Lowther (victor.lowther@gmail.com)
Victor Rodionov (victor.rodionov@nexenta.com)
Victor Stinner (vstinner@redhat.com)
Viktor Varga (vvarga@inf.u-szeged.hu)
+Vil Surkin (mail@vills.me)
Vincent Untz (vuntz@suse.com)
Vladimir Vechkanov (vvechkanov@mirantis.com)
Vu Cong Tuan (tuanvc@vn.fujitsu.com)
vxlinux (yan.wei7@zte.com.cn)
wanghongtaozz (wanghongtaozz@inspur.com)
+Wyllys Ingersoll (wyllys.ingersoll@evault.com)
Wu Wenxiang (wu.wenxiang@99cloud.net)
xhancar (pavel.hancar@gmail.com)
XieYingYun (smokony@sina.com)
diff --git a/bindep.txt b/bindep.txt
index 1d65d3ff8..fbc75ccb0 100644
--- a/bindep.txt
+++ b/bindep.txt
@@ -10,6 +10,10 @@ liberasurecode-dev [platform:dpkg]
liberasurecode-devel [platform:rpm !platform:centos]
libffi-dev [platform:dpkg]
libffi-devel [platform:rpm]
+libxml2-dev [platform:dpkg]
+libxml2-devel [platform:rpm]
+libxslt-devel [platform:rpm]
+libxslt1-dev [platform:dpkg]
memcached
python-dev [platform:dpkg]
python-devel [platform:rpm]
diff --git a/doc/s3api/conf/ceph-known-failures-keystone.yaml b/doc/s3api/conf/ceph-known-failures-keystone.yaml
new file mode 100644
index 000000000..5d36f5495
--- /dev/null
+++ b/doc/s3api/conf/ceph-known-failures-keystone.yaml
@@ -0,0 +1,209 @@
+ceph_s3:
+ <nose.suite.ContextSuite context=s3tests.functional>:teardown: {status: KNOWN}
+ <nose.suite.ContextSuite context=test_routing_generator>:setup: {status: KNOWN}
+ s3tests.functional.test_headers.test_bucket_create_bad_authorization_invalid_aws2: {status: KNOWN}
+ s3tests.functional.test_headers.test_bucket_create_bad_authorization_none: {status: KNOWN}
+ s3tests.functional.test_headers.test_object_create_bad_authorization_invalid_aws2: {status: KNOWN}
+ s3tests.functional.test_headers.test_object_create_bad_authorization_none: {status: KNOWN}
+ s3tests.functional.test_s3.test_100_continue: {status: KNOWN}
+ s3tests.functional.test_s3.test_atomic_conditional_write_1mb: {status: KNOWN}
+ s3tests.functional.test_s3.test_atomic_dual_conditional_write_1mb: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_default: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_email: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_email_notexist: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_nonexist_user: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_userid_fullcontrol: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_userid_read: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_userid_readacp: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_userid_write: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_userid_writeacp: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_no_grants: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acls_changes_persistent: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_xml_fullcontrol: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_xml_read: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_xml_readacp: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_xml_write: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_xml_writeacp: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_create_exists: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_header_acl_grants: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_list_objects_anonymous: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_list_objects_anonymous_fail: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_recreate_not_overriding: {status: KNOWN}
+ s3tests.functional.test_s3.test_cors_origin_response: {status: KNOWN}
+ s3tests.functional.test_s3.test_cors_origin_wildcard: {status: KNOWN}
+ s3tests.functional.test_s3.test_list_buckets_anonymous: {status: KNOWN}
+ s3tests.functional.test_s3.test_list_buckets_invalid_auth: {status: KNOWN}
+ s3tests.functional.test_s3.test_logging_toggle: {status: KNOWN}
+ s3tests.functional.test_s3.test_multipart_resend_first_finishes_last: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_acl_full_control_verify_owner: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_acl_xml: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_acl_xml_read: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_acl_xml_readacp: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_acl_xml_write: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_acl_xml_writeacp: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_canned_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_not_owned_object_bucket: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_replacing_metadata: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_giveaway: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_header_acl_grants: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get_bucket_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get_bucket_gone: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get_object_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get_object_gone: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_put: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_put_write_access: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_set_valid_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_anonymous_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_authenticated_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_authenticated_request_bad_access_key: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_case_insensitive_condition_fields: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_condition_is_case_sensitive: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_escaped_field_values: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_expired_policy: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_expires_is_case_sensitive: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_ignored_header: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_access_key: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_content_length_argument: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_date_format: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_request_field_value: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_signature: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_conditions_list: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_content_length_argument: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_expires_condition: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_policy_condition: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_signature: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_no_key_specified: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_request_missing_policy_specified_field: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_set_invalid_success_code: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_set_key_from_filename: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_set_success_code: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_success_redirect_action: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_upload_larger_than_chunk: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_upload_size_below_minimum: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_upload_size_limit_exceeded: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_user_specified_header: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifmatch_failed: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifmatch_good: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifmatch_nonexisted_failed: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifmatch_overwrite_existed_good: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifnonmatch_failed: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifnonmatch_good: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifnonmatch_nonexisted_good: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifnonmatch_overwrite_existed_failed: {status: KNOWN}
+ s3tests.functional.test_s3.test_set_cors: {status: KNOWN}
+ s3tests.functional.test_s3.test_stress_bucket_acls_changes: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioned_concurrent_object_create_and_remove: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioned_concurrent_object_create_concurrent_remove: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioned_object_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_bucket_create_suspend: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_copy_obj_version: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_multi_object_delete: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_multi_object_delete_with_marker: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_multi_object_delete_with_marker_create: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_overwrite_multipart: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_read_remove: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_read_remove_head: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_versions_remove_all: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_versions_remove_special_names: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_list_marker: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_plain_null_version_overwrite: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_plain_null_version_overwrite_suspended: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_plain_null_version_removal: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_suspend_versions: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_suspend_versions_simple: {status: KNOWN}
+ s3tests.functional.test_s3_website.check_can_test_website: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_bucket_private_redirectall_base: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_bucket_private_redirectall_path: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_bucket_private_redirectall_path_upgrade: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_nonexistant_bucket_rgw: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_nonexistant_bucket_s3: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_empty: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_empty_blockederrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_empty_gooderrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_empty_missingerrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_private_index: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_private_index_blockederrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_private_index_gooderrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_private_index_missingerrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_public_index: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_empty: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_empty_blockederrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_empty_gooderrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_empty_missingerrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_private_index: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_private_index_blockederrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_private_index_gooderrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_private_index_missingerrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_public_index: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_nonwebsite: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_private_abs: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_private_relative: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_public_abs: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_public_relative: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_configure_recreate: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_list_return_data_versioning: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy_another_bucket: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy_different_tenant: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy_set_condition_operator_end_with_IfExists: {status: KNOWN}
+ s3tests.functional.test_s3.test_delete_tags_obj_public: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_invalid_md5: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_method_head: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_multipart_bad_download: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_multipart_invalid_chunks_1: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_multipart_invalid_chunks_2: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_no_key: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_no_md5: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_other_key: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_post_object_authenticated_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_present: {status: KNOWN}
+ s3tests.functional.test_s3.test_get_obj_head_tagging: {status: KNOWN}
+ s3tests.functional.test_s3.test_get_obj_tagging: {status: KNOWN}
+ s3tests.functional.test_s3.test_get_tags_acl_public: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_deletemarker_expiration: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_expiration: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_expiration_date: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_get: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_get_no_id: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_id_too_long: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_multipart_expiration: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_noncur_expiration: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_rules_conflicted: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_same_id: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_date: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_deletemarker: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_empty_filter: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_filter: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_multipart: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_noncurrent: {status: KNOWN}
+ s3tests.functional.test_s3.test_multipart_copy_invalid_range: {status: KNOWN}
+ s3tests.functional.test_s3.test_multipart_copy_versioned: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_versioned_bucket: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_versioning_multipart_upload: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_empty_conditions: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_tags_anonymous_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_tags_authenticated_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_delete_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_excess_key_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_excess_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_excess_val_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_max_kvsize_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_max_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_modify_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_obj_with_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_tags_acl_public: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_method_head: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_multipart_invalid_chunks_1: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_multipart_invalid_chunks_2: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_multipart_upload: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_post_object_authenticated_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_present: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_read_declare: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_transfer_13b: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_transfer_1MB: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_transfer_1b: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_transfer_1kb: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioned_object_acl_no_version_specified: {status: KNOWN}
diff --git a/doc/s3api/conf/ceph-known-failures-tempauth.yaml b/doc/s3api/conf/ceph-known-failures-tempauth.yaml
new file mode 100644
index 000000000..fa070040b
--- /dev/null
+++ b/doc/s3api/conf/ceph-known-failures-tempauth.yaml
@@ -0,0 +1,187 @@
+ceph_s3:
+ <nose.suite.ContextSuite context=s3tests.functional>:teardown: {status: KNOWN}
+ <nose.suite.ContextSuite context=test_routing_generator>:setup: {status: KNOWN}
+ s3tests.functional.test_headers.test_bucket_create_bad_authorization_invalid_aws2: {status: KNOWN}
+ s3tests.functional.test_headers.test_bucket_create_bad_authorization_none: {status: KNOWN}
+ s3tests.functional.test_headers.test_object_create_bad_authorization_invalid_aws2: {status: KNOWN}
+ s3tests.functional.test_headers.test_object_create_bad_authorization_none: {status: KNOWN}
+ s3tests.functional.test_s3.test_100_continue: {status: KNOWN}
+ s3tests.functional.test_s3.test_atomic_conditional_write_1mb: {status: KNOWN}
+ s3tests.functional.test_s3.test_atomic_dual_conditional_write_1mb: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_email: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_email_notexist: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_grant_nonexist_user: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_acl_no_grants: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_create_exists: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_header_acl_grants: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_list_objects_anonymous: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_list_objects_anonymous_fail: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_recreate_not_overriding: {status: KNOWN}
+ s3tests.functional.test_s3.test_cors_origin_response: {status: KNOWN}
+ s3tests.functional.test_s3.test_cors_origin_wildcard: {status: KNOWN}
+ s3tests.functional.test_s3.test_list_buckets_anonymous: {status: KNOWN}
+ s3tests.functional.test_s3.test_list_buckets_invalid_auth: {status: KNOWN}
+ s3tests.functional.test_s3.test_logging_toggle: {status: KNOWN}
+ s3tests.functional.test_s3.test_multipart_resend_first_finishes_last: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_canned_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_replacing_metadata: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_header_acl_grants: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get_bucket_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get_bucket_gone: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get_object_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_get_object_gone: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_put: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_raw_put_write_access: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_anonymous_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_authenticated_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_authenticated_request_bad_access_key: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_case_insensitive_condition_fields: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_condition_is_case_sensitive: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_escaped_field_values: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_expired_policy: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_expires_is_case_sensitive: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_ignored_header: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_access_key: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_content_length_argument: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_date_format: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_request_field_value: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_invalid_signature: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_conditions_list: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_content_length_argument: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_expires_condition: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_policy_condition: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_missing_signature: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_no_key_specified: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_request_missing_policy_specified_field: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_set_invalid_success_code: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_set_key_from_filename: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_set_success_code: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_success_redirect_action: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_upload_larger_than_chunk: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_upload_size_below_minimum: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_upload_size_limit_exceeded: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_user_specified_header: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifmatch_failed: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifmatch_good: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifmatch_nonexisted_failed: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifmatch_overwrite_existed_good: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifnonmatch_failed: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifnonmatch_good: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifnonmatch_nonexisted_good: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_object_ifnonmatch_overwrite_existed_failed: {status: KNOWN}
+ s3tests.functional.test_s3.test_set_cors: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioned_concurrent_object_create_and_remove: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioned_concurrent_object_create_concurrent_remove: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioned_object_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_bucket_create_suspend: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_copy_obj_version: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_multi_object_delete: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_multi_object_delete_with_marker: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_multi_object_delete_with_marker_create: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_overwrite_multipart: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_read_remove: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_read_remove_head: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_versions_remove_all: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_create_versions_remove_special_names: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_list_marker: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_plain_null_version_overwrite: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_plain_null_version_overwrite_suspended: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_plain_null_version_removal: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_suspend_versions: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioning_obj_suspend_versions_simple: {status: KNOWN}
+ s3tests.functional.test_s3_website.check_can_test_website: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_bucket_private_redirectall_base: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_bucket_private_redirectall_path: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_bucket_private_redirectall_path_upgrade: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_nonexistant_bucket_rgw: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_nonexistant_bucket_s3: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_empty: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_empty_blockederrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_empty_gooderrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_empty_missingerrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_private_index: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_private_index_blockederrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_private_index_gooderrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_private_index_missingerrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_private_bucket_list_public_index: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_empty: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_empty_blockederrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_empty_gooderrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_empty_missingerrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_private_index: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_private_index_blockederrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_private_index_gooderrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_private_index_missingerrordoc: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_public_bucket_list_public_index: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_nonwebsite: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_private_abs: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_private_relative: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_public_abs: {status: KNOWN}
+ s3tests.functional.test_s3_website.test_website_xredirect_public_relative: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_configure_recreate: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_list_return_data_versioning: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy_acl: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy_another_bucket: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy_different_tenant: {status: KNOWN}
+ s3tests.functional.test_s3.test_bucket_policy_set_condition_operator_end_with_IfExists: {status: KNOWN}
+ s3tests.functional.test_s3.test_delete_tags_obj_public: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_invalid_md5: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_method_head: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_multipart_bad_download: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_multipart_invalid_chunks_1: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_multipart_invalid_chunks_2: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_no_key: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_no_md5: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_other_key: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_post_object_authenticated_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_encryption_sse_c_present: {status: KNOWN}
+ s3tests.functional.test_s3.test_get_obj_head_tagging: {status: KNOWN}
+ s3tests.functional.test_s3.test_get_obj_tagging: {status: KNOWN}
+ s3tests.functional.test_s3.test_get_tags_acl_public: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_deletemarker_expiration: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_expiration: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_expiration_date: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_get: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_get_no_id: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_id_too_long: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_multipart_expiration: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_noncur_expiration: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_rules_conflicted: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_same_id: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_date: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_deletemarker: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_empty_filter: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_filter: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_multipart: {status: KNOWN}
+ s3tests.functional.test_s3.test_lifecycle_set_noncurrent: {status: KNOWN}
+ s3tests.functional.test_s3.test_multipart_copy_invalid_range: {status: KNOWN}
+ s3tests.functional.test_s3.test_multipart_copy_versioned: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_versioned_bucket: {status: KNOWN}
+ s3tests.functional.test_s3.test_object_copy_versioning_multipart_upload: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_empty_conditions: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_tags_anonymous_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_post_object_tags_authenticated_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_delete_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_excess_key_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_excess_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_excess_val_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_max_kvsize_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_max_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_modify_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_obj_with_tags: {status: KNOWN}
+ s3tests.functional.test_s3.test_put_tags_acl_public: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_method_head: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_multipart_invalid_chunks_1: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_multipart_invalid_chunks_2: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_multipart_upload: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_post_object_authenticated_request: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_present: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_read_declare: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_transfer_13b: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_transfer_1MB: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_transfer_1b: {status: KNOWN}
+ s3tests.functional.test_s3.test_sse_kms_transfer_1kb: {status: KNOWN}
+ s3tests.functional.test_s3.test_versioned_object_acl_no_version_specified: {status: KNOWN}
diff --git a/doc/s3api/conf/ceph-s3.conf.in b/doc/s3api/conf/ceph-s3.conf.in
new file mode 100644
index 000000000..65de8053c
--- /dev/null
+++ b/doc/s3api/conf/ceph-s3.conf.in
@@ -0,0 +1,18 @@
+[DEFAULT]
+host = localhost
+port = 8080
+is_secure = no
+
+[s3 main]
+user_id = %ADMIN_ACCESS_KEY%
+display_name = %ADMIN_ACCESS_KEY%
+email = %ADMIN_ACCESS_KEY%
+access_key = %ADMIN_ACCESS_KEY%
+secret_key = %ADMIN_SECRET_KEY%
+
+[s3 alt]
+user_id = %TESTER_ACCESS_KEY%
+display_name = %TESTER_ACCESS_KEY%
+email = %TESTER_ACCESS_KEY%
+access_key = %TESTER_ACCESS_KEY%
+secret_key = %TESTER_SECRET_KEY%
diff --git a/doc/s3api/conf/object-server.conf.in b/doc/s3api/conf/object-server.conf.in
new file mode 100644
index 000000000..9cf7dd58d
--- /dev/null
+++ b/doc/s3api/conf/object-server.conf.in
@@ -0,0 +1,17 @@
+[DEFAULT]
+user = %USER%
+bind_port = 6000
+swift_dir = %TEST_DIR%/etc
+devices = %TEST_DIR%
+mount_check = false
+workers = 1
+log_level = DEBUG
+
+[pipeline:main]
+pipeline = object-server
+
+[app:object-server]
+use = egg:swift#object
+allowed_headers = Cache-Control, Content-Disposition, Content-Encoding,
+ Content-Language, Expires, X-Delete-At, X-Object-Manifest, X-Robots-Tag,
+ X-Static-Large-Object
diff --git a/doc/s3api/rnc/access_control_policy.rnc b/doc/s3api/rnc/access_control_policy.rnc
new file mode 100644
index 000000000..c857359e0
--- /dev/null
+++ b/doc/s3api/rnc/access_control_policy.rnc
@@ -0,0 +1,7 @@
+include "common.rnc"
+
+start =
+ element AccessControlPolicy {
+ element Owner { CanonicalUser } &
+ element AccessControlList { AccessControlList }
+ }
diff --git a/doc/s3api/rnc/bucket_logging_status.rnc b/doc/s3api/rnc/bucket_logging_status.rnc
new file mode 100644
index 000000000..a7d9a1eff
--- /dev/null
+++ b/doc/s3api/rnc/bucket_logging_status.rnc
@@ -0,0 +1,10 @@
+include "common.rnc"
+
+start =
+ element BucketLoggingStatus {
+ element LoggingEnabled {
+ element TargetBucket { xsd:string } &
+ element TargetPrefix { xsd:string } &
+ element TargetGrants { AccessControlList }?
+ }?
+ }
diff --git a/doc/s3api/rnc/common.rnc b/doc/s3api/rnc/common.rnc
new file mode 100644
index 000000000..79dddbb55
--- /dev/null
+++ b/doc/s3api/rnc/common.rnc
@@ -0,0 +1,26 @@
+namespace xsi = "http://www.w3.org/2001/XMLSchema-instance"
+
+CanonicalUser =
+ element ID { xsd:string } &
+ element DisplayName { xsd:string }?
+
+StorageClass = "STANDARD" | "REDUCED_REDUNDANCY" | "GLACIER" | "UNKNOWN"
+
+AccessControlList =
+ element Grant {
+ element Grantee {
+ (
+ attribute xsi:type { "AmazonCustomerByEmail" },
+ element EmailAddress { xsd:string }
+ ) | (
+ attribute xsi:type { "CanonicalUser" },
+ CanonicalUser
+ ) | (
+ attribute xsi:type { "Group" },
+ element URI { xsd:string }
+ )
+ } &
+ element Permission {
+ "READ" | "WRITE" | "READ_ACP" | "WRITE_ACP" | "FULL_CONTROL"
+ }
+ }*
diff --git a/doc/s3api/rnc/complete_multipart_upload.rnc b/doc/s3api/rnc/complete_multipart_upload.rnc
new file mode 100644
index 000000000..dee60e544
--- /dev/null
+++ b/doc/s3api/rnc/complete_multipart_upload.rnc
@@ -0,0 +1,7 @@
+start =
+ element CompleteMultipartUpload {
+ element Part {
+ element PartNumber { xsd:int } &
+ element ETag { xsd:string }
+ }+
+ }
diff --git a/doc/s3api/rnc/complete_multipart_upload_result.rnc b/doc/s3api/rnc/complete_multipart_upload_result.rnc
new file mode 100644
index 000000000..6dd9cbeb9
--- /dev/null
+++ b/doc/s3api/rnc/complete_multipart_upload_result.rnc
@@ -0,0 +1,7 @@
+start =
+ element CompleteMultipartUploadResult {
+ element Location { xsd:anyURI },
+ element Bucket { xsd:string },
+ element Key { xsd:string },
+ element ETag { xsd:string }
+ }
diff --git a/doc/s3api/rnc/copy_object_result.rnc b/doc/s3api/rnc/copy_object_result.rnc
new file mode 100644
index 000000000..bf96a8a91
--- /dev/null
+++ b/doc/s3api/rnc/copy_object_result.rnc
@@ -0,0 +1,5 @@
+start =
+ element CopyObjectResult {
+ element LastModified { xsd:dateTime },
+ element ETag { xsd:string }
+ }
diff --git a/doc/s3api/rnc/copy_part_result.rnc b/doc/s3api/rnc/copy_part_result.rnc
new file mode 100644
index 000000000..a7d795651
--- /dev/null
+++ b/doc/s3api/rnc/copy_part_result.rnc
@@ -0,0 +1,5 @@
+start =
+ element CopyPartResult {
+ element LastModified { xsd:dateTime },
+ element ETag { xsd:string }
+ }
diff --git a/doc/s3api/rnc/create_bucket_configuration.rnc b/doc/s3api/rnc/create_bucket_configuration.rnc
new file mode 100644
index 000000000..e366d72ed
--- /dev/null
+++ b/doc/s3api/rnc/create_bucket_configuration.rnc
@@ -0,0 +1,4 @@
+start =
+ element * {
+ element LocationConstraint { xsd:string }
+ }
diff --git a/doc/s3api/rnc/delete.rnc b/doc/s3api/rnc/delete.rnc
new file mode 100644
index 000000000..95214f01d
--- /dev/null
+++ b/doc/s3api/rnc/delete.rnc
@@ -0,0 +1,8 @@
+start =
+ element Delete {
+ element Quiet { xsd:boolean }? &
+ element Object {
+ element Key { xsd:string } &
+ element VersionId { xsd:string }?
+ }+
+ }
diff --git a/doc/s3api/rnc/delete_result.rnc b/doc/s3api/rnc/delete_result.rnc
new file mode 100644
index 000000000..3a63bf78a
--- /dev/null
+++ b/doc/s3api/rnc/delete_result.rnc
@@ -0,0 +1,17 @@
+start =
+ element DeleteResult {
+ (
+ element Deleted {
+ element Key { xsd:string },
+ element VersionId { xsd:string }?,
+ element DeleteMarker { xsd:boolean }?,
+ element DeleteMarkerVersionId { xsd:string }?
+ } |
+ element Error {
+ element Key { xsd:string },
+ element VersionId { xsd:string }?,
+ element Code { xsd:string },
+ element Message { xsd:string }
+ }
+ )*
+ }
diff --git a/doc/s3api/rnc/error.rnc b/doc/s3api/rnc/error.rnc
new file mode 100644
index 000000000..0e352c71a
--- /dev/null
+++ b/doc/s3api/rnc/error.rnc
@@ -0,0 +1,11 @@
+start =
+ element Error {
+ element Code { xsd:string },
+ element Message { xsd:string },
+ DebugInfo*
+ }
+
+DebugInfo =
+ element * {
+ (attribute * { text } | text | DebugInfo)*
+ }
diff --git a/doc/s3api/rnc/initiate_multipart_upload_result.rnc b/doc/s3api/rnc/initiate_multipart_upload_result.rnc
new file mode 100644
index 000000000..8830121f9
--- /dev/null
+++ b/doc/s3api/rnc/initiate_multipart_upload_result.rnc
@@ -0,0 +1,6 @@
+start =
+ element InitiateMultipartUploadResult {
+ element Bucket { xsd:string },
+ element Key { xsd:string },
+ element UploadId { xsd:string }
+ }
diff --git a/doc/s3api/rnc/lifecycle_configuration.rnc b/doc/s3api/rnc/lifecycle_configuration.rnc
new file mode 100644
index 000000000..b21fc07b6
--- /dev/null
+++ b/doc/s3api/rnc/lifecycle_configuration.rnc
@@ -0,0 +1,20 @@
+include "common.rnc"
+
+start =
+ element LifecycleConfiguration {
+ element Rule {
+ element ID { xsd:string }? &
+ element Prefix { xsd:string } &
+ element Status { "Enabled" | "Disabled" } &
+ element Transition { Transition }? &
+ element Expiration { Expiration }?
+ }+
+ }
+
+Expiration =
+ element Days { xsd:int } |
+ element Date { xsd:dateTime }
+
+Transition =
+ Expiration &
+ element StorageClass { StorageClass }
diff --git a/doc/s3api/rnc/list_all_my_buckets_result.rnc b/doc/s3api/rnc/list_all_my_buckets_result.rnc
new file mode 100644
index 000000000..220a34aa9
--- /dev/null
+++ b/doc/s3api/rnc/list_all_my_buckets_result.rnc
@@ -0,0 +1,12 @@
+include "common.rnc"
+
+start =
+ element ListAllMyBucketsResult {
+ element Owner { CanonicalUser },
+ element Buckets {
+ element Bucket {
+ element Name { xsd:string },
+ element CreationDate { xsd:dateTime }
+ }*
+ }
+ }
diff --git a/doc/s3api/rnc/list_bucket_result.rnc b/doc/s3api/rnc/list_bucket_result.rnc
new file mode 100644
index 000000000..e7f572b7b
--- /dev/null
+++ b/doc/s3api/rnc/list_bucket_result.rnc
@@ -0,0 +1,33 @@
+include "common.rnc"
+
+start =
+ element ListBucketResult {
+ element Name { xsd:string },
+ element Prefix { xsd:string },
+ (
+ (
+ element Marker { xsd:string },
+ element NextMarker { xsd:string }?
+ ) | (
+ element NextContinuationToken { xsd:string }?,
+ element ContinuationToken { xsd:string }?,
+ element StartAfter { xsd:string }?,
+ element KeyCount { xsd:int }
+ )
+ ),
+ element MaxKeys { xsd:int },
+ element EncodingType { xsd:string }?,
+ element Delimiter { xsd:string }?,
+ element IsTruncated { xsd:boolean },
+ element Contents {
+ element Key { xsd:string },
+ element LastModified { xsd:dateTime },
+ element ETag { xsd:string },
+ element Size { xsd:long },
+ element Owner { CanonicalUser }?,
+ element StorageClass { StorageClass }
+ }*,
+ element CommonPrefixes {
+ element Prefix { xsd:string }
+ }*
+ }
diff --git a/doc/s3api/rnc/list_multipart_uploads_result.rnc b/doc/s3api/rnc/list_multipart_uploads_result.rnc
new file mode 100644
index 000000000..6ac1e1237
--- /dev/null
+++ b/doc/s3api/rnc/list_multipart_uploads_result.rnc
@@ -0,0 +1,26 @@
+include "common.rnc"
+
+start =
+ element ListMultipartUploadsResult {
+ element Bucket { xsd:string },
+ element KeyMarker { xsd:string },
+ element UploadIdMarker { xsd:string },
+ element NextKeyMarker { xsd:string },
+ element NextUploadIdMarker { xsd:string },
+ element Delimiter { xsd:string }?,
+ element Prefix { xsd:string }?,
+ element MaxUploads { xsd:int },
+ element EncodingType { xsd:string }?,
+ element IsTruncated { xsd:boolean },
+ element Upload {
+ element Key { xsd:string },
+ element UploadId { xsd:string },
+ element Initiator { CanonicalUser },
+ element Owner { CanonicalUser },
+ element StorageClass { StorageClass },
+ element Initiated { xsd:dateTime }
+ }*,
+ element CommonPrefixes {
+ element Prefix { xsd:string }
+ }*
+ }
diff --git a/doc/s3api/rnc/list_parts_result.rnc b/doc/s3api/rnc/list_parts_result.rnc
new file mode 100644
index 000000000..214331542
--- /dev/null
+++ b/doc/s3api/rnc/list_parts_result.rnc
@@ -0,0 +1,22 @@
+include "common.rnc"
+
+start =
+ element ListPartsResult {
+ element Bucket { xsd:string },
+ element Key { xsd:string },
+ element UploadId { xsd:string },
+ element Initiator { CanonicalUser },
+ element Owner { CanonicalUser },
+ element StorageClass { StorageClass },
+ element PartNumberMarker { xsd:int },
+ element NextPartNumberMarker { xsd:int },
+ element MaxParts { xsd:int },
+ element EncodingType { xsd:string }?,
+ element IsTruncated { xsd:boolean },
+ element Part {
+ element PartNumber { xsd:int },
+ element LastModified { xsd:dateTime },
+ element ETag { xsd:string },
+ element Size { xsd:long }
+ }*
+ }
diff --git a/doc/s3api/rnc/list_versions_result.rnc b/doc/s3api/rnc/list_versions_result.rnc
new file mode 100644
index 000000000..969073f3b
--- /dev/null
+++ b/doc/s3api/rnc/list_versions_result.rnc
@@ -0,0 +1,37 @@
+include "common.rnc"
+
+start =
+ element ListVersionsResult {
+ element Name { xsd:string },
+ element Prefix { xsd:string },
+ element KeyMarker { xsd:string },
+ element VersionIdMarker { xsd:string },
+ element NextKeyMarker { xsd:string }?,
+ element NextVersionIdMarker { xsd:string }?,
+ element MaxKeys { xsd:int },
+ element EncodingType { xsd:string }?,
+ element Delimiter { xsd:string }?,
+ element IsTruncated { xsd:boolean },
+ (
+ element Version {
+ element Key { xsd:string },
+ element VersionId { xsd:string },
+ element IsLatest { xsd:boolean },
+ element LastModified { xsd:dateTime },
+ element ETag { xsd:string },
+ element Size { xsd:long },
+ element Owner { CanonicalUser }?,
+ element StorageClass { StorageClass }
+ } |
+ element DeleteMarker {
+ element Key { xsd:string },
+ element VersionId { xsd:string },
+ element IsLatest { xsd:boolean },
+ element LastModified { xsd:dateTime },
+ element Owner { CanonicalUser }?
+ }
+ )*,
+ element CommonPrefixes {
+ element Prefix { xsd:string }
+ }*
+ }
diff --git a/doc/s3api/rnc/location_constraint.rnc b/doc/s3api/rnc/location_constraint.rnc
new file mode 100644
index 000000000..829176ff9
--- /dev/null
+++ b/doc/s3api/rnc/location_constraint.rnc
@@ -0,0 +1 @@
+start = element LocationConstraint { xsd:string }
diff --git a/doc/s3api/rnc/versioning_configuration.rnc b/doc/s3api/rnc/versioning_configuration.rnc
new file mode 100644
index 000000000..87e5d15a9
--- /dev/null
+++ b/doc/s3api/rnc/versioning_configuration.rnc
@@ -0,0 +1,5 @@
+start =
+ element VersioningConfiguration {
+ element Status { "Enabled" | "Suspended" }? &
+ element MfaDelete { "Enabled" | "Disabled" }?
+ }
diff --git a/doc/source/associated_projects.rst b/doc/source/associated_projects.rst
index 5635d6ba3..4b165ed5b 100644
--- a/doc/source/associated_projects.rst
+++ b/doc/source/associated_projects.rst
@@ -107,7 +107,6 @@ Alternative API
* `ProxyFS <https://github.com/swiftstack/ProxyFS>`_ - Integrated file and
object access for Swift object storage
-* `Swift3 <https://github.com/openstack/swift3>`_ - Amazon S3 API emulation.
* `SwiftHLM <https://github.com/ibm-research/SwiftHLM>`_ - a middleware for
using OpenStack Swift with tape and other high latency media storage
backends.
@@ -176,3 +175,4 @@ Other
web browser
* `swiftbackmeup <https://github.com/redhat-cip/swiftbackmeup>`_ -
Utility that allows one to create backups and upload them to OpenStack Swift
+* `s3compat <https://github.com/swiftstack/s3compat>`_ - S3 API compatibility checker
diff --git a/doc/source/middleware.rst b/doc/source/middleware.rst
index faeb93b67..eeb8e988c 100644
--- a/doc/source/middleware.rst
+++ b/doc/source/middleware.rst
@@ -11,6 +11,95 @@ Account Quotas
:members:
:show-inheritance:
+.. _s3api:
+
+AWS S3 API
+==========
+
+.. automodule:: swift.common.middleware.s3api.s3api
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.s3token
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.s3request
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.s3response
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.exception
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.etree
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.utils
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.subresource
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.acl_handlers
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.acl_utils
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.base
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.service
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.bucket
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.obj
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.acl
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.s3_acl
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.multi_upload
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.multi_delete
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.versioning
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.location
+ :members:
+ :show-inheritance:
+
+.. automodule:: swift.common.middleware.s3api.controllers.logging
+ :members:
+ :show-inheritance:
+
.. _bulk:
Bulk Operations (Delete and Archive Auto Extraction)
diff --git a/etc/object-server.conf-sample b/etc/object-server.conf-sample
index 8aae6ba45..f2a3ddfd0 100644
--- a/etc/object-server.conf-sample
+++ b/etc/object-server.conf-sample
@@ -120,6 +120,8 @@ use = egg:swift#object
# Comma separated list of headers that can be set in metadata on an object.
# This list is in addition to X-Object-Meta-* headers and cannot include
# Content-Type, etag, Content-Length, or deleted
+# Note that you may add some extra headers for better S3 compatibility; they are:
+# Cache-Control, Content-Language, Expires, and X-Robots-Tag
# allowed_headers = Content-Disposition, Content-Encoding, X-Delete-At, X-Object-Manifest, X-Static-Large-Object
#
# auto_create_account_prefix = .
diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample
index 6bdd27bef..aee9c192c 100644
--- a/etc/proxy-server.conf-sample
+++ b/etc/proxy-server.conf-sample
@@ -442,6 +442,145 @@ user_test5_tester5 = testing5 service
# in ACLs by setting allow_names_in_acls to false:
# allow_names_in_acls = true
+[filter:s3api]
+use = egg:swift#s3api
+
+# s3api setup:
+#
+# With either tempauth or your custom auth:
+# - Put s3api just before your auth filter(s) in the pipeline
+# With keystone:
+# - Put s3api and s3token before keystoneauth in the pipeline
+#
+# Swift has no concept of the S3's resource owner; the resources
+# (i.e. containers and objects) created via the Swift API have no owner
+# information. This option specifies how the s3api middleware handles them
+# with the S3 API. If this option is 'false', such kinds of resources will be
+# invisible and no users can access them with the S3 API. If set to 'true',
+# a resource without an owner belongs to everyone and everyone can access it
+# with the S3 API. If you care about S3 compatibility, set 'false' here. This
+# option makes sense only when the s3_acl option is set to 'true' and your
+# Swift cluster has the resources created via the Swift API.
+# allow_no_owner = false
+#
+# Set a region name of your Swift cluster. Note that the s3api doesn't choose
+# a region of the newly created bucket. This value is used for the
+# GET Bucket location API and v4 signatures calculation.
+# location = US
+#
+# Set whether to enforce DNS-compliant bucket names. Note that S3 enforces
+# these conventions in all regions except the US Standard region.
+# dns_compliant_bucket_names = True
+#
+# Set the default maximum number of objects returned in the GET Bucket
+# response.
+# max_bucket_listing = 1000
+#
+# Set the maximum number of parts returned in the List Parts operation.
+# (default: 1000 as well as S3 specification)
+# If setting it larger than 10000 (swift container_listing_limit default)
+# make sure you also increase the container_listing_limit in swift.conf.
+# max_parts_listing = 1000
+#
+# Set the maximum number of objects we can delete with the Multi-Object Delete
+# operation.
+# max_multi_delete_objects = 1000
+#
+# If set to 'true', s3api uses its own metadata for ACLs
+# (e.g. X-Container-Sysmeta-S3Api-Acl) to achieve the best S3 compatibility.
+# If set to 'false', s3api tries to use Swift ACLs (e.g. X-Container-Read)
+# instead of S3 ACLs as far as possible.
+# There are some caveats that one should know about this setting. Firstly,
+# if set to 'false' after having previously been set to 'true', any objects
+# or containers stored while the setting was 'true' become accessible to all
+# users, because their S3 ACLs are ignored under the s3_acl=False setting.
+# Secondly, s3_acl=True mode does not keep ACL consistency between the S3 and
+# Swift APIs: with s3_acl enabled, S3 ACLs only affect objects and buckets
+# accessed via the S3 API, as the ACL information is not available via the
+# Swift API and so is not applied there.
+# Note that s3_acl currently supports only keystone and tempauth.
+# DON'T USE THIS for production before enough testing for your use cases.
+# This stuff is still under development and it might cause something
+# you don't expect.
+# s3_acl = false
+#
+# Specify a host name of your Swift cluster. This enables virtual-hosted style
+# requests.
+# storage_domain =
+#
+# Enable pipeline order check for SLO, s3token, authtoken, keystoneauth
+# according to standard s3api/Swift construction using either tempauth or
+# keystoneauth. If the order is incorrect, it raises an exception to stop
+# proxy. Turn auth_pipeline_check off only when you want to bypass these
+# authenticate middlewares in order to use other 3rd party (or your
+# proprietary) authenticate middleware.
+# auth_pipeline_check = True
+#
+# Enable multi-part uploads. (default: true)
+# This is required to store files larger than Swift's max_file_size (by
+# default, 5GiB). Note that this has performance implications when deleting
+# objects, as we now have to check whether there are also segments to delete.
+# allow_multipart_uploads = True
+#
+# Set the maximum number of parts for the Upload Part operation (default:
+# 1000). When setting it larger than the default in order to match the S3
+# specification (which allows up to 10000 parts), also increase
+# max_manifest_segments for the slo middleware accordingly.
+# max_upload_part_num = 1000
+#
+# Enable returning only buckets whose owner is the user who requested the
+# GET Service operation. (default: false)
+# If you want to enable this feature, set both this and s3_acl to true.
+# That might cause significant performance degradation, so set this to
+# true only if your service absolutely needs this feature.
+# If you set this to false, s3api returns all buckets.
+# check_bucket_owner = false
+#
+# By default, Swift reports only S3 style access logs.
+# (e.g. PUT /bucket/object) If force_swift_request_proxy_log is set
+# to 'true', Swift will also output Swift style logs
+# (e.g. PUT /v1/account/container/object) in addition to the S3 style logs.
+# Note that requests will then be reported twice (i.e. s3api does not
+# deduplicate them), and the Swift style log will also include the various
+# subrequests issued to achieve S3 compatibility when
+# force_swift_request_proxy_log is set to 'true'.
+# force_swift_request_proxy_log = false
+#
+# AWS S3 document says that each part must be at least 5 MB in a multipart
+# upload, except the last part.
+# min_segment_size = 5242880
+
+# You can override the default log routing for this filter here:
+# log_name = s3api
+
+[filter:s3token]
+# s3token middleware authenticates with keystone using the s3 credentials
+# provided in the request header. Please put s3token between s3api
+# and keystoneauth if you're using keystoneauth.
+use = egg:swift#s3token
+
+# Prefix that will be prepended to the tenant to form the account
+reseller_prefix = AUTH_
+
+# By default, s3token will reject all invalid S3-style requests. Set this to
+# True to delegate that decision to downstream WSGI components. This may be
+# useful if there are multiple auth systems in the proxy pipeline.
+delay_auth_decision = False
+
+# Keystone server details
+auth_uri = http://keystonehost:35357/v3
+
+# Connect/read timeout to use when communicating with Keystone
+http_timeout = 10.0
+
+# SSL-related options
+# insecure = False
+# certfile =
+# keyfile =
+
+# You can override the default log routing for this filter here:
+# log_name = s3token
+
[filter:healthcheck]
use = egg:swift#healthcheck
# An optional filesystem path, which if present, will cause the healthcheck
diff --git a/requirements.txt b/requirements.txt
index b2a7b6adb..c1151134b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,6 +7,8 @@ eventlet>=0.17.4 # MIT
greenlet>=0.3.1
netifaces>=0.5,!=0.10.0,!=0.10.1
PasteDeploy>=1.3.3
+lxml
+requests>=2.14.2 # Apache-2.0
six>=1.9.0
xattr>=0.4
PyECLib>=1.3.1 # BSD
diff --git a/setup.cfg b/setup.cfg
index 3cfdfc068..7ed7f1ec1 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -110,6 +110,8 @@ paste.filter_factory =
kms_keymaster = swift.common.middleware.crypto.kms_keymaster:filter_factory
listing_formats = swift.common.middleware.listing_formats:filter_factory
symlink = swift.common.middleware.symlink:filter_factory
+ s3api = swift.common.middleware.s3api.s3api:filter_factory
+ s3token = swift.common.middleware.s3api.s3token:filter_factory
[egg_info]
diff --git a/swift/common/middleware/s3api/__init__.py b/swift/common/middleware/s3api/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/swift/common/middleware/s3api/__init__.py
diff --git a/swift/common/middleware/s3api/acl_handlers.py b/swift/common/middleware/s3api/acl_handlers.py
new file mode 100644
index 000000000..17d5101f7
--- /dev/null
+++ b/swift/common/middleware/s3api/acl_handlers.py
@@ -0,0 +1,479 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+------------
+Acl Handlers
+------------
+
+Why do we need this
+^^^^^^^^^^^^^^^^^^^
+
+To make controller classes clean, we need these handlers.
+It is really useful for customizing acl checking algorithms for
+each controller.
+
+Basic Information
+^^^^^^^^^^^^^^^^^
+
+BaseAclHandler wraps basic Acl handling.
+(i.e. it will check acl from ACL_MAP by using HEAD)
+
+How to extend
+^^^^^^^^^^^^^
+
+Make a handler with the name of the controller.
+(e.g. BucketAclHandler is for BucketController)
+It consists of method(s) for actual S3 method on controllers as follows.
+
+Example::
+
+ class BucketAclHandler(BaseAclHandler):
+ def PUT:
+ << put acl handling algorithms here for PUT bucket >>
+
+.. note::
+  If the method DOESN'T need to call _get_response again outside of
+  the acl checking, it has to return the response it needs at
+  the end of the method.
+
+"""
+import sys
+
+from swift.common.middleware.s3api.subresource import ACL, Owner, encode_acl
+from swift.common.middleware.s3api.s3response import MissingSecurityHeader, \
+ MalformedACLError, UnexpectedContent
+from swift.common.middleware.s3api.etree import fromstring, XMLSyntaxError, \
+ DocumentInvalid
+from swift.common.middleware.s3api.utils import MULTIUPLOAD_SUFFIX, \
+ sysmeta_header
+from contextlib import contextmanager
+
+
+def get_acl_handler(controller_name):
+ for base_klass in [BaseAclHandler, MultiUploadAclHandler]:
+ # pylint: disable-msg=E1101
+ for handler in base_klass.__subclasses__():
+ handler_suffix_len = len('AclHandler') \
+ if not handler.__name__ == 'S3AclHandler' else len('Handler')
+ if handler.__name__[:-handler_suffix_len] == controller_name:
+ return handler
+ return BaseAclHandler
+
+
+class BaseAclHandler(object):
+ """
+ BaseAclHandler: Handling ACL for basic requests mapped on ACL_MAP
+ """
+ def __init__(self, req, logger):
+ self.req = req
+ self.container = self.req.container_name
+ self.obj = self.req.object_name
+ self.method = req.environ['REQUEST_METHOD']
+ self.user_id = self.req.user_id
+ self.headers = self.req.headers
+ self.logger = logger
+
+ @contextmanager
+ def request_with(self, container=None, obj=None, headers=None):
+ try:
+ org_cont = self.container
+ org_obj = self.obj
+ org_headers = self.headers
+
+ self.container = container or org_cont
+ self.obj = obj or org_obj
+ self.headers = headers or org_headers
+ yield
+
+ finally:
+ self.container = org_cont
+ self.obj = org_obj
+ self.headers = org_headers
+
+ def handle_acl(self, app, method, container=None, obj=None, headers=None):
+ method = method or self.method
+
+ with self.request_with(container, obj, headers):
+ if hasattr(self, method):
+ return getattr(self, method)(app)
+ else:
+ return self._handle_acl(app, method)
+
+ def _handle_acl(self, app, sw_method, container=None, obj=None,
+ permission=None, headers=None):
+ """
+ General acl handling method.
+ This method expects to call Request._get_response() in outside of
+ this method so that this method returns response only when sw_method
+ is HEAD.
+ """
+
+ container = self.container if container is None else container
+ obj = self.obj if obj is None else obj
+ sw_method = sw_method or self.req.environ['REQUEST_METHOD']
+ resource = 'object' if obj else 'container'
+ headers = self.headers if headers is None else headers
+
+ self.logger.debug(
+ 'checking permission: %s %s %s %s' %
+ (container, obj, sw_method, dict(headers)))
+
+ if not container:
+ return
+
+ if not permission and (self.method, sw_method, resource) in ACL_MAP:
+ acl_check = ACL_MAP[(self.method, sw_method, resource)]
+ resource = acl_check.get('Resource') or resource
+ permission = acl_check['Permission']
+
+ if not permission:
+ self.logger.debug(
+ '%s %s %s %s' % (container, obj, sw_method, headers))
+ raise Exception('No permission to be checked exists')
+
+ if resource == 'object':
+ resp = self.req.get_acl_response(app, 'HEAD',
+ container, obj,
+ headers)
+ acl = resp.object_acl
+ elif resource == 'container':
+ resp = self.req.get_acl_response(app, 'HEAD',
+ container, '')
+ acl = resp.bucket_acl
+
+ try:
+ acl.check_permission(self.user_id, permission)
+ except Exception as e:
+ self.logger.debug(acl)
+ self.logger.debug('permission denined: %s %s %s' %
+ (e, self.user_id, permission))
+ raise
+
+ if sw_method == 'HEAD':
+ return resp
+
+ def get_acl(self, headers, body, bucket_owner, object_owner=None):
+ """
+ Get ACL instance from S3 (e.g. x-amz-grant) headers or S3 acl xml body.
+ """
+ acl = ACL.from_headers(headers, bucket_owner, object_owner,
+ as_private=False)
+
+ if acl is None:
+ # Get acl from request body if possible.
+ if not body:
+ raise MissingSecurityHeader(missing_header_name='x-amz-acl')
+ try:
+ elem = fromstring(body, ACL.root_tag)
+ acl = ACL.from_elem(
+ elem, True, self.req.allow_no_owner)
+ except(XMLSyntaxError, DocumentInvalid):
+ raise MalformedACLError()
+ except Exception as e:
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ self.logger.error(e)
+ raise exc_type, exc_value, exc_traceback
+ else:
+ if body:
+ # Specifying grant with both header and xml is not allowed.
+ raise UnexpectedContent()
+
+ return acl
+
+
+class BucketAclHandler(BaseAclHandler):
+ """
+ BucketAclHandler: Handler for BucketController
+ """
+ def DELETE(self, app):
+ if self.container.endswith(MULTIUPLOAD_SUFFIX):
+ # anyways, delete multiupload container doesn't need acls
+ # because it depends on GET segment container result for
+ # cleanup
+ pass
+ else:
+ return self._handle_acl(app, 'DELETE')
+
+ def HEAD(self, app):
+ if self.method == 'DELETE':
+ return self._handle_acl(app, 'DELETE')
+ else:
+ return self._handle_acl(app, 'HEAD')
+
+ def GET(self, app):
+ if self.method == 'DELETE' and \
+ self.container.endswith(MULTIUPLOAD_SUFFIX):
+ pass
+ else:
+ return self._handle_acl(app, 'GET')
+
+ def PUT(self, app):
+ req_acl = ACL.from_headers(self.req.headers,
+ Owner(self.user_id, self.user_id))
+
+ # To avoid overwriting the existing bucket's ACL, we send PUT
+ # request first before setting the ACL to make sure that the target
+ # container does not exist.
+ self.req.get_acl_response(app, 'PUT')
+
+ # update metadata
+ self.req.bucket_acl = req_acl
+
+ # FIXME If this request is failed, there is a possibility that the
+ # bucket which has no ACL is left.
+ return self.req.get_acl_response(app, 'POST')
+
+
+class ObjectAclHandler(BaseAclHandler):
+ """
+ ObjectAclHandler: Handler for ObjectController
+ """
+ def HEAD(self, app):
+ # No check object permission needed at DELETE Object
+ if self.method != 'DELETE':
+ return self._handle_acl(app, 'HEAD')
+
+ def PUT(self, app):
+ b_resp = self._handle_acl(app, 'HEAD', obj='')
+ req_acl = ACL.from_headers(self.req.headers,
+ b_resp.bucket_acl.owner,
+ Owner(self.user_id, self.user_id))
+ self.req.object_acl = req_acl
+
+
+class S3AclHandler(BaseAclHandler):
+ """
+ S3AclHandler: Handler for S3AclController
+ """
+ def GET(self, app):
+ self._handle_acl(app, 'HEAD', permission='READ_ACP')
+
+ def PUT(self, app):
+ if self.req.is_object_request:
+ b_resp = self.req.get_acl_response(app, 'HEAD', obj='')
+ o_resp = self._handle_acl(app, 'HEAD', permission='WRITE_ACP')
+ req_acl = self.get_acl(self.req.headers,
+ self.req.xml(ACL.max_xml_length),
+ b_resp.bucket_acl.owner,
+ o_resp.object_acl.owner)
+
+ # Don't change the owner of the resource by PUT acl request.
+ o_resp.object_acl.check_owner(req_acl.owner.id)
+
+ for g in req_acl.grants:
+ self.logger.debug(
+ 'Grant %s %s permission on the object /%s/%s' %
+ (g.grantee, g.permission, self.req.container_name,
+ self.req.object_name))
+ self.req.object_acl = req_acl
+ else:
+ self._handle_acl(app, self.method)
+
+ def POST(self, app):
+ if self.req.is_bucket_request:
+ resp = self._handle_acl(app, 'HEAD', permission='WRITE_ACP')
+
+ req_acl = self.get_acl(self.req.headers,
+ self.req.xml(ACL.max_xml_length),
+ resp.bucket_acl.owner)
+
+ # Don't change the owner of the resource by PUT acl request.
+ resp.bucket_acl.check_owner(req_acl.owner.id)
+
+ for g in req_acl.grants:
+ self.logger.debug(
+ 'Grant %s %s permission on the bucket /%s' %
+ (g.grantee, g.permission, self.req.container_name))
+ self.req.bucket_acl = req_acl
+ else:
+ self._handle_acl(app, self.method)
+
+
+class MultiObjectDeleteAclHandler(BaseAclHandler):
+ """
+ MultiObjectDeleteAclHandler: Handler for MultiObjectDeleteController
+ """
+ def HEAD(self, app):
+ # Only bucket write acl is required
+ if not self.obj:
+ return self._handle_acl(app, 'HEAD')
+
+ def DELETE(self, app):
+ # Only bucket write acl is required
+ pass
+
+
+class MultiUploadAclHandler(BaseAclHandler):
+ """
+    MultiUpload stuff requires acl checking just once for the BASE container,
+    so MultiUploadAclHandler extends BaseAclHandler to check the acl only
+    when the verb is defined. We should define the verb as the first step of
+    making a request to backend Swift for an incoming request.
+
+ Basic Rules:
+ - BASE container name is always w/o 'MULTIUPLOAD_SUFFIX'
+ - Any check timing is ok but we should check it as soon as possible.
+
+ ========== ====== ============= ==========
+ Controller Verb CheckResource Permission
+ ========== ====== ============= ==========
+ Part PUT Container WRITE
+ Uploads GET Container READ
+ Uploads POST Container WRITE
+ Upload GET Container READ
+ Upload DELETE Container WRITE
+ Upload POST Container WRITE
+ ========== ====== ============= ==========
+
+ """
+ def __init__(self, req, logger):
+ super(MultiUploadAclHandler, self).__init__(req, logger)
+ self.acl_checked = False
+
+ def handle_acl(self, app, method, container=None, obj=None, headers=None):
+ method = method or self.method
+ with self.request_with(container, obj, headers):
+ # MultiUpload stuffs don't need acl check basically.
+ if hasattr(self, method):
+ return getattr(self, method)(app)
+ else:
+ pass
+
+ def HEAD(self, app):
+ # For _check_upload_info
+ self._handle_acl(app, 'HEAD', self.container, '')
+
+
+class PartAclHandler(MultiUploadAclHandler):
+ """
+ PartAclHandler: Handler for PartController
+ """
+ def __init__(self, req, logger):
+ # pylint: disable-msg=E1003
+ super(MultiUploadAclHandler, self).__init__(req, logger)
+
+ def HEAD(self, app):
+ if self.container.endswith(MULTIUPLOAD_SUFFIX):
+ # For _check_upload_info
+ container = self.container[:-len(MULTIUPLOAD_SUFFIX)]
+ self._handle_acl(app, 'HEAD', container, '')
+ else:
+ # For check_copy_source
+ return self._handle_acl(app, 'HEAD', self.container, self.obj)
+
+
+class UploadsAclHandler(MultiUploadAclHandler):
+ """
+ UploadsAclHandler: Handler for UploadsController
+ """
+ def handle_acl(self, app, method, *args, **kwargs):
+ method = method or self.method
+ if hasattr(self, method):
+ return getattr(self, method)(app)
+ else:
+ pass
+
+ def GET(self, app):
+ # List Multipart Upload
+ self._handle_acl(app, 'GET', self.container, '')
+
+ def PUT(self, app):
+ if not self.acl_checked:
+ resp = self._handle_acl(app, 'HEAD', obj='')
+ req_acl = ACL.from_headers(self.req.headers,
+ resp.bucket_acl.owner,
+ Owner(self.user_id, self.user_id))
+ acl_headers = encode_acl('object', req_acl)
+ self.req.headers[sysmeta_header('object', 'tmpacl')] = \
+ acl_headers[sysmeta_header('object', 'acl')]
+ self.acl_checked = True
+
+
+class UploadAclHandler(MultiUploadAclHandler):
+ """
+ UploadAclHandler: Handler for UploadController
+ """
+ def handle_acl(self, app, method, *args, **kwargs):
+ method = method or self.method
+ if hasattr(self, method):
+ return getattr(self, method)(app)
+ else:
+ pass
+
+ def HEAD(self, app):
+ # FIXME: GET HEAD case conflicts with GET service
+ method = 'GET' if self.method == 'GET' else 'HEAD'
+ self._handle_acl(app, method, self.container, '')
+
+ def PUT(self, app):
+ container = self.req.container_name + MULTIUPLOAD_SUFFIX
+ obj = '%s/%s' % (self.obj, self.req.params['uploadId'])
+ resp = self.req._get_response(app, 'HEAD', container, obj)
+ self.req.headers[sysmeta_header('object', 'acl')] = \
+ resp.sysmeta_headers.get(sysmeta_header('object', 'tmpacl'))
+
+
+"""
+ACL_MAP =
+ {
+ ('<s3_method>', '<swift_method>', '<swift_resource>'):
+ {'Resource': '<check_resource>',
+ 'Permission': '<check_permission>'},
+ ...
+ }
+
+s3_method: Method of S3 Request from user to s3api
+swift_method: Method of Swift Request from s3api to swift
+swift_resource: Resource of Swift Request from s3api to swift
+check_resource: <container/object>
+check_permission: <OWNER/READ/WRITE/READ_ACP/WRITE_ACP>
+"""
+ACL_MAP = {
+ # HEAD Bucket
+ ('HEAD', 'HEAD', 'container'):
+ {'Permission': 'READ'},
+ # GET Service
+ ('GET', 'HEAD', 'container'):
+ {'Permission': 'OWNER'},
+ # GET Bucket, List Parts, List Multipart Upload
+ ('GET', 'GET', 'container'):
+ {'Permission': 'READ'},
+ # PUT Object, PUT Object Copy
+ ('PUT', 'HEAD', 'container'):
+ {'Permission': 'WRITE'},
+ # DELETE Bucket
+ ('DELETE', 'DELETE', 'container'):
+ {'Permission': 'OWNER'},
+ # HEAD Object
+ ('HEAD', 'HEAD', 'object'):
+ {'Permission': 'READ'},
+ # GET Object
+ ('GET', 'GET', 'object'):
+ {'Permission': 'READ'},
+ # PUT Object Copy, Upload Part Copy
+ ('PUT', 'HEAD', 'object'):
+ {'Permission': 'READ'},
+ # Abort Multipart Upload
+ ('DELETE', 'HEAD', 'container'):
+ {'Permission': 'WRITE'},
+ # Delete Object
+ ('DELETE', 'DELETE', 'object'):
+ {'Resource': 'container',
+ 'Permission': 'WRITE'},
+ # Complete Multipart Upload, DELETE Multiple Objects,
+ # Initiate Multipart Upload
+ ('POST', 'HEAD', 'container'):
+ {'Permission': 'WRITE'},
+}
diff --git a/swift/common/middleware/s3api/acl_utils.py b/swift/common/middleware/s3api/acl_utils.py
new file mode 100644
index 000000000..4951038d1
--- /dev/null
+++ b/swift/common/middleware/s3api/acl_utils.py
@@ -0,0 +1,95 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from swift.common.middleware.s3api.exception import ACLError
+from swift.common.middleware.s3api.etree import fromstring, XMLSyntaxError, \
+ DocumentInvalid, XMLNS_XSI
+from swift.common.middleware.s3api.s3response import S3NotImplemented, \
+ MalformedACLError, InvalidArgument
+
+
+def swift_acl_translate(acl, group='', user='', xml=False):
+ """
+ Takes an S3 style ACL and returns a list of header/value pairs that
+ implement that ACL in Swift, or "NotImplemented" if there isn't a way to do
+ that yet.
+ """
+ swift_acl = {}
+ swift_acl['public-read'] = [['X-Container-Read', '.r:*,.rlistings']]
+ # Swift does not support public write:
+ # https://answers.launchpad.net/swift/+question/169541
+ swift_acl['public-read-write'] = [['X-Container-Write', '.r:*'],
+ ['X-Container-Read',
+ '.r:*,.rlistings']]
+
+ # TODO: if there's a way to get group and user, this should work for
+ # private:
+ # swift_acl['private'] = \
+ # [['HTTP_X_CONTAINER_WRITE', group + ':' + user], \
+ # ['HTTP_X_CONTAINER_READ', group + ':' + user]]
+ swift_acl['private'] = [['X-Container-Write', '.'],
+ ['X-Container-Read', '.']]
+ if xml:
+ # We are working with XML and need to parse it
+ try:
+ elem = fromstring(acl, 'AccessControlPolicy')
+ except (XMLSyntaxError, DocumentInvalid):
+ raise MalformedACLError()
+ acl = 'unknown'
+ for grant in elem.findall('./AccessControlList/Grant'):
+ permission = grant.find('./Permission').text
+ grantee = grant.find('./Grantee').get('{%s}type' % XMLNS_XSI)
+ if permission == "FULL_CONTROL" and grantee == 'CanonicalUser' and\
+ acl != 'public-read' and acl != 'public-read-write':
+ acl = 'private'
+ elif permission == "READ" and grantee == 'Group' and\
+ acl != 'public-read-write':
+ acl = 'public-read'
+ elif permission == "WRITE" and grantee == 'Group':
+ acl = 'public-read-write'
+ else:
+ acl = 'unsupported'
+
+ if acl == 'authenticated-read':
+ raise S3NotImplemented()
+ elif acl not in swift_acl:
+ raise ACLError()
+
+ return swift_acl[acl]
+
+
+def handle_acl_header(req):
+ """
+ Handle the x-amz-acl header.
+ Note that this header currently used for only normal-acl
+ (not implemented) on s3acl.
+ TODO: add translation to swift acl like as x-container-read to s3acl
+ """
+
+ amz_acl = req.environ['HTTP_X_AMZ_ACL']
+ # Translate the Amazon ACL to something that can be
+ # implemented in Swift, 501 otherwise. Swift uses POST
+ # for ACLs, whereas S3 uses PUT.
+ del req.environ['HTTP_X_AMZ_ACL']
+ if req.query_string:
+ req.query_string = ''
+
+ try:
+ translated_acl = swift_acl_translate(amz_acl)
+ except ACLError:
+ raise InvalidArgument('x-amz-acl', amz_acl)
+
+ for header, acl in translated_acl:
+ req.headers[header] = acl
diff --git a/swift/common/middleware/s3api/controllers/__init__.py b/swift/common/middleware/s3api/controllers/__init__.py
new file mode 100644
index 000000000..831e2c7d3
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/__init__.py
@@ -0,0 +1,52 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from swift.common.middleware.s3api.controllers.base import Controller, \
+ UnsupportedController
+from swift.common.middleware.s3api.controllers.service import ServiceController
+from swift.common.middleware.s3api.controllers.bucket import BucketController
+from swift.common.middleware.s3api.controllers.obj import ObjectController
+
+from swift.common.middleware.s3api.controllers.acl import AclController
+from swift.common.middleware.s3api.controllers.s3_acl import S3AclController
+from swift.common.middleware.s3api.controllers.multi_delete import \
+ MultiObjectDeleteController
+from swift.common.middleware.s3api.controllers.multi_upload import \
+ UploadController, PartController, UploadsController
+from swift.common.middleware.s3api.controllers.location import \
+ LocationController
+from swift.common.middleware.s3api.controllers.logging import \
+ LoggingStatusController
+from swift.common.middleware.s3api.controllers.versioning import \
+ VersioningController
+
+__all__ = [
+ 'Controller',
+ 'ServiceController',
+ 'BucketController',
+ 'ObjectController',
+
+ 'AclController',
+ 'S3AclController',
+ 'MultiObjectDeleteController',
+ 'PartController',
+ 'UploadsController',
+ 'UploadController',
+ 'LocationController',
+ 'LoggingStatusController',
+ 'VersioningController',
+
+ 'UnsupportedController',
+]
diff --git a/swift/common/middleware/s3api/controllers/acl.py b/swift/common/middleware/s3api/controllers/acl.py
new file mode 100644
index 000000000..7e627a396
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/acl.py
@@ -0,0 +1,130 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from swift.common.http import HTTP_OK
+from swift.common.middleware.acl import parse_acl, referrer_allowed
+from swift.common.utils import public
+
+from swift.common.middleware.s3api.exception import ACLError
+from swift.common.middleware.s3api.controllers.base import Controller
+from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented, \
+ MalformedACLError, UnexpectedContent, MissingSecurityHeader
+from swift.common.middleware.s3api.etree import Element, SubElement, tostring
+from swift.common.middleware.s3api.acl_utils import swift_acl_translate, \
+ XMLNS_XSI
+
+
+MAX_ACL_BODY_SIZE = 200 * 1024
+
+
+def get_acl(account_name, headers):
+ """
+ Attempts to construct an S3 ACL based on what is found in the swift headers
+ """
+
+ elem = Element('AccessControlPolicy')
+ owner = SubElement(elem, 'Owner')
+ SubElement(owner, 'ID').text = account_name
+ SubElement(owner, 'DisplayName').text = account_name
+ access_control_list = SubElement(elem, 'AccessControlList')
+
+ # grant FULL_CONTROL to myself by default
+ grant = SubElement(access_control_list, 'Grant')
+ grantee = SubElement(grant, 'Grantee', nsmap={'xsi': XMLNS_XSI})
+ grantee.set('{%s}type' % XMLNS_XSI, 'CanonicalUser')
+ SubElement(grantee, 'ID').text = account_name
+ SubElement(grantee, 'DisplayName').text = account_name
+ SubElement(grant, 'Permission').text = 'FULL_CONTROL'
+
+ referrers, _ = parse_acl(headers.get('x-container-read'))
+ if referrer_allowed('unknown', referrers):
+ # grant public-read access
+ grant = SubElement(access_control_list, 'Grant')
+ grantee = SubElement(grant, 'Grantee', nsmap={'xsi': XMLNS_XSI})
+ grantee.set('{%s}type' % XMLNS_XSI, 'Group')
+ SubElement(grantee, 'URI').text = \
+ 'http://acs.amazonaws.com/groups/global/AllUsers'
+ SubElement(grant, 'Permission').text = 'READ'
+
+ referrers, _ = parse_acl(headers.get('x-container-write'))
+ if referrer_allowed('unknown', referrers):
+ # grant public-write access
+ grant = SubElement(access_control_list, 'Grant')
+ grantee = SubElement(grant, 'Grantee', nsmap={'xsi': XMLNS_XSI})
+ grantee.set('{%s}type' % XMLNS_XSI, 'Group')
+ SubElement(grantee, 'URI').text = \
+ 'http://acs.amazonaws.com/groups/global/AllUsers'
+ SubElement(grant, 'Permission').text = 'WRITE'
+
+ body = tostring(elem)
+
+ return HTTPOk(body=body, content_type="text/plain")
+
+
+class AclController(Controller):
+ """
+ Handles the following APIs:
+
+ * GET Bucket acl
+ * PUT Bucket acl
+ * GET Object acl
+ * PUT Object acl
+
+ Those APIs are logged as ACL operations in the S3 server log.
+ """
+ @public
+ def GET(self, req):
+ """
+ Handles GET Bucket acl and GET Object acl.
+ """
+ resp = req.get_response(self.app, method='HEAD')
+
+ return get_acl(req.user_id, resp.headers)
+
+ @public
+ def PUT(self, req):
+ """
+ Handles PUT Bucket acl and PUT Object acl.
+ """
+ if req.is_object_request:
+ # Handle Object ACL
+ raise S3NotImplemented()
+ else:
+ # Handle Bucket ACL
+ xml = req.xml(MAX_ACL_BODY_SIZE)
+ if all(['HTTP_X_AMZ_ACL' in req.environ, xml]):
+ # S3 doesn't allow to give ACL with both ACL header and body.
+ raise UnexpectedContent()
+ elif not any(['HTTP_X_AMZ_ACL' in req.environ, xml]):
+ # Both canned ACL header and xml body are missing
+ raise MissingSecurityHeader(missing_header_name='x-amz-acl')
+ else:
+ # correct ACL exists in the request
+ if xml:
+ # We very likely have an XML-based ACL request.
+ # let's try to translate to the request header
+ try:
+ translated_acl = swift_acl_translate(xml, xml=True)
+ except ACLError:
+ raise MalformedACLError()
+
+ for header, acl in translated_acl:
+ req.headers[header] = acl
+
+ resp = req.get_response(self.app, 'POST')
+ resp.status = HTTP_OK
+ resp.headers.update({'Location': req.container_name})
+
+ return resp
diff --git a/swift/common/middleware/s3api/controllers/base.py b/swift/common/middleware/s3api/controllers/base.py
new file mode 100644
index 000000000..3652b151f
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/base.py
@@ -0,0 +1,100 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+
+from swift.common.middleware.s3api.s3response import S3NotImplemented, \
+ InvalidRequest
+from swift.common.middleware.s3api.utils import camel_to_snake
+
+
def bucket_operation(func=None, err_resp=None, err_msg=None):
    """
    A decorator to ensure that the request is a bucket operation.  If the
    target resource is an object, this decorator updates the request by
    default so that the controller handles it as a bucket operation.  If
    'err_resp' is specified, this raises it on error instead.

    Usable both bare (``@bucket_operation``) and parameterized
    (``@bucket_operation(err_resp=..., err_msg=...)``).
    """
    def decorate(fn):
        @functools.wraps(fn)
        def wrapped(self, req):
            if not req.is_bucket_request:
                if err_resp:
                    raise err_resp(msg=err_msg)

                # Default behavior: drop the key so the handler treats
                # the request as a bucket operation.
                self.logger.debug('A key is specified for bucket API.')
                req.object_name = None

            return fn(self, req)

        return wrapped

    # Bare usage passes the handler directly; parameterized usage returns
    # the decorator for a later application.
    return decorate(func) if func else decorate
+
+
def object_operation(func):
    """
    A decorator to ensure that the request is an object operation.  If the
    target resource is not an object, this raises an error response.
    """
    @functools.wraps(func)
    def wrapped(self, req):
        if req.is_object_request:
            return func(self, req)
        raise InvalidRequest('A key must be specified')

    return wrapped
+
+
def check_container_existence(func):
    """
    A decorator to ensure the container existence.

    Performs a container-info lookup before invoking the handler; the
    request object is expected to raise for a missing container.
    """
    @functools.wraps(func)
    def wrapped(self, req):
        req.get_container_info(self.app)
        return func(self, req)

    return wrapped
+
+
class Controller(object):
    """
    Base WSGI controller class for the middleware.

    :param app: the next WSGI application/filter in the pipeline
    :param conf: the s3api configuration object
    :param logger: a logger instance for this controller
    """
    def __init__(self, app, conf, logger, **kwargs):
        self.app = app
        self.conf = conf
        self.logger = logger

    @classmethod
    def resource_type(cls):
        """
        Returns the target resource type of this controller: the class
        name minus the 'Controller' suffix, converted to upper snake case
        (e.g. LoggingStatusController -> LOGGING_STATUS).
        """
        trimmed = cls.__name__[:-len('Controller')]
        return camel_to_snake(trimmed).upper()
+
+
class UnsupportedController(Controller):
    """
    Handles unsupported requests.

    Instantiation always fails: being routed to this controller means the
    requested resource type has no implementation in s3api.
    """
    def __init__(self, app, conf, logger, **kwargs):
        raise S3NotImplemented('The requested resource is not implemented')
diff --git a/swift/common/middleware/s3api/controllers/bucket.py b/swift/common/middleware/s3api/controllers/bucket.py
new file mode 100644
index 000000000..df8068493
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/bucket.py
@@ -0,0 +1,251 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+from base64 import standard_b64encode as b64encode
+from base64 import standard_b64decode as b64decode
+
+from swift.common.http import HTTP_OK
+from swift.common.utils import json, public, config_true_value
+
+from swift.common.middleware.s3api.controllers.base import Controller
+from swift.common.middleware.s3api.etree import Element, SubElement, tostring, \
+ fromstring, XMLSyntaxError, DocumentInvalid
+from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented, \
+ InvalidArgument, \
+ MalformedXML, InvalidLocationConstraint, NoSuchBucket, \
+ BucketNotEmpty, InternalError, ServiceUnavailable, NoSuchKey
+from swift.common.middleware.s3api.utils import MULTIUPLOAD_SUFFIX
+
+MAX_PUT_BUCKET_BODY_SIZE = 10240
+
+
class BucketController(Controller):
    """
    Handles bucket request.

    Maps S3 bucket APIs (HEAD/GET/PUT/DELETE/POST Bucket) onto the
    backing Swift container.
    """
    def _delete_segments_bucket(self, req):
        """
        Before delete bucket, delete segments bucket if existing.

        Pages through the +segments container, deleting every segment
        object, then deletes the +segments container itself.

        :raises BucketNotEmpty: if the bucket itself still holds objects
        :raises ServiceUnavailable: if segment cleanup fails part-way
        """
        container = req.container_name + MULTIUPLOAD_SUFFIX
        marker = ''
        seg = ''

        try:
            resp = req.get_response(self.app, 'HEAD')
            if int(resp.sw_headers['X-Container-Object-Count']) > 0:
                raise BucketNotEmpty()
            # FIXME: This extra HEAD saves unexpected segment deletion
            # but if a complete multipart upload happen while cleanup
            # segment container below, completed object may be missing its
            # segments unfortunately. To be safer, it might be good
            # to handle if the segments can be deleted for each object.
        except NoSuchBucket:
            pass

        try:
            while True:
                # delete all segments
                resp = req.get_response(self.app, 'GET', container,
                                        query={'format': 'json',
                                               'marker': marker})
                segments = json.loads(resp.body)
                for seg in segments:
                    try:
                        req.get_response(self.app, 'DELETE', container,
                                         seg['name'])
                    except NoSuchKey:
                        # already gone; nothing to do
                        pass
                    except InternalError:
                        raise ServiceUnavailable()
                if segments:
                    # advance the listing marker to the last deleted name
                    marker = seg['name']
                else:
                    break
            req.get_response(self.app, 'DELETE', container)
        except NoSuchBucket:
            # no segments container was ever created for this bucket
            return
        except (BucketNotEmpty, InternalError):
            raise ServiceUnavailable()

    @public
    def HEAD(self, req):
        """
        Handle HEAD Bucket (Get Metadata) request
        """
        resp = req.get_response(self.app)

        return HTTPOk(headers=resp.headers)

    @public
    def GET(self, req):
        """
        Handle GET Bucket (List Objects) request

        Supports both the version 1 listing and ListObjectsV2
        (``list-type=2``), including markers/continuation tokens,
        prefix/delimiter grouping and url encoding-type.
        """

        max_keys = req.get_validated_param(
            'max-keys', self.conf.max_bucket_listing)
        # TODO: Separate max_bucket_listing and default_bucket_listing
        tag_max_keys = max_keys
        max_keys = min(max_keys, self.conf.max_bucket_listing)

        encoding_type = req.params.get('encoding-type')
        if encoding_type is not None and encoding_type != 'url':
            err_msg = 'Invalid Encoding Method specified in Request'
            raise InvalidArgument('encoding-type', encoding_type, err_msg)

        # ask Swift for one extra record so truncation can be detected
        query = {
            'format': 'json',
            'limit': max_keys + 1,
        }
        if 'marker' in req.params:
            query.update({'marker': req.params['marker']})
        if 'prefix' in req.params:
            query.update({'prefix': req.params['prefix']})
        if 'delimiter' in req.params:
            query.update({'delimiter': req.params['delimiter']})

        # GET Bucket (List Objects) Version 2 parameters
        is_v2 = int(req.params.get('list-type', '1')) == 2
        fetch_owner = False
        if is_v2:
            if 'start-after' in req.params:
                query.update({'marker': req.params['start-after']})
            # continuation-token overrides start-after
            if 'continuation-token' in req.params:
                # NOTE(review): the token is not validated before decoding;
                # a malformed token would raise out of b64decode here —
                # TODO confirm intended error handling.
                decoded = b64decode(req.params['continuation-token'])
                query.update({'marker': decoded})
            if 'fetch-owner' in req.params:
                fetch_owner = config_true_value(req.params['fetch-owner'])

        resp = req.get_response(self.app, query=query)

        objects = json.loads(resp.body)

        elem = Element('ListBucketResult')
        SubElement(elem, 'Name').text = req.container_name
        SubElement(elem, 'Prefix').text = req.params.get('prefix')

        # in order to judge that truncated is valid, check whether
        # max_keys + 1 th element exists in swift.
        is_truncated = max_keys > 0 and len(objects) > max_keys
        objects = objects[:max_keys]

        if not is_v2:
            SubElement(elem, 'Marker').text = req.params.get('marker')
            if is_truncated and 'delimiter' in req.params:
                # listing entries are either real objects ('name') or
                # delimiter groupings ('subdir')
                if 'name' in objects[-1]:
                    SubElement(elem, 'NextMarker').text = \
                        objects[-1]['name']
                if 'subdir' in objects[-1]:
                    SubElement(elem, 'NextMarker').text = \
                        objects[-1]['subdir']
        else:
            if is_truncated:
                if 'name' in objects[-1]:
                    SubElement(elem, 'NextContinuationToken').text = \
                        b64encode(objects[-1]['name'])
                if 'subdir' in objects[-1]:
                    SubElement(elem, 'NextContinuationToken').text = \
                        b64encode(objects[-1]['subdir'])
            if 'continuation-token' in req.params:
                SubElement(elem, 'ContinuationToken').text = \
                    req.params['continuation-token']
            if 'start-after' in req.params:
                SubElement(elem, 'StartAfter').text = \
                    req.params['start-after']
            SubElement(elem, 'KeyCount').text = str(len(objects))

        SubElement(elem, 'MaxKeys').text = str(tag_max_keys)

        if 'delimiter' in req.params:
            SubElement(elem, 'Delimiter').text = req.params['delimiter']

        if encoding_type is not None:
            SubElement(elem, 'EncodingType').text = encoding_type

        SubElement(elem, 'IsTruncated').text = \
            'true' if is_truncated else 'false'

        # emit <Contents> for real objects first...
        for o in objects:
            if 'subdir' not in o:
                contents = SubElement(elem, 'Contents')
                SubElement(contents, 'Key').text = o['name']
                SubElement(contents, 'LastModified').text = \
                    o['last_modified'][:-3] + 'Z'
                SubElement(contents, 'ETag').text = '"%s"' % o['hash']
                SubElement(contents, 'Size').text = str(o['bytes'])
                if fetch_owner or not is_v2:
                    owner = SubElement(contents, 'Owner')
                    SubElement(owner, 'ID').text = req.user_id
                    SubElement(owner, 'DisplayName').text = req.user_id
                SubElement(contents, 'StorageClass').text = 'STANDARD'

        # ...then <CommonPrefixes> for the delimiter groupings
        for o in objects:
            if 'subdir' in o:
                common_prefixes = SubElement(elem, 'CommonPrefixes')
                SubElement(common_prefixes, 'Prefix').text = o['subdir']

        body = tostring(elem, encoding_type=encoding_type)

        return HTTPOk(body=body, content_type='application/xml')

    @public
    def PUT(self, req):
        """
        Handle PUT Bucket request

        :raises MalformedXML: if the request body is not a valid
                              CreateBucketConfiguration document
        :raises InvalidLocationConstraint: if the requested location does
                                           not match the configured one
        """
        xml = req.xml(MAX_PUT_BUCKET_BODY_SIZE)
        if xml:
            # check location
            try:
                elem = fromstring(
                    xml, 'CreateBucketConfiguration', self.logger)
                location = elem.find('./LocationConstraint').text
            except (XMLSyntaxError, DocumentInvalid):
                raise MalformedXML()
            except Exception as e:
                # unexpected failure: log and re-raise with the original
                # traceback (Python 2 three-expression raise form)
                exc_type, exc_value, exc_traceback = sys.exc_info()
                self.logger.error(e)
                raise exc_type, exc_value, exc_traceback

            if location != self.conf.location:
                # s3api cannot support multiple regions currently.
                raise InvalidLocationConstraint()

        resp = req.get_response(self.app)

        # S3 responds 200 with a Location header for bucket creation
        resp.status = HTTP_OK
        resp.location = '/' + req.container_name

        return resp

    @public
    def DELETE(self, req):
        """
        Handle DELETE Bucket request

        When multipart uploads are enabled, the associated +segments
        container is cleaned up first.
        """
        if self.conf.allow_multipart_uploads:
            self._delete_segments_bucket(req)
        resp = req.get_response(self.app)
        return resp

    @public
    def POST(self, req):
        """
        Handle POST Bucket request
        """
        raise S3NotImplemented()
diff --git a/swift/common/middleware/s3api/controllers/location.py b/swift/common/middleware/s3api/controllers/location.py
new file mode 100644
index 000000000..9384ee4d8
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/location.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from swift.common.utils import public
+
+from swift.common.middleware.s3api.controllers.base import Controller, \
+ bucket_operation
+from swift.common.middleware.s3api.etree import Element, tostring
+from swift.common.middleware.s3api.s3response import HTTPOk
+
+
class LocationController(Controller):
    """
    Handles GET Bucket location, which is logged as a LOCATION operation in the
    S3 server log.
    """
    @public
    @bucket_operation
    def GET(self, req):
        """
        Handles GET Bucket location.

        Reports the configured location; the element text is left empty
        for the default 'US' region.
        """
        # Confirm the bucket exists before answering.
        req.get_response(self.app, method='HEAD')

        location_elem = Element('LocationConstraint')
        configured = self.conf.location
        if configured != 'US':
            location_elem.text = configured

        return HTTPOk(body=tostring(location_elem),
                      content_type='application/xml')
diff --git a/swift/common/middleware/s3api/controllers/logging.py b/swift/common/middleware/s3api/controllers/logging.py
new file mode 100644
index 000000000..d353a5beb
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/logging.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from swift.common.utils import public
+
+from swift.common.middleware.s3api.controllers.base import Controller, \
+ bucket_operation
+from swift.common.middleware.s3api.etree import Element, tostring
+from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented, \
+ NoLoggingStatusForKey
+
+
class LoggingStatusController(Controller):
    """
    Handles the following APIs:

    * GET Bucket logging
    * PUT Bucket logging

    Those APIs are logged as LOGGING_STATUS operations in the S3 server log.
    """
    @public
    @bucket_operation(err_resp=NoLoggingStatusForKey)
    def GET(self, req):
        """
        Handles GET Bucket logging.

        Server access logging is not supported, so this always reports
        logging as disabled via an empty BucketLoggingStatus element.
        """
        # Confirm the bucket exists before answering.
        req.get_response(self.app, method='HEAD')

        status_elem = Element('BucketLoggingStatus')
        return HTTPOk(body=tostring(status_elem),
                      content_type='application/xml')

    @public
    @bucket_operation(err_resp=NoLoggingStatusForKey)
    def PUT(self, req):
        """
        Handles PUT Bucket logging.
        """
        raise S3NotImplemented()
diff --git a/swift/common/middleware/s3api/controllers/multi_delete.py b/swift/common/middleware/s3api/controllers/multi_delete.py
new file mode 100644
index 000000000..a4326dd33
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/multi_delete.py
@@ -0,0 +1,126 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+from swift.common.utils import public
+
+from swift.common.middleware.s3api.controllers.base import Controller, \
+ bucket_operation
+from swift.common.middleware.s3api.etree import Element, SubElement, \
+ fromstring, tostring, XMLSyntaxError, DocumentInvalid
+from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented, \
+ NoSuchKey, ErrorResponse, MalformedXML, UserKeyMustBeSpecified, \
+ AccessDenied, MissingRequestBodyError
+
+MAX_MULTI_DELETE_BODY_SIZE = 61365
+
+
class MultiObjectDeleteController(Controller):
    """
    Handles Delete Multiple Objects, which is logged as a MULTI_OBJECT_DELETE
    operation in the S3 server log.
    """
    def _gen_error_body(self, error, elem, delete_list):
        # Report the same error for every requested key; used when the
        # bucket-level check fails before any delete is attempted.
        for key, version in delete_list:
            if version is not None:
                # TODO: delete the specific version of the object
                raise S3NotImplemented()

            error_elem = SubElement(elem, 'Error')
            SubElement(error_elem, 'Key').text = key
            SubElement(error_elem, 'Code').text = error.__class__.__name__
            SubElement(error_elem, 'Message').text = error._msg

        return tostring(elem)

    @public
    @bucket_operation
    def POST(self, req):
        """
        Handles Delete Multiple Objects.

        Parses the Delete XML body, deletes each listed key, and returns
        a DeleteResult document; per-key failures are collected as <Error>
        elements rather than failing the whole request.
        """
        def object_key_iter(elem):
            # Yield (key, version) for each <Object> child of <Delete>.
            for obj in elem.iterchildren('Object'):
                key = obj.find('./Key').text
                if not key:
                    raise UserKeyMustBeSpecified()
                version = obj.find('./VersionId')
                if version is not None:
                    version = version.text

                yield key, version

        try:
            xml = req.xml(MAX_MULTI_DELETE_BODY_SIZE)
            if not xml:
                raise MissingRequestBodyError()

            # the Content-MD5 header is validated against the body
            req.check_md5(xml)
            elem = fromstring(xml, 'Delete', self.logger)

            # NOTE(review): an empty <Quiet/> element (text is None) would
            # AttributeError here and surface as a 500 — TODO confirm.
            quiet = elem.find('./Quiet')
            if quiet is not None and quiet.text.lower() == 'true':
                self.quiet = True
            else:
                self.quiet = False

            delete_list = list(object_key_iter(elem))
            if len(delete_list) > self.conf.max_multi_delete_objects:
                raise MalformedXML()
        except (XMLSyntaxError, DocumentInvalid):
            raise MalformedXML()
        except ErrorResponse:
            raise
        except Exception as e:
            # unexpected failure: log and re-raise with the original
            # traceback (Python 2 three-expression raise form)
            exc_type, exc_value, exc_traceback = sys.exc_info()
            self.logger.error(e)
            raise exc_type, exc_value, exc_traceback

        elem = Element('DeleteResult')

        # check bucket existence
        try:
            req.get_response(self.app, 'HEAD')
        except AccessDenied as error:
            body = self._gen_error_body(error, elem, delete_list)
            return HTTPOk(body=body)

        for key, version in delete_list:
            if version is not None:
                # TODO: delete the specific version of the object
                raise S3NotImplemented()

            req.object_name = key

            try:
                # use a multipart-manifest delete query so SLO segments
                # are removed along with the manifest when applicable
                query = req.gen_multipart_manifest_delete_query(self.app)
                req.get_response(self.app, method='DELETE', query=query)
            except NoSuchKey:
                # deleting a missing key is reported as success, as in S3
                pass
            except ErrorResponse as e:
                error = SubElement(elem, 'Error')
                SubElement(error, 'Key').text = key
                SubElement(error, 'Code').text = e.__class__.__name__
                SubElement(error, 'Message').text = e._msg
                continue

            if not self.quiet:
                deleted = SubElement(elem, 'Deleted')
                SubElement(deleted, 'Key').text = key

        body = tostring(elem)

        return HTTPOk(body=body)
diff --git a/swift/common/middleware/s3api/controllers/multi_upload.py b/swift/common/middleware/s3api/controllers/multi_upload.py
new file mode 100644
index 000000000..396e39cf5
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/multi_upload.py
@@ -0,0 +1,671 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Implementation of S3 Multipart Upload.
+
+This module implements S3 Multipart Upload APIs with the Swift SLO feature.
+The following explains how S3api uses swift container and objects to store S3
+upload information:
+
+-----------------
+[bucket]+segments
+-----------------
+
+A container to store upload information. [bucket] is the original bucket
+where multipart upload is initiated.
+
+-----------------------------
+[bucket]+segments/[upload_id]
+-----------------------------
+
+An object of the ongoing upload id. The object is empty and used for
+checking the target upload status. If the object exists, it means that the
+upload is initiated but neither completed nor aborted.
+
+-------------------------------------------
+[bucket]+segments/[upload_id]/[part_number]
+-------------------------------------------
+
+The last suffix is the part number under the upload id. When the client uploads
+the parts, they will be stored in the namespace with
+[bucket]+segments/[upload_id]/[part_number].
+
+Example listing result in the [bucket]+segments container::
+
+ [bucket]+segments/[upload_id1] # upload id object for upload_id1
+ [bucket]+segments/[upload_id1]/1 # part object for upload_id1
+ [bucket]+segments/[upload_id1]/2 # part object for upload_id1
+ [bucket]+segments/[upload_id1]/3 # part object for upload_id1
+ [bucket]+segments/[upload_id2] # upload id object for upload_id2
+ [bucket]+segments/[upload_id2]/1 # part object for upload_id2
+ [bucket]+segments/[upload_id2]/2 # part object for upload_id2
+ .
+ .
+
+Those part objects are directly used as segments of a Swift
+Static Large Object when the multipart upload is completed.
+
+"""
+
+import os
+import re
+import sys
+
+from swift.common.swob import Range
+from swift.common.utils import json, public
+from swift.common.db import utf8encode
+
+from six.moves.urllib.parse import urlparse # pylint: disable=F0401
+
+from swift.common.middleware.s3api.controllers.base import Controller, \
+ bucket_operation, object_operation, check_container_existence
+from swift.common.middleware.s3api.s3response import InvalidArgument, \
+ ErrorResponse, MalformedXML, \
+ InvalidPart, BucketAlreadyExists, EntityTooSmall, InvalidPartOrder, \
+ InvalidRequest, HTTPOk, HTTPNoContent, NoSuchKey, NoSuchUpload, \
+ NoSuchBucket
+from swift.common.middleware.s3api.exception import BadSwiftRequest
+from swift.common.middleware.s3api.utils import unique_id, \
+ MULTIUPLOAD_SUFFIX, S3Timestamp, sysmeta_header
+from swift.common.middleware.s3api.etree import Element, SubElement, \
+ fromstring, tostring, XMLSyntaxError, DocumentInvalid
+
+DEFAULT_MAX_PARTS_LISTING = 1000
+DEFAULT_MAX_UPLOADS = 1000
+
+MAX_COMPLETE_UPLOAD_BODY_SIZE = 2048 * 1024
+
+
def _get_upload_info(req, app, upload_id):
    """
    HEAD the upload-id marker object in the +segments container and
    return the response.

    :raises NoSuchUpload: if no such marker exists, i.e. the upload is
                          not in progress
    """
    marker_container = req.container_name + MULTIUPLOAD_SUFFIX
    marker_obj = '%s/%s' % (req.object_name, upload_id)

    try:
        return req.get_response(app, 'HEAD', container=marker_container,
                                obj=marker_obj)
    except NoSuchKey:
        raise NoSuchUpload(upload_id=upload_id)
+
+
def _check_upload_info(req, app, upload_id):
    """
    Ensure the given multipart upload is in progress; raises NoSuchUpload
    otherwise.  The marker HEAD response itself is discarded.
    """
    _get_upload_info(req, app, upload_id)
+
+
class PartController(Controller):
    """
    Handles the following APIs:

    * Upload Part
    * Upload Part - Copy

    Those APIs are logged as PART operations in the S3 server log.
    """
    @public
    @object_operation
    @check_container_existence
    def PUT(self, req):
        """
        Handles Upload Part and Upload Part Copy.

        The part is stored in the +segments container under
        ``<object>/<upload_id>/<part_number>``.  For Upload Part - Copy,
        the S3 copy-source range header is translated into a Swift Range
        header before the backend request.

        :raises InvalidArgument: for a missing uploadId, an out-of-range
                                 partNumber, or a bad copy-source range
        :raises NoSuchUpload: if the upload is not in progress
        """

        if 'uploadId' not in req.params:
            raise InvalidArgument('ResourceType', 'partNumber',
                                  'Unexpected query string parameter')

        try:
            part_number = int(req.params['partNumber'])
            if part_number < 1 or self.conf.max_upload_part_num < part_number:
                raise Exception()
        except Exception:
            err_msg = 'Part number must be an integer between 1 and %d,' \
                      ' inclusive' % self.conf.max_upload_part_num
            raise InvalidArgument('partNumber', req.params['partNumber'],
                                  err_msg)

        upload_id = req.params['uploadId']
        _check_upload_info(req, self.app, upload_id)

        # redirect the PUT into the +segments container under the
        # <object>/<upload_id>/<part_number> namespace
        req.container_name += MULTIUPLOAD_SUFFIX
        req.object_name = '%s/%s/%d' % (req.object_name, upload_id,
                                        part_number)

        req_timestamp = S3Timestamp.now()
        req.headers['X-Timestamp'] = req_timestamp.internal
        source_resp = req.check_copy_source(self.app)
        if 'X-Amz-Copy-Source' in req.headers and \
                'X-Amz-Copy-Source-Range' in req.headers:
            rng = req.headers['X-Amz-Copy-Source-Range']

            # S3 requires exactly one bytes=first-last range
            header_valid = True
            try:
                rng_obj = Range(rng)
                if len(rng_obj.ranges) != 1:
                    header_valid = False
            except ValueError:
                header_valid = False
            if not header_valid:
                err_msg = ('The x-amz-copy-source-range value must be of the '
                           'form bytes=first-last where first and last are '
                           'the zero-based offsets of the first and last '
                           'bytes to copy')
                raise InvalidArgument('x-amz-source-range', rng, err_msg)

            # the range must be satisfiable against the source object size
            source_size = int(source_resp.headers['Content-Length'])
            if not rng_obj.ranges_for_length(source_size):
                err_msg = ('Range specified is not valid for source object '
                           'of size: %s' % source_size)
                raise InvalidArgument('x-amz-source-range', rng, err_msg)

            # hand the validated range to Swift and drop the S3 header
            req.headers['Range'] = rng
            del req.headers['X-Amz-Copy-Source-Range']
        resp = req.get_response(self.app)

        if 'X-Amz-Copy-Source' in req.headers:
            # copy responses carry a CopyPartResult XML body
            resp.append_copy_resp_body(req.controller_name,
                                       req_timestamp.s3xmlformat)

        resp.status = 200
        return resp
+
+
class UploadsController(Controller):
    """
    Handles the following APIs:

    * List Multipart Uploads
    * Initiate Multipart Upload

    Those APIs are logged as UPLOADS operations in the S3 server log.
    """
    @public
    @bucket_operation(err_resp=InvalidRequest,
                      err_msg="Key is not expected for the GET method "
                              "?uploads subresource")
    @check_container_existence
    def GET(self, req):
        """
        Handles List Multipart Uploads

        In-progress uploads are discovered by listing the +segments
        container and keeping only the upload-id marker objects (segment
        objects like ``<object>/<upload_id>/<n>`` are filtered out).
        """

        def separate_uploads(uploads, prefix, delimiter):
            """
            separate_uploads will separate uploads into non_delimited_uploads
            (a subset of uploads) and common_prefixes according to the
            specified delimiter. non_delimited_uploads is a list of uploads
            which exclude the delimiter. common_prefixes is a set of prefixes
            prior to the specified delimiter. Note that the prefix in the
            common_prefixes includes the delimiter itself.

            i.e. if the '/' delimiter is specified and the uploads consist
            of ['foo', 'foo/bar'], this function will return
            (['foo'], ['foo/']).

            :param uploads: A list of uploads dictionary
            :param prefix: A string of prefix reserved on the upload path.
                           (i.e. the delimiter must be searched behind the
                           prefix)
            :param delimiter: A string of delimiter to split the path in each
                              upload

            :return (non_delimited_uploads, common_prefixes)
            """
            (prefix, delimiter) = \
                utf8encode(prefix, delimiter)
            non_delimited_uploads = []
            common_prefixes = set()
            for upload in uploads:
                key = upload['key']
                # only search for the delimiter beyond the prefix
                end = key.find(delimiter, len(prefix))
                if end >= 0:
                    common_prefix = key[:end + len(delimiter)]
                    common_prefixes.add(common_prefix)
                else:
                    non_delimited_uploads.append(upload)
            return non_delimited_uploads, sorted(common_prefixes)

        encoding_type = req.params.get('encoding-type')
        if encoding_type is not None and encoding_type != 'url':
            err_msg = 'Invalid Encoding Method specified in Request'
            raise InvalidArgument('encoding-type', encoding_type, err_msg)

        keymarker = req.params.get('key-marker', '')
        uploadid = req.params.get('upload-id-marker', '')
        maxuploads = req.get_validated_param(
            'max-uploads', DEFAULT_MAX_UPLOADS, DEFAULT_MAX_UPLOADS)

        # ask for one extra record so truncation can be detected
        query = {
            'format': 'json',
            'limit': maxuploads + 1,
        }

        if uploadid and keymarker:
            query.update({'marker': '%s/%s' % (keymarker, uploadid)})
        elif keymarker:
            # '~' sorts after the digits/ids used in marker object names,
            # so this skips every upload of the marker key itself
            query.update({'marker': '%s/~' % (keymarker)})
        if 'prefix' in req.params:
            query.update({'prefix': req.params['prefix']})

        container = req.container_name + MULTIUPLOAD_SUFFIX
        try:
            resp = req.get_response(self.app, container=container, query=query)
            objects = json.loads(resp.body)
        except NoSuchBucket:
            # Assume NoSuchBucket as no uploads
            objects = []

        def object_to_upload(object_info):
            # marker objects are named '<key>/<upload_id>'
            obj, upid = object_info['name'].rsplit('/', 1)
            obj_dict = {'key': obj,
                        'upload_id': upid,
                        'last_modified': object_info['last_modified']}
            return obj_dict

        # uploads is a list consists of dict, {key, upload_id, last_modified}
        # Note that pattern matcher will drop whole segments objects like as
        # object_name/upload_id/1.
        pattern = re.compile('/[0-9]+$')
        uploads = [object_to_upload(obj) for obj in objects if
                   pattern.search(obj.get('name', '')) is None]

        prefixes = []
        if 'delimiter' in req.params:
            prefix = req.params.get('prefix', '')
            delimiter = req.params['delimiter']
            uploads, prefixes = \
                separate_uploads(uploads, prefix, delimiter)

        if len(uploads) > maxuploads:
            uploads = uploads[:maxuploads]
            truncated = True
        else:
            truncated = False

        nextkeymarker = ''
        nextuploadmarker = ''
        if len(uploads) > 1:
            nextuploadmarker = uploads[-1]['upload_id']
            nextkeymarker = uploads[-1]['key']

        result_elem = Element('ListMultipartUploadsResult')
        SubElement(result_elem, 'Bucket').text = req.container_name
        SubElement(result_elem, 'KeyMarker').text = keymarker
        SubElement(result_elem, 'UploadIdMarker').text = uploadid
        SubElement(result_elem, 'NextKeyMarker').text = nextkeymarker
        SubElement(result_elem, 'NextUploadIdMarker').text = nextuploadmarker
        if 'delimiter' in req.params:
            SubElement(result_elem, 'Delimiter').text = \
                req.params['delimiter']
        if 'prefix' in req.params:
            SubElement(result_elem, 'Prefix').text = req.params['prefix']
        SubElement(result_elem, 'MaxUploads').text = str(maxuploads)
        if encoding_type is not None:
            SubElement(result_elem, 'EncodingType').text = encoding_type
        SubElement(result_elem, 'IsTruncated').text = \
            'true' if truncated else 'false'

        # TODO: don't show uploads which are initiated before this bucket is
        # created.
        for u in uploads:
            upload_elem = SubElement(result_elem, 'Upload')
            SubElement(upload_elem, 'Key').text = u['key']
            SubElement(upload_elem, 'UploadId').text = u['upload_id']
            initiator_elem = SubElement(upload_elem, 'Initiator')
            SubElement(initiator_elem, 'ID').text = req.user_id
            SubElement(initiator_elem, 'DisplayName').text = req.user_id
            owner_elem = SubElement(upload_elem, 'Owner')
            SubElement(owner_elem, 'ID').text = req.user_id
            SubElement(owner_elem, 'DisplayName').text = req.user_id
            SubElement(upload_elem, 'StorageClass').text = 'STANDARD'
            SubElement(upload_elem, 'Initiated').text = \
                u['last_modified'][:-3] + 'Z'

        for p in prefixes:
            elem = SubElement(result_elem, 'CommonPrefixes')
            SubElement(elem, 'Prefix').text = p

        body = tostring(result_elem, encoding_type=encoding_type)

        return HTTPOk(body=body, content_type='application/xml')

    @public
    @object_operation
    @check_container_existence
    def POST(self, req):
        """
        Handles Initiate Multipart Upload.

        Ensures the +segments container exists, then creates the empty
        upload-id marker object ``<object>/<upload_id>`` that marks the
        upload as in progress.
        """

        # Create a unique S3 upload id from UUID to avoid duplicates.
        upload_id = unique_id()

        container = req.container_name + MULTIUPLOAD_SUFFIX
        content_type = req.headers.get('Content-Type')
        if content_type:
            # remember the eventual object's content type in sysmeta; the
            # marker itself is stored as application/directory below
            req.headers[sysmeta_header('object', 'has-content-type')] = 'yes'
            req.headers[
                sysmeta_header('object', 'content-type')] = content_type
        else:
            req.headers[sysmeta_header('object', 'has-content-type')] = 'no'
        req.headers['Content-Type'] = 'application/directory'

        try:
            req.get_response(self.app, 'PUT', container, '')
        except BucketAlreadyExists:
            # the +segments container already exists; fine
            pass

        obj = '%s/%s' % (req.object_name, upload_id)

        # the marker body is empty, so any client-supplied checksum
        # headers must not be applied to it
        req.headers.pop('Etag', None)
        req.headers.pop('Content-Md5', None)

        req.get_response(self.app, 'PUT', container, obj, body='')

        result_elem = Element('InitiateMultipartUploadResult')
        SubElement(result_elem, 'Bucket').text = req.container_name
        SubElement(result_elem, 'Key').text = req.object_name
        SubElement(result_elem, 'UploadId').text = upload_id

        body = tostring(result_elem)

        return HTTPOk(body=body, content_type='application/xml')
+
+
+class UploadController(Controller):
+ """
+ Handles the following APIs:
+
+ * List Parts
+ * Abort Multipart Upload
+ * Complete Multipart Upload
+
+ Those APIs are logged as UPLOAD operations in the S3 server log.
+ """
+ @public
+ @object_operation
+ @check_container_existence
+ def GET(self, req):
+ """
+ Handles List Parts.
+ """
+ def filter_part_num_marker(o):
+ try:
+ num = int(os.path.basename(o['name']))
+ return num > part_num_marker
+ except ValueError:
+ return False
+
+ encoding_type = req.params.get('encoding-type')
+ if encoding_type is not None and encoding_type != 'url':
+ err_msg = 'Invalid Encoding Method specified in Request'
+ raise InvalidArgument('encoding-type', encoding_type, err_msg)
+
+ upload_id = req.params['uploadId']
+ _check_upload_info(req, self.app, upload_id)
+
+ maxparts = req.get_validated_param(
+ 'max-parts', DEFAULT_MAX_PARTS_LISTING,
+ self.conf.max_parts_listing)
+ part_num_marker = req.get_validated_param(
+ 'part-number-marker', 0)
+
+ query = {
+ 'format': 'json',
+ 'limit': maxparts + 1,
+ 'prefix': '%s/%s/' % (req.object_name, upload_id),
+ 'delimiter': '/'
+ }
+
+ container = req.container_name + MULTIUPLOAD_SUFFIX
+ resp = req.get_response(self.app, container=container, obj='',
+ query=query)
+ objects = json.loads(resp.body)
+
+ last_part = 0
+
+ # If the caller requested a list starting at a specific part number,
+ # construct a sub-set of the object list.
+ objList = filter(filter_part_num_marker, objects)
+
+ # pylint: disable-msg=E1103
+ objList.sort(key=lambda o: int(o['name'].split('/')[-1]))
+
+ if len(objList) > maxparts:
+ objList = objList[:maxparts]
+ truncated = True
+ else:
+ truncated = False
+        # TODO: We have to retrieve the object list again when truncated is
+        # True and some objects were filtered out by invalid name, because
+        # there may not be enough objects to satisfy the maxparts limit.
+
+ if objList:
+ o = objList[-1]
+ last_part = os.path.basename(o['name'])
+
+ result_elem = Element('ListPartsResult')
+ SubElement(result_elem, 'Bucket').text = req.container_name
+ SubElement(result_elem, 'Key').text = req.object_name
+ SubElement(result_elem, 'UploadId').text = upload_id
+
+ initiator_elem = SubElement(result_elem, 'Initiator')
+ SubElement(initiator_elem, 'ID').text = req.user_id
+ SubElement(initiator_elem, 'DisplayName').text = req.user_id
+ owner_elem = SubElement(result_elem, 'Owner')
+ SubElement(owner_elem, 'ID').text = req.user_id
+ SubElement(owner_elem, 'DisplayName').text = req.user_id
+
+ SubElement(result_elem, 'StorageClass').text = 'STANDARD'
+ SubElement(result_elem, 'PartNumberMarker').text = str(part_num_marker)
+ SubElement(result_elem, 'NextPartNumberMarker').text = str(last_part)
+ SubElement(result_elem, 'MaxParts').text = str(maxparts)
+ if 'encoding-type' in req.params:
+ SubElement(result_elem, 'EncodingType').text = \
+ req.params['encoding-type']
+ SubElement(result_elem, 'IsTruncated').text = \
+ 'true' if truncated else 'false'
+
+ for i in objList:
+ part_elem = SubElement(result_elem, 'Part')
+ SubElement(part_elem, 'PartNumber').text = i['name'].split('/')[-1]
+ SubElement(part_elem, 'LastModified').text = \
+ i['last_modified'][:-3] + 'Z'
+ SubElement(part_elem, 'ETag').text = '"%s"' % i['hash']
+ SubElement(part_elem, 'Size').text = str(i['bytes'])
+
+ body = tostring(result_elem, encoding_type=encoding_type)
+
+ return HTTPOk(body=body, content_type='application/xml')
+
+ @public
+ @object_operation
+ @check_container_existence
+ def DELETE(self, req):
+ """
+ Handles Abort Multipart Upload.
+ """
+ upload_id = req.params['uploadId']
+ _check_upload_info(req, self.app, upload_id)
+
+ # First check to see if this multi-part upload was already
+ # completed. Look in the primary container, if the object exists,
+ # then it was completed and we return an error here.
+ container = req.container_name + MULTIUPLOAD_SUFFIX
+ obj = '%s/%s' % (req.object_name, upload_id)
+ req.get_response(self.app, container=container, obj=obj)
+
+ # The completed object was not found so this
+ # must be a multipart upload abort.
+ # We must delete any uploaded segments for this UploadID and then
+ # delete the object in the main container as well
+ query = {
+ 'format': 'json',
+ 'prefix': '%s/%s/' % (req.object_name, upload_id),
+ 'delimiter': '/',
+ }
+
+ resp = req.get_response(self.app, 'GET', container, '', query=query)
+
+ # Iterate over the segment objects and delete them individually
+ objects = json.loads(resp.body)
+ for o in objects:
+ container = req.container_name + MULTIUPLOAD_SUFFIX
+ req.get_response(self.app, container=container, obj=o['name'])
+
+ return HTTPNoContent()
+
+ @public
+ @object_operation
+ @check_container_existence
+ def POST(self, req):
+ """
+ Handles Complete Multipart Upload.
+ """
+ upload_id = req.params['uploadId']
+ resp = _get_upload_info(req, self.app, upload_id)
+ headers = {}
+ for key, val in resp.headers.iteritems():
+ _key = key.lower()
+ if _key.startswith('x-amz-meta-'):
+ headers['x-object-meta-' + _key[11:]] = val
+
+ hct_header = sysmeta_header('object', 'has-content-type')
+ if resp.sysmeta_headers.get(hct_header) == 'yes':
+ content_type = resp.sysmeta_headers.get(
+ sysmeta_header('object', 'content-type'))
+ elif hct_header in resp.sysmeta_headers:
+ # has-content-type is present but false, so no content type was
+ # set on initial upload. In that case, we won't set one on our
+ # PUT request. Swift will end up guessing one based on the
+ # object name.
+ content_type = None
+ else:
+ content_type = resp.headers.get('Content-Type')
+
+ if content_type:
+ headers['Content-Type'] = content_type
+
+ # Query for the objects in the segments area to make sure it completed
+ query = {
+ 'format': 'json',
+ 'prefix': '%s/%s/' % (req.object_name, upload_id),
+ 'delimiter': '/'
+ }
+
+ container = req.container_name + MULTIUPLOAD_SUFFIX
+ resp = req.get_response(self.app, 'GET', container, '', query=query)
+ objinfo = json.loads(resp.body)
+ objtable = dict((o['name'],
+ {'path': '/'.join(['', container, o['name']]),
+ 'etag': o['hash'],
+ 'size_bytes': o['bytes']}) for o in objinfo)
+
+ manifest = []
+ previous_number = 0
+ try:
+ xml = req.xml(MAX_COMPLETE_UPLOAD_BODY_SIZE)
+ if not xml:
+ raise InvalidRequest(msg='You must specify at least one part')
+
+ complete_elem = fromstring(
+ xml, 'CompleteMultipartUpload', self.logger)
+ for part_elem in complete_elem.iterchildren('Part'):
+ part_number = int(part_elem.find('./PartNumber').text)
+
+ if part_number <= previous_number:
+ raise InvalidPartOrder(upload_id=upload_id)
+ previous_number = part_number
+
+ etag = part_elem.find('./ETag').text
+ if len(etag) >= 2 and etag[0] == '"' and etag[-1] == '"':
+ # strip double quotes
+ etag = etag[1:-1]
+
+ info = objtable.get("%s/%s/%s" % (req.object_name, upload_id,
+ part_number))
+ if info is None or info['etag'] != etag:
+ raise InvalidPart(upload_id=upload_id,
+ part_number=part_number)
+
+ info['size_bytes'] = int(info['size_bytes'])
+ manifest.append(info)
+ except (XMLSyntaxError, DocumentInvalid):
+ raise MalformedXML()
+ except ErrorResponse:
+ raise
+ except Exception as e:
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ self.logger.error(e)
+ raise exc_type, exc_value, exc_traceback
+
+ # Check the size of each segment except the last and make sure they are
+ # all more than the minimum upload chunk size
+ for info in manifest[:-1]:
+ if info['size_bytes'] < self.conf.min_segment_size:
+ raise EntityTooSmall()
+
+ try:
+ # TODO: add support for versioning
+ if manifest:
+ resp = req.get_response(self.app, 'PUT',
+ body=json.dumps(manifest),
+ query={'multipart-manifest': 'put'},
+ headers=headers)
+ else:
+ # the upload must have consisted of a single zero-length part
+ # just write it directly
+ resp = req.get_response(self.app, 'PUT', body='',
+ headers=headers)
+ except BadSwiftRequest as e:
+ msg = str(e)
+ expected_msg = 'too small; each segment must be at least 1 byte'
+ if expected_msg in msg:
+ # FIXME: AWS S3 allows a smaller object than 5 MB if there is
+ # only one part. Use a COPY request to copy the part object
+ # from the segments container instead.
+ raise EntityTooSmall(msg)
+ else:
+ raise
+
+ # clean up the multipart-upload record
+ obj = '%s/%s' % (req.object_name, upload_id)
+ try:
+ req.get_response(self.app, 'DELETE', container, obj)
+ except NoSuchKey:
+ pass # We know that this existed long enough for us to HEAD
+
+ result_elem = Element('CompleteMultipartUploadResult')
+
+ # NOTE: boto with sig v4 appends port to HTTP_HOST value at the
+ # request header when the port is non default value and it makes
+        # req.host_url look like http://localhost:8080:8080/path,
+        # which is obviously invalid. Probably it should be resolved in
+        # swift.common.swob; tentatively we are parsing and
+ # reconstructing the correct host_url info here.
+ # in detail, https://github.com/boto/boto/pull/3513
+ parsed_url = urlparse(req.host_url)
+ host_url = '%s://%s' % (parsed_url.scheme, parsed_url.hostname)
+ if parsed_url.port:
+ host_url += ':%s' % parsed_url.port
+
+ SubElement(result_elem, 'Location').text = host_url + req.path
+ SubElement(result_elem, 'Bucket').text = req.container_name
+ SubElement(result_elem, 'Key').text = req.object_name
+ SubElement(result_elem, 'ETag').text = resp.etag
+
+ resp.body = tostring(result_elem)
+ resp.status = 200
+ resp.content_type = "application/xml"
+
+ return resp
diff --git a/swift/common/middleware/s3api/controllers/obj.py b/swift/common/middleware/s3api/controllers/obj.py
new file mode 100644
index 000000000..7017170a7
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/obj.py
@@ -0,0 +1,150 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+from swift.common.http import HTTP_OK, HTTP_PARTIAL_CONTENT, HTTP_NO_CONTENT
+from swift.common.swob import Range, content_range_header_value
+from swift.common.utils import public
+
+from swift.common.middleware.s3api.utils import S3Timestamp
+from swift.common.middleware.s3api.controllers.base import Controller
+from swift.common.middleware.s3api.s3response import S3NotImplemented, \
+ InvalidRange, NoSuchKey, InvalidArgument
+
+
+class ObjectController(Controller):
+ """
+ Handles requests on objects
+ """
+ def _gen_head_range_resp(self, req_range, resp):
+ """
+ Swift doesn't handle Range header for HEAD requests.
+ So, this method generates HEAD range response from HEAD response.
+        S3 returns a HEAD range response if the value of the range satisfies
+        the conditions described in the following document.
+ - http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35
+ """
+ length = long(resp.headers.get('Content-Length'))
+
+ try:
+ content_range = Range(req_range)
+ except ValueError:
+ return resp
+
+ ranges = content_range.ranges_for_length(length)
+ if ranges == []:
+ raise InvalidRange()
+ elif ranges:
+ if len(ranges) == 1:
+ start, end = ranges[0]
+ resp.headers['Content-Range'] = \
+ content_range_header_value(start, end, length)
+ resp.headers['Content-Length'] = (end - start)
+ resp.status = HTTP_PARTIAL_CONTENT
+ return resp
+ else:
+                # TODO: It is necessary to confirm whether we need to respond
+                # with a multi-part response (e.g. bytes=0-10,20-30).
+ pass
+
+ return resp
+
+ def GETorHEAD(self, req):
+ resp = req.get_response(self.app)
+
+ if req.method == 'HEAD':
+ resp.app_iter = None
+
+ for key in ('content-type', 'content-language', 'expires',
+ 'cache-control', 'content-disposition',
+ 'content-encoding'):
+ if 'response-' + key in req.params:
+ resp.headers[key] = req.params['response-' + key]
+
+ return resp
+
+ @public
+ def HEAD(self, req):
+ """
+ Handle HEAD Object request
+ """
+ resp = self.GETorHEAD(req)
+
+ if 'range' in req.headers:
+ req_range = req.headers['range']
+ resp = self._gen_head_range_resp(req_range, resp)
+
+ return resp
+
+ @public
+ def GET(self, req):
+ """
+ Handle GET Object request
+ """
+ return self.GETorHEAD(req)
+
+ @public
+ def PUT(self, req):
+ """
+ Handle PUT Object and PUT Object (Copy) request
+ """
+ # set X-Timestamp by s3api to use at copy resp body
+ req_timestamp = S3Timestamp.now()
+ req.headers['X-Timestamp'] = req_timestamp.internal
+ if all(h in req.headers
+ for h in ('X-Amz-Copy-Source', 'X-Amz-Copy-Source-Range')):
+ raise InvalidArgument('x-amz-copy-source-range',
+ req.headers['X-Amz-Copy-Source-Range'],
+ 'Illegal copy header')
+ req.check_copy_source(self.app)
+ resp = req.get_response(self.app)
+
+ if 'X-Amz-Copy-Source' in req.headers:
+ resp.append_copy_resp_body(req.controller_name,
+ req_timestamp.s3xmlformat)
+
+ # delete object metadata from response
+ for key in list(resp.headers.keys()):
+ if key.startswith('x-amz-meta-'):
+ del resp.headers[key]
+
+ resp.status = HTTP_OK
+ return resp
+
+ @public
+ def POST(self, req):
+ raise S3NotImplemented()
+
+ @public
+ def DELETE(self, req):
+ """
+ Handle DELETE Object request
+ """
+ try:
+ query = req.gen_multipart_manifest_delete_query(self.app)
+ req.headers['Content-Type'] = None # Ignore client content-type
+ resp = req.get_response(self.app, query=query)
+ if query and resp.status_int == HTTP_OK:
+ for chunk in resp.app_iter:
+ pass # drain the bulk-deleter response
+ resp.status = HTTP_NO_CONTENT
+ resp.body = ''
+ except NoSuchKey:
+ # expect to raise NoSuchBucket when the bucket doesn't exist
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ req.get_container_info(self.app)
+ raise exc_type, exc_value, exc_traceback
+ return resp
diff --git a/swift/common/middleware/s3api/controllers/s3_acl.py b/swift/common/middleware/s3api/controllers/s3_acl.py
new file mode 100644
index 000000000..a99c85b70
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/s3_acl.py
@@ -0,0 +1,67 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from urllib import quote
+from swift.common.utils import public
+
+from swift.common.middleware.s3api.controllers.base import Controller
+from swift.common.middleware.s3api.s3response import HTTPOk
+from swift.common.middleware.s3api.etree import tostring
+
+
+class S3AclController(Controller):
+ """
+ Handles the following APIs:
+
+ * GET Bucket acl
+ * PUT Bucket acl
+ * GET Object acl
+ * PUT Object acl
+
+ Those APIs are logged as ACL operations in the S3 server log.
+ """
+ @public
+ def GET(self, req):
+ """
+ Handles GET Bucket acl and GET Object acl.
+ """
+ resp = req.get_response(self.app)
+
+ acl = resp.object_acl if req.is_object_request else resp.bucket_acl
+
+ resp = HTTPOk()
+ resp.body = tostring(acl.elem())
+
+ return resp
+
+ @public
+ def PUT(self, req):
+ """
+ Handles PUT Bucket acl and PUT Object acl.
+ """
+ if req.is_object_request:
+ headers = {}
+ src_path = '/%s/%s' % (req.container_name, req.object_name)
+
+            # 'object-sysmeta' can be updated by the 'Copy' method,
+            # but cannot be updated by the 'POST' method.
+            # So headers['X-Copy-From'] for a copy request is added here.
+ headers['X-Copy-From'] = quote(src_path)
+ headers['Content-Length'] = 0
+ req.get_response(self.app, 'PUT', headers=headers)
+ else:
+ req.get_response(self.app, 'POST')
+
+ return HTTPOk()
diff --git a/swift/common/middleware/s3api/controllers/service.py b/swift/common/middleware/s3api/controllers/service.py
new file mode 100644
index 000000000..976a8afa4
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/service.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from swift.common.utils import json, public
+
+from swift.common.middleware.s3api.controllers.base import Controller
+from swift.common.middleware.s3api.etree import Element, SubElement, tostring
+from swift.common.middleware.s3api.s3response import HTTPOk, AccessDenied, \
+ NoSuchBucket
+from swift.common.middleware.s3api.utils import validate_bucket_name
+
+
+class ServiceController(Controller):
+ """
+ Handles account level requests.
+ """
+ @public
+ def GET(self, req):
+ """
+ Handle GET Service request
+ """
+ resp = req.get_response(self.app, query={'format': 'json'})
+
+ containers = json.loads(resp.body)
+
+ containers = filter(
+ lambda item: validate_bucket_name(
+ item['name'], self.conf.dns_compliant_bucket_names),
+ containers)
+
+ # we don't keep the creation time of a bucket (s3cmd doesn't
+ # work without that) so we use something bogus.
+ elem = Element('ListAllMyBucketsResult')
+
+ owner = SubElement(elem, 'Owner')
+ SubElement(owner, 'ID').text = req.user_id
+ SubElement(owner, 'DisplayName').text = req.user_id
+
+ buckets = SubElement(elem, 'Buckets')
+ for c in containers:
+ if self.conf.s3_acl and self.conf.check_bucket_owner:
+ try:
+ req.get_response(self.app, 'HEAD', c['name'])
+ except AccessDenied:
+ continue
+ except NoSuchBucket:
+ continue
+
+ bucket = SubElement(buckets, 'Bucket')
+ SubElement(bucket, 'Name').text = c['name']
+ SubElement(bucket, 'CreationDate').text = \
+ '2009-02-03T16:45:09.000Z'
+
+ body = tostring(elem)
+
+ return HTTPOk(content_type='application/xml', body=body)
diff --git a/swift/common/middleware/s3api/controllers/versioning.py b/swift/common/middleware/s3api/controllers/versioning.py
new file mode 100644
index 000000000..21d49cc0e
--- /dev/null
+++ b/swift/common/middleware/s3api/controllers/versioning.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from swift.common.utils import public
+
+from swift.common.middleware.s3api.controllers.base import Controller, \
+ bucket_operation
+from swift.common.middleware.s3api.etree import Element, tostring
+from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented
+
+
+class VersioningController(Controller):
+ """
+ Handles the following APIs:
+
+ * GET Bucket versioning
+ * PUT Bucket versioning
+
+ Those APIs are logged as VERSIONING operations in the S3 server log.
+ """
+ @public
+ @bucket_operation
+ def GET(self, req):
+ """
+ Handles GET Bucket versioning.
+ """
+ req.get_response(self.app, method='HEAD')
+
+ # Just report there is no versioning configured here.
+ elem = Element('VersioningConfiguration')
+ body = tostring(elem)
+
+ return HTTPOk(body=body, content_type="text/plain")
+
+ @public
+ @bucket_operation
+ def PUT(self, req):
+ """
+ Handles PUT Bucket versioning.
+ """
+ raise S3NotImplemented()
diff --git a/swift/common/middleware/s3api/etree.py b/swift/common/middleware/s3api/etree.py
new file mode 100644
index 000000000..cfbc8b24d
--- /dev/null
+++ b/swift/common/middleware/s3api/etree.py
@@ -0,0 +1,146 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import lxml.etree
+from urllib import quote
+from copy import deepcopy
+from pkg_resources import resource_stream # pylint: disable-msg=E0611
+import sys
+
+from swift.common.utils import get_logger
+from swift.common.middleware.s3api.exception import S3Exception
+from swift.common.middleware.s3api.utils import camel_to_snake, \
+ utf8encode, utf8decode
+
+XMLNS_S3 = 'http://s3.amazonaws.com/doc/2006-03-01/'
+XMLNS_XSI = 'http://www.w3.org/2001/XMLSchema-instance'
+
+
+class XMLSyntaxError(S3Exception):
+ pass
+
+
+class DocumentInvalid(S3Exception):
+ pass
+
+
+def cleanup_namespaces(elem):
+ def remove_ns(tag, ns):
+ if tag.startswith('{%s}' % ns):
+ tag = tag[len('{%s}' % ns):]
+ return tag
+
+ if not isinstance(elem.tag, basestring):
+ # elem is a comment element.
+ return
+
+ # remove s3 namespace
+ elem.tag = remove_ns(elem.tag, XMLNS_S3)
+
+ # remove default namespace
+ if elem.nsmap and None in elem.nsmap:
+ elem.tag = remove_ns(elem.tag, elem.nsmap[None])
+
+ for e in elem.iterchildren():
+ cleanup_namespaces(e)
+
+
+def fromstring(text, root_tag=None, logger=None):
+ try:
+ elem = lxml.etree.fromstring(text, parser)
+ except lxml.etree.XMLSyntaxError as e:
+ if logger:
+ logger.debug(e)
+ raise XMLSyntaxError(e)
+
+ cleanup_namespaces(elem)
+
+ if root_tag is not None:
+ # validate XML
+ try:
+ path = 'schema/%s.rng' % camel_to_snake(root_tag)
+ with resource_stream(__name__, path) as rng:
+ lxml.etree.RelaxNG(file=rng).assertValid(elem)
+ except IOError as e:
+ # Probably, the schema file doesn't exist.
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+ logger = logger or get_logger({}, log_route='s3api')
+ logger.error(e)
+ raise exc_type, exc_value, exc_traceback
+ except lxml.etree.DocumentInvalid as e:
+ if logger:
+ logger.debug(e)
+ raise DocumentInvalid(e)
+
+ return elem
+
+
+def tostring(tree, encoding_type=None, use_s3ns=True):
+ if use_s3ns:
+ nsmap = tree.nsmap.copy()
+ nsmap[None] = XMLNS_S3
+
+ root = Element(tree.tag, attrib=tree.attrib, nsmap=nsmap)
+ root.text = tree.text
+ root.extend(deepcopy(tree.getchildren()))
+ tree = root
+
+ if encoding_type == 'url':
+ tree = deepcopy(tree)
+ for e in tree.iter():
+ # Some elements are not url-encoded even when we specify
+ # encoding_type=url.
+ blacklist = ['LastModified', 'ID', 'DisplayName', 'Initiated']
+ if e.tag not in blacklist:
+ if isinstance(e.text, basestring):
+ e.text = quote(e.text)
+
+ return lxml.etree.tostring(tree, xml_declaration=True, encoding='UTF-8')
+
+
+class _Element(lxml.etree.ElementBase):
+ """
+ Wrapper Element class of lxml.etree.Element to support
+ a utf-8 encoded non-ascii string as a text.
+
+    Why do we need this?:
+    The original lxml.etree.Element supports only unicode for the text.
+    That hurts maintainability because we have to call a lot of encode/decode
+    methods to apply the account/container/object name (i.e. PATH_INFO) to
+    each Element instance. When using this class, we can remove such
+    redundant code from the swift.common.middleware.s3api middleware.
+ """
+ def __init__(self, *args, **kwargs):
+ # pylint: disable-msg=E1002
+ super(_Element, self).__init__(*args, **kwargs)
+
+ @property
+ def text(self):
+ """
+ utf-8 wrapper property of lxml.etree.Element.text
+ """
+ return utf8encode(lxml.etree.ElementBase.text.__get__(self))
+
+ @text.setter
+ def text(self, value):
+ lxml.etree.ElementBase.text.__set__(self, utf8decode(value))
+
+
+parser_lookup = lxml.etree.ElementDefaultClassLookup(element=_Element)
+parser = lxml.etree.XMLParser()
+parser.set_element_class_lookup(parser_lookup)
+
+Element = parser.makeelement
+SubElement = lxml.etree.SubElement
diff --git a/swift/common/middleware/s3api/exception.py b/swift/common/middleware/s3api/exception.py
new file mode 100644
index 000000000..0e060395d
--- /dev/null
+++ b/swift/common/middleware/s3api/exception.py
@@ -0,0 +1,36 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class S3Exception(Exception):
+ pass
+
+
+class NotS3Request(S3Exception):
+ pass
+
+
+class BadSwiftRequest(S3Exception):
+ pass
+
+
+class ACLError(S3Exception):
+ pass
+
+
+class InvalidSubresource(S3Exception):
+ def __init__(self, resource, cause):
+ self.resource = resource
+ self.cause = cause
diff --git a/swift/common/middleware/s3api/s3api.py b/swift/common/middleware/s3api/s3api.py
new file mode 100644
index 000000000..ea7b328e6
--- /dev/null
+++ b/swift/common/middleware/s3api/s3api.py
@@ -0,0 +1,280 @@
+# Copyright (c) 2010-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+The s3api middleware will emulate the S3 REST api on top of swift.
+
+To enable this middleware in your configuration, add the s3api middleware
+in front of the auth middleware. See ``proxy-server.conf-sample`` for more
+detail and configurable options.
+
+To set up your client, the access key will be the concatenation of the
+account and user strings that should look like test:tester, and the
+secret access key is the account password. The host should also point
+to the swift storage hostname.
+
+An example client using the python boto library is as follows::
+
+ from boto.s3.connection import S3Connection
+ connection = S3Connection(
+ aws_access_key_id='test:tester',
+ aws_secret_access_key='testing',
+ port=8080,
+ host='127.0.0.1',
+ is_secure=False,
+ calling_format=boto.s3.connection.OrdinaryCallingFormat())
+
+----------
+Deployment
+----------
+
+Proxy-Server Setting
+^^^^^^^^^^^^^^^^^^^^
+
+Set s3api before your auth in your pipeline in ``proxy-server.conf`` file.
+To enable all compatibility currently supported, you should make sure that
+bulk, slo, and your auth middleware are also included in your proxy
+pipeline setting.
+
+Minimum example config is::
+
+ [pipeline:main]
+ pipeline = proxy-logging cache s3api tempauth bulk slo proxy-logging
+ proxy-server
+
+When using keystone, the config will be::
+
+ [pipeline:main]
+ pipeline = proxy-logging cache s3api s3token keystoneauth bulk slo
+ proxy-logging proxy-server
+
+.. note::
+ ``keystonemiddleware.authtoken`` can be located before/after s3api but
+   we recommend putting it before s3api because when authtoken is after s3api,
+ both authtoken and s3token will issue the acceptable token to keystone
+ (i.e. authenticate twice).
+
+Object-Server Setting
+^^^^^^^^^^^^^^^^^^^^^
+
+To get better compatibility, you may add the S3 supported headers
+(Cache-Control, Content-Language, Expires, and X-Robots-Tag), which are
+not supported in Swift by default, to the allowed_headers option in
+``object-server.conf``. Please see ``object-server.conf`` for more detail.
+
+-----------
+Constraints
+-----------
+Currently, the s3api is being ported from https://github.com/openstack/swift3
+so any existing issues in swift3 are still remaining. Please make sure
+descriptions in the example ``proxy-server.conf`` and what happens with the
+config, before enabling the options.
+
+-------------
+Supported API
+-------------
+The compatibility will continue to be improved upstream; you can keep an
+eye on compatibility via a check tool built by SwiftStack. See
+https://github.com/swiftstack/s3compat for details.
+
+"""
+
+from paste.deploy import loadwsgi
+
+from swift.common.wsgi import PipelineWrapper, loadcontext
+
+from swift.common.middleware.s3api.exception import NotS3Request, \
+ InvalidSubresource
+from swift.common.middleware.s3api.s3request import get_request_class
+from swift.common.middleware.s3api.s3response import ErrorResponse, \
+ InternalError, MethodNotAllowed, S3ResponseBase, S3NotImplemented
+from swift.common.utils import get_logger, register_swift_info, \
+ config_true_value, config_positive_int_value
+from swift.common.middleware.s3api.utils import Config
+from swift.common.middleware.s3api.acl_handlers import get_acl_handler
+
+
+class S3ApiMiddleware(object):
+ """S3Api: S3 compatibility middleware"""
+ def __init__(self, app, conf, *args, **kwargs):
+ self.app = app
+ self.conf = Config()
+
+ # Set default values if they are not configured
+ self.conf.allow_no_owner = config_true_value(
+ conf.get('allow_no_owner', False))
+ self.conf.location = conf.get('location', 'US')
+ self.conf.dns_compliant_bucket_names = config_true_value(
+ conf.get('dns_compliant_bucket_names', True))
+ self.conf.max_bucket_listing = config_positive_int_value(
+ conf.get('max_bucket_listing', 1000))
+ self.conf.max_parts_listing = config_positive_int_value(
+ conf.get('max_parts_listing', 1000))
+ self.conf.max_multi_delete_objects = config_positive_int_value(
+ conf.get('max_multi_delete_objects', 1000))
+ self.conf.s3_acl = config_true_value(
+ conf.get('s3_acl', False))
+ self.conf.storage_domain = conf.get('storage_domain', '')
+ self.conf.auth_pipeline_check = config_true_value(
+ conf.get('auth_pipeline_check', True))
+ self.conf.max_upload_part_num = config_positive_int_value(
+ conf.get('max_upload_part_num', 1000))
+ self.conf.check_bucket_owner = config_true_value(
+ conf.get('check_bucket_owner', False))
+ self.conf.force_swift_request_proxy_log = config_true_value(
+ conf.get('force_swift_request_proxy_log', False))
+ self.conf.allow_multipart_uploads = config_true_value(
+ conf.get('allow_multipart_uploads', True))
+ self.conf.min_segment_size = config_positive_int_value(
+ conf.get('min_segment_size', 5242880))
+
+ self.logger = get_logger(
+ conf, log_route=conf.get('log_name', 's3api'))
+ self.slo_enabled = self.conf.allow_multipart_uploads
+ self.check_pipeline(self.conf)
+
+ def __call__(self, env, start_response):
+ try:
+ req_class = get_request_class(env, self.conf.s3_acl)
+ req = req_class(
+ env, self.app, self.slo_enabled, self.conf.storage_domain,
+ self.conf.location, self.conf.force_swift_request_proxy_log,
+ self.conf.dns_compliant_bucket_names,
+ self.conf.allow_multipart_uploads, self.conf.allow_no_owner)
+ resp = self.handle_request(req)
+ except NotS3Request:
+ resp = self.app
+ except InvalidSubresource as e:
+ self.logger.debug(e.cause)
+ except ErrorResponse as err_resp:
+ if isinstance(err_resp, InternalError):
+ self.logger.exception(err_resp)
+ resp = err_resp
+ except Exception as e:
+ self.logger.exception(e)
+ resp = InternalError(reason=e)
+
+ if isinstance(resp, S3ResponseBase) and 'swift.trans_id' in env:
+ resp.headers['x-amz-id-2'] = env['swift.trans_id']
+ resp.headers['x-amz-request-id'] = env['swift.trans_id']
+
+ return resp(env, start_response)
+
+ def handle_request(self, req):
+ self.logger.debug('Calling S3Api Middleware')
+ self.logger.debug(req.__dict__)
+ try:
+ controller = req.controller(self.app, self.conf, self.logger)
+ except S3NotImplemented:
+            # TODO: Probably we should distinguish the errors to log this warning
+ self.logger.warning('multipart: No SLO middleware in pipeline')
+ raise
+
+ acl_handler = get_acl_handler(req.controller_name)(req, self.logger)
+ req.set_acl_handler(acl_handler)
+
+ if hasattr(controller, req.method):
+ handler = getattr(controller, req.method)
+ if not getattr(handler, 'publicly_accessible', False):
+ raise MethodNotAllowed(req.method,
+ req.controller.resource_type())
+ res = handler(req)
+ else:
+ raise MethodNotAllowed(req.method,
+ req.controller.resource_type())
+
+ return res
+
+ def check_pipeline(self, conf):
+ """
+ Check that proxy-server.conf has an appropriate pipeline for s3api.
+ """
+ if conf.get('__file__', None) is None:
+ return
+
+ ctx = loadcontext(loadwsgi.APP, conf.__file__)
+ pipeline = str(PipelineWrapper(ctx)).split(' ')
+
+        # Add compatibility with 3rd party middleware.
+ self.check_filter_order(pipeline, ['s3api', 'proxy-server'])
+
+ auth_pipeline = pipeline[pipeline.index('s3api') + 1:
+ pipeline.index('proxy-server')]
+
+ # Check SLO middleware
+ if self.slo_enabled and 'slo' not in auth_pipeline:
+ self.slo_enabled = False
+ self.logger.warning('s3api middleware requires SLO middleware '
+ 'to support multi-part upload, please add it '
+ 'in pipeline')
+
+ if not conf.auth_pipeline_check:
+ self.logger.debug('Skip pipeline auth check.')
+ return
+
+ if 'tempauth' in auth_pipeline:
+ self.logger.debug('Use tempauth middleware.')
+ elif 'keystoneauth' in auth_pipeline:
+ self.check_filter_order(
+ auth_pipeline,
+ ['s3token', 'keystoneauth'])
+ self.logger.debug('Use keystone middleware.')
+ elif len(auth_pipeline):
+ self.logger.debug('Use third party(unknown) auth middleware.')
+ else:
+ raise ValueError('Invalid pipeline %r: expected auth between '
+ 's3api and proxy-server ' % pipeline)
+
+ def check_filter_order(self, pipeline, required_filters):
+ """
+ Check that required filters are present in order in the pipeline.
+ """
+ indexes = []
+ missing_filters = []
+ for required_filter in required_filters:
+ try:
+ indexes.append(pipeline.index(required_filter))
+ except ValueError as e:
+ self.logger.debug(e)
+ missing_filters.append(required_filter)
+
+ if missing_filters:
+ raise ValueError('Invalid pipeline %r: missing filters %r' % (
+ pipeline, missing_filters))
+
+ if indexes != sorted(indexes):
+ raise ValueError('Invalid pipeline %r: expected filter %s' % (
+ pipeline, ' before '.join(required_filters)))
+
+
+def filter_factory(global_conf, **local_conf):
+ """Standard filter factory to use the middleware with paste.deploy"""
+ conf = global_conf.copy()
+ conf.update(local_conf)
+
+ register_swift_info(
+ 's3api',
+ # TODO: make default values as variables
+ max_bucket_listing=conf.get('max_bucket_listing', 1000),
+ max_parts_listing=conf.get('max_parts_listing', 1000),
+ max_upload_part_num=conf.get('max_upload_part_num', 1000),
+ max_multi_delete_objects=conf.get('max_multi_delete_objects', 1000),
+ allow_multipart_uploads=conf.get('allow_multipart_uploads', True),
+ min_segment_size=conf.get('min_segment_size', 5242880),
+ )
+
+ def s3api_filter(app):
+ return S3ApiMiddleware(app, conf)
+
+ return s3api_filter
diff --git a/swift/common/middleware/s3api/s3request.py b/swift/common/middleware/s3api/s3request.py
new file mode 100644
index 000000000..5833921f8
--- /dev/null
+++ b/swift/common/middleware/s3api/s3request.py
@@ -0,0 +1,1402 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+from collections import defaultdict
+from email.header import Header
+from hashlib import sha1, sha256, md5
+import hmac
+import re
+import six
+# pylint: disable-msg=import-error
+from six.moves.urllib.parse import quote, unquote, parse_qsl
+import string
+
+from swift.common.utils import split_path
+from swift.common import swob
+from swift.common.http import HTTP_OK, HTTP_CREATED, HTTP_ACCEPTED, \
+ HTTP_NO_CONTENT, HTTP_UNAUTHORIZED, HTTP_FORBIDDEN, HTTP_NOT_FOUND, \
+ HTTP_CONFLICT, HTTP_UNPROCESSABLE_ENTITY, HTTP_REQUEST_ENTITY_TOO_LARGE, \
+ HTTP_PARTIAL_CONTENT, HTTP_NOT_MODIFIED, HTTP_PRECONDITION_FAILED, \
+ HTTP_REQUESTED_RANGE_NOT_SATISFIABLE, HTTP_LENGTH_REQUIRED, \
+ HTTP_BAD_REQUEST, HTTP_REQUEST_TIMEOUT, is_success
+
+from swift.common.constraints import check_utf8
+from swift.proxy.controllers.base import get_container_info, \
+ headers_to_container_info
+from swift.common.request_helpers import check_path_header
+
+from swift.common.middleware.s3api.controllers import ServiceController, \
+ ObjectController, AclController, MultiObjectDeleteController, \
+ LocationController, LoggingStatusController, PartController, \
+ UploadController, UploadsController, VersioningController, \
+ UnsupportedController, S3AclController, BucketController
+from swift.common.middleware.s3api.s3response import AccessDenied, \
+ InvalidArgument, InvalidDigest, \
+ RequestTimeTooSkewed, S3Response, SignatureDoesNotMatch, \
+ BucketAlreadyExists, BucketNotEmpty, EntityTooLarge, \
+ InternalError, NoSuchBucket, NoSuchKey, PreconditionFailed, InvalidRange, \
+ MissingContentLength, InvalidStorageClass, S3NotImplemented, InvalidURI, \
+ MalformedXML, InvalidRequest, RequestTimeout, InvalidBucketName, \
+ BadDigest, AuthorizationHeaderMalformed, AuthorizationQueryParametersError
+from swift.common.middleware.s3api.exception import NotS3Request, \
+ BadSwiftRequest
+from swift.common.middleware.s3api.utils import utf8encode, \
+ S3Timestamp, mktime, MULTIUPLOAD_SUFFIX
+from swift.common.middleware.s3api.subresource import decode_acl, encode_acl
+from swift.common.middleware.s3api.utils import sysmeta_header, \
+ validate_bucket_name
+from swift.common.middleware.s3api.acl_utils import handle_acl_header
+
+
+# List of sub-resources that must be maintained as part of the HMAC
+# signature string.
+ALLOWED_SUB_RESOURCES = sorted([
+ 'acl', 'delete', 'lifecycle', 'location', 'logging', 'notification',
+ 'partNumber', 'policy', 'requestPayment', 'torrent', 'uploads', 'uploadId',
+ 'versionId', 'versioning', 'versions', 'website',
+ 'response-cache-control', 'response-content-disposition',
+ 'response-content-encoding', 'response-content-language',
+ 'response-content-type', 'response-expires', 'cors', 'tagging', 'restore'
+])
+
+
+MAX_32BIT_INT = 2147483647
+SIGV2_TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S'
+SIGV4_X_AMZ_DATE_FORMAT = '%Y%m%dT%H%M%SZ'
+SERVICE = 's3' # useful for mocking out in tests
+
+
+def _header_strip(value):
+    """
+    Strip leading/trailing control characters and spaces (0x00-0x20) from a
+    header value; return None for None input or when nothing remains.
+    """
+    # S3 seems to strip *all* control characters
+    if value is None:
+        return None
+    stripped = _header_strip.re.sub('', value)
+    if value and not stripped:
+        # If there's nothing left after stripping,
+        # behave as though it wasn't provided
+        return None
+    return stripped
+# Matches runs of chars <= 0x20 anchored at either end of the value.
+_header_strip.re = re.compile('^[\x00-\x20]*|[\x00-\x20]*$')
+
+
+def _header_acl_property(resource):
+    """
+    Set and retrieve the acl in self.headers
+
+    :param resource: resource name ('container' or 'object') used to build
+                     the sysmeta header key and the backing attribute name
+    :return: a property that mirrors the ACL into self.headers on set and
+             blanks the sysmeta acl header on delete
+    """
+    def getter(self):
+        # Backed by an instance attribute named '_<resource>'.
+        return getattr(self, '_%s' % resource)
+
+    def setter(self, value):
+        # Encode the ACL into sysmeta headers as well as caching it locally.
+        self.headers.update(encode_acl(resource, value))
+        setattr(self, '_%s' % resource, value)
+
+    def deleter(self):
+        # Blank (rather than remove) the sysmeta header to clear the ACL.
+        self.headers[sysmeta_header(resource, 'acl')] = ''
+
+    return property(getter, setter, deleter,
+                    doc='Get and set the %s acl property' % resource)
+
+
+class SigV4Mixin(object):
+    """
+    A request class mixin to provide S3 signature v4 functionality
+    """
+
+    def check_signature(self, secret):
+        """
+        Validate the request's v4 signature against the given secret.
+
+        Derives the AWS4-HMAC-SHA256 signing key by folding each scope
+        piece (date / region / service / 'aws4_request') into the secret,
+        then signs self.string_to_sign with it.
+        """
+        user_signature = self.signature
+        derived_secret = 'AWS4' + secret
+        for scope_piece in self.scope:
+            derived_secret = hmac.new(
+                derived_secret, scope_piece, sha256).digest()
+        valid_signature = hmac.new(
+            derived_secret, self.string_to_sign, sha256).hexdigest()
+        # NOTE(review): plain == is not a constant-time comparison; consider
+        # hmac.compare_digest -- confirm against project policy.
+        return user_signature == valid_signature
+
+    @property
+    def _is_query_auth(self):
+        # Presigned-URL style auth carries the credential in query params.
+        return 'X-Amz-Credential' in self.params
+
+    @property
+    def timestamp(self):
+        """
+        Return timestamp string according to the auth type
+        The difference from v2 is v4 have to see 'X-Amz-Date' even though
+        it's query auth type.
+        """
+        if not self._timestamp:
+            try:
+                if self._is_query_auth and 'X-Amz-Date' in self.params:
+                    # NOTE(andrey-mp): Date in Signature V4 has different
+                    # format
+                    timestamp = mktime(
+                        self.params['X-Amz-Date'], SIGV4_X_AMZ_DATE_FORMAT)
+                else:
+                    if self.headers.get('X-Amz-Date'):
+                        timestamp = mktime(
+                            self.headers.get('X-Amz-Date'),
+                            SIGV4_X_AMZ_DATE_FORMAT)
+                    else:
+                        # Fall back to the standard HTTP Date header.
+                        timestamp = mktime(self.headers.get('Date'))
+            except (ValueError, TypeError):
+                raise AccessDenied('AWS authentication requires a valid Date '
+                                   'or x-amz-date header')
+
+            if timestamp < 0:
+                raise AccessDenied('AWS authentication requires a valid Date '
+                                   'or x-amz-date header')
+
+            try:
+                self._timestamp = S3Timestamp(timestamp)
+            except ValueError:
+                # Must be far-future; blame clock skew
+                raise RequestTimeTooSkewed()
+
+        return self._timestamp
+
+    def _validate_expire_param(self):
+        """
+        Validate X-Amz-Expires in query parameter
+        :raises: AccessDenied
+        :raises: AuthorizationQueryParametersError
+        """
+        err = None
+        try:
+            expires = int(self.params['X-Amz-Expires'])
+        except ValueError:
+            err = 'X-Amz-Expires should be a number'
+        else:
+            if expires < 0:
+                err = 'X-Amz-Expires must be non-negative'
+            elif expires >= 2 ** 63:
+                err = 'X-Amz-Expires should be a number'
+            elif expires > 604800:
+                err = ('X-Amz-Expires must be less than a week (in seconds); '
+                       'that is, the given X-Amz-Expires must be less than '
+                       '604800 seconds')
+        if err:
+            raise AuthorizationQueryParametersError(err)
+
+        # Expired when request timestamp plus the window is in the past.
+        if int(self.timestamp) + expires < S3Timestamp.now():
+            raise AccessDenied('Request has expired')
+
+    def _parse_query_authentication(self):
+        """
+        Parse v4 query authentication
+        - version 4:
+            'X-Amz-Credential' and 'X-Amz-Signature' should be in param
+        :raises: AccessDenied
+        :raises: AuthorizationHeaderMalformed
+        """
+        if self.params.get('X-Amz-Algorithm') != 'AWS4-HMAC-SHA256':
+            raise InvalidArgument('X-Amz-Algorithm',
+                                  self.params.get('X-Amz-Algorithm'))
+        try:
+            cred_param = self.params['X-Amz-Credential'].split("/")
+            access = cred_param[0]
+            sig = self.params['X-Amz-Signature']
+            expires = self.params['X-Amz-Expires']
+        except KeyError:
+            raise AccessDenied()
+
+        try:
+            signed_headers = self.params['X-Amz-SignedHeaders']
+        except KeyError:
+            # TODO: make sure if is it malformed request?
+            raise AuthorizationHeaderMalformed()
+
+        self._signed_headers = set(signed_headers.split(';'))
+
+        # credential must be in following format:
+        # <access-key-id>/<date>/<AWS-region>/<AWS-service>/aws4_request
+        if not all([access, sig, len(cred_param) == 5, expires]):
+            raise AccessDenied()
+
+        return access, sig
+
+    def _parse_header_authentication(self):
+        """
+        Parse v4 header authentication
+        - version 4:
+            'X-Amz-Credential' and 'X-Amz-Signature' should be in param
+        :raises: AccessDenied
+        :raises: AuthorizationHeaderMalformed
+        """
+
+        auth_str = self.headers['Authorization']
+        # Extract 'Credential=', 'Signature=' and 'SignedHeaders=' fields
+        # from the comma-separated Authorization header value.
+        cred_param = auth_str.partition(
+            "Credential=")[2].split(',')[0].split("/")
+        access = cred_param[0]
+        sig = auth_str.partition("Signature=")[2].split(',')[0]
+        signed_headers = auth_str.partition(
+            "SignedHeaders=")[2].split(',', 1)[0]
+        # credential must be in following format:
+        # <access-key-id>/<date>/<AWS-region>/<AWS-service>/aws4_request
+        if not all([access, sig, len(cred_param) == 5]):
+            raise AccessDenied()
+        if not signed_headers:
+            # TODO: make sure if is it Malformed?
+            raise AuthorizationHeaderMalformed()
+
+        self._signed_headers = set(signed_headers.split(';'))
+
+        return access, sig
+
+    def _canonical_query_string(self):
+        # Sorted, URL-encoded query pairs excluding any signature params.
+        return '&'.join(
+            '%s=%s' % (quote(key, safe='-_.~'),
+                       quote(value, safe='-_.~'))
+            for key, value in sorted(self.params.items())
+            if key not in ('Signature', 'X-Amz-Signature'))
+
+    def _headers_to_sign(self):
+        """
+        Select the headers from the request that need to be included
+        in the StringToSign.
+
+        :return : dict of headers to sign, the keys are all lower case
+        """
+        if 'headers_raw' in self.environ:  # eventlet >= 0.19.0
+            # See https://github.com/eventlet/eventlet/commit/67ec999
+            headers_lower_dict = defaultdict(list)
+            for key, value in self.environ['headers_raw']:
+                headers_lower_dict[key.lower().strip()].append(
+                    ' '.join(_header_strip(value or '').split()))
+            headers_lower_dict = {k: ','.join(v)
+                                  for k, v in headers_lower_dict.items()}
+        else:  # mostly-functional fallback
+            headers_lower_dict = dict(
+                (k.lower().strip(), ' '.join(_header_strip(v or '').split()))
+                for (k, v) in six.iteritems(self.headers))
+
+        if 'host' in headers_lower_dict and re.match(
+                'Boto/2.[0-9].[0-2]',
+                headers_lower_dict.get('user-agent', '')):
+            # Boto versions < 2.9.3 strip the port component of the host:port
+            # header, so detect the user-agent via the header and strip the
+            # port if we detect an old boto version.
+            headers_lower_dict['host'] = \
+                headers_lower_dict['host'].split(':')[0]
+
+        headers_to_sign = [
+            (key, value) for key, value in sorted(headers_lower_dict.items())
+            if key in self._signed_headers]
+
+        if len(headers_to_sign) != len(self._signed_headers):
+            # NOTE: if we are missing the header suggested via
+            # signed_header in actual header, it results in
+            # SignatureDoesNotMatch in actual S3 so we can raise
+            # the error immediately here to save redundant check
+            # process.
+            raise SignatureDoesNotMatch()
+
+        return headers_to_sign
+
+    def _canonical_uri(self):
+        """
+        It won't require bucket name in canonical_uri for v4.
+        """
+        return self.environ.get('RAW_PATH_INFO', self.path)
+
+    def _canonical_request(self):
+        # prepare 'canonical_request'
+        # Example requests are like following:
+        #
+        # GET
+        # /
+        # Action=ListUsers&Version=2010-05-08
+        # content-type:application/x-www-form-urlencoded; charset=utf-8
+        # host:iam.amazonaws.com
+        # x-amz-date:20150830T123600Z
+        #
+        # content-type;host;x-amz-date
+        # e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+        #
+
+        # 1. Add verb like: GET
+        cr = [self.method.upper()]
+
+        # 2. Add path like: /
+        path = self._canonical_uri()
+        cr.append(path)
+
+        # 3. Add query like: Action=ListUsers&Version=2010-05-08
+        cr.append(self._canonical_query_string())
+
+        # 4. Add headers like:
+        # content-type:application/x-www-form-urlencoded; charset=utf-8
+        # host:iam.amazonaws.com
+        # x-amz-date:20150830T123600Z
+        headers_to_sign = self._headers_to_sign()
+        cr.append(''.join('%s:%s\n' % (key, value)
+                          for key, value in headers_to_sign))
+
+        # 5. Add signed headers into canonical request like
+        # content-type;host;x-amz-date
+        cr.append(';'.join(k for k, v in headers_to_sign))
+
+        # 6. Add payload string at the tail
+        if 'X-Amz-Credential' in self.params:
+            # V4 with query parameters only
+            hashed_payload = 'UNSIGNED-PAYLOAD'
+        elif 'X-Amz-Content-SHA256' not in self.headers:
+            msg = 'Missing required header for this request: ' \
+                  'x-amz-content-sha256'
+            raise InvalidRequest(msg)
+        else:
+            hashed_payload = self.headers['X-Amz-Content-SHA256']
+        cr.append(hashed_payload)
+        return '\n'.join(cr).encode('utf-8')
+
+    @property
+    def scope(self):
+        # Credential scope: <date>/<region>/<service>/aws4_request
+        return [self.timestamp.amz_date_format.split('T')[0],
+                self.location, SERVICE, 'aws4_request']
+
+    def _string_to_sign(self):
+        """
+        Create 'StringToSign' value in Amazon terminology for v4.
+        """
+        return '\n'.join(['AWS4-HMAC-SHA256',
+                          self.timestamp.amz_date_format,
+                          '/'.join(self.scope),
+                          sha256(self._canonical_request()).hexdigest()])
+
+
+def get_request_class(env, s3_acl):
+    """
+    Helper function to find a request class to use from Map
+
+    :param env: WSGI environment of the incoming request
+    :param s3_acl: if true, use the S3-ACL-aware request classes
+    :return: S3Request / SigV4Request / S3AclRequest / SigV4S3AclRequest
+             depending on s3_acl and the request's signature version
+    """
+    if s3_acl:
+        request_classes = (S3AclRequest, SigV4S3AclRequest)
+    else:
+        request_classes = (S3Request, SigV4Request)
+
+    req = swob.Request(env)
+    if 'X-Amz-Credential' in req.params or \
+            req.headers.get('Authorization', '').startswith(
+                'AWS4-HMAC-SHA256 '):
+        # This is an Amazon SigV4 request
+        return request_classes[1]
+    else:
+        # The others using Amazon SigV2 class
+        return request_classes[0]
+
+
+class S3Request(swob.Request):
+ """
+ S3 request object.
+ """
+
+ bucket_acl = _header_acl_property('container')
+ object_acl = _header_acl_property('object')
+
+    def __init__(self, env, app=None, slo_enabled=True,
+                 storage_domain='', location='US', force_request_log=False,
+                 dns_compliant_bucket_names=True, allow_multipart_uploads=True,
+                 allow_no_owner=False):
+        """
+        Parse the WSGI environment as an S3 request: extract auth info,
+        bucket/object names and validate headers, caching the results on
+        the instance.  Raises NotS3Request if no S3-style auth is present.
+        """
+        # NOTE: app and allow_no_owner are not used by this class, need for
+        # compatibility of S3acl
+        swob.Request.__init__(self, env)
+        self.storage_domain = storage_domain
+        self.location = location
+        self.force_request_log = force_request_log
+        self.dns_compliant_bucket_names = dns_compliant_bucket_names
+        self.allow_multipart_uploads = allow_multipart_uploads
+        self._timestamp = None
+        self.access_key, self.signature = self._parse_auth_info()
+        self.bucket_in_host = self._parse_host()
+        self.container_name, self.object_name = self._parse_uri()
+        self._validate_headers()
+        # Lock in string-to-sign now, before we start messing with query params
+        self.string_to_sign = self._string_to_sign()
+        self.environ['s3api.auth_details'] = {
+            'access_key': self.access_key,
+            'signature': self.signature,
+            'string_to_sign': self.string_to_sign,
+            'check_signature': self.check_signature,
+        }
+        self.token = None
+        self.account = None
+        self.user_id = None
+        self.slo_enabled = slo_enabled
+
+        # NOTE(andrey-mp): substitute authorization header for next modules
+        # in pipeline (s3token). it uses this and X-Auth-Token in specific
+        # format.
+        # (kota_): yeah, the reason we need this is s3token only supports
+        # v2 like header consists of AWS access:signature. Since the commit
+        # b626a3ca86e467fc7564eac236b9ee2efd49bdcc, the s3token is in swift3
+        # repo so probably we need to change s3token to support v4 format.
+        self.headers['Authorization'] = 'AWS %s:%s' % (
+            self.access_key, self.signature)
+        # Avoids that swift.swob.Response replaces Location header value
+        # by full URL when absolute path given. See swift.swob for more detail.
+        self.environ['swift.leave_relative_location'] = True
+
+    def check_signature(self, secret):
+        """
+        Validate the request's v2 signature: base64(HMAC-SHA1(secret,
+        string_to_sign)) must equal the client-supplied signature.
+        """
+        user_signature = self.signature
+        valid_signature = base64.b64encode(hmac.new(
+            secret, self.string_to_sign, sha1).digest()).strip()
+        # NOTE(review): plain == is not a constant-time comparison; consider
+        # hmac.compare_digest -- confirm against project policy.
+        return user_signature == valid_signature
+
+    @property
+    def timestamp(self):
+        """
+        S3Timestamp from Date header. If X-Amz-Date header specified, it
+        will be prior to Date header.
+
+        :return : S3Timestamp instance
+        """
+        if not self._timestamp:
+            try:
+                if self._is_query_auth and 'Timestamp' in self.params:
+                    # If Timestamp specified in query, it should be prior
+                    # to any Date header (is this right?)
+                    timestamp = mktime(
+                        self.params['Timestamp'], SIGV2_TIMESTAMP_FORMAT)
+                else:
+                    timestamp = mktime(
+                        self.headers.get('X-Amz-Date',
+                                         self.headers.get('Date')))
+            except ValueError:
+                raise AccessDenied('AWS authentication requires a valid Date '
+                                   'or x-amz-date header')
+
+            if timestamp < 0:
+                raise AccessDenied('AWS authentication requires a valid Date '
+                                   'or x-amz-date header')
+            try:
+                self._timestamp = S3Timestamp(timestamp)
+            except ValueError:
+                # Must be far-future; blame clock skew
+                raise RequestTimeTooSkewed()
+
+        return self._timestamp
+
+    @property
+    def _is_header_auth(self):
+        # True when credentials arrive in the Authorization header.
+        return 'Authorization' in self.headers
+
+    @property
+    def _is_query_auth(self):
+        # True for v2 presigned-URL style auth (query parameters).
+        return 'AWSAccessKeyId' in self.params
+
+    def _parse_host(self):
+        """
+        Extract a bucket name from a virtual-hosted-style Host header.
+
+        :return: bucket name prefix when the request host ends with the
+                 configured storage_domain, else None
+        """
+        storage_domain = self.storage_domain
+        if not storage_domain:
+            return None
+
+        if not storage_domain.startswith('.'):
+            storage_domain = '.' + storage_domain
+
+        if 'HTTP_HOST' in self.environ:
+            given_domain = self.environ['HTTP_HOST']
+        elif 'SERVER_NAME' in self.environ:
+            given_domain = self.environ['SERVER_NAME']
+        else:
+            return None
+
+        port = ''
+        if ':' in given_domain:
+            given_domain, port = given_domain.rsplit(':', 1)
+        if given_domain.endswith(storage_domain):
+            # Everything before the storage domain is the bucket name.
+            return given_domain[:-len(storage_domain)]
+
+        return None
+
+    def _parse_uri(self):
+        """
+        Extract (container, object) names from the request path, taking a
+        virtual-hosted-style bucket into account.
+
+        :raises InvalidURI: if the path is not valid UTF-8
+        :raises InvalidBucketName: if the bucket name fails validation
+        """
+        if not check_utf8(self.environ['PATH_INFO']):
+            raise InvalidURI(self.path)
+
+        if self.bucket_in_host:
+            # Bucket came from the Host header; the path holds only the key.
+            obj = self.environ['PATH_INFO'][1:] or None
+            return self.bucket_in_host, obj
+
+        bucket, obj = self.split_path(0, 2, True)
+
+        if bucket and not validate_bucket_name(
+                bucket, self.dns_compliant_bucket_names):
+            # Ignore GET service case
+            raise InvalidBucketName(bucket)
+        return (bucket, obj)
+
+    def _parse_query_authentication(self):
+        """
+        Parse v2 authentication query args
+        TODO: make sure if 0, 1, 3 is supported?
+        - version 0, 1, 2, 3:
+            'AWSAccessKeyId' and 'Signature' should be in param
+
+        :return: a tuple of access_key and signature
+        :raises: AccessDenied
+        """
+        try:
+            access = self.params['AWSAccessKeyId']
+            expires = self.params['Expires']
+            sig = self.params['Signature']
+        except KeyError:
+            raise AccessDenied()
+
+        # All three values must be non-empty.
+        if not all([access, sig, expires]):
+            raise AccessDenied()
+
+        return access, sig
+
+    def _parse_header_authentication(self):
+        """
+        Parse v2 header authentication info
+
+        :returns: a tuple of access_key and signature
+        :raises: AccessDenied
+        """
+        auth_str = self.headers['Authorization']
+        if not auth_str.startswith('AWS ') or ':' not in auth_str:
+            raise AccessDenied()
+        # This means signature format V2
+        # rsplit on ':' so access keys containing ':' still parse.
+        access, sig = auth_str.split(' ', 1)[1].rsplit(':', 1)
+        return access, sig
+
+    def _parse_auth_info(self):
+        """Extract the access key identifier and signature.
+
+        :returns: a tuple of access_key and signature
+        :raises: NotS3Request
+        """
+        if self._is_query_auth:
+            return self._parse_query_authentication()
+        elif self._is_header_auth:
+            return self._parse_header_authentication()
+        else:
+            # if this request is neither query auth nor header auth
+            # s3api regard this as not s3 request
+            raise NotS3Request()
+
+    def _validate_expire_param(self):
+        """
+        Validate Expires in query parameters
+        :raises: AccessDenied
+        """
+        # Expires header is a float since epoch
+        try:
+            ex = S3Timestamp(float(self.params['Expires']))
+        except ValueError:
+            raise AccessDenied()
+
+        if S3Timestamp.now() > ex:
+            raise AccessDenied('Request has expired')
+
+        # v2 presigned URLs must expire within a 32-bit epoch range.
+        if ex >= 2 ** 31:
+            raise AccessDenied(
+                'Invalid date (should be seconds since epoch): %s' %
+                self.params['Expires'])
+
+    def _validate_dates(self):
+        """
+        Validate Date/X-Amz-Date headers for signature v2
+        :raises: AccessDenied
+        :raises: RequestTimeTooSkewed
+        """
+        if self._is_query_auth:
+            self._validate_expire_param()
+            # TODO: make sure the case if timestamp param in query
+            return
+
+        date_header = self.headers.get('Date')
+        amz_date_header = self.headers.get('X-Amz-Date')
+        if not date_header and not amz_date_header:
+            raise AccessDenied('AWS authentication requires a valid Date '
+                               'or x-amz-date header')
+
+        # Anyways, request timestamp should be validated
+        epoch = S3Timestamp(0)
+        if self.timestamp < epoch:
+            raise AccessDenied()
+
+        # If the standard date is too far ahead or behind, it is an
+        # error
+        delta = 60 * 5
+        if abs(int(self.timestamp) - int(S3Timestamp.now())) > delta:
+            raise RequestTimeTooSkewed()
+
+    def _validate_headers(self):
+        """
+        Validate request headers: Content-Length, dates, Content-MD5 and
+        the various x-amz-* headers, raising the matching S3 error for
+        anything invalid or unsupported.
+        """
+        if 'CONTENT_LENGTH' in self.environ:
+            try:
+                if self.content_length < 0:
+                    raise InvalidArgument('Content-Length',
+                                          self.content_length)
+            except (ValueError, TypeError):
+                raise InvalidArgument('Content-Length',
+                                      self.environ['CONTENT_LENGTH'])
+
+        self._validate_dates()
+
+        value = _header_strip(self.headers.get('Content-MD5'))
+        if value is not None:
+            if not re.match('^[A-Za-z0-9+/]+={0,2}$', value):
+                # Non-base64-alphabet characters in value.
+                raise InvalidDigest(content_md5=value)
+            try:
+                # NOTE(review): str.decode('base64')/.encode('hex') are
+                # Python 2 only -- confirm whether this needs the base64 /
+                # binascii modules for py3 compatibility.
+                self.headers['ETag'] = value.decode('base64').encode('hex')
+            except Exception:
+                raise InvalidDigest(content_md5=value)
+
+            if len(self.headers['ETag']) != 32:
+                raise InvalidDigest(content_md5=value)
+
+        if self.method == 'PUT' and any(h in self.headers for h in (
+                'If-Match', 'If-None-Match',
+                'If-Modified-Since', 'If-Unmodified-Since')):
+            raise S3NotImplemented(
+                'Conditional object PUTs are not supported.')
+
+        if 'X-Amz-Copy-Source' in self.headers:
+            try:
+                check_path_header(self, 'X-Amz-Copy-Source', 2, '')
+            except swob.HTTPException:
+                msg = 'Copy Source must mention the source bucket and key: ' \
+                      'sourcebucket/sourcekey'
+                raise InvalidArgument('x-amz-copy-source',
+                                      self.headers['X-Amz-Copy-Source'],
+                                      msg)
+
+        if 'x-amz-metadata-directive' in self.headers:
+            value = self.headers['x-amz-metadata-directive']
+            if value not in ('COPY', 'REPLACE'):
+                err_msg = 'Unknown metadata directive.'
+                raise InvalidArgument('x-amz-metadata-directive', value,
+                                      err_msg)
+
+        if 'x-amz-storage-class' in self.headers:
+            # Only STANDARD is supported now.
+            if self.headers['x-amz-storage-class'] != 'STANDARD':
+                raise InvalidStorageClass()
+
+        if 'x-amz-mfa' in self.headers:
+            raise S3NotImplemented('MFA Delete is not supported.')
+
+        if 'x-amz-server-side-encryption' in self.headers:
+            raise S3NotImplemented('Server-side encryption is not supported.')
+
+        if 'x-amz-website-redirect-location' in self.headers:
+            raise S3NotImplemented('Website redirection is not supported.')
+
+    @property
+    def body(self):
+        """
+        swob.Request.body is not secure against malicious input. It consumes
+        too much memory without any check when the request body is excessively
+        large. Use xml() instead.
+        """
+        raise AttributeError("No attribute 'body'")
+
+    def xml(self, max_length):
+        """
+        Similar to swob.Request.body, but it checks the content length before
+        creating a body string.
+
+        :param max_length: maximum number of body bytes to accept/read
+        :return: the body as a string, or None when there is no body
+        :raises S3NotImplemented: for unsupported Transfer-Encoding values
+        :raises MalformedXML: when the body exceeds max_length
+        """
+        te = self.headers.get('transfer-encoding', '')
+        te = [x.strip() for x in te.split(',') if x.strip()]
+        if te and (len(te) > 1 or te[-1] != 'chunked'):
+            raise S3NotImplemented('A header you provided implies '
+                                   'functionality that is not implemented',
+                                   header='Transfer-Encoding')
+
+        if self.message_length() > max_length:
+            raise MalformedXML()
+
+        if te or self.message_length():
+            # Limit the read similar to how SLO handles manifests
+            body = self.body_file.read(max_length)
+        else:
+            # No (or zero) Content-Length provided, and not chunked transfer;
+            # no body. Assume zero-length, and enforce a required body below.
+            return None
+
+        return body
+
+    def check_md5(self, body):
+        """
+        Verify the Content-MD5 header against the given body.
+
+        :raises InvalidRequest: when the Content-MD5 header is missing
+        :raises BadDigest: when the digest does not match the body
+        """
+        if 'HTTP_CONTENT_MD5' not in self.environ:
+            raise InvalidRequest('Missing required header for this request: '
+                                 'Content-MD5')
+
+        # NOTE(review): .encode('base64') is Python 2 only -- confirm
+        # whether base64.b64encode is needed for py3 compatibility.
+        digest = md5(body).digest().encode('base64').strip()
+        if self.environ['HTTP_CONTENT_MD5'] != digest:
+            raise BadDigest(content_md5=self.environ['HTTP_CONTENT_MD5'])
+
+    def _copy_source_headers(self):
+        """
+        Collect HTTP_X_AMZ_COPY_SOURCE_* environ entries, renamed with the
+        copy-source prefix removed, wrapped as a HeaderEnvironProxy.
+        """
+        env = {}
+        for key, value in self.environ.items():
+            if key.startswith('HTTP_X_AMZ_COPY_SOURCE_'):
+                env[key.replace('X_AMZ_COPY_SOURCE_', '')] = value
+
+        return swob.HeaderEnvironProxy(env)
+
+    def check_copy_source(self, app):
+        """
+        check_copy_source checks the copy source existence and if copying an
+        object to itself, for illegal request parameters
+
+        :returns: the source HEAD response
+        """
+        try:
+            src_path = self.headers['X-Amz-Copy-Source']
+        except KeyError:
+            # Not a copy request; nothing to check.
+            return None
+
+        if '?' in src_path:
+            src_path, qs = src_path.split('?', 1)
+            query = parse_qsl(qs, True)
+            if not query:
+                pass  # ignore it
+            elif len(query) > 1 or query[0][0] != 'versionId':
+                raise InvalidArgument('X-Amz-Copy-Source',
+                                      self.headers['X-Amz-Copy-Source'],
+                                      'Unsupported copy source parameter.')
+            elif query[0][1] != 'null':
+                # TODO: once we support versioning, we'll need to translate
+                # src_path to the proper location in the versions container
+                raise S3NotImplemented('Versioning is not yet supported')
+            self.headers['X-Amz-Copy-Source'] = src_path
+
+        src_path = unquote(src_path)
+        src_path = src_path if src_path.startswith('/') else ('/' + src_path)
+        src_bucket, src_obj = split_path(src_path, 0, 2, True)
+
+        headers = swob.HeaderKeyDict()
+        headers.update(self._copy_source_headers())
+
+        # HEAD the copy source to verify it exists (and honour conditional
+        # copy-source headers).
+        src_resp = self.get_response(app, 'HEAD', src_bucket, src_obj,
+                                     headers=headers)
+        if src_resp.status_int == 304:  # pylint: disable-msg=E1101
+            raise PreconditionFailed()
+
+        self.headers['X-Amz-Copy-Source'] = \
+            '/' + self.headers['X-Amz-Copy-Source'].lstrip('/')
+        source_container, source_obj = \
+            split_path(self.headers['X-Amz-Copy-Source'], 1, 2, True)
+
+        if (self.container_name == source_container and
+                self.object_name == source_obj and
+                self.headers.get('x-amz-metadata-directive',
+                                 'COPY') == 'COPY'):
+            raise InvalidRequest("This copy request is illegal "
+                                 "because it is trying to copy an "
+                                 "object to itself without "
+                                 "changing the object's metadata, "
+                                 "storage class, website redirect "
+                                 "location or encryption "
+                                 "attributes.")
+        return src_resp
+
+    def _canonical_uri(self):
+        """
+        Require bucket name in canonical_uri for v2 in virtual hosted-style.
+        """
+        raw_path_info = self.environ.get('RAW_PATH_INFO', self.path)
+        if self.bucket_in_host:
+            # Re-prepend the bucket that the Host header supplied.
+            raw_path_info = '/' + self.bucket_in_host + raw_path_info
+        return raw_path_info
+
+    def _string_to_sign(self):
+        """
+        Create 'StringToSign' value in Amazon terminology for v2.
+        """
+        amz_headers = {}
+
+        buf = [self.method,
+               _header_strip(self.headers.get('Content-MD5')) or '',
+               _header_strip(self.headers.get('Content-Type')) or '']
+
+        if 'headers_raw' in self.environ:  # eventlet >= 0.19.0
+            # See https://github.com/eventlet/eventlet/commit/67ec999
+            amz_headers = defaultdict(list)
+            for key, value in self.environ['headers_raw']:
+                key = key.lower()
+                if not key.startswith('x-amz-'):
+                    continue
+                amz_headers[key.strip()].append(value.strip())
+            amz_headers = dict((key, ','.join(value))
+                               for key, value in amz_headers.items())
+        else:  # mostly-functional fallback
+            amz_headers = dict((key.lower(), value)
+                               for key, value in self.headers.items()
+                               if key.lower().startswith('x-amz-'))
+
+        if self._is_header_auth:
+            # x-amz-date supersedes Date in the signed string.
+            if 'x-amz-date' in amz_headers:
+                buf.append('')
+            elif 'Date' in self.headers:
+                buf.append(self.headers['Date'])
+        elif self._is_query_auth:
+            buf.append(self.params['Expires'])
+        else:
+            # Should have already raised NotS3Request in _parse_auth_info,
+            # but as a sanity check...
+            raise AccessDenied()
+
+        for key, value in sorted(amz_headers.items()):
+            buf.append("%s:%s" % (key, value))
+
+        path = self._canonical_uri()
+        if self.query_string:
+            path += '?' + self.query_string
+        params = []
+        if '?' in path:
+            path, args = path.split('?', 1)
+            # Only signable sub-resources participate in the signed string.
+            for key, value in sorted(self.params.items()):
+                if key in ALLOWED_SUB_RESOURCES:
+                    params.append('%s=%s' % (key, value) if value else key)
+        if params:
+            buf.append('%s?%s' % (path, '&'.join(params)))
+        else:
+            buf.append(path)
+        return '\n'.join(buf)
+
+    @property
+    def controller_name(self):
+        # e.g. BucketController -> 'Bucket'
+        return self.controller.__name__[:-len('Controller')]
+
+    @property
+    def controller(self):
+        """
+        Pick the controller class that handles this request, based on the
+        request level (service/bucket/object) and query sub-resources.
+
+        :raises S3NotImplemented: for multipart params when SLO is disabled
+        """
+        if self.is_service_request:
+            return ServiceController
+
+        if not self.slo_enabled:
+            multi_part = ['partNumber', 'uploadId', 'uploads']
+            if len([p for p in multi_part if p in self.params]):
+                raise S3NotImplemented("Multi-part feature isn't support")
+
+        if 'acl' in self.params:
+            return AclController
+        if 'delete' in self.params:
+            return MultiObjectDeleteController
+        if 'location' in self.params:
+            return LocationController
+        if 'logging' in self.params:
+            return LoggingStatusController
+        if 'partNumber' in self.params:
+            return PartController
+        if 'uploadId' in self.params:
+            return UploadController
+        if 'uploads' in self.params:
+            return UploadsController
+        if 'versioning' in self.params:
+            return VersioningController
+
+        unsupported = ('notification', 'policy', 'requestPayment', 'torrent',
+                       'website', 'cors', 'tagging', 'restore')
+        if set(unsupported) & set(self.params):
+            return UnsupportedController
+
+        if self.is_object_request:
+            return ObjectController
+        return BucketController
+
+    @property
+    def is_service_request(self):
+        # No container in the path -> GET service (list buckets) level.
+        return not self.container_name
+
+    @property
+    def is_bucket_request(self):
+        # Container without an object -> bucket-level request.
+        return self.container_name and not self.object_name
+
+    @property
+    def is_object_request(self):
+        # Both container and object present -> object-level request.
+        return self.container_name and self.object_name
+
+    @property
+    def is_authenticated(self):
+        # account is set once the auth middleware has resolved the user.
+        return self.account is not None
+
+    def to_swift_req(self, method, container, obj, query=None,
+                     body=None, headers=None):
+        """
+        Create a Swift request based on this request's environment.
+
+        :param method: HTTP method for the Swift request (None keeps the
+                       incoming method)
+        :param container: Swift container name (None/empty for account level)
+        :param obj: Swift object name (None/empty for container level)
+        :param query: dict of query parameters to set on the Swift request
+        :param body: request body for the Swift request
+        :param headers: extra headers for the Swift request
+        :return: a swob.Request aimed at the /v1/<account>/... path
+        """
+        if self.account is None:
+            # Not yet authenticated; fall back to the access key as account.
+            account = self.access_key
+        else:
+            account = self.account
+
+        env = self.environ.copy()
+
+        def sanitize(value):
+            # Pass printable values through; MIME-encode anything else.
+            if set(value).issubset(string.printable):
+                return value
+
+            value = Header(value, 'UTF-8').encode()
+            if value.startswith('=?utf-8?q?'):
+                return '=?UTF-8?Q?' + value[10:]
+            elif value.startswith('=?utf-8?b?'):
+                return '=?UTF-8?B?' + value[10:]
+            else:
+                return value
+
+        if 'headers_raw' in env:  # eventlet >= 0.19.0
+            # See https://github.com/eventlet/eventlet/commit/67ec999
+            for key, value in env['headers_raw']:
+                if not key.lower().startswith('x-amz-meta-'):
+                    continue
+                # AWS ignores user-defined headers with these characters
+                if any(c in key for c in ' "),/;<=>?@[\\]{}'):
+                    # NB: apparently, '(' *is* allowed
+                    continue
+                # Note that this may have already been deleted, e.g. if the
+                # client sent multiple headers with the same name, or both
+                # x-amz-meta-foo-bar and x-amz-meta-foo_bar
+                env.pop('HTTP_' + key.replace('-', '_').upper(), None)
+                # Need to preserve underscores. Since we know '=' can't be
+                # present, quoted-printable seems appropriate.
+                key = key.replace('_', '=5F').replace('-', '_').upper()
+                key = 'HTTP_X_OBJECT_META_' + key[11:]
+                if key in env:
+                    env[key] += ',' + sanitize(value)
+                else:
+                    env[key] = sanitize(value)
+        else:  # mostly-functional fallback
+            for key in self.environ:
+                if not key.startswith('HTTP_X_AMZ_META_'):
+                    continue
+                # AWS ignores user-defined headers with these characters
+                if any(c in key for c in ' "),/;<=>?@[\\]{}'):
+                    # NB: apparently, '(' *is* allowed
+                    continue
+                env['HTTP_X_OBJECT_META_' + key[16:]] = sanitize(env[key])
+                del env[key]
+
+        if 'HTTP_X_AMZ_COPY_SOURCE' in env:
+            # Translate S3 copy semantics into Swift's X-Copy-From.
+            env['HTTP_X_COPY_FROM'] = env['HTTP_X_AMZ_COPY_SOURCE']
+            del env['HTTP_X_AMZ_COPY_SOURCE']
+            env['CONTENT_LENGTH'] = '0'
+
+        if self.force_request_log:
+            env['swift.proxy_access_log_made'] = False
+        env['swift.source'] = 'S3'
+        if method is not None:
+            env['REQUEST_METHOD'] = method
+
+        env['HTTP_X_AUTH_TOKEN'] = self.token
+
+        if obj:
+            path = '/v1/%s/%s/%s' % (account, container, obj)
+        elif container:
+            path = '/v1/%s/%s' % (account, container)
+        else:
+            path = '/v1/%s' % (account)
+        env['PATH_INFO'] = path
+
+        query_string = ''
+        if query is not None:
+            params = []
+            for key, value in sorted(query.items()):
+                if value is not None:
+                    params.append('%s=%s' % (key, quote(str(value))))
+                else:
+                    params.append(key)
+            query_string = '&'.join(params)
+        env['QUERY_STRING'] = query_string
+
+        return swob.Request.blank(quote(path), environ=env, body=body,
+                                  headers=headers)
+
+    def _swift_success_codes(self, method, container, obj):
+        """
+        Returns a list of expected success codes from Swift.
+
+        :param method: Swift request method
+        :param container: container name ('' / None for account level)
+        :param obj: object name ('' / None for container level)
+        """
+        if not container:
+            # Swift account access.
+            code_map = {
+                'GET': [
+                    HTTP_OK,
+                ],
+            }
+        elif not obj:
+            # Swift container access.
+            code_map = {
+                'HEAD': [
+                    HTTP_NO_CONTENT,
+                ],
+                'GET': [
+                    HTTP_OK,
+                    HTTP_NO_CONTENT,
+                ],
+                'PUT': [
+                    HTTP_CREATED,
+                ],
+                'POST': [
+                    HTTP_NO_CONTENT,
+                ],
+                'DELETE': [
+                    HTTP_NO_CONTENT,
+                ],
+            }
+        else:
+            # Swift object access.
+            code_map = {
+                'HEAD': [
+                    HTTP_OK,
+                    HTTP_PARTIAL_CONTENT,
+                    HTTP_NOT_MODIFIED,
+                ],
+                'GET': [
+                    HTTP_OK,
+                    HTTP_PARTIAL_CONTENT,
+                    HTTP_NOT_MODIFIED,
+                ],
+                'PUT': [
+                    HTTP_CREATED,
+                ],
+                'POST': [
+                    HTTP_ACCEPTED,
+                ],
+                'DELETE': [
+                    HTTP_OK,
+                    HTTP_NO_CONTENT,
+                ],
+            }
+
+        return code_map[method]
+
+    def _swift_error_codes(self, method, container, obj, env, app):
+        """
+        Returns a dict from expected Swift error codes to the corresponding S3
+        error responses.
+
+        Map values take one of three forms, all handled by _get_response():
+        an ErrorResponse subclass (raised with no arguments), a
+        ``(subclass, arg, ...)`` tuple (raised with those arguments), or a
+        zero-argument callable returning the response to raise.
+
+        :param env: WSGI environ of the backend request; used by the
+                    object-404 handler for its container-info lookup
+        :param app: the WSGI application, likewise used for container info
+        :raises KeyError: if ``method`` is not expected at this resource level
+        """
+        if not container:
+            # Swift account access.
+            code_map = {
+                'GET': {
+                },
+            }
+        elif not obj:
+            # Swift container access.
+            code_map = {
+                'HEAD': {
+                    HTTP_NOT_FOUND: (NoSuchBucket, container),
+                },
+                'GET': {
+                    HTTP_NOT_FOUND: (NoSuchBucket, container),
+                },
+                'PUT': {
+                    # Swift answers 202 (normally a success) when the
+                    # container already exists; S3 reports that as a 409
+                    HTTP_ACCEPTED: (BucketAlreadyExists, container),
+                },
+                'POST': {
+                    HTTP_NOT_FOUND: (NoSuchBucket, container),
+                },
+                'DELETE': {
+                    HTTP_NOT_FOUND: (NoSuchBucket, container),
+                    HTTP_CONFLICT: BucketNotEmpty,
+                },
+            }
+        else:
+            # Swift object access.
+
+            # 404s differ depending upon whether the bucket exists
+            # Note that base-container-existence checks happen elsewhere for
+            # multi-part uploads, and get_container_info should be pulling
+            # from the env cache
+            def not_found_handler():
+                if container.endswith(MULTIUPLOAD_SUFFIX) or \
+                        is_success(get_container_info(
+                            env, app, swift_source='S3').get('status')):
+                    return NoSuchKey(obj)
+                return NoSuchBucket(container)
+
+            code_map = {
+                'HEAD': {
+                    HTTP_NOT_FOUND: not_found_handler,
+                    HTTP_PRECONDITION_FAILED: PreconditionFailed,
+                },
+                'GET': {
+                    HTTP_NOT_FOUND: not_found_handler,
+                    HTTP_PRECONDITION_FAILED: PreconditionFailed,
+                    HTTP_REQUESTED_RANGE_NOT_SATISFIABLE: InvalidRange,
+                },
+                'PUT': {
+                    HTTP_NOT_FOUND: (NoSuchBucket, container),
+                    HTTP_UNPROCESSABLE_ENTITY: BadDigest,
+                    HTTP_REQUEST_ENTITY_TOO_LARGE: EntityTooLarge,
+                    HTTP_LENGTH_REQUIRED: MissingContentLength,
+                    HTTP_REQUEST_TIMEOUT: RequestTimeout,
+                },
+                'POST': {
+                    HTTP_NOT_FOUND: not_found_handler,
+                    HTTP_PRECONDITION_FAILED: PreconditionFailed,
+                },
+                'DELETE': {
+                    HTTP_NOT_FOUND: (NoSuchKey, obj),
+                },
+            }
+
+        return code_map[method]
+
+    def _get_response(self, app, method, container, obj,
+                      headers=None, body=None, query=None):
+        """
+        Calls the application with this request's environment. Returns a
+        S3Response object that wraps up the application's result.
+
+        :param method: backend HTTP method; None reuses this request's method
+        :param container: backend container; None means self.container_name
+        :param obj: backend object; None means self.object_name
+        :raises ErrorResponse: (a subclass) mapped from Swift error statuses
+                               per _swift_error_codes()
+        :raises BadSwiftRequest: on an unmapped 400 from Swift
+        :raises SignatureDoesNotMatch: on an unmapped 401 from Swift
+        :raises AccessDenied: on an unmapped 403 from Swift
+        :raises InternalError: on any other unexpected status
+        """
+
+        method = method or self.environ['REQUEST_METHOD']
+
+        if container is None:
+            container = self.container_name
+        if obj is None:
+            obj = self.object_name
+
+        sw_req = self.to_swift_req(method, container, obj, headers=headers,
+                                   body=body, query=query)
+
+        sw_resp = sw_req.get_response(app)
+
+        # reuse account and tokens
+        _, self.account, _ = split_path(sw_resp.environ['PATH_INFO'],
+                                        2, 3, True)
+        self.account = utf8encode(self.account)
+
+        resp = S3Response.from_swift_resp(sw_resp)
+        status = resp.status_int  # pylint: disable-msg=E1101
+
+        # Cache the user id from the first authenticated backend response
+        if not self.user_id:
+            if 'HTTP_X_USER_NAME' in sw_resp.environ:
+                # keystone
+                self.user_id = \
+                    utf8encode("%s:%s" %
+                               (sw_resp.environ['HTTP_X_TENANT_NAME'],
+                                sw_resp.environ['HTTP_X_USER_NAME']))
+            else:
+                # tempauth
+                self.user_id = self.access_key
+
+        success_codes = self._swift_success_codes(method, container, obj)
+        error_codes = self._swift_error_codes(method, container, obj,
+                                              sw_req.environ, app)
+
+        if status in success_codes:
+            return resp
+
+        err_msg = resp.body
+
+        if status in error_codes:
+            # sw_resp.status_int equals ``status`` here: from_swift_resp()
+            # copied the status from sw_resp
+            err_resp = \
+                error_codes[sw_resp.status_int]  # pylint: disable-msg=E1101
+            if isinstance(err_resp, tuple):
+                # (ErrorResponse subclass, arg, ...) form
+                raise err_resp[0](*err_resp[1:])
+            else:
+                # subclass or zero-argument callable form
+                raise err_resp()
+
+        if status == HTTP_BAD_REQUEST:
+            raise BadSwiftRequest(err_msg)
+        if status == HTTP_UNAUTHORIZED:
+            raise SignatureDoesNotMatch()
+        if status == HTTP_FORBIDDEN:
+            raise AccessDenied()
+
+        raise InternalError('unexpected status code %d' % status)
+
+    def get_response(self, app, method=None, container=None, obj=None,
+                     headers=None, body=None, query=None):
+        """
+        get_response is an entry point to be extended for child classes.
+        If additional tasks needed at that time of getting swift response,
+        we can override this method.
+        swift.common.middleware.s3api.s3request.S3Request need to just call
+        _get_response to get pure swift response.
+        """
+
+        # Translate a client-supplied canned-ACL header before hitting the
+        # backend; handle_acl_header is defined elsewhere -- presumably it
+        # mutates this request's headers in place (confirm in acl_utils)
+        if 'HTTP_X_AMZ_ACL' in self.environ:
+            handle_acl_header(self)
+
+        return self._get_response(app, method, container, obj,
+                                  headers, body, query)
+
+    def get_validated_param(self, param, default, limit=MAX_32BIT_INT):
+        """
+        Fetch an integer query parameter, validating its range.
+
+        Values above ``limit`` (but still within 32-bit range) are silently
+        clamped to ``limit``; negative or non-integer values raise.
+
+        :param param: query parameter name
+        :param default: value returned when the parameter is absent
+        :param limit: maximum value returned
+        :returns: the validated (possibly clamped) integer value
+        :raises InvalidArgument: if the value is not an integer, is negative,
+                                 or exceeds MAX_32BIT_INT
+        """
+        value = default
+        if param in self.params:
+            try:
+                value = int(self.params[param])
+                if value < 0:
+                    err_msg = 'Argument %s must be an integer between 0 and' \
+                              ' %d' % (param, MAX_32BIT_INT)
+                    raise InvalidArgument(param, self.params[param], err_msg)
+
+                if value > MAX_32BIT_INT:
+                    # check the value because int() could build either a long
+                    # instance or a 64bit integer.
+                    raise ValueError()
+
+                if limit < value:
+                    value = limit
+
+            except ValueError:
+                err_msg = 'Provided %s not an integer or within ' \
+                          'integer range' % param
+                raise InvalidArgument(param, self.params[param], err_msg)
+
+        return value
+
+    def get_container_info(self, app):
+        """
+        get_container_info will return a result dict of get_container_info
+        from the backend Swift.
+
+        :returns: a dictionary of container info from
+                  swift.controllers.base.get_container_info
+        :raises: NoSuchBucket when the container doesn't exist
+        :raises: InternalError when the request failed without 404
+        """
+        if self.is_authenticated:
+            # if we have already authenticated, yes we can use the account
+            # name like as AUTH_xxx for performance efficiency
+            # NOTE(review): to_swift_req()'s signature is
+            # (method, container, obj, ...); passing ``app`` as the method
+            # sets REQUEST_METHOD on the sub-request environ to the app
+            # object itself -- looks like a bug, confirm the intended
+            # argument (e.g. 'HEAD')
+            sw_req = self.to_swift_req(app, self.container_name, None)
+            info = get_container_info(sw_req.environ, app)
+            if is_success(info['status']):
+                return info
+            elif info['status'] == 404:
+                raise NoSuchBucket(self.container_name)
+            else:
+                raise InternalError(
+                    'unexpected status code %d' % info['status'])
+        else:
+            # otherwise we do naive HEAD request with the authentication
+            resp = self.get_response(app, 'HEAD', self.container_name, '')
+            return headers_to_container_info(
+                resp.sw_headers, resp.status_int)  # pylint: disable-msg=E1101
+
+    def gen_multipart_manifest_delete_query(self, app, obj=None):
+        """
+        Build the query dict for deleting a multipart (SLO) object.
+
+        :returns: ``{'multipart-manifest': 'delete'}`` when multipart uploads
+                  are enabled and a backend HEAD shows the target is an SLO
+                  manifest; otherwise None, so the caller issues a plain
+                  DELETE
+        """
+        if not self.allow_multipart_uploads:
+            return None
+        query = {'multipart-manifest': 'delete'}
+        if not obj:
+            obj = self.object_name
+        resp = self.get_response(app, 'HEAD', obj=obj)
+        return query if resp.is_slo else None
+
+    def set_acl_handler(self, handler):
+        # Intentional no-op here; S3AclRequest overrides this to install the
+        # ACL handler its get_response() consults
+        pass
+
+
+class S3AclRequest(S3Request):
+    """
+    S3Acl request object.
+
+    Variant of S3Request used when s3_acl support is enabled: it
+    pre-authenticates against the auth middleware on construction and
+    routes get_response() through a pluggable ACL handler installed via
+    set_acl_handler().
+    """
+    def __init__(self, env, app, slo_enabled=True,
+                 storage_domain='', location='US', force_request_log=False,
+                 dns_compliant_bucket_names=True, allow_multipart_uploads=True,
+                 allow_no_owner=False):
+        super(S3AclRequest, self).__init__(
+            env, app, slo_enabled, storage_domain, location, force_request_log,
+            dns_compliant_bucket_names, allow_multipart_uploads)
+        # Passed through to decode_acl(); presumably tolerates resources
+        # whose ACL sysmeta lacks an owner -- confirm in acl_utils
+        self.allow_no_owner = allow_no_owner
+        self.authenticate(app)
+        # Must be installed via set_acl_handler() before get_response()
+        self.acl_handler = None
+
+    @property
+    def controller(self):
+        # ?acl sub-resource requests get the dedicated ACL controller,
+        # except at the service level
+        if 'acl' in self.params and not self.is_service_request:
+            return S3AclController
+        return super(S3AclRequest, self).controller
+
+    def authenticate(self, app):
+        """
+        authenticate method will run pre-authenticate request and retrieve
+        account information.
+        Note that it currently supports only keystone and tempauth.
+        (no support for the third party authentication middleware)
+        """
+        sw_req = self.to_swift_req('TEST', None, None, body='')
+        # don't show log message of this request
+        sw_req.environ['swift.proxy_access_log_made'] = True
+
+        sw_resp = sw_req.get_response(app)
+
+        # REMOTE_USER is presumably set on the (shared) environ by the auth
+        # middleware on success; its absence means the credentials failed
+        if not sw_req.remote_user:
+            raise SignatureDoesNotMatch()
+
+        _, self.account, _ = split_path(sw_resp.environ['PATH_INFO'],
+                                        2, 3, True)
+        self.account = utf8encode(self.account)
+
+        if 'HTTP_X_USER_NAME' in sw_resp.environ:
+            # keystone
+            self.user_id = "%s:%s" % (sw_resp.environ['HTTP_X_TENANT_NAME'],
+                                      sw_resp.environ['HTTP_X_USER_NAME'])
+            self.user_id = utf8encode(self.user_id)
+            self.token = sw_resp.environ.get('HTTP_X_AUTH_TOKEN')
+        else:
+            # tempauth
+            self.user_id = self.access_key
+
+        # Need to skip S3 authorization on subsequent requests to prevent
+        # overwriting the account in PATH_INFO
+        # NOTE(review): these bare del's raise KeyError if either key is
+        # absent; presumably both always exist for an authenticated S3
+        # request -- confirm
+        del self.headers['Authorization']
+        del self.environ['s3api.auth_details']
+
+    def to_swift_req(self, method, container, obj, query=None,
+                     body=None, headers=None):
+        """
+        Create a Swift sub-request with backend authorization
+        short-circuited: once authenticate() has run, sub-requests are
+        trusted and marked as owner requests (required to set ACL sysmeta).
+        """
+        sw_req = super(S3AclRequest, self).to_swift_req(
+            method, container, obj, query, body, headers)
+        if self.account:
+            sw_req.environ['swift_owner'] = True  # needed to set ACL
+            sw_req.environ['swift.authorize_override'] = True
+            sw_req.environ['swift.authorize'] = lambda req: None
+        return sw_req
+
+    def get_acl_response(self, app, method=None, container=None, obj=None,
+                         headers=None, body=None, query=None):
+        """
+        Wrapper method of _get_response to add s3 acl information
+        from response sysmeta headers.
+        """
+
+        resp = self._get_response(
+            app, method, container, obj, headers, body, query)
+        resp.bucket_acl = decode_acl(
+            'container', resp.sysmeta_headers, self.allow_no_owner)
+        resp.object_acl = decode_acl(
+            'object', resp.sysmeta_headers, self.allow_no_owner)
+
+        return resp
+
+    def get_response(self, app, method=None, container=None, obj=None,
+                     headers=None, body=None, query=None):
+        """
+        Wrap up get_response call to hook with acl handling method.
+        """
+        if not self.acl_handler:
+            # we should set acl_handler all time before calling get_response
+            raise Exception('get_response called before set_acl_handler')
+        resp = self.acl_handler.handle_acl(
+            app, method, container, obj, headers)
+
+        # possible to skip recalling get_response_acl if resp is not
+        # None (e.g. HEAD)
+        if resp:
+            return resp
+        return self.get_acl_response(app, method, container, obj,
+                                     headers, body, query)
+
+    def set_acl_handler(self, acl_handler):
+        # Install the ACL handler consulted by get_response() above
+        self.acl_handler = acl_handler
+
+
+class SigV4Request(SigV4Mixin, S3Request):
+    """S3Request authenticated with AWS Signature Version 4."""
+    pass
+
+
+class SigV4S3AclRequest(SigV4Mixin, S3AclRequest):
+    """S3AclRequest authenticated with AWS Signature Version 4."""
+    pass
diff --git a/swift/common/middleware/s3api/s3response.py b/swift/common/middleware/s3api/s3response.py
new file mode 100644
index 000000000..350b3eb13
--- /dev/null
+++ b/swift/common/middleware/s3api/s3response.py
@@ -0,0 +1,684 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+from UserDict import DictMixin
+from functools import partial
+
+from swift.common import swob
+from swift.common.utils import config_true_value
+from swift.common.request_helpers import is_sys_meta
+
+from swift.common.middleware.s3api.utils import snake_to_camel, sysmeta_prefix
+from swift.common.middleware.s3api.etree import Element, SubElement, tostring
+
+
+class HeaderKey(str):
+    """
+    A string object that normalizes string as S3 clients expect with title().
+
+    title() is presumably invoked by swob's header machinery when
+    serializing header names -- confirm against swob.HeaderKeyDict.
+    """
+    def title(self):
+        if self.lower() == 'etag':
+            # AWS Java SDK expects only 'ETag'.
+            return 'ETag'
+        if self.lower().startswith('x-amz-'):
+            # AWS headers returned by S3 are lowercase.
+            return self.lower()
+        return str.title(self)
+
+
+class HeaderKeyDict(swob.HeaderKeyDict):
+    """
+    Similar to the HeaderKeyDict class in Swift, but its key name is normalized
+    as S3 clients expect.
+
+    Every accessor wraps the key in HeaderKey so the base class's key
+    normalization (presumably via key.title() -- confirm in swob) yields
+    S3-style names such as 'ETag' and lowercase 'x-amz-*'.
+    """
+    def __getitem__(self, key):
+        return swob.HeaderKeyDict.__getitem__(self, HeaderKey(key))
+
+    def __setitem__(self, key, value):
+        return swob.HeaderKeyDict.__setitem__(self, HeaderKey(key), value)
+
+    def __contains__(self, key):
+        return swob.HeaderKeyDict.__contains__(self, HeaderKey(key))
+
+    def __delitem__(self, key):
+        return swob.HeaderKeyDict.__delitem__(self, HeaderKey(key))
+
+    def get(self, key, default=None):
+        return swob.HeaderKeyDict.get(self, HeaderKey(key), default)
+
+    def pop(self, key, default=None):
+        return swob.HeaderKeyDict.pop(self, HeaderKey(key), default)
+
+
+class S3ResponseBase(object):
+    """
+    Base class for swift3 responses.
+
+    (The "swift3" wording is historical; this is the common marker base of
+    both S3Response and ErrorResponse below.)
+    """
+    pass
+
+
+class S3Response(S3ResponseBase, swob.Response):
+    """
+    Similar to the Response class in Swift, but uses our HeaderKeyDict for
+    headers instead of Swift's HeaderKeyDict. This also translates Swift
+    specific headers to S3 headers.
+    """
+    def __init__(self, *args, **kwargs):
+        swob.Response.__init__(self, *args, **kwargs)
+
+        if self.etag:
+            # add double quotes to the etag header
+            # NOTE(review): looks like a no-op self-assignment, but
+            # presumably swob.Response's etag property setter performs the
+            # quoting -- confirm against swob
+            self.etag = self.etag
+
+        sw_sysmeta_headers = swob.HeaderKeyDict()
+        sw_headers = swob.HeaderKeyDict()
+        headers = HeaderKeyDict()
+        # True when the response describes a Static Large Object manifest
+        self.is_slo = False
+
+        def is_swift3_sysmeta(sysmeta_key, server_type):
+            # Legacy swift3 sysmeta prefix, e.g. 'x-object-sysmeta-swift3'
+            swift3_sysmeta_prefix = (
+                'x-%s-sysmeta-swift3' % server_type).lower()
+            return sysmeta_key.lower().startswith(swift3_sysmeta_prefix)
+
+        def is_s3api_sysmeta(sysmeta_key, server_type):
+            # NOTE(review): reads the enclosing loop's _server_type rather
+            # than the server_type parameter; callers pass the same value so
+            # this is harmless, but the parameter is effectively ignored
+            s3api_sysmeta_prefix = sysmeta_prefix(_server_type).lower()
+            return sysmeta_key.lower().startswith(s3api_sysmeta_prefix)
+
+        # Partition the raw headers into sysmeta vs plain Swift headers.
+        # NOTE(review): iteritems() is Python 2 only.
+        for key, val in self.headers.iteritems():
+            if is_sys_meta('object', key) or is_sys_meta('container', key):
+                _server_type = key.split('-')[1]
+                if is_swift3_sysmeta(key, _server_type):
+                    # To be compatible with older swift3, translate swift3
+                    # sysmeta to s3api sysmeta here
+                    key = sysmeta_prefix(_server_type) + \
+                        key[len('x-%s-sysmeta-swift3-' % _server_type):]
+
+                    if key not in sw_sysmeta_headers:
+                        # To avoid overwrite s3api sysmeta by older swift3
+                        # sysmeta set the key only when the key does not exist
+                        sw_sysmeta_headers[key] = val
+                elif is_s3api_sysmeta(key, _server_type):
+                    sw_sysmeta_headers[key] = val
+            else:
+                sw_headers[key] = val
+
+        # Handle swift headers
+        for key, val in sw_headers.iteritems():
+            _key = key.lower()
+
+            if _key.startswith('x-object-meta-'):
+                # Note that AWS ignores user-defined headers with '=' in the
+                # header name. We translated underscores to '=5F' on the way
+                # in, though.
+                headers['x-amz-meta-' + _key[14:].replace('=5f', '_')] = val
+            elif _key in ('content-length', 'content-type',
+                          'content-range', 'content-encoding',
+                          'content-disposition', 'content-language',
+                          'etag', 'last-modified', 'x-robots-tag',
+                          'cache-control', 'expires'):
+                # Pass-through of end-to-end entity headers
+                headers[key] = val
+            elif _key == 'x-static-large-object':
+                # for delete slo
+                self.is_slo = config_true_value(val)
+
+        self.headers = headers
+        # Used for pure swift header handling at the request layer
+        self.sw_headers = sw_headers
+        self.sysmeta_headers = sw_sysmeta_headers
+
+    @classmethod
+    def from_swift_resp(cls, sw_resp):
+        """
+        Create a new S3 response object based on the given Swift response.
+        """
+        if sw_resp.app_iter:
+            # Streaming response: hand the iterator through untouched
+            body = None
+            app_iter = sw_resp.app_iter
+        else:
+            body = sw_resp.body
+            app_iter = None
+
+        resp = cls(status=sw_resp.status, headers=sw_resp.headers,
+                   request=sw_resp.request, body=body, app_iter=app_iter,
+                   conditional_response=sw_resp.conditional_response)
+        resp.environ.update(sw_resp.environ)
+
+        return resp
+
+    def append_copy_resp_body(self, controller_name, last_modified):
+        """
+        Replace this response's body with a Copy<controller>Result XML
+        document and drop the ETag header (the etag now lives in the body).
+        """
+        elem = Element('Copy%sResult' % controller_name)
+        SubElement(elem, 'LastModified').text = last_modified
+        SubElement(elem, 'ETag').text = '"%s"' % self.etag
+        self.headers['Content-Type'] = 'application/xml'
+        self.body = tostring(elem)
+        self.etag = None
+
+
+# Factories for plain 2xx responses, analogous to swob.HTTPOk et al. but
+# producing S3Response objects
+HTTPOk = partial(S3Response, status=200)
+HTTPCreated = partial(S3Response, status=201)
+HTTPAccepted = partial(S3Response, status=202)
+HTTPNoContent = partial(S3Response, status=204)
+HTTPPartialContent = partial(S3Response, status=206)
+
+
+class ErrorResponse(S3ResponseBase, swob.HTTPException):
+    """
+    S3 error object.
+
+    Reference information about S3 errors is available at:
+    http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
+    """
+    # Subclasses override these: HTTP status line, default <Message>, and
+    # (optionally) the S3 <Code>; _code defaults to the class name
+    _status = ''
+    _msg = ''
+    _code = ''
+
+    def __init__(self, msg=None, *args, **kwargs):
+        if msg:
+            self._msg = msg
+        if not self._code:
+            self._code = self.__class__.__name__
+
+        # Extra keyword arguments become additional elements in the XML
+        # error body (see _dict_to_etree); 'headers' and 'body' are reserved
+        # for swob and must not leak into the document
+        self.info = kwargs.copy()
+        for reserved_key in ('headers', 'body'):
+            if self.info.get(reserved_key):
+                del(self.info[reserved_key])
+
+        swob.HTTPException.__init__(self, status=self._status,
+                                    app_iter=self._body_iter(),
+                                    content_type='application/xml', *args,
+                                    **kwargs)
+        self.headers = HeaderKeyDict(self.headers)
+
+    def _body_iter(self):
+        # Generator: the XML body is built lazily, so self.environ (for the
+        # transaction id) is read at iteration time, not construction time
+        error_elem = Element('Error')
+        SubElement(error_elem, 'Code').text = self._code
+        SubElement(error_elem, 'Message').text = self._msg
+        if 'swift.trans_id' in self.environ:
+            request_id = self.environ['swift.trans_id']
+            SubElement(error_elem, 'RequestId').text = request_id
+
+        self._dict_to_etree(error_elem, self.info)
+
+        yield tostring(error_elem, use_s3ns=False)
+
+    def _dict_to_etree(self, parent, d):
+        # Recursively render a dict as child elements, converting snake_case
+        # keys to CamelCase tags and stripping non-word characters
+        for key, value in d.items():
+            # NOTE(review): '\W' in a non-raw string is a deprecated escape
+            # under Python 3; prefer r'\W'
+            tag = re.sub('\W', '', snake_to_camel(key))
+            elem = SubElement(parent, tag)
+
+            if isinstance(value, (dict, DictMixin)):
+                self._dict_to_etree(elem, value)
+            else:
+                try:
+                    elem.text = str(value)
+                except ValueError:
+                    # We set an invalid string for XML.
+                    elem.text = '(invalid string)'
+
+
+class AccessDenied(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'Access Denied.'
+
+
+class AccountProblem(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'There is a problem with your AWS account that prevents the ' \
+ 'operation from completing successfully.'
+
+
+class AmbiguousGrantByEmailAddress(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The e-mail address you provided is associated with more than ' \
+ 'one account.'
+
+
+class AuthorizationHeaderMalformed(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The authorization header is malformed; the authorization ' \
+ 'header requires three components: Credential, SignedHeaders, ' \
+ 'and Signature.'
+
+
+class AuthorizationQueryParametersError(ErrorResponse):
+ _status = '400 Bad Request'
+
+
+class BadDigest(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The Content-MD5 you specified did not match what we received.'
+
+
+class BucketAlreadyExists(ErrorResponse):
+ _status = '409 Conflict'
+ _msg = 'The requested bucket name is not available. The bucket ' \
+ 'namespace is shared by all users of the system. Please select a ' \
+ 'different name and try again.'
+
+ def __init__(self, bucket, msg=None, *args, **kwargs):
+ ErrorResponse.__init__(self, msg, bucket_name=bucket, *args, **kwargs)
+
+
+class BucketAlreadyOwnedByYou(ErrorResponse):
+ _status = '409 Conflict'
+ _msg = 'Your previous request to create the named bucket succeeded and ' \
+ 'you already own it.'
+
+ def __init__(self, bucket, msg=None, *args, **kwargs):
+ ErrorResponse.__init__(self, msg, bucket_name=bucket, *args, **kwargs)
+
+
+class BucketNotEmpty(ErrorResponse):
+ _status = '409 Conflict'
+ _msg = 'The bucket you tried to delete is not empty'
+
+
+class CredentialsNotSupported(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'This request does not support credentials.'
+
+
+class CrossLocationLoggingProhibited(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'Cross location logging not allowed. Buckets in one geographic ' \
+ 'location cannot log information to a bucket in another location.'
+
+
+class EntityTooSmall(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Your proposed upload is smaller than the minimum allowed object ' \
+ 'size.'
+
+
+class EntityTooLarge(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Your proposed upload exceeds the maximum allowed object size.'
+
+
+class ExpiredToken(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The provided token has expired.'
+
+
+class IllegalVersioningConfigurationException(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The Versioning configuration specified in the request is invalid.'
+
+
+class IncompleteBody(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'You did not provide the number of bytes specified by the ' \
+ 'Content-Length HTTP header.'
+
+
+class IncorrectNumberOfFilesInPostRequest(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'POST requires exactly one file upload per request.'
+
+
+class InlineDataTooLarge(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Inline data exceeds the maximum allowed size.'
+
+
+class InternalError(ErrorResponse):
+ _status = '500 Internal Server Error'
+ _msg = 'We encountered an internal error. Please try again.'
+
+
+class InvalidAccessKeyId(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'The AWS Access Key Id you provided does not exist in our records.'
+
+
+class InvalidArgument(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Invalid Argument.'
+
+ def __init__(self, name, value, msg=None, *args, **kwargs):
+ ErrorResponse.__init__(self, msg, argument_name=name,
+ argument_value=value, *args, **kwargs)
+
+
+class InvalidBucketName(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The specified bucket is not valid.'
+
+ def __init__(self, bucket, msg=None, *args, **kwargs):
+ ErrorResponse.__init__(self, msg, bucket_name=bucket, *args, **kwargs)
+
+
+class InvalidBucketState(ErrorResponse):
+ _status = '409 Conflict'
+ _msg = 'The request is not valid with the current state of the bucket.'
+
+
+class InvalidDigest(ErrorResponse):
+    # NOTE(review): 'was an invalid.' is ungrammatical but is the historical
+    # swift3 wording; the message is client-visible, so left unchanged here
+    _status = '400 Bad Request'
+    _msg = 'The Content-MD5 you specified was an invalid.'
+
+
+class InvalidLocationConstraint(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The specified location constraint is not valid.'
+
+
+class InvalidObjectState(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'The operation is not valid for the current state of the object.'
+
+
+class InvalidPart(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'One or more of the specified parts could not be found. The part ' \
+ 'might not have been uploaded, or the specified entity tag might ' \
+ 'not have matched the part\'s entity tag.'
+
+
+class InvalidPartOrder(ErrorResponse):
+    # NOTE(review): message is missing a space after 'order.' and the word
+    # 'be' ('must specified'); client-visible string, so left unchanged here
+    _status = '400 Bad Request'
+    _msg = 'The list of parts was not in ascending order.Parts list must ' \
+           'specified in order by part number.'
+
+
+class InvalidPayer(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'All access to this object has been disabled.'
+
+
+class InvalidPolicyDocument(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The content of the form does not meet the conditions specified ' \
+ 'in the policy document.'
+
+
+class InvalidRange(ErrorResponse):
+ _status = '416 Requested Range Not Satisfiable'
+ _msg = 'The requested range cannot be satisfied.'
+
+
+class InvalidRequest(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Invalid Request.'
+
+
+class InvalidSecurity(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'The provided security credentials are not valid.'
+
+
+class InvalidSOAPRequest(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The SOAP request body is invalid.'
+
+
+class InvalidStorageClass(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The storage class you specified is not valid.'
+
+
+class InvalidTargetBucketForLogging(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The target bucket for logging does not exist, is not owned by ' \
+ 'you, or does not have the appropriate grants for the ' \
+ 'log-delivery group.'
+
+ def __init__(self, bucket, msg=None, *args, **kwargs):
+ ErrorResponse.__init__(self, msg, target_bucket=bucket, *args,
+ **kwargs)
+
+
+class InvalidToken(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The provided token is malformed or otherwise invalid.'
+
+
+class InvalidURI(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Couldn\'t parse the specified URI.'
+
+ def __init__(self, uri, msg=None, *args, **kwargs):
+ ErrorResponse.__init__(self, msg, uri=uri, *args, **kwargs)
+
+
+class KeyTooLong(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Your key is too long.'
+
+
+class MalformedACLError(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The XML you provided was not well-formed or did not validate ' \
+ 'against our published schema.'
+
+
+class MalformedPOSTRequest(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The body of your POST request is not well-formed ' \
+ 'multipart/form-data.'
+
+
+class MalformedXML(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The XML you provided was not well-formed or did not validate ' \
+ 'against our published schema.'
+
+
+class MaxMessageLengthExceeded(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Your request was too big.'
+
+
+class MaxPostPreDataLengthExceededError(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Your POST request fields preceding the upload file were too large.'
+
+
+class MetadataTooLarge(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Your metadata headers exceed the maximum allowed metadata size.'
+
+
+class MethodNotAllowed(ErrorResponse):
+ _status = '405 Method Not Allowed'
+ _msg = 'The specified method is not allowed against this resource.'
+
+ def __init__(self, method, resource_type, msg=None, *args, **kwargs):
+ ErrorResponse.__init__(self, msg, method=method,
+ resource_type=resource_type, *args, **kwargs)
+
+
+class MissingContentLength(ErrorResponse):
+ _status = '411 Length Required'
+ _msg = 'You must provide the Content-Length HTTP header.'
+
+
+class MissingRequestBodyError(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Request body is empty.'
+
+
+class MissingSecurityElement(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The SOAP 1.1 request is missing a security element.'
+
+
+class MissingSecurityHeader(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Your request was missing a required header.'
+
+
+class NoLoggingStatusForKey(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'There is no such thing as a logging status sub-resource for a key.'
+
+
+class NoSuchBucket(ErrorResponse):
+    _status = '404 Not Found'
+    _msg = 'The specified bucket does not exist.'
+
+    def __init__(self, bucket, msg=None, *args, **kwargs):
+        # A falsy bucket name is a programming error: surface it as a 500
+        # rather than emit a NoSuchBucket body with no bucket name
+        if not bucket:
+            raise InternalError()
+        ErrorResponse.__init__(self, msg, bucket_name=bucket, *args, **kwargs)
+
+
+class NoSuchKey(ErrorResponse):
+    _status = '404 Not Found'
+    _msg = 'The specified key does not exist.'
+
+    def __init__(self, key, msg=None, *args, **kwargs):
+        # A falsy key is a programming error: surface it as a 500 rather
+        # than emit a NoSuchKey body with no key name
+        if not key:
+            raise InternalError()
+        ErrorResponse.__init__(self, msg, key=key, *args, **kwargs)
+
+
+class NoSuchLifecycleConfiguration(ErrorResponse):
+    # NOTE(review): the trailing ' .' looks accidental; client-visible
+    # string, so left unchanged here
+    _status = '404 Not Found'
+    _msg = 'The lifecycle configuration does not exist. .'
+
+
+class NoSuchUpload(ErrorResponse):
+ _status = '404 Not Found'
+ _msg = 'The specified multipart upload does not exist. The upload ID ' \
+ 'might be invalid, or the multipart upload might have been ' \
+ 'aborted or completed.'
+
+
+class NoSuchVersion(ErrorResponse):
+ _status = '404 Not Found'
+ _msg = 'The specified version does not exist.'
+
+ def __init__(self, key, version_id, msg=None, *args, **kwargs):
+ if not key:
+ raise InternalError()
+ ErrorResponse.__init__(self, msg, key=key, version_id=version_id,
+ *args, **kwargs)
+
+
+# NotImplemented is a python built-in constant. Use S3NotImplemented instead.
+class S3NotImplemented(ErrorResponse):
+ _status = '501 Not Implemented'
+ _msg = 'Not implemented.'
+ _code = 'NotImplemented'
+
+
+class NotSignedUp(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'Your account is not signed up for the Amazon S3 service.'
+
+
+class NotSuchBucketPolicy(ErrorResponse):
+    # NOTE(review): AWS's error code for this condition is
+    # 'NoSuchBucketPolicy'; this class name (and hence the generated <Code>)
+    # reads 'NotSuch...'. Kept for swift3 compatibility -- confirm before
+    # renaming.
+    _status = '404 Not Found'
+    _msg = 'The specified bucket does not have a bucket policy.'
+
+
+class OperationAborted(ErrorResponse):
+ _status = '409 Conflict'
+ _msg = 'A conflicting conditional operation is currently in progress ' \
+ 'against this resource. Please try again.'
+
+
+class PermanentRedirect(ErrorResponse):
+ _status = '301 Moved Permanently'
+ _msg = 'The bucket you are attempting to access must be addressed using ' \
+ 'the specified endpoint. Please send all future requests to this ' \
+ 'endpoint.'
+
+
+class PreconditionFailed(ErrorResponse):
+ _status = '412 Precondition Failed'
+ _msg = 'At least one of the preconditions you specified did not hold.'
+
+
+class Redirect(ErrorResponse):
+ _status = '307 Moved Temporarily'
+ _msg = 'Temporary redirect.'
+
+
+class RestoreAlreadyInProgress(ErrorResponse):
+ _status = '409 Conflict'
+ _msg = 'Object restore is already in progress.'
+
+
+class RequestIsNotMultiPartContent(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Bucket POST must be of the enclosure-type multipart/form-data.'
+
+
+class RequestTimeout(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Your socket connection to the server was not read from or ' \
+ 'written to within the timeout period.'
+
+
+class RequestTimeTooSkewed(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'The difference between the request time and the current time ' \
+ 'is too large.'
+
+
+class RequestTorrentOfBucketError(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'Requesting the torrent file of a bucket is not permitted.'
+
+
+class SignatureDoesNotMatch(ErrorResponse):
+ _status = '403 Forbidden'
+ _msg = 'The request signature we calculated does not match the ' \
+ 'signature you provided. Check your key and signing method.'
+
+
+class ServiceUnavailable(ErrorResponse):
+ _status = '503 Service Unavailable'
+ _msg = 'Please reduce your request rate.'
+
+
+class SlowDown(ErrorResponse):
+    # NOTE(review): '503 Slow Down' is a nonstandard HTTP reason phrase
+    # (cf. ServiceUnavailable, which uses the standard one); it mirrors
+    # AWS's SlowDown error code
+    _status = '503 Slow Down'
+    _msg = 'Please reduce your request rate.'
+
+
+class TemporaryRedirect(ErrorResponse):
+ _status = '307 Moved Temporarily'
+ _msg = 'You are being redirected to the bucket while DNS updates.'
+
+
+class TokenRefreshRequired(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The provided token must be refreshed.'
+
+
+class TooManyBuckets(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'You have attempted to create more buckets than allowed.'
+
+
+class UnexpectedContent(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'This request does not support content.'
+
+
+class UnresolvableGrantByEmailAddress(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The e-mail address you provided does not match any account on ' \
+ 'record.'
+
+
+class UserKeyMustBeSpecified(ErrorResponse):
+ _status = '400 Bad Request'
+ _msg = 'The bucket POST must contain the specified field name. If it is ' \
+ 'specified, please check the order of the fields.'
diff --git a/swift/common/middleware/s3api/s3token.py b/swift/common/middleware/s3api/s3token.py
new file mode 100644
index 000000000..bf20bb823
--- /dev/null
+++ b/swift/common/middleware/s3api/s3token.py
@@ -0,0 +1,324 @@
+# Copyright 2012 OpenStack Foundation
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# Copyright 2011,2012 Akira YOSHIYAMA <akirayoshiyama@gmail.com>
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# This source code is based ./auth_token.py and ./ec2_token.py.
+# See them for their copyright.
+
+"""
+-------------------
+S3 Token Middleware
+-------------------
+s3token middleware is for authentication with s3api + keystone.
+This middleware:
+
+* Gets a request from the s3api middleware with an S3 Authorization
+ access key.
+* Validates s3 token with Keystone.
+* Transforms the account name to AUTH_%(tenant_name).
+
+"""
+
+import base64
+import json
+
+import requests
+import six
+from six.moves import urllib
+
+from swift.common.swob import Request, HTTPBadRequest, HTTPUnauthorized, \
+ HTTPException
+from swift.common.utils import config_true_value, split_path, get_logger
+from swift.common.wsgi import ConfigFileError
+
+
+PROTOCOL_NAME = 'S3 Token Authentication'
+
+# Headers to purge if they came from (or may have come from) the client
+KEYSTONE_AUTH_HEADERS = (
+ 'X-Identity-Status', 'X-Service-Identity-Status',
+ 'X-Domain-Id', 'X-Service-Domain-Id',
+ 'X-Domain-Name', 'X-Service-Domain-Name',
+ 'X-Project-Id', 'X-Service-Project-Id',
+ 'X-Project-Name', 'X-Service-Project-Name',
+ 'X-Project-Domain-Id', 'X-Service-Project-Domain-Id',
+ 'X-Project-Domain-Name', 'X-Service-Project-Domain-Name',
+ 'X-User-Id', 'X-Service-User-Id',
+ 'X-User-Name', 'X-Service-User-Name',
+ 'X-User-Domain-Id', 'X-Service-User-Domain-Id',
+ 'X-User-Domain-Name', 'X-Service-User-Domain-Name',
+ 'X-Roles', 'X-Service-Roles',
+ 'X-Is-Admin-Project',
+ 'X-Service-Catalog',
+ # Deprecated headers, too...
+ 'X-Tenant-Id',
+ 'X-Tenant-Name',
+ 'X-Tenant',
+ 'X-User',
+ 'X-Role',
+)
+
+
def parse_v2_response(token):
    """Convert a Keystone v2 token document into auth_token-style headers.

    :param token: deserialized JSON body of a Keystone v2 validation reply
    :returns: a (headers, token_id, tenant) tuple, where headers is a dict
              of X-* identity headers, token_id is the Keystone token id
              (or None if absent), and tenant is the tenant sub-dict
    """
    access = token['access']
    user = access['user']
    tenant = access['token']['tenant']
    headers = {
        'X-Identity-Status': 'Confirmed',
        'X-Roles': ','.join(role['name'] for role in user['roles']),
        'X-User-Id': user['id'],
        'X-User-Name': user['name'],
        # Tenant-* are the deprecated spellings of the Project-* headers.
        'X-Tenant-Id': tenant['id'],
        'X-Tenant-Name': tenant['name'],
        'X-Project-Id': tenant['id'],
        'X-Project-Name': tenant['name'],
    }
    return headers, access['token'].get('id'), tenant
+
+
def parse_v3_response(token):
    """Convert a Keystone v3 token document into auth_token-style headers.

    :param token: deserialized JSON body of a Keystone v3 validation reply
    :returns: a (headers, token_id, project) tuple; token_id is always None
              for v3 (the caller gets the id out-of-band), and project is
              the project sub-dict
    """
    body = token['token']
    user = body['user']
    project = body['project']
    headers = {
        'X-Identity-Status': 'Confirmed',
        'X-Roles': ','.join(role['name'] for role in body['roles']),
        'X-User-Id': user['id'],
        'X-User-Name': user['name'],
        'X-User-Domain-Id': user['domain']['id'],
        'X-User-Domain-Name': user['domain']['name'],
        # Tenant-* are the deprecated spellings of the Project-* headers.
        'X-Tenant-Id': project['id'],
        'X-Tenant-Name': project['name'],
        'X-Project-Id': project['id'],
        'X-Project-Name': project['name'],
        'X-Project-Domain-Id': project['domain']['id'],
        'X-Project-Domain-Name': project['domain']['name'],
    }
    return headers, None, project
+
+
class S3Token(object):
    """Middleware that handles S3 authentication.

    Validates the S3 access key / signature left in the environ by the
    s3api middleware against Keystone's /s3tokens endpoint, then rewrites
    the account in PATH_INFO to <reseller_prefix><tenant_id>.
    """

    def __init__(self, app, conf):
        """Common initialization code.

        :param app: next WSGI application in the pipeline
        :param conf: dict of configuration options
        :raises ValueError: if http_timeout is outside (0, 60]
        :raises ConfigFileError: if auth_uri is missing or malformed
        """
        self._app = app
        self._logger = get_logger(
            conf, log_route=conf.get('log_name', 's3token'))
        self._logger.debug('Starting the %s component', PROTOCOL_NAME)
        self._timeout = float(conf.get('http_timeout', '10.0'))
        if not (0 < self._timeout <= 60):
            raise ValueError('http_timeout must be between 0 and 60 seconds')
        self._reseller_prefix = conf.get('reseller_prefix', 'AUTH_')
        self._delay_auth_decision = config_true_value(
            conf.get('delay_auth_decision'))

        # where to find the auth service (we use this to validate tokens)
        self._request_uri = conf.get('auth_uri', '').rstrip('/') + '/s3tokens'
        parsed = urllib.parse.urlsplit(self._request_uri)
        if not parsed.scheme or not parsed.hostname:
            raise ConfigFileError(
                'Invalid auth_uri; must include scheme and host')
        if parsed.scheme not in ('http', 'https'):
            raise ConfigFileError(
                'Invalid auth_uri; scheme must be http or https')
        if parsed.query or parsed.fragment or '@' in parsed.netloc:
            raise ConfigFileError('Invalid auth_uri; must not include '
                                  'username, query, or fragment')

        # SSL: requests' ``verify``/``cert`` semantics -- False disables
        # verification, a path points at a CA bundle / client cert.
        insecure = config_true_value(conf.get('insecure'))
        cert_file = conf.get('certfile')
        key_file = conf.get('keyfile')

        if insecure:
            self._verify = False
        elif cert_file and key_file:
            self._verify = (cert_file, key_file)
        elif cert_file:
            self._verify = cert_file
        else:
            self._verify = None

    def _deny_request(self, code):
        """Build a swob error response with an S3-style XML error body.

        :param code: 'AccessDenied' (401) or 'InvalidURI' (400)
        :returns: a swob response object (also usable as an exception)
        """
        error_cls, message = {
            'AccessDenied': (HTTPUnauthorized, 'Access denied'),
            'InvalidURI': (HTTPBadRequest,
                           'Could not parse the specified URI'),
        }[code]
        resp = error_cls(content_type='text/xml')
        error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
                     '<Error>\r\n  <Code>%s</Code>\r\n  '
                     '<Message>%s</Message>\r\n</Error>\r\n' %
                     (code, message))
        if six.PY3:
            error_msg = error_msg.encode()
        resp.body = error_msg
        return resp

    def _json_request(self, creds_json):
        """POST the credentials document to Keystone for validation.

        :param creds_json: JSON-encoded credentials payload
        :returns: the (2xx) requests.Response from Keystone
        :raises HTTPException: via _deny_request on connection failure
                               ('InvalidURI') or non-2xx ('AccessDenied')
        """
        headers = {'Content-Type': 'application/json'}
        try:
            response = requests.post(self._request_uri,
                                     headers=headers, data=creds_json,
                                     verify=self._verify,
                                     timeout=self._timeout)
        except requests.exceptions.RequestException as e:
            self._logger.info('HTTP connection exception: %s', e)
            raise self._deny_request('InvalidURI')

        if response.status_code < 200 or response.status_code >= 300:
            self._logger.debug('Keystone reply error: status=%s reason=%s',
                               response.status_code, response.reason)
            raise self._deny_request('AccessDenied')

        return response

    def __call__(self, environ, start_response):
        """Handle incoming request. authenticate and send downstream."""
        req = Request(environ)
        self._logger.debug('Calling S3Token middleware.')

        # Always drop auth headers if we're first in the pipeline
        if 'keystone.token_info' not in req.environ:
            req.headers.update({h: None for h in KEYSTONE_AUTH_HEADERS})

        try:
            parts = split_path(req.path, 1, 4, True)
            version, account, container, obj = parts
        except ValueError:
            msg = 'Not a path query: %s, skipping.' % req.path
            self._logger.debug(msg)
            return self._app(environ, start_response)

        # Read request signature and access id.
        s3_auth_details = req.environ.get('s3api.auth_details')
        if not s3_auth_details:
            msg = 'No authorization details from s3api. skipping.'
            self._logger.debug(msg)
            return self._app(environ, start_response)

        access = s3_auth_details['access_key']
        if isinstance(access, six.binary_type):
            access = access.decode('utf-8')

        signature = s3_auth_details['signature']
        if isinstance(signature, six.binary_type):
            signature = signature.decode('utf-8')

        string_to_sign = s3_auth_details['string_to_sign']
        if isinstance(string_to_sign, six.text_type):
            string_to_sign = string_to_sign.encode('utf-8')
        # urlsafe_b64encode returns bytes; decode to a text string so that
        # json.dumps below can serialize it.  (The previous
        # ``.encode('ascii')`` raised AttributeError on py3, where bytes
        # has no encode method.)
        token = base64.urlsafe_b64encode(string_to_sign).decode('ascii')

        # NOTE(chmou): This is to handle the special case with nova
        # when we have the option s3_affix_tenant. We will force it to
        # connect to another account than the one
        # authenticated. Before people start getting worried about
        # security, I should point that we are connecting with
        # username/token specified by the user but instead of
        # connecting to its own account we will force it to go to an
        # another account. In a normal scenario if that user don't
        # have the reseller right it will just fail but since the
        # reseller account can connect to every account it is allowed
        # by the swift_auth middleware.
        force_tenant = None
        if ':' in access:
            access, force_tenant = access.split(':')

        # Authenticate request.
        creds = {'credentials': {'access': access,
                                 'token': token,
                                 'signature': signature}}
        creds_json = json.dumps(creds)
        self._logger.debug('Connecting to Keystone sending this JSON: %s',
                           creds_json)
        # NOTE(vish): We could save a call to keystone by having
        #             keystone return token, tenant, user, and roles
        #             from this call.
        #
        # NOTE(chmou): We still have the same problem we would need to
        #              change token_auth to detect if we already
        #              identified and not doing a second query and just
        #              pass it through to swiftauth in this case.
        try:
            # NB: requests.Response, not swob.Response
            resp = self._json_request(creds_json)
        except HTTPException as e_resp:
            if self._delay_auth_decision:
                msg = 'Received error, deferring rejection based on error: %s'
                self._logger.debug(msg, e_resp.status)
                return self._app(environ, start_response)
            else:
                msg = 'Received error, rejecting request with error: %s'
                self._logger.debug(msg, e_resp.status)
                # NB: swob.Response, not requests.Response
                return e_resp(environ, start_response)

        self._logger.debug('Keystone Reply: Status: %d, Output: %s',
                           resp.status_code, resp.content)

        try:
            token = resp.json()
            if 'access' in token:
                headers, token_id, tenant = parse_v2_response(token)
            elif 'token' in token:
                headers, token_id, tenant = parse_v3_response(token)
            else:
                raise ValueError

            # Populate the environment similar to auth_token,
            # so we don't have to contact Keystone again.
            #
            # Note that although the strings are unicode following json
            # deserialization, Swift's HeaderEnvironProxy handles ensuring
            # they're stored as native strings
            req.headers.update(headers)
            req.environ['keystone.token_info'] = token
        except (ValueError, KeyError, TypeError):
            if self._delay_auth_decision:
                error = ('Error on keystone reply: %d %s - '
                         'deferring rejection downstream')
                self._logger.debug(error, resp.status_code, resp.content)
                return self._app(environ, start_response)
            else:
                error = ('Error on keystone reply: %d %s - '
                         'rejecting request')
                self._logger.debug(error, resp.status_code, resp.content)
                return self._deny_request('InvalidURI')(
                    environ, start_response)

        req.headers['X-Auth-Token'] = token_id
        tenant_to_connect = force_tenant or tenant['id']
        if six.PY2 and isinstance(tenant_to_connect, six.text_type):
            tenant_to_connect = tenant_to_connect.encode('utf-8')
        self._logger.debug('Connecting with tenant: %s', tenant_to_connect)
        new_tenant_name = '%s%s' % (self._reseller_prefix, tenant_to_connect)
        environ['PATH_INFO'] = environ['PATH_INFO'].replace(account,
                                                            new_tenant_name)
        return self._app(environ, start_response)
+
+
def filter_factory(global_conf, **local_conf):
    """Returns a WSGI filter app for use with paste.deploy."""
    # Local (filter-section) options take precedence over global ones.
    conf = dict(global_conf)
    conf.update(local_conf)

    def auth_filter(app):
        return S3Token(app, conf)

    return auth_filter
diff --git a/swift/common/middleware/s3api/schema/access_control_policy.rng b/swift/common/middleware/s3api/schema/access_control_policy.rng
new file mode 100644
index 000000000..5308a12f3
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/access_control_policy.rng
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0">
+ <include href="common.rng"/>
+ <start>
+ <element name="AccessControlPolicy">
+ <interleave>
+ <element name="Owner">
+ <ref name="CanonicalUser"/>
+ </element>
+ <element name="AccessControlList">
+ <ref name="AccessControlList"/>
+ </element>
+ </interleave>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/bucket_logging_status.rng b/swift/common/middleware/s3api/schema/bucket_logging_status.rng
new file mode 100644
index 000000000..27ea1e1dd
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/bucket_logging_status.rng
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <include href="common.rng"/>
+ <start>
+ <element name="BucketLoggingStatus">
+ <optional>
+ <element name="LoggingEnabled">
+ <interleave>
+ <element name="TargetBucket">
+ <data type="string"/>
+ </element>
+ <element name="TargetPrefix">
+ <data type="string"/>
+ </element>
+ <optional>
+ <element name="TargetGrants">
+ <ref name="AccessControlList"/>
+ </element>
+ </optional>
+ </interleave>
+ </element>
+ </optional>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/common.rng b/swift/common/middleware/s3api/schema/common.rng
new file mode 100644
index 000000000..22319c0eb
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/common.rng
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <define name="CanonicalUser">
+ <interleave>
+ <element name="ID">
+ <data type="string"/>
+ </element>
+ <optional>
+ <element name="DisplayName">
+ <data type="string"/>
+ </element>
+ </optional>
+ </interleave>
+ </define>
+ <define name="StorageClass">
+ <choice>
+ <value>STANDARD</value>
+ <value>REDUCED_REDUNDANCY</value>
+ <value>GLACIER</value>
+ <value>UNKNOWN</value>
+ </choice>
+ </define>
+ <define name="AccessControlList">
+ <zeroOrMore>
+ <element name="Grant">
+ <interleave>
+ <element name="Grantee">
+ <choice>
+ <group>
+ <attribute name="xsi:type">
+ <value>AmazonCustomerByEmail</value>
+ </attribute>
+ <element name="EmailAddress">
+ <data type="string"/>
+ </element>
+ </group>
+ <group>
+ <attribute name="xsi:type">
+ <value>CanonicalUser</value>
+ </attribute>
+ <ref name="CanonicalUser"/>
+ </group>
+ <group>
+ <attribute name="xsi:type">
+ <value>Group</value>
+ </attribute>
+ <element name="URI">
+ <data type="string"/>
+ </element>
+ </group>
+ </choice>
+ </element>
+ <element name="Permission">
+ <choice>
+ <value>READ</value>
+ <value>WRITE</value>
+ <value>READ_ACP</value>
+ <value>WRITE_ACP</value>
+ <value>FULL_CONTROL</value>
+ </choice>
+ </element>
+ </interleave>
+ </element>
+ </zeroOrMore>
+ </define>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/complete_multipart_upload.rng b/swift/common/middleware/s3api/schema/complete_multipart_upload.rng
new file mode 100644
index 000000000..d7ba2569b
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/complete_multipart_upload.rng
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="CompleteMultipartUpload">
+ <oneOrMore>
+ <element name="Part">
+ <interleave>
+ <element name="PartNumber">
+ <data type="int"/>
+ </element>
+ <element name="ETag">
+ <data type="string"/>
+ </element>
+ </interleave>
+ </element>
+ </oneOrMore>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/complete_multipart_upload_result.rng b/swift/common/middleware/s3api/schema/complete_multipart_upload_result.rng
new file mode 100644
index 000000000..47406e1c5
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/complete_multipart_upload_result.rng
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="CompleteMultipartUploadResult">
+ <element name="Location">
+ <data type="anyURI"/>
+ </element>
+ <element name="Bucket">
+ <data type="string"/>
+ </element>
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <element name="ETag">
+ <data type="string"/>
+ </element>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/copy_object_result.rng b/swift/common/middleware/s3api/schema/copy_object_result.rng
new file mode 100644
index 000000000..ec0ac95f2
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/copy_object_result.rng
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="CopyObjectResult">
+ <element name="LastModified">
+ <data type="dateTime"/>
+ </element>
+ <element name="ETag">
+ <data type="string"/>
+ </element>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/copy_part_result.rng b/swift/common/middleware/s3api/schema/copy_part_result.rng
new file mode 100644
index 000000000..0370daad6
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/copy_part_result.rng
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="CopyPartResult">
+ <element name="LastModified">
+ <data type="dateTime"/>
+ </element>
+ <element name="ETag">
+ <data type="string"/>
+ </element>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/create_bucket_configuration.rng b/swift/common/middleware/s3api/schema/create_bucket_configuration.rng
new file mode 100644
index 000000000..882edc465
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/create_bucket_configuration.rng
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element>
+ <anyName/>
+ <element name="LocationConstraint">
+ <data type="string"/>
+ </element>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/delete.rng b/swift/common/middleware/s3api/schema/delete.rng
new file mode 100644
index 000000000..3417a3945
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/delete.rng
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="Delete">
+ <interleave>
+ <optional>
+ <element name="Quiet">
+ <data type="boolean"/>
+ </element>
+ </optional>
+ <oneOrMore>
+ <element name="Object">
+ <interleave>
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <optional>
+ <element name="VersionId">
+ <data type="string"/>
+ </element>
+ </optional>
+ </interleave>
+ </element>
+ </oneOrMore>
+ </interleave>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/delete_result.rng b/swift/common/middleware/s3api/schema/delete_result.rng
new file mode 100644
index 000000000..1e28b3ceb
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/delete_result.rng
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="DeleteResult">
+ <zeroOrMore>
+ <choice>
+ <element name="Deleted">
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <optional>
+ <element name="VersionId">
+ <data type="string"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="DeleteMarker">
+ <data type="boolean"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="DeleteMarkerVersionId">
+ <data type="string"/>
+ </element>
+ </optional>
+ </element>
+ <element name="Error">
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <optional>
+ <element name="VersionId">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="Code">
+ <data type="string"/>
+ </element>
+ <element name="Message">
+ <data type="string"/>
+ </element>
+ </element>
+ </choice>
+ </zeroOrMore>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/error.rng b/swift/common/middleware/s3api/schema/error.rng
new file mode 100644
index 000000000..a0d61d485
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/error.rng
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="Error">
+ <element name="Code">
+ <data type="string"/>
+ </element>
+ <element name="Message">
+ <data type="string"/>
+ </element>
+ <zeroOrMore>
+ <ref name="DebugInfo"/>
+ </zeroOrMore>
+ </element>
+ </start>
+ <define name="DebugInfo">
+ <element>
+ <anyName/>
+ <zeroOrMore>
+ <choice>
+ <attribute>
+ <anyName/>
+ </attribute>
+ <text/>
+ <ref name="DebugInfo"/>
+ </choice>
+ </zeroOrMore>
+ </element>
+ </define>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/initiate_multipart_upload_result.rng b/swift/common/middleware/s3api/schema/initiate_multipart_upload_result.rng
new file mode 100644
index 000000000..67d03016d
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/initiate_multipart_upload_result.rng
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="InitiateMultipartUploadResult">
+ <element name="Bucket">
+ <data type="string"/>
+ </element>
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <element name="UploadId">
+ <data type="string"/>
+ </element>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/lifecycle_configuration.rng b/swift/common/middleware/s3api/schema/lifecycle_configuration.rng
new file mode 100644
index 000000000..dd0816e2f
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/lifecycle_configuration.rng
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <include href="common.rng"/>
+ <start>
+ <element name="LifecycleConfiguration">
+ <oneOrMore>
+ <element name="Rule">
+ <interleave>
+ <optional>
+ <element name="ID">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="Prefix">
+ <data type="string"/>
+ </element>
+ <element name="Status">
+ <choice>
+ <value>Enabled</value>
+ <value>Disabled</value>
+ </choice>
+ </element>
+ <optional>
+ <element name="Transition">
+ <ref name="Transition"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="Expiration">
+ <ref name="Expiration"/>
+ </element>
+ </optional>
+ </interleave>
+ </element>
+ </oneOrMore>
+ </element>
+ </start>
+ <define name="Expiration">
+ <choice>
+ <element name="Days">
+ <data type="int"/>
+ </element>
+ <element name="Date">
+ <data type="dateTime"/>
+ </element>
+ </choice>
+ </define>
+ <define name="Transition">
+ <interleave>
+ <ref name="Expiration"/>
+ <element name="StorageClass">
+ <ref name="StorageClass"/>
+ </element>
+ </interleave>
+ </define>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/list_all_my_buckets_result.rng b/swift/common/middleware/s3api/schema/list_all_my_buckets_result.rng
new file mode 100644
index 000000000..76959d7b8
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/list_all_my_buckets_result.rng
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <include href="common.rng"/>
+ <start>
+ <element name="ListAllMyBucketsResult">
+ <element name="Owner">
+ <ref name="CanonicalUser"/>
+ </element>
+ <element name="Buckets">
+ <zeroOrMore>
+ <element name="Bucket">
+ <element name="Name">
+ <data type="string"/>
+ </element>
+ <element name="CreationDate">
+ <data type="dateTime"/>
+ </element>
+ </element>
+ </zeroOrMore>
+ </element>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/list_bucket_result.rng b/swift/common/middleware/s3api/schema/list_bucket_result.rng
new file mode 100644
index 000000000..9c6640c69
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/list_bucket_result.rng
@@ -0,0 +1,93 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <include href="common.rng"/>
+ <start>
+ <element name="ListBucketResult">
+ <element name="Name">
+ <data type="string"/>
+ </element>
+ <element name="Prefix">
+ <data type="string"/>
+ </element>
+ <choice>
+ <group>
+ <element name="Marker">
+ <data type="string"/>
+ </element>
+ <optional>
+ <element name="NextMarker">
+ <data type="string"/>
+ </element>
+ </optional>
+ </group>
+ <group>
+ <optional>
+ <element name="NextContinuationToken">
+ <data type="string"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="ContinuationToken">
+ <data type="string"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="StartAfter">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="KeyCount">
+ <data type="int"/>
+ </element>
+ </group>
+ </choice>
+ <element name="MaxKeys">
+ <data type="int"/>
+ </element>
+ <optional>
+ <element name="EncodingType">
+ <data type="string"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="Delimiter">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="IsTruncated">
+ <data type="boolean"/>
+ </element>
+ <zeroOrMore>
+ <element name="Contents">
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <element name="LastModified">
+ <data type="dateTime"/>
+ </element>
+ <element name="ETag">
+ <data type="string"/>
+ </element>
+ <element name="Size">
+ <data type="long"/>
+ </element>
+ <optional>
+ <element name="Owner">
+ <ref name="CanonicalUser"/>
+ </element>
+ </optional>
+ <element name="StorageClass">
+ <ref name="StorageClass"/>
+ </element>
+ </element>
+ </zeroOrMore>
+ <zeroOrMore>
+ <element name="CommonPrefixes">
+ <element name="Prefix">
+ <data type="string"/>
+ </element>
+ </element>
+ </zeroOrMore>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/list_multipart_uploads_result.rng b/swift/common/middleware/s3api/schema/list_multipart_uploads_result.rng
new file mode 100644
index 000000000..2e20c840e
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/list_multipart_uploads_result.rng
@@ -0,0 +1,73 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <include href="common.rng"/>
+ <start>
+ <element name="ListMultipartUploadsResult">
+ <element name="Bucket">
+ <data type="string"/>
+ </element>
+ <element name="KeyMarker">
+ <data type="string"/>
+ </element>
+ <element name="UploadIdMarker">
+ <data type="string"/>
+ </element>
+ <element name="NextKeyMarker">
+ <data type="string"/>
+ </element>
+ <element name="NextUploadIdMarker">
+ <data type="string"/>
+ </element>
+ <optional>
+ <element name="Delimiter">
+ <data type="string"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="Prefix">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="MaxUploads">
+ <data type="int"/>
+ </element>
+ <optional>
+ <element name="EncodingType">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="IsTruncated">
+ <data type="boolean"/>
+ </element>
+ <zeroOrMore>
+ <element name="Upload">
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <element name="UploadId">
+ <data type="string"/>
+ </element>
+ <element name="Initiator">
+ <ref name="CanonicalUser"/>
+ </element>
+ <element name="Owner">
+ <ref name="CanonicalUser"/>
+ </element>
+ <element name="StorageClass">
+ <ref name="StorageClass"/>
+ </element>
+ <element name="Initiated">
+ <data type="dateTime"/>
+ </element>
+ </element>
+ </zeroOrMore>
+ <zeroOrMore>
+ <element name="CommonPrefixes">
+ <element name="Prefix">
+ <data type="string"/>
+ </element>
+ </element>
+ </zeroOrMore>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/list_parts_result.rng b/swift/common/middleware/s3api/schema/list_parts_result.rng
new file mode 100644
index 000000000..4cf5a0ce7
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/list_parts_result.rng
@@ -0,0 +1,59 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <include href="common.rng"/>
+ <start>
+ <element name="ListPartsResult">
+ <element name="Bucket">
+ <data type="string"/>
+ </element>
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <element name="UploadId">
+ <data type="string"/>
+ </element>
+ <element name="Initiator">
+ <ref name="CanonicalUser"/>
+ </element>
+ <element name="Owner">
+ <ref name="CanonicalUser"/>
+ </element>
+ <element name="StorageClass">
+ <ref name="StorageClass"/>
+ </element>
+ <element name="PartNumberMarker">
+ <data type="int"/>
+ </element>
+ <element name="NextPartNumberMarker">
+ <data type="int"/>
+ </element>
+ <element name="MaxParts">
+ <data type="int"/>
+ </element>
+ <optional>
+ <element name="EncodingType">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="IsTruncated">
+ <data type="boolean"/>
+ </element>
+ <zeroOrMore>
+ <element name="Part">
+ <element name="PartNumber">
+ <data type="int"/>
+ </element>
+ <element name="LastModified">
+ <data type="dateTime"/>
+ </element>
+ <element name="ETag">
+ <data type="string"/>
+ </element>
+ <element name="Size">
+ <data type="long"/>
+ </element>
+ </element>
+ </zeroOrMore>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/list_versions_result.rng b/swift/common/middleware/s3api/schema/list_versions_result.rng
new file mode 100644
index 000000000..464cfbcc4
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/list_versions_result.rng
@@ -0,0 +1,104 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <include href="common.rng"/>
+ <start>
+ <element name="ListVersionsResult">
+ <element name="Name">
+ <data type="string"/>
+ </element>
+ <element name="Prefix">
+ <data type="string"/>
+ </element>
+ <element name="KeyMarker">
+ <data type="string"/>
+ </element>
+ <element name="VersionIdMarker">
+ <data type="string"/>
+ </element>
+ <optional>
+ <element name="NextKeyMarker">
+ <data type="string"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="NextVersionIdMarker">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="MaxKeys">
+ <data type="int"/>
+ </element>
+ <optional>
+ <element name="EncodingType">
+ <data type="string"/>
+ </element>
+ </optional>
+ <optional>
+ <element name="Delimiter">
+ <data type="string"/>
+ </element>
+ </optional>
+ <element name="IsTruncated">
+ <data type="boolean"/>
+ </element>
+ <zeroOrMore>
+ <choice>
+ <element name="Version">
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <element name="VersionId">
+ <data type="string"/>
+ </element>
+ <element name="IsLatest">
+ <data type="boolean"/>
+ </element>
+ <element name="LastModified">
+ <data type="dateTime"/>
+ </element>
+ <element name="ETag">
+ <data type="string"/>
+ </element>
+ <element name="Size">
+ <data type="long"/>
+ </element>
+ <optional>
+ <element name="Owner">
+ <ref name="CanonicalUser"/>
+ </element>
+ </optional>
+ <element name="StorageClass">
+ <ref name="StorageClass"/>
+ </element>
+ </element>
+ <element name="DeleteMarker">
+ <element name="Key">
+ <data type="string"/>
+ </element>
+ <element name="VersionId">
+ <data type="string"/>
+ </element>
+ <element name="IsLatest">
+ <data type="boolean"/>
+ </element>
+ <element name="LastModified">
+ <data type="dateTime"/>
+ </element>
+ <optional>
+ <element name="Owner">
+ <ref name="CanonicalUser"/>
+ </element>
+ </optional>
+ </element>
+ </choice>
+ </zeroOrMore>
+ <zeroOrMore>
+ <element name="CommonPrefixes">
+ <element name="Prefix">
+ <data type="string"/>
+ </element>
+ </element>
+ </zeroOrMore>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/location_constraint.rng b/swift/common/middleware/s3api/schema/location_constraint.rng
new file mode 100644
index 000000000..2f3a143b2
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/location_constraint.rng
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0" datatypeLibrary="http://www.w3.org/2001/XMLSchema-datatypes">
+ <start>
+ <element name="LocationConstraint">
+ <data type="string"/>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/schema/versioning_configuration.rng b/swift/common/middleware/s3api/schema/versioning_configuration.rng
new file mode 100644
index 000000000..3d6d3d123
--- /dev/null
+++ b/swift/common/middleware/s3api/schema/versioning_configuration.rng
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<grammar xmlns="http://relaxng.org/ns/structure/1.0">
+ <start>
+ <element name="VersioningConfiguration">
+ <interleave>
+ <optional>
+ <element name="Status">
+ <choice>
+ <value>Enabled</value>
+ <value>Suspended</value>
+ </choice>
+ </element>
+ </optional>
+ <optional>
+ <element name="MfaDelete">
+ <choice>
+ <value>Enabled</value>
+ <value>Disabled</value>
+ </choice>
+ </element>
+ </optional>
+ </interleave>
+ </element>
+ </start>
+</grammar>
diff --git a/swift/common/middleware/s3api/subresource.py b/swift/common/middleware/s3api/subresource.py
new file mode 100644
index 000000000..42bd67f00
--- /dev/null
+++ b/swift/common/middleware/s3api/subresource.py
@@ -0,0 +1,563 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+---------------------------
+s3api's ACLs implementation
+---------------------------
+s3api uses a different implementation approach to achieve S3 ACLs.
+
+First, we should understand what we have to design to achieve real S3 ACLs.
+Current s3api(real S3)'s ACLs Model is as follows::
+
+ AccessControlPolicy:
+ Owner:
+ AccessControlList:
+ Grant[n]:
+ (Grantee, Permission)
+
+Each bucket or object has its own acl consisting of Owner and
+AccessControlList. AccessControlList can contain some Grants.
+By default, AccessControlList has only one Grant to allow
+FULL_CONTROL to the owner. Each Grant includes a single pair of
+Grantee and Permission. Grantee is the user (or user group) allowed the given permission.
+
+This module defines the groups and the relation tree.
+
+If you want more detailed information about the S3 ACL model,
+please see the official documentation here:
+
+http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html
+
+"""
+from functools import partial
+
+from swift.common.utils import json
+
+from swift.common.middleware.s3api.s3response import InvalidArgument, \
+ MalformedACLError, S3NotImplemented, InvalidRequest, AccessDenied
+from swift.common.middleware.s3api.etree import Element, SubElement, tostring
+from swift.common.middleware.s3api.utils import sysmeta_header
+from swift.common.middleware.s3api.exception import InvalidSubresource
+
+XMLNS_XSI = 'http://www.w3.org/2001/XMLSchema-instance'
+PERMISSIONS = ['FULL_CONTROL', 'READ', 'WRITE', 'READ_ACP', 'WRITE_ACP']
+LOG_DELIVERY_USER = '.log_delivery'
+
+
+def encode_acl(resource, acl):
+ """
+ Encode an ACL instance to Swift metadata.
+
+ Given a resource type and an ACL instance, this method returns HTTP
+ headers, which can be used for Swift metadata.
+ """
+ header_value = {"Owner": acl.owner.id}
+ grants = []
+ for grant in acl.grants:
+ grant = {"Permission": grant.permission,
+ "Grantee": str(grant.grantee)}
+ grants.append(grant)
+ header_value.update({"Grant": grants})
+ headers = {}
+ key = sysmeta_header(resource, 'acl')
+ headers[key] = json.dumps(header_value, separators=(',', ':'))
+
+ return headers
+
+
+def decode_acl(resource, headers, allow_no_owner):
+ """
+ Decode Swift metadata to an ACL instance.
+
+ Given a resource type and HTTP headers, this method returns an ACL
+ instance.
+ """
+ value = ''
+
+ key = sysmeta_header(resource, 'acl')
+ if key in headers:
+ value = headers[key]
+
+ if value == '':
+ # FIXME: When the value is empty or is not a dict instance,
+ # we would like the Owner to be None. However, an Owner of
+ # None would cause errors later when the Owner's instance
+ # variables are referenced, so use Owner(None, None) instead.
+ return ACL(Owner(None, None), [], True, allow_no_owner)
+
+ try:
+ encode_value = json.loads(value)
+ if not isinstance(encode_value, dict):
+ return ACL(Owner(None, None), [], True, allow_no_owner)
+
+ id = None
+ name = None
+ grants = []
+ if 'Owner' in encode_value:
+ id = encode_value['Owner']
+ name = encode_value['Owner']
+ if 'Grant' in encode_value:
+ for grant in encode_value['Grant']:
+ grantee = None
+ # pylint: disable-msg=E1101
+ for group in Group.__subclasses__():
+ if group.__name__ == grant['Grantee']:
+ grantee = group()
+ if not grantee:
+ grantee = User(grant['Grantee'])
+ permission = grant['Permission']
+ grants.append(Grant(grantee, permission))
+ return ACL(Owner(id, name), grants, True, allow_no_owner)
+ except Exception as e:
+ raise InvalidSubresource((resource, 'acl', value), e)
+
+
+class Grantee(object):
+ """
+ Base class for grantee.
+
+ Methods:
+
+ * init: create a Grantee instance
+ * elem: create an ElementTree from itself
+
+ Static Methods:
+
+ * from_header: convert a grantee string in the HTTP header
+ to a Grantee instance.
+ * from_elem: convert an ElementTree to a Grantee instance.
+
+ """
+ # Needs confirmation whether we really need these methods or not.
+ # * encode (method): create a JSON which includes whole own elements
+ # * encode_from_elem (static method): convert from an ElementTree to a JSON
+ # * elem_from_json (static method): convert from a JSON to an ElementTree
+ # * from_json (static method): convert a JSON string to a Grantee
+ # instance.
+
+ def __contains__(self, key):
+ """
+ The key argument is a S3 user id. This method checks that the user id
+ belongs to this class.
+ """
+ raise S3NotImplemented()
+
+ def elem(self):
+ """
+ Get an etree element of this instance.
+ """
+ raise S3NotImplemented()
+
+ @staticmethod
+ def from_elem(elem):
+ type = elem.get('{%s}type' % XMLNS_XSI)
+ if type == 'CanonicalUser':
+ value = elem.find('./ID').text
+ return User(value)
+ elif type == 'Group':
+ value = elem.find('./URI').text
+ subclass = get_group_subclass_from_uri(value)
+ return subclass()
+ elif type == 'AmazonCustomerByEmail':
+ raise S3NotImplemented()
+ else:
+ raise MalformedACLError()
+
+ @staticmethod
+ def from_header(grantee):
+ """
+ Convert a grantee string in the HTTP header to a Grantee instance.
+ """
+ type, value = grantee.split('=', 1)
+ value = value.strip('"\'')
+ if type == 'id':
+ return User(value)
+ elif type == 'emailAddress':
+ raise S3NotImplemented()
+ elif type == 'uri':
+ # return a subclass instance of Group class
+ subclass = get_group_subclass_from_uri(value)
+ return subclass()
+ else:
+ raise InvalidArgument(type, value,
+ 'Argument format not recognized')
+
+
+class User(Grantee):
+ """
+ Canonical user class for S3 accounts.
+ """
+ type = 'CanonicalUser'
+
+ def __init__(self, name):
+ self.id = name
+ self.display_name = name
+
+ def __contains__(self, key):
+ return key == self.id
+
+ def elem(self):
+ elem = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
+ elem.set('{%s}type' % XMLNS_XSI, self.type)
+ SubElement(elem, 'ID').text = self.id
+ SubElement(elem, 'DisplayName').text = self.display_name
+ return elem
+
+ def __str__(self):
+ return self.display_name
+
+
+class Owner(object):
+ """
+ Owner class for S3 accounts
+ """
+ def __init__(self, id, name):
+ self.id = id
+ self.name = name
+
+
+def get_group_subclass_from_uri(uri):
+ """
+ Convert a URI to one of the predefined groups.
+ """
+ for group in Group.__subclasses__(): # pylint: disable-msg=E1101
+ if group.uri == uri:
+ return group
+ raise InvalidArgument('uri', uri, 'Invalid group uri')
+
+
+class Group(Grantee):
+ """
+ Base class for Amazon S3 Predefined Groups
+ """
+ type = 'Group'
+ uri = ''
+
+ def __init__(self):
+ # __init__ intentionally does nothing; defined only for clarity
+ pass
+
+ def elem(self):
+ elem = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
+ elem.set('{%s}type' % XMLNS_XSI, self.type)
+ SubElement(elem, 'URI').text = self.uri
+
+ return elem
+
+ def __str__(self):
+ return self.__class__.__name__
+
+
+def canned_acl_grantees(bucket_owner, object_owner=None):
+ """
+ A set of predefined grants supported by AWS S3.
+ """
+ owner = object_owner or bucket_owner
+
+ return {
+ 'private': [
+ ('FULL_CONTROL', User(owner.name)),
+ ],
+ 'public-read': [
+ ('READ', AllUsers()),
+ ('FULL_CONTROL', User(owner.name)),
+ ],
+ 'public-read-write': [
+ ('READ', AllUsers()),
+ ('WRITE', AllUsers()),
+ ('FULL_CONTROL', User(owner.name)),
+ ],
+ 'authenticated-read': [
+ ('READ', AuthenticatedUsers()),
+ ('FULL_CONTROL', User(owner.name)),
+ ],
+ 'bucket-owner-read': [
+ ('READ', User(bucket_owner.name)),
+ ('FULL_CONTROL', User(owner.name)),
+ ],
+ 'bucket-owner-full-control': [
+ ('FULL_CONTROL', User(owner.name)),
+ ('FULL_CONTROL', User(bucket_owner.name)),
+ ],
+ 'log-delivery-write': [
+ ('WRITE', LogDelivery()),
+ ('READ_ACP', LogDelivery()),
+ ('FULL_CONTROL', User(owner.name)),
+ ],
+ }
+
+
+class AuthenticatedUsers(Group):
+ """
+ This group represents all AWS accounts. Access permission to this group
+ allows any AWS account to access the resource. However, all requests must
+ be signed (authenticated).
+ """
+ uri = 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
+
+ def __contains__(self, key):
+ # s3api handles only signed requests.
+ return True
+
+
+class AllUsers(Group):
+ """
+ Access permission to this group allows anyone to access the resource. The
+ requests can be signed (authenticated) or unsigned (anonymous). Unsigned
+ requests omit the Authentication header in the request.
+
+ Note: s3api regards unsigned requests as Swift API accesses, and bypasses
+ them to Swift. As a result, AllUsers behaves completely same as
+ AuthenticatedUsers.
+ """
+ uri = 'http://acs.amazonaws.com/groups/global/AllUsers'
+
+ def __contains__(self, key):
+ return True
+
+
+class LogDelivery(Group):
+ """
+ WRITE and READ_ACP permissions on a bucket enables this group to write
+ server access logs to the bucket.
+ """
+ uri = 'http://acs.amazonaws.com/groups/s3/LogDelivery'
+
+ def __contains__(self, key):
+ if ':' in key:
+ tenant, user = key.split(':', 1)
+ else:
+ user = key
+ return user == LOG_DELIVERY_USER
+
+
+class Grant(object):
+ """
+ Grant Class which includes both Grantee and Permission
+ """
+
+ def __init__(self, grantee, permission):
+ """
+ :param grantee: a grantee class or its subclass
+ :param permission: string
+ """
+ if permission.upper() not in PERMISSIONS:
+ raise S3NotImplemented()
+ if not isinstance(grantee, Grantee):
+ raise ValueError()
+ self.grantee = grantee
+ self.permission = permission
+
+ @classmethod
+ def from_elem(cls, elem):
+ """
+ Convert an ElementTree to a Grant instance
+ """
+ grantee = Grantee.from_elem(elem.find('./Grantee'))
+ permission = elem.find('./Permission').text
+ return cls(grantee, permission)
+
+ def elem(self):
+ """
+ Create an etree element.
+ """
+ elem = Element('Grant')
+ elem.append(self.grantee.elem())
+ SubElement(elem, 'Permission').text = self.permission
+
+ return elem
+
+ def allow(self, grantee, permission):
+ return permission == self.permission and grantee in self.grantee
+
+
+class ACL(object):
+ """
+ S3 ACL class.
+
+ Refs (S3 API - acl-overview:
+ http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html):
+
+ The sample ACL includes an Owner element identifying the owner via the
+ AWS account's canonical user ID. The Grant element identifies the grantee
+ (either an AWS account or a predefined group), and the permission granted.
+ This default ACL has one Grant element for the owner. You grant permissions
+ by adding Grant elements, each grant identifying the grantee and the
+ permission.
+ """
+ metadata_name = 'acl'
+ root_tag = 'AccessControlPolicy'
+ max_xml_length = 200 * 1024
+
+ def __init__(self, owner, grants=None, s3_acl=False, allow_no_owner=False):
+ """
+ :param owner: Owner instance for ACL instance
+ :param grants: a list of Grant instances
+ :param s3_acl: boolean indicates whether this class is used under
+ s3_acl is True or False (from s3api middleware configuration)
+ :param allow_no_owner: boolean indicates this ACL instance can be
+ handled when no owner information found
+ """
+ self.owner = owner
+ self.grants = grants or []
+ self.s3_acl = s3_acl
+ self.allow_no_owner = allow_no_owner
+
+ def __repr__(self):
+ return tostring(self.elem())
+
+ @classmethod
+ def from_elem(cls, elem, s3_acl=False, allow_no_owner=False):
+ """
+ Convert an ElementTree to an ACL instance
+ """
+ id = elem.find('./Owner/ID').text
+ try:
+ name = elem.find('./Owner/DisplayName').text
+ except AttributeError:
+ name = id
+
+ grants = [Grant.from_elem(e)
+ for e in elem.findall('./AccessControlList/Grant')]
+ return cls(Owner(id, name), grants, s3_acl, allow_no_owner)
+
+ def elem(self):
+ """
+ Create an etree element of this ACL instance.
+ """
+ elem = Element(self.root_tag)
+
+ owner = SubElement(elem, 'Owner')
+ SubElement(owner, 'ID').text = self.owner.id
+ SubElement(owner, 'DisplayName').text = self.owner.name
+
+ SubElement(elem, 'AccessControlList').extend(
+ g.elem() for g in self.grants
+ )
+
+ return elem
+
+ def check_owner(self, user_id):
+ """
+ Check that the user is an owner.
+ """
+ if not self.s3_acl:
+ # Ignore S3api ACL.
+ return
+
+ if not self.owner.id:
+ if self.allow_no_owner:
+ # No owner means public.
+ return
+ raise AccessDenied()
+
+ if user_id != self.owner.id:
+ raise AccessDenied()
+
+ def check_permission(self, user_id, permission):
+ """
+ Check that the user has a permission.
+ """
+ if not self.s3_acl:
+ # Ignore S3api ACL.
+ return
+
+ try:
+ # owners have full control permission
+ self.check_owner(user_id)
+ return
+ except AccessDenied:
+ pass
+
+ if permission in PERMISSIONS:
+ for g in self.grants:
+ if g.allow(user_id, 'FULL_CONTROL') or \
+ g.allow(user_id, permission):
+ return
+
+ raise AccessDenied()
+
+ @classmethod
+ def from_headers(cls, headers, bucket_owner, object_owner=None,
+ as_private=True):
+ """
+ Convert HTTP headers to an ACL instance.
+ """
+ grants = []
+ try:
+ for key, value in headers.items():
+ if key.lower().startswith('x-amz-grant-'):
+ permission = key[len('x-amz-grant-'):]
+ permission = permission.upper().replace('-', '_')
+ if permission not in PERMISSIONS:
+ continue
+ for grantee in value.split(','):
+ grants.append(
+ Grant(Grantee.from_header(grantee), permission))
+
+ if 'x-amz-acl' in headers:
+ try:
+ acl = headers['x-amz-acl']
+ if len(grants) > 0:
+ err_msg = 'Specifying both Canned ACLs and Header ' \
+ 'Grants is not allowed'
+ raise InvalidRequest(err_msg)
+ grantees = canned_acl_grantees(
+ bucket_owner, object_owner)[acl]
+ for permission, grantee in grantees:
+ grants.append(Grant(grantee, permission))
+ except KeyError:
+ # expected: the canned_acl_grantees() lookup raised KeyError
+ raise InvalidArgument('x-amz-acl', headers['x-amz-acl'])
+ except (KeyError, ValueError):
+ # TODO: reconsider whether catching this exception sequence is right
+ raise InvalidRequest()
+
+ if len(grants) == 0:
+ # No ACL headers
+ if as_private:
+ return ACLPrivate(bucket_owner, object_owner)
+ else:
+ return None
+
+ return cls(object_owner or bucket_owner, grants)
+
+
+class CannedACL(object):
+ """
+ A dict-like object that returns canned ACL.
+ """
+ def __getitem__(self, key):
+ def acl(key, bucket_owner, object_owner=None,
+ s3_acl=False, allow_no_owner=False):
+ grants = []
+ grantees = canned_acl_grantees(bucket_owner, object_owner)[key]
+ for permission, grantee in grantees:
+ grants.append(Grant(grantee, permission))
+ return ACL(object_owner or bucket_owner,
+ grants, s3_acl, allow_no_owner)
+
+ return partial(acl, key)
+
+
+canned_acl = CannedACL()
+
+ACLPrivate = canned_acl['private']
+ACLPublicRead = canned_acl['public-read']
+ACLPublicReadWrite = canned_acl['public-read-write']
+ACLAuthenticatedRead = canned_acl['authenticated-read']
+ACLBucketOwnerRead = canned_acl['bucket-owner-read']
+ACLBucketOwnerFullControl = canned_acl['bucket-owner-full-control']
+ACLLogDeliveryWrite = canned_acl['log-delivery-write']
diff --git a/swift/common/middleware/s3api/utils.py b/swift/common/middleware/s3api/utils.py
new file mode 100644
index 000000000..813d97918
--- /dev/null
+++ b/swift/common/middleware/s3api/utils.py
@@ -0,0 +1,190 @@
+# Copyright (c) 2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import calendar
+import email.utils
+import re
+import time
+import uuid
+
+# Need for check_path_header
+from swift.common import utils
+
+MULTIUPLOAD_SUFFIX = '+segments'
+
+
+def sysmeta_prefix(resource):
+ """
+ Returns the system metadata prefix for given resource type.
+ """
+ if resource.lower() == 'object':
+ return 'x-object-sysmeta-s3api-'
+ else:
+ return 'x-container-sysmeta-s3api-'
+
+
+def sysmeta_header(resource, name):
+ """
+ Returns the system metadata header for given resource type and name.
+ """
+ return sysmeta_prefix(resource) + name
+
+
+def camel_to_snake(camel):
+ return re.sub('(.)([A-Z])', r'\1_\2', camel).lower()
+
+
+def snake_to_camel(snake):
+ return snake.title().replace('_', '')
+
+
+def unique_id():
+ return base64.urlsafe_b64encode(str(uuid.uuid4()))
+
+
+def utf8encode(s):
+ if isinstance(s, unicode):
+ s = s.encode('utf8')
+ return s
+
+
+def utf8decode(s):
+ if isinstance(s, str):
+ s = s.decode('utf8')
+ return s
+
+
+def validate_bucket_name(name, dns_compliant_bucket_names):
+ """
+ Validates the name of the bucket against S3 criteria,
+ http://docs.amazonwebservices.com/AmazonS3/latest/BucketRestrictions.html
+ True is valid, False is invalid.
+ """
+ valid_chars = '-.a-z0-9'
+ if not dns_compliant_bucket_names:
+ valid_chars += 'A-Z_'
+ max_len = 63 if dns_compliant_bucket_names else 255
+
+ if len(name) < 3 or len(name) > max_len or not name[0].isalnum():
+ # Bucket names should be between 3 and 63 (or 255) characters long
+ # Bucket names must start with a letter or a number
+ return False
+ elif dns_compliant_bucket_names and (
+ '.-' in name or '-.' in name or '..' in name or
+ not name[-1].isalnum()):
+ # Bucket names cannot contain dashes next to periods
+ # Bucket names cannot contain two adjacent periods
+ # Bucket names must end with a letter or a number
+ return False
+ elif name.endswith('.'):
+ # Bucket names must not end with dot
+ return False
+ elif re.match("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.)"
+ "{3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
+ name):
+ # Bucket names cannot be formatted as an IP Address
+ return False
+ elif not re.match("^[%s]*$" % valid_chars, name):
+ # Bucket names can contain lowercase letters, numbers, and hyphens.
+ return False
+ else:
+ return True
+
+
+class S3Timestamp(utils.Timestamp):
+ @property
+ def s3xmlformat(self):
+ return self.isoformat[:-7] + '.000Z'
+
+ @property
+ def amz_date_format(self):
+ """
+ this format should be like 'YYYYMMDDThhmmssZ'
+ """
+ return self.isoformat.replace(
+ '-', '').replace(':', '')[:-7] + 'Z'
+
+ @classmethod
+ def now(cls):
+ return cls(time.time())
+
+
+def mktime(timestamp_str, time_format='%Y-%m-%dT%H:%M:%S'):
+ """
+ mktime creates a float instance in epoch time, much like time.mktime
+
+ the difference from time.mktime is that it accepts two timestamp string
+ formats for the argument, for S3 testing usage.
+ TODO: support
+
+ :param timestamp_str: a string of timestamp formatted as
+ (a) RFC2822 (e.g. date header)
+ (b) %Y-%m-%dT%H:%M:%S (e.g. copy result)
+ :param time_format: a string of format to parse in (b) process
+ :return : a float instance in epoch time
+ """
+ # time_tuple is the *remote* local time
+ time_tuple = email.utils.parsedate_tz(timestamp_str)
+ if time_tuple is None:
+ time_tuple = time.strptime(timestamp_str, time_format)
+ # add timezone info as utc (no time difference)
+ time_tuple += (0, )
+
+ # We prefer calendar.gmtime and a manual adjustment over
+ # email.utils.mktime_tz because older versions of Python (<2.7.4) may
+ # double-adjust for timezone in some situations (such when swift changes
+ # os.environ['TZ'] without calling time.tzset()).
+ epoch_time = calendar.timegm(time_tuple) - time_tuple[9]
+
+ return epoch_time
+
+
+class Config(dict):
+ def __init__(self, base=None):
+ if base is not None:
+ self.update(base)
+
+ def __getattr__(self, name):
+ if name not in self:
+ raise AttributeError("No attribute '%s'" % name)
+
+ return self[name]
+
+ def __setattr__(self, name, value):
+ self[name] = value
+
+ def __delattr__(self, name):
+ del self[name]
+
+ def update(self, other):
+ if hasattr(other, 'keys'):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
+
+ def __setitem__(self, key, value):
+ if isinstance(self.get(key), bool):
+ dict.__setitem__(self, key, utils.config_true_value(value))
+ elif isinstance(self.get(key), int):
+ try:
+ dict.__setitem__(self, key, int(value))
+ except ValueError:
+ if value: # No need to raise the error if value is ''
+ raise
+ else:
+ dict.__setitem__(self, key, value)
diff --git a/swift/common/middleware/tempauth.py b/swift/common/middleware/tempauth.py
index 55fc65272..646edc437 100644
--- a/swift/common/middleware/tempauth.py
+++ b/swift/common/middleware/tempauth.py
@@ -273,7 +273,7 @@ class TempAuth(object):
return self.app(env, start_response)
if env.get('PATH_INFO', '').startswith(self.auth_prefix):
return self.handle(env, start_response)
- s3 = env.get('swift3.auth_details')
+ s3 = env.get('s3api.auth_details')
token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN'))
service_token = env.get('HTTP_X_SERVICE_TOKEN')
if s3 or (token and token.startswith(self.reseller_prefix)):
@@ -435,7 +435,7 @@ class TempAuth(object):
else:
groups = groups.encode('utf8')
- s3_auth_details = env.get('swift3.auth_details')
+ s3_auth_details = env.get('s3api.auth_details')
if s3_auth_details:
if 'check_signature' not in s3_auth_details:
self.logger.warning(
diff --git a/test-requirements.txt b/test-requirements.txt
index dc1e5dabc..5f515bd31 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -12,6 +12,12 @@ os-testr>=0.8.0 # Apache-2.0
mock>=2.0 # BSD
python-swiftclient
python-keystoneclient!=2.1.0,>=2.0.0 # Apache-2.0
+reno>=1.8.0 # Apache-2.0
+python-openstackclient
+boto
+requests-mock>=1.2.0 # Apache-2.0
+fixtures>=3.0.0 # Apache-2.0/BSD
+keystonemiddleware>=4.17.0 # Apache-2.0
# Security checks
bandit>=1.1.0 # Apache-2.0
diff --git a/test/functional/__init__.py b/test/functional/__init__.py
index 32f3c4a8d..7f69a7903 100644
--- a/test/functional/__init__.py
+++ b/test/functional/__init__.py
@@ -412,6 +412,46 @@ def _load_domain_remap_staticweb(proxy_conf_file, swift_conf_file, **kwargs):
return test_conf_file, swift_conf_file
+def _load_s3api(proxy_conf_file, swift_conf_file, **kwargs):
+ """
+ Load s3api configuration and override proxy-server.conf contents.
+
+ :param proxy_conf_file: Source proxy conf filename
+ :param swift_conf_file: Source swift conf filename
+ :returns: Tuple of paths to the proxy conf file and swift conf file to use
+ :raises InProcessException: raised if proxy conf contents are invalid
+ """
+ _debug('Setting configuration for s3api')
+
+ # The global conf dict cannot be used to modify the pipeline.
+ # The pipeline loader requires the pipeline to be set in the local_conf.
+ # If pipeline is set in the global conf dict (which in turn populates the
+ # DEFAULTS options) then it prevents pipeline being loaded into the local
+ # conf during wsgi load_app.
+ # Therefore we must modify the [pipeline:main] section.
+
+ conf = ConfigParser()
+ conf.read(proxy_conf_file)
+ try:
+ section = 'pipeline:main'
+ pipeline = conf.get(section, 'pipeline')
+ pipeline = pipeline.replace(
+ "tempauth",
+ "s3api tempauth")
+ conf.set(section, 'pipeline', pipeline)
+ conf.set('filter:s3api', 's3_acl', 'true')
+ except NoSectionError as err:
+ msg = 'Error problem with proxy conf file %s: %s' % \
+ (proxy_conf_file, err)
+ raise InProcessException(msg)
+
+ test_conf_file = os.path.join(_testdir, 'proxy-server.conf')
+ with open(test_conf_file, 'w') as fp:
+ conf.write(fp)
+
+ return test_conf_file, swift_conf_file
+
+
# Mapping from possible values of the variable
# SWIFT_TEST_IN_PROCESS_CONF_LOADER
# to the method to call for loading the associated configuration
@@ -421,6 +461,7 @@ conf_loaders = {
'encryption': _load_encryption,
'ec': _load_ec_as_default_policy,
'domain_remap_staticweb': _load_domain_remap_staticweb,
+ 's3api': _load_s3api,
}
@@ -520,6 +561,12 @@ def in_process_setup(the_object_server=object_server):
'account_autocreate': 'true',
'allow_versions': 'True',
'allow_versioned_writes': 'True',
+ # TODO: move this into s3api config loader because they are
+ # required by only s3api
+ 'allowed_headers':
+ "Content-Disposition, Content-Encoding, X-Delete-At, "
+ "X-Object-Manifest, X-Static-Large-Object, Cache-Control, "
+ "Content-Language, Expires, X-Robots-Tag",
# Below are values used by the functional test framework, as well as
# by the various in-process swift servers
'auth_host': '127.0.0.1',
@@ -531,6 +578,8 @@ def in_process_setup(the_object_server=object_server):
'account': 'test',
'username': 'tester',
'password': 'testing',
+ 's3_access_key': 'test:tester',
+ 's3_secret_key': 'testing',
# User on a second account (needs admin access to the account)
'account2': 'test2',
'username2': 'tester2',
@@ -538,6 +587,8 @@ def in_process_setup(the_object_server=object_server):
# User on same account as first, but without admin access
'username3': 'tester3',
'password3': 'testing3',
+ 's3_access_key2': 'test:tester3',
+ 's3_secret_key2': 'testing3',
# Service user and prefix (emulates glance, cinder, etc. user)
'account5': 'test5',
'username5': 'tester5',
diff --git a/test/functional/s3api/__init__.py b/test/functional/s3api/__init__.py
new file mode 100644
index 000000000..9fcd0a201
--- /dev/null
+++ b/test/functional/s3api/__init__.py
@@ -0,0 +1,61 @@
+# Copyright (c) 2011-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest2
+import traceback
+import test.functional as tf
+from test.functional.s3api.s3_test_client import Connection
+
+
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
+class S3ApiBase(unittest2.TestCase):
+ def __init__(self, method_name):
+ super(S3ApiBase, self).__init__(method_name)
+ self.method_name = method_name
+
+ def setUp(self):
+ if 's3api' not in tf.cluster_info:
+ raise tf.SkipTest('s3api middleware is not enabled')
+ try:
+ self.conn = Connection()
+ self.conn.reset()
+ except Exception:
+ message = '%s got an error during initialize process.\n\n%s' % \
+ (self.method_name, traceback.format_exc())
+ # TODO: Find a way to make this go to FAIL instead of Error
+ self.fail(message)
+
+ def assertCommonResponseHeaders(self, headers, etag=None):
+ """
+ asserting common response headers with args
+ :param headers: a dict of response headers
+ :param etag: a string of md5(content).hexdigest() if not given,
+ this won't assert anything about etag. (e.g. DELETE obj)
+ """
+ self.assertTrue(headers['x-amz-id-2'] is not None)
+ self.assertTrue(headers['x-amz-request-id'] is not None)
+ self.assertTrue(headers['date'] is not None)
+ # TODO: requires consideration
+ # self.assertTrue(headers['server'] is not None)
+ if etag is not None:
+ self.assertTrue('etag' in headers) # sanity
+ self.assertEqual(etag, headers['etag'].strip('"'))
diff --git a/test/functional/s3api/s3_test_client.py b/test/functional/s3api/s3_test_client.py
new file mode 100644
index 000000000..993bbdc9a
--- /dev/null
+++ b/test/functional/s3api/s3_test_client.py
@@ -0,0 +1,139 @@
+# Copyright (c) 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import test.functional as tf
+from boto.s3.connection import S3Connection, OrdinaryCallingFormat, \
+ BotoClientError, S3ResponseError
+
+RETRY_COUNT = 3
+
+
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
+class Connection(object):
+ """
+ Connection class used for S3 functional testing.
+ """
+ def __init__(self, aws_access_key='test:tester',
+ aws_secret_key='testing',
+ user_id='test:tester'):
+ """
+ Initialize method.
+
+ :param aws_access_key: a string of aws access key
+ :param aws_secret_key: a string of aws secret key
+ :param user_id: a string consisting of TENANT and USER names, used for
+ asserting the Owner ID (not required by S3Connection)
+
+ By default, the Connection class is initialized as the tester user,
+ which behaves as:
+ user_test_tester = testing .admin
+
+ """
+ self.aws_access_key = aws_access_key
+ self.aws_secret_key = aws_secret_key
+ self.user_id = user_id
+ # NOTE: auth_host and auth_port can be different from storage location
+ self.host = tf.config['auth_host']
+ self.port = int(tf.config['auth_port'])
+ self.conn = \
+ S3Connection(aws_access_key, aws_secret_key, is_secure=False,
+ host=self.host, port=self.port,
+ calling_format=OrdinaryCallingFormat())
+ self.conn.auth_region_name = 'US'
+
+ def reset(self):
+ """
+ Reset the Swift environment to keep it clean. After calling this
+ method, we can assume the backend Swift keeps no containers and no
+ objects on this connection's account.
+ """
+ exceptions = []
+ for i in range(RETRY_COUNT):
+ try:
+ buckets = self.conn.get_all_buckets()
+ if not buckets:
+ break
+
+ for bucket in buckets:
+ try:
+ for upload in bucket.list_multipart_uploads():
+ upload.cancel_upload()
+
+ for obj in bucket.list():
+ bucket.delete_key(obj.name)
+
+ self.conn.delete_bucket(bucket.name)
+ except S3ResponseError as e:
+ # 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
+ if e.status != 404:
+ raise
+ except (BotoClientError, S3ResponseError) as e:
+ exceptions.append(e)
+ if exceptions:
+ # raise the first exception
+ raise exceptions.pop(0)
+
+ def make_request(self, method, bucket='', obj='', headers=None, body='',
+ query=None):
+ """
+ Wrapper method of S3Connection.make_request.
+
+ :param method: a string of HTTP request method
+ :param bucket: a string of bucket name
+ :param obj: a string of object name
+ :param headers: a dictionary of headers
+ :param body: a string of data binary sent to S3 as a request body
+ :param query: a string of HTTP query argument
+
+ :returns: a tuple of (int(status_code), headers dict, response body)
+ """
+ response = \
+ self.conn.make_request(method, bucket=bucket, key=obj,
+ headers=headers, data=body,
+ query_args=query, sender=None,
+ override_num_retries=RETRY_COUNT,
+ retry_handler=None)
+ return response.status, dict(response.getheaders()), response.read()
+
+ def generate_url_and_headers(self, method, bucket='', obj='',
+ expires_in=3600):
+ url = self.conn.generate_url(expires_in, method, bucket, obj)
+ if os.environ.get('S3_USE_SIGV4') == "True":
+ # V4 signatures are known-broken in boto, but we can work around it
+ if url.startswith('https://'):
+ url = 'http://' + url[8:]
+ return url, {'Host': '%(host)s:%(port)d:%(port)d' % {
+ 'host': self.host, 'port': self.port}}
+ return url, {}
+
+
+# TODO: determine where this function is used
+def get_admin_connection():
+ """
+ Return tester connection behaves as:
+ user_test_admin = admin .admin
+ """
+ aws_access_key = tf.config['s3_access_key']
+ aws_secret_key = tf.config['s3_secret_key']
+ user_id = tf.config['s3_access_key']
+ return Connection(aws_access_key, aws_secret_key, user_id)
diff --git a/test/functional/s3api/test_acl.py b/test/functional/s3api/test_acl.py
new file mode 100644
index 000000000..33b01c628
--- /dev/null
+++ b/test/functional/s3api/test_acl.py
@@ -0,0 +1,156 @@
+# Copyright (c) 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest2
+import os
+import test.functional as tf
+from swift.common.middleware.s3api.etree import fromstring
+from test.functional.s3api import S3ApiBase
+from test.functional.s3api.s3_test_client import Connection
+from test.functional.s3api.utils import get_error_code
+
+
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
+class TestS3Acl(S3ApiBase):
+    def setUp(self):
+        super(TestS3Acl, self).setUp()
+        self.bucket = 'bucket'
+        self.obj = 'object'
+        if 's3_access_key2' not in tf.config or \
+                's3_secret_key2' not in tf.config:
+            raise tf.SkipTest(
+                'TestS3Acl requires s3_access_key2 and s3_secret_key2 setting')
+        self.conn.make_request('PUT', self.bucket)  # pre-create bucket owned by the primary account
+        access_key2 = tf.config['s3_access_key2']
+        secret_key2 = tf.config['s3_secret_key2']
+        self.conn2 = Connection(access_key2, secret_key2, access_key2)  # non-owner account, used to hit AccessDenied paths
+
+ def test_acl(self):
+ self.conn.make_request('PUT', self.bucket, self.obj)
+ query = 'acl'
+
+ # PUT Bucket ACL
+ headers = {'x-amz-acl': 'public-read'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, headers=headers,
+ query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self.assertEqual(headers['content-length'], '0')
+
+ # GET Bucket ACL
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ # TODO: Fix the response that last-modified must be in the response.
+ # self.assertTrue(headers['last-modified'] is not None)
+ self.assertEqual(headers['content-length'], str(len(body)))
+ self.assertTrue(headers['content-type'] is not None)
+ elem = fromstring(body, 'AccessControlPolicy')
+ owner = elem.find('Owner')
+ self.assertEqual(owner.find('ID').text, self.conn.user_id)
+ self.assertEqual(owner.find('DisplayName').text, self.conn.user_id)
+ acl = elem.find('AccessControlList')
+ self.assertTrue(acl.find('Grant') is not None)
+
+ # GET Object ACL
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, self.obj, query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ # TODO: Fix the response that last-modified must be in the response.
+ # self.assertTrue(headers['last-modified'] is not None)
+ self.assertEqual(headers['content-length'], str(len(body)))
+ self.assertTrue(headers['content-type'] is not None)
+ elem = fromstring(body, 'AccessControlPolicy')
+ owner = elem.find('Owner')
+ self.assertEqual(owner.find('ID').text, self.conn.user_id)
+ self.assertEqual(owner.find('DisplayName').text, self.conn.user_id)
+ acl = elem.find('AccessControlList')
+ self.assertTrue(acl.find('Grant') is not None)
+
+ def test_put_bucket_acl_error(self):
+ req_headers = {'x-amz-acl': 'public-read'}
+ aws_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ aws_error_conn.make_request('PUT', self.bucket,
+ headers=req_headers, query='acl')
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+ status, headers, body = \
+ self.conn.make_request('PUT', 'nothing',
+ headers=req_headers, query='acl')
+ self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+ status, headers, body = \
+ self.conn2.make_request('PUT', self.bucket,
+ headers=req_headers, query='acl')
+ self.assertEqual(get_error_code(body), 'AccessDenied')
+
+ def test_get_bucket_acl_error(self):
+ aws_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ aws_error_conn.make_request('GET', self.bucket, query='acl')
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+ status, headers, body = \
+ self.conn.make_request('GET', 'nothing', query='acl')
+ self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+ status, headers, body = \
+ self.conn2.make_request('GET', self.bucket, query='acl')
+ self.assertEqual(get_error_code(body), 'AccessDenied')
+
+ def test_get_object_acl_error(self):
+ self.conn.make_request('PUT', self.bucket, self.obj)
+
+ aws_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ aws_error_conn.make_request('GET', self.bucket, self.obj,
+ query='acl')
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, 'nothing', query='acl')
+ self.assertEqual(get_error_code(body), 'NoSuchKey')
+
+ status, headers, body = \
+ self.conn2.make_request('GET', self.bucket, self.obj, query='acl')
+ self.assertEqual(get_error_code(body), 'AccessDenied')
+
+
+class TestS3AclSigV4(TestS3Acl):
+ @classmethod
+ def setUpClass(cls):
+ os.environ['S3_USE_SIGV4'] = "True"
+
+ @classmethod
+ def tearDownClass(cls):
+ del os.environ['S3_USE_SIGV4']
+
+ def setUp(self):
+ super(TestS3AclSigV4, self).setUp()
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/test/functional/s3api/test_bucket.py b/test/functional/s3api/test_bucket.py
new file mode 100644
index 000000000..ce508b5c3
--- /dev/null
+++ b/test/functional/s3api/test_bucket.py
@@ -0,0 +1,487 @@
+# Copyright (c) 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest2
+import os
+
+import test.functional as tf
+from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
+ SubElement
+from test.functional.s3api import S3ApiBase
+from test.functional.s3api.s3_test_client import Connection
+from test.functional.s3api.utils import get_error_code
+
+
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
+class TestS3ApiBucket(S3ApiBase):
+ def setUp(self):
+ super(TestS3ApiBucket, self).setUp()
+
+ def _gen_location_xml(self, location):
+ elem = Element('CreateBucketConfiguration')
+ SubElement(elem, 'LocationConstraint').text = location
+ return tostring(elem)
+
+ def test_bucket(self):
+ bucket = 'bucket'
+ max_bucket_listing = tf.cluster_info['s3api'].get(
+ 'max_bucket_listing', 1000)
+
+ # PUT Bucket
+ status, headers, body = self.conn.make_request('PUT', bucket)
+ self.assertEqual(status, 200)
+
+ self.assertCommonResponseHeaders(headers)
+ self.assertIn(headers['location'], (
+ '/' + bucket, # swob won't touch it...
+ # but webob (which we get because of auth_token) *does*
+ 'http://%s%s/%s' % (
+ self.conn.host,
+ '' if self.conn.port == 80 else ':%d' % self.conn.port,
+ bucket),
+ # This is all based on the Host header the client provided,
+ # and boto will double-up ports for sig v4. See
+ # - https://github.com/boto/boto/issues/2623
+ # - https://github.com/boto/boto/issues/3716
+ # with proposed fixes at
+ # - https://github.com/boto/boto/pull/3513
+ # - https://github.com/boto/boto/pull/3676
+ 'http://%s%s:%d/%s' % (
+ self.conn.host,
+ '' if self.conn.port == 80 else ':%d' % self.conn.port,
+ self.conn.port,
+ bucket),
+ ))
+ self.assertEqual(headers['content-length'], '0')
+
+ # GET Bucket(Without Object)
+ status, headers, body = self.conn.make_request('GET', bucket)
+ self.assertEqual(status, 200)
+
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue(headers['content-type'] is not None)
+ self.assertEqual(headers['content-length'], str(len(body)))
+ # TODO; requires consideration
+ # self.assertEqual(headers['transfer-encoding'], 'chunked')
+
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('Name').text, bucket)
+ self.assertIsNone(elem.find('Prefix').text)
+ self.assertIsNone(elem.find('Marker').text)
+ self.assertEqual(
+ elem.find('MaxKeys').text, str(max_bucket_listing))
+ self.assertEqual(elem.find('IsTruncated').text, 'false')
+ objects = elem.findall('./Contents')
+ self.assertEqual(list(objects), [])
+
+ # GET Bucket(With Object)
+ req_objects = ('object', 'object2')
+ for obj in req_objects:
+ self.conn.make_request('PUT', bucket, obj)
+ status, headers, body = self.conn.make_request('GET', bucket)
+ self.assertEqual(status, 200)
+
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('Name').text, bucket)
+ self.assertIsNone(elem.find('Prefix').text)
+ self.assertIsNone(elem.find('Marker').text)
+ self.assertEqual(elem.find('MaxKeys').text,
+ str(max_bucket_listing))
+ self.assertEqual(elem.find('IsTruncated').text, 'false')
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), 2)
+ for o in resp_objects:
+ self.assertTrue(o.find('Key').text in req_objects)
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertTrue(o.find('StorageClass').text is not None)
+ self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
+ self.assertTrue(o.find('Owner/DisplayName').text,
+ self.conn.user_id)
+
+ # HEAD Bucket
+ status, headers, body = self.conn.make_request('HEAD', bucket)
+ self.assertEqual(status, 200)
+
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue(headers['content-type'] is not None)
+ self.assertEqual(headers['content-length'], str(len(body)))
+ # TODO; requires consideration
+ # self.assertEqual(headers['transfer-encoding'], 'chunked')
+
+ # DELETE Bucket
+ for obj in req_objects:
+ self.conn.make_request('DELETE', bucket, obj)
+ status, headers, body = self.conn.make_request('DELETE', bucket)
+ self.assertEqual(status, 204)
+
+ self.assertCommonResponseHeaders(headers)
+
+ def test_put_bucket_error(self):
+ status, headers, body = \
+ self.conn.make_request('PUT', 'bucket+invalid')
+ self.assertEqual(get_error_code(body), 'InvalidBucketName')
+
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = auth_error_conn.make_request('PUT', 'bucket')
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+ self.conn.make_request('PUT', 'bucket')
+ status, headers, body = self.conn.make_request('PUT', 'bucket')
+ self.assertEqual(get_error_code(body), 'BucketAlreadyExists')
+
+ def test_put_bucket_with_LocationConstraint(self):
+ bucket = 'bucket'
+ xml = self._gen_location_xml('US')
+ status, headers, body = \
+ self.conn.make_request('PUT', bucket, body=xml)
+ self.assertEqual(status, 200)
+
+ def test_get_bucket_error(self):
+ self.conn.make_request('PUT', 'bucket')
+
+ status, headers, body = \
+ self.conn.make_request('GET', 'bucket+invalid')
+ self.assertEqual(get_error_code(body), 'InvalidBucketName')
+
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = auth_error_conn.make_request('GET', 'bucket')
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+ status, headers, body = self.conn.make_request('GET', 'nothing')
+ self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+ def _prepare_test_get_bucket(self, bucket, objects):
+ self.conn.make_request('PUT', bucket)
+ for obj in objects:
+ self.conn.make_request('PUT', bucket, obj)
+
+ def test_get_bucket_with_delimiter(self):
+ bucket = 'bucket'
+ put_objects = ('object', 'object2', 'subdir/object', 'subdir2/object',
+ 'dir/subdir/object')
+ self._prepare_test_get_bucket(bucket, put_objects)
+
+ delimiter = '/'
+ query = 'delimiter=%s' % delimiter
+ expect_objects = ('object', 'object2')
+ expect_prefixes = ('dir/', 'subdir/', 'subdir2/')
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('Delimiter').text, delimiter)
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), len(expect_objects))
+ for i, o in enumerate(resp_objects):
+ self.assertEqual(o.find('Key').text, expect_objects[i])
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertEqual(o.find('StorageClass').text, 'STANDARD')
+ self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
+ self.assertTrue(o.find('Owner/DisplayName').text,
+ self.conn.user_id)
+ resp_prefixes = elem.findall('CommonPrefixes')
+ self.assertEqual(len(resp_prefixes), len(expect_prefixes))
+ for i, p in enumerate(resp_prefixes):
+ self.assertEqual(p.find('./Prefix').text, expect_prefixes[i])
+
+ def test_get_bucket_with_encoding_type(self):
+ bucket = 'bucket'
+ put_objects = ('object', 'object2')
+ self._prepare_test_get_bucket(bucket, put_objects)
+
+ encoding_type = 'url'
+ query = 'encoding-type=%s' % encoding_type
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('EncodingType').text, encoding_type)
+
+ def test_get_bucket_with_marker(self):
+ bucket = 'bucket'
+ put_objects = ('object', 'object2', 'subdir/object', 'subdir2/object',
+ 'dir/subdir/object')
+ self._prepare_test_get_bucket(bucket, put_objects)
+
+ marker = 'object'
+ query = 'marker=%s' % marker
+ expect_objects = ('object2', 'subdir/object', 'subdir2/object')
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('Marker').text, marker)
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), len(expect_objects))
+ for i, o in enumerate(resp_objects):
+ self.assertEqual(o.find('Key').text, expect_objects[i])
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertEqual(o.find('StorageClass').text, 'STANDARD')
+ self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
+ self.assertTrue(o.find('Owner/DisplayName').text,
+ self.conn.user_id)
+
+ def test_get_bucket_with_max_keys(self):
+ bucket = 'bucket'
+ put_objects = ('object', 'object2', 'subdir/object', 'subdir2/object',
+ 'dir/subdir/object')
+ self._prepare_test_get_bucket(bucket, put_objects)
+
+ max_keys = '2'
+ query = 'max-keys=%s' % max_keys
+ expect_objects = ('dir/subdir/object', 'object')
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('MaxKeys').text, max_keys)
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), len(expect_objects))
+ for i, o in enumerate(resp_objects):
+ self.assertEqual(o.find('Key').text, expect_objects[i])
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertEqual(o.find('StorageClass').text, 'STANDARD')
+ self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
+ self.assertTrue(o.find('Owner/DisplayName').text,
+ self.conn.user_id)
+
+ def test_get_bucket_with_prefix(self):
+ bucket = 'bucket'
+ req_objects = ('object', 'object2', 'subdir/object', 'subdir2/object',
+ 'dir/subdir/object')
+ self._prepare_test_get_bucket(bucket, req_objects)
+
+ prefix = 'object'
+ query = 'prefix=%s' % prefix
+ expect_objects = ('object', 'object2')
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('Prefix').text, prefix)
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), len(expect_objects))
+ for i, o in enumerate(resp_objects):
+ self.assertEqual(o.find('Key').text, expect_objects[i])
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertEqual(o.find('StorageClass').text, 'STANDARD')
+ self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
+ self.assertTrue(o.find('Owner/DisplayName').text,
+ self.conn.user_id)
+
+ def test_get_bucket_v2_with_start_after(self):
+ bucket = 'bucket'
+ put_objects = ('object', 'object2', 'subdir/object', 'subdir2/object',
+ 'dir/subdir/object')
+ self._prepare_test_get_bucket(bucket, put_objects)
+
+ marker = 'object'
+ query = 'list-type=2&start-after=%s' % marker
+ expect_objects = ('object2', 'subdir/object', 'subdir2/object')
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('StartAfter').text, marker)
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), len(expect_objects))
+ for i, o in enumerate(resp_objects):
+ self.assertEqual(o.find('Key').text, expect_objects[i])
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertEqual(o.find('StorageClass').text, 'STANDARD')
+ self.assertIsNone(o.find('Owner/ID'))
+ self.assertIsNone(o.find('Owner/DisplayName'))
+
+ def test_get_bucket_v2_with_fetch_owner(self):
+ bucket = 'bucket'
+ put_objects = ('object', 'object2', 'subdir/object', 'subdir2/object',
+ 'dir/subdir/object')
+ self._prepare_test_get_bucket(bucket, put_objects)
+
+ query = 'list-type=2&fetch-owner=true'
+ expect_objects = ('dir/subdir/object', 'object', 'object2',
+ 'subdir/object', 'subdir2/object')
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('KeyCount').text, '5')
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), len(expect_objects))
+ for i, o in enumerate(resp_objects):
+ self.assertEqual(o.find('Key').text, expect_objects[i])
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertEqual(o.find('StorageClass').text, 'STANDARD')
+ self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
+ self.assertTrue(o.find('Owner/DisplayName').text,
+ self.conn.user_id)
+
+ def test_get_bucket_v2_with_continuation_token(self):
+ bucket = 'bucket'
+ put_objects = ('object', 'object2', 'subdir/object', 'subdir2/object',
+ 'dir/subdir/object')
+ self._prepare_test_get_bucket(bucket, put_objects)
+
+ query = 'list-type=2&max-keys=3'
+ expect_objects = ('dir/subdir/object', 'object', 'object2')
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('MaxKeys').text, '3')
+ self.assertEqual(elem.find('KeyCount').text, '3')
+ self.assertEqual(elem.find('IsTruncated').text, 'true')
+ next_cont_token_elem = elem.find('NextContinuationToken')
+ self.assertIsNotNone(next_cont_token_elem)
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), len(expect_objects))
+ for i, o in enumerate(resp_objects):
+ self.assertEqual(o.find('Key').text, expect_objects[i])
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertEqual(o.find('StorageClass').text, 'STANDARD')
+ self.assertIsNone(o.find('Owner/ID'))
+ self.assertIsNone(o.find('Owner/DisplayName'))
+
+ query = 'list-type=2&max-keys=3&continuation-token=%s' % \
+ next_cont_token_elem.text
+ expect_objects = ('subdir/object', 'subdir2/object')
+ status, headers, body = \
+ self.conn.make_request('GET', bucket, query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('MaxKeys').text, '3')
+ self.assertEqual(elem.find('KeyCount').text, '2')
+ self.assertEqual(elem.find('IsTruncated').text, 'false')
+ self.assertIsNone(elem.find('NextContinuationToken'))
+ cont_token_elem = elem.find('ContinuationToken')
+ self.assertEqual(cont_token_elem.text, next_cont_token_elem.text)
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), len(expect_objects))
+ for i, o in enumerate(resp_objects):
+ self.assertEqual(o.find('Key').text, expect_objects[i])
+ self.assertTrue(o.find('LastModified').text is not None)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertTrue(o.find('ETag').text is not None)
+ self.assertTrue(o.find('Size').text is not None)
+ self.assertEqual(o.find('StorageClass').text, 'STANDARD')
+ self.assertIsNone(o.find('Owner/ID'))
+ self.assertIsNone(o.find('Owner/DisplayName'))
+
+ def test_head_bucket_error(self):
+ self.conn.make_request('PUT', 'bucket')
+
+ status, headers, body = \
+ self.conn.make_request('HEAD', 'bucket+invalid')
+ self.assertEqual(status, 400)
+ self.assertEqual(body, '') # sanity
+
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ auth_error_conn.make_request('HEAD', 'bucket')
+ self.assertEqual(status, 403)
+ self.assertEqual(body, '') # sanity
+
+ status, headers, body = self.conn.make_request('HEAD', 'nothing')
+ self.assertEqual(status, 404)
+ self.assertEqual(body, '') # sanity
+
+ def test_delete_bucket_error(self):
+ status, headers, body = \
+ self.conn.make_request('DELETE', 'bucket+invalid')
+ self.assertEqual(get_error_code(body), 'InvalidBucketName')
+
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ auth_error_conn.make_request('DELETE', 'bucket')
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+ status, headers, body = self.conn.make_request('DELETE', 'bucket')
+ self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+ def test_bucket_invalid_method_error(self):
+ # non existed verb in the controller
+ status, headers, body = \
+ self.conn.make_request('GETPUT', 'bucket')
+ self.assertEqual(get_error_code(body), 'MethodNotAllowed')
+ # the method exists in the controller but deny as MethodNotAllowed
+ status, headers, body = \
+ self.conn.make_request('_delete_segments_bucket', 'bucket')
+ self.assertEqual(get_error_code(body), 'MethodNotAllowed')
+
+
+class TestS3ApiBucketSigV4(TestS3ApiBucket):
+ @classmethod
+ def setUpClass(cls):
+ os.environ['S3_USE_SIGV4'] = "True"
+
+ @classmethod
+ def tearDownClass(cls):
+ del os.environ['S3_USE_SIGV4']
+
+ def setUp(self):
+ super(TestS3ApiBucket, self).setUp()
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/test/functional/s3api/test_multi_delete.py b/test/functional/s3api/test_multi_delete.py
new file mode 100644
index 000000000..bd90166ce
--- /dev/null
+++ b/test/functional/s3api/test_multi_delete.py
@@ -0,0 +1,248 @@
+# Copyright (c) 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest2
+import os
+import test.functional as tf
+from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
+ SubElement
+from swift.common.middleware.s3api.controllers.multi_delete import \
+ MAX_MULTI_DELETE_BODY_SIZE
+
+from test.functional.s3api import S3ApiBase
+from test.functional.s3api.s3_test_client import Connection
+from test.functional.s3api.utils import get_error_code, calculate_md5
+
+
+def setUpModule():
+ tf.setup_package()
+
+
+def tearDownModule():
+ tf.teardown_package()
+
+
+class TestS3ApiMultiDelete(S3ApiBase):
+ def setUp(self):
+ super(TestS3ApiMultiDelete, self).setUp()
+
+ def _prepare_test_delete_multi_objects(self, bucket, objects):
+ self.conn.make_request('PUT', bucket)
+ for obj in objects:
+ self.conn.make_request('PUT', bucket, obj)
+
+ def _gen_multi_delete_xml(self, objects, quiet=None):
+ elem = Element('Delete')
+ if quiet:
+ SubElement(elem, 'Quiet').text = quiet
+ for key in objects:
+ obj = SubElement(elem, 'Object')
+ SubElement(obj, 'Key').text = key
+
+ return tostring(elem, use_s3ns=False)
+
+ def _gen_invalid_multi_delete_xml(self, hasObjectTag=False):
+ elem = Element('Delete')
+ if hasObjectTag:
+ obj = SubElement(elem, 'Object')
+ SubElement(obj, 'Key').text = ''
+
+ return tostring(elem, use_s3ns=False)
+
+ def test_delete_multi_objects(self):
+ bucket = 'bucket'
+ put_objects = ['obj%s' % var for var in xrange(4)]
+ self._prepare_test_delete_multi_objects(bucket, put_objects)
+ query = 'delete'
+
+ # Delete an object via MultiDelete API
+ req_objects = ['obj0']
+ xml = self._gen_multi_delete_xml(req_objects)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue(headers['content-type'] is not None)
+ self.assertEqual(headers['content-length'], str(len(body)))
+ elem = fromstring(body)
+ resp_objects = elem.findall('Deleted')
+ self.assertEqual(len(resp_objects), len(req_objects))
+ for o in resp_objects:
+ self.assertTrue(o.find('Key').text in req_objects)
+
+ # Delete 2 objects via MultiDelete API
+ req_objects = ['obj1', 'obj2']
+ xml = self._gen_multi_delete_xml(req_objects)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'DeleteResult')
+ resp_objects = elem.findall('Deleted')
+ self.assertEqual(len(resp_objects), len(req_objects))
+ for o in resp_objects:
+ self.assertTrue(o.find('Key').text in req_objects)
+
+ # Delete 2 objects via MultiDelete API but one (obj4) doesn't exist.
+ req_objects = ['obj3', 'obj4']
+ xml = self._gen_multi_delete_xml(req_objects)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'DeleteResult')
+ resp_objects = elem.findall('Deleted')
+ # S3 assumes a NoSuchKey object as deleted.
+ self.assertEqual(len(resp_objects), len(req_objects))
+ for o in resp_objects:
+ self.assertTrue(o.find('Key').text in req_objects)
+
+ # Delete 2 objects via MultiDelete API but no objects exist
+ req_objects = ['obj4', 'obj5']
+ xml = self._gen_multi_delete_xml(req_objects)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'DeleteResult')
+ resp_objects = elem.findall('Deleted')
+ self.assertEqual(len(resp_objects), len(req_objects))
+ for o in resp_objects:
+ self.assertTrue(o.find('Key').text in req_objects)
+
+ def test_delete_multi_objects_error(self):
+ bucket = 'bucket'
+ put_objects = ['obj']
+ self._prepare_test_delete_multi_objects(bucket, put_objects)
+ xml = self._gen_multi_delete_xml(put_objects)
+ content_md5 = calculate_md5(xml)
+ query = 'delete'
+
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ auth_error_conn.make_request('POST', bucket, body=xml,
+ headers={
+ 'Content-MD5': content_md5
+ },
+ query=query)
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+ status, headers, body = \
+ self.conn.make_request('POST', 'nothing', body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+ # without Object tag
+ xml = self._gen_invalid_multi_delete_xml()
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(get_error_code(body), 'MalformedXML')
+
+ # without value of Key tag
+ xml = self._gen_invalid_multi_delete_xml(hasObjectTag=True)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(get_error_code(body), 'UserKeyMustBeSpecified')
+
+ # specified number of objects are over max_multi_delete_objects
+ # (Default 1000), but xml size is smaller than 61365 bytes.
+ req_objects = ['obj%s' for var in xrange(1001)]
+ xml = self._gen_multi_delete_xml(req_objects)
+ self.assertTrue(len(xml.encode('utf-8')) <= MAX_MULTI_DELETE_BODY_SIZE)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(get_error_code(body), 'MalformedXML')
+
+ # specified xml size is over 61365 bytes, but number of objects are
+ # smaller than max_multi_delete_objects.
+ obj = 'a' * 1024
+ req_objects = [obj + str(var) for var in xrange(999)]
+ xml = self._gen_multi_delete_xml(req_objects)
+ self.assertTrue(len(xml.encode('utf-8')) > MAX_MULTI_DELETE_BODY_SIZE)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(get_error_code(body), 'MalformedXML')
+
+ def test_delete_multi_objects_with_quiet(self):
+ bucket = 'bucket'
+ put_objects = ['obj']
+ query = 'delete'
+
+ # with Quiet true
+ quiet = 'true'
+ self._prepare_test_delete_multi_objects(bucket, put_objects)
+ xml = self._gen_multi_delete_xml(put_objects, quiet)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'DeleteResult')
+ resp_objects = elem.findall('Deleted')
+ self.assertEqual(len(resp_objects), 0)
+
+ # with Quiet false
+ quiet = 'false'
+ self._prepare_test_delete_multi_objects(bucket, put_objects)
+ xml = self._gen_multi_delete_xml(put_objects, quiet)
+ content_md5 = calculate_md5(xml)
+ status, headers, body = \
+ self.conn.make_request('POST', bucket, body=xml,
+ headers={'Content-MD5': content_md5},
+ query=query)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'DeleteResult')
+ resp_objects = elem.findall('Deleted')
+ self.assertEqual(len(resp_objects), 1)
+
+
+class TestS3ApiMultiDeleteSigV4(TestS3ApiMultiDelete):
+ @classmethod
+ def setUpClass(cls):
+ os.environ['S3_USE_SIGV4'] = "True"
+
+ @classmethod
+ def tearDownClass(cls):
+ del os.environ['S3_USE_SIGV4']
+
+ def setUp(self):
+ super(TestS3ApiMultiDeleteSigV4, self).setUp()
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/test/functional/s3api/test_multi_upload.py b/test/functional/s3api/test_multi_upload.py
new file mode 100644
index 000000000..33b6cadfe
--- /dev/null
+++ b/test/functional/s3api/test_multi_upload.py
@@ -0,0 +1,849 @@
+# Copyright (c) 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import unittest2
+import os
+import boto
+
+# For an issue with venv and distutils, disable pylint message here
+# pylint: disable-msg=E0611,F0401
+from distutils.version import StrictVersion
+
+from hashlib import md5
+from itertools import izip, izip_longest
+
+import test.functional as tf
+from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
+ SubElement
+from swift.common.middleware.s3api.utils import mktime
+
+from test.functional.s3api import S3ApiBase
+from test.functional.s3api.s3_test_client import Connection
+from test.functional.s3api.utils import get_error_code, get_error_msg
+
+
+def setUpModule():
+    # Bring up the in-process/functional test environment once per module.
+    tf.setup_package()
+
+
+def tearDownModule():
+    tf.teardown_package()
+
+
+class TestS3ApiMultiUpload(S3ApiBase):
+    def setUp(self):
+        """Skip the whole class unless the cluster advertises multipart
+        upload support; cache the cluster's minimum segment size."""
+        super(TestS3ApiMultiUpload, self).setUp()
+        if not tf.cluster_info['s3api'].get('allow_multipart_uploads', False):
+            raise tf.SkipTest('multipart upload is not enabled')
+
+        # Default matches S3's 5 MiB minimum part size.
+        self.min_segment_size = int(tf.cluster_info['s3api'].get(
+            'min_segment_size', 5242880))
+
+    def _gen_comp_xml(self, etags):
+        """Build a CompleteMultipartUpload XML body.
+
+        Part numbers are assigned 1-based from the order of ``etags``.
+        """
+        elem = Element('CompleteMultipartUpload')
+        for i, etag in enumerate(etags):
+            elem_part = SubElement(elem, 'Part')
+            SubElement(elem_part, 'PartNumber').text = str(i + 1)
+            SubElement(elem_part, 'ETag').text = etag
+        return tostring(elem)
+
+    def _initiate_multi_uploads_result_generator(self, bucket, keys,
+                                                 headers=None, trials=1):
+        """Create ``bucket`` and yield (status, headers, body) for an
+        Initiate Multipart Upload request per key, ``trials`` times each.
+
+        ``headers`` is an optional per-key list of extra request headers.
+        """
+        if headers is None:
+            headers = [None] * len(keys)
+        self.conn.make_request('PUT', bucket)
+        query = 'uploads'
+        for key, key_headers in izip_longest(keys, headers):
+            for i in xrange(trials):
+                status, resp_headers, body = \
+                    self.conn.make_request('POST', bucket, key,
+                                           headers=key_headers, query=query)
+                yield status, resp_headers, body
+
+    def _upload_part(self, bucket, key, upload_id, content=None, part_num=1):
+        """PUT one part; defaults to a min_segment_size body of 'a's."""
+        query = 'partNumber=%s&uploadId=%s' % (part_num, upload_id)
+        content = content if content else 'a' * self.min_segment_size
+        status, headers, body = \
+            self.conn.make_request('PUT', bucket, key, body=content,
+                                   query=query)
+        return status, headers, body
+
+    def _upload_part_copy(self, src_bucket, src_obj, dst_bucket, dst_key,
+                          upload_id, part_num=1, src_range=None):
+        """Upload Part - Copy from ``src_bucket/src_obj``.
+
+        Returns (status, headers, body, etag) where etag is parsed from the
+        CopyPartResult body (quotes stripped).
+        """
+        src_path = '%s/%s' % (src_bucket, src_obj)
+        query = 'partNumber=%s&uploadId=%s' % (part_num, upload_id)
+        req_headers = {'X-Amz-Copy-Source': src_path}
+        if src_range:
+            req_headers['X-Amz-Copy-Source-Range'] = src_range
+        status, headers, body = \
+            self.conn.make_request('PUT', dst_bucket, dst_key,
+                                   headers=req_headers,
+                                   query=query)
+        elem = fromstring(body, 'CopyPartResult')
+        etag = elem.find('ETag').text.strip('"')
+        return status, headers, body, etag
+
+    def _complete_multi_upload(self, bucket, key, upload_id, xml):
+        """POST the CompleteMultipartUpload XML for ``upload_id``."""
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, body=xml,
+                                   query=query)
+        return status, headers, body
+
+    def test_object_multi_upload(self):
+        """Full multipart lifecycle: initiate for three keys, list uploads,
+        upload a part, copy a part, list parts, abort two uploads, then
+        complete the remaining one."""
+        bucket = 'bucket'
+        keys = ['obj1', 'obj2', 'obj3']
+        headers = [None,
+                   {'Content-MD5': base64.b64encode('a' * 16).strip()},
+                   {'Etag': 'nonsense'}]
+        uploads = []
+
+        results_generator = self._initiate_multi_uploads_result_generator(
+            bucket, keys, headers=headers)
+
+        # Initiate Multipart Upload
+        for expected_key, (status, headers, body) in \
+                izip(keys, results_generator):
+            self.assertEqual(status, 200)
+            self.assertCommonResponseHeaders(headers)
+            self.assertTrue('content-type' in headers)
+            self.assertEqual(headers['content-type'], 'application/xml')
+            self.assertTrue('content-length' in headers)
+            self.assertEqual(headers['content-length'], str(len(body)))
+            elem = fromstring(body, 'InitiateMultipartUploadResult')
+            self.assertEqual(elem.find('Bucket').text, bucket)
+            key = elem.find('Key').text
+            self.assertEqual(expected_key, key)
+            upload_id = elem.find('UploadId').text
+            self.assertTrue(upload_id is not None)
+            # Upload IDs must be unique per (key, upload) pair.
+            self.assertTrue((key, upload_id) not in uploads)
+            uploads.append((key, upload_id))
+
+        self.assertEqual(len(uploads), len(keys))  # sanity
+
+        # List Multipart Uploads
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('GET', bucket, query=query)
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'application/xml')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], str(len(body)))
+        elem = fromstring(body, 'ListMultipartUploadsResult')
+        self.assertEqual(elem.find('Bucket').text, bucket)
+        self.assertIsNone(elem.find('KeyMarker').text)
+        self.assertEqual(elem.find('NextKeyMarker').text, uploads[-1][0])
+        self.assertIsNone(elem.find('UploadIdMarker').text)
+        self.assertEqual(elem.find('NextUploadIdMarker').text, uploads[-1][1])
+        self.assertEqual(elem.find('MaxUploads').text, '1000')
+        self.assertTrue(elem.find('EncodingType') is None)
+        self.assertEqual(elem.find('IsTruncated').text, 'false')
+        self.assertEqual(len(elem.findall('Upload')), 3)
+        for (expected_key, expected_upload_id), u in \
+                izip(uploads, elem.findall('Upload')):
+            key = u.find('Key').text
+            upload_id = u.find('UploadId').text
+            self.assertEqual(expected_key, key)
+            self.assertEqual(expected_upload_id, upload_id)
+            self.assertEqual(u.find('Initiator/ID').text,
+                             self.conn.user_id)
+            self.assertEqual(u.find('Initiator/DisplayName').text,
+                             self.conn.user_id)
+            self.assertEqual(u.find('Owner/ID').text, self.conn.user_id)
+            self.assertEqual(u.find('Owner/DisplayName').text,
+                             self.conn.user_id)
+            self.assertEqual(u.find('StorageClass').text, 'STANDARD')
+            self.assertTrue(u.find('Initiated').text is not None)
+
+        # Upload Part
+        key, upload_id = uploads[0]
+        content = 'a' * self.min_segment_size
+        etag = md5(content).hexdigest()
+        status, headers, body = \
+            self._upload_part(bucket, key, upload_id, content)
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers, etag)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], '0')
+        expected_parts_list = [(headers['etag'], mktime(headers['date']))]
+
+        # Upload Part Copy
+        key, upload_id = uploads[1]
+        src_bucket = 'bucket2'
+        src_obj = 'obj3'
+        src_content = 'b' * self.min_segment_size
+        etag = md5(src_content).hexdigest()
+
+        # prepare src obj
+        self.conn.make_request('PUT', src_bucket)
+        self.conn.make_request('PUT', src_bucket, src_obj, body=src_content)
+        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj)
+        self.assertCommonResponseHeaders(headers)
+
+        status, headers, body, resp_etag = \
+            self._upload_part_copy(src_bucket, src_obj, bucket,
+                                   key, upload_id)
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'application/xml')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], str(len(body)))
+        # The etag lives in the XML body, not in a response header.
+        self.assertTrue('etag' not in headers)
+        elem = fromstring(body, 'CopyPartResult')
+
+        last_modified = elem.find('LastModified').text
+        self.assertTrue(last_modified is not None)
+
+        self.assertEqual(resp_etag, etag)
+
+        # Check last-modified timestamp
+        key, upload_id = uploads[1]
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('GET', bucket, key, query=query)
+
+        self.assertEqual(200, status)
+        elem = fromstring(body, 'ListPartsResult')
+
+        # FIXME: COPY result drops milli/microseconds but GET doesn't
+        last_modified_gets = [p.find('LastModified').text
+                              for p in elem.iterfind('Part')]
+        self.assertEqual(
+            last_modified_gets[0].rsplit('.', 1)[0],
+            last_modified.rsplit('.', 1)[0],
+            '%r != %r' % (last_modified_gets[0], last_modified))
+        # There should be *exactly* one part in the result (only one part
+        # was copy-uploaded to this upload).
+        self.assertEqual(1, len(last_modified_gets))
+
+        # List Parts
+        key, upload_id = uploads[0]
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('GET', bucket, key, query=query)
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'application/xml')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], str(len(body)))
+        elem = fromstring(body, 'ListPartsResult')
+        self.assertEqual(elem.find('Bucket').text, bucket)
+        self.assertEqual(elem.find('Key').text, key)
+        self.assertEqual(elem.find('UploadId').text, upload_id)
+        self.assertEqual(elem.find('Initiator/ID').text, self.conn.user_id)
+        self.assertEqual(elem.find('Initiator/DisplayName').text,
+                         self.conn.user_id)
+        self.assertEqual(elem.find('Owner/ID').text, self.conn.user_id)
+        self.assertEqual(elem.find('Owner/DisplayName').text,
+                         self.conn.user_id)
+        self.assertEqual(elem.find('StorageClass').text, 'STANDARD')
+        self.assertEqual(elem.find('PartNumberMarker').text, '0')
+        self.assertEqual(elem.find('NextPartNumberMarker').text, '1')
+        self.assertEqual(elem.find('MaxParts').text, '1000')
+        self.assertEqual(elem.find('IsTruncated').text, 'false')
+        self.assertEqual(len(elem.findall('Part')), 1)
+
+        # etags will be used to generate xml for Complete Multipart Upload
+        etags = []
+        for (expected_etag, expected_date), p in \
+                izip(expected_parts_list, elem.findall('Part')):
+            last_modified = p.find('LastModified').text
+            self.assertTrue(last_modified is not None)
+            # TODO: sanity check
+            #       (kota_) How do we check the sanity?
+            #       the last-modified header drops milli-seconds info
+            #       by the constraint of the format.
+            #       For now, we can do either the format check or round check
+            # last_modified_from_xml = mktime(last_modified)
+            # self.assertEqual(expected_date,
+            #                  last_modified_from_xml)
+            self.assertEqual(expected_etag, p.find('ETag').text)
+            self.assertEqual(self.min_segment_size, int(p.find('Size').text))
+            etags.append(p.find('ETag').text)
+
+        # Abort Multipart Uploads
+        # note that uploads[1] has part data while uploads[2] does not
+        for key, upload_id in uploads[1:]:
+            query = 'uploadId=%s' % upload_id
+            status, headers, body = \
+                self.conn.make_request('DELETE', bucket, key, query=query)
+            self.assertEqual(status, 204)
+            self.assertCommonResponseHeaders(headers)
+            self.assertTrue('content-type' in headers)
+            self.assertEqual(headers['content-type'],
+                             'text/html; charset=UTF-8')
+            self.assertTrue('content-length' in headers)
+            self.assertEqual(headers['content-length'], '0')
+
+        # Complete Multipart Upload
+        key, upload_id = uploads[0]
+        xml = self._gen_comp_xml(etags)
+        status, headers, body = \
+            self._complete_multi_upload(bucket, key, upload_id, xml)
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'application/xml')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], str(len(body)))
+        elem = fromstring(body, 'CompleteMultipartUploadResult')
+        # TODO: use tf.config value
+        self.assertEqual(
+            'http://%s:%s/bucket/obj1' % (self.conn.host, self.conn.port),
+            elem.find('Location').text)
+        self.assertEqual(elem.find('Bucket').text, bucket)
+        self.assertEqual(elem.find('Key').text, key)
+        # TODO: confirm completed etag value
+        self.assertTrue(elem.find('ETag').text is not None)
+
+    def test_initiate_multi_upload_error(self):
+        """Initiate must fail with SignatureDoesNotMatch on bad credentials
+        and NoSuchBucket on a missing bucket."""
+        bucket = 'bucket'
+        key = 'obj'
+        self.conn.make_request('PUT', bucket)
+        query = 'uploads'
+
+        auth_error_conn = Connection(aws_secret_key='invalid')
+        status, headers, body = \
+            auth_error_conn.make_request('POST', bucket, key, query=query)
+        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+        status, resp_headers, body = \
+            self.conn.make_request('POST', 'nothing', key, query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+    def test_list_multi_uploads_error(self):
+        """List Multipart Uploads error cases: bad signature, no bucket."""
+        bucket = 'bucket'
+        self.conn.make_request('PUT', bucket)
+        query = 'uploads'
+
+        auth_error_conn = Connection(aws_secret_key='invalid')
+        status, headers, body = \
+            auth_error_conn.make_request('GET', bucket, query=query)
+        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+        status, headers, body = \
+            self.conn.make_request('GET', 'nothing', query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+    def test_upload_part_error(self):
+        """Upload Part error cases: bad signature, missing bucket, unknown
+        upload ID, and out-of-range part number."""
+        bucket = 'bucket'
+        self.conn.make_request('PUT', bucket)
+        query = 'uploads'
+        key = 'obj'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+
+        query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
+        auth_error_conn = Connection(aws_secret_key='invalid')
+        status, headers, body = \
+            auth_error_conn.make_request('PUT', bucket, key, query=query)
+        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+        status, headers, body = \
+            self.conn.make_request('PUT', 'nothing', key, query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+        query = 'partNumber=%s&uploadId=%s' % (1, 'nothing')
+        status, headers, body = \
+            self.conn.make_request('PUT', bucket, key, query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchUpload')
+
+        # Part numbers are 1-based; 0 must be rejected.
+        query = 'partNumber=%s&uploadId=%s' % (0, upload_id)
+        status, headers, body = \
+            self.conn.make_request('PUT', bucket, key, query=query)
+        self.assertEqual(get_error_code(body), 'InvalidArgument')
+        err_msg = 'Part number must be an integer between 1 and'
+        self.assertTrue(err_msg in get_error_msg(body))
+
+    def test_upload_part_copy_error(self):
+        """Upload Part - Copy error cases: bad signature, missing destination
+        bucket, unknown upload ID, and missing copy source object."""
+        src_bucket = 'src'
+        src_obj = 'src'
+        self.conn.make_request('PUT', src_bucket)
+        self.conn.make_request('PUT', src_bucket, src_obj)
+        src_path = '%s/%s' % (src_bucket, src_obj)
+
+        bucket = 'bucket'
+        self.conn.make_request('PUT', bucket)
+        key = 'obj'
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+
+        query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
+        auth_error_conn = Connection(aws_secret_key='invalid')
+        status, headers, body = \
+            auth_error_conn.make_request('PUT', bucket, key,
+                                         headers={
+                                             'X-Amz-Copy-Source': src_path
+                                         },
+                                         query=query)
+        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+        status, headers, body = \
+            self.conn.make_request('PUT', 'nothing', key,
+                                   headers={'X-Amz-Copy-Source': src_path},
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+        query = 'partNumber=%s&uploadId=%s' % (1, 'nothing')
+        status, headers, body = \
+            self.conn.make_request('PUT', bucket, key,
+                                   headers={'X-Amz-Copy-Source': src_path},
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchUpload')
+
+        # Copy source object does not exist.
+        src_path = '%s/%s' % (src_bucket, 'nothing')
+        query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
+        status, headers, body = \
+            self.conn.make_request('PUT', bucket, key,
+                                   headers={'X-Amz-Copy-Source': src_path},
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchKey')
+
+    def test_list_parts_error(self):
+        """List Parts error cases: bad signature, missing bucket, unknown
+        upload ID."""
+        bucket = 'bucket'
+        self.conn.make_request('PUT', bucket)
+        key = 'obj'
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+
+        query = 'uploadId=%s' % upload_id
+        auth_error_conn = Connection(aws_secret_key='invalid')
+
+        status, headers, body = \
+            auth_error_conn.make_request('GET', bucket, key, query=query)
+        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+        status, headers, body = \
+            self.conn.make_request('GET', 'nothing', key, query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+        query = 'uploadId=%s' % 'nothing'
+        status, headers, body = \
+            self.conn.make_request('GET', bucket, key, query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchUpload')
+
+    def test_abort_multi_upload_error(self):
+        """Abort Multipart Upload error cases: bad signature, missing bucket,
+        wrong key, unknown upload ID."""
+        bucket = 'bucket'
+        self.conn.make_request('PUT', bucket)
+        key = 'obj'
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+        self._upload_part(bucket, key, upload_id)
+
+        query = 'uploadId=%s' % upload_id
+        auth_error_conn = Connection(aws_secret_key='invalid')
+        status, headers, body = \
+            auth_error_conn.make_request('DELETE', bucket, key, query=query)
+        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+        status, headers, body = \
+            self.conn.make_request('DELETE', 'nothing', key, query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+        # A valid upload ID paired with the wrong key is still NoSuchUpload.
+        status, headers, body = \
+            self.conn.make_request('DELETE', bucket, 'nothing', query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchUpload')
+
+        query = 'uploadId=%s' % 'nothing'
+        status, headers, body = \
+            self.conn.make_request('DELETE', bucket, key, query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchUpload')
+
+    def test_complete_multi_upload_error(self):
+        """Complete Multipart Upload error cases: undersized part, bad
+        signature, missing bucket, unknown upload ID, empty manifest,
+        invalid etag, and etag for a part that was never uploaded."""
+        bucket = 'bucket'
+        keys = ['obj', 'obj2']
+        self.conn.make_request('PUT', bucket)
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, keys[0], query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+
+        etags = []
+        # Upload two zero-byte parts (numbers 1 and 2).
+        for i in xrange(1, 3):
+            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
+            status, headers, body = \
+                self.conn.make_request('PUT', bucket, keys[0], query=query)
+            etags.append(headers['etag'])
+        xml = self._gen_comp_xml(etags)
+
+        # part 1 too small
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, keys[0], body=xml,
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'EntityTooSmall')
+
+        # invalid credentials
+        auth_error_conn = Connection(aws_secret_key='invalid')
+        status, headers, body = \
+            auth_error_conn.make_request('POST', bucket, keys[0], body=xml,
+                                         query=query)
+        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+
+        # wrong/missing bucket
+        status, headers, body = \
+            self.conn.make_request('POST', 'nothing', keys[0], query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+        # wrong upload ID
+        query = 'uploadId=%s' % 'nothing'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, keys[0], body=xml,
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'NoSuchUpload')
+
+        # without Part tag in xml
+        query = 'uploadId=%s' % upload_id
+        xml = self._gen_comp_xml([])
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, keys[0], body=xml,
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'MalformedXML')
+
+        # with invalid etag in xml
+        invalid_etag = 'invalid'
+        xml = self._gen_comp_xml([invalid_etag])
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, keys[0], body=xml,
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'InvalidPart')
+
+        # without part in Swift
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, keys[1], query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+        query = 'uploadId=%s' % upload_id
+        xml = self._gen_comp_xml([etags[0]])
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, keys[1], body=xml,
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'InvalidPart')
+
+    def test_complete_upload_min_segment_size(self):
+        """Completion enforces the minimum segment size on every part except
+        the last; a single part of any size is accepted."""
+        bucket = 'bucket'
+        key = 'obj'
+        self.conn.make_request('PUT', bucket)
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+
+        # multi parts with no body
+        etags = []
+        for i in xrange(1, 3):
+            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
+            status, headers, body = \
+                self.conn.make_request('PUT', bucket, key, query=query)
+            etags.append(headers['etag'])
+        xml = self._gen_comp_xml(etags)
+
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, body=xml,
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'EntityTooSmall')
+
+        # multi parts with all parts less than min segment size
+        etags = []
+        for i in xrange(1, 3):
+            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
+            status, headers, body = \
+                self.conn.make_request('PUT', bucket, key, query=query,
+                                       body='AA')
+            etags.append(headers['etag'])
+        xml = self._gen_comp_xml(etags)
+
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, body=xml,
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'EntityTooSmall')
+
+        # one part and less than min segment size
+        etags = []
+        query = 'partNumber=1&uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('PUT', bucket, key, query=query,
+                                   body='AA')
+        etags.append(headers['etag'])
+        xml = self._gen_comp_xml(etags)
+
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, body=xml,
+                                   query=query)
+        self.assertEqual(status, 200)
+
+        # multi parts with all parts except the first part less than min
+        # segment size
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+
+        etags = []
+        # body_size[0] is a placeholder so the list can be indexed by the
+        # 1-based part number below.
+        body_size = [self.min_segment_size, self.min_segment_size - 1, 2]
+        for i in xrange(1, 3):
+            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
+            status, headers, body = \
+                self.conn.make_request('PUT', bucket, key, query=query,
+                                       body='A' * body_size[i])
+            etags.append(headers['etag'])
+        xml = self._gen_comp_xml(etags)
+
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, body=xml,
+                                   query=query)
+        self.assertEqual(get_error_code(body), 'EntityTooSmall')
+
+        # multi parts with all parts except last part more than min segment
+        # size
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+
+        etags = []
+        # body_size[0] is again an unused 1-based-index placeholder.
+        body_size = [self.min_segment_size, self.min_segment_size, 2]
+        for i in xrange(1, 3):
+            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
+            status, headers, body = \
+                self.conn.make_request('PUT', bucket, key, query=query,
+                                       body='A' * body_size[i])
+            etags.append(headers['etag'])
+        xml = self._gen_comp_xml(etags)
+
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, body=xml,
+                                   query=query)
+        self.assertEqual(status, 200)
+
+    def test_complete_upload_with_fewer_etags(self):
+        """Completing with a manifest listing fewer parts than were uploaded
+        succeeds; extra uploaded parts are simply not included."""
+        bucket = 'bucket'
+        key = 'obj'
+        self.conn.make_request('PUT', bucket)
+        query = 'uploads'
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, query=query)
+        elem = fromstring(body, 'InitiateMultipartUploadResult')
+        upload_id = elem.find('UploadId').text
+
+        etags = []
+        for i in xrange(1, 4):
+            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
+            status, headers, body = \
+                self.conn.make_request('PUT', bucket, key,
+                                       body='A' * 1024 * 1024 * 5, query=query)
+            etags.append(headers['etag'])
+        query = 'uploadId=%s' % upload_id
+        # Complete with only the first two of three uploaded parts.
+        xml = self._gen_comp_xml(etags[:-1])
+        status, headers, body = \
+            self.conn.make_request('POST', bucket, key, body=xml,
+                                   query=query)
+        self.assertEqual(status, 200)
+
+    def test_object_multi_upload_part_copy_range(self):
+        """Upload Part - Copy with an X-Amz-Copy-Source-Range header copies
+        only the requested byte range of the source object."""
+        bucket = 'bucket'
+        keys = ['obj1']
+        uploads = []
+
+        results_generator = self._initiate_multi_uploads_result_generator(
+            bucket, keys)
+
+        # Initiate Multipart Upload
+        for expected_key, (status, headers, body) in \
+                izip(keys, results_generator):
+            self.assertEqual(status, 200)
+            self.assertCommonResponseHeaders(headers)
+            self.assertTrue('content-type' in headers)
+            self.assertEqual(headers['content-type'], 'application/xml')
+            self.assertTrue('content-length' in headers)
+            self.assertEqual(headers['content-length'], str(len(body)))
+            elem = fromstring(body, 'InitiateMultipartUploadResult')
+            self.assertEqual(elem.find('Bucket').text, bucket)
+            key = elem.find('Key').text
+            self.assertEqual(expected_key, key)
+            upload_id = elem.find('UploadId').text
+            self.assertTrue(upload_id is not None)
+            self.assertTrue((key, upload_id) not in uploads)
+            uploads.append((key, upload_id))
+
+        self.assertEqual(len(uploads), len(keys))  # sanity
+
+        # Upload Part Copy Range
+        key, upload_id = uploads[0]
+        src_bucket = 'bucket2'
+        src_obj = 'obj4'
+        src_content = 'y' * (self.min_segment_size / 2) + 'z' * \
+            self.min_segment_size
+        # Copy exactly the first min_segment_size bytes of the source.
+        src_range = 'bytes=0-%d' % (self.min_segment_size - 1)
+        etag = md5(src_content[:self.min_segment_size]).hexdigest()
+
+        # prepare src obj
+        self.conn.make_request('PUT', src_bucket)
+        self.conn.make_request('PUT', src_bucket, src_obj, body=src_content)
+        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj)
+        self.assertCommonResponseHeaders(headers)
+
+        status, headers, body, resp_etag = \
+            self._upload_part_copy(src_bucket, src_obj, bucket,
+                                   key, upload_id, 1, src_range)
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'application/xml')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], str(len(body)))
+        self.assertTrue('etag' not in headers)
+        elem = fromstring(body, 'CopyPartResult')
+
+        last_modified = elem.find('LastModified').text
+        self.assertTrue(last_modified is not None)
+
+        self.assertEqual(resp_etag, etag)
+
+        # Check last-modified timestamp
+        key, upload_id = uploads[0]
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('GET', bucket, key, query=query)
+
+        elem = fromstring(body, 'ListPartsResult')
+
+        # FIXME: COPY result drops milli/microseconds but GET doesn't
+        last_modified_gets = [p.find('LastModified').text
+                              for p in elem.iterfind('Part')]
+        self.assertEqual(
+            last_modified_gets[0].rsplit('.', 1)[0],
+            last_modified.rsplit('.', 1)[0],
+            '%r != %r' % (last_modified_gets[0], last_modified))
+
+        # There should be *exactly* one part in the result
+        self.assertEqual(1, len(last_modified_gets))
+
+        # Abort Multipart Upload
+        key, upload_id = uploads[0]
+        query = 'uploadId=%s' % upload_id
+        status, headers, body = \
+            self.conn.make_request('DELETE', bucket, key, query=query)
+
+        # sanity checks
+        self.assertEqual(status, 204)
+        self.assertCommonResponseHeaders(headers)
+        self.assertTrue('content-type' in headers)
+        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
+        self.assertTrue('content-length' in headers)
+        self.assertEqual(headers['content-length'], '0')
+
+
+class TestS3ApiMultiUploadSigV4(TestS3ApiMultiUpload):
+    """Re-run the multipart upload tests with AWS Signature Version 4."""
+
+    @classmethod
+    def setUpClass(cls):
+        # The test client checks this env var to select SigV4 signing.
+        os.environ['S3_USE_SIGV4'] = "True"
+
+    @classmethod
+    def tearDownClass(cls):
+        del os.environ['S3_USE_SIGV4']
+
+    def setUp(self):
+        super(TestS3ApiMultiUploadSigV4, self).setUp()
+
+    def test_object_multi_upload_part_copy_range(self):
+        # NOTE(review): this override skips on boto < 3.0 but has no body
+        # otherwise, so for newer boto the parent test is effectively
+        # disabled rather than run — presumably intentional; confirm.
+        if StrictVersion(boto.__version__) < StrictVersion('3.0'):
+            self.skipTest('This stuff got the issue of boto<=2.x')
+
+    def test_delete_bucket_multi_upload_object_exisiting(self):
+        """A bucket holding a completed multipart object refuses DELETE
+        (409) and the object remains readable afterwards."""
+        bucket = 'bucket'
+        keys = ['obj1']
+        uploads = []
+
+        results_generator = self._initiate_multi_uploads_result_generator(
+            bucket, keys)
+
+        # Initiate Multipart Upload
+        for expected_key, (status, _, body) in \
+                izip(keys, results_generator):
+            self.assertEqual(status, 200)  # sanity
+            elem = fromstring(body, 'InitiateMultipartUploadResult')
+            key = elem.find('Key').text
+            self.assertEqual(expected_key, key)  # sanity
+            upload_id = elem.find('UploadId').text
+            self.assertTrue(upload_id is not None)  # sanity
+            self.assertTrue((key, upload_id) not in uploads)
+            uploads.append((key, upload_id))
+
+        self.assertEqual(len(uploads), len(keys))  # sanity
+
+        # Upload Part
+        key, upload_id = uploads[0]
+        content = 'a' * self.min_segment_size
+        status, headers, body = \
+            self._upload_part(bucket, key, upload_id, content)
+        self.assertEqual(status, 200)
+
+        # Complete Multipart Upload
+        key, upload_id = uploads[0]
+        etags = [md5(content).hexdigest()]
+        xml = self._gen_comp_xml(etags)
+        status, headers, body = \
+            self._complete_multi_upload(bucket, key, upload_id, xml)
+        self.assertEqual(status, 200)  # sanity
+
+        # GET multipart object
+        status, headers, body = \
+            self.conn.make_request('GET', bucket, key)
+        self.assertEqual(status, 200)  # sanity
+        self.assertEqual(content, body)  # sanity
+
+        # DELETE bucket while the object existing
+        status, headers, body = \
+            self.conn.make_request('DELETE', bucket)
+        self.assertEqual(status, 409)  # sanity
+
+        # The object must still be there.
+        status, headers, body = \
+            self.conn.make_request('GET', bucket, key)
+        self.assertEqual(status, 200)  # sanity
+        self.assertEqual(content, body)  # sanity
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/test/functional/s3api/test_object.py b/test/functional/s3api/test_object.py
new file mode 100644
index 000000000..783f48ae9
--- /dev/null
+++ b/test/functional/s3api/test_object.py
@@ -0,0 +1,873 @@
+# Copyright (c) 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest2
+import os
+import boto
+
+# For an issue with venv and distutils, disable pylint message here
+# pylint: disable-msg=E0611,F0401
+from distutils.version import StrictVersion
+
+import email.parser
+from email.utils import formatdate, parsedate
+from time import mktime
+from hashlib import md5
+from urllib import quote
+
+import test.functional as tf
+
+from swift.common.middleware.s3api.etree import fromstring
+
+from test.functional.s3api import S3ApiBase
+from test.functional.s3api.s3_test_client import Connection
+from test.functional.s3api.utils import get_error_code, calculate_md5
+
+DAY = 86400.0 # 60 * 60 * 24 (sec)
+
+
+def setUpModule():
+    # Bring up the in-process/functional test environment once per module.
+    tf.setup_package()
+
+
+def tearDownModule():
+    # Tear the shared functional test environment back down.
+    tf.teardown_package()
+
+class TestS3ApiObject(S3ApiBase):
+    def setUp(self):
+        # Every test starts by (re-)PUTting the shared bucket 'bucket'.
+        # NOTE(review): the PUT status is deliberately unchecked --
+        # presumably re-creating an existing bucket may not return 200
+        # in every auth setup; confirm before asserting here.
+        super(TestS3ApiObject, self).setUp()
+        self.bucket = 'bucket'
+        self.conn.make_request('PUT', self.bucket)
+
+ def _assertObjectEtag(self, bucket, obj, etag):
+ status, headers, _ = self.conn.make_request('HEAD', bucket, obj)
+ self.assertEqual(status, 200) # sanity
+ self.assertCommonResponseHeaders(headers, etag)
+
+ def test_object(self):
+ obj = 'object name with %-sign'
+ content = 'abc123'
+ etag = md5(content).hexdigest()
+
+ # PUT Object
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, body=content)
+ self.assertEqual(status, 200)
+
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue('content-length' in headers) # sanity
+ self.assertEqual(headers['content-length'], '0')
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ # PUT Object Copy
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_obj'
+ self.conn.make_request('PUT', dst_bucket)
+ headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj,
+ headers=headers)
+ self.assertEqual(status, 200)
+
+ # PUT Object Copy with URL-encoded Source
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_obj'
+ self.conn.make_request('PUT', dst_bucket)
+ headers = {'x-amz-copy-source': quote('/%s/%s' % (self.bucket, obj))}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj,
+ headers=headers)
+ self.assertEqual(status, 200)
+
+ self.assertCommonResponseHeaders(headers)
+ self.assertEqual(headers['content-length'], str(len(body)))
+
+ elem = fromstring(body, 'CopyObjectResult')
+ self.assertTrue(elem.find('LastModified').text is not None)
+ last_modified_xml = elem.find('LastModified').text
+ self.assertTrue(elem.find('ETag').text is not None)
+ self.assertEqual(etag, elem.find('ETag').text.strip('"'))
+ self._assertObjectEtag(dst_bucket, dst_obj, etag)
+
+ # Check timestamp on Copy:
+ status, headers, body = \
+ self.conn.make_request('GET', dst_bucket)
+ self.assertEqual(status, 200)
+ elem = fromstring(body, 'ListBucketResult')
+
+ # FIXME: COPY result drops milli/microseconds but GET doesn't
+ self.assertEqual(
+ elem.find('Contents').find("LastModified").text.rsplit('.', 1)[0],
+ last_modified_xml.rsplit('.', 1)[0])
+
+ # GET Object
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj)
+ self.assertEqual(status, 200)
+
+ self.assertCommonResponseHeaders(headers, etag)
+ self.assertTrue(headers['last-modified'] is not None)
+ self.assertTrue(headers['content-type'] is not None)
+ self.assertEqual(headers['content-length'], str(len(content)))
+
+ # HEAD Object
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj)
+ self.assertEqual(status, 200)
+
+ self.assertCommonResponseHeaders(headers, etag)
+ self.assertTrue(headers['last-modified'] is not None)
+ self.assertTrue('content-type' in headers)
+ self.assertEqual(headers['content-length'], str(len(content)))
+
+ # DELETE Object
+ status, headers, body = \
+ self.conn.make_request('DELETE', self.bucket, obj)
+ self.assertEqual(status, 204)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_put_object_error(self):
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ auth_error_conn.make_request('PUT', self.bucket, 'object')
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ status, headers, body = \
+ self.conn.make_request('PUT', 'bucket2', 'object')
+ self.assertEqual(get_error_code(body), 'NoSuchBucket')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+    def test_put_object_copy_error(self):
+        """Error paths of PUT Object Copy: bad signature, missing source
+        key, missing source bucket, missing destination bucket."""
+        obj = 'object'
+        self.conn.make_request('PUT', self.bucket, obj)
+        dst_bucket = 'dst-bucket'
+        self.conn.make_request('PUT', dst_bucket)
+        dst_obj = 'dst_object'
+
+        # Invalid credentials -> SignatureDoesNotMatch
+        headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
+        auth_error_conn = Connection(aws_secret_key='invalid')
+        status, headers, body = \
+            auth_error_conn.make_request('PUT', dst_bucket, dst_obj, headers)
+        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+        self.assertEqual(headers['content-type'], 'application/xml')
+
+        # /src/nothing -> /dst/dst
+        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, 'nothing')}
+        status, headers, body = \
+            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
+        self.assertEqual(get_error_code(body), 'NoSuchKey')
+        self.assertEqual(headers['content-type'], 'application/xml')
+
+        # /nothing/src -> /dst/dst
+        headers = {'X-Amz-Copy-Source': '/%s/%s' % ('nothing', obj)}
+        status, headers, body = \
+            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
+        # TODO: the source bucket is not checked, so the middleware does
+        # not (yet) return NoSuchBucket here; re-enable when it does.
+        # self.assertEqual(get_error_code(body), 'NoSuchBucket')
+
+        # /src/src -> /nothing/dst
+        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
+        status, headers, body = \
+            self.conn.make_request('PUT', 'nothing', dst_obj, headers)
+        self.assertEqual(get_error_code(body), 'NoSuchBucket')
+        self.assertEqual(headers['content-type'], 'application/xml')
+
+ def test_get_object_error(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ auth_error_conn.make_request('GET', self.bucket, obj)
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, 'invalid')
+ self.assertEqual(get_error_code(body), 'NoSuchKey')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ status, headers, body = self.conn.make_request('GET', 'invalid', obj)
+ self.assertEqual(get_error_code(body), 'NoSuchBucket')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ def test_head_object_error(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ auth_error_conn.make_request('HEAD', self.bucket, obj)
+ self.assertEqual(status, 403)
+ self.assertEqual(body, '') # sanity
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, 'invalid')
+ self.assertEqual(status, 404)
+ self.assertEqual(body, '') # sanity
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ status, headers, body = \
+ self.conn.make_request('HEAD', 'invalid', obj)
+ self.assertEqual(status, 404)
+ self.assertEqual(body, '') # sanity
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ def test_delete_object_error(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = \
+ auth_error_conn.make_request('DELETE', self.bucket, obj)
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ status, headers, body = \
+ self.conn.make_request('DELETE', self.bucket, 'invalid')
+ self.assertEqual(get_error_code(body), 'NoSuchKey')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ status, headers, body = \
+ self.conn.make_request('DELETE', 'invalid', obj)
+ self.assertEqual(get_error_code(body), 'NoSuchBucket')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ def test_put_object_content_encoding(self):
+ obj = 'object'
+ etag = md5().hexdigest()
+ headers = {'Content-Encoding': 'gzip'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers)
+ self.assertEqual(status, 200)
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj)
+ self.assertTrue('content-encoding' in headers) # sanity
+ self.assertEqual(headers['content-encoding'], 'gzip')
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_put_object_content_md5(self):
+ obj = 'object'
+ content = 'abcdefghij'
+ etag = md5(content).hexdigest()
+ headers = {'Content-MD5': calculate_md5(content)}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers, content)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_put_object_content_type(self):
+ obj = 'object'
+ content = 'abcdefghij'
+ etag = md5(content).hexdigest()
+ headers = {'Content-Type': 'text/plain'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers, content)
+ self.assertEqual(status, 200)
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj)
+ self.assertEqual(headers['content-type'], 'text/plain')
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_put_object_conditional_requests(self):
+ obj = 'object'
+ content = 'abcdefghij'
+ headers = {'If-None-Match': '*'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers, content)
+ self.assertEqual(status, 501)
+
+ headers = {'If-Match': '*'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers, content)
+ self.assertEqual(status, 501)
+
+ headers = {'If-Modified-Since': 'Sat, 27 Jun 2015 00:00:00 GMT'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers, content)
+ self.assertEqual(status, 501)
+
+ headers = {'If-Unmodified-Since': 'Sat, 27 Jun 2015 00:00:00 GMT'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers, content)
+ self.assertEqual(status, 501)
+
+ # None of the above should actually have created an object
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj, {}, '')
+ self.assertEqual(status, 404)
+
+ def test_put_object_expect(self):
+ obj = 'object'
+ content = 'abcdefghij'
+ etag = md5(content).hexdigest()
+ headers = {'Expect': '100-continue'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers, content)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def _test_put_object_headers(self, req_headers, expected_headers=None):
+ if expected_headers is None:
+ expected_headers = req_headers
+ obj = 'object'
+ content = 'abcdefghij'
+ etag = md5(content).hexdigest()
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj,
+ req_headers, content)
+ self.assertEqual(status, 200)
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj)
+ for header, value in expected_headers.items():
+ self.assertIn(header.lower(), headers)
+ self.assertEqual(headers[header.lower()], value)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_put_object_metadata(self):
+ self._test_put_object_headers({
+ 'X-Amz-Meta-Bar': 'foo',
+ 'X-Amz-Meta-Bar2': 'foo2'})
+
+ def test_put_object_weird_metadata(self):
+ req_headers = dict(
+ ('x-amz-meta-' + c, c)
+ for c in '!"#$%&\'()*+-./<=>?@[\\]^`{|}~')
+ exp_headers = dict(
+ ('x-amz-meta-' + c, c)
+ for c in '!#$%&\'(*+-.^`|~')
+ self._test_put_object_headers(req_headers, exp_headers)
+
+ def test_put_object_underscore_in_metadata(self):
+ # Break this out separately for ease of testing pre-0.19.0 eventlet
+ self._test_put_object_headers({
+ 'X-Amz-Meta-Foo-Bar': 'baz',
+ 'X-Amz-Meta-Foo_Bar': 'also baz'})
+
+ def test_put_object_content_headers(self):
+ self._test_put_object_headers({
+ 'Content-Type': 'foo/bar',
+ 'Content-Encoding': 'baz',
+ 'Content-Disposition': 'attachment',
+ 'Content-Language': 'en'})
+
+ def test_put_object_cache_control(self):
+ self._test_put_object_headers({
+ 'Cache-Control': 'private, some-extension'})
+
+ def test_put_object_expires(self):
+ self._test_put_object_headers({
+ # We don't validate that the Expires header is a valid date
+ 'Expires': 'a valid HTTP-date timestamp'})
+
+ def test_put_object_robots_tag(self):
+ self._test_put_object_headers({
+ 'X-Robots-Tag': 'googlebot: noarchive'})
+
+ def test_put_object_storage_class(self):
+ obj = 'object'
+ content = 'abcdefghij'
+ etag = md5(content).hexdigest()
+ headers = {'X-Amz-Storage-Class': 'STANDARD'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers, content)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_put_object_copy_source_params(self):
+ obj = 'object'
+ src_headers = {'X-Amz-Meta-Test': 'src'}
+ src_body = 'some content'
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_object'
+ self.conn.make_request('PUT', self.bucket, obj, src_headers, src_body)
+ self.conn.make_request('PUT', dst_bucket)
+
+ headers = {'X-Amz-Copy-Source': '/%s/%s?nonsense' % (
+ self.bucket, obj)}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
+ self.assertEqual(status, 400)
+ self.assertEqual(get_error_code(body), 'InvalidArgument')
+
+ headers = {'X-Amz-Copy-Source': '/%s/%s?versionId=null&nonsense' % (
+ self.bucket, obj)}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
+ self.assertEqual(status, 400)
+ self.assertEqual(get_error_code(body), 'InvalidArgument')
+
+ headers = {'X-Amz-Copy-Source': '/%s/%s?versionId=null' % (
+ self.bucket, obj)}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ status, headers, body = \
+ self.conn.make_request('GET', dst_bucket, dst_obj)
+ self.assertEqual(status, 200)
+ self.assertEqual(headers['x-amz-meta-test'], 'src')
+ self.assertEqual(body, src_body)
+
+ def test_put_object_copy_source(self):
+ obj = 'object'
+ content = 'abcdefghij'
+ etag = md5(content).hexdigest()
+ self.conn.make_request('PUT', self.bucket, obj, body=content)
+
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_object'
+ self.conn.make_request('PUT', dst_bucket)
+
+ # /src/src -> /dst/dst
+ headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(dst_bucket, dst_obj, etag)
+
+ # /src/src -> /src/dst
+ headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, dst_obj, headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, dst_obj, etag)
+
+ # /src/src -> /src/src
+ # need changes to copy itself (e.g. metadata)
+ headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
+ 'X-Amz-Meta-Foo': 'bar',
+ 'X-Amz-Metadata-Directive': 'REPLACE'}
+ status, headers, body = \
+ self.conn.make_request('PUT', self.bucket, obj, headers)
+ self.assertEqual(status, 200)
+ self._assertObjectEtag(self.bucket, obj, etag)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_put_object_copy_metadata_directive(self):
+ obj = 'object'
+ src_headers = {'X-Amz-Meta-Test': 'src'}
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_object'
+ self.conn.make_request('PUT', self.bucket, obj, headers=src_headers)
+ self.conn.make_request('PUT', dst_bucket)
+
+ headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
+ 'X-Amz-Metadata-Directive': 'REPLACE',
+ 'X-Amz-Meta-Test': 'dst'}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ status, headers, body = \
+ self.conn.make_request('HEAD', dst_bucket, dst_obj)
+ self.assertEqual(headers['x-amz-meta-test'], 'dst')
+
+ def test_put_object_copy_source_if_modified_since(self):
+ obj = 'object'
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_object'
+ etag = md5().hexdigest()
+ self.conn.make_request('PUT', self.bucket, obj)
+ self.conn.make_request('PUT', dst_bucket)
+
+ _, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
+ src_datetime = mktime(parsedate(headers['last-modified']))
+ src_datetime = src_datetime - DAY
+ headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
+ 'X-Amz-Copy-Source-If-Modified-Since':
+ formatdate(src_datetime)}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_put_object_copy_source_if_unmodified_since(self):
+ obj = 'object'
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_object'
+ etag = md5().hexdigest()
+ self.conn.make_request('PUT', self.bucket, obj)
+ self.conn.make_request('PUT', dst_bucket)
+
+ _, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
+ src_datetime = mktime(parsedate(headers['last-modified']))
+ src_datetime = src_datetime + DAY
+ headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
+ 'X-Amz-Copy-Source-If-Unmodified-Since':
+ formatdate(src_datetime)}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_put_object_copy_source_if_match(self):
+ obj = 'object'
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_object'
+ etag = md5().hexdigest()
+ self.conn.make_request('PUT', self.bucket, obj)
+ self.conn.make_request('PUT', dst_bucket)
+
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj)
+
+ headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
+ 'X-Amz-Copy-Source-If-Match': etag}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_put_object_copy_source_if_none_match(self):
+ obj = 'object'
+ dst_bucket = 'dst-bucket'
+ dst_obj = 'dst_object'
+ etag = md5().hexdigest()
+ self.conn.make_request('PUT', self.bucket, obj)
+ self.conn.make_request('PUT', dst_bucket)
+
+ headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
+ 'X-Amz-Copy-Source-If-None-Match': 'none-match'}
+ status, headers, body = \
+ self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self._assertObjectEtag(self.bucket, obj, etag)
+
+ def test_get_object_response_content_type(self):
+ obj = 'obj'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ query = 'response-content-type=text/plain'
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self.assertEqual(headers['content-type'], 'text/plain')
+
+ def test_get_object_response_content_language(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ query = 'response-content-language=en'
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self.assertEqual(headers['content-language'], 'en')
+
+ def test_get_object_response_cache_control(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ query = 'response-cache-control=private'
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self.assertEqual(headers['cache-control'], 'private')
+
+ def test_get_object_response_content_disposition(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ query = 'response-content-disposition=inline'
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self.assertEqual(headers['content-disposition'], 'inline')
+
+ def test_get_object_response_content_encoding(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ query = 'response-content-encoding=gzip'
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, query=query)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+ self.assertEqual(headers['content-encoding'], 'gzip')
+
+ def test_get_object_range(self):
+ obj = 'object'
+ content = 'abcdefghij'
+ headers = {'x-amz-meta-test': 'swift'}
+ self.conn.make_request(
+ 'PUT', self.bucket, obj, headers=headers, body=content)
+
+ headers = {'Range': 'bytes=1-5'}
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 206)
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertTrue('x-amz-meta-test' in headers)
+ self.assertEqual('swift', headers['x-amz-meta-test'])
+ self.assertEqual(body, 'bcdef')
+
+ headers = {'Range': 'bytes=5-'}
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 206)
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertTrue('x-amz-meta-test' in headers)
+ self.assertEqual('swift', headers['x-amz-meta-test'])
+ self.assertEqual(body, 'fghij')
+
+ headers = {'Range': 'bytes=-5'}
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 206)
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertTrue('x-amz-meta-test' in headers)
+ self.assertEqual('swift', headers['x-amz-meta-test'])
+ self.assertEqual(body, 'fghij')
+
+ ranges = ['1-2', '4-5']
+
+ headers = {'Range': 'bytes=%s' % ','.join(ranges)}
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 206)
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue('content-length' in headers)
+
+ self.assertTrue('content-type' in headers) # sanity
+ content_type, boundary = headers['content-type'].split(';')
+
+ self.assertEqual('multipart/byteranges', content_type)
+ self.assertTrue(boundary.startswith('boundary=')) # sanity
+ boundary_str = boundary[len('boundary='):]
+
+ # TODO: Using swift.common.utils.multipart_byteranges_to_document_iters
+ # could be easy enough.
+ parser = email.parser.FeedParser()
+ parser.feed(
+ "Content-Type: multipart/byterange; boundary=%s\r\n\r\n" %
+ boundary_str)
+ parser.feed(body)
+ message = parser.close()
+
+ self.assertTrue(message.is_multipart()) # sanity check
+ mime_parts = message.get_payload()
+ self.assertEqual(len(mime_parts), len(ranges)) # sanity
+
+ for index, range_value in enumerate(ranges):
+ start, end = map(int, range_value.split('-'))
+ # go to next section and check sanity
+ self.assertTrue(mime_parts[index])
+
+ part = mime_parts[index]
+ self.assertEqual(
+ 'application/octet-stream', part.get_content_type())
+ expected_range = 'bytes %s/%s' % (range_value, len(content))
+ self.assertEqual(
+ expected_range, part.get('Content-Range'))
+ # rest
+ payload = part.get_payload().strip()
+ self.assertEqual(content[start:end + 1], payload)
+
+ def test_get_object_if_modified_since(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ _, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
+ src_datetime = mktime(parsedate(headers['last-modified']))
+ src_datetime = src_datetime - DAY
+ headers = {'If-Modified-Since': formatdate(src_datetime)}
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_get_object_if_unmodified_since(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ _, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
+ src_datetime = mktime(parsedate(headers['last-modified']))
+ src_datetime = src_datetime + DAY
+ headers = \
+ {'If-Unmodified-Since': formatdate(src_datetime)}
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_get_object_if_match(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj)
+ etag = headers['etag']
+
+ headers = {'If-Match': etag}
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_get_object_if_none_match(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ headers = {'If-None-Match': 'none-match'}
+ status, headers, body = \
+ self.conn.make_request('GET', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_head_object_range(self):
+ obj = 'object'
+ content = 'abcdefghij'
+ self.conn.make_request('PUT', self.bucket, obj, body=content)
+
+ headers = {'Range': 'bytes=1-5'}
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertCommonResponseHeaders(headers)
+
+ headers = {'Range': 'bytes=5-'}
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertCommonResponseHeaders(headers)
+
+ headers = {'Range': 'bytes=-5'}
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertCommonResponseHeaders(headers)
+
+ def test_head_object_if_modified_since(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ _, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
+ dt = mktime(parsedate(headers['last-modified']))
+ dt = dt - DAY
+
+ headers = {'If-Modified-Since': formatdate(dt)}
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_head_object_if_unmodified_since(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ _, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
+ dt = mktime(parsedate(headers['last-modified']))
+ dt = dt + DAY
+
+ headers = {'If-Unmodified-Since': formatdate(dt)}
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_head_object_if_match(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj)
+ etag = headers['etag']
+
+ headers = {'If-Match': etag}
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+
+ def test_head_object_if_none_match(self):
+ obj = 'object'
+ self.conn.make_request('PUT', self.bucket, obj)
+
+ headers = {'If-None-Match': 'none-match'}
+ status, headers, body = \
+ self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
+ self.assertEqual(status, 200)
+ self.assertCommonResponseHeaders(headers)
+
+
+class TestS3ApiObjectSigV4(TestS3ApiObject):
+    """Re-run the full object test suite with AWS Signature Version 4.
+
+    The S3_USE_SIGV4 environment variable is set for the duration of the
+    class; presumably the test client reads it to switch signing styles
+    (see s3_test_client) -- confirm against the Connection setup.
+    """
+    @classmethod
+    def setUpClass(cls):
+        os.environ['S3_USE_SIGV4'] = "True"
+
+    @classmethod
+    def tearDownClass(cls):
+        del os.environ['S3_USE_SIGV4']
+
+    def setUp(self):
+        super(TestS3ApiObjectSigV4, self).setUp()
+
+    # NOTE(review): boto's 2.x line never reached version 3.0, so the
+    # skipIf guards below effectively always skip under boto 2 --
+    # presumably intentional until a boto release fixes sigv4 signing
+    # for these cases.
+    @unittest2.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
+                      'This stuff got the signing issue of boto<=2.x')
+    def test_put_object_metadata(self):
+        super(TestS3ApiObjectSigV4, self).test_put_object_metadata()
+
+    @unittest2.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
+                      'This stuff got the signing issue of boto<=2.x')
+    def test_put_object_copy_source_if_modified_since(self):
+        super(TestS3ApiObjectSigV4, self).\
+            test_put_object_copy_source_if_modified_since()
+
+    @unittest2.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
+                      'This stuff got the signing issue of boto<=2.x')
+    def test_put_object_copy_source_if_unmodified_since(self):
+        super(TestS3ApiObjectSigV4, self).\
+            test_put_object_copy_source_if_unmodified_since()
+
+    @unittest2.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
+                      'This stuff got the signing issue of boto<=2.x')
+    def test_put_object_copy_source_if_match(self):
+        super(TestS3ApiObjectSigV4,
+              self).test_put_object_copy_source_if_match()
+
+    @unittest2.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
+                      'This stuff got the signing issue of boto<=2.x')
+    def test_put_object_copy_source_if_none_match(self):
+        super(TestS3ApiObjectSigV4,
+              self).test_put_object_copy_source_if_none_match()
+
+
+if __name__ == '__main__':
+ unittest2.main()
diff --git a/test/functional/s3api/test_presigned.py b/test/functional/s3api/test_presigned.py
new file mode 100644
index 000000000..426a56ffa
--- /dev/null
+++ b/test/functional/s3api/test_presigned.py
@@ -0,0 +1,237 @@
+# Copyright (c) 2016 SwiftStack, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import requests
+
+from swift.common.middleware.s3api.etree import fromstring
+
+import test.functional as tf
+
+from test.functional.s3api import S3ApiBase
+from test.functional.s3api.utils import get_error_code, get_error_msg
+
+
+def setUpModule():
+    # Module-level unittest hook: delegate to the shared test.functional
+    # package setup (test config loading etc. — see tf.setup_package).
+    tf.setup_package()
+
+
+def tearDownModule():
+    # Mirror of setUpModule: release whatever setup_package established.
+    tf.teardown_package()
+
+
+class TestS3ApiPresignedUrls(S3ApiBase):
+ def test_bucket(self):
+ bucket = 'test-bucket'
+ req_objects = ('object', 'object2')
+ max_bucket_listing = tf.cluster_info['s3api'].get(
+ 'max_bucket_listing', 1000)
+
+ # GET Bucket (Without Object)
+ status, _junk, _junk = self.conn.make_request('PUT', bucket)
+ self.assertEqual(status, 200)
+
+ url, headers = self.conn.generate_url_and_headers('GET', bucket)
+ resp = requests.get(url, headers=headers)
+ self.assertEqual(resp.status_code, 200,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertCommonResponseHeaders(resp.headers)
+ self.assertIsNotNone(resp.headers['content-type'])
+ self.assertEqual(resp.headers['content-length'],
+ str(len(resp.content)))
+
+ elem = fromstring(resp.content, 'ListBucketResult')
+ self.assertEqual(elem.find('Name').text, bucket)
+ self.assertIsNone(elem.find('Prefix').text)
+ self.assertIsNone(elem.find('Marker').text)
+ self.assertEqual(elem.find('MaxKeys').text,
+ str(max_bucket_listing))
+ self.assertEqual(elem.find('IsTruncated').text, 'false')
+ objects = elem.findall('./Contents')
+ self.assertEqual(list(objects), [])
+
+ # GET Bucket (With Object)
+ for obj in req_objects:
+ status, _junk, _junk = self.conn.make_request('PUT', bucket, obj)
+ self.assertEqual(
+ status, 200,
+ 'Got %d response while creating %s' % (status, obj))
+
+ resp = requests.get(url, headers=headers)
+ self.assertEqual(resp.status_code, 200,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertCommonResponseHeaders(resp.headers)
+ self.assertIsNotNone(resp.headers['content-type'])
+ self.assertEqual(resp.headers['content-length'],
+ str(len(resp.content)))
+
+ elem = fromstring(resp.content, 'ListBucketResult')
+ self.assertEqual(elem.find('Name').text, bucket)
+ self.assertIsNone(elem.find('Prefix').text)
+ self.assertIsNone(elem.find('Marker').text)
+ self.assertEqual(elem.find('MaxKeys').text,
+ str(max_bucket_listing))
+ self.assertEqual(elem.find('IsTruncated').text, 'false')
+ resp_objects = elem.findall('./Contents')
+ self.assertEqual(len(list(resp_objects)), 2)
+ for o in resp_objects:
+ self.assertIn(o.find('Key').text, req_objects)
+ self.assertIsNotNone(o.find('LastModified').text)
+ self.assertRegexpMatches(
+ o.find('LastModified').text,
+ r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+ self.assertIsNotNone(o.find('ETag').text)
+ self.assertEqual(o.find('Size').text, '0')
+ self.assertIsNotNone(o.find('StorageClass').text is not None)
+ self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
+ self.assertEqual(o.find('Owner/DisplayName').text,
+ self.conn.user_id)
+ # DELETE Bucket
+ for obj in req_objects:
+ self.conn.make_request('DELETE', bucket, obj)
+ url, headers = self.conn.generate_url_and_headers('DELETE', bucket)
+ resp = requests.delete(url, headers=headers)
+ self.assertEqual(resp.status_code, 204,
+ 'Got %d %s' % (resp.status_code, resp.content))
+
+ def test_expiration_limits(self):
+ if os.environ.get('S3_USE_SIGV4'):
+ self._test_expiration_limits_v4()
+ else:
+ self._test_expiration_limits_v2()
+
+ def _test_expiration_limits_v2(self):
+ bucket = 'test-bucket'
+
+ # Expiration date is too far in the future
+ url, headers = self.conn.generate_url_and_headers(
+ 'GET', bucket, expires_in=2 ** 32)
+ resp = requests.get(url, headers=headers)
+ self.assertEqual(resp.status_code, 403,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertEqual(get_error_code(resp.content),
+ 'AccessDenied')
+ self.assertIn('Invalid date (should be seconds since epoch)',
+ get_error_msg(resp.content))
+
+ def _test_expiration_limits_v4(self):
+ bucket = 'test-bucket'
+
+ # Expiration is negative
+ url, headers = self.conn.generate_url_and_headers(
+ 'GET', bucket, expires_in=-1)
+ resp = requests.get(url, headers=headers)
+ self.assertEqual(resp.status_code, 400,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertEqual(get_error_code(resp.content),
+ 'AuthorizationQueryParametersError')
+ self.assertIn('X-Amz-Expires must be non-negative',
+ get_error_msg(resp.content))
+
+ # Expiration date is too far in the future
+ for exp in (7 * 24 * 60 * 60 + 1,
+ 2 ** 63 - 1):
+ url, headers = self.conn.generate_url_and_headers(
+ 'GET', bucket, expires_in=exp)
+ resp = requests.get(url, headers=headers)
+ self.assertEqual(resp.status_code, 400,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertEqual(get_error_code(resp.content),
+ 'AuthorizationQueryParametersError')
+ self.assertIn('X-Amz-Expires must be less than 604800 seconds',
+ get_error_msg(resp.content))
+
+ # Expiration date is *way* too far in the future, or isn't a number
+ for exp in (2 ** 63, 'foo'):
+ url, headers = self.conn.generate_url_and_headers(
+ 'GET', bucket, expires_in=2 ** 63)
+ resp = requests.get(url, headers=headers)
+ self.assertEqual(resp.status_code, 400,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertEqual(get_error_code(resp.content),
+ 'AuthorizationQueryParametersError')
+ self.assertEqual('X-Amz-Expires should be a number',
+ get_error_msg(resp.content))
+
+ def test_object(self):
+ bucket = 'test-bucket'
+ obj = 'object'
+
+ status, _junk, _junk = self.conn.make_request('PUT', bucket)
+ self.assertEqual(status, 200)
+
+ # HEAD/missing object
+ head_url, headers = self.conn.generate_url_and_headers(
+ 'HEAD', bucket, obj)
+ resp = requests.head(head_url, headers=headers)
+ self.assertEqual(resp.status_code, 404,
+ 'Got %d %s' % (resp.status_code, resp.content))
+
+ # Wrong verb
+ resp = requests.get(head_url)
+ self.assertEqual(resp.status_code, 403,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertEqual(get_error_code(resp.content),
+ 'SignatureDoesNotMatch')
+
+ # PUT empty object
+ put_url, headers = self.conn.generate_url_and_headers(
+ 'PUT', bucket, obj)
+ resp = requests.put(put_url, data='', headers=headers)
+ self.assertEqual(resp.status_code, 200,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ # GET empty object
+ get_url, headers = self.conn.generate_url_and_headers(
+ 'GET', bucket, obj)
+ resp = requests.get(get_url, headers=headers)
+ self.assertEqual(resp.status_code, 200,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertEqual(resp.content, '')
+
+ # PUT over object
+ resp = requests.put(put_url, data='foobar', headers=headers)
+ self.assertEqual(resp.status_code, 200,
+ 'Got %d %s' % (resp.status_code, resp.content))
+
+ # GET non-empty object
+ resp = requests.get(get_url, headers=headers)
+ self.assertEqual(resp.status_code, 200,
+ 'Got %d %s' % (resp.status_code, resp.content))
+ self.assertEqual(resp.content, 'foobar')
+
+ # DELETE Object
+ delete_url, headers = self.conn.generate_url_and_headers(
+ 'DELETE', bucket, obj)
+ resp = requests.delete(delete_url, headers=headers)
+ self.assertEqual(resp.status_code, 204,
+ 'Got %d %s' % (resp.status_code, resp.content))
+
+ # Final cleanup
+ status, _junk, _junk = self.conn.make_request('DELETE', bucket)
+ self.assertEqual(status, 204)
+
+
+class TestS3ApiPresignedUrlsSigV4(TestS3ApiPresignedUrls):
+    # Same presigned-URL tests, but with S3_USE_SIGV4 set so the client
+    # helpers and middleware use SigV4 query-string authentication.
+    @classmethod
+    def setUpClass(cls):
+        os.environ['S3_USE_SIGV4'] = "True"
+
+    @classmethod
+    def tearDownClass(cls):
+        # Restore default signing for any later test classes.
+        del os.environ['S3_USE_SIGV4']
+
+    def setUp(self):
+        super(TestS3ApiPresignedUrlsSigV4, self).setUp()
diff --git a/test/functional/s3api/test_service.py b/test/functional/s3api/test_service.py
new file mode 100644
index 000000000..0508f880a
--- /dev/null
+++ b/test/functional/s3api/test_service.py
@@ -0,0 +1,100 @@
+# Copyright (c) 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest2
+import os
+
+import test.functional as tf
+
+from swift.common.middleware.s3api.etree import fromstring
+
+from test.functional.s3api import S3ApiBase
+from test.functional.s3api.s3_test_client import Connection
+from test.functional.s3api.utils import get_error_code
+
+
+def setUpModule():
+    # Module-level unittest hook: delegate to the shared test.functional
+    # package setup (see tf.setup_package).
+    tf.setup_package()
+
+
+def tearDownModule():
+    # Mirror of setUpModule: release whatever setup_package established.
+    tf.teardown_package()
+
+
+class TestS3ApiService(S3ApiBase):
+ def setUp(self):
+ super(TestS3ApiService, self).setUp()
+
+ def test_service(self):
+ # GET Service(without bucket)
+ status, headers, body = self.conn.make_request('GET')
+ self.assertEqual(status, 200)
+
+ self.assertCommonResponseHeaders(headers)
+ self.assertTrue(headers['content-type'] is not None)
+ # TODO; requires consideration
+ # self.assertEqual(headers['transfer-encoding'], 'chunked')
+
+ elem = fromstring(body, 'ListAllMyBucketsResult')
+ buckets = elem.findall('./Buckets/Bucket')
+ self.assertEqual(list(buckets), [])
+ owner = elem.find('Owner')
+ self.assertEqual(self.conn.user_id, owner.find('ID').text)
+ self.assertEqual(self.conn.user_id, owner.find('DisplayName').text)
+
+ # GET Service(with Bucket)
+ req_buckets = ('bucket', 'bucket2')
+ for bucket in req_buckets:
+ self.conn.make_request('PUT', bucket)
+ status, headers, body = self.conn.make_request('GET')
+ self.assertEqual(status, 200)
+
+ elem = fromstring(body, 'ListAllMyBucketsResult')
+ resp_buckets = elem.findall('./Buckets/Bucket')
+ self.assertEqual(len(list(resp_buckets)), 2)
+ for b in resp_buckets:
+ self.assertTrue(b.find('Name').text in req_buckets)
+ self.assertTrue(b.find('CreationDate') is not None)
+
+ def test_service_error_signature_not_match(self):
+ auth_error_conn = Connection(aws_secret_key='invalid')
+ status, headers, body = auth_error_conn.make_request('GET')
+ self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
+ self.assertEqual(headers['content-type'], 'application/xml')
+
+ def test_service_error_no_date_header(self):
+ # Without x-amz-date/Date header, that makes 403 forbidden
+ status, headers, body = self.conn.make_request(
+ 'GET', headers={'Date': '', 'x-amz-date': ''})
+ self.assertEqual(status, 403)
+ self.assertEqual(get_error_code(body), 'AccessDenied')
+ self.assertIn('AWS authentication requires a valid Date '
+ 'or x-amz-date header', body)
+
+
+class TestS3ApiServiceSigV4(TestS3ApiService):
+    # GET Service tests repeated with SigV4 signing enabled via env var.
+    @classmethod
+    def setUpClass(cls):
+        os.environ['S3_USE_SIGV4'] = "True"
+
+    @classmethod
+    def tearDownClass(cls):
+        # Restore default signing for any later test classes.
+        del os.environ['S3_USE_SIGV4']
+
+    def setUp(self):
+        super(TestS3ApiServiceSigV4, self).setUp()
+
+if __name__ == '__main__':
+    unittest2.main()
diff --git a/test/functional/s3api/utils.py b/test/functional/s3api/utils.py
new file mode 100644
index 000000000..e14be35e6
--- /dev/null
+++ b/test/functional/s3api/utils.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from hashlib import md5
+from swift.common.middleware.s3api.etree import fromstring
+
+
+def get_error_code(body):
+ elem = fromstring(body, 'Error')
+ return elem.find('Code').text
+
+
+def get_error_msg(body):
+ elem = fromstring(body, 'Error')
+ return elem.find('Message').text
+
+
+def calculate_md5(body):
+ return md5(body).digest().encode('base64').strip()
diff --git a/test/sample.conf b/test/sample.conf
index 1262b80da..96abd68e4 100644
--- a/test/sample.conf
+++ b/test/sample.conf
@@ -17,6 +17,8 @@ auth_prefix = /auth/
account = test
username = tester
password = testing
+s3_access_key = test:tester
+s3_secret_key = testing
# User on a second account (needs admin access to the account)
account2 = test2
@@ -26,6 +28,9 @@ password2 = testing2
# User on same account as first, but without admin access
username3 = tester3
password3 = testing3
+# s3api requires the same account as the primary one, but different users
+s3_access_key2 = test:tester3
+s3_secret_key2 = testing3
# Fourth user is required for keystone v3 specific tests.
# Account must be in a non-default domain.
diff --git a/test/unit/common/middleware/s3api/__init__.py b/test/unit/common/middleware/s3api/__init__.py
new file mode 100644
index 000000000..d2d8ce259
--- /dev/null
+++ b/test/unit/common/middleware/s3api/__init__.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2011-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from datetime import datetime
+import email
+import time
+
+from swift.common import swob
+
+from swift.common.middleware.s3api.s3api import S3ApiMiddleware
+from helpers import FakeSwift
+from swift.common.middleware.s3api.etree import fromstring
+from swift.common.middleware.s3api.utils import Config
+
+
+class FakeApp(object):
+ def __init__(self):
+ self.swift = FakeSwift()
+
+ def _update_s3_path_info(self, env):
+ """
+ For S3 requests, Swift auth middleware replaces a user name in
+ env['PATH_INFO'] with a valid tenant id.
+ E.g. '/v1/test:tester/bucket/object' will become
+ '/v1/AUTH_test/bucket/object'. This method emulates the behavior.
+ """
+ _, authorization = env['HTTP_AUTHORIZATION'].split(' ')
+ tenant_user, sign = authorization.rsplit(':', 1)
+ tenant, user = tenant_user.rsplit(':', 1)
+
+ path = env['PATH_INFO']
+ env['PATH_INFO'] = path.replace(tenant_user, 'AUTH_' + tenant)
+
+ def __call__(self, env, start_response):
+ if 'HTTP_AUTHORIZATION' in env:
+ self._update_s3_path_info(env)
+
+ return self.swift(env, start_response)
+
+
+class S3ApiTestCase(unittest.TestCase):
+ def __init__(self, name):
+ unittest.TestCase.__init__(self, name)
+
+ def setUp(self):
+ # setup default config
+ self.conf = Config({
+ 'allow_no_owner': False,
+ 'location': 'US',
+ 'dns_compliant_bucket_names': True,
+ 'max_bucket_listing': 1000,
+ 'max_parts_listing': 1000,
+ 'max_multi_delete_objects': 1000,
+ 's3_acl': False,
+ 'storage_domain': 'localhost',
+ 'auth_pipeline_check': True,
+ 'max_upload_part_num': 1000,
+ 'check_bucket_owner': False,
+ 'force_swift_request_proxy_log': False,
+ 'allow_multipart_uploads': True,
+ 'min_segment_size': 5242880,
+ })
+ # those 2 settings has existed the original test setup
+ self.conf.log_level = 'debug'
+
+ self.app = FakeApp()
+ self.swift = self.app.swift
+ self.s3api = S3ApiMiddleware(self.app, self.conf)
+
+ self.swift.register('HEAD', '/v1/AUTH_test',
+ swob.HTTPOk, {}, None)
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, {}, None)
+ self.swift.register('PUT', '/v1/AUTH_test/bucket',
+ swob.HTTPCreated, {}, None)
+ self.swift.register('POST', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, {}, None)
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, {}, None)
+
+ self.swift.register('GET', '/v1/AUTH_test/bucket/object',
+ swob.HTTPOk, {}, "")
+ self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
+ swob.HTTPCreated, {}, None)
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
+ swob.HTTPNoContent, {}, None)
+
+ def _get_error_code(self, body):
+ elem = fromstring(body, 'Error')
+ return elem.find('./Code').text
+
+ def _get_error_message(self, body):
+ elem = fromstring(body, 'Error')
+ return elem.find('./Message').text
+
+ def _test_method_error(self, method, path, response_class, headers={}):
+ if not path.startswith('/'):
+ path = '/' + path # add a missing slash before the path
+
+ uri = '/v1/AUTH_test'
+ if path != '/':
+ uri += path
+
+ self.swift.register(method, uri, response_class, headers, None)
+ headers.update({'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ req = swob.Request.blank(path, environ={'REQUEST_METHOD': method},
+ headers=headers)
+ status, headers, body = self.call_s3api(req)
+ return self._get_error_code(body)
+
+ def get_date_header(self):
+ # email.utils.formatdate returns utc timestamp in default
+ return email.utils.formatdate(time.time())
+
+ def get_v4_amz_date_header(self):
+ return datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
+
+ def call_app(self, req, app=None, expect_exception=False):
+ if app is None:
+ app = self.app
+
+ req.headers.setdefault("User-Agent", "Mozzarella Foxfire")
+
+ status = [None]
+ headers = [None]
+
+ def start_response(s, h, ei=None):
+ status[0] = s
+ headers[0] = swob.HeaderKeyDict(h)
+
+ body_iter = app(req.environ, start_response)
+ body = ''
+ caught_exc = None
+ try:
+ for chunk in body_iter:
+ body += chunk
+ except Exception as exc:
+ if expect_exception:
+ caught_exc = exc
+ else:
+ raise
+
+ if expect_exception:
+ return status[0], headers[0], body, caught_exc
+ else:
+ return status[0], headers[0], body
+
+ def call_s3api(self, req, **kwargs):
+ return self.call_app(req, app=self.s3api, **kwargs)
diff --git a/test/unit/common/middleware/s3api/exceptions.py b/test/unit/common/middleware/s3api/exceptions.py
new file mode 100644
index 000000000..1f8d62f0d
--- /dev/null
+++ b/test/unit/common/middleware/s3api/exceptions.py
@@ -0,0 +1,18 @@
+# Copyright (c) 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class NotMethodException(Exception):
+    # Helper exception for the s3api unit tests; presumably raised when an
+    # attribute expected to be a method is not one — no raise sites are
+    # visible in this module, confirm at callers.
+    pass
diff --git a/test/unit/common/middleware/s3api/helpers.py b/test/unit/common/middleware/s3api/helpers.py
new file mode 100644
index 000000000..71d31f623
--- /dev/null
+++ b/test/unit/common/middleware/s3api/helpers.py
@@ -0,0 +1,185 @@
+# Copyright (c) 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This stuff can't live in test/unit/__init__.py due to its swob dependency.
+
+from copy import deepcopy
+from hashlib import md5
+from swift.common import swob
+from swift.common.utils import split_path
+from swift.common.request_helpers import is_sys_meta
+
+
+class FakeSwift(object):
+    """
+    A good-enough fake Swift proxy server to use in testing middleware.
+    """
+
+    def __init__(self, s3_acl=False):
+        # every request seen, as (method, path, headers) tuples
+        self._calls = []
+        self.req_method_paths = []
+        # the 'swift.source' env value observed for each request
+        self.swift_sources = []
+        # bodies stored by simulated object PUTs: path -> (headers, body)
+        self.uploaded = {}
+        # mapping of (method, path) --> (response class, headers, body)
+        self._responses = {}
+        # when True, emulate the auth-middleware behaviour needed by s3acl
+        self.s3_acl = s3_acl
+
+    def _fake_auth_middleware(self, env):
+        # Emulate just enough of Swift's auth middleware for s3acl tests:
+        # rewrite 'tenant:user' in PATH_INFO to 'AUTH_tenant' and install a
+        # 'swift.authorize' callback.
+        if 'swift.authorize_override' in env:
+            return
+
+        if 'HTTP_AUTHORIZATION' not in env:
+            return
+
+        _, authorization = env['HTTP_AUTHORIZATION'].split(' ')
+        tenant_user, sign = authorization.rsplit(':', 1)
+        tenant, user = tenant_user.rsplit(':', 1)
+
+        path = env['PATH_INFO']
+        env['PATH_INFO'] = path.replace(tenant_user, 'AUTH_' + tenant)
+
+        env['REMOTE_USER'] = 'authorized'
+
+        if env['REQUEST_METHOD'] == 'TEST':
+            # AccessDenied by default at s3acl authenticate
+            env['swift.authorize'] = \
+                lambda req: swob.HTTPForbidden(request=req)
+        else:
+            env['swift.authorize'] = lambda req: None
+
+    def __call__(self, env, start_response):
+        # Dispatch a WSGI request to a canned response registered via
+        # register(); also simulates object PUT/GET round-trips.
+        if self.s3_acl:
+            self._fake_auth_middleware(env)
+
+        req = swob.Request(env)
+        method = env['REQUEST_METHOD']
+        path = env['PATH_INFO']
+        _, acc, cont, obj = split_path(env['PATH_INFO'], 0, 4,
+                                       rest_with_last=True)
+        if env.get('QUERY_STRING'):
+            path += '?' + env['QUERY_STRING']
+
+        if 'swift.authorize' in env:
+            resp = env['swift.authorize'](req)
+            if resp:
+                return resp(env, start_response)
+
+        headers = req.headers
+        self._calls.append((method, path, headers))
+        self.swift_sources.append(env.get('swift.source'))
+
+        try:
+            resp_class, raw_headers, body = self._responses[(method, path)]
+            headers = swob.HeaderKeyDict(raw_headers)
+        except KeyError:
+            # FIXME: suppress print state error for python3 compatibility.
+            # pylint: disable-msg=E1601
+            # fall back to a response registered without the query string
+            if (env.get('QUERY_STRING')
+                    and (method, env['PATH_INFO']) in self._responses):
+                resp_class, raw_headers, body = self._responses[
+                    (method, env['PATH_INFO'])]
+                headers = swob.HeaderKeyDict(raw_headers)
+            # a HEAD may reuse a registered GET response, minus the body
+            elif method == 'HEAD' and ('GET', path) in self._responses:
+                resp_class, raw_headers, _ = self._responses[('GET', path)]
+                body = None
+                headers = swob.HeaderKeyDict(raw_headers)
+            # serve back data stored by an earlier simulated PUT
+            elif method == 'GET' and obj and path in self.uploaded:
+                resp_class = swob.HTTPOk
+                headers, body = self.uploaded[path]
+            else:
+                print("Didn't find %r in allowed responses" %
+                      ((method, path),))
+                raise
+
+        # simulate object PUT
+        if method == 'PUT' and obj:
+            input = env['wsgi.input'].read()
+            etag = md5(input).hexdigest()
+            headers.setdefault('Etag', etag)
+            headers.setdefault('Content-Length', len(input))
+
+            # keep it for subsequent GET requests later
+            self.uploaded[path] = (deepcopy(headers), input)
+            if "CONTENT_TYPE" in env:
+                self.uploaded[path][0]['Content-Type'] = env["CONTENT_TYPE"]
+
+        # range requests ought to work, but copies are special
+        support_range_and_conditional = not (
+            method == 'PUT' and
+            'X-Copy-From' in req.headers and
+            'Range' in req.headers)
+        resp = resp_class(req=req, headers=headers, body=body,
+                          conditional_response=support_range_and_conditional)
+        return resp(env, start_response)
+
+    @property
+    def calls(self):
+        # (method, path) pairs only, headers dropped
+        return [(method, path) for method, path, headers in self._calls]
+
+    @property
+    def calls_with_headers(self):
+        # full (method, path, headers) tuples
+        return self._calls
+
+    @property
+    def call_count(self):
+        return len(self._calls)
+
+    def register(self, method, path, response_class, headers, body):
+        # assuming the path format like /v1/account/container/object
+        resource_map = ['account', 'container', 'object']
+        acos = filter(None, split_path(path, 0, 4, True)[1:])
+        index = len(acos) - 1
+        resource = resource_map[index]
+        if (method, path) in self._responses:
+            old_headers = self._responses[(method, path)][1]
+            headers = headers.copy()
+            for key, value in old_headers.iteritems():
+                if is_sys_meta(resource, key) and key not in headers:
+                    # keep old sysmeta for s3acl
+                    headers.update({key: value})
+
+        self._responses[(method, path)] = (response_class, headers, body)
+
+    def register_unconditionally(self, method, path, response_class, headers,
+                                 body):
+        # register() keeps old sysmeta around, but
+        # register_unconditionally() keeps nothing.
+        self._responses[(method, path)] = (response_class, headers, body)
+
+    def clear_calls(self):
+        del self._calls[:]
+
+
+class UnreadableInput(object):
+ # Some clients will send neither a Content-Length nor a Transfer-Encoding
+ # header, which will cause (some versions of?) eventlet to bomb out on
+ # reads. This class helps us simulate that behavior.
+ def __init__(self, test_case):
+ self.calls = 0
+ self.test_case = test_case
+
+ def read(self, *a, **kw):
+ self.calls += 1
+ # Calling wsgi.input.read with neither a Content-Length nor
+ # a Transfer-Encoding header will raise TypeError (See
+ # https://bugs.launchpad.net/swift3/+bug/1593870 in detail)
+ # This unreadable class emulates the behavior
+ raise TypeError
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.test_case.assertEqual(0, self.calls)
diff --git a/test/unit/common/middleware/s3api/test_acl.py b/test/unit/common/middleware/s3api/test_acl.py
new file mode 100644
index 000000000..3e3ed17c7
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_acl.py
@@ -0,0 +1,230 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import mock
+
+from cStringIO import StringIO
+from hashlib import md5
+
+from swift.common.swob import Request, HTTPAccepted
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+ Element, SubElement, XMLNS_XSI
+from swift.common.middleware.s3api.s3response import InvalidArgument
+from swift.common.middleware.s3api.acl_utils import handle_acl_header
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from test.unit.common.middleware.s3api.helpers import UnreadableInput
+from test.unit.common.middleware.s3api.test_s3_acl import s3acl
+
+
+class TestS3ApiAcl(S3ApiTestCase):
+
+ def setUp(self):
+ super(TestS3ApiAcl, self).setUp()
+ # All ACL API should be called against to existing bucket.
+ self.swift.register('PUT', '/v1/AUTH_test/bucket',
+ HTTPAccepted, {}, None)
+
+ def _check_acl(self, owner, body):
+ elem = fromstring(body, 'AccessControlPolicy')
+ permission = elem.find('./AccessControlList/Grant/Permission').text
+ self.assertEqual(permission, 'FULL_CONTROL')
+ name = elem.find('./AccessControlList/Grant/Grantee/ID').text
+ self.assertEqual(name, owner)
+
+ def test_bucket_acl_GET(self):
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self._check_acl('test:tester', body)
+
+ def test_bucket_acl_PUT(self):
+ elem = Element('AccessControlPolicy')
+ owner = SubElement(elem, 'Owner')
+ SubElement(owner, 'ID').text = 'id'
+ acl = SubElement(elem, 'AccessControlList')
+ grant = SubElement(acl, 'Grant')
+ grantee = SubElement(grant, 'Grantee', nsmap={'xsi': XMLNS_XSI})
+ grantee.set('{%s}type' % XMLNS_XSI, 'Group')
+ SubElement(grantee, 'URI').text = \
+ 'http://acs.amazonaws.com/groups/global/AllUsers'
+ SubElement(grant, 'Permission').text = 'READ'
+
+ xml = tostring(elem)
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'PUT',
+ 'wsgi.input': StringIO(xml)},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'Transfer-Encoding': 'chunked'})
+ self.assertIsNone(req.content_length)
+ self.assertIsNone(req.message_length())
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_bucket_canned_acl_PUT(self):
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'X-AMZ-ACL': 'public-read'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_canned_acl_PUT_with_s3acl(self):
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'X-AMZ-ACL': 'public-read'})
+ with mock.patch('swift.common.middleware.s3api.s3request.'
+ 'handle_acl_header') as mock_handler:
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(mock_handler.call_count, 0)
+
+ def test_bucket_fails_with_both_acl_header_and_xml_PUT(self):
+ elem = Element('AccessControlPolicy')
+ owner = SubElement(elem, 'Owner')
+ SubElement(owner, 'ID').text = 'id'
+ acl = SubElement(elem, 'AccessControlList')
+ grant = SubElement(acl, 'Grant')
+ grantee = SubElement(grant, 'Grantee', nsmap={'xsi': XMLNS_XSI})
+ grantee.set('{%s}type' % XMLNS_XSI, 'Group')
+ SubElement(grantee, 'URI').text = \
+ 'http://acs.amazonaws.com/groups/global/AllUsers'
+ SubElement(grant, 'Permission').text = 'READ'
+
+ xml = tostring(elem)
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'X-AMZ-ACL': 'public-read'},
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body),
+ 'UnexpectedContent')
+
+ def _test_put_no_body(self, use_content_length=False,
+ use_transfer_encoding=False, string_to_md5=''):
+ content_md5 = md5(string_to_md5).digest().encode('base64').strip()
+ with UnreadableInput(self) as fake_input:
+ req = Request.blank(
+ '/bucket?acl',
+ environ={
+ 'REQUEST_METHOD': 'PUT',
+ 'wsgi.input': fake_input},
+ headers={
+ 'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'Content-MD5': content_md5},
+ body='')
+ if not use_content_length:
+ req.environ.pop('CONTENT_LENGTH')
+ if use_transfer_encoding:
+ req.environ['HTTP_TRANSFER_ENCODING'] = 'chunked'
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status, '400 Bad Request')
+ self.assertEqual(self._get_error_code(body), 'MissingSecurityHeader')
+ self.assertEqual(self._get_error_message(body),
+ 'Your request was missing a required header.')
+ self.assertIn('<MissingHeaderName>x-amz-acl</MissingHeaderName>', body)
+
+ @s3acl
+ def test_bucket_fails_with_neither_acl_header_nor_xml_PUT(self):
+ self._test_put_no_body()
+ self._test_put_no_body(string_to_md5='test')
+ self._test_put_no_body(use_content_length=True)
+ self._test_put_no_body(use_content_length=True, string_to_md5='test')
+ self._test_put_no_body(use_transfer_encoding=True)
+ self._test_put_no_body(use_transfer_encoding=True, string_to_md5='zz')
+
+ def test_object_acl_GET(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self._check_acl('test:tester', body)
+
+ def test_invalid_xml(self):
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body='invalid')
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'MalformedACLError')
+
+ def test_handle_acl_header(self):
+ def check_generated_acl_header(acl, targets):
+ req = Request.blank('/bucket',
+ headers={'X-Amz-Acl': acl})
+ handle_acl_header(req)
+ for target in targets:
+ self.assertTrue(target[0] in req.headers)
+ self.assertEqual(req.headers[target[0]], target[1])
+
+ check_generated_acl_header('public-read',
+ [('X-Container-Read', '.r:*,.rlistings')])
+ check_generated_acl_header('public-read-write',
+ [('X-Container-Read', '.r:*,.rlistings'),
+ ('X-Container-Write', '.r:*')])
+ check_generated_acl_header('private',
+ [('X-Container-Read', '.'),
+ ('X-Container-Write', '.')])
+
+ @s3acl(s3acl_only=True)
+ def test_handle_acl_header_with_s3acl(self):
+ def check_generated_acl_header(acl, targets):
+ req = Request.blank('/bucket',
+ headers={'X-Amz-Acl': acl})
+ for target in targets:
+ self.assertTrue(target not in req.headers)
+ self.assertTrue('HTTP_X_AMZ_ACL' in req.environ)
+ # TODO: add transration and assertion for s3acl
+
+ check_generated_acl_header('public-read',
+ ['X-Container-Read'])
+ check_generated_acl_header('public-read-write',
+ ['X-Container-Read', 'X-Container-Write'])
+ check_generated_acl_header('private',
+ ['X-Container-Read', 'X-Container-Write'])
+
+ def test_handle_acl_with_invalid_header_string(self):
+ req = Request.blank('/bucket', headers={'X-Amz-Acl': 'invalid'})
+ with self.assertRaises(InvalidArgument) as cm:
+ handle_acl_header(req)
+ self.assertTrue('argument_name' in cm.exception.info)
+ self.assertEqual(cm.exception.info['argument_name'], 'x-amz-acl')
+ self.assertTrue('argument_value' in cm.exception.info)
+ self.assertEqual(cm.exception.info['argument_value'], 'invalid')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_acl_handlers.py b/test/unit/common/middleware/s3api/test_acl_handlers.py
new file mode 100644
index 000000000..0c3a98cca
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_acl_handlers.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.middleware.s3api.acl_handlers import S3AclHandler, \
+ BucketAclHandler, ObjectAclHandler, BaseAclHandler, PartAclHandler, \
+ UploadAclHandler, UploadsAclHandler, get_acl_handler
+
+
+class TestAclHandlers(unittest.TestCase):
+ def test_get_acl_handler(self):
+ expected_handlers = (('Bucket', BucketAclHandler),
+ ('Object', ObjectAclHandler),
+ ('S3Acl', S3AclHandler),
+ ('Part', PartAclHandler),
+ ('Upload', UploadAclHandler),
+ ('Uploads', UploadsAclHandler),
+ ('Foo', BaseAclHandler))
+ for name, expected in expected_handlers:
+ handler = get_acl_handler(name)
+ self.assertTrue(issubclass(handler, expected))
+
+ def test_handle_acl(self):
+        # we already have tests for s3_acl checking at test_s3_acl.py
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_acl_utils.py b/test/unit/common/middleware/s3api/test_acl_utils.py
new file mode 100644
index 000000000..1ff03c49e
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_acl_utils.py
@@ -0,0 +1,49 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.swob import Request
+from swift.common.middleware.s3api.acl_utils import handle_acl_header
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+
+
+class TestS3ApiAclUtils(S3ApiTestCase):
+
+ def setUp(self):
+ super(TestS3ApiAclUtils, self).setUp()
+
+ def test_handle_acl_header(self):
+ def check_generated_acl_header(acl, targets):
+ req = Request.blank('/bucket',
+ headers={'X-Amz-Acl': acl})
+ handle_acl_header(req)
+ for target in targets:
+ self.assertTrue(target[0] in req.headers)
+ self.assertEqual(req.headers[target[0]], target[1])
+
+ check_generated_acl_header('public-read',
+ [('X-Container-Read', '.r:*,.rlistings')])
+ check_generated_acl_header('public-read-write',
+ [('X-Container-Read', '.r:*,.rlistings'),
+ ('X-Container-Write', '.r:*')])
+ check_generated_acl_header('private',
+ [('X-Container-Read', '.'),
+ ('X-Container-Write', '.')])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_bucket.py b/test/unit/common/middleware/s3api/test_bucket.py
new file mode 100644
index 000000000..e98e0e03a
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_bucket.py
@@ -0,0 +1,755 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import cgi
+
+from swift.common import swob
+from swift.common.swob import Request
+from swift.common.utils import json
+
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+ Element, SubElement
+from swift.common.middleware.s3api.subresource import Owner, encode_acl, \
+ ACLPublicRead
+from swift.common.middleware.s3api.s3request import MAX_32BIT_INT
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from test.unit.common.middleware.s3api.test_s3_acl import s3acl
+from test.unit.common.middleware.s3api.helpers import UnreadableInput
+
+
+class TestS3ApiBucket(S3ApiTestCase):
+ def setup_objects(self):
+ self.objects = (('rose', '2011-01-05T02:19:14.275290', 0, 303),
+ ('viola', '2011-01-05T02:19:14.275290', '0', 3909),
+ ('lily', '2011-01-05T02:19:14.275290', '0', '3909'),
+ ('with space', '2011-01-05T02:19:14.275290', 0, 390),
+ ('with%20space', '2011-01-05T02:19:14.275290', 0, 390))
+
+ objects = map(
+ lambda item: {'name': str(item[0]), 'last_modified': str(item[1]),
+ 'hash': str(item[2]), 'bytes': str(item[3])},
+ list(self.objects))
+ object_list = json.dumps(objects)
+
+ self.prefixes = ['rose', 'viola', 'lily']
+ object_list_subdir = []
+ for p in self.prefixes:
+ object_list_subdir.append({"subdir": p})
+
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments',
+ swob.HTTPNoContent, {}, json.dumps([]))
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/rose',
+ swob.HTTPNoContent, {}, json.dumps([]))
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/viola',
+ swob.HTTPNoContent, {}, json.dumps([]))
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/lily',
+ swob.HTTPNoContent, {}, json.dumps([]))
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/with'
+ ' space', swob.HTTPNoContent, {}, json.dumps([]))
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/with%20'
+ 'space', swob.HTTPNoContent, {}, json.dumps([]))
+ self.swift.register('GET', '/v1/AUTH_test/bucket+segments?format=json'
+ '&marker=with%2520space', swob.HTTPOk, {},
+ json.dumps([]))
+ self.swift.register('GET', '/v1/AUTH_test/bucket+segments?format=json'
+ '&marker=', swob.HTTPOk, {}, object_list)
+ self.swift.register('HEAD', '/v1/AUTH_test/junk', swob.HTTPNoContent,
+ {}, None)
+ self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
+ {}, None)
+ self.swift.register('GET', '/v1/AUTH_test/junk', swob.HTTPOk, {},
+ object_list)
+ self.swift.register(
+ 'GET',
+ '/v1/AUTH_test/junk?delimiter=a&format=json&limit=3&marker=viola',
+ swob.HTTPOk, {}, json.dumps(objects[2:]))
+ self.swift.register('GET', '/v1/AUTH_test/junk-subdir', swob.HTTPOk,
+ {}, json.dumps(object_list_subdir))
+ self.swift.register(
+ 'GET',
+ '/v1/AUTH_test/subdirs?delimiter=/&format=json&limit=3',
+ swob.HTTPOk, {}, json.dumps([
+ {'subdir': 'nothing/'},
+ {'subdir': 'but/'},
+ {'subdir': 'subdirs/'},
+ ]))
+
+ def setUp(self):
+ super(TestS3ApiBucket, self).setUp()
+ self.setup_objects()
+
+ def test_bucket_HEAD(self):
+ req = Request.blank('/junk',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_bucket_HEAD_error(self):
+ req = Request.blank('/nojunk',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '404')
+ self.assertEqual(body, '') # sanity
+
+ def test_bucket_HEAD_slash(self):
+ req = Request.blank('/junk/',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_bucket_HEAD_slash_error(self):
+ req = Request.blank('/nojunk/',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '404')
+
+ @s3acl
+ def test_bucket_GET_error(self):
+ code = self._test_method_error('GET', '/bucket', swob.HTTPUnauthorized)
+ self.assertEqual(code, 'SignatureDoesNotMatch')
+ code = self._test_method_error('GET', '/bucket', swob.HTTPForbidden)
+ self.assertEqual(code, 'AccessDenied')
+ code = self._test_method_error('GET', '/bucket', swob.HTTPNotFound)
+ self.assertEqual(code, 'NoSuchBucket')
+ code = self._test_method_error('GET', '/bucket', swob.HTTPServerError)
+ self.assertEqual(code, 'InternalError')
+
+ def test_bucket_GET(self):
+ bucket_name = 'junk'
+ req = Request.blank('/%s' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListBucketResult')
+ name = elem.find('./Name').text
+ self.assertEqual(name, bucket_name)
+
+ objects = elem.iterchildren('Contents')
+
+ names = []
+ for o in objects:
+ names.append(o.find('./Key').text)
+ self.assertEqual('2011-01-05T02:19:14.275Z',
+ o.find('./LastModified').text)
+ self.assertEqual('"0"', o.find('./ETag').text)
+
+ self.assertEqual(len(names), len(self.objects))
+ for i in self.objects:
+ self.assertTrue(i[0] in names)
+
+ def test_bucket_GET_subdir(self):
+ bucket_name = 'junk-subdir'
+ req = Request.blank('/%s' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ elem = fromstring(body, 'ListBucketResult')
+ name = elem.find('./Name').text
+ self.assertEqual(name, bucket_name)
+
+ prefixes = elem.findall('CommonPrefixes')
+
+ self.assertEqual(len(prefixes), len(self.prefixes))
+ for p in prefixes:
+ self.assertTrue(p.find('./Prefix').text in self.prefixes)
+
+ def test_bucket_GET_is_truncated(self):
+ bucket_name = 'junk'
+
+ req = Request.blank('/%s?max-keys=5' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./IsTruncated').text, 'false')
+
+ req = Request.blank('/%s?max-keys=4' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./IsTruncated').text, 'true')
+
+ req = Request.blank('/subdirs?delimiter=/&max-keys=2',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./IsTruncated').text, 'true')
+ self.assertEqual(elem.find('./NextMarker').text, 'but/')
+
+ def test_bucket_GET_v2_is_truncated(self):
+ bucket_name = 'junk'
+
+ req = Request.blank('/%s?list-type=2&max-keys=5' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./KeyCount').text, '5')
+ self.assertEqual(elem.find('./IsTruncated').text, 'false')
+
+ req = Request.blank('/%s?list-type=2&max-keys=4' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertIsNotNone(elem.find('./NextContinuationToken'))
+ self.assertEqual(elem.find('./KeyCount').text, '4')
+ self.assertEqual(elem.find('./IsTruncated').text, 'true')
+
+ req = Request.blank('/subdirs?list-type=2&delimiter=/&max-keys=2',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertIsNotNone(elem.find('./NextContinuationToken'))
+ self.assertEqual(elem.find('./KeyCount').text, '2')
+ self.assertEqual(elem.find('./IsTruncated').text, 'true')
+
+ def test_bucket_GET_max_keys(self):
+ bucket_name = 'junk'
+
+ req = Request.blank('/%s?max-keys=5' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./MaxKeys').text, '5')
+ _, path = self.swift.calls[-1]
+ _, query_string = path.split('?')
+ args = dict(cgi.parse_qsl(query_string))
+ self.assertEqual(args['limit'], '6')
+
+ req = Request.blank('/%s?max-keys=5000' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./MaxKeys').text, '5000')
+ _, path = self.swift.calls[-1]
+ _, query_string = path.split('?')
+ args = dict(cgi.parse_qsl(query_string))
+ self.assertEqual(args['limit'], '1001')
+
+ def test_bucket_GET_str_max_keys(self):
+ bucket_name = 'junk'
+
+ req = Request.blank('/%s?max-keys=invalid' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ def test_bucket_GET_negative_max_keys(self):
+ bucket_name = 'junk'
+
+ req = Request.blank('/%s?max-keys=-1' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ def test_bucket_GET_over_32bit_int_max_keys(self):
+ bucket_name = 'junk'
+
+ req = Request.blank('/%s?max-keys=%s' %
+ (bucket_name, MAX_32BIT_INT + 1),
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ def test_bucket_GET_passthroughs(self):
+ bucket_name = 'junk'
+ req = Request.blank('/%s?delimiter=a&marker=b&prefix=c' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./Prefix').text, 'c')
+ self.assertEqual(elem.find('./Marker').text, 'b')
+ self.assertEqual(elem.find('./Delimiter').text, 'a')
+ _, path = self.swift.calls[-1]
+ _, query_string = path.split('?')
+ args = dict(cgi.parse_qsl(query_string))
+ self.assertEqual(args['delimiter'], 'a')
+ self.assertEqual(args['marker'], 'b')
+ self.assertEqual(args['prefix'], 'c')
+
+ def test_bucket_GET_v2_passthroughs(self):
+ bucket_name = 'junk'
+ req = Request.blank(
+ '/%s?list-type=2&delimiter=a&start-after=b&prefix=c' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./Prefix').text, 'c')
+ self.assertEqual(elem.find('./StartAfter').text, 'b')
+ self.assertEqual(elem.find('./Delimiter').text, 'a')
+ _, path = self.swift.calls[-1]
+ _, query_string = path.split('?')
+ args = dict(cgi.parse_qsl(query_string))
+ self.assertEqual(args['delimiter'], 'a')
+ # "start-after" is converted to "marker"
+ self.assertEqual(args['marker'], 'b')
+ self.assertEqual(args['prefix'], 'c')
+
+ def test_bucket_GET_with_nonascii_queries(self):
+ bucket_name = 'junk'
+ req = Request.blank(
+ '/%s?delimiter=\xef\xbc\xa1&marker=\xef\xbc\xa2&'
+ 'prefix=\xef\xbc\xa3' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./Prefix').text, '\xef\xbc\xa3')
+ self.assertEqual(elem.find('./Marker').text, '\xef\xbc\xa2')
+ self.assertEqual(elem.find('./Delimiter').text, '\xef\xbc\xa1')
+ _, path = self.swift.calls[-1]
+ _, query_string = path.split('?')
+ args = dict(cgi.parse_qsl(query_string))
+ self.assertEqual(args['delimiter'], '\xef\xbc\xa1')
+ self.assertEqual(args['marker'], '\xef\xbc\xa2')
+ self.assertEqual(args['prefix'], '\xef\xbc\xa3')
+
+ def test_bucket_GET_v2_with_nonascii_queries(self):
+ bucket_name = 'junk'
+ req = Request.blank(
+ '/%s?list-type=2&delimiter=\xef\xbc\xa1&start-after=\xef\xbc\xa2&'
+ 'prefix=\xef\xbc\xa3' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./Prefix').text, '\xef\xbc\xa3')
+ self.assertEqual(elem.find('./StartAfter').text, '\xef\xbc\xa2')
+ self.assertEqual(elem.find('./Delimiter').text, '\xef\xbc\xa1')
+ _, path = self.swift.calls[-1]
+ _, query_string = path.split('?')
+ args = dict(cgi.parse_qsl(query_string))
+ self.assertEqual(args['delimiter'], '\xef\xbc\xa1')
+ self.assertEqual(args['marker'], '\xef\xbc\xa2')
+ self.assertEqual(args['prefix'], '\xef\xbc\xa3')
+
+ def test_bucket_GET_with_delimiter_max_keys(self):
+ bucket_name = 'junk'
+ req = Request.blank('/%s?delimiter=a&max-keys=2' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./NextMarker').text, 'viola')
+ self.assertEqual(elem.find('./MaxKeys').text, '2')
+ self.assertEqual(elem.find('./IsTruncated').text, 'true')
+
+ def test_bucket_GET_v2_with_delimiter_max_keys(self):
+ bucket_name = 'junk'
+ req = Request.blank(
+ '/%s?list-type=2&delimiter=a&max-keys=2' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ elem = fromstring(body, 'ListBucketResult')
+ next_token = elem.find('./NextContinuationToken')
+ self.assertIsNotNone(next_token)
+ self.assertEqual(elem.find('./MaxKeys').text, '2')
+ self.assertEqual(elem.find('./IsTruncated').text, 'true')
+
+ req = Request.blank(
+ '/%s?list-type=2&delimiter=a&max-keys=2&continuation-token=%s' %
+ (bucket_name, next_token.text),
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ elem = fromstring(body, 'ListBucketResult')
+ names = [o.find('./Key').text for o in elem.iterchildren('Contents')]
+ self.assertEqual(names[0], 'lily')
+
+ def test_bucket_GET_subdir_with_delimiter_max_keys(self):
+ bucket_name = 'junk-subdir'
+ req = Request.blank('/%s?delimiter=a&max-keys=1' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ elem = fromstring(body, 'ListBucketResult')
+ self.assertEqual(elem.find('./NextMarker').text, 'rose')
+ self.assertEqual(elem.find('./MaxKeys').text, '1')
+ self.assertEqual(elem.find('./IsTruncated').text, 'true')
+
+ def test_bucket_GET_v2_fetch_owner(self):
+ bucket_name = 'junk'
+ req = Request.blank('/%s?list-type=2' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListBucketResult')
+ name = elem.find('./Name').text
+ self.assertEqual(name, bucket_name)
+
+ objects = elem.iterchildren('Contents')
+ for o in objects:
+ self.assertIsNone(o.find('./Owner'))
+
+ req = Request.blank('/%s?list-type=2&fetch-owner=true' % bucket_name,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListBucketResult')
+ name = elem.find('./Name').text
+ self.assertEqual(name, bucket_name)
+
+ objects = elem.iterchildren('Contents')
+ for o in objects:
+ self.assertIsNotNone(o.find('./Owner'))
+
+ @s3acl
+ def test_bucket_PUT_error(self):
+ code = self._test_method_error('PUT', '/bucket', swob.HTTPCreated,
+ headers={'Content-Length': 'a'})
+ self.assertEqual(code, 'InvalidArgument')
+ code = self._test_method_error('PUT', '/bucket', swob.HTTPCreated,
+ headers={'Content-Length': '-1'})
+ self.assertEqual(code, 'InvalidArgument')
+ code = self._test_method_error('PUT', '/bucket', swob.HTTPUnauthorized)
+ self.assertEqual(code, 'SignatureDoesNotMatch')
+ code = self._test_method_error('PUT', '/bucket', swob.HTTPForbidden)
+ self.assertEqual(code, 'AccessDenied')
+ code = self._test_method_error('PUT', '/bucket', swob.HTTPAccepted)
+ self.assertEqual(code, 'BucketAlreadyExists')
+ code = self._test_method_error('PUT', '/bucket', swob.HTTPServerError)
+ self.assertEqual(code, 'InternalError')
+ code = self._test_method_error(
+ 'PUT', '/bucket+bucket', swob.HTTPCreated)
+ self.assertEqual(code, 'InvalidBucketName')
+ code = self._test_method_error(
+ 'PUT', '/192.168.11.1', swob.HTTPCreated)
+ self.assertEqual(code, 'InvalidBucketName')
+ code = self._test_method_error(
+ 'PUT', '/bucket.-bucket', swob.HTTPCreated)
+ self.assertEqual(code, 'InvalidBucketName')
+ code = self._test_method_error(
+ 'PUT', '/bucket-.bucket', swob.HTTPCreated)
+ self.assertEqual(code, 'InvalidBucketName')
+ code = self._test_method_error('PUT', '/bucket*', swob.HTTPCreated)
+ self.assertEqual(code, 'InvalidBucketName')
+ code = self._test_method_error('PUT', '/b', swob.HTTPCreated)
+ self.assertEqual(code, 'InvalidBucketName')
+ code = self._test_method_error(
+ 'PUT', '/%s' % ''.join(['b' for x in xrange(64)]),
+ swob.HTTPCreated)
+ self.assertEqual(code, 'InvalidBucketName')
+
+ @s3acl
+ def test_bucket_PUT(self):
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(body, '')
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(headers['Location'], '/bucket')
+
+ # Apparently some clients will include a chunked transfer-encoding
+ # even with no body
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'Transfer-Encoding': 'chunked'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(body, '')
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(headers['Location'], '/bucket')
+
+ with UnreadableInput(self) as fake_input:
+ req = Request.blank(
+ '/bucket',
+ environ={'REQUEST_METHOD': 'PUT',
+ 'wsgi.input': fake_input},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(body, '')
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(headers['Location'], '/bucket')
+
+ def _test_bucket_PUT_with_location(self, root_element):
+ elem = Element(root_element)
+ SubElement(elem, 'LocationConstraint').text = 'US'
+ xml = tostring(elem)
+
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl
+ def test_bucket_PUT_with_location(self):
+ self._test_bucket_PUT_with_location('CreateBucketConfiguration')
+
+ @s3acl
+ def test_bucket_PUT_with_ami_location(self):
+ # ec2-ami-tools apparently uses CreateBucketConstraint instead?
+ self._test_bucket_PUT_with_location('CreateBucketConstraint')
+
+ @s3acl
+ def test_bucket_PUT_with_strange_location(self):
+ # Even crazier: it doesn't seem to matter
+ self._test_bucket_PUT_with_location('foo')
+
+ def test_bucket_PUT_with_canned_acl(self):
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'X-Amz-Acl': 'public-read'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertTrue('X-Container-Read' in headers)
+ self.assertEqual(headers.get('X-Container-Read'), '.r:*,.rlistings')
+ self.assertNotIn('X-Container-Sysmeta-S3api-Acl', headers)
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_PUT_with_canned_s3acl(self):
+ account = 'test:tester'
+ acl = \
+ encode_acl('container', ACLPublicRead(Owner(account, account)))
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'X-Amz-Acl': 'public-read'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertNotIn('X-Container-Read', headers)
+ self.assertIn('X-Container-Sysmeta-S3api-Acl', headers)
+ self.assertEqual(headers.get('X-Container-Sysmeta-S3api-Acl'),
+ acl['x-container-sysmeta-s3api-acl'])
+
+ @s3acl
+ def test_bucket_PUT_with_location_error(self):
+ elem = Element('CreateBucketConfiguration')
+ SubElement(elem, 'LocationConstraint').text = 'XXX'
+ xml = tostring(elem)
+
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body),
+ 'InvalidLocationConstraint')
+
+ @s3acl
+ def test_bucket_PUT_with_location_invalid_xml(self):
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body='invalid_xml')
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'MalformedXML')
+
+ def _test_method_error_delete(self, path, sw_resp):
+ self.swift.register('HEAD', '/v1/AUTH_test' + path, sw_resp, {}, None)
+ return self._test_method_error('DELETE', path, sw_resp)
+
+ @s3acl
+ def test_bucket_DELETE_error(self):
+ code = self._test_method_error_delete('/bucket', swob.HTTPUnauthorized)
+ self.assertEqual(code, 'SignatureDoesNotMatch')
+ code = self._test_method_error_delete('/bucket', swob.HTTPForbidden)
+ self.assertEqual(code, 'AccessDenied')
+ code = self._test_method_error_delete('/bucket', swob.HTTPNotFound)
+ self.assertEqual(code, 'NoSuchBucket')
+ code = self._test_method_error_delete('/bucket', swob.HTTPServerError)
+ self.assertEqual(code, 'InternalError')
+
+ # bucket not empty is now validated at s3api
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
+ {'X-Container-Object-Count': '1'}, None)
+ code = self._test_method_error('DELETE', '/bucket', swob.HTTPConflict)
+ self.assertEqual(code, 'BucketNotEmpty')
+
+ @s3acl
+ def test_bucket_DELETE(self):
+ # overwrite default HEAD to return x-container-object-count
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
+ {'X-Container-Object-Count': 0}, None)
+
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '204')
+
+ @s3acl
+ def test_bucket_DELETE_error_while_segment_bucket_delete(self):
+ # An error occurred while deleting segment objects
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/lily',
+ swob.HTTPServiceUnavailable, {}, json.dumps([]))
+ # overwrite default HEAD to return x-container-object-count
+ self.swift.register(
+ 'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
+ {'X-Container-Object-Count': 0}, None)
+
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '503')
+ called = [(method, path) for method, path, _ in
+ self.swift.calls_with_headers]
+ # Don't delete original bucket when error occurred in segment container
+ self.assertNotIn(('DELETE', '/v1/AUTH_test/bucket'), called)
+
+ def _test_bucket_for_s3acl(self, method, account):
+ req = Request.blank('/bucket',
+ environ={'REQUEST_METHOD': method},
+ headers={'Authorization': 'AWS %s:hmac' % account,
+ 'Date': self.get_date_header()})
+
+ return self.call_s3api(req)
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_GET_without_permission(self):
+ status, headers, body = self._test_bucket_for_s3acl('GET',
+ 'test:other')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_GET_with_read_permission(self):
+ status, headers, body = self._test_bucket_for_s3acl('GET',
+ 'test:read')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_GET_with_fullcontrol_permission(self):
+ status, headers, body = \
+ self._test_bucket_for_s3acl('GET', 'test:full_control')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_GET_with_owner_permission(self):
+ status, headers, body = self._test_bucket_for_s3acl('GET',
+ 'test:tester')
+ self.assertEqual(status.split()[0], '200')
+
+ def _test_bucket_GET_canned_acl(self, bucket):
+ req = Request.blank('/%s' % bucket,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+
+ return self.call_s3api(req)
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_GET_authenticated_users(self):
+ status, headers, body = \
+ self._test_bucket_GET_canned_acl('authenticated')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_GET_all_users(self):
+ status, headers, body = self._test_bucket_GET_canned_acl('public')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_DELETE_without_permission(self):
+ status, headers, body = self._test_bucket_for_s3acl('DELETE',
+ 'test:other')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+ # Don't delete anything in backend Swift
+ called = [method for method, _, _ in self.swift.calls_with_headers]
+ self.assertNotIn('DELETE', called)
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_DELETE_with_write_permission(self):
+ status, headers, body = self._test_bucket_for_s3acl('DELETE',
+ 'test:write')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+ # Don't delete anything in backend Swift
+ called = [method for method, _, _ in self.swift.calls_with_headers]
+ self.assertNotIn('DELETE', called)
+
+ @s3acl(s3acl_only=True)
+ def test_bucket_DELETE_with_fullcontrol_permission(self):
+ status, headers, body = \
+ self._test_bucket_for_s3acl('DELETE', 'test:full_control')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+ # Don't delete anything in backend Swift
+ called = [method for method, _, _ in self.swift.calls_with_headers]
+ self.assertNotIn('DELETE', called)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_cfg.py b/test/unit/common/middleware/s3api/test_cfg.py
new file mode 100644
index 000000000..2a0347c14
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_cfg.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.middleware.s3api.utils import Config
+
+
class TestS3ApiCfg(unittest.TestCase):
    """Tests for swift.common.middleware.s3api.utils.Config."""

    def test_config(self):
        # The initial values establish each key's type.
        conf = Config(
            {
                'a': 'str',
                'b': 10,
                'c': True,
            }
        )

        # update() takes string values (as read from a paste config) and
        # they end up coerced to the existing value's type, per the
        # assertions below: 'b' becomes the int 100, 'c' the bool False.
        conf.update(
            {
                'a': 'str2',
                'b': '100',
                'c': 'false',
            }
        )

        self.assertEqual(conf['a'], 'str2')
        self.assertEqual(conf['b'], 100)
        self.assertEqual(conf['c'], False)


if __name__ == '__main__':
    unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_etree.py b/test/unit/common/middleware/s3api/test_etree.py
new file mode 100644
index 000000000..be2249ae0
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_etree.py
@@ -0,0 +1,73 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.middleware.s3api import etree
+
+
class TestS3ApiEtree(unittest.TestCase):
    """Tests for the namespace-normalizing etree wrapper used by s3api."""

    def test_xml_namespace(self):
        def build(ns, prefix):
            return '<A %(ns)s><%(prefix)sB>C</%(prefix)sB></A>' % \
                ({'ns': ns, 'prefix': prefix})

        # A document with no namespace parses the same as one using the
        # S3 namespace.
        doc = etree.fromstring(build('', ''))
        self.assertEqual(doc.find('./B').text, 'C')

        # The S3 namespace is stripped, whether default or prefixed.
        doc = etree.fromstring(build('xmlns="%s"' % etree.XMLNS_S3, ''))
        self.assertEqual(doc.find('./B').text, 'C')
        doc = etree.fromstring(build('xmlns:s3="%s"' % etree.XMLNS_S3, 's3:'))
        self.assertEqual(doc.find('./B').text, 'C')

        # A foreign *default* namespace also behaves like no namespace...
        doc = etree.fromstring(build('xmlns="http://example.com/"', ''))
        self.assertEqual(doc.find('./B').text, 'C')

        # ...but elements under a foreign *prefixed* namespace are not
        # found by an unqualified search.
        doc = etree.fromstring(build('xmlns:s3="http://example.com/"', 's3:'))
        self.assertIsNone(doc.find('./B'))

    def test_xml_with_comments(self):
        # XML comments must be ignored by the parser.
        doc = etree.fromstring('<A><!-- comment --><B>C</B></A>')
        self.assertEqual(doc.find('./B').text, 'C')

    def test_tostring_with_nonascii_text(self):
        # Non-ASCII (UTF-8 byte) text round-trips through tostring()
        # and stays a native str.
        root = etree.Element('Test')
        child = etree.SubElement(root, 'FOO')
        child.text = '\xef\xbc\xa1'
        self.assertTrue(isinstance(child.text, str))
        serialized = etree.tostring(root)
        self.assertTrue(isinstance(serialized, str))

    def test_fromstring_with_nonascii_text(self):
        # Parsing a UTF-8 declared document preserves non-ASCII text.
        input_str = '<?xml version="1.0" encoding="UTF-8"?>\n' \
            '<Test><FOO>\xef\xbc\xa1</FOO></Test>'
        parsed = etree.fromstring(input_str)
        text = parsed.find('FOO').text
        self.assertEqual(text, '\xef\xbc\xa1')
        self.assertTrue(isinstance(text, str))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_helpers.py b/test/unit/common/middleware/s3api/test_helpers.py
new file mode 100644
index 000000000..fa0ae5238
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_helpers.py
@@ -0,0 +1,69 @@
+# Copyright (c) 2013 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This stuff can't live in test/unit/__init__.py due to its swob dependency.
+
+import unittest
+from test.unit.common.middleware.s3api.helpers import FakeSwift
+from swift.common.middleware.s3api.utils import sysmeta_header
+from swift.common.swob import HeaderKeyDict
+from mock import MagicMock
+
+
class S3ApiHelperTestCase(unittest.TestCase):
    """Sanity checks for the FakeSwift test double used by the s3api tests."""

    def setUp(self):
        # Registration key shared by every test below.
        self.method = 'HEAD'
        self.path = '/v1/AUTH_test/bucket'

    def _check_headers(self, swift, method, path, headers):
        # Reach into FakeSwift's internal registry to inspect the headers
        # a request for (method, path) would be answered with.
        _, response_headers, _ = swift._responses[(method, path)]
        self.assertEqual(headers, response_headers)

    def test_fake_swift_sysmeta(self):
        # Re-registering *without* the sysmeta header must not drop the
        # previously registered sysmeta value.
        swift = FakeSwift()
        orig_headers = HeaderKeyDict()
        orig_headers.update({sysmeta_header('container', 'acl'): 'test',
                             'x-container-meta-foo': 'bar'})

        swift.register(self.method, self.path, MagicMock(), orig_headers, None)

        self._check_headers(swift, self.method, self.path, orig_headers)

        new_headers = orig_headers.copy()
        del new_headers[sysmeta_header('container', 'acl').title()]
        swift.register(self.method, self.path, MagicMock(), new_headers, None)

        # Still the original headers: the sysmeta survived re-registration.
        self._check_headers(swift, self.method, self.path, orig_headers)

    def test_fake_swift_sysmeta_overwrite(self):
        # Re-registering *with* a new sysmeta value must replace the old one.
        swift = FakeSwift()
        orig_headers = HeaderKeyDict()
        orig_headers.update({sysmeta_header('container', 'acl'): 'test',
                             'x-container-meta-foo': 'bar'})
        swift.register(self.method, self.path, MagicMock(), orig_headers, None)

        self._check_headers(swift, self.method, self.path, orig_headers)

        new_headers = orig_headers.copy()
        new_headers[sysmeta_header('container', 'acl').title()] = 'bar'

        swift.register(self.method, self.path, MagicMock(), new_headers, None)

        self.assertFalse(orig_headers == new_headers)
        self._check_headers(swift, self.method, self.path, new_headers)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_location.py b/test/unit/common/middleware/s3api/test_location.py
new file mode 100644
index 000000000..c8524e670
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_location.py
@@ -0,0 +1,51 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.swob import Request
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from swift.common.middleware.s3api.etree import fromstring
+
+
class TestS3ApiLocation(S3ApiTestCase):
    """Tests for the GET ?location bucket sub-resource."""

    def _get_location(self):
        # Issue GET /bucket?location and return (status, body).
        req = Request.blank('/bucket?location',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        return status, body

    def test_object_location(self):
        # With no location configured the LocationConstraint element
        # is present but empty.
        status, body = self._get_location()
        self.assertEqual(status.split()[0], '200')
        result = fromstring(body, 'LocationConstraint')
        self.assertIsNone(result.text)

    def test_object_location_setting_as_us_west_1(self):
        # A configured location is echoed back as the element's text.
        self.s3api.conf.location = 'us-west-1'
        status, body = self._get_location()
        self.assertEqual(status.split()[0], '200')
        result = fromstring(body, 'LocationConstraint')
        self.assertEqual(result.text, 'us-west-1')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_logging.py b/test/unit/common/middleware/s3api/test_logging.py
new file mode 100644
index 000000000..067f109d8
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_logging.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.swob import Request
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from swift.common.middleware.s3api.etree import fromstring
+
+
class TestS3ApiLogging(S3ApiTestCase):
    """Tests for the GET/PUT ?logging bucket sub-resource.

    Note: the redundant setUp() that only called super() was removed; the
    base class setUp runs regardless.
    """

    def test_bucket_logging_GET(self):
        # GET logging on a bucket returns an empty BucketLoggingStatus.
        req = Request.blank('/bucket?logging',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        xml = fromstring(body, 'BucketLoggingStatus')
        self.assertEqual(xml.keys(), [])
        self.assertEqual(status.split()[0], '200')

    def test_object_logging_GET_error(self):
        # ?logging is a bucket sub-resource; an object path is rejected.
        req = Request.blank('/bucket/object?logging',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NoLoggingStatusForKey')

    def test_bucket_logging_PUT(self):
        req = Request.blank('/bucket?logging',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        # FIXME: Support PUT logging
        # self.assertEqual(status, 201)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')

    def test_object_logging_PUT_error(self):
        req = Request.blank('/bucket/object?logging',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NoLoggingStatusForKey')


if __name__ == '__main__':
    unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_multi_delete.py b/test/unit/common/middleware/s3api/test_multi_delete.py
new file mode 100644
index 000000000..c2ec2e649
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_multi_delete.py
@@ -0,0 +1,284 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from datetime import datetime
+from hashlib import md5
+
+from six.moves import urllib
+from swift.common import swob
+from swift.common.swob import Request
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from test.unit.common.middleware.s3api.helpers import UnreadableInput
+from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
+ SubElement
+from test.unit.common.middleware.s3api.test_s3_acl import s3acl
+
+
class TestS3ApiMultiDelete(S3ApiTestCase):
    """Tests for the POST ?delete (multi-object delete) API."""

    def setUp(self):
        super(TestS3ApiMultiDelete, self).setUp()
        # Key1 exists; Key2 does not.
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPOk, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)

    @s3acl
    def test_object_multi_DELETE_to_object(self):
        # A ?delete POST addressed to an object (not a bucket) still works.
        elem = Element('Delete')
        obj = SubElement(elem, 'Object')
        SubElement(obj, 'Key').text = 'object'
        body = tostring(elem, use_s3ns=False)
        content_md5 = md5(body).digest().encode('base64').strip()

        req = Request.blank('/bucket/object?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)

        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

    @s3acl
    def test_object_multi_DELETE(self):
        # Key3 is a static large object, so its DELETE must carry
        # multipart-manifest=delete through to backend Swift.
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key3',
                            swob.HTTPOk,
                            {'x-static-large-object': 'True'},
                            None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key3',
                            swob.HTTPOk, {}, None)

        elem = Element('Delete')
        for key in ['Key1', 'Key2', 'Key3']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        body = tostring(elem, use_s3ns=False)
        content_md5 = md5(body).digest().encode('base64').strip()

        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Content-Type': 'multipart/form-data',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

        # All three keys report Deleted (a 404 on Key2 still counts).
        elem = fromstring(body)
        self.assertEqual(len(elem.findall('Deleted')), 3)
        _, path, _ = self.swift.calls_with_headers[-1]
        path, query_string = path.split('?', 1)
        self.assertEqual(path, '/v1/AUTH_test/bucket/Key3')
        query = dict(urllib.parse.parse_qsl(query_string))
        self.assertEqual(query['multipart-manifest'], 'delete')

    @s3acl
    def test_object_multi_DELETE_quiet(self):
        # With Quiet=true, successful deletions are omitted from the result.
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)

        elem = Element('Delete')
        SubElement(elem, 'Quiet').text = 'true'
        for key in ['Key1', 'Key2']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        body = tostring(elem, use_s3ns=False)
        content_md5 = md5(body).digest().encode('base64').strip()

        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body)
        self.assertEqual(len(elem.findall('Deleted')), 0)

    @s3acl
    def test_object_multi_DELETE_no_key(self):
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)

        elem = Element('Delete')
        SubElement(elem, 'Quiet').text = 'true'
        # Two Object elements whose Key is deliberately left empty; the
        # loop variable was unused so it is named '_'.
        for _ in ['Key1', 'Key2']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key')
        body = tostring(elem, use_s3ns=False)
        content_md5 = md5(body).digest().encode('base64').strip()

        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'UserKeyMustBeSpecified')

    @s3acl
    def test_object_multi_DELETE_with_invalid_md5(self):
        # A Content-MD5 that doesn't match the body is rejected.
        elem = Element('Delete')
        for key in ['Key1', 'Key2']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        body = tostring(elem, use_s3ns=False)

        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': 'XXXX'},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidDigest')

    @s3acl
    def test_object_multi_DELETE_without_md5(self):
        # Content-MD5 is mandatory for multi-delete.
        elem = Element('Delete')
        for key in ['Key1', 'Key2']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        body = tostring(elem, use_s3ns=False)

        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidRequest')

    @s3acl
    def test_object_multi_DELETE_too_many_keys(self):
        # One key beyond max_multi_delete_objects is MalformedXML.
        elem = Element('Delete')
        for i in range(self.conf.max_multi_delete_objects + 1):
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = str(i)
        body = tostring(elem, use_s3ns=False)
        content_md5 = md5(body).digest().encode('base64').strip()

        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'MalformedXML')

    def _test_object_multi_DELETE(self, account):
        # Issue a multi-delete of self.keys as ``account``; self.keys is
        # stashed on the instance for the permission tests' assertions.
        self.keys = ['Key1', 'Key2']
        self.swift.register(
            'DELETE', '/v1/AUTH_test/bucket/%s' % self.keys[0],
            swob.HTTPNoContent, {}, None)
        self.swift.register(
            'DELETE', '/v1/AUTH_test/bucket/%s' % self.keys[1],
            swob.HTTPNotFound, {}, None)

        elem = Element('Delete')
        for key in self.keys:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        body = tostring(elem, use_s3ns=False)
        content_md5 = md5(body).digest().encode('base64').strip()

        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS %s:hmac' % account,
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        req.date = datetime.now()
        req.content_type = 'text/plain'

        return self.call_s3api(req)

    @s3acl(s3acl_only=True)
    def test_object_multi_DELETE_without_permission(self):
        # The request itself succeeds (200) but each key reports an
        # AccessDenied Error element.
        status, headers, body = self._test_object_multi_DELETE('test:other')
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body)
        errors = elem.findall('Error')
        self.assertEqual(len(errors), len(self.keys))
        for e in errors:
            self.assertTrue(e.find('Key').text in self.keys)
            self.assertEqual(e.find('Code').text, 'AccessDenied')
            self.assertEqual(e.find('Message').text, 'Access Denied.')

    @s3acl(s3acl_only=True)
    def test_object_multi_DELETE_with_write_permission(self):
        # WRITE on the bucket allows deleting its objects.
        status, headers, body = self._test_object_multi_DELETE('test:write')
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body)
        self.assertEqual(len(elem.findall('Deleted')), len(self.keys))

    @s3acl(s3acl_only=True)
    def test_object_multi_DELETE_with_fullcontrol_permission(self):
        status, headers, body = \
            self._test_object_multi_DELETE('test:full_control')
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body)
        self.assertEqual(len(elem.findall('Deleted')), len(self.keys))

    def _test_no_body(self, use_content_length=False,
                      use_transfer_encoding=False, string_to_md5=''):
        # A bodiless multi-delete must fail with MissingRequestBodyError
        # without ever reading the (unreadable) wsgi.input.
        content_md5 = md5(string_to_md5).digest().encode('base64').strip()
        with UnreadableInput(self) as fake_input:
            req = Request.blank(
                '/bucket?delete',
                environ={
                    'REQUEST_METHOD': 'POST',
                    'wsgi.input': fake_input},
                headers={
                    'Authorization': 'AWS test:tester:hmac',
                    'Date': self.get_date_header(),
                    'Content-MD5': content_md5},
                body='')
            if not use_content_length:
                req.environ.pop('CONTENT_LENGTH')
            if use_transfer_encoding:
                req.environ['HTTP_TRANSFER_ENCODING'] = 'chunked'
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '400 Bad Request')
        self.assertEqual(self._get_error_code(body), 'MissingRequestBodyError')

    @s3acl
    def test_object_multi_DELETE_empty_body(self):
        self._test_no_body()
        self._test_no_body(string_to_md5='test')
        self._test_no_body(use_content_length=True)
        self._test_no_body(use_content_length=True, string_to_md5='test')
        self._test_no_body(use_transfer_encoding=True)
        self._test_no_body(use_transfer_encoding=True, string_to_md5='test')


if __name__ == '__main__':
    unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_multi_upload.py b/test/unit/common/middleware/s3api/test_multi_upload.py
new file mode 100644
index 000000000..51cb58f0d
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_multi_upload.py
@@ -0,0 +1,1742 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+from hashlib import md5
+from mock import patch
+import os
+import time
+import unittest
+from urllib import quote
+
+from swift.common import swob
+from swift.common.swob import Request
+from swift.common.utils import json
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from test.unit.common.middleware.s3api.helpers import UnreadableInput
+from swift.common.middleware.s3api.etree import fromstring, tostring
+from swift.common.middleware.s3api.subresource import Owner, Grant, User, ACL, \
+ encode_acl, decode_acl, ACLPublicRead
+from test.unit.common.middleware.s3api.test_s3_acl import s3acl
+from swift.common.middleware.s3api.utils import sysmeta_header, mktime, \
+ S3Timestamp
+from swift.common.middleware.s3api.s3request import MAX_32BIT_INT
+from swift.common.middleware.s3api.controllers.multi_upload import \
+ MULTIUPLOAD_SUFFIX
+
# Request body for CompleteMultipartUpload: part 1 carries a bare ETag,
# part 2 a double-quoted one, so both accepted forms are exercised.
xml = '<CompleteMultipartUpload>' \
    '<Part>' \
    '<PartNumber>1</PartNumber>' \
    '<ETag>HASH</ETag>' \
    '</Part>' \
    '<Part>' \
    '<PartNumber>2</PartNumber>' \
    '<ETag>"HASH"</ETag>' \
    '</Part>' \
    '</CompleteMultipartUpload>'

# (name, last_modified, hash, bytes) rows for segment-container listings.
objects_template = \
    (('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 100),
     ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 200))

# Listing rows covering three in-progress uploads of 'object' (upload ids
# X, Y, Z) plus one upload under 'subdir/', each with two parts.
# NOTE(review): both subdir part sizes are 41 — presumably a copy-paste
# slip (41/42), though the listing tests only count entries; confirm.
multiparts_template = \
    (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
     ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
     ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21),
     ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2),
     ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12),
     ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22),
     ('object/Z', '2014-05-07T19:47:56.592270', 'HASH', 3),
     ('object/Z/1', '2014-05-07T19:47:57.592270', 'HASH', 13),
     ('object/Z/2', '2014-05-07T19:47:58.592270', 'HASH', 23),
     ('subdir/object/Z', '2014-05-07T19:47:58.592270', 'HASH', 4),
     ('subdir/object/Z/1', '2014-05-07T19:47:58.592270', 'HASH', 41),
     ('subdir/object/Z/2', '2014-05-07T19:47:58.592270', 'HASH', 41))
+
+
+class TestS3ApiMultiUpload(S3ApiTestCase):
+
+ def setUp(self):
+ super(TestS3ApiMultiUpload, self).setUp()
+
+ segment_bucket = '/v1/AUTH_test/bucket+segments'
+ self.etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+ self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT'
+ put_headers = {'etag': self.etag, 'last-modified': self.last_modified}
+
+ self.s3api.conf.min_segment_size = 1
+
+ objects = map(lambda item: {'name': item[0], 'last_modified': item[1],
+ 'hash': item[2], 'bytes': item[3]},
+ objects_template)
+ object_list = json.dumps(objects)
+
+ self.swift.register('PUT', segment_bucket,
+ swob.HTTPAccepted, {}, None)
+ self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
+ object_list)
+ self.swift.register('HEAD', segment_bucket + '/object/X',
+ swob.HTTPOk,
+ {'x-object-meta-foo': 'bar',
+ 'content-type': 'application/directory',
+ 'x-object-sysmeta-s3api-has-content-type': 'yes',
+ 'x-object-sysmeta-s3api-content-type':
+ 'baz/quux'}, None)
+ self.swift.register('PUT', segment_bucket + '/object/X',
+ swob.HTTPCreated, {}, None)
+ self.swift.register('DELETE', segment_bucket + '/object/X',
+ swob.HTTPNoContent, {}, None)
+ self.swift.register('GET', segment_bucket + '/object/invalid',
+ swob.HTTPNotFound, {}, None)
+ self.swift.register('PUT', segment_bucket + '/object/X/1',
+ swob.HTTPCreated, put_headers, None)
+ self.swift.register('DELETE', segment_bucket + '/object/X/1',
+ swob.HTTPNoContent, {}, None)
+ self.swift.register('DELETE', segment_bucket + '/object/X/2',
+ swob.HTTPNoContent, {}, None)
+
+ @s3acl
+ def test_bucket_upload_part(self):
+ req = Request.blank('/bucket?partNumber=1&uploadId=x',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+
+ @s3acl
+ def test_object_multipart_uploads_list(self):
+ req = Request.blank('/bucket/object?uploads',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+
+ @s3acl
+ def test_bucket_multipart_uploads_initiate(self):
+ req = Request.blank('/bucket?uploads',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+
+ @s3acl
+ def test_bucket_list_parts(self):
+ req = Request.blank('/bucket?uploadId=x',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+
+ @s3acl
+ def test_bucket_multipart_uploads_abort(self):
+ req = Request.blank('/bucket?uploadId=x',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+
+ @s3acl
+ def test_bucket_multipart_uploads_complete(self):
+ req = Request.blank('/bucket?uploadId=x',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+
+ def _test_bucket_multipart_uploads_GET(self, query=None,
+ multiparts=None):
+ segment_bucket = '/v1/AUTH_test/bucket+segments'
+ objects = multiparts or multiparts_template
+ objects = map(lambda item: {'name': item[0], 'last_modified': item[1],
+ 'hash': item[2], 'bytes': item[3]},
+ objects)
+ object_list = json.dumps(objects)
+ self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
+ object_list)
+
+ query = '?uploads&' + query if query else '?uploads'
+ req = Request.blank('/bucket/%s' % query,
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ return self.call_s3api(req)
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET(self):
+ status, headers, body = self._test_bucket_multipart_uploads_GET()
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(elem.find('Bucket').text, 'bucket')
+ self.assertIsNone(elem.find('KeyMarker').text)
+ self.assertIsNone(elem.find('UploadIdMarker').text)
+ self.assertEqual(elem.find('NextUploadIdMarker').text, 'Z')
+ self.assertEqual(elem.find('MaxUploads').text, '1000')
+ self.assertEqual(elem.find('IsTruncated').text, 'false')
+ self.assertEqual(len(elem.findall('Upload')), 4)
+ objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts_template]
+ for u in elem.findall('Upload'):
+ name = u.find('Key').text + '/' + u.find('UploadId').text
+ initiated = u.find('Initiated').text
+ self.assertTrue((name, initiated) in objects)
+ self.assertEqual(u.find('Initiator/ID').text, 'test:tester')
+ self.assertEqual(u.find('Initiator/DisplayName').text,
+ 'test:tester')
+ self.assertEqual(u.find('Owner/ID').text, 'test:tester')
+ self.assertEqual(u.find('Owner/DisplayName').text, 'test:tester')
+ self.assertEqual(u.find('StorageClass').text, 'STANDARD')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_without_segment_bucket(self):
+ segment_bucket = '/v1/AUTH_test/bucket+segments'
+ self.swift.register('GET', segment_bucket, swob.HTTPNotFound, {}, '')
+
+ req = Request.blank('/bucket?uploads',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+
+ status, haeaders, body = self.call_s3api(req)
+
+ self.assertEqual(status.split()[0], '200')
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(elem.find('Bucket').text, 'bucket')
+ self.assertIsNone(elem.find('KeyMarker').text)
+ self.assertIsNone(elem.find('UploadIdMarker').text)
+ self.assertIsNone(elem.find('NextUploadIdMarker').text)
+ self.assertEqual(elem.find('MaxUploads').text, '1000')
+ self.assertEqual(elem.find('IsTruncated').text, 'false')
+ self.assertEqual(len(elem.findall('Upload')), 0)
+
+ @s3acl
+ @patch('swift.common.middleware.s3api.s3request.get_container_info',
+ lambda x, y: {'status': 404})
+ def test_bucket_multipart_uploads_GET_without_bucket(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket',
+ swob.HTTPNotFound, {}, '')
+ req = Request.blank('/bucket?uploads',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, haeaders, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '404')
+ self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_encoding_type_error(self):
+ query = 'encoding-type=xml'
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_maxuploads(self):
+ query = 'max-uploads=2'
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query)
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(len(elem.findall('Upload/UploadId')), 2)
+ self.assertEqual(elem.find('NextKeyMarker').text, 'object')
+ self.assertEqual(elem.find('NextUploadIdMarker').text, 'Y')
+ self.assertEqual(elem.find('MaxUploads').text, '2')
+ self.assertEqual(elem.find('IsTruncated').text, 'true')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_str_maxuploads(self):
+ query = 'max-uploads=invalid'
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_negative_maxuploads(self):
+ query = 'max-uploads=-1'
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_maxuploads_over_default(self):
+ query = 'max-uploads=1001'
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query)
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(len(elem.findall('Upload/UploadId')), 4)
+ self.assertEqual(elem.find('NextKeyMarker').text, 'subdir/object')
+ self.assertEqual(elem.find('NextUploadIdMarker').text, 'Z')
+ self.assertEqual(elem.find('MaxUploads').text, '1000')
+ self.assertEqual(elem.find('IsTruncated').text, 'false')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_maxuploads_over_max_32bit_int(self):
+ query = 'max-uploads=%s' % (MAX_32BIT_INT + 1)
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_with_id_and_key_marker(self):
+ query = 'upload-id-marker=Y&key-marker=object'
+ multiparts = \
+ (('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2),
+ ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12),
+ ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22))
+
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query, multiparts)
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(elem.find('KeyMarker').text, 'object')
+ self.assertEqual(elem.find('UploadIdMarker').text, 'Y')
+ self.assertEqual(len(elem.findall('Upload')), 1)
+ objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts]
+ for u in elem.findall('Upload'):
+ name = u.find('Key').text + '/' + u.find('UploadId').text
+ initiated = u.find('Initiated').text
+ self.assertTrue((name, initiated) in objects)
+ self.assertEqual(status.split()[0], '200')
+
+ _, path, _ = self.swift.calls_with_headers[-1]
+ path, query_string = path.split('?', 1)
+ query = {}
+ for q in query_string.split('&'):
+ key, arg = q.split('=')
+ query[key] = arg
+ self.assertEqual(query['format'], 'json')
+ self.assertEqual(query['limit'], '1001')
+ self.assertEqual(query['marker'], 'object/Y')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_with_key_marker(self):
+ query = 'key-marker=object'
+ multiparts = \
+ (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
+ ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
+ ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21),
+ ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2),
+ ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12),
+ ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22))
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query, multiparts)
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(elem.find('KeyMarker').text, 'object')
+ self.assertEqual(elem.find('NextKeyMarker').text, 'object')
+ self.assertEqual(elem.find('NextUploadIdMarker').text, 'Y')
+ self.assertEqual(len(elem.findall('Upload')), 2)
+ objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts]
+ for u in elem.findall('Upload'):
+ name = u.find('Key').text + '/' + u.find('UploadId').text
+ initiated = u.find('Initiated').text
+ self.assertTrue((name, initiated) in objects)
+ self.assertEqual(status.split()[0], '200')
+
+ _, path, _ = self.swift.calls_with_headers[-1]
+ path, query_string = path.split('?', 1)
+ query = {}
+ for q in query_string.split('&'):
+ key, arg = q.split('=')
+ query[key] = arg
+ self.assertEqual(query['format'], 'json')
+ self.assertEqual(query['limit'], '1001')
+ self.assertEqual(query['marker'], quote('object/~'))
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_with_prefix(self):
+ query = 'prefix=X'
+ multiparts = \
+ (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
+ ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
+ ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21))
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query, multiparts)
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(len(elem.findall('Upload')), 1)
+ objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts]
+ for u in elem.findall('Upload'):
+ name = u.find('Key').text + '/' + u.find('UploadId').text
+ initiated = u.find('Initiated').text
+ self.assertTrue((name, initiated) in objects)
+ self.assertEqual(status.split()[0], '200')
+
+ _, path, _ = self.swift.calls_with_headers[-1]
+ path, query_string = path.split('?', 1)
+ query = {}
+ for q in query_string.split('&'):
+ key, arg = q.split('=')
+ query[key] = arg
+ self.assertEqual(query['format'], 'json')
+ self.assertEqual(query['limit'], '1001')
+ self.assertEqual(query['prefix'], 'X')
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_with_delimiter(self):
+ query = 'delimiter=/'
+ multiparts = \
+ (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
+ ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
+ ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21),
+ ('object/Y', '2014-05-07T19:47:50.592270', 'HASH', 2),
+ ('object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 21),
+ ('object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 22),
+ ('object/Z', '2014-05-07T19:47:50.592270', 'HASH', 3),
+ ('object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 31),
+ ('object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 32),
+ ('subdir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 4),
+ ('subdir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 41),
+ ('subdir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 42),
+ ('subdir/object/Y', '2014-05-07T19:47:50.592270', 'HASH', 5),
+ ('subdir/object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 51),
+ ('subdir/object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 52),
+ ('subdir2/object/Z', '2014-05-07T19:47:50.592270', 'HASH', 6),
+ ('subdir2/object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 61),
+ ('subdir2/object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 62))
+
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query, multiparts)
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(len(elem.findall('Upload')), 3)
+ self.assertEqual(len(elem.findall('CommonPrefixes')), 2)
+ objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts
+ if o[0].startswith('o')]
+ prefixes = set([o[0].split('/')[0] + '/' for o in multiparts
+ if o[0].startswith('s')])
+ for u in elem.findall('Upload'):
+ name = u.find('Key').text + '/' + u.find('UploadId').text
+ initiated = u.find('Initiated').text
+ self.assertTrue((name, initiated) in objects)
+ for p in elem.findall('CommonPrefixes'):
+ prefix = p.find('Prefix').text
+ self.assertTrue(prefix in prefixes)
+
+ self.assertEqual(status.split()[0], '200')
+ _, path, _ = self.swift.calls_with_headers[-1]
+ path, query_string = path.split('?', 1)
+ query = {}
+ for q in query_string.split('&'):
+ key, arg = q.split('=')
+ query[key] = arg
+ self.assertEqual(query['format'], 'json')
+ self.assertEqual(query['limit'], '1001')
+ self.assertTrue(query.get('delimiter') is None)
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_with_multi_chars_delimiter(self):
+ query = 'delimiter=subdir'
+ multiparts = \
+ (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
+ ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
+ ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21),
+ ('dir/subdir/object/X', '2014-05-07T19:47:50.592270',
+ 'HASH', 3),
+ ('dir/subdir/object/X/1', '2014-05-07T19:47:51.592270',
+ 'HASH', 31),
+ ('dir/subdir/object/X/2', '2014-05-07T19:47:52.592270',
+ 'HASH', 32),
+ ('subdir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 4),
+ ('subdir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 41),
+ ('subdir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 42),
+ ('subdir/object/Y', '2014-05-07T19:47:50.592270', 'HASH', 5),
+ ('subdir/object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 51),
+ ('subdir/object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 52),
+ ('subdir2/object/Z', '2014-05-07T19:47:50.592270', 'HASH', 6),
+ ('subdir2/object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 61),
+ ('subdir2/object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 62))
+
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query, multiparts)
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(len(elem.findall('Upload')), 1)
+ self.assertEqual(len(elem.findall('CommonPrefixes')), 2)
+ objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts
+ if o[0].startswith('object')]
+ prefixes = ('dir/subdir', 'subdir')
+ for u in elem.findall('Upload'):
+ name = u.find('Key').text + '/' + u.find('UploadId').text
+ initiated = u.find('Initiated').text
+ self.assertTrue((name, initiated) in objects)
+ for p in elem.findall('CommonPrefixes'):
+ prefix = p.find('Prefix').text
+ self.assertTrue(prefix in prefixes)
+
+ self.assertEqual(status.split()[0], '200')
+ _, path, _ = self.swift.calls_with_headers[-1]
+ path, query_string = path.split('?', 1)
+ query = {}
+ for q in query_string.split('&'):
+ key, arg = q.split('=')
+ query[key] = arg
+ self.assertEqual(query['format'], 'json')
+ self.assertEqual(query['limit'], '1001')
+ self.assertTrue(query.get('delimiter') is None)
+
+ @s3acl
+ def test_bucket_multipart_uploads_GET_with_prefix_and_delimiter(self):
+ query = 'prefix=dir/&delimiter=/'
+ multiparts = \
+ (('dir/subdir/object/X', '2014-05-07T19:47:50.592270',
+ 'HASH', 4),
+ ('dir/subdir/object/X/1', '2014-05-07T19:47:51.592270',
+ 'HASH', 41),
+ ('dir/subdir/object/X/2', '2014-05-07T19:47:52.592270',
+ 'HASH', 42),
+ ('dir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 5),
+ ('dir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 51),
+ ('dir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 52))
+
+ status, headers, body = \
+ self._test_bucket_multipart_uploads_GET(query, multiparts)
+ elem = fromstring(body, 'ListMultipartUploadsResult')
+ self.assertEqual(len(elem.findall('Upload')), 1)
+ self.assertEqual(len(elem.findall('CommonPrefixes')), 1)
+ objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts
+ if o[0].startswith('dir/o')]
+ prefixes = ['dir/subdir/']
+ for u in elem.findall('Upload'):
+ name = u.find('Key').text + '/' + u.find('UploadId').text
+ initiated = u.find('Initiated').text
+ self.assertTrue((name, initiated) in objects)
+ for p in elem.findall('CommonPrefixes'):
+ prefix = p.find('Prefix').text
+ self.assertTrue(prefix in prefixes)
+
+ self.assertEqual(status.split()[0], '200')
+ _, path, _ = self.swift.calls_with_headers[-1]
+ path, query_string = path.split('?', 1)
+ query = {}
+ for q in query_string.split('&'):
+ key, arg = q.split('=')
+ query[key] = arg
+ self.assertEqual(query['format'], 'json')
+ self.assertEqual(query['limit'], '1001')
+ self.assertEqual(query['prefix'], 'dir/')
+ self.assertTrue(query.get('delimiter') is None)
+
+ @patch('swift.common.middleware.s3api.controllers.'
+ 'multi_upload.unique_id', lambda: 'X')
+ def _test_object_multipart_upload_initiate(self, headers):
+ headers.update({
+ 'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-meta-foo': 'bar',
+ })
+ req = Request.blank('/bucket/object?uploads',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers=headers)
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'InitiateMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ _, _, req_headers = self.swift.calls_with_headers[-1]
+ self.assertEqual(req_headers.get('X-Object-Meta-Foo'), 'bar')
+ self.assertNotIn('Etag', req_headers)
+ self.assertNotIn('Content-MD5', req_headers)
+ method, path, _ = self.swift.calls_with_headers[-2]
+ self.assertEqual(method, 'PUT')
+ self.assertEqual(
+ path,
+ '/v1/AUTH_test/bucket%s' % MULTIUPLOAD_SUFFIX)
+
+ def test_object_multipart_upload_initiate(self):
+ self._test_object_multipart_upload_initiate({})
+ self._test_object_multipart_upload_initiate({'Etag': 'blahblahblah'})
+ self._test_object_multipart_upload_initiate({
+ 'Content-MD5': base64.b64encode('blahblahblahblah').strip()})
+
+ @s3acl(s3acl_only=True)
+ @patch('swift.common.middleware.s3api.controllers.multi_upload.'
+ 'unique_id', lambda: 'X')
+ def test_object_multipart_upload_initiate_s3acl(self):
+ req = Request.blank('/bucket/object?uploads',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization':
+ 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'public-read',
+ 'x-amz-meta-foo': 'bar',
+ 'Content-Type': 'cat/picture'})
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'InitiateMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ _, _, req_headers = self.swift.calls_with_headers[-1]
+ self.assertEqual(req_headers.get('X-Object-Meta-Foo'), 'bar')
+ self.assertEqual(req_headers.get(
+ 'X-Object-Sysmeta-S3api-Has-Content-Type'), 'yes')
+ self.assertEqual(req_headers.get(
+ 'X-Object-Sysmeta-S3api-Content-Type'), 'cat/picture')
+ tmpacl_header = req_headers.get(sysmeta_header('object', 'tmpacl'))
+ self.assertTrue(tmpacl_header)
+ acl_header = encode_acl('object',
+ ACLPublicRead(Owner('test:tester',
+ 'test:tester')))
+ self.assertEqual(acl_header.get(sysmeta_header('object', 'acl')),
+ tmpacl_header)
+
+ @s3acl(s3acl_only=True)
+ @patch('swift.common.middleware.s3api.controllers.'
+ 'multi_upload.unique_id', lambda: 'X')
+ def test_object_multipart_upload_initiate_no_content_type(self):
+ req = Request.blank('/bucket/object?uploads',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization':
+ 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'public-read',
+ 'x-amz-meta-foo': 'bar'})
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'InitiateMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ _, _, req_headers = self.swift.calls_with_headers[-1]
+ self.assertEqual(req_headers.get('X-Object-Meta-Foo'), 'bar')
+ self.assertEqual(req_headers.get(
+ 'X-Object-Sysmeta-S3api-Has-Content-Type'), 'no')
+ tmpacl_header = req_headers.get(sysmeta_header('object', 'tmpacl'))
+ self.assertTrue(tmpacl_header)
+ acl_header = encode_acl('object',
+ ACLPublicRead(Owner('test:tester',
+ 'test:tester')))
+ self.assertEqual(acl_header.get(sysmeta_header('object', 'acl')),
+ tmpacl_header)
+
+ @patch('swift.common.middleware.s3api.controllers.'
+ 'multi_upload.unique_id', lambda: 'X')
+ def test_object_multipart_upload_initiate_without_bucket(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket',
+ swob.HTTPNotFound, {}, None)
+ req = Request.blank('/bucket/object?uploads',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization':
+ 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '404')
+ self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
+
+ @s3acl
+ def test_object_multipart_upload_complete_error(self):
+ malformed_xml = 'malformed_XML'
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=malformed_xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'MalformedXML')
+
+ # without target bucket
+ req = Request.blank('/nobucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ with patch(
+ 'swift.common.middleware.s3api.s3request.get_container_info',
+ lambda x, y: {'status': 404}):
+ self.swift.register('HEAD', '/v1/AUTH_test/nobucket',
+ swob.HTTPNotFound, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
+
+ def test_object_multipart_upload_complete(self):
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'CompleteMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ _, _, headers = self.swift.calls_with_headers[-2]
+ self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
+ self.assertEqual(headers.get('Content-Type'), 'baz/quux')
+
+ def test_object_multipart_upload_complete_404_on_marker_delete(self):
+ segment_bucket = '/v1/AUTH_test/bucket+segments'
+ self.swift.register('DELETE', segment_bucket + '/object/X',
+ swob.HTTPNotFound, {}, None)
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ fromstring(body, 'CompleteMultipartUploadResult')
+
+ _, _, headers = self.swift.calls_with_headers[-2]
+ self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
+ self.assertEqual(headers.get('Content-Type'), 'baz/quux')
+
+ def test_object_multipart_upload_complete_old_content_type(self):
+ self.swift.register_unconditionally(
+ 'HEAD', '/v1/AUTH_test/bucket+segments/object/X',
+ swob.HTTPOk, {"Content-Type": "thingy/dingy"}, None)
+
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'CompleteMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ _, _, headers = self.swift.calls_with_headers[-2]
+ self.assertEqual(headers.get('Content-Type'), 'thingy/dingy')
+
+ def test_object_multipart_upload_complete_no_content_type(self):
+ self.swift.register_unconditionally(
+ 'HEAD', '/v1/AUTH_test/bucket+segments/object/X',
+ swob.HTTPOk, {"X-Object-Sysmeta-S3api-Has-Content-Type": "no"},
+ None)
+
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'CompleteMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ _, _, headers = self.swift.calls_with_headers[-2]
+ self.assertNotIn('Content-Type', headers)
+
+ def test_object_multipart_upload_complete_weird_host_name(self):
+ # This happens via boto signature v4
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST',
+ 'HTTP_HOST': 'localhost:8080:8080'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'CompleteMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ _, _, headers = self.swift.calls_with_headers[-2]
+ self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
+
+ def test_object_multipart_upload_complete_segment_too_small(self):
+ msg = 'Index 0: too small; each segment must be at least 1 byte.'
+
+ req = Request.blank(
+ '/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+
+ self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
+ swob.HTTPBadRequest, {}, msg)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '400')
+ self.assertEqual(self._get_error_code(body), 'EntityTooSmall')
+ self.assertEqual(self._get_error_message(body), msg)
+
+ self.swift.clear_calls()
+ self.s3api.conf.min_segment_size = 5242880
+ req = Request.blank(
+ '/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '400')
+ self.assertEqual(self._get_error_code(body), 'EntityTooSmall')
+ self.assertEqual(self._get_error_message(body),
+ 'Your proposed upload is smaller than the minimum '
+ 'allowed object size.')
+ self.assertNotIn('PUT', [method for method, _ in self.swift.calls])
+
+ def test_object_multipart_upload_complete_single_zero_length_segment(self):
+ segment_bucket = '/v1/AUTH_test/empty-bucket+segments'
+ put_headers = {'etag': self.etag, 'last-modified': self.last_modified}
+
+ object_list = [{
+ 'name': 'object/X/1',
+ 'last_modified': self.last_modified,
+ 'hash': 'd41d8cd98f00b204e9800998ecf8427e',
+ 'bytes': '0',
+ }]
+
+ self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
+ json.dumps(object_list))
+ self.swift.register('HEAD', '/v1/AUTH_test/empty-bucket',
+ swob.HTTPNoContent, {}, None)
+ self.swift.register('HEAD', segment_bucket + '/object/X',
+ swob.HTTPOk, {'x-object-meta-foo': 'bar',
+ 'content-type': 'baz/quux'}, None)
+ self.swift.register('PUT', '/v1/AUTH_test/empty-bucket/object',
+ swob.HTTPCreated, {}, None)
+ self.swift.register('DELETE', segment_bucket + '/object/X/1',
+ swob.HTTPOk, {}, None)
+ self.swift.register('DELETE', segment_bucket + '/object/X',
+ swob.HTTPOk, {}, None)
+
+ xml = '<CompleteMultipartUpload>' \
+ '<Part>' \
+ '<PartNumber>1</PartNumber>' \
+ '<ETag>d41d8cd98f00b204e9800998ecf8427e</ETag>' \
+ '</Part>' \
+ '</CompleteMultipartUpload>'
+
+ req = Request.blank('/empty-bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'CompleteMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ self.assertEqual(self.swift.calls, [
+ ('HEAD', '/v1/AUTH_test/empty-bucket'),
+ ('HEAD', '/v1/AUTH_test/empty-bucket+segments/object/X'),
+ ('GET', '/v1/AUTH_test/empty-bucket+segments?delimiter=/&'
+ 'format=json&prefix=object/X/'),
+ ('PUT',
+ '/v1/AUTH_test/empty-bucket/object?multipart-manifest=put'),
+ ('DELETE', '/v1/AUTH_test/empty-bucket+segments/object/X'),
+ ])
+ _, _, put_headers = self.swift.calls_with_headers[-2]
+ self.assertEqual(put_headers.get('X-Object-Meta-Foo'), 'bar')
+ self.assertEqual(put_headers.get('Content-Type'), 'baz/quux')
+
+ def test_object_multipart_upload_complete_double_zero_length_segment(self):
+ segment_bucket = '/v1/AUTH_test/empty-bucket+segments'
+
+ object_list = [{
+ 'name': 'object/X/1',
+ 'last_modified': self.last_modified,
+ 'hash': 'd41d8cd98f00b204e9800998ecf8427e',
+ 'bytes': '0',
+ }, {
+ 'name': 'object/X/2',
+ 'last_modified': self.last_modified,
+ 'hash': 'd41d8cd98f00b204e9800998ecf8427e',
+ 'bytes': '0',
+ }]
+
+ self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
+ json.dumps(object_list))
+ self.swift.register('HEAD', '/v1/AUTH_test/empty-bucket',
+ swob.HTTPNoContent, {}, None)
+ self.swift.register('HEAD', segment_bucket + '/object/X',
+ swob.HTTPOk, {'x-object-meta-foo': 'bar',
+ 'content-type': 'baz/quux'}, None)
+
+ xml = '<CompleteMultipartUpload>' \
+ '<Part>' \
+ '<PartNumber>1</PartNumber>' \
+ '<ETag>d41d8cd98f00b204e9800998ecf8427e</ETag>' \
+ '</Part>' \
+ '<Part>' \
+ '<PartNumber>2</PartNumber>' \
+ '<ETag>d41d8cd98f00b204e9800998ecf8427e</ETag>' \
+ '</Part>' \
+ '</CompleteMultipartUpload>'
+
+ req = Request.blank('/empty-bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'EntityTooSmall')
+ self.assertEqual(status.split()[0], '400')
+
+ self.assertEqual(self.swift.calls, [
+ ('HEAD', '/v1/AUTH_test/empty-bucket'),
+ ('HEAD', '/v1/AUTH_test/empty-bucket+segments/object/X'),
+ ('GET', '/v1/AUTH_test/empty-bucket+segments?delimiter=/&'
+ 'format=json&prefix=object/X/'),
+ ])
+
+ def test_object_multipart_upload_complete_zero_length_final_segment(self):
+ segment_bucket = '/v1/AUTH_test/bucket+segments'
+
+ object_list = [{
+ 'name': 'object/X/1',
+ 'last_modified': self.last_modified,
+ 'hash': 'some hash',
+ 'bytes': '100',
+ }, {
+ 'name': 'object/X/2',
+ 'last_modified': self.last_modified,
+ 'hash': 'some other hash',
+ 'bytes': '1',
+ }, {
+ 'name': 'object/X/3',
+ 'last_modified': self.last_modified,
+ 'hash': 'd41d8cd98f00b204e9800998ecf8427e',
+ 'bytes': '0',
+ }]
+
+ self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
+ json.dumps(object_list))
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, {}, None)
+ self.swift.register('HEAD', segment_bucket + '/object/X',
+ swob.HTTPOk, {'x-object-meta-foo': 'bar',
+ 'content-type': 'baz/quux'}, None)
+ self.swift.register('DELETE', segment_bucket + '/object/X/3',
+ swob.HTTPNoContent, {}, None)
+
+ xml = '<CompleteMultipartUpload>' \
+ '<Part>' \
+ '<PartNumber>1</PartNumber>' \
+ '<ETag>some hash</ETag>' \
+ '</Part>' \
+ '<Part>' \
+ '<PartNumber>2</PartNumber>' \
+ '<ETag>some other hash</ETag>' \
+ '</Part>' \
+ '<Part>' \
+ '<PartNumber>3</PartNumber>' \
+ '<ETag>d41d8cd98f00b204e9800998ecf8427e</ETag>' \
+ '</Part>' \
+ '</CompleteMultipartUpload>'
+
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(), },
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ self.assertEqual(self.swift.calls, [
+ ('HEAD', '/v1/AUTH_test/bucket'),
+ ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
+ ('GET', '/v1/AUTH_test/bucket+segments?delimiter=/&'
+ 'format=json&prefix=object/X/'),
+ ('PUT', '/v1/AUTH_test/bucket/object?multipart-manifest=put'),
+ ('DELETE', '/v1/AUTH_test/bucket+segments/object/X'),
+ ])
+
+ @s3acl(s3acl_only=True)
+ def test_object_multipart_upload_complete_s3acl(self):
+ acl_headers = encode_acl('object', ACLPublicRead(Owner('test:tester',
+ 'test:tester')))
+ headers = {}
+ headers[sysmeta_header('object', 'tmpacl')] = \
+ acl_headers.get(sysmeta_header('object', 'acl'))
+ headers['X-Object-Meta-Foo'] = 'bar'
+ headers['Content-Type'] = 'baz/quux'
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
+ swob.HTTPOk, headers, None)
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ fromstring(body, 'CompleteMultipartUploadResult')
+ self.assertEqual(status.split()[0], '200')
+
+ _, _, headers = self.swift.calls_with_headers[-2]
+ self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
+ self.assertEqual(headers.get('Content-Type'), 'baz/quux')
+ self.assertEqual(
+ tostring(ACLPublicRead(Owner('test:tester',
+ 'test:tester')).elem()),
+ tostring(decode_acl('object', headers, False).elem()))
+
+ @s3acl
+ def test_object_multipart_upload_abort_error(self):
+ req = Request.blank('/bucket/object?uploadId=invalid',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'NoSuchUpload')
+
+ # without target bucket
+ req = Request.blank('/nobucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ with patch(
+ 'swift.common.middleware.s3api.s3request.get_container_info',
+ lambda x, y: {'status': 404}):
+ self.swift.register('HEAD', '/v1/AUTH_test/nobucket',
+ swob.HTTPNotFound, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
+
+ @s3acl
+ def test_object_multipart_upload_abort(self):
+ req = Request.blank('/bucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '204')
+
+ @s3acl
+ @patch('swift.common.middleware.s3api.s3request.'
+ 'get_container_info', lambda x, y: {'status': 204})
+ def test_object_upload_part_error(self):
+ # without upload id
+ req = Request.blank('/bucket/object?partNumber=1',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body='part object')
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ # invalid part number
+ req = Request.blank('/bucket/object?partNumber=invalid&uploadId=X',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body='part object')
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ # part number must be > 0
+ req = Request.blank('/bucket/object?partNumber=0&uploadId=X',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body='part object')
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ # part number must be < 1001
+ req = Request.blank('/bucket/object?partNumber=1001&uploadId=X',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body='part object')
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ # without target bucket
+ req = Request.blank('/nobucket/object?partNumber=1&uploadId=X',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body='part object')
+ with patch(
+ 'swift.common.middleware.s3api.s3request.get_container_info',
+ lambda x, y: {'status': 404}):
+ self.swift.register('HEAD', '/v1/AUTH_test/nobucket',
+ swob.HTTPNotFound, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
+
+ @s3acl
+ def test_object_upload_part(self):
+ req = Request.blank('/bucket/object?partNumber=1&uploadId=X',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body='part object')
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl
+ def test_object_list_parts_error(self):
+ req = Request.blank('/bucket/object?uploadId=invalid',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'NoSuchUpload')
+
+ # without target bucket
+ req = Request.blank('/nobucket/object?uploadId=X',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ with patch(
+ 'swift.common.middleware.s3api.s3request.get_container_info',
+ lambda x, y: {'status': 404}):
+ self.swift.register('HEAD', '/v1/AUTH_test/nobucket',
+ swob.HTTPNotFound, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
+
+    @s3acl
+    def test_object_list_parts(self):
+        req = Request.blank('/bucket/object?uploadId=X',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        elem = fromstring(body, 'ListPartsResult')
+        self.assertEqual(elem.find('Bucket').text, 'bucket')
+        self.assertEqual(elem.find('Key').text, 'object')
+        self.assertEqual(elem.find('UploadId').text, 'X')
+        # Both ID and DisplayName are emitted for Initiator and Owner,
+        # mirroring S3's ListParts response shape.
+        self.assertEqual(elem.find('Initiator/ID').text, 'test:tester')
+        self.assertEqual(elem.find('Initiator/DisplayName').text,
+                         'test:tester')
+        self.assertEqual(elem.find('Owner/ID').text, 'test:tester')
+        self.assertEqual(elem.find('Owner/DisplayName').text, 'test:tester')
+        self.assertEqual(elem.find('StorageClass').text, 'STANDARD')
+        self.assertEqual(elem.find('PartNumberMarker').text, '0')
+        self.assertEqual(elem.find('NextPartNumberMarker').text, '2')
+        self.assertEqual(elem.find('MaxParts').text, '1000')
+        self.assertEqual(elem.find('IsTruncated').text, 'false')
+        self.assertEqual(len(elem.findall('Part')), 2)
+        for p in elem.findall('Part'):
+            partnum = int(p.find('PartNumber').text)
+            self.assertEqual(p.find('LastModified').text,
+                             objects_template[partnum - 1][1][:-3]
+                             + 'Z')
+            self.assertEqual(p.find('ETag').text.strip(),
+                             '"%s"' % objects_template[partnum - 1][2])
+            self.assertEqual(p.find('Size').text,
+                             str(objects_template[partnum - 1][3]))
+        self.assertEqual(status.split()[0], '200')
+
+    def test_object_list_parts_encoding_type(self):
+        # encoding-type=url: Key comes back URL-quoted and EncodingType is
+        # echoed in the result.
+        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object@@/X',
+                            swob.HTTPOk, {}, None)
+        req = Request.blank('/bucket/object@@?uploadId=X&encoding-type=url',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        elem = fromstring(body, 'ListPartsResult')
+        self.assertEqual(elem.find('Key').text, quote('object@@'))
+        self.assertEqual(elem.find('EncodingType').text, 'url')
+        self.assertEqual(status.split()[0], '200')
+
+    def test_object_list_parts_without_encoding_type(self):
+        # No encoding-type: Key is returned verbatim, not URL-quoted.
+        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object@@/X',
+                            swob.HTTPOk, {}, None)
+        req = Request.blank('/bucket/object@@?uploadId=X',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        elem = fromstring(body, 'ListPartsResult')
+        self.assertEqual(elem.find('Key').text, 'object@@')
+        self.assertEqual(status.split()[0], '200')
+
+    def test_object_list_parts_encoding_type_error(self):
+        # Unsupported encoding-type value -> InvalidArgument.
+        req = Request.blank('/bucket/object?uploadId=X&encoding-type=xml',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+    def test_object_list_parts_max_parts(self):
+        # max-parts smaller than the number of parts -> truncated listing
+        # with exactly max-parts entries.
+        req = Request.blank('/bucket/object?uploadId=X&max-parts=1',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        elem = fromstring(body, 'ListPartsResult')
+        self.assertEqual(elem.find('IsTruncated').text, 'true')
+        self.assertEqual(len(elem.findall('Part')), 1)
+        self.assertEqual(status.split()[0], '200')
+
+    def test_object_list_parts_str_max_parts(self):
+        # Non-numeric max-parts -> InvalidArgument.
+        req = Request.blank('/bucket/object?uploadId=X&max-parts=invalid',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+    def test_object_list_parts_negative_max_parts(self):
+        # Negative max-parts -> InvalidArgument.
+        req = Request.blank('/bucket/object?uploadId=X&max-parts=-1',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+    def test_object_list_parts_over_max_parts(self):
+        # max-parts above the configured limit: the listing still succeeds
+        # and MaxParts is reported as 1000 (the response mirrors the
+        # default-listing case above) — i.e. the excess value is clamped.
+        req = Request.blank('/bucket/object?uploadId=X&max-parts=%d' %
+                            (self.s3api.conf.max_parts_listing + 1),
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        elem = fromstring(body, 'ListPartsResult')
+        self.assertEqual(elem.find('Bucket').text, 'bucket')
+        self.assertEqual(elem.find('Key').text, 'object')
+        self.assertEqual(elem.find('UploadId').text, 'X')
+        self.assertEqual(elem.find('Initiator/ID').text, 'test:tester')
+        self.assertEqual(elem.find('Owner/ID').text, 'test:tester')
+        self.assertEqual(elem.find('StorageClass').text, 'STANDARD')
+        self.assertEqual(elem.find('PartNumberMarker').text, '0')
+        self.assertEqual(elem.find('NextPartNumberMarker').text, '2')
+        self.assertEqual(elem.find('MaxParts').text, '1000')
+        self.assertEqual(elem.find('IsTruncated').text, 'false')
+        self.assertEqual(len(elem.findall('Part')), 2)
+        # Each listed part must match the fixture row for its part number.
+        for p in elem.findall('Part'):
+            partnum = int(p.find('PartNumber').text)
+            self.assertEqual(p.find('LastModified').text,
+                             objects_template[partnum - 1][1][:-3]
+                             + 'Z')
+            self.assertEqual(p.find('ETag').text,
+                             '"%s"' % objects_template[partnum - 1][2])
+            self.assertEqual(p.find('Size').text,
+                             str(objects_template[partnum - 1][3]))
+        self.assertEqual(status.split()[0], '200')
+
+    def test_object_list_parts_over_max_32bit_int(self):
+        # max-parts beyond the 32-bit integer range -> InvalidArgument.
+        req = Request.blank('/bucket/object?uploadId=X&max-parts=%d' %
+                            (MAX_32BIT_INT + 1),
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+    def test_object_list_parts_with_part_number_marker(self):
+        # part-number-marker=1 skips part 1; only part 2 is listed and the
+        # marker is echoed back.
+        req = Request.blank('/bucket/object?uploadId=X&'
+                            'part-number-marker=1',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        elem = fromstring(body, 'ListPartsResult')
+        self.assertEqual(len(elem.findall('Part')), 1)
+        self.assertEqual(elem.find('Part/PartNumber').text, '2')
+        self.assertEqual(elem.find('PartNumberMarker').text, '1')
+        self.assertEqual(status.split()[0], '200')
+
+    def test_object_list_parts_str_part_number_marker(self):
+        # Non-numeric part-number-marker -> InvalidArgument.
+        req = Request.blank('/bucket/object?uploadId=X&part-number-marker='
+                            'invalid',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+    def test_object_list_parts_negative_part_number_marker(self):
+        # Negative part-number-marker -> InvalidArgument.
+        req = Request.blank('/bucket/object?uploadId=X&part-number-marker='
+                            '-1',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+    def test_object_list_parts_over_part_number_marker(self):
+        # part-number-marker past the maximum part number: still a valid
+        # request, yielding an empty part list with the marker echoed back.
+        part_number_marker = str(self.s3api.conf.max_upload_part_num + 1)
+        req = Request.blank('/bucket/object?uploadId=X&'
+                            'part-number-marker=%s' % part_number_marker,
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        elem = fromstring(body, 'ListPartsResult')
+        self.assertEqual(len(elem.findall('Part')), 0)
+        self.assertEqual(elem.find('PartNumberMarker').text,
+                         part_number_marker)
+        self.assertEqual(status.split()[0], '200')
+
+    def test_object_list_parts_over_max_32bit_int_part_number_marker(self):
+        # part-number-marker beyond the 32-bit range -> InvalidArgument.
+        req = Request.blank('/bucket/object?uploadId=X&part-number-marker='
+                            '%s' % ((MAX_32BIT_INT + 1)),
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ def test_object_list_parts_same_max_marts_as_objects_num(self):
+ req = Request.blank('/bucket/object?uploadId=X&max-parts=2',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'ListPartsResult')
+ self.assertEqual(len(elem.findall('Part')), 2)
+ self.assertEqual(status.split()[0], '200')
+
+    def _test_for_s3acl(self, method, query, account, hasObj=True, body=None):
+        # Helper: issue `method` against /bucket[/object]<query> signed as
+        # `account` and return (status, headers, body) from call_s3api.
+        path = '/bucket%s' % ('/object' + query if hasObj else query)
+        req = Request.blank(path,
+                            environ={'REQUEST_METHOD': method},
+                            headers={'Authorization': 'AWS %s:hmac' % account,
+                                     'Date': self.get_date_header()},
+                            body=body)
+        return self.call_s3api(req)
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_acl_without_permission(self):
+        # s3acl: account with no grant may not upload a part -> 403.
+        status, headers, body = \
+            self._test_for_s3acl('PUT', '?partNumber=1&uploadId=X',
+                                 'test:other')
+        self.assertEqual(status.split()[0], '403')
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_acl_with_write_permission(self):
+        # s3acl: WRITE grant allows uploading a part -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('PUT', '?partNumber=1&uploadId=X',
+                                 'test:write')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_acl_with_fullcontrol_permission(self):
+        # s3acl: FULL_CONTROL grant allows uploading a part -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('PUT', '?partNumber=1&uploadId=X',
+                                 'test:full_control')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_list_multipart_uploads_acl_without_permission(self):
+        # s3acl: no grant -> listing in-progress uploads is forbidden.
+        status, headers, body = \
+            self._test_for_s3acl('GET', '?uploads', 'test:other',
+                                 hasObj=False)
+        self.assertEqual(status.split()[0], '403')
+
+    @s3acl(s3acl_only=True)
+    def test_list_multipart_uploads_acl_with_read_permission(self):
+        # s3acl: READ grant allows listing in-progress uploads -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('GET', '?uploads', 'test:read',
+                                 hasObj=False)
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_list_multipart_uploads_acl_with_fullcontrol_permission(self):
+        # s3acl: FULL_CONTROL grant allows listing uploads -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('GET', '?uploads', 'test:full_control',
+                                 hasObj=False)
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    @patch('swift.common.middleware.s3api.controllers.'
+           'multi_upload.unique_id', lambda: 'X')
+    def test_initiate_multipart_upload_acl_without_permission(self):
+        # s3acl: no grant -> Initiate Multipart Upload is forbidden.
+        status, headers, body = \
+            self._test_for_s3acl('POST', '?uploads', 'test:other')
+        self.assertEqual(status.split()[0], '403')
+
+    @s3acl(s3acl_only=True)
+    @patch('swift.common.middleware.s3api.controllers.'
+           'multi_upload.unique_id', lambda: 'X')
+    def test_initiate_multipart_upload_acl_with_write_permission(self):
+        # s3acl: WRITE grant allows Initiate Multipart Upload -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('POST', '?uploads', 'test:write')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    @patch('swift.common.middleware.s3api.controllers.'
+           'multi_upload.unique_id', lambda: 'X')
+    def test_initiate_multipart_upload_acl_with_fullcontrol_permission(self):
+        # s3acl: FULL_CONTROL grant allows Initiate Multipart Upload -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('POST', '?uploads', 'test:full_control')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_list_parts_acl_without_permission(self):
+        # s3acl: no grant -> List Parts is forbidden.
+        status, headers, body = \
+            self._test_for_s3acl('GET', '?uploadId=X', 'test:other')
+        self.assertEqual(status.split()[0], '403')
+
+    @s3acl(s3acl_only=True)
+    def test_list_parts_acl_with_read_permission(self):
+        # s3acl: READ grant allows List Parts -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('GET', '?uploadId=X', 'test:read')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_list_parts_acl_with_fullcontrol_permission(self):
+        # s3acl: FULL_CONTROL grant allows List Parts -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('GET', '?uploadId=X', 'test:full_control')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_abort_multipart_upload_acl_without_permission(self):
+        # s3acl: no grant -> Abort Multipart Upload is forbidden.
+        status, headers, body = \
+            self._test_for_s3acl('DELETE', '?uploadId=X', 'test:other')
+        self.assertEqual(status.split()[0], '403')
+
+    @s3acl(s3acl_only=True)
+    def test_abort_multipart_upload_acl_with_write_permission(self):
+        # s3acl: WRITE grant allows Abort Multipart Upload -> 204.
+        status, headers, body = \
+            self._test_for_s3acl('DELETE', '?uploadId=X', 'test:write')
+        self.assertEqual(status.split()[0], '204')
+
+    @s3acl(s3acl_only=True)
+    def test_abort_multipart_upload_acl_with_fullcontrol_permission(self):
+        # s3acl: FULL_CONTROL grant allows Abort Multipart Upload -> 204.
+        status, headers, body = \
+            self._test_for_s3acl('DELETE', '?uploadId=X', 'test:full_control')
+        self.assertEqual(status.split()[0], '204')
+
+    @s3acl(s3acl_only=True)
+    def test_complete_multipart_upload_acl_without_permission(self):
+        # s3acl: no grant -> Complete Multipart Upload is forbidden.
+        # (`xml` is the module-level CompleteMultipartUpload manifest.)
+        status, headers, body = \
+            self._test_for_s3acl('POST', '?uploadId=X', 'test:other',
+                                 body=xml)
+        self.assertEqual(status.split()[0], '403')
+
+    @s3acl(s3acl_only=True)
+    def test_complete_multipart_upload_acl_with_write_permission(self):
+        # s3acl: WRITE grant allows Complete Multipart Upload -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('POST', '?uploadId=X', 'test:write',
+                                 body=xml)
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_complete_multipart_upload_acl_with_fullcontrol_permission(self):
+        # s3acl: FULL_CONTROL grant allows Complete Multipart Upload -> 200.
+        status, headers, body = \
+            self._test_for_s3acl('POST', '?uploadId=X', 'test:full_control',
+                                 body=xml)
+        self.assertEqual(status.split()[0], '200')
+
+    def _test_copy_for_s3acl(self, account, src_permission=None,
+                             src_path='/src_bucket/src_obj', src_headers=None,
+                             head_resp=swob.HTTPOk, put_header=None,
+                             timestamp=None):
+        # Helper for Upload Part Copy tests: register a copy-source object
+        # whose ACL grants `src_permission` to `account` (FULL_CONTROL to
+        # the owner when no permission is given), then PUT
+        # ?partNumber=1&uploadId=X as `account` with X-Amz-Copy-Source.
+        # `head_resp`/`src_headers` shape the source HEAD response;
+        # `put_header` adds extra request headers; time.time() inside the
+        # s3api utils is pinned to `timestamp` (defaults to now).
+        owner = 'test:tester'
+        grants = [Grant(User(account), src_permission)] \
+            if src_permission else [Grant(User(owner), 'FULL_CONTROL')]
+        src_o_headers = encode_acl('object', ACL(Owner(owner, owner), grants))
+        src_o_headers.update({'last-modified': self.last_modified})
+        src_o_headers.update(src_headers or {})
+        self.swift.register('HEAD', '/v1/AUTH_test/%s' % src_path.lstrip('/'),
+                            head_resp, src_o_headers, None)
+        put_header = put_header or {}
+        put_headers = {'Authorization': 'AWS %s:hmac' % account,
+                       'Date': self.get_date_header(),
+                       'X-Amz-Copy-Source': src_path}
+        put_headers.update(put_header)
+        req = Request.blank(
+            '/bucket/object?partNumber=1&uploadId=X',
+            environ={'REQUEST_METHOD': 'PUT'},
+            headers=put_headers)
+        timestamp = timestamp or time.time()
+        with patch('swift.common.middleware.s3api.utils.time.time',
+                   return_value=timestamp):
+            return self.call_s3api(req)
+
+    @s3acl
+    def test_upload_part_copy(self):
+        # Upload Part Copy happy path: XML CopyPartResult is returned (no
+        # etag response header) and the backend PUT carries X-Copy-From
+        # with a zero Content-Length.
+        date_header = self.get_date_header()
+        timestamp = mktime(date_header)
+        last_modified = S3Timestamp(timestamp).s3xmlformat
+        status, headers, body = self._test_copy_for_s3acl(
+            'test:tester', put_header={'Date': date_header},
+            timestamp=timestamp)
+        self.assertEqual(status.split()[0], '200')
+        self.assertEqual(headers['Content-Type'], 'application/xml')
+        self.assertTrue(headers.get('etag') is None)
+        elem = fromstring(body, 'CopyPartResult')
+        self.assertEqual(elem.find('LastModified').text, last_modified)
+        self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
+
+        _, _, headers = self.swift.calls_with_headers[-1]
+        self.assertEqual(headers['X-Copy-From'], '/src_bucket/src_obj')
+        self.assertEqual(headers['Content-Length'], '0')
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_copy_acl_with_owner_permission(self):
+        # s3acl: the source owner may copy its own object -> 200.
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:tester')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_copy_acl_without_permission(self):
+        # s3acl: 'test:other' has READ on the source but no write grant on
+        # the destination bucket -> 403.
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:other', 'READ')
+        self.assertEqual(status.split()[0], '403')
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_copy_acl_with_write_permission(self):
+        # s3acl: READ on source plus WRITE on destination -> 200.
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:write', 'READ')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_copy_acl_with_fullcontrol_permission(self):
+        # s3acl: READ on source plus FULL_CONTROL on destination -> 200.
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:full_control', 'READ')
+        self.assertEqual(status.split()[0], '200')
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_copy_acl_without_src_permission(self):
+        # s3acl: WRITE on the source is not READ; copying is forbidden.
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:write', 'WRITE')
+        self.assertEqual(status.split()[0], '403')
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_copy_acl_invalid_source(self):
+        # Malformed X-Amz-Copy-Source values (empty, no bucket, no object
+        # key) are all rejected with 400 before ACLs even matter.
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:write', 'WRITE', '')
+        self.assertEqual(status.split()[0], '400')
+
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:write', 'WRITE', '/')
+        self.assertEqual(status.split()[0], '400')
+
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:write', 'WRITE', '/bucket')
+        self.assertEqual(status.split()[0], '400')
+
+        status, headers, body = \
+            self._test_copy_for_s3acl('test:write', 'WRITE', '/bucket/')
+        self.assertEqual(status.split()[0], '400')
+
+    @s3acl
+    def test_upload_part_copy_headers_error(self):
+        # Each X-Amz-Copy-Source-If-* conditional, when the source HEAD
+        # reports the condition unmet (412/304), maps to the S3
+        # PreconditionFailed error.
+        # NOTE(review): `header` is reused as both the request-header dict
+        # and the unpacked response headers; works, but easy to misread.
+        account = 'test:tester'
+        etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+        last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
+
+        header = {'X-Amz-Copy-Source-If-Match': etag}
+        status, header, body = \
+            self._test_copy_for_s3acl(account,
+                                      head_resp=swob.HTTPPreconditionFailed,
+                                      put_header=header)
+        self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
+
+        header = {'X-Amz-Copy-Source-If-None-Match': etag}
+        status, header, body = \
+            self._test_copy_for_s3acl(account,
+                                      head_resp=swob.HTTPNotModified,
+                                      put_header=header)
+        self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
+
+        header = {'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
+        status, header, body = \
+            self._test_copy_for_s3acl(account,
+                                      head_resp=swob.HTTPNotModified,
+                                      put_header=header)
+        self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
+
+        header = \
+            {'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
+        status, header, body = \
+            self._test_copy_for_s3acl(account,
+                                      head_resp=swob.HTTPPreconditionFailed,
+                                      put_header=header)
+        self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
+
+    def test_upload_part_copy_headers_with_match(self):
+        # If-Match/If-Modified-Since conditionals must be forwarded only on
+        # the source HEAD (second-to-last backend call), not on the final
+        # PUT nor on the first call.
+        account = 'test:tester'
+        etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+        last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
+
+        header = {'X-Amz-Copy-Source-If-Match': etag,
+                  'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
+        status, header, body = \
+            self._test_copy_for_s3acl(account, put_header=header)
+
+        self.assertEqual(status.split()[0], '200')
+
+        self.assertEqual(len(self.swift.calls_with_headers), 4)
+        _, _, headers = self.swift.calls_with_headers[-2]
+        self.assertEqual(headers['If-Match'], etag)
+        self.assertEqual(headers['If-Modified-Since'], last_modified_since)
+        _, _, headers = self.swift.calls_with_headers[-1]
+        self.assertTrue(headers.get('If-Match') is None)
+        self.assertTrue(headers.get('If-Modified-Since') is None)
+        _, _, headers = self.swift.calls_with_headers[0]
+        self.assertTrue(headers.get('If-Match') is None)
+        self.assertTrue(headers.get('If-Modified-Since') is None)
+
+    @s3acl(s3acl_only=True)
+    def test_upload_part_copy_headers_with_match_and_s3acl(self):
+        # Same as test_upload_part_copy_headers_with_match but with s3acl
+        # enabled: an extra destination check happens before the source
+        # HEAD, and it must not carry the conditional headers.
+        account = 'test:tester'
+        etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+        last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
+
+        header = {'X-Amz-Copy-Source-If-Match': etag,
+                  'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
+        status, header, body = \
+            self._test_copy_for_s3acl(account, put_header=header)
+
+        self.assertEqual(status.split()[0], '200')
+        self.assertEqual(len(self.swift.calls_with_headers), 4)
+        # Before the check of the copy source in the case of s3acl is valid,
+        # s3api check the bucket write permissions and the object existence
+        # of the destination.
+        _, _, headers = self.swift.calls_with_headers[-3]
+        self.assertTrue(headers.get('If-Match') is None)
+        self.assertTrue(headers.get('If-Modified-Since') is None)
+        _, _, headers = self.swift.calls_with_headers[-2]
+        self.assertEqual(headers['If-Match'], etag)
+        self.assertEqual(headers['If-Modified-Since'], last_modified_since)
+        _, _, headers = self.swift.calls_with_headers[-1]
+        self.assertTrue(headers.get('If-Match') is None)
+        self.assertTrue(headers.get('If-Modified-Since') is None)
+        _, _, headers = self.swift.calls_with_headers[0]
+        self.assertTrue(headers.get('If-Match') is None)
+        self.assertTrue(headers.get('If-Modified-Since') is None)
+
+    def test_upload_part_copy_headers_with_not_match(self):
+        # If-None-Match/If-Unmodified-Since conditionals must be forwarded
+        # only on the source HEAD, not on the final PUT or first call.
+        account = 'test:tester'
+        etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+        last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
+
+        header = {'X-Amz-Copy-Source-If-None-Match': etag,
+                  'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
+        status, header, body = \
+            self._test_copy_for_s3acl(account, put_header=header)
+
+        self.assertEqual(status.split()[0], '200')
+        self.assertEqual(len(self.swift.calls_with_headers), 4)
+        _, _, headers = self.swift.calls_with_headers[-2]
+        self.assertEqual(headers['If-None-Match'], etag)
+        self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
+        _, _, headers = self.swift.calls_with_headers[-1]
+        self.assertTrue(headers.get('If-None-Match') is None)
+        self.assertTrue(headers.get('If-Unmodified-Since') is None)
+        _, _, headers = self.swift.calls_with_headers[0]
+        self.assertTrue(headers.get('If-None-Match') is None)
+        self.assertTrue(headers.get('If-Unmodified-Since') is None)
+
+ @s3acl(s3acl_only=True)
+ def test_upload_part_copy_headers_with_not_match_and_s3acl(self):
+ account = 'test:tester'
+ etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+ last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
+
+ header = {'X-Amz-Copy-Source-If-None-Match': etag,
+ 'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
+ status, header, body = \
+ self._test_copy_for_s3acl(account, put_header=header)
+
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(len(self.swift.calls_with_headers), 4)
+ # Before the check of the copy source in the case of s3acl is valid,
+ # s3api check the bucket write permissions and the object existence
+ # of the destination.
+ _, _, headers = self.swift.calls_with_headers[-3]
+ self.assertTrue(headers.get('If-Match') is None)
+ self.assertTrue(headers.get('If-Modified-Since') is None)
+ _, _, headers = self.swift.calls_with_headers[-2]
+ self.assertEqual(headers['If-None-Match'], etag)
+ self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
+ self.assertTrue(headers.get('If-Match') is None)
+ self.assertTrue(headers.get('If-Modified-Since') is None)
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertTrue(headers.get('If-None-Match') is None)
+ self.assertTrue(headers.get('If-Unmodified-Since') is None)
+ _, _, headers = self.swift.calls_with_headers[0]
+
+    def test_upload_part_copy_range_unsatisfiable(self):
+        # X-Amz-Copy-Source-Range beyond the source size -> 400 with an
+        # explanatory message, and no backend PUT is attempted.
+        account = 'test:tester'
+
+        header = {'X-Amz-Copy-Source-Range': 'bytes=1000-'}
+        status, header, body = self._test_copy_for_s3acl(
+            account, src_headers={'Content-Length': '10'}, put_header=header)
+
+        self.assertEqual(status.split()[0], '400')
+        self.assertIn('Range specified is not valid for '
+                      'source object of size: 10', body)
+
+        self.assertEqual([
+            ('HEAD', '/v1/AUTH_test/bucket'),
+            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
+            ('HEAD', '/v1/AUTH_test/src_bucket/src_obj'),
+        ], self.swift.calls)
+
+    def test_upload_part_copy_range_invalid(self):
+        # Syntactically invalid X-Amz-Copy-Source-Range values (missing
+        # "bytes=" prefix, garbage) -> 400.
+        account = 'test:tester'
+
+        header = {'X-Amz-Copy-Source-Range': '0-9'}
+        status, header, body = \
+            self._test_copy_for_s3acl(account, put_header=header)
+
+        self.assertEqual(status.split()[0], '400', body)
+
+        header = {'X-Amz-Copy-Source-Range': 'asdf'}
+        status, header, body = \
+            self._test_copy_for_s3acl(account, put_header=header)
+
+        self.assertEqual(status.split()[0], '400', body)
+
+    def test_upload_part_copy_range(self):
+        # Valid X-Amz-Copy-Source-Range: the backend segment PUT carries
+        # the Range and X-Copy-From headers.
+        account = 'test:tester'
+
+        header = {'X-Amz-Copy-Source-Range': 'bytes=0-9'}
+        status, header, body = self._test_copy_for_s3acl(
+            account, src_headers={'Content-Length': '20'}, put_header=header)
+
+        self.assertEqual(status.split()[0], '200', body)
+
+        self.assertEqual([
+            ('HEAD', '/v1/AUTH_test/bucket'),
+            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
+            ('HEAD', '/v1/AUTH_test/src_bucket/src_obj'),
+            ('PUT', '/v1/AUTH_test/bucket+segments/object/X/1'),
+        ], self.swift.calls)
+        put_headers = self.swift.calls_with_headers[-1][2]
+        self.assertEqual('bytes=0-9', put_headers['Range'])
+        self.assertEqual('/src_bucket/src_obj', put_headers['X-Copy-From'])
+
+    def _test_no_body(self, use_content_length=False,
+                      use_transfer_encoding=False, string_to_md5=''):
+        # Helper: POST a Complete Multipart Upload with an empty body
+        # (guarded by UnreadableInput so the body is never actually read)
+        # and expect the "must specify at least one part" InvalidRequest.
+        # NOTE: .encode('base64') is a Python 2-only idiom.
+        content_md5 = md5(string_to_md5).digest().encode('base64').strip()
+        with UnreadableInput(self) as fake_input:
+            req = Request.blank(
+                '/bucket/object?uploadId=X',
+                environ={
+                    'REQUEST_METHOD': 'POST',
+                    'wsgi.input': fake_input},
+                headers={
+                    'Authorization': 'AWS test:tester:hmac',
+                    'Date': self.get_date_header(),
+                    'Content-MD5': content_md5},
+                body='')
+            if not use_content_length:
+                req.environ.pop('CONTENT_LENGTH')
+            if use_transfer_encoding:
+                req.environ['HTTP_TRANSFER_ENCODING'] = 'chunked'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status, '400 Bad Request')
+        self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+        self.assertEqual(self._get_error_message(body),
+                         'You must specify at least one part')
+
+    @s3acl
+    def test_object_multi_upload_empty_body(self):
+        # Empty completion body is rejected for every framing variant:
+        # with/without Content-Length, chunked, and with a Content-MD5.
+        self._test_no_body()
+        self._test_no_body(string_to_md5='test')
+        self._test_no_body(use_content_length=True)
+        self._test_no_body(use_content_length=True, string_to_md5='test')
+        self._test_no_body(use_transfer_encoding=True)
+        self._test_no_body(use_transfer_encoding=True, string_to_md5='test')
+
+
+class TestS3ApiMultiUploadNonUTC(TestS3ApiMultiUpload):
+    # Re-run every TestS3ApiMultiUpload test with TZ set to a non-UTC
+    # zone, to catch accidental dependence on the process-local timezone.
+    def setUp(self):
+        # Save and override TZ before the base fixtures are built.
+        self.orig_tz = os.environ.get('TZ', '')
+        os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
+        time.tzset()
+        super(TestS3ApiMultiUploadNonUTC, self).setUp()
+
+    def tearDown(self):
+        # Restore the original TZ after base teardown.
+        super(TestS3ApiMultiUploadNonUTC, self).tearDown()
+        os.environ['TZ'] = self.orig_tz
+        time.tzset()
+
+if __name__ == '__main__':
+    # Allow running this test module directly.
+    unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_obj.py b/test/unit/common/middleware/s3api/test_obj.py
new file mode 100644
index 000000000..601cd7cd0
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_obj.py
@@ -0,0 +1,1010 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from datetime import datetime
+import hashlib
+import os
+from os.path import join
+import time
+from mock import patch
+
+from swift.common import swob
+from swift.common.swob import Request
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from test.unit.common.middleware.s3api.test_s3_acl import s3acl
+from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, \
+ Owner, Grant
+from swift.common.middleware.s3api.etree import fromstring
+from swift.common.middleware.s3api.utils import mktime, S3Timestamp
+from test.unit.common.middleware.s3api.helpers import FakeSwift
+
+
+def _wrap_fake_auth_middleware(org_func):
+ def fake_fake_auth_middleware(self, env):
+ org_func(env)
+
+ if 'swift.authorize_override' in env:
+ return
+
+ if 'HTTP_AUTHORIZATION' not in env:
+ return
+
+ _, authorization = env['HTTP_AUTHORIZATION'].split(' ')
+ tenant_user, sign = authorization.rsplit(':', 1)
+ tenant, user = tenant_user.rsplit(':', 1)
+
+ env['HTTP_X_TENANT_NAME'] = tenant
+ env['HTTP_X_USER_NAME'] = user
+
+ return fake_fake_auth_middleware
+
+
+class TestS3ApiObj(S3ApiTestCase):
+
+ def setUp(self):
+ super(TestS3ApiObj, self).setUp()
+
+ self.object_body = 'hello'
+ self.etag = hashlib.md5(self.object_body).hexdigest()
+ self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT'
+
+ self.response_headers = {'Content-Type': 'text/html',
+ 'Content-Length': len(self.object_body),
+ 'Content-Disposition': 'inline',
+ 'Content-Language': 'en',
+ 'x-object-meta-test': 'swift',
+ 'etag': self.etag,
+ 'last-modified': self.last_modified,
+ 'expires': 'Mon, 21 Sep 2015 12:00:00 GMT',
+ 'x-robots-tag': 'nofollow',
+ 'cache-control': 'private'}
+
+ self.swift.register('GET', '/v1/AUTH_test/bucket/object',
+ swob.HTTPOk, self.response_headers,
+ self.object_body)
+ self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
+ swob.HTTPCreated,
+ {'etag': self.etag,
+ 'last-modified': self.last_modified,
+ 'x-object-meta-something': 'oh hai'},
+ None)
+
+ def _test_object_GETorHEAD(self, method):
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': method},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ unexpected_headers = []
+ for key, val in self.response_headers.iteritems():
+ if key in ('Content-Length', 'Content-Type', 'content-encoding',
+ 'last-modified', 'cache-control', 'Content-Disposition',
+ 'Content-Language', 'expires', 'x-robots-tag'):
+ self.assertIn(key, headers)
+ self.assertEqual(headers[key], str(val))
+
+ elif key == 'etag':
+ self.assertEqual(headers[key], '"%s"' % val)
+
+ elif key.startswith('x-object-meta-'):
+ self.assertIn('x-amz-meta-' + key[14:], headers)
+ self.assertEqual(headers['x-amz-meta-' + key[14:]], val)
+
+ else:
+ unexpected_headers.append((key, val))
+
+ if unexpected_headers:
+ self.fail('unexpected headers: %r' % unexpected_headers)
+
+ self.assertEqual(headers['etag'],
+ '"%s"' % self.response_headers['etag'])
+
+ if method == 'GET':
+ self.assertEqual(body, self.object_body)
+
+ @s3acl
+ def test_object_HEAD_error(self):
+        # Per the S3 REST API specification, a HEAD response carries no
+        # body even for an error, so these HEAD error tests can only
+        # check the response status code.
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ swob.HTTPUnauthorized, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '403')
+ self.assertEqual(body, '') # sanity
+
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ swob.HTTPForbidden, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '403')
+ self.assertEqual(body, '') # sanity
+
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ swob.HTTPNotFound, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '404')
+ self.assertEqual(body, '') # sanity
+
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ swob.HTTPPreconditionFailed, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '412')
+ self.assertEqual(body, '') # sanity
+
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ swob.HTTPServerError, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '500')
+ self.assertEqual(body, '') # sanity
+
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ swob.HTTPServiceUnavailable, {}, None)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '500')
+ self.assertEqual(body, '') # sanity
+
+ def test_object_HEAD(self):
+ self._test_object_GETorHEAD('HEAD')
+
+ def _test_object_HEAD_Range(self, range_value):
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Range': range_value,
+ 'Date': self.get_date_header()})
+ return self.call_s3api(req)
+
+ @s3acl
+ def test_object_HEAD_Range_with_invalid_value(self):
+ range_value = ''
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '200')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertTrue('content-range' not in headers)
+
+ range_value = 'hoge'
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '200')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertTrue('content-range' not in headers)
+
+ range_value = 'bytes='
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '200')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertTrue('content-range' not in headers)
+
+ range_value = 'bytes=1'
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '200')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertTrue('content-range' not in headers)
+
+ range_value = 'bytes=5-1'
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '200')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '5')
+ self.assertTrue('content-range' not in headers)
+
+ range_value = 'bytes=5-10'
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '416')
+
+ @s3acl
+ def test_object_HEAD_Range(self):
+ # update response headers
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ swob.HTTPOk, self.response_headers,
+ self.object_body)
+ range_value = 'bytes=0-3'
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '206')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '4')
+ self.assertTrue('content-range' in headers)
+ self.assertTrue(headers['content-range'].startswith('bytes 0-3'))
+ self.assertTrue('x-amz-meta-test' in headers)
+ self.assertEqual('swift', headers['x-amz-meta-test'])
+
+ range_value = 'bytes=3-3'
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '206')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '1')
+ self.assertTrue('content-range' in headers)
+ self.assertTrue(headers['content-range'].startswith('bytes 3-3'))
+ self.assertTrue('x-amz-meta-test' in headers)
+ self.assertEqual('swift', headers['x-amz-meta-test'])
+
+ range_value = 'bytes=1-'
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '206')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '4')
+ self.assertTrue('content-range' in headers)
+ self.assertTrue(headers['content-range'].startswith('bytes 1-4'))
+ self.assertTrue('x-amz-meta-test' in headers)
+ self.assertEqual('swift', headers['x-amz-meta-test'])
+
+ range_value = 'bytes=-3'
+ status, headers, body = self._test_object_HEAD_Range(range_value)
+ self.assertEqual(status.split()[0], '206')
+ self.assertTrue('content-length' in headers)
+ self.assertEqual(headers['content-length'], '3')
+ self.assertTrue('content-range' in headers)
+ self.assertTrue(headers['content-range'].startswith('bytes 2-4'))
+ self.assertTrue('x-amz-meta-test' in headers)
+ self.assertEqual('swift', headers['x-amz-meta-test'])
+
+ @s3acl
+ def test_object_GET_error(self):
+ code = self._test_method_error('GET', '/bucket/object',
+ swob.HTTPUnauthorized)
+ self.assertEqual(code, 'SignatureDoesNotMatch')
+ code = self._test_method_error('GET', '/bucket/object',
+ swob.HTTPForbidden)
+ self.assertEqual(code, 'AccessDenied')
+ code = self._test_method_error('GET', '/bucket/object',
+ swob.HTTPNotFound)
+ self.assertEqual(code, 'NoSuchKey')
+ code = self._test_method_error('GET', '/bucket/object',
+ swob.HTTPServerError)
+ self.assertEqual(code, 'InternalError')
+ code = self._test_method_error('GET', '/bucket/object',
+ swob.HTTPPreconditionFailed)
+ self.assertEqual(code, 'PreconditionFailed')
+ code = self._test_method_error('GET', '/bucket/object',
+ swob.HTTPServiceUnavailable)
+ self.assertEqual(code, 'InternalError')
+
+ @s3acl
+ def test_object_GET(self):
+ self._test_object_GETorHEAD('GET')
+
+ @s3acl(s3acl_only=True)
+ def test_object_GET_with_s3acl_and_keystone(self):
+ # for passing keystone authentication root
+ fake_auth = self.swift._fake_auth_middleware
+ with patch.object(FakeSwift, '_fake_auth_middleware',
+ _wrap_fake_auth_middleware(fake_auth)):
+
+ self._test_object_GETorHEAD('GET')
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertNotIn('Authorization', headers)
+ _, _, headers = self.swift.calls_with_headers[0]
+ self.assertNotIn('Authorization', headers)
+
+ @s3acl
+ def test_object_GET_Range(self):
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Range': 'bytes=0-3',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '206')
+
+ self.assertTrue('content-range' in headers)
+ self.assertTrue(headers['content-range'].startswith('bytes 0-3'))
+
+ @s3acl
+ def test_object_GET_Range_error(self):
+ code = self._test_method_error('GET', '/bucket/object',
+ swob.HTTPRequestedRangeNotSatisfiable)
+ self.assertEqual(code, 'InvalidRange')
+
+ @s3acl
+ def test_object_GET_Response(self):
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'GET',
+ 'QUERY_STRING':
+ 'response-content-type=%s&'
+ 'response-content-language=%s&'
+ 'response-expires=%s&'
+ 'response-cache-control=%s&'
+ 'response-content-disposition=%s&'
+ 'response-content-encoding=%s&'
+ % ('text/plain', 'en',
+ 'Fri, 01 Apr 2014 12:00:00 GMT',
+ 'no-cache',
+ 'attachment',
+ 'gzip')},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ self.assertTrue('content-type' in headers)
+ self.assertEqual(headers['content-type'], 'text/plain')
+ self.assertTrue('content-language' in headers)
+ self.assertEqual(headers['content-language'], 'en')
+ self.assertTrue('expires' in headers)
+ self.assertEqual(headers['expires'], 'Fri, 01 Apr 2014 12:00:00 GMT')
+ self.assertTrue('cache-control' in headers)
+ self.assertEqual(headers['cache-control'], 'no-cache')
+ self.assertTrue('content-disposition' in headers)
+ self.assertEqual(headers['content-disposition'],
+ 'attachment')
+ self.assertTrue('content-encoding' in headers)
+ self.assertEqual(headers['content-encoding'], 'gzip')
+
+ @s3acl
+ def test_object_PUT_error(self):
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPUnauthorized)
+ self.assertEqual(code, 'SignatureDoesNotMatch')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPForbidden)
+ self.assertEqual(code, 'AccessDenied')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPNotFound)
+ self.assertEqual(code, 'NoSuchBucket')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPRequestEntityTooLarge)
+ self.assertEqual(code, 'EntityTooLarge')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPServerError)
+ self.assertEqual(code, 'InternalError')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPUnprocessableEntity)
+ self.assertEqual(code, 'BadDigest')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPLengthRequired)
+ self.assertEqual(code, 'MissingContentLength')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPPreconditionFailed)
+ self.assertEqual(code, 'InternalError')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPServiceUnavailable)
+ self.assertEqual(code, 'InternalError')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': ''})
+ self.assertEqual(code, 'InvalidArgument')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': '/'})
+ self.assertEqual(code, 'InvalidArgument')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': '/bucket'})
+ self.assertEqual(code, 'InvalidArgument')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': '/bucket/'})
+ self.assertEqual(code, 'InvalidArgument')
+ code = self._test_method_error(
+ 'PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': '/bucket/src_obj?foo=bar'})
+ self.assertEqual(code, 'InvalidArgument')
+    # adding other query parameters will cause an error
+ code = self._test_method_error(
+ 'PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo&bar=baz'})
+ self.assertEqual(code, 'InvalidArgument')
+    # ...even if versionId appears last
+ code = self._test_method_error(
+ 'PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': '/bucket/src_obj?bar=baz&versionId=foo'})
+ self.assertEqual(code, 'InvalidArgument')
+ code = self._test_method_error(
+ 'PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo'})
+ self.assertEqual(code, 'NotImplemented')
+ code = self._test_method_error(
+ 'PUT', '/bucket/object',
+ swob.HTTPCreated,
+ {'X-Amz-Copy-Source': '/src_bucket/src_object',
+ 'X-Amz-Copy-Source-Range': 'bytes=0-0'})
+ self.assertEqual(code, 'InvalidArgument')
+ code = self._test_method_error('PUT', '/bucket/object',
+ swob.HTTPRequestTimeout)
+ self.assertEqual(code, 'RequestTimeout')
+
+ @s3acl
+ def test_object_PUT(self):
+ etag = self.response_headers['etag']
+ content_md5 = etag.decode('hex').encode('base64').strip()
+
+ req = Request.blank(
+ '/bucket/object',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'x-amz-storage-class': 'STANDARD',
+ 'Content-MD5': content_md5,
+ 'Date': self.get_date_header()},
+ body=self.object_body)
+ req.date = datetime.now()
+ req.content_type = 'text/plain'
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+ # Check that s3api returns an etag header.
+ self.assertEqual(headers['etag'], '"%s"' % etag)
+
+ _, _, headers = self.swift.calls_with_headers[-1]
+ # Check that s3api converts a Content-MD5 header into an etag.
+ self.assertEqual(headers['etag'], etag)
+
+ def test_object_PUT_headers(self):
+ content_md5 = self.etag.decode('hex').encode('base64').strip()
+
+ self.swift.register('HEAD', '/v1/AUTH_test/some/source',
+ swob.HTTPOk, {'last-modified': self.last_modified},
+ None)
+ req = Request.blank(
+ '/bucket/object',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'X-Amz-Storage-Class': 'STANDARD',
+ 'X-Amz-Meta-Something': 'oh hai',
+ 'X-Amz-Meta-Unreadable-Prefix': '\x04w',
+ 'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
+ 'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
+ 'X-Amz-Copy-Source': '/some/source',
+ 'Content-MD5': content_md5,
+ 'Date': self.get_date_header()})
+ req.date = datetime.now()
+ req.content_type = 'text/plain'
+ status, headers, body = self.call_s3api(req)
+        # Check that s3api does not return an etag header
+        # when a copy source is specified.
+ self.assertTrue(headers.get('etag') is None)
+ # Check that s3api does not return custom metadata in response
+ self.assertTrue(headers.get('x-amz-meta-something') is None)
+
+ _, _, headers = self.swift.calls_with_headers[-1]
+ # Check that s3api converts a Content-MD5 header into an etag.
+ self.assertEqual(headers['ETag'], self.etag)
+ self.assertEqual(headers['X-Object-Meta-Something'], 'oh hai')
+ self.assertEqual(headers['X-Object-Meta-Unreadable-Prefix'],
+ '=?UTF-8?Q?=04w?=')
+ self.assertEqual(headers['X-Object-Meta-Unreadable-Suffix'],
+ '=?UTF-8?Q?h=04?=')
+ self.assertEqual(headers['X-Object-Meta-Lots-Of-Unprintable'],
+ '=?UTF-8?B?BAQEBAQ=?=')
+ self.assertEqual(headers['X-Copy-From'], '/some/source')
+ self.assertEqual(headers['Content-Length'], '0')
+
+ def _test_object_PUT_copy(self, head_resp, put_header=None,
+ src_path='/some/source', timestamp=None):
+ account = 'test:tester'
+ grants = [Grant(User(account), 'FULL_CONTROL')]
+ head_headers = \
+ encode_acl('object',
+ ACL(Owner(account, account), grants))
+ head_headers.update({'last-modified': self.last_modified})
+ self.swift.register('HEAD', '/v1/AUTH_test/some/source',
+ head_resp, head_headers, None)
+ put_header = put_header or {}
+ return self._call_object_copy(src_path, put_header, timestamp)
+
+ def _test_object_PUT_copy_self(self, head_resp,
+ put_header=None, timestamp=None):
+ account = 'test:tester'
+ grants = [Grant(User(account), 'FULL_CONTROL')]
+ head_headers = \
+ encode_acl('object',
+ ACL(Owner(account, account), grants))
+ head_headers.update({'last-modified': self.last_modified})
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ head_resp, head_headers, None)
+ put_header = put_header or {}
+ return self._call_object_copy('/bucket/object', put_header, timestamp)
+
+ def _call_object_copy(self, src_path, put_header, timestamp=None):
+ put_headers = {'Authorization': 'AWS test:tester:hmac',
+ 'X-Amz-Copy-Source': src_path,
+ 'Date': self.get_date_header()}
+ put_headers.update(put_header)
+
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers=put_headers)
+
+ req.date = datetime.now()
+ req.content_type = 'text/plain'
+ timestamp = timestamp or time.time()
+ with patch('swift.common.middleware.s3api.utils.time.time',
+ return_value=timestamp):
+ return self.call_s3api(req)
+
+ @s3acl
+ def test_object_PUT_copy(self):
+ def do_test(src_path=None):
+ date_header = self.get_date_header()
+ timestamp = mktime(date_header)
+ last_modified = S3Timestamp(timestamp).s3xmlformat
+ status, headers, body = self._test_object_PUT_copy(
+ swob.HTTPOk, put_header={'Date': date_header},
+ timestamp=timestamp, src_path=src_path)
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(headers['Content-Type'], 'application/xml')
+
+ self.assertTrue(headers.get('etag') is None)
+ self.assertTrue(headers.get('x-amz-meta-something') is None)
+ elem = fromstring(body, 'CopyObjectResult')
+ self.assertEqual(elem.find('LastModified').text, last_modified)
+ self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
+
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertEqual(headers['X-Copy-From'], '/some/source')
+ self.assertEqual(headers['Content-Length'], '0')
+
+ do_test('/some/source')
+ do_test('/some/source?')
+ do_test('/some/source?versionId=null')
+ # Some clients (like Boto) don't include the leading slash;
+ # AWS seems to tolerate this so we should, too
+ do_test('some/source')
+
+ @s3acl
+ def test_object_PUT_copy_self(self):
+ status, headers, body = \
+ self._test_object_PUT_copy_self(swob.HTTPOk)
+ self.assertEqual(status.split()[0], '400')
+ elem = fromstring(body, 'Error')
+ err_msg = ("This copy request is illegal because it is trying to copy "
+ "an object to itself without changing the object's "
+ "metadata, storage class, website redirect location or "
+ "encryption attributes.")
+ self.assertEqual(elem.find('Code').text, 'InvalidRequest')
+ self.assertEqual(elem.find('Message').text, err_msg)
+
+ @s3acl
+ def test_object_PUT_copy_self_metadata_copy(self):
+ header = {'x-amz-metadata-directive': 'COPY'}
+ status, headers, body = \
+ self._test_object_PUT_copy_self(swob.HTTPOk, header)
+ self.assertEqual(status.split()[0], '400')
+ elem = fromstring(body, 'Error')
+ err_msg = ("This copy request is illegal because it is trying to copy "
+ "an object to itself without changing the object's "
+ "metadata, storage class, website redirect location or "
+ "encryption attributes.")
+ self.assertEqual(elem.find('Code').text, 'InvalidRequest')
+ self.assertEqual(elem.find('Message').text, err_msg)
+
+ @s3acl
+ def test_object_PUT_copy_self_metadata_replace(self):
+ date_header = self.get_date_header()
+ timestamp = mktime(date_header)
+ last_modified = S3Timestamp(timestamp).s3xmlformat
+ header = {'x-amz-metadata-directive': 'REPLACE',
+ 'Date': date_header}
+ status, headers, body = self._test_object_PUT_copy_self(
+ swob.HTTPOk, header, timestamp=timestamp)
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(headers['Content-Type'], 'application/xml')
+ self.assertTrue(headers.get('etag') is None)
+ elem = fromstring(body, 'CopyObjectResult')
+ self.assertEqual(elem.find('LastModified').text, last_modified)
+ self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
+
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertEqual(headers['X-Copy-From'], '/bucket/object')
+ self.assertEqual(headers['Content-Length'], '0')
+
+ @s3acl
+ def test_object_PUT_copy_headers_error(self):
+ etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+ last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
+
+ header = {'X-Amz-Copy-Source-If-Match': etag,
+ 'Date': self.get_date_header()}
+ status, header, body = \
+ self._test_object_PUT_copy(swob.HTTPPreconditionFailed,
+ header)
+ self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
+
+ header = {'X-Amz-Copy-Source-If-None-Match': etag}
+ status, header, body = \
+ self._test_object_PUT_copy(swob.HTTPNotModified,
+ header)
+ self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
+
+ header = {'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
+ status, header, body = \
+ self._test_object_PUT_copy(swob.HTTPNotModified,
+ header)
+ self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
+
+ header = \
+ {'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
+ status, header, body = \
+ self._test_object_PUT_copy(swob.HTTPPreconditionFailed,
+ header)
+ self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
+
+ def test_object_PUT_copy_headers_with_match(self):
+ etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+ last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
+
+ header = {'X-Amz-Copy-Source-If-Match': etag,
+ 'X-Amz-Copy-Source-If-Modified-Since': last_modified_since,
+ 'Date': self.get_date_header()}
+ status, header, body = \
+ self._test_object_PUT_copy(swob.HTTPOk, header)
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(len(self.swift.calls_with_headers), 2)
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertTrue(headers.get('If-Match') is None)
+ self.assertTrue(headers.get('If-Modified-Since') is None)
+ _, _, headers = self.swift.calls_with_headers[0]
+ self.assertEqual(headers['If-Match'], etag)
+ self.assertEqual(headers['If-Modified-Since'], last_modified_since)
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_copy_headers_with_match_and_s3acl(self):
+ etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+ last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
+
+ header = {'X-Amz-Copy-Source-If-Match': etag,
+ 'X-Amz-Copy-Source-If-Modified-Since': last_modified_since,
+ 'Date': self.get_date_header()}
+ status, header, body = \
+ self._test_object_PUT_copy(swob.HTTPOk, header)
+
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(len(self.swift.calls_with_headers), 3)
+        # With s3acl enabled, after the copy-source check succeeds,
+        # s3api checks the bucket write permissions of the destination.
+ _, _, headers = self.swift.calls_with_headers[-2]
+ self.assertTrue(headers.get('If-Match') is None)
+ self.assertTrue(headers.get('If-Modified-Since') is None)
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertTrue(headers.get('If-Match') is None)
+ self.assertTrue(headers.get('If-Modified-Since') is None)
+ _, _, headers = self.swift.calls_with_headers[0]
+ self.assertEqual(headers['If-Match'], etag)
+ self.assertEqual(headers['If-Modified-Since'], last_modified_since)
+
+ def test_object_PUT_copy_headers_with_not_match(self):
+ etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+ last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
+
+ header = {'X-Amz-Copy-Source-If-None-Match': etag,
+ 'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since,
+ 'Date': self.get_date_header()}
+ status, header, body = \
+ self._test_object_PUT_copy(swob.HTTPOk, header)
+
+ self.assertEqual(status.split()[0], '200')
+ self.assertEqual(len(self.swift.calls_with_headers), 2)
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertTrue(headers.get('If-None-Match') is None)
+ self.assertTrue(headers.get('If-Unmodified-Since') is None)
+ _, _, headers = self.swift.calls_with_headers[0]
+ self.assertEqual(headers['If-None-Match'], etag)
+ self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_copy_headers_with_not_match_and_s3acl(self):
+ etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
+ last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
+
+ header = {'X-Amz-Copy-Source-If-None-Match': etag,
+ 'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since,
+ 'Date': self.get_date_header()}
+ status, header, body = \
+ self._test_object_PUT_copy(swob.HTTPOk, header)
+ self.assertEqual(status.split()[0], '200')
+        # With s3acl enabled, after the copy-source check succeeds,
+        # s3api checks the bucket write permissions of the destination.
+ self.assertEqual(len(self.swift.calls_with_headers), 3)
+ _, _, headers = self.swift.calls_with_headers[-1]
+ self.assertTrue(headers.get('If-None-Match') is None)
+ self.assertTrue(headers.get('If-Unmodified-Since') is None)
+ _, _, headers = self.swift.calls_with_headers[0]
+ self.assertEqual(headers['If-None-Match'], etag)
+ self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
+
+ @s3acl
+ def test_object_POST_error(self):
+ code = self._test_method_error('POST', '/bucket/object', None)
+ self.assertEqual(code, 'NotImplemented')
+
+ @s3acl
+ def test_object_DELETE_error(self):
+ code = self._test_method_error('DELETE', '/bucket/object',
+ swob.HTTPUnauthorized)
+ self.assertEqual(code, 'SignatureDoesNotMatch')
+ code = self._test_method_error('DELETE', '/bucket/object',
+ swob.HTTPForbidden)
+ self.assertEqual(code, 'AccessDenied')
+ code = self._test_method_error('DELETE', '/bucket/object',
+ swob.HTTPServerError)
+ self.assertEqual(code, 'InternalError')
+ code = self._test_method_error('DELETE', '/bucket/object',
+ swob.HTTPServiceUnavailable)
+ self.assertEqual(code, 'InternalError')
+
+ with patch(
+ 'swift.common.middleware.s3api.s3request.get_container_info',
+ return_value={'status': 204}):
+ code = self._test_method_error('DELETE', '/bucket/object',
+ swob.HTTPNotFound)
+ self.assertEqual(code, 'NoSuchKey')
+
+ with patch(
+ 'swift.common.middleware.s3api.s3request.get_container_info',
+ return_value={'status': 404}):
+ code = self._test_method_error('DELETE', '/bucket/object',
+ swob.HTTPNotFound)
+ self.assertEqual(code, 'NoSuchBucket')
+
+ @s3acl
+ def test_object_DELETE_no_multipart(self):
+ self.s3api.conf.allow_multipart_uploads = False
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '204')
+
+ self.assertNotIn(('HEAD', '/v1/AUTH_test/bucket/object'),
+ self.swift.calls)
+ self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'),
+ self.swift.calls)
+ _, path = self.swift.calls[-1]
+ self.assertEqual(path.count('?'), 0)
+
+ @s3acl
+ def test_object_DELETE_multipart(self):
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '204')
+
+ self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
+ self.swift.calls)
+ self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'),
+ self.swift.calls)
+ _, path = self.swift.calls[-1]
+ self.assertEqual(path.count('?'), 0)
+
+ @s3acl
+ def test_slo_object_DELETE(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
+ swob.HTTPOk,
+ {'x-static-large-object': 'True'},
+ None)
+ self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
+ swob.HTTPOk, {}, '<SLO delete results>')
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': 'DELETE'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'Content-Type': 'foo/bar'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '204')
+ self.assertEqual(body, '')
+
+ self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
+ self.swift.calls)
+ self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'
+ '?multipart-manifest=delete'),
+ self.swift.calls)
+ _, path, headers = self.swift.calls_with_headers[-1]
+ path, query_string = path.split('?', 1)
+ query = {}
+ for q in query_string.split('&'):
+ key, arg = q.split('=')
+ query[key] = arg
+ self.assertEqual(query['multipart-manifest'], 'delete')
+ self.assertNotIn('Content-Type', headers)
+
+ def _test_object_for_s3acl(self, method, account):
+ req = Request.blank('/bucket/object',
+ environ={'REQUEST_METHOD': method},
+ headers={'Authorization': 'AWS %s:hmac' % account,
+ 'Date': self.get_date_header()})
+ return self.call_s3api(req)
+
+ def _test_set_container_permission(self, account, permission):
+ grants = [Grant(User(account), permission)]
+ headers = \
+ encode_acl('container',
+ ACL(Owner('test:tester', 'test:tester'), grants))
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, headers, None)
+
+ @s3acl(s3acl_only=True)
+ def test_object_GET_without_permission(self):
+ status, headers, body = self._test_object_for_s3acl('GET',
+ 'test:other')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ @s3acl(s3acl_only=True)
+ def test_object_GET_with_read_permission(self):
+ status, headers, body = self._test_object_for_s3acl('GET',
+ 'test:read')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_object_GET_with_fullcontrol_permission(self):
+ status, headers, body = \
+ self._test_object_for_s3acl('GET', 'test:full_control')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_without_permission(self):
+ status, headers, body = self._test_object_for_s3acl('PUT',
+ 'test:other')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_with_owner_permission(self):
+ status, headers, body = self._test_object_for_s3acl('PUT',
+ 'test:tester')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_with_write_permission(self):
+ account = 'test:other'
+ self._test_set_container_permission(account, 'WRITE')
+ status, headers, body = self._test_object_for_s3acl('PUT', account)
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_with_fullcontrol_permission(self):
+ account = 'test:other'
+ self._test_set_container_permission(account, 'FULL_CONTROL')
+ status, headers, body = \
+ self._test_object_for_s3acl('PUT', account)
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_object_DELETE_without_permission(self):
+ account = 'test:other'
+ status, headers, body = self._test_object_for_s3acl('DELETE',
+ account)
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ @s3acl(s3acl_only=True)
+ def test_object_DELETE_with_owner_permission(self):
+ status, headers, body = self._test_object_for_s3acl('DELETE',
+ 'test:tester')
+ self.assertEqual(status.split()[0], '204')
+
+ @s3acl(s3acl_only=True)
+ def test_object_DELETE_with_write_permission(self):
+ account = 'test:other'
+ self._test_set_container_permission(account, 'WRITE')
+ status, headers, body = self._test_object_for_s3acl('DELETE',
+ account)
+ self.assertEqual(status.split()[0], '204')
+
+ @s3acl(s3acl_only=True)
+ def test_object_DELETE_with_fullcontrol_permission(self):
+ account = 'test:other'
+ self._test_set_container_permission(account, 'FULL_CONTROL')
+ status, headers, body = self._test_object_for_s3acl('DELETE', account)
+ self.assertEqual(status.split()[0], '204')
+
+ def _test_object_copy_for_s3acl(self, account, src_permission=None,
+ src_path='/src_bucket/src_obj'):
+ owner = 'test:tester'
+ grants = [Grant(User(account), src_permission)] \
+ if src_permission else [Grant(User(owner), 'FULL_CONTROL')]
+ src_o_headers = \
+ encode_acl('object', ACL(Owner(owner, owner), grants))
+ src_o_headers.update({'last-modified': self.last_modified})
+ self.swift.register(
+ 'HEAD', join('/v1/AUTH_test', src_path.lstrip('/')),
+ swob.HTTPOk, src_o_headers, None)
+
+ req = Request.blank(
+ '/bucket/object',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS %s:hmac' % account,
+ 'X-Amz-Copy-Source': src_path,
+ 'Date': self.get_date_header()})
+
+ return self.call_s3api(req)
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_copy_with_owner_permission(self):
+ status, headers, body = \
+ self._test_object_copy_for_s3acl('test:tester')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_copy_with_fullcontrol_permission(self):
+ status, headers, body = \
+ self._test_object_copy_for_s3acl('test:full_control',
+ 'FULL_CONTROL')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_copy_with_grantee_permission(self):
+ status, headers, body = \
+ self._test_object_copy_for_s3acl('test:write', 'READ')
+ self.assertEqual(status.split()[0], '200')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_copy_without_src_obj_permission(self):
+ status, headers, body = \
+ self._test_object_copy_for_s3acl('test:write')
+ self.assertEqual(status.split()[0], '403')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_copy_without_dst_container_permission(self):
+ status, headers, body = \
+ self._test_object_copy_for_s3acl('test:other', 'READ')
+ self.assertEqual(status.split()[0], '403')
+
+ @s3acl(s3acl_only=True)
+ def test_object_PUT_copy_empty_src_path(self):
+ self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
+ swob.HTTPPreconditionFailed, {}, None)
+ status, headers, body = self._test_object_copy_for_s3acl(
+ 'test:write', 'READ', src_path='')
+ self.assertEqual(status.split()[0], '400')
+
+
+class TestS3ApiObjNonUTC(TestS3ApiObj):
+ def setUp(self):
+ self.orig_tz = os.environ.get('TZ', '')
+ os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
+ time.tzset()
+ super(TestS3ApiObjNonUTC, self).setUp()
+
+ def tearDown(self):
+ super(TestS3ApiObjNonUTC, self).tearDown()
+ os.environ['TZ'] = self.orig_tz
+ time.tzset()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_s3_acl.py b/test/unit/common/middleware/s3api/test_s3_acl.py
new file mode 100644
index 000000000..9d347b5b0
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_s3_acl.py
@@ -0,0 +1,540 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import functools
+import sys
+import traceback
+from mock import patch, MagicMock
+
+from swift.common import swob
+from swift.common.swob import Request
+from swift.common.utils import json
+
+from swift.common.middleware.s3api.etree import tostring, Element, SubElement
+from swift.common.middleware.s3api.subresource import ACL, ACLPrivate, User, \
+ encode_acl, AuthenticatedUsers, AllUsers, Owner, Grant, PERMISSIONS
+from test.unit.common.middleware.s3api.test_s3api import S3ApiTestCase
+from test.unit.common.middleware.s3api.exceptions import NotMethodException
+from test.unit.common.middleware.s3api import FakeSwift
+
+
+XMLNS_XSI = 'http://www.w3.org/2001/XMLSchema-instance'
+
+
+def s3acl(func=None, s3acl_only=False):
+ """
+ NOTE: s3acl decorator needs an instance of s3api testing framework.
+ (i.e. An instance for first argument is necessary)
+ """
+ if func is None:
+ return functools.partial(s3acl, s3acl_only=s3acl_only)
+
+ @functools.wraps(func)
+ def s3acl_decorator(*args, **kwargs):
+ if not args and not kwargs:
+ raise NotMethodException('Use s3acl decorator for a method')
+
+ def call_func(failing_point=''):
+ try:
+ # For maintainability, we patch 204 status for every
+ # get_container_info. if you want, we can rewrite the
+ # statement easily with nested decorator like as:
+ #
+ # @s3acl
+ # @patch(xxx)
+ # def test_xxxx(self)
+
+ with patch('swift.common.middleware.s3api.s3request.'
+ 'get_container_info',
+ return_value={'status': 204}):
+ func(*args, **kwargs)
+ except AssertionError:
+ # Make traceback message to clarify the assertion
+ exc_type, exc_instance, exc_traceback = sys.exc_info()
+ formatted_traceback = ''.join(traceback.format_tb(
+ exc_traceback))
+ message = '\n%s\n%s:\n%s' % (formatted_traceback,
+ exc_type.__name__,
+                                             str(exc_instance))
+ message += failing_point
+ raise exc_type(message)
+
+ instance = args[0]
+
+ if not s3acl_only:
+ call_func()
+ instance.swift._calls = []
+
+ instance.s3api.conf.s3_acl = True
+ instance.swift.s3_acl = True
+ owner = Owner('test:tester', 'test:tester')
+ generate_s3acl_environ('test', instance.swift, owner)
+ call_func(' (fail at s3_acl)')
+
+ return s3acl_decorator
+
+
+def _gen_test_headers(owner, grants=(), resource='container'):
+ if not grants:
+ grants = [Grant(User('test:tester'), 'FULL_CONTROL')]
+ return encode_acl(resource, ACL(owner, grants))
+
+
+def _make_xml(grantee):
+ owner = 'test:tester'
+ permission = 'READ'
+ elem = Element('AccessControlPolicy')
+ elem_owner = SubElement(elem, 'Owner')
+ SubElement(elem_owner, 'ID').text = owner
+ SubElement(elem_owner, 'DisplayName').text = owner
+ acl_list_elem = SubElement(elem, 'AccessControlList')
+ elem_grant = SubElement(acl_list_elem, 'Grant')
+ elem_grant.append(grantee)
+ SubElement(elem_grant, 'Permission').text = permission
+ return tostring(elem)
+
+
+def generate_s3acl_environ(account, swift, owner):
+
+ def gen_grant(permission):
+ # generate Grant with a grantee named by "permission"
+ account_name = '%s:%s' % (account, permission.lower())
+ return Grant(User(account_name), permission)
+
+    grants = [gen_grant(p) for p in PERMISSIONS]
+ container_headers = _gen_test_headers(owner, grants)
+ object_headers = _gen_test_headers(owner, grants, 'object')
+ object_body = 'hello'
+ object_headers['Content-Length'] = len(object_body)
+
+ # TEST method is used to resolve a tenant name
+ swift.register('TEST', '/v1/AUTH_test', swob.HTTPMethodNotAllowed,
+ {}, None)
+ swift.register('TEST', '/v1/AUTH_X', swob.HTTPMethodNotAllowed,
+ {}, None)
+
+ # for bucket
+ swift.register('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
+ container_headers, None)
+ swift.register('HEAD', '/v1/AUTH_test/bucket+segments', swob.HTTPNoContent,
+ container_headers, None)
+ swift.register('PUT', '/v1/AUTH_test/bucket',
+ swob.HTTPCreated, {}, None)
+ swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
+ container_headers, json.dumps([]))
+ swift.register('POST', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, {}, None)
+ swift.register('DELETE', '/v1/AUTH_test/bucket',
+ swob.HTTPNoContent, {}, None)
+
+ # necessary for canned-acl tests
+ public_headers = _gen_test_headers(owner, [Grant(AllUsers(), 'READ')])
+ swift.register('GET', '/v1/AUTH_test/public', swob.HTTPNoContent,
+ public_headers, json.dumps([]))
+ authenticated_headers = _gen_test_headers(
+ owner, [Grant(AuthenticatedUsers(), 'READ')], 'bucket')
+ swift.register('GET', '/v1/AUTH_test/authenticated',
+ swob.HTTPNoContent, authenticated_headers,
+ json.dumps([]))
+
+ # for object
+ swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPOk,
+ object_headers, None)
+
+
+class TestS3ApiS3Acl(S3ApiTestCase):
+
+ def setUp(self):
+ super(TestS3ApiS3Acl, self).setUp()
+
+ self.s3api.conf.s3_acl = True
+ self.swift.s3_acl = True
+
+ account = 'test'
+ owner_name = '%s:tester' % account
+ self.default_owner = Owner(owner_name, owner_name)
+ generate_s3acl_environ(account, self.swift, self.default_owner)
+
+ def tearDown(self):
+ self.s3api.conf.s3_acl = False
+
+ def test_bucket_acl_PUT_with_other_owner(self):
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=tostring(
+ ACLPrivate(
+ Owner(id='test:other',
+ name='test:other')).elem()))
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ def test_object_acl_PUT_xml_error(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body="invalid xml")
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'MalformedACLError')
+
+ def test_canned_acl_private(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'private'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_canned_acl_public_read(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'public-read'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_canned_acl_public_read_write(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'public-read-write'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_canned_acl_authenticated_read(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'authenticated-read'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_canned_acl_bucket_owner_read(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'bucket-owner-read'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_canned_acl_bucket_owner_full_control(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'bucket-owner-full-control'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_invalid_canned_acl(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-acl': 'invalid'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ def _test_grant_header(self, permission):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-grant-' + permission:
+ 'id=test:tester'})
+ return self.call_s3api(req)
+
+ def test_grant_read(self):
+ status, headers, body = self._test_grant_header('read')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_grant_write(self):
+ status, headers, body = self._test_grant_header('write')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_grant_read_acp(self):
+ status, headers, body = self._test_grant_header('read-acp')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_grant_write_acp(self):
+ status, headers, body = self._test_grant_header('write-acp')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_grant_full_control(self):
+ status, headers, body = self._test_grant_header('full-control')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_grant_invalid_permission(self):
+ status, headers, body = self._test_grant_header('invalid')
+ self.assertEqual(self._get_error_code(body), 'MissingSecurityHeader')
+
+ def test_grant_with_both_header_and_xml(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-grant-full-control':
+ 'id=test:tester'},
+ body=tostring(
+ ACLPrivate(
+ Owner(id='test:tester',
+ name='test:tester')).elem()))
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'UnexpectedContent')
+
+ def test_grant_with_both_header_and_canned_acl(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-grant-full-control':
+ 'id=test:tester',
+ 'x-amz-acl': 'public-read'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+
+ def test_grant_email(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-grant-read': 'emailAddress=a@b.c'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'NotImplemented')
+
+ def test_grant_email_xml(self):
+ grantee = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
+ grantee.set('{%s}type' % XMLNS_XSI, 'AmazonCustomerByEmail')
+ SubElement(grantee, 'EmailAddress').text = 'Grantees@email.com'
+ xml = _make_xml(grantee=grantee)
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'NotImplemented')
+
+ def test_grant_invalid_group_xml(self):
+ grantee = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
+ grantee.set('{%s}type' % XMLNS_XSI, 'Invalid')
+ xml = _make_xml(grantee=grantee)
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'MalformedACLError')
+
+ def test_grant_authenticated_users(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-grant-read':
+ 'uri="http://acs.amazonaws.com/groups/'
+ 'global/AuthenticatedUsers"'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_grant_all_users(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-grant-read':
+ 'uri="http://acs.amazonaws.com/groups/'
+ 'global/AllUsers"'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ def test_grant_invalid_uri(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-grant-read':
+ 'uri="http://localhost/"'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ def test_grant_invalid_uri_xml(self):
+ grantee = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
+ grantee.set('{%s}type' % XMLNS_XSI, 'Group')
+ SubElement(grantee, 'URI').text = 'invalid'
+ xml = _make_xml(grantee)
+
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()},
+ body=xml)
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ def test_grant_invalid_target(self):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-grant-read': 'key=value'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+ def _test_bucket_acl_GET(self, account):
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS %s:hmac' % account,
+ 'Date': self.get_date_header()})
+ return self.call_s3api(req)
+
+ def test_bucket_acl_GET_without_permission(self):
+ status, headers, body = self._test_bucket_acl_GET('test:other')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ def test_bucket_acl_GET_with_read_acp_permission(self):
+ status, headers, body = self._test_bucket_acl_GET('test:read_acp')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_bucket_acl_GET_with_fullcontrol_permission(self):
+ status, headers, body = self._test_bucket_acl_GET('test:full_control')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_bucket_acl_GET_with_owner_permission(self):
+ status, headers, body = self._test_bucket_acl_GET('test:tester')
+ self.assertEqual(status.split()[0], '200')
+
+ def _test_bucket_acl_PUT(self, account, permission='FULL_CONTROL'):
+ acl = ACL(self.default_owner, [Grant(User(account), permission)])
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS %s:hmac' % account,
+ 'Date': self.get_date_header()},
+ body=tostring(acl.elem()))
+
+ return self.call_s3api(req)
+
+ def test_bucket_acl_PUT_without_permission(self):
+ status, headers, body = self._test_bucket_acl_PUT('test:other')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ def test_bucket_acl_PUT_with_write_acp_permission(self):
+ status, headers, body = self._test_bucket_acl_PUT('test:write_acp')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_bucket_acl_PUT_with_fullcontrol_permission(self):
+ status, headers, body = self._test_bucket_acl_PUT('test:full_control')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_bucket_acl_PUT_with_owner_permission(self):
+ status, headers, body = self._test_bucket_acl_PUT('test:tester')
+ self.assertEqual(status.split()[0], '200')
+
+ def _test_object_acl_GET(self, account):
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS %s:hmac' % account,
+ 'Date': self.get_date_header()})
+ return self.call_s3api(req)
+
+ def test_object_acl_GET_without_permission(self):
+ status, headers, body = self._test_object_acl_GET('test:other')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ def test_object_acl_GET_with_read_acp_permission(self):
+ status, headers, body = self._test_object_acl_GET('test:read_acp')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_object_acl_GET_with_fullcontrol_permission(self):
+ status, headers, body = self._test_object_acl_GET('test:full_control')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_object_acl_GET_with_owner_permission(self):
+ status, headers, body = self._test_object_acl_GET('test:tester')
+ self.assertEqual(status.split()[0], '200')
+
+ def _test_object_acl_PUT(self, account, permission='FULL_CONTROL'):
+ acl = ACL(self.default_owner, [Grant(User(account), permission)])
+ req = Request.blank('/bucket/object?acl',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS %s:hmac' % account,
+ 'Date': self.get_date_header()},
+ body=tostring(acl.elem()))
+
+ return self.call_s3api(req)
+
+ def test_object_acl_PUT_without_permission(self):
+ status, headers, body = self._test_object_acl_PUT('test:other')
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ def test_object_acl_PUT_with_write_acp_permission(self):
+ status, headers, body = self._test_object_acl_PUT('test:write_acp')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_object_acl_PUT_with_fullcontrol_permission(self):
+ status, headers, body = self._test_object_acl_PUT('test:full_control')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_object_acl_PUT_with_owner_permission(self):
+ status, headers, body = self._test_object_acl_PUT('test:tester')
+ self.assertEqual(status.split()[0], '200')
+
+ def test_s3acl_decorator(self):
+ @s3acl
+ def non_class_s3acl_error():
+ raise TypeError()
+
+ class FakeClass(object):
+ def __init__(self):
+ self.s3api = MagicMock()
+ self.swift = FakeSwift()
+
+ @s3acl
+ def s3acl_error(self):
+ raise TypeError()
+
+ @s3acl
+ def s3acl_assert_fail(self):
+ assert False
+
+ @s3acl(s3acl_only=True)
+ def s3acl_s3only_error(self):
+ if self.s3api.conf.s3_acl:
+ raise TypeError()
+
+ @s3acl(s3acl_only=True)
+ def s3acl_s3only_no_error(self):
+ if not self.s3api.conf.s3_acl:
+ raise TypeError()
+
+ fake_class = FakeClass()
+
+ self.assertRaises(NotMethodException, non_class_s3acl_error)
+ self.assertRaises(TypeError, fake_class.s3acl_error)
+ self.assertRaises(AssertionError, fake_class.s3acl_assert_fail)
+ self.assertRaises(TypeError, fake_class.s3acl_s3only_error)
+ self.assertIsNone(fake_class.s3acl_s3only_no_error())
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_s3api.py b/test/unit/common/middleware/s3api/test_s3api.py
new file mode 100644
index 000000000..86a4f23aa
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_s3api.py
@@ -0,0 +1,1049 @@
+# Copyright (c) 2011-2014 OpenStack Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from mock import patch, MagicMock
+from datetime import datetime
+import hashlib
+import mock
+import requests
+import json
+import copy
+from urllib import unquote, quote
+
+import swift.common.middleware.s3api
+from swift.common.middleware.keystoneauth import KeystoneAuth
+from swift.common import swob, utils
+from swift.common.swob import Request
+
+from keystonemiddleware.auth_token import AuthProtocol
+from keystoneauth1.access import AccessInfoV2
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from test.unit.common.middleware.s3api.helpers import FakeSwift
+from test.unit.common.middleware.s3api.test_s3token import \
+ GOOD_RESPONSE_V2, GOOD_RESPONSE_V3
+from swift.common.middleware.s3api.s3request import SigV4Request, S3Request
+from swift.common.middleware.s3api.etree import fromstring
+from swift.common.middleware.s3api.s3api import filter_factory, \
+ S3ApiMiddleware
+from swift.common.middleware.s3api.s3token import S3Token
+
+
+class TestS3ApiMiddleware(S3ApiTestCase):
+ def setUp(self):
+ super(TestS3ApiMiddleware, self).setUp()
+
+ self.swift.register('GET', '/something', swob.HTTPOk, {}, 'FAKE APP')
+
+ def test_non_s3_request_passthrough(self):
+ req = Request.blank('/something')
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(body, 'FAKE APP')
+
+ def test_bad_format_authorization(self):
+ req = Request.blank('/something',
+ headers={'Authorization': 'hoge',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+ def test_bad_method(self):
+ req = Request.blank('/',
+ environ={'REQUEST_METHOD': 'PUT'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
+
+ def test_bad_method_but_method_exists_in_controller(self):
+ req = Request.blank(
+ '/bucket',
+ environ={'REQUEST_METHOD': '_delete_segments_bucket'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
+
+ def test_path_info_encode(self):
+ bucket_name = 'b%75cket'
+ object_name = 'ob%6aect:1'
+ self.swift.register('GET', '/v1/AUTH_test/bucket/object:1',
+ swob.HTTPOk, {}, None)
+ req = Request.blank('/%s/%s' % (bucket_name, object_name),
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ raw_path_info = "/%s/%s" % (bucket_name, object_name)
+ path_info = req.environ['PATH_INFO']
+ self.assertEqual(path_info, unquote(raw_path_info))
+ self.assertEqual(req.path, quote(path_info))
+
+ def test_canonical_string_v2(self):
+ """
+ The hashes here were generated by running the same requests against
+ boto.utils.canonical_string
+ """
+ def canonical_string(path, headers):
+ if '?' in path:
+ path, query_string = path.split('?', 1)
+ else:
+ query_string = ''
+ env = {
+ 'REQUEST_METHOD': 'GET',
+ 'PATH_INFO': path,
+ 'QUERY_STRING': query_string,
+ 'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
+ }
+ for header, value in headers.items():
+ header = 'HTTP_' + header.replace('-', '_').upper()
+ if header in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
+ header = header[5:]
+ env[header] = value
+
+ with patch('swift.common.middleware.s3api.s3request.'
+ 'S3Request._validate_headers'):
+ req = S3Request(env)
+ return req.environ['s3api.auth_details']['string_to_sign']
+
+ def verify(hash, path, headers):
+ s = canonical_string(path, headers)
+ self.assertEqual(hash, hashlib.md5(s).hexdigest())
+
+ verify('6dd08c75e42190a1ce9468d1fd2eb787', '/bucket/object',
+ {'Content-Type': 'text/plain', 'X-Amz-Something': 'test',
+ 'Date': 'whatever'})
+
+ verify('c8447135da232ae7517328f3429df481', '/bucket/object',
+ {'Content-Type': 'text/plain', 'X-Amz-Something': 'test'})
+
+ verify('bf49304103a4de5c325dce6384f2a4a2', '/bucket/object',
+ {'content-type': 'text/plain'})
+
+ verify('be01bd15d8d47f9fe5e2d9248cc6f180', '/bucket/object', {})
+
+ verify('e9ec7dca45eef3e2c7276af23135e896', '/bucket/object',
+ {'Content-MD5': 'somestuff'})
+
+ verify('a822deb31213ad09af37b5a7fe59e55e', '/bucket/object?acl', {})
+
+ verify('cce5dd1016595cb706c93f28d3eaa18f', '/bucket/object',
+ {'Content-Type': 'text/plain', 'X-Amz-A': 'test',
+ 'X-Amz-Z': 'whatever', 'X-Amz-B': 'lalala',
+ 'X-Amz-Y': 'lalalalalalala'})
+
+ verify('7506d97002c7d2de922cc0ec34af8846', '/bucket/object',
+ {'Content-Type': None, 'X-Amz-Something': 'test'})
+
+ verify('28f76d6162444a193b612cd6cb20e0be', '/bucket/object',
+ {'Content-Type': None,
+ 'X-Amz-Date': 'Mon, 11 Jul 2011 10:52:57 +0000',
+ 'Date': 'Tue, 12 Jul 2011 10:52:57 +0000'})
+
+ verify('ed6971e3eca5af4ee361f05d7c272e49', '/bucket/object',
+ {'Content-Type': None,
+ 'Date': 'Tue, 12 Jul 2011 10:52:57 +0000'})
+
+ verify('41ecd87e7329c33fea27826c1c9a6f91', '/bucket/object?cors', {})
+
+ verify('d91b062f375d8fab407d6dab41fd154e', '/bucket/object?tagging',
+ {})
+
+ verify('ebab878a96814b30eb178e27efb3973f', '/bucket/object?restore',
+ {})
+
+ verify('f6bf1b2d92b054350d3679d28739fc69', '/bucket/object?'
+ 'response-cache-control&response-content-disposition&'
+ 'response-content-encoding&response-content-language&'
+ 'response-content-type&response-expires', {})
+
+ str1 = canonical_string('/', headers={'Content-Type': None,
+ 'X-Amz-Something': 'test'})
+ str2 = canonical_string('/', headers={'Content-Type': '',
+ 'X-Amz-Something': 'test'})
+ str3 = canonical_string('/', headers={'X-Amz-Something': 'test'})
+
+ self.assertEqual(str1, str2)
+ self.assertEqual(str2, str3)
+
+ # Note that boto does not do proper stripping (as of 2.42.0).
+ # These were determined by examining the StringToSignBytes element of
+ # resulting SignatureDoesNotMatch errors from AWS.
+ str1 = canonical_string('/', {'Content-Type': 'text/plain',
+ 'Content-MD5': '##'})
+ str2 = canonical_string('/', {'Content-Type': '\x01\x02text/plain',
+ 'Content-MD5': '\x1f ##'})
+ str3 = canonical_string('/', {'Content-Type': 'text/plain \x10',
+ 'Content-MD5': '##\x18'})
+
+ self.assertEqual(str1, str2)
+ self.assertEqual(str2, str3)
+
+    def test_signed_urls_expired(self):
+        # A pre-signed URL whose Expires value is already in the past is
+        # rejected with AccessDenied.
+        expire = '1000000000'
+        req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
+                            'AWSAccessKeyId=test:tester' % expire,
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Date': self.get_date_header()})
+        # NOTE(review): a datetime object (not a formatted string) is
+        # assigned to the Date header here -- presumably swob stringifies
+        # it; confirm this is intentional.
+        req.headers['Date'] = datetime.utcnow()
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+    def test_signed_urls(self):
+        # A valid-looking pre-signed URL is translated into a normal
+        # 'AWS access:signature' Authorization header for the backend.
+        # Set expire to last 32b timestamp value
+        # This number can't be higher, because it breaks tests on 32b systems
+        expire = '2147483647'  # 19 Jan 2038 03:14:07
+        utc_date = datetime.utcnow()
+        req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
+                            'AWSAccessKeyId=test:tester&Timestamp=%s' %
+                            (expire, utc_date.isoformat().rsplit('.')[0]),
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Date': self.get_date_header()})
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200')
+        for _, _, headers in self.swift.calls_with_headers:
+            self.assertEqual(headers['Authorization'], 'AWS test:tester:X')
+
+    def test_signed_urls_no_timestamp(self):
+        # A pre-signed URL with no Date/Timestamp at all still succeeds.
+        expire = '2147483647'  # 19 Jan 2038 03:14:07
+        req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
+                            'AWSAccessKeyId=test:tester' % expire,
+                            environ={'REQUEST_METHOD': 'GET'})
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        # Curious! But actually S3 doesn't verify any x-amz-date/date headers
+        # for signed_url access and it also doesn't check timestamp
+        self.assertEqual(status.split()[0], '200')
+        for _, _, headers in self.swift.calls_with_headers:
+            self.assertEqual(headers['Authorization'], 'AWS test:tester:X')
+
+    def test_signed_urls_invalid_expire(self):
+        # A non-numeric Expires value cannot authenticate.
+        expire = 'invalid'
+        req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
+                            'AWSAccessKeyId=test:tester' % expire,
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Date': self.get_date_header()})
+        req.headers['Date'] = datetime.utcnow()
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+    def test_signed_urls_no_sign(self):
+        # Expires/AWSAccessKeyId without a Signature parameter is refused.
+        expire = '2147483647'  # 19 Jan 2038 03:14:07
+        req = Request.blank('/bucket/object?Expires=%s&'
+                            'AWSAccessKeyId=test:tester' % expire,
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Date': self.get_date_header()})
+        req.headers['Date'] = datetime.utcnow()
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+    def test_signed_urls_no_access(self):
+        # An empty AWSAccessKeyId is refused.
+        expire = '2147483647'  # 19 Jan 2038 03:14:07
+        req = Request.blank('/bucket/object?Expires=%s&'
+                            'AWSAccessKeyId=' % expire,
+                            environ={'REQUEST_METHOD': 'GET'})
+        req.headers['Date'] = datetime.utcnow()
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+    def test_signed_urls_v4(self):
+        # SigV4 query auth: a well-formed set of X-Amz-* query parameters
+        # authenticates and is rewritten into a v2-style Authorization
+        # header plus an X-Auth-Token for the backend request.
+        req = Request.blank(
+            '/bucket/object'
+            '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
+            '&X-Amz-Credential=test:tester/20T20Z/US/s3/aws4_request'
+            '&X-Amz-Date=%s'
+            '&X-Amz-Expires=1000'
+            '&X-Amz-SignedHeaders=host'
+            '&X-Amz-Signature=X' %
+            self.get_v4_amz_date_header(),
+            headers={'Date': self.get_date_header()},
+            environ={'REQUEST_METHOD': 'GET'})
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200', body)
+        for _, _, headers in self.swift.calls_with_headers:
+            self.assertEqual('AWS test:tester:X', headers['Authorization'])
+            self.assertIn('X-Auth-Token', headers)
+
+    def test_signed_urls_v4_missing_x_amz_date(self):
+        # Omitting X-Amz-Date entirely must fail with AccessDenied.
+        req = Request.blank('/bucket/object'
+                            '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
+                            '&X-Amz-Credential=test/20T20Z/US/s3/aws4_request'
+                            '&X-Amz-Expires=1000'
+                            '&X-Amz-SignedHeaders=host'
+                            '&X-Amz-Signature=X',
+                            environ={'REQUEST_METHOD': 'GET'})
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+    def test_signed_urls_v4_invalid_algorithm(self):
+        # An unrecognized X-Amz-Algorithm is an InvalidArgument error.
+        req = Request.blank('/bucket/object'
+                            '?X-Amz-Algorithm=FAKE'
+                            '&X-Amz-Credential=test/20T20Z/US/s3/aws4_request'
+                            '&X-Amz-Date=%s'
+                            '&X-Amz-Expires=1000'
+                            '&X-Amz-SignedHeaders=host'
+                            '&X-Amz-Signature=X' %
+                            self.get_v4_amz_date_header(),
+                            environ={'REQUEST_METHOD': 'GET'})
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+    def test_signed_urls_v4_missing_signed_headers(self):
+        # A missing X-Amz-SignedHeaders parameter is reported as
+        # AuthorizationHeaderMalformed.
+        req = Request.blank('/bucket/object'
+                            '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
+                            '&X-Amz-Credential=test/20T20Z/US/s3/aws4_request'
+                            '&X-Amz-Date=%s'
+                            '&X-Amz-Expires=1000'
+                            '&X-Amz-Signature=X' %
+                            self.get_v4_amz_date_header(),
+                            environ={'REQUEST_METHOD': 'GET'})
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body),
+                         'AuthorizationHeaderMalformed')
+
+    def test_signed_urls_v4_invalid_credentials(self):
+        # A credential lacking the date/region/service/terminator scope
+        # cannot authenticate.
+        req = Request.blank('/bucket/object'
+                            '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
+                            '&X-Amz-Credential=test'
+                            '&X-Amz-Date=%s'
+                            '&X-Amz-Expires=1000'
+                            '&X-Amz-SignedHeaders=host'
+                            '&X-Amz-Signature=X' %
+                            self.get_v4_amz_date_header(),
+                            environ={'REQUEST_METHOD': 'GET'})
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+    def test_signed_urls_v4_missing_signature(self):
+        # No X-Amz-Signature parameter at all -> AccessDenied.
+        req = Request.blank('/bucket/object'
+                            '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
+                            '&X-Amz-Credential=test/20T20Z/US/s3/aws4_request'
+                            '&X-Amz-Date=%s'
+                            '&X-Amz-Expires=1000'
+                            '&X-Amz-SignedHeaders=host' %
+                            self.get_v4_amz_date_header(),
+                            environ={'REQUEST_METHOD': 'GET'})
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+    def test_bucket_virtual_hosted_style(self):
+        # The bucket name is taken from the Host header
+        # (virtual-hosted-style addressing).
+        req = Request.blank('/',
+                            environ={'HTTP_HOST': 'bucket.localhost:80',
+                                     'REQUEST_METHOD': 'HEAD',
+                                     'HTTP_AUTHORIZATION':
+                                     'AWS test:tester:hmac'},
+                            headers={'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200')
+
+    def test_object_virtual_hosted_style(self):
+        # Same, for an object path below a virtual-hosted bucket.
+        req = Request.blank('/object',
+                            environ={'HTTP_HOST': 'bucket.localhost:80',
+                                     'REQUEST_METHOD': 'HEAD',
+                                     'HTTP_AUTHORIZATION':
+                                     'AWS test:tester:hmac'},
+                            headers={'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200')
+
+    def test_token_generation(self):
+        # The v2 string-to-sign canonicalizes the subresource query
+        # (partNumber/uploadId, sorted) and the parsed auth details are
+        # exposed to downstream middleware via environ['s3api.auth_details'].
+        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/'
+                            'object/123456789abcdef',
+                            swob.HTTPOk, {}, None)
+        self.swift.register('PUT', '/v1/AUTH_test/bucket+segments/'
+                            'object/123456789abcdef/1',
+                            swob.HTTPCreated, {}, None)
+        req = Request.blank('/bucket/object?uploadId=123456789abcdef'
+                            '&partNumber=1',
+                            environ={'REQUEST_METHOD': 'PUT'})
+        req.headers['Authorization'] = 'AWS test:tester:hmac'
+        date_header = self.get_date_header()
+        req.headers['Date'] = date_header
+        # Skip actual signature validation; only the parsed details matter.
+        with mock.patch('swift.common.middleware.s3api.s3request.'
+                        'S3Request.check_signature') as mock_cs:
+            status, headers, body = self.call_s3api(req)
+        _, _, headers = self.swift.calls_with_headers[-1]
+        self.assertEqual(req.environ['s3api.auth_details'], {
+            'access_key': 'test:tester',
+            'signature': 'hmac',
+            'string_to_sign': '\n'.join([
+                'PUT', '', '', date_header,
+                '/bucket/object?partNumber=1&uploadId=123456789abcdef']),
+            'check_signature': mock_cs})
+
+    def test_invalid_uri(self):
+        # A non-UTF8 byte in the request path yields InvalidURI.
+        req = Request.blank('/bucket/invalid\xffname',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidURI')
+
+    def test_object_create_bad_md5_unreadable(self):
+        # A Content-MD5 value that is not valid base64 yields InvalidDigest.
+        req = Request.blank('/bucket/object',
+                            environ={'REQUEST_METHOD': 'PUT',
+                                     'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
+                                     'HTTP_CONTENT_MD5': '#'},
+                            headers={'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidDigest')
+
+ def test_object_create_bad_md5_too_short(self):
+ too_short_digest = hashlib.md5('hey').hexdigest()[:-1]
+ md5_str = too_short_digest.encode('base64').strip()
+ req = Request.blank(
+ '/bucket/object',
+ environ={'REQUEST_METHOD': 'PUT',
+ 'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
+ 'HTTP_CONTENT_MD5': md5_str},
+ headers={'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidDigest')
+
+ def test_object_create_bad_md5_too_long(self):
+ too_long_digest = hashlib.md5('hey').hexdigest() + 'suffix'
+ md5_str = too_long_digest.encode('base64').strip()
+ req = Request.blank(
+ '/bucket/object',
+ environ={'REQUEST_METHOD': 'PUT',
+ 'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
+ 'HTTP_CONTENT_MD5': md5_str},
+ headers={'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(self._get_error_code(body), 'InvalidDigest')
+
+    def test_invalid_metadata_directive(self):
+        # An unrecognized x-amz-metadata-directive value is refused as
+        # InvalidArgument.
+        req = Request.blank('/',
+                            environ={'REQUEST_METHOD': 'GET',
+                                     'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
+                                     'HTTP_X_AMZ_METADATA_DIRECTIVE':
+                                     'invalid'},
+                            headers={'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
+
+    def test_invalid_storage_class(self):
+        # An unrecognized x-amz-storage-class value is refused.
+        req = Request.blank('/',
+                            environ={'REQUEST_METHOD': 'GET',
+                                     'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
+                                     'HTTP_X_AMZ_STORAGE_CLASS': 'INVALID'},
+                            headers={'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'InvalidStorageClass')
+
+    def _test_unsupported_header(self, header):
+        # Helper: a request carrying the given x-amz-* header must be
+        # answered with NotImplemented.
+        req = Request.blank('/error',
+                            environ={'REQUEST_METHOD': 'GET',
+                                     'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
+                            headers={'x-amz-' + header: 'value',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'NotImplemented')
+
+    def test_mfa(self):
+        self._test_unsupported_header('mfa')
+
+    def test_server_side_encryption(self):
+        self._test_unsupported_header('server-side-encryption')
+
+    def test_website_redirect_location(self):
+        self._test_unsupported_header('website-redirect-location')
+
+    def _test_unsupported_resource(self, resource):
+        # Helper: a request for the given subresource (query parameter)
+        # must be answered with NotImplemented.
+        req = Request.blank('/error?' + resource,
+                            environ={'REQUEST_METHOD': 'GET',
+                                     'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
+                            headers={'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'NotImplemented')
+
+    def test_notification(self):
+        self._test_unsupported_resource('notification')
+
+    def test_policy(self):
+        self._test_unsupported_resource('policy')
+
+    def test_request_payment(self):
+        self._test_unsupported_resource('requestPayment')
+
+    def test_torrent(self):
+        self._test_unsupported_resource('torrent')
+
+    def test_website(self):
+        self._test_unsupported_resource('website')
+
+    def test_cors(self):
+        self._test_unsupported_resource('cors')
+
+    def test_tagging(self):
+        self._test_unsupported_resource('tagging')
+
+    def test_restore(self):
+        self._test_unsupported_resource('restore')
+
+ def test_unsupported_method(self):
+ req = Request.blank('/bucket?acl',
+ environ={'REQUEST_METHOD': 'POST'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ elem = fromstring(body, 'Error')
+ self.assertEqual(elem.find('./Code').text, 'MethodNotAllowed')
+ self.assertEqual(elem.find('./Method').text, 'POST')
+ self.assertEqual(elem.find('./ResourceType').text, 'ACL')
+
+    def test_registered_defaults(self):
+        # filter_factory() must expose the configured s3api limits via
+        # utils.get_swift_info() (the /info endpoint data).
+        filter_factory(self.conf)
+        swift_info = utils.get_swift_info()
+        self.assertTrue('s3api' in swift_info)
+        self.assertEqual(swift_info['s3api'].get('max_bucket_listing'),
+                         self.conf.max_bucket_listing)
+        self.assertEqual(swift_info['s3api'].get('max_parts_listing'),
+                         self.conf.max_parts_listing)
+        self.assertEqual(swift_info['s3api'].get('max_upload_part_num'),
+                         self.conf.max_upload_part_num)
+        self.assertEqual(swift_info['s3api'].get('max_multi_delete_objects'),
+                         self.conf.max_multi_delete_objects)
+
+ def test_check_pipeline(self):
+ with patch("swift.common.middleware.s3api.s3api.loadcontext"), \
+ patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
+ as pipeline:
+ self.conf.auth_pipeline_check = True
+ self.conf.__file__ = ''
+
+ pipeline.return_value = 's3api tempauth proxy-server'
+ self.s3api.check_pipeline(self.conf)
+
+ # This *should* still work; authtoken will remove our auth details,
+ # but the X-Auth-Token we drop in will remain
+ # if we found one in the response
+ pipeline.return_value = 's3api s3token authtoken keystoneauth ' \
+ 'proxy-server'
+ self.s3api.check_pipeline(self.conf)
+
+ # This should work now; no more doubled-up requests to keystone!
+ pipeline.return_value = 's3api s3token keystoneauth proxy-server'
+ self.s3api.check_pipeline(self.conf)
+
+ pipeline.return_value = 's3api swauth proxy-server'
+ self.s3api.check_pipeline(self.conf)
+
+ # Note that authtoken would need to have delay_auth_decision=True
+ pipeline.return_value = 's3api authtoken s3token keystoneauth ' \
+ 'proxy-server'
+ self.s3api.check_pipeline(self.conf)
+
+ pipeline.return_value = 's3api proxy-server'
+ with self.assertRaises(ValueError) as cm:
+ self.s3api.check_pipeline(self.conf)
+ self.assertIn('expected auth between s3api and proxy-server',
+ cm.exception.message)
+
+ pipeline.return_value = 'proxy-server'
+ with self.assertRaises(ValueError) as cm:
+ self.s3api.check_pipeline(self.conf)
+ self.assertIn("missing filters ['s3api']",
+ cm.exception.message)
+
+    def test_s3api_initialization_with_disabled_pipeline_check(self):
+        # With auth_pipeline_check disabled, otherwise-invalid pipelines
+        # are accepted; only a pipeline missing s3api itself still raises.
+        with patch("swift.common.middleware.s3api.s3api.loadcontext"), \
+                patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
+                as pipeline:
+            # Disable pipeline check
+            self.conf.auth_pipeline_check = False
+            self.conf.__file__ = ''
+
+            pipeline.return_value = 's3api tempauth proxy-server'
+            self.s3api.check_pipeline(self.conf)
+
+            pipeline.return_value = 's3api s3token authtoken keystoneauth ' \
+                'proxy-server'
+            self.s3api.check_pipeline(self.conf)
+
+            pipeline.return_value = 's3api swauth proxy-server'
+            self.s3api.check_pipeline(self.conf)
+
+            pipeline.return_value = 's3api authtoken s3token keystoneauth ' \
+                'proxy-server'
+            self.s3api.check_pipeline(self.conf)
+
+            # No auth at all is tolerated when the check is disabled.
+            pipeline.return_value = 's3api proxy-server'
+            self.s3api.check_pipeline(self.conf)
+
+            # ...but a pipeline without s3api is always an error.
+            pipeline.return_value = 'proxy-server'
+            with self.assertRaises(ValueError):
+                self.s3api.check_pipeline(self.conf)
+
+    def test_signature_v4(self):
+        # Header-based SigV4 auth succeeds and is rewritten into the
+        # v2-style 'AWS access:signature' Authorization plus X-Auth-Token.
+        environ = {
+            'REQUEST_METHOD': 'GET'}
+        headers = {
+            'Authorization':
+                'AWS4-HMAC-SHA256 '
+                'Credential=test:tester/20130524/US/s3/aws4_request, '
+                'SignedHeaders=host;x-amz-date,'
+                'Signature=X',
+            'X-Amz-Date': self.get_v4_amz_date_header(),
+            'X-Amz-Content-SHA256': '0123456789'}
+        req = Request.blank('/bucket/object', environ=environ, headers=headers)
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200', body)
+        for _, _, headers in self.swift.calls_with_headers:
+            self.assertEqual('AWS test:tester:X', headers['Authorization'])
+            self.assertIn('X-Auth-Token', headers)
+
+    def test_signature_v4_no_date(self):
+        # SigV4 header auth without any Date/X-Amz-Date is forbidden.
+        environ = {
+            'REQUEST_METHOD': 'GET'}
+        headers = {
+            'Authorization':
+                'AWS4-HMAC-SHA256 '
+                'Credential=test:tester/20130524/US/s3/aws4_request, '
+                'SignedHeaders=host;range;x-amz-date,'
+                'Signature=X',
+            'X-Amz-Content-SHA256': '0123456789'}
+        req = Request.blank('/bucket/object', environ=environ, headers=headers)
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '403')
+        self.assertEqual(self._get_error_code(body), 'AccessDenied')
+
+    def test_signature_v4_no_payload(self):
+        # SigV4 header auth requires the x-amz-content-sha256 header.
+        environ = {
+            'REQUEST_METHOD': 'GET'}
+        headers = {
+            'Authorization':
+                'AWS4-HMAC-SHA256 '
+                'Credential=test:tester/20130524/US/s3/aws4_request, '
+                'SignedHeaders=host;x-amz-date,'
+                'Signature=X',
+            'X-Amz-Date': self.get_v4_amz_date_header()}
+        req = Request.blank('/bucket/object', environ=environ, headers=headers)
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '400')
+        self.assertEqual(self._get_error_code(body), 'InvalidRequest')
+        self.assertEqual(
+            self._get_error_message(body),
+            'Missing required header for this request: x-amz-content-sha256')
+
+    def test_signature_v4_bad_authorization_string(self):
+        def test(auth_str, error, msg):
+            # Issue a request with the given Authorization header and
+            # check the resulting error code and message.
+            environ = {
+                'REQUEST_METHOD': 'GET'}
+            headers = {
+                'Authorization': auth_str,
+                'X-Amz-Date': self.get_v4_amz_date_header(),
+                'X-Amz-Content-SHA256': '0123456789'}
+            req = Request.blank('/bucket/object', environ=environ,
+                                headers=headers)
+            req.content_type = 'text/plain'
+            status, headers, body = self.call_s3api(req)
+            self.assertEqual(self._get_error_code(body), error)
+            self.assertEqual(self._get_error_message(body), msg)
+
+        # Missing Credential component.
+        auth_str = ('AWS4-HMAC-SHA256 '
+                    'SignedHeaders=host;x-amz-date,'
+                    'Signature=X')
+        test(auth_str, 'AccessDenied', 'Access Denied.')
+
+        # Missing SignedHeaders component.
+        auth_str = ('AWS4-HMAC-SHA256 '
+                    'Credential=test:tester/20130524/US/s3/aws4_request, '
+                    'Signature=X')
+        test(auth_str, 'AuthorizationHeaderMalformed',
+             'The authorization header is malformed; the authorization '
+             'header requires three components: Credential, SignedHeaders, '
+             'and Signature.')
+
+        # Missing Signature component.
+        auth_str = ('AWS4-HMAC-SHA256 '
+                    'Credential=test:tester/20130524/US/s3/aws4_request, '
+                    'SignedHeaders=host;x-amz-date')
+        test(auth_str, 'AccessDenied', 'Access Denied.')
+
+    def test_canonical_string_v4(self):
+        # Validates SigV4 canonical-request / string-to-sign construction
+        # against the published hashes from Amazon's aws4_testsuite.
+        def _get_req(path, environ):
+            # Build a SigV4Request from a bare WSGI environ, bypassing
+            # header validation so arbitrary test vectors are accepted.
+            if '?' in path:
+                path, query_string = path.split('?', 1)
+            else:
+                query_string = ''
+
+            env = {
+                'REQUEST_METHOD': 'GET',
+                'PATH_INFO': path,
+                'QUERY_STRING': query_string,
+                'HTTP_DATE': 'Mon, 09 Sep 2011 23:36:00 GMT',
+                'HTTP_X_AMZ_CONTENT_SHA256':
+                    'e3b0c44298fc1c149afbf4c8996fb924'
+                    '27ae41e4649b934ca495991b7852b855',
+                'HTTP_AUTHORIZATION':
+                    'AWS4-HMAC-SHA256 '
+                    'Credential=X:Y/dt/reg/host/blah, '
+                    'SignedHeaders=content-md5;content-type;date, '
+                    'Signature=x',
+            }
+            env.update(environ)
+            with patch('swift.common.middleware.s3api.s3request.'
+                       'S3Request._validate_headers'):
+                req = SigV4Request(env, location=self.conf.location)
+            return req
+
+        def canonical_string(path, environ):
+            return _get_req(path, environ)._canonical_request()
+
+        def verify(hash_val, path, environ):
+            # See http://docs.aws.amazon.com/general/latest/gr
+            # /signature-v4-test-suite.html for where location, service, and
+            # signing key came from
+            with patch.object(self.conf, 'location', 'us-east-1'), \
+                    patch.object(swift.common.middleware.s3api.s3request,
+                                 'SERVICE', 'host'):
+                req = _get_req(path, environ)
+                hash_in_sts = req._string_to_sign().split('\n')[3]
+                self.assertEqual(hash_val, hash_in_sts)
+                self.assertTrue(req.check_signature(
+                    'wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY'))
+
+        # all next data got from aws4_testsuite from Amazon
+        # http://docs.aws.amazon.com/general/latest/gr/samples
+        # /aws4_testsuite.zip
+        # Each *expected* hash value is the 4th line in <test-name>.sts in the
+        # test suite.
+
+        # get-vanilla
+        env = {
+            'HTTP_AUTHORIZATION': (
+                'AWS4-HMAC-SHA256 '
+                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
+                'SignedHeaders=date;host, '
+                'Signature=b27ccfbfa7df52a200ff74193ca6e32d'
+                '4b48b8856fab7ebf1c595d0670a7e470'),
+            'HTTP_HOST': 'host.foo.com'}
+        verify('366b91fb121d72a00f46bbe8d395f53a'
+               '102b06dfb7e79636515208ed3fa606b1',
+               '/', env)
+
+        # get-header-value-trim
+        env = {
+            'REQUEST_METHOD': 'POST',
+            'HTTP_AUTHORIZATION': (
+                'AWS4-HMAC-SHA256 '
+                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
+                'SignedHeaders=date;host;p, '
+                'Signature=debf546796015d6f6ded8626f5ce9859'
+                '7c33b47b9164cf6b17b4642036fcb592'),
+            'HTTP_HOST': 'host.foo.com',
+            'HTTP_P': 'phfft'}
+        verify('dddd1902add08da1ac94782b05f9278c'
+               '08dc7468db178a84f8950d93b30b1f35',
+               '/', env)
+
+        # get-utf8 (not exact)
+        env = {
+            'HTTP_AUTHORIZATION': (
+                'AWS4-HMAC-SHA256 '
+                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
+                'SignedHeaders=date;host, '
+                'Signature=8d6634c189aa8c75c2e51e106b6b5121'
+                'bed103fdb351f7d7d4381c738823af74'),
+            'HTTP_HOST': 'host.foo.com',
+            'RAW_PATH_INFO': '/%E1%88%B4'}
+
+        # This might look weird because actually S3 doesn't care about utf-8
+        # encoded multi-byte bucket name from bucket-in-host name constraint.
+        # However, aws4_testsuite has only a sample hash with utf-8 *bucket*
+        # name to make sure the correctness (probably it can be used in other
+        # aws resource except s3) so, to test also utf-8, skip the bucket name
+        # validation in the following test.
+
+        # NOTE: eventlet's PATH_INFO is unquoted
+        with patch('swift.common.middleware.s3api.s3request.'
+                   'validate_bucket_name'):
+            verify('27ba31df5dbc6e063d8f87d62eb07143'
+                   'f7f271c5330a917840586ac1c85b6f6b',
+                   unquote('/%E1%88%B4'), env)
+
+        # get-vanilla-query-order-key
+        env = {
+            'HTTP_AUTHORIZATION': (
+                'AWS4-HMAC-SHA256 '
+                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
+                'SignedHeaders=date;host, '
+                'Signature=0dc122f3b28b831ab48ba65cb47300de'
+                '53fbe91b577fe113edac383730254a3b'),
+            'HTTP_HOST': 'host.foo.com'}
+        verify('2f23d14fe13caebf6dfda346285c6d9c'
+               '14f49eaca8f5ec55c627dd7404f7a727',
+               '/?a=foo&b=foo', env)
+
+        # post-header-value-case
+        env = {
+            'REQUEST_METHOD': 'POST',
+            'HTTP_AUTHORIZATION': (
+                'AWS4-HMAC-SHA256 '
+                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
+                'SignedHeaders=date;host;zoo, '
+                'Signature=273313af9d0c265c531e11db70bbd653'
+                'f3ba074c1009239e8559d3987039cad7'),
+            'HTTP_HOST': 'host.foo.com',
+            'HTTP_ZOO': 'ZOOBAR'}
+        verify('3aae6d8274b8c03e2cc96fc7d6bda4b9'
+               'bd7a0a184309344470b2c96953e124aa',
+               '/', env)
+
+        # post-x-www-form-urlencoded-parameters
+        env = {
+            'REQUEST_METHOD': 'POST',
+            'HTTP_AUTHORIZATION': (
+                'AWS4-HMAC-SHA256 '
+                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
+                'SignedHeaders=date;host;content-type, '
+                'Signature=b105eb10c6d318d2294de9d49dd8b031'
+                'b55e3c3fe139f2e637da70511e9e7b71'),
+            'HTTP_HOST': 'host.foo.com',
+            'HTTP_X_AMZ_CONTENT_SHA256':
+                '3ba8907e7a252327488df390ed517c45'
+                'b96dead033600219bdca7107d1d3f88a',
+            'CONTENT_TYPE':
+                'application/x-www-form-urlencoded; charset=utf8'}
+        verify('c4115f9e54b5cecf192b1eaa23b8e88e'
+               'd8dc5391bd4fde7b3fff3d9c9fe0af1f',
+               '/', env)
+
+        # post-x-www-form-urlencoded
+        env = {
+            'REQUEST_METHOD': 'POST',
+            'HTTP_AUTHORIZATION': (
+                'AWS4-HMAC-SHA256 '
+                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
+                'SignedHeaders=date;host;content-type, '
+                'Signature=5a15b22cf462f047318703b92e6f4f38'
+                '884e4a7ab7b1d6426ca46a8bd1c26cbc'),
+            'HTTP_HOST': 'host.foo.com',
+            'HTTP_X_AMZ_CONTENT_SHA256':
+                '3ba8907e7a252327488df390ed517c45'
+                'b96dead033600219bdca7107d1d3f88a',
+            'CONTENT_TYPE':
+                'application/x-www-form-urlencoded'}
+        verify('4c5c6e4b52fb5fb947a8733982a8a5a6'
+               '1b14f04345cbfe6e739236c76dd48f74',
+               '/', env)
+
+        # Note that boto does not do proper stripping (as of 2.42.0).
+        # These were determined by examining the StringToSignBytes element of
+        # resulting SignatureDoesNotMatch errors from AWS.
+        str1 = canonical_string('/', {'CONTENT_TYPE': 'text/plain',
+                                      'HTTP_CONTENT_MD5': '##'})
+        str2 = canonical_string('/', {'CONTENT_TYPE': '\x01\x02text/plain',
+                                      'HTTP_CONTENT_MD5': '\x1f ##'})
+        str3 = canonical_string('/', {'CONTENT_TYPE': 'text/plain \x10',
+                                      'HTTP_CONTENT_MD5': '##\x18'})
+
+        self.assertEqual(str1, str2)
+        self.assertEqual(str2, str3)
+
+    def test_mixture_param_v4(self):
+        # Conflicting SigV4 auth info supplied both in the Authorization
+        # header and in the query string must not authenticate.
+        # now we have an Authorization header
+        headers = {
+            'Authorization':
+                'AWS4-HMAC-SHA256 '
+                'Credential=test/20130524/US/s3/aws4_request_A, '
+                'SignedHeaders=hostA;rangeA;x-amz-dateA,'
+                'Signature=X',
+            'X-Amz-Date': self.get_v4_amz_date_header(),
+            'X-Amz-Content-SHA256': '0123456789'}
+
+        # and then, different auth info (Credential, SignedHeaders, Signature)
+        # in query
+        req = Request.blank('/bucket/object'
+                            '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
+                            '&X-Amz-Credential=test/20T20Z/US/s3/aws4_requestB'
+                            '&X-Amz-SignedHeaders=hostB'
+                            '&X-Amz-Signature=Y',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers=headers)
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        # FIXME: should this failed as 400 or pass via query auth?
+        # for now, 403 forbidden for safety
+        self.assertEqual(status.split()[0], '403', body)
+
+        # But if we are missing Signature in query param
+        req = Request.blank('/bucket/object'
+                            '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
+                            '&X-Amz-Credential=test/20T20Z/US/s3/aws4_requestB'
+                            '&X-Amz-SignedHeaders=hostB',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers=headers)
+        req.content_type = 'text/plain'
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '403', body)
+
+    def test_s3api_with_only_s3_token(self):
+        # Pipeline s3api -> s3token -> keystoneauth (no authtoken): a
+        # single keystone round-trip authenticates the request.
+        self.swift = FakeSwift()
+        self.keystone_auth = KeystoneAuth(
+            self.swift, {'operator_roles': 'swift-user'})
+        self.s3_token = S3Token(
+            self.keystone_auth, {'auth_uri': 'https://fakehost/identity'})
+        self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
+        req = Request.blank(
+            '/bucket',
+            environ={'REQUEST_METHOD': 'PUT'},
+            headers={'Authorization': 'AWS access:signature',
+                     'Date': self.get_date_header()})
+        self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
+                            swob.HTTPCreated, {}, None)
+        self.swift.register('HEAD', '/v1/AUTH_TENANT_ID',
+                            swob.HTTPOk, {}, None)
+        with patch.object(self.s3_token, '_json_request') as mock_req:
+            mock_resp = requests.Response()
+            # NOTE(review): requests' Response._content is bytes on py3
+            # while json.dumps() returns text -- confirm before porting.
+            mock_resp._content = json.dumps(GOOD_RESPONSE_V2)
+            mock_resp.status_code = 201
+            mock_req.return_value = mock_resp
+
+            status, headers, body = self.call_s3api(req)
+            self.assertEqual(body, '')
+            self.assertEqual(1, mock_req.call_count)
+
+    def test_s3api_with_only_s3_token_v3(self):
+        # As above, but with a keystone v3 token response (project id
+        # rather than tenant id).
+        self.swift = FakeSwift()
+        self.keystone_auth = KeystoneAuth(
+            self.swift, {'operator_roles': 'swift-user'})
+        self.s3_token = S3Token(
+            self.keystone_auth, {'auth_uri': 'https://fakehost/identity'})
+        self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
+        req = Request.blank(
+            '/bucket',
+            environ={'REQUEST_METHOD': 'PUT'},
+            headers={'Authorization': 'AWS access:signature',
+                     'Date': self.get_date_header()})
+        self.swift.register('PUT', '/v1/AUTH_PROJECT_ID/bucket',
+                            swob.HTTPCreated, {}, None)
+        self.swift.register('HEAD', '/v1/AUTH_PROJECT_ID',
+                            swob.HTTPOk, {}, None)
+        with patch.object(self.s3_token, '_json_request') as mock_req:
+            mock_resp = requests.Response()
+            mock_resp._content = json.dumps(GOOD_RESPONSE_V3)
+            mock_resp.status_code = 200
+            mock_req.return_value = mock_resp
+
+            status, headers, body = self.call_s3api(req)
+            self.assertEqual(body, '')
+            self.assertEqual(1, mock_req.call_count)
+
+    def test_s3api_with_s3_token_and_auth_token(self):
+        # Pipeline s3api -> s3token -> authtoken -> keystoneauth: the
+        # X-Auth-Token obtained by s3token makes authtoken re-validate
+        # against keystone (one extra token fetch).
+        self.swift = FakeSwift()
+        self.keystone_auth = KeystoneAuth(
+            self.swift, {'operator_roles': 'swift-user'})
+        self.auth_token = AuthProtocol(
+            self.keystone_auth, {'delay_auth_decision': 'True'})
+        self.s3_token = S3Token(
+            self.auth_token, {'auth_uri': 'https://fakehost/identity'})
+        self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
+        req = Request.blank(
+            '/bucket',
+            environ={'REQUEST_METHOD': 'PUT'},
+            headers={'Authorization': 'AWS access:signature',
+                     'Date': self.get_date_header()})
+        self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
+                            swob.HTTPCreated, {}, None)
+        self.swift.register('HEAD', '/v1/AUTH_TENANT_ID',
+                            swob.HTTPOk, {}, None)
+        with patch.object(self.s3_token, '_json_request') as mock_req:
+            with patch.object(self.auth_token,
+                              '_do_fetch_token') as mock_fetch:
+                mock_resp = requests.Response()
+                mock_resp._content = json.dumps(GOOD_RESPONSE_V2)
+                mock_resp.status_code = 201
+                mock_req.return_value = mock_resp
+
+                # Make the fetched token never look stale to authtoken.
+                mock_access_info = AccessInfoV2(GOOD_RESPONSE_V2)
+                mock_access_info.will_expire_soon = \
+                    lambda stale_duration: False
+                mock_fetch.return_value = (MagicMock(), mock_access_info)
+
+                status, headers, body = self.call_s3api(req)
+                self.assertEqual(body, '')
+                self.assertEqual(1, mock_req.call_count)
+                # With X-Auth-Token, auth_token will call _do_fetch_token to
+                # connect to keystone in auth_token, again
+                self.assertEqual(1, mock_fetch.call_count)
+
+    def test_s3api_with_s3_token_no_pass_token_to_auth_token(self):
+        # If keystone's response carries no token id, s3token cannot pass
+        # an X-Auth-Token along, so keystoneauth rejects the request and
+        # authtoken never needs to fetch a token.
+        self.swift = FakeSwift()
+        self.keystone_auth = KeystoneAuth(
+            self.swift, {'operator_roles': 'swift-user'})
+        self.auth_token = AuthProtocol(
+            self.keystone_auth, {'delay_auth_decision': 'True'})
+        self.s3_token = S3Token(
+            self.auth_token, {'auth_uri': 'https://fakehost/identity'})
+        self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
+        req = Request.blank(
+            '/bucket',
+            environ={'REQUEST_METHOD': 'PUT'},
+            headers={'Authorization': 'AWS access:signature',
+                     'Date': self.get_date_header()})
+        self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
+                            swob.HTTPCreated, {}, None)
+        self.swift.register('HEAD', '/v1/AUTH_TENANT_ID',
+                            swob.HTTPOk, {}, None)
+        with patch.object(self.s3_token, '_json_request') as mock_req:
+            with patch.object(self.auth_token,
+                              '_do_fetch_token') as mock_fetch:
+                mock_resp = requests.Response()
+                no_token_id_good_resp = copy.deepcopy(GOOD_RESPONSE_V2)
+                # delete token id
+                del no_token_id_good_resp['access']['token']['id']
+                mock_resp._content = json.dumps(no_token_id_good_resp)
+                mock_resp.status_code = 201
+                mock_req.return_value = mock_resp
+
+                mock_access_info = AccessInfoV2(GOOD_RESPONSE_V2)
+                mock_access_info.will_expire_soon = \
+                    lambda stale_duration: False
+                mock_fetch.return_value = (MagicMock(), mock_access_info)
+
+                status, headers, body = self.call_s3api(req)
+                # No token provided from keystone result in 401 Unauthorized
+                # at `swift.common.middleware.keystoneauth` because auth_token
+                # will remove all auth headers including 'X-Identity-Status'[1]
+                # and then, set X-Identity-Status: Invalid at [2]
+                #
+                # 1: https://github.com/openstack/keystonemiddleware/blob/
+                #    master/keystonemiddleware/auth_token/__init__.py#L620
+                # 2: https://github.com/openstack/keystonemiddleware/blob/
+                #    master/keystonemiddleware/auth_token/__init__.py#L627-L629
+
+                self.assertEqual('403 Forbidden', status)
+                self.assertEqual(1, mock_req.call_count)
+                # if no token provided from keystone, we can skip the call to
+                # fetch the token
+                self.assertEqual(0, mock_fetch.call_count)
+
+# Allow running this test module directly.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_s3request.py b/test/unit/common/middleware/s3api/test_s3request.py
new file mode 100644
index 000000000..8c8dc1730
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_s3request.py
@@ -0,0 +1,765 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from mock import patch, MagicMock
+import unittest
+
+from swift.common import swob
+from swift.common.swob import Request, HTTPNoContent
+from swift.common.middleware.s3api.utils import mktime
+from swift.common.middleware.s3api.acl_handlers import get_acl_handler
+from swift.common.middleware.s3api.subresource import ACL, User, Owner, \
+ Grant, encode_acl
+from test.unit.common.middleware.s3api.test_s3api import S3ApiTestCase
+from swift.common.middleware.s3api.s3request import S3Request, \
+ S3AclRequest, SigV4Request, SIGV4_X_AMZ_DATE_FORMAT
+from swift.common.middleware.s3api.s3response import InvalidArgument, \
+ NoSuchBucket, InternalError, \
+ AccessDenied, SignatureDoesNotMatch, RequestTimeTooSkewed
+
+from test.unit import DebugLogger
+
+Fake_ACL_MAP = {
+ # HEAD Bucket
+ ('HEAD', 'HEAD', 'container'):
+ {'Resource': 'container',
+ 'Permission': 'READ'},
+ # GET Bucket
+ ('GET', 'GET', 'container'):
+ {'Resource': 'container',
+ 'Permission': 'READ'},
+ # HEAD Object
+ ('HEAD', 'HEAD', 'object'):
+ {'Resource': 'object',
+ 'Permission': 'READ'},
+ # GET Object
+ ('GET', 'GET', 'object'):
+ {'Resource': 'object',
+ 'Permission': 'READ'},
+}
+
+
+def _gen_test_acl_header(owner, permission=None, grantee=None,
+ resource='container'):
+ if permission is None:
+ return ACL(owner, [])
+
+ if grantee is None:
+ grantee = User('test:tester')
+ return encode_acl(resource, ACL(owner, [Grant(grantee, permission)]))
+
+
+class FakeResponse(object):
+ def __init__(self, s3_acl):
+ self.sysmeta_headers = {}
+ if s3_acl:
+ owner = Owner(id='test:tester', name='test:tester')
+ self.sysmeta_headers.update(
+ _gen_test_acl_header(owner, 'FULL_CONTROL',
+ resource='container'))
+ self.sysmeta_headers.update(
+ _gen_test_acl_header(owner, 'FULL_CONTROL',
+ resource='object'))
+
+
+class FakeSwiftResponse(object):
+ def __init__(self):
+ self.environ = {
+ 'PATH_INFO': '/v1/AUTH_test',
+ 'HTTP_X_TENANT_NAME': 'test',
+ 'HTTP_X_USER_NAME': 'tester',
+ 'HTTP_X_AUTH_TOKEN': 'token',
+ }
+
+
+class TestRequest(S3ApiTestCase):
+
+ def setUp(self):
+ super(TestRequest, self).setUp()
+ self.s3api.conf.s3_acl = True
+ self.swift.s3_acl = True
+
+ @patch('swift.common.middleware.s3api.acl_handlers.ACL_MAP', Fake_ACL_MAP)
+ @patch('swift.common.middleware.s3api.s3request.S3AclRequest.authenticate',
+ lambda x, y: None)
+ def _test_get_response(self, method, container='bucket', obj=None,
+ permission=None, skip_check=False,
+ req_klass=S3Request, fake_swift_resp=None):
+ path = '/' + container + ('/' + obj if obj else '')
+ req = Request.blank(path,
+ environ={'REQUEST_METHOD': method},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ if issubclass(req_klass, S3AclRequest):
+ s3_req = req_klass(
+ req.environ, MagicMock(),
+ True, self.conf.storage_domain,
+ self.conf.location, self.conf.force_swift_request_proxy_log,
+ self.conf.dns_compliant_bucket_names,
+ self.conf.allow_multipart_uploads, self.conf.allow_no_owner)
+ else:
+ s3_req = req_klass(
+ req.environ, MagicMock(),
+ True, self.conf.storage_domain,
+ self.conf.location, self.conf.force_swift_request_proxy_log,
+ self.conf.dns_compliant_bucket_names,
+ self.conf.allow_multipart_uploads, self.conf.allow_no_owner)
+ s3_req.set_acl_handler(
+ get_acl_handler(s3_req.controller_name)(s3_req, DebugLogger()))
+ with patch('swift.common.middleware.s3api.s3request.S3Request.'
+ '_get_response') as mock_get_resp, \
+ patch('swift.common.middleware.s3api.subresource.ACL.'
+ 'check_permission') as m_check_permission:
+ mock_get_resp.return_value = fake_swift_resp \
+ or FakeResponse(self.conf.s3_acl)
+ return mock_get_resp, m_check_permission,\
+ s3_req.get_response(self.s3api)
+
+ def test_get_response_without_s3_acl(self):
+ self.s3api.conf.s3_acl = False
+ self.swift.s3_acl = False
+ mock_get_resp, m_check_permission, s3_resp = \
+ self._test_get_response('HEAD')
+ self.assertFalse(hasattr(s3_resp, 'bucket_acl'))
+ self.assertFalse(hasattr(s3_resp, 'object_acl'))
+ self.assertEqual(mock_get_resp.call_count, 1)
+ self.assertEqual(m_check_permission.call_count, 0)
+
+ def test_get_response_without_match_ACL_MAP(self):
+ with self.assertRaises(Exception) as e:
+ self._test_get_response('POST', req_klass=S3AclRequest)
+ self.assertEqual(e.exception.message,
+ 'No permission to be checked exists')
+
+ def test_get_response_without_duplication_HEAD_request(self):
+ obj = 'object'
+ mock_get_resp, m_check_permission, s3_resp = \
+ self._test_get_response('HEAD', obj=obj,
+ req_klass=S3AclRequest)
+ self.assertTrue(s3_resp.bucket_acl is not None)
+ self.assertTrue(s3_resp.object_acl is not None)
+ self.assertEqual(mock_get_resp.call_count, 1)
+ args, kargs = mock_get_resp.call_args_list[0]
+ get_resp_obj = args[3]
+ self.assertEqual(get_resp_obj, obj)
+ self.assertEqual(m_check_permission.call_count, 1)
+ args, kargs = m_check_permission.call_args
+ permission = args[1]
+ self.assertEqual(permission, 'READ')
+
+ def test_get_response_with_check_object_permission(self):
+ obj = 'object'
+ mock_get_resp, m_check_permission, s3_resp = \
+ self._test_get_response('GET', obj=obj,
+ req_klass=S3AclRequest)
+ self.assertTrue(s3_resp.bucket_acl is not None)
+ self.assertTrue(s3_resp.object_acl is not None)
+ self.assertEqual(mock_get_resp.call_count, 2)
+ args, kargs = mock_get_resp.call_args_list[0]
+ get_resp_obj = args[3]
+ self.assertEqual(get_resp_obj, obj)
+ self.assertEqual(m_check_permission.call_count, 1)
+ args, kargs = m_check_permission.call_args
+ permission = args[1]
+ self.assertEqual(permission, 'READ')
+
+ def test_get_response_with_check_container_permission(self):
+ mock_get_resp, m_check_permission, s3_resp = \
+ self._test_get_response('GET',
+ req_klass=S3AclRequest)
+ self.assertTrue(s3_resp.bucket_acl is not None)
+ self.assertTrue(s3_resp.object_acl is not None)
+ self.assertEqual(mock_get_resp.call_count, 2)
+ args, kargs = mock_get_resp.call_args_list[0]
+ get_resp_obj = args[3]
+        self.assertEqual(get_resp_obj, '')
+ self.assertEqual(m_check_permission.call_count, 1)
+ args, kargs = m_check_permission.call_args
+ permission = args[1]
+ self.assertEqual(permission, 'READ')
+
+ def test_get_validate_param(self):
+ def create_s3request_with_param(param, value):
+ req = Request.blank(
+ '/bucket?%s=%s' % (param, value),
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ return S3Request(req.environ)
+
+ s3req = create_s3request_with_param('max-keys', '1')
+
+ # a param in the range
+ self.assertEqual(s3req.get_validated_param('max-keys', 1000, 1000), 1)
+ self.assertEqual(s3req.get_validated_param('max-keys', 0, 1), 1)
+
+        # a param out of the range
+ self.assertEqual(s3req.get_validated_param('max-keys', 0, 0), 0)
+
+        # a param out of the integer range
+ s3req = create_s3request_with_param('max-keys', '1' * 30)
+ with self.assertRaises(InvalidArgument) as result:
+ s3req.get_validated_param('max-keys', 1)
+ self.assertTrue(
+ 'not an integer or within integer range' in result.exception.body)
+ self.assertEqual(
+ result.exception.headers['content-type'], 'application/xml')
+
+ # a param is negative integer
+ s3req = create_s3request_with_param('max-keys', '-1')
+ with self.assertRaises(InvalidArgument) as result:
+ s3req.get_validated_param('max-keys', 1)
+ self.assertTrue(
+ 'must be an integer between 0 and' in result.exception.body)
+ self.assertEqual(
+ result.exception.headers['content-type'], 'application/xml')
+
+ # a param is not integer
+ s3req = create_s3request_with_param('max-keys', 'invalid')
+ with self.assertRaises(InvalidArgument) as result:
+ s3req.get_validated_param('max-keys', 1)
+ self.assertTrue(
+ 'not an integer or within integer range' in result.exception.body)
+ self.assertEqual(
+ result.exception.headers['content-type'], 'application/xml')
+
+ def test_authenticate_delete_Authorization_from_s3req(self):
+ req = Request.blank('/bucket/obj',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ with patch.object(Request, 'get_response') as m_swift_resp, \
+ patch.object(Request, 'remote_user', 'authorized'):
+
+ m_swift_resp.return_value = FakeSwiftResponse()
+ s3_req = S3AclRequest(req.environ, MagicMock())
+ self.assertNotIn('s3api.auth_details', s3_req.environ)
+ self.assertNotIn('HTTP_AUTHORIZATION', s3_req.environ)
+ self.assertNotIn('Authorization', s3_req.headers)
+ self.assertEqual(s3_req.token, 'token')
+
+ def test_to_swift_req_Authorization_not_exist_in_swreq(self):
+ container = 'bucket'
+ obj = 'obj'
+ method = 'GET'
+ req = Request.blank('/%s/%s' % (container, obj),
+ environ={'REQUEST_METHOD': method},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ with patch.object(Request, 'get_response') as m_swift_resp, \
+ patch.object(Request, 'remote_user', 'authorized'):
+
+ m_swift_resp.return_value = FakeSwiftResponse()
+ s3_req = S3AclRequest(req.environ, MagicMock())
+ sw_req = s3_req.to_swift_req(method, container, obj)
+ self.assertNotIn('s3api.auth_details', sw_req.environ)
+ self.assertNotIn('HTTP_AUTHORIZATION', sw_req.environ)
+ self.assertNotIn('Authorization', sw_req.headers)
+ self.assertEqual(sw_req.headers['X-Auth-Token'], 'token')
+
+ def test_to_swift_req_subrequest_proxy_access_log(self):
+ container = 'bucket'
+ obj = 'obj'
+ method = 'GET'
+
+ # force_swift_request_proxy_log is True
+ req = Request.blank('/%s/%s' % (container, obj),
+ environ={'REQUEST_METHOD': method,
+ 'swift.proxy_access_log_made': True},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ with patch.object(Request, 'get_response') as m_swift_resp, \
+ patch.object(Request, 'remote_user', 'authorized'):
+ m_swift_resp.return_value = FakeSwiftResponse()
+ s3_req = S3AclRequest(
+ req.environ, MagicMock(), force_request_log=True)
+ sw_req = s3_req.to_swift_req(method, container, obj)
+ self.assertFalse(sw_req.environ['swift.proxy_access_log_made'])
+
+ # force_swift_request_proxy_log is False
+ req = Request.blank('/%s/%s' % (container, obj),
+ environ={'REQUEST_METHOD': method,
+ 'swift.proxy_access_log_made': True},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ with patch.object(Request, 'get_response') as m_swift_resp, \
+ patch.object(Request, 'remote_user', 'authorized'):
+ m_swift_resp.return_value = FakeSwiftResponse()
+ s3_req = S3AclRequest(
+ req.environ, MagicMock(), force_request_log=False)
+ sw_req = s3_req.to_swift_req(method, container, obj)
+ self.assertTrue(sw_req.environ['swift.proxy_access_log_made'])
+
+ def test_get_container_info(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/bucket', HTTPNoContent,
+ {'x-container-read': 'foo',
+ 'X-container-object-count': 5,
+ 'X-container-meta-foo': 'bar'}, None)
+ req = Request.blank('/bucket', environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ s3_req = S3Request(req.environ)
+ # first, call get_response('HEAD')
+ info = s3_req.get_container_info(self.app)
+ self.assertTrue('status' in info) # sanity
+ self.assertEqual(204, info['status']) # sanity
+ self.assertEqual('foo', info['read_acl']) # sanity
+ self.assertEqual('5', info['object_count']) # sanity
+ self.assertEqual({'foo': 'bar'}, info['meta']) # sanity
+ with patch(
+ 'swift.common.middleware.s3api.s3request.get_container_info',
+ return_value={'status': 204}) as mock_info:
+ # Then all calls goes to get_container_info
+ for x in xrange(10):
+ info = s3_req.get_container_info(self.swift)
+ self.assertTrue('status' in info) # sanity
+ self.assertEqual(204, info['status']) # sanity
+ self.assertEqual(10, mock_info.call_count)
+
+ expected_errors = [(404, NoSuchBucket), (0, InternalError)]
+ for status, expected_error in expected_errors:
+ with patch('swift.common.middleware.s3api.s3request.'
+ 'get_container_info',
+ return_value={'status': status}):
+ self.assertRaises(
+ expected_error, s3_req.get_container_info, MagicMock())
+
+ def test_date_header_missing(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
+ {}, None)
+ req = Request.blank('/nojunk',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac'})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '403')
+ self.assertEqual(body, '')
+
+ def test_date_header_expired(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
+ {}, None)
+ req = Request.blank('/nojunk',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': 'Fri, 01 Apr 2014 12:00:00 GMT'})
+
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '403')
+ self.assertEqual(body, '')
+
+ def test_date_header_with_x_amz_date_valid(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
+ {}, None)
+ req = Request.blank('/nojunk',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': 'Fri, 01 Apr 2014 12:00:00 GMT',
+ 'x-amz-date': self.get_date_header()})
+
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '404')
+ self.assertEqual(body, '')
+
+ def test_date_header_with_x_amz_date_expired(self):
+ self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
+ {}, None)
+ req = Request.blank('/nojunk',
+ environ={'REQUEST_METHOD': 'HEAD'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header(),
+ 'x-amz-date':
+ 'Fri, 01 Apr 2014 12:00:00 GMT'})
+
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '403')
+ self.assertEqual(body, '')
+
+ def _test_request_timestamp_sigv4(self, date_header):
+ # signature v4 here
+ environ = {
+ 'REQUEST_METHOD': 'GET'}
+
+ if 'X-Amz-Date' in date_header:
+ included_header = 'x-amz-date'
+ elif 'Date' in date_header:
+ included_header = 'date'
+ else:
+ self.fail('Invalid date header specified as test')
+
+ headers = {
+ 'Authorization':
+ 'AWS4-HMAC-SHA256 '
+ 'Credential=test/20130524/US/s3/aws4_request, '
+ 'SignedHeaders=%s,'
+ 'Signature=X' % ';'.join(sorted(['host', included_header])),
+ 'X-Amz-Content-SHA256': '0123456789'}
+
+ headers.update(date_header)
+ req = Request.blank('/', environ=environ, headers=headers)
+ sigv4_req = SigV4Request(req.environ)
+
+ if 'X-Amz-Date' in date_header:
+ timestamp = mktime(
+ date_header['X-Amz-Date'], SIGV4_X_AMZ_DATE_FORMAT)
+ elif 'Date' in date_header:
+ timestamp = mktime(date_header['Date'])
+
+ self.assertEqual(timestamp, int(sigv4_req.timestamp))
+
+ def test_request_timestamp_sigv4(self):
+ access_denied_message = \
+ 'AWS authentication requires a valid Date or x-amz-date header'
+
+ # normal X-Amz-Date header
+ date_header = {'X-Amz-Date': self.get_v4_amz_date_header()}
+ self._test_request_timestamp_sigv4(date_header)
+
+ # normal Date header
+ date_header = {'Date': self.get_date_header()}
+ self._test_request_timestamp_sigv4(date_header)
+
+ # mangled X-Amz-Date header
+ date_header = {'X-Amz-Date': self.get_v4_amz_date_header()[:-1]}
+ with self.assertRaises(AccessDenied) as cm:
+ self._test_request_timestamp_sigv4(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn(access_denied_message, cm.exception.body)
+
+ # mangled Date header
+ date_header = {'Date': self.get_date_header()[20:]}
+ with self.assertRaises(AccessDenied) as cm:
+ self._test_request_timestamp_sigv4(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn(access_denied_message, cm.exception.body)
+
+ # Negative timestamp
+ date_header = {'X-Amz-Date': '00160523T054055Z'}
+ with self.assertRaises(AccessDenied) as cm:
+ self._test_request_timestamp_sigv4(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn(access_denied_message, cm.exception.body)
+
+ # far-past Date header
+ date_header = {'Date': 'Tue, 07 Jul 999 21:53:04 GMT'}
+ with self.assertRaises(AccessDenied) as cm:
+ self._test_request_timestamp_sigv4(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn(access_denied_message, cm.exception.body)
+
+ # far-future Date header
+ date_header = {'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'}
+ with self.assertRaises(RequestTimeTooSkewed) as cm:
+ self._test_request_timestamp_sigv4(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn('The difference between the request time and the '
+ 'current time is too large.', cm.exception.body)
+
+ def _test_request_timestamp_sigv2(self, date_header):
+        # signature v2 here
+ environ = {
+ 'REQUEST_METHOD': 'GET'}
+
+ headers = {'Authorization': 'AWS test:tester:hmac'}
+ headers.update(date_header)
+ req = Request.blank('/', environ=environ, headers=headers)
+ sigv2_req = S3Request(req.environ)
+
+ if 'X-Amz-Date' in date_header:
+ timestamp = mktime(req.headers.get('X-Amz-Date'))
+ elif 'Date' in date_header:
+ timestamp = mktime(req.headers.get('Date'))
+ else:
+ self.fail('Invalid date header specified as test')
+ self.assertEqual(timestamp, int(sigv2_req.timestamp))
+
+ def test_request_timestamp_sigv2(self):
+ access_denied_message = \
+ 'AWS authentication requires a valid Date or x-amz-date header'
+
+ # In v2 format, normal X-Amz-Date header is same
+ date_header = {'X-Amz-Date': self.get_date_header()}
+ self._test_request_timestamp_sigv2(date_header)
+
+ # normal Date header
+ date_header = {'Date': self.get_date_header()}
+ self._test_request_timestamp_sigv2(date_header)
+
+ # mangled X-Amz-Date header
+ date_header = {'X-Amz-Date': self.get_date_header()[:-20]}
+ with self.assertRaises(AccessDenied) as cm:
+ self._test_request_timestamp_sigv2(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn(access_denied_message, cm.exception.body)
+
+ # mangled Date header
+ date_header = {'Date': self.get_date_header()[:-20]}
+ with self.assertRaises(AccessDenied) as cm:
+ self._test_request_timestamp_sigv2(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn(access_denied_message, cm.exception.body)
+
+ # Negative timestamp
+ date_header = {'X-Amz-Date': '00160523T054055Z'}
+ with self.assertRaises(AccessDenied) as cm:
+ self._test_request_timestamp_sigv2(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn(access_denied_message, cm.exception.body)
+
+ # far-past Date header
+ date_header = {'Date': 'Tue, 07 Jul 999 21:53:04 GMT'}
+ with self.assertRaises(AccessDenied) as cm:
+ self._test_request_timestamp_sigv2(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn(access_denied_message, cm.exception.body)
+
+ # far-future Date header
+ date_header = {'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'}
+ with self.assertRaises(RequestTimeTooSkewed) as cm:
+ self._test_request_timestamp_sigv2(date_header)
+
+ self.assertEqual('403 Forbidden', cm.exception.message)
+ self.assertIn('The difference between the request time and the '
+ 'current time is too large.', cm.exception.body)
+
+ def test_headers_to_sign_sigv4(self):
+ environ = {
+ 'REQUEST_METHOD': 'GET'}
+
+ # host and x-amz-date
+ x_amz_date = self.get_v4_amz_date_header()
+ headers = {
+ 'Authorization':
+ 'AWS4-HMAC-SHA256 '
+ 'Credential=test/20130524/US/s3/aws4_request, '
+ 'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
+ 'Signature=X',
+ 'X-Amz-Content-SHA256': '0123456789',
+ 'Date': self.get_date_header(),
+ 'X-Amz-Date': x_amz_date}
+
+ req = Request.blank('/', environ=environ, headers=headers)
+ sigv4_req = SigV4Request(req.environ)
+
+ headers_to_sign = sigv4_req._headers_to_sign()
+ self.assertEqual(headers_to_sign, [
+ ('host', 'localhost:80'),
+ ('x-amz-content-sha256', '0123456789'),
+ ('x-amz-date', x_amz_date)])
+
+ # no x-amz-date
+ headers = {
+ 'Authorization':
+ 'AWS4-HMAC-SHA256 '
+ 'Credential=test/20130524/US/s3/aws4_request, '
+ 'SignedHeaders=host;x-amz-content-sha256,'
+ 'Signature=X',
+ 'X-Amz-Content-SHA256': '0123456789',
+ 'Date': self.get_date_header()}
+
+ req = Request.blank('/', environ=environ, headers=headers)
+ sigv4_req = SigV4Request(req.environ)
+
+ headers_to_sign = sigv4_req._headers_to_sign()
+ self.assertEqual(headers_to_sign, [
+ ('host', 'localhost:80'),
+ ('x-amz-content-sha256', '0123456789')])
+
+ # SignedHeaders says, host and x-amz-date included but there is not
+ # X-Amz-Date header
+ headers = {
+ 'Authorization':
+ 'AWS4-HMAC-SHA256 '
+ 'Credential=test/20130524/US/s3/aws4_request, '
+ 'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
+ 'Signature=X',
+ 'X-Amz-Content-SHA256': '0123456789',
+ 'Date': self.get_date_header()}
+
+ req = Request.blank('/', environ=environ, headers=headers)
+ with self.assertRaises(SignatureDoesNotMatch):
+ sigv4_req = SigV4Request(req.environ)
+ sigv4_req._headers_to_sign()
+
+ def test_canonical_uri_sigv2(self):
+ environ = {
+ 'HTTP_HOST': 'bucket1.s3.test.com',
+ 'REQUEST_METHOD': 'GET'}
+
+ headers = {'Authorization': 'AWS test:tester:hmac',
+ 'X-Amz-Date': self.get_date_header()}
+
+ # Virtual hosted-style
+ req = Request.blank('/', environ=environ, headers=headers)
+ sigv2_req = S3Request(
+ req.environ, storage_domain='s3.test.com')
+ uri = sigv2_req._canonical_uri()
+ self.assertEqual(uri, '/bucket1/')
+ self.assertEqual(req.environ['PATH_INFO'], '/')
+
+ req = Request.blank('/obj1', environ=environ, headers=headers)
+ sigv2_req = S3Request(
+ req.environ, storage_domain='s3.test.com')
+ uri = sigv2_req._canonical_uri()
+ self.assertEqual(uri, '/bucket1/obj1')
+ self.assertEqual(req.environ['PATH_INFO'], '/obj1')
+
+ environ = {
+ 'HTTP_HOST': 's3.test.com',
+ 'REQUEST_METHOD': 'GET'}
+
+ # Path-style
+ req = Request.blank('/', environ=environ, headers=headers)
+ sigv2_req = S3Request(req.environ, storage_domain='')
+ uri = sigv2_req._canonical_uri()
+
+ self.assertEqual(uri, '/')
+ self.assertEqual(req.environ['PATH_INFO'], '/')
+
+ req = Request.blank('/bucket1/obj1',
+ environ=environ,
+ headers=headers)
+ sigv2_req = S3Request(req.environ, storage_domain='')
+ uri = sigv2_req._canonical_uri()
+ self.assertEqual(uri, '/bucket1/obj1')
+ self.assertEqual(req.environ['PATH_INFO'], '/bucket1/obj1')
+
+ def test_canonical_uri_sigv4(self):
+ environ = {
+ 'HTTP_HOST': 'bucket.s3.test.com',
+ 'REQUEST_METHOD': 'GET'}
+
+ # host and x-amz-date
+ x_amz_date = self.get_v4_amz_date_header()
+ headers = {
+ 'Authorization':
+ 'AWS4-HMAC-SHA256 '
+ 'Credential=test/20130524/US/s3/aws4_request, '
+ 'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
+ 'Signature=X',
+ 'X-Amz-Content-SHA256': '0123456789',
+ 'Date': self.get_date_header(),
+ 'X-Amz-Date': x_amz_date}
+
+ # Virtual hosted-style
+ self.conf.storage_domain = 's3.test.com'
+ req = Request.blank('/', environ=environ, headers=headers)
+ sigv4_req = SigV4Request(req.environ)
+ uri = sigv4_req._canonical_uri()
+
+ self.assertEqual(uri, '/')
+ self.assertEqual(req.environ['PATH_INFO'], '/')
+
+ req = Request.blank('/obj1', environ=environ, headers=headers)
+ sigv4_req = SigV4Request(req.environ)
+ uri = sigv4_req._canonical_uri()
+
+ self.assertEqual(uri, '/obj1')
+ self.assertEqual(req.environ['PATH_INFO'], '/obj1')
+
+ environ = {
+ 'HTTP_HOST': 's3.test.com',
+ 'REQUEST_METHOD': 'GET'}
+
+ # Path-style
+ self.conf.storage_domain = ''
+ req = Request.blank('/', environ=environ, headers=headers)
+ sigv4_req = SigV4Request(req.environ)
+ uri = sigv4_req._canonical_uri()
+
+ self.assertEqual(uri, '/')
+ self.assertEqual(req.environ['PATH_INFO'], '/')
+
+ req = Request.blank('/bucket/obj1',
+ environ=environ,
+ headers=headers)
+ sigv4_req = SigV4Request(req.environ)
+ uri = sigv4_req._canonical_uri()
+
+ self.assertEqual(uri, '/bucket/obj1')
+ self.assertEqual(req.environ['PATH_INFO'], '/bucket/obj1')
+
+ @patch.object(S3Request, '_validate_headers', lambda *a: None)
+ def test_check_signature_sigv2(self):
+ # See https://web.archive.org/web/20151226025049/http://
+ # docs.aws.amazon.com//AmazonS3/latest/dev/RESTAuthentication.html
+ req = Request.blank('/photos/puppy.jpg', headers={
+ 'Host': 'johnsmith.s3.amazonaws.com',
+ 'Date': 'Tue, 27 Mar 2007 19:36:42 +0000',
+ 'Authorization': ('AWS AKIAIOSFODNN7EXAMPLE:'
+ 'bWq2s1WEIj+Ydj0vQ697zp+IXMU='),
+ })
+ sigv2_req = S3Request(req.environ, storage_domain='s3.amazonaws.com')
+ expected_sts = '\n'.join([
+ 'GET',
+ '',
+ '',
+ 'Tue, 27 Mar 2007 19:36:42 +0000',
+ '/johnsmith/photos/puppy.jpg',
+ ])
+ self.assertEqual(expected_sts, sigv2_req._string_to_sign())
+ self.assertTrue(sigv2_req.check_signature(
+ 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'))
+
+ req = Request.blank('/photos/puppy.jpg', method='PUT', headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Length': '94328',
+ 'Host': 'johnsmith.s3.amazonaws.com',
+ 'Date': 'Tue, 27 Mar 2007 21:15:45 +0000',
+ 'Authorization': ('AWS AKIAIOSFODNN7EXAMPLE:'
+ 'MyyxeRY7whkBe+bq8fHCL/2kKUg='),
+ })
+ sigv2_req = S3Request(req.environ, storage_domain='s3.amazonaws.com')
+ expected_sts = '\n'.join([
+ 'PUT',
+ '',
+ 'image/jpeg',
+ 'Tue, 27 Mar 2007 21:15:45 +0000',
+ '/johnsmith/photos/puppy.jpg',
+ ])
+ self.assertEqual(expected_sts, sigv2_req._string_to_sign())
+ self.assertTrue(sigv2_req.check_signature(
+ 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'))
+
+ req = Request.blank(
+ '/?prefix=photos&max-keys=50&marker=puppy',
+ headers={
+ 'User-Agent': 'Mozilla/5.0',
+ 'Host': 'johnsmith.s3.amazonaws.com',
+ 'Date': 'Tue, 27 Mar 2007 19:42:41 +0000',
+ 'Authorization': ('AWS AKIAIOSFODNN7EXAMPLE:'
+ 'htDYFYduRNen8P9ZfE/s9SuKy0U='),
+ })
+ sigv2_req = S3Request(req.environ, storage_domain='s3.amazonaws.com')
+ expected_sts = '\n'.join([
+ 'GET',
+ '',
+ '',
+ 'Tue, 27 Mar 2007 19:42:41 +0000',
+ '/johnsmith/',
+ ])
+ self.assertEqual(expected_sts, sigv2_req._string_to_sign())
+ self.assertTrue(sigv2_req.check_signature(
+ 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_s3response.py b/test/unit/common/middleware/s3api/test_s3response.py
new file mode 100644
index 000000000..56f603420
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_s3response.py
@@ -0,0 +1,80 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.swob import Response
+from swift.common.utils import HeaderKeyDict
+from swift.common.middleware.s3api.s3response import S3Response
+from swift.common.middleware.s3api.utils import sysmeta_prefix
+
+
+class TestResponse(unittest.TestCase):
+ def test_from_swift_resp_slo(self):
+ for expected, header_vals in \
+ ((True, ('true', '1')), (False, ('false', 'ugahhh', None))):
+ for val in header_vals:
+ resp = Response(headers={'X-Static-Large-Object': val})
+ s3resp = S3Response.from_swift_resp(resp)
+ self.assertEqual(expected, s3resp.is_slo)
+
+ def test_response_s3api_sysmeta_headers(self):
+ for _server_type in ('object', 'container'):
+ swift_headers = HeaderKeyDict(
+ {sysmeta_prefix(_server_type) + 'test': 'ok'})
+ resp = Response(headers=swift_headers)
+ s3resp = S3Response.from_swift_resp(resp)
+ self.assertEqual(swift_headers, s3resp.sysmeta_headers)
+
+ def test_response_s3api_sysmeta_headers_ignore_other_sysmeta(self):
+ for _server_type in ('object', 'container'):
+ swift_headers = HeaderKeyDict(
+ # sysmeta not leading sysmeta_prefix even including s3api word
+ {'x-%s-sysmeta-test-s3api' % _server_type: 'ok',
+ sysmeta_prefix(_server_type) + 'test': 'ok'})
+ resp = Response(headers=swift_headers)
+ s3resp = S3Response.from_swift_resp(resp)
+ expected_headers = HeaderKeyDict(
+ {sysmeta_prefix(_server_type) + 'test': 'ok'})
+ self.assertEqual(expected_headers, s3resp.sysmeta_headers)
+
+ def test_response_s3api_sysmeta_from_swift3_sysmeta(self):
+ for _server_type in ('object', 'container'):
+ # swift could return older swift3 sysmeta
+ swift_headers = HeaderKeyDict(
+ {('x-%s-sysmeta-swift3-' % _server_type) + 'test': 'ok'})
+ resp = Response(headers=swift_headers)
+ s3resp = S3Response.from_swift_resp(resp)
+ expected_headers = HeaderKeyDict(
+ {sysmeta_prefix(_server_type) + 'test': 'ok'})
+            # but the Response class should translate it as s3api sysmeta
+ self.assertEqual(expected_headers, s3resp.sysmeta_headers)
+
+ def test_response_swift3_sysmeta_does_not_overwrite_s3api_sysmeta(self):
+ for _server_type in ('object', 'container'):
+ # same key name except sysmeta prefix
+ swift_headers = HeaderKeyDict(
+ {('x-%s-sysmeta-swift3-' % _server_type) + 'test': 'ng',
+ sysmeta_prefix(_server_type) + 'test': 'ok'})
+ resp = Response(headers=swift_headers)
+ s3resp = S3Response.from_swift_resp(resp)
+ expected_headers = HeaderKeyDict(
+ {sysmeta_prefix(_server_type) + 'test': 'ok'})
+ # but only s3api sysmeta remains in the response sysmeta_headers
+ self.assertEqual(expected_headers, s3resp.sysmeta_headers)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_s3token.py b/test/unit/common/middleware/s3api/test_s3token.py
new file mode 100644
index 000000000..0ce3eae55
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_s3token.py
@@ -0,0 +1,821 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import copy
+import base64
+import json
+import logging
+import time
+import unittest
+import uuid
+
+import fixtures
+import mock
+import requests
+from requests_mock.contrib import fixture as rm_fixture
+from six.moves import urllib
+
+from swift.common.middleware.s3api import s3token
+from swift.common.swob import Request, Response
+from swift.common.wsgi import ConfigFileError
+
+GOOD_RESPONSE_V2 = {'access': {
+ 'user': {
+ 'username': 'S3_USER',
+ 'name': 'S3_USER',
+ 'id': 'USER_ID',
+ 'roles': [
+ {'name': 'swift-user'},
+ {'name': '_member_'},
+ ],
+ },
+ 'token': {
+ 'id': 'TOKEN_ID',
+ 'tenant': {
+ 'id': 'TENANT_ID',
+ 'name': 'TENANT_NAME'
+ }
+ }
+}}
+GOOD_RESPONSE_V3 = {'token': {
+ 'user': {
+ 'domain': {
+ 'name': 'Default',
+ 'id': 'default',
+ },
+ 'name': 'S3_USER',
+ 'id': 'USER_ID',
+ },
+ 'project': {
+ 'domain': {
+ 'name': 'PROJECT_DOMAIN_NAME',
+ 'id': 'PROJECT_DOMAIN_ID',
+ },
+ 'name': 'PROJECT_NAME',
+ 'id': 'PROJECT_ID',
+ },
+ 'roles': [
+ {'name': 'swift-user'},
+ {'name': '_member_'},
+ ],
+}}
+
+
+class TestResponse(requests.Response):
+ """Utility class to wrap requests.Response.
+
+ Class used to wrap requests.Response and provide some convenience to
+ initialize with a dict.
+ """
+
+ def __init__(self, data):
+ self._text = None
+ super(TestResponse, self).__init__()
+ if isinstance(data, dict):
+ self.status_code = data.get('status_code', 200)
+ headers = data.get('headers')
+ if headers:
+ self.headers.update(headers)
+ # Fake the text attribute to streamline Response creation
+ # _content is defined by requests.Response
+ self._content = data.get('text')
+ else:
+ self.status_code = data
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+ @property
+ def text(self):
+ return self.content
+
+
+class FakeApp(object):
+ calls = 0
+ """This represents a WSGI app protected by the auth_token middleware."""
+ def __call__(self, env, start_response):
+ self.calls += 1
+ resp = Response()
+ resp.environ = env
+ return resp(env, start_response)
+
+
+class S3TokenMiddlewareTestBase(unittest.TestCase):
+
+ TEST_AUTH_URI = 'https://fakehost/identity/v2.0'
+ TEST_URL = '%s/s3tokens' % (TEST_AUTH_URI, )
+ TEST_DOMAIN_ID = '1'
+ TEST_DOMAIN_NAME = 'aDomain'
+ TEST_GROUP_ID = uuid.uuid4().hex
+ TEST_ROLE_ID = uuid.uuid4().hex
+ TEST_TENANT_ID = '1'
+ TEST_TENANT_NAME = 'aTenant'
+ TEST_TOKEN = 'aToken'
+ TEST_TRUST_ID = 'aTrust'
+ TEST_USER = 'test'
+ TEST_USER_ID = uuid.uuid4().hex
+
+ TEST_ROOT_URL = 'http://127.0.0.1:5000/'
+
+ def setUp(self):
+ super(S3TokenMiddlewareTestBase, self).setUp()
+ self.logger = fixtures.FakeLogger(level=logging.DEBUG)
+ self.logger.setUp()
+ self.time_patcher = mock.patch.object(time, 'time', lambda: 1234)
+ self.time_patcher.start()
+
+ self.app = FakeApp()
+ self.conf = {
+ 'auth_uri': self.TEST_AUTH_URI,
+ }
+ self.middleware = s3token.S3Token(self.app, self.conf)
+
+ self.requests_mock = rm_fixture.Fixture()
+ self.requests_mock.setUp()
+
+ def tearDown(self):
+ self.requests_mock.cleanUp()
+ self.time_patcher.stop()
+ self.logger.cleanUp()
+ super(S3TokenMiddlewareTestBase, self).tearDown()
+
+ def start_fake_response(self, status, headers):
+ self.response_status = int(status.split(' ', 1)[0])
+ self.response_headers = dict(headers)
+
+
+class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
+
+ def setUp(self):
+ super(S3TokenMiddlewareTestGood, self).setUp()
+
+ self.requests_mock.post(self.TEST_URL,
+ status_code=201,
+ json=GOOD_RESPONSE_V2)
+
+ # Ignore the request and pass to the next middleware in the
+ # pipeline if no path has been specified.
+ def test_no_path_request(self):
+ req = Request.blank('/')
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ # Ignore the request and pass to the next middleware in the
+ # pipeline if no Authorization header has been specified
+ def test_without_authorization(self):
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ def test_nukes_auth_headers(self):
+ client_env = {
+ 'HTTP_X_IDENTITY_STATUS': 'Confirmed',
+ 'HTTP_X_ROLES': 'admin,_member_,swift-user',
+ 'HTTP_X_TENANT_ID': 'cfa'
+ }
+ req = Request.blank('/v1/AUTH_cfa/c/o', environ=client_env)
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+ for key in client_env:
+ self.assertNotIn(key, req.environ)
+
+ def test_without_auth_storage_token(self):
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.headers['Authorization'] = 'AWS badboy'
+ self.middleware(req.environ, self.start_fake_response)
+ self.assertEqual(self.response_status, 200)
+
+ def _assert_authorized(self, req, expect_token=True,
+ account_path='/v1/AUTH_TENANT_ID/'):
+ self.assertTrue(
+ req.path.startswith(account_path),
+ '%r does not start with %r' % (req.path, account_path))
+ expected_headers = {
+ 'X-Identity-Status': 'Confirmed',
+ 'X-Roles': 'swift-user,_member_',
+ 'X-User-Id': 'USER_ID',
+ 'X-User-Name': 'S3_USER',
+ 'X-Tenant-Id': 'TENANT_ID',
+ 'X-Tenant-Name': 'TENANT_NAME',
+ 'X-Project-Id': 'TENANT_ID',
+ 'X-Project-Name': 'TENANT_NAME',
+ 'X-Auth-Token': 'TOKEN_ID',
+ }
+ for header, value in expected_headers.items():
+ if header == 'X-Auth-Token' and not expect_token:
+ self.assertNotIn(header, req.headers)
+ continue
+ self.assertIn(header, req.headers)
+ self.assertEqual(value, req.headers[header])
+ # WSGI wants native strings for headers
+ self.assertIsInstance(req.headers[header], str)
+ self.assertEqual(1, self.middleware._app.calls)
+
+ self.assertEqual(1, self.requests_mock.call_count)
+ request_call = self.requests_mock.request_history[0]
+ self.assertEqual(json.loads(request_call.body), {'credentials': {
+ 'access': 'access',
+ 'signature': 'signature',
+ 'token': base64.urlsafe_b64encode(b'token').decode('ascii')}})
+
+ def test_authorized(self):
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_tolerate_missing_token_id(self):
+ resp = copy.deepcopy(GOOD_RESPONSE_V2)
+ del resp['access']['token']['id']
+ self.requests_mock.post(self.TEST_URL,
+ status_code=201,
+ json=resp)
+
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req, expect_token=False)
+
+ def test_authorized_bytes(self):
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': b'access',
+ 'signature': b'signature',
+ 'string_to_sign': b'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorized_http(self):
+ auth_uri = 'http://fakehost:35357/v2.0'
+ self.requests_mock.post(
+ '%s/s3tokens' % auth_uri,
+ status_code=201, json=GOOD_RESPONSE_V2)
+
+ self.middleware = s3token.filter_factory({
+ 'auth_uri': auth_uri})(self.app)
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorized_v3(self):
+ # Prior to https://github.com/openstack/keystone/commit/dd1e705
+ # even v3 URLs would respond with a v2-format response
+ auth_uri = 'http://fakehost:35357/v3'
+ self.requests_mock.post(
+ '%s/s3tokens' % auth_uri,
+ status_code=201, json=GOOD_RESPONSE_V2)
+
+ self.middleware = s3token.filter_factory({
+ 'auth_uri': auth_uri})(self.app)
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorized_trailing_slash(self):
+ self.middleware = s3token.filter_factory({
+ 'auth_uri': self.TEST_AUTH_URI + '/'})(self.app)
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorization_nova_toconnect(self):
+ req = Request.blank('/v1/AUTH_swiftint/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access:FORCED_TENANT_ID',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req, account_path='/v1/AUTH_FORCED_TENANT_ID/')
+
+ @mock.patch.object(requests, 'post')
+ def test_insecure(self, MOCK_REQUEST):
+ self.middleware = s3token.filter_factory(
+ {'insecure': 'True', 'auth_uri': 'http://example.com'})(self.app)
+
+ text_return_value = json.dumps(GOOD_RESPONSE_V2)
+ MOCK_REQUEST.return_value = TestResponse({
+ 'status_code': 201,
+ 'text': text_return_value})
+
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+
+ self.assertTrue(MOCK_REQUEST.called)
+ mock_args, mock_kwargs = MOCK_REQUEST.call_args
+ self.assertIs(mock_kwargs['verify'], False)
+
+ def test_insecure_option(self):
+ # insecure is passed as a string.
+
+ # Some non-secure values.
+ true_values = ['true', 'True', '1', 'yes']
+ for val in true_values:
+ config = {'insecure': val,
+ 'certfile': 'false_ind',
+ 'auth_uri': 'http://example.com'}
+ middleware = s3token.filter_factory(config)(self.app)
+ self.assertIs(False, middleware._verify)
+
+        # Some "secure" values, including an unexpected value.
+ false_values = ['false', 'False', '0', 'no', 'someweirdvalue']
+ for val in false_values:
+ config = {'insecure': val,
+ 'certfile': 'false_ind',
+ 'auth_uri': 'http://example.com'}
+ middleware = s3token.filter_factory(config)(self.app)
+ self.assertEqual('false_ind', middleware._verify)
+
+ # Default is secure.
+ config = {'certfile': 'false_ind',
+ 'auth_uri': 'http://example.com'}
+ middleware = s3token.filter_factory(config)(self.app)
+ self.assertIs('false_ind', middleware._verify)
+
+ def test_auth_uris(self):
+ for conf, expected in [
+ ({'auth_uri': 'https://example.com/v2.0'},
+ 'https://example.com/v2.0/s3tokens'),
+ # Trailing slash doesn't interfere
+ ({'auth_uri': 'https://example.com/v2.0/'},
+ 'https://example.com/v2.0/s3tokens'),
+ # keystone running under mod_wsgi often has a path prefix
+ ({'auth_uri': 'https://example.com/identity/v2.0'},
+ 'https://example.com/identity/v2.0/s3tokens'),
+ ({'auth_uri': 'https://example.com/identity/v2.0/'},
+ 'https://example.com/identity/v2.0/s3tokens'),
+ # IPv4 addresses are fine
+ ({'auth_uri': 'http://127.0.0.1:35357/v3'},
+ 'http://127.0.0.1:35357/v3/s3tokens'),
+ ({'auth_uri': 'http://127.0.0.1:35357/v3/'},
+ 'http://127.0.0.1:35357/v3/s3tokens'),
+ # IPv6 addresses need [brackets] per RFC 3986
+ ({'auth_uri': 'https://[::FFFF:129.144.52.38]:5000/v3'},
+ 'https://[::FFFF:129.144.52.38]:5000/v3/s3tokens'),
+ ({'auth_uri': 'https://[::FFFF:129.144.52.38]:5000/v3/'},
+ 'https://[::FFFF:129.144.52.38]:5000/v3/s3tokens'),
+ ]:
+ middleware = s3token.filter_factory(conf)(self.app)
+ self.assertEqual(expected, middleware._request_uri)
+
+ @mock.patch.object(requests, 'post')
+ def test_http_timeout(self, MOCK_REQUEST):
+ self.middleware = s3token.filter_factory({
+ 'http_timeout': '2',
+ 'auth_uri': 'http://example.com',
+ })(FakeApp())
+
+ MOCK_REQUEST.return_value = TestResponse({
+ 'status_code': 201,
+ 'text': json.dumps(GOOD_RESPONSE_V2)})
+
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+
+ self.assertTrue(MOCK_REQUEST.called)
+ mock_args, mock_kwargs = MOCK_REQUEST.call_args
+ self.assertEqual(mock_kwargs['timeout'], 2)
+
+ def test_http_timeout_option(self):
+ good_values = ['1', '5.3', '10', '.001']
+ for val in good_values:
+ middleware = s3token.filter_factory({
+ 'http_timeout': val,
+ 'auth_uri': 'http://example.com',
+ })(FakeApp())
+ self.assertEqual(float(val), middleware._timeout)
+
+ bad_values = ['1, 4', '-3', '100', 'foo', '0']
+ for val in bad_values:
+ with self.assertRaises(ValueError) as ctx:
+ s3token.filter_factory({
+ 'http_timeout': val,
+ 'auth_uri': 'http://example.com',
+ })(FakeApp())
+ self.assertTrue(ctx.exception.args[0].startswith((
+ 'invalid literal for float():',
+ 'could not convert string to float:',
+ 'http_timeout must be between 0 and 60 seconds',
+ )), 'Unexpected error message: %s' % ctx.exception)
+
+ # default is 10 seconds
+ middleware = s3token.filter_factory({
+ 'auth_uri': 'http://example.com'})(FakeApp())
+ self.assertEqual(10, middleware._timeout)
+
+ def test_bad_auth_uris(self):
+ for auth_uri in [
+ '/not/a/uri',
+ 'http://',
+ '//example.com/path']:
+ with self.assertRaises(ConfigFileError) as cm:
+ s3token.filter_factory({'auth_uri': auth_uri})(self.app)
+ self.assertEqual('Invalid auth_uri; must include scheme and host',
+ cm.exception.message)
+ with self.assertRaises(ConfigFileError) as cm:
+ s3token.filter_factory({
+ 'auth_uri': 'nonhttp://example.com'})(self.app)
+ self.assertEqual('Invalid auth_uri; scheme must be http or https',
+ cm.exception.message)
+ for auth_uri in [
+ 'http://user@example.com/',
+ 'http://example.com/?with=query',
+ 'http://example.com/#with-fragment']:
+ with self.assertRaises(ConfigFileError) as cm:
+ s3token.filter_factory({'auth_uri': auth_uri})(self.app)
+ self.assertEqual('Invalid auth_uri; must not include username, '
+ 'query, or fragment', cm.exception.message)
+
+ def test_unicode_path(self):
+ url = u'/v1/AUTH_cfa/c/euro\u20ac'.encode('utf8')
+ req = Request.blank(urllib.parse.quote(url))
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+
+class S3TokenMiddlewareTestBad(S3TokenMiddlewareTestBase):
+ def test_unauthorized_token(self):
+ ret = {"error":
+ {"message": "EC2 access key not found.",
+ "code": 401,
+ "title": "Unauthorized"}}
+ self.requests_mock.post(self.TEST_URL, status_code=403, json=ret)
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ resp = req.get_response(self.middleware)
+ s3_denied_req = self.middleware._deny_request('AccessDenied')
+ self.assertEqual(resp.body, s3_denied_req.body)
+ self.assertEqual(
+ resp.status_int, # pylint: disable-msg=E1101
+ s3_denied_req.status_int) # pylint: disable-msg=E1101
+ self.assertEqual(0, self.middleware._app.calls)
+
+ self.assertEqual(1, self.requests_mock.call_count)
+ request_call = self.requests_mock.request_history[0]
+ self.assertEqual(json.loads(request_call.body), {'credentials': {
+ 'access': 'access',
+ 'signature': 'signature',
+ 'token': base64.urlsafe_b64encode(b'token').decode('ascii')}})
+
+ def test_no_s3_creds_defers_to_auth_middleware(self):
+ # Without an Authorization header, we should just pass through to the
+ # auth system to make a decision.
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ resp = req.get_response(self.middleware)
+ self.assertEqual(resp.status_int, 200) # pylint: disable-msg=E1101
+ self.assertEqual(1, self.middleware._app.calls)
+
+ def test_fail_to_connect_to_keystone(self):
+ with mock.patch.object(self.middleware, '_json_request') as o:
+ s3_invalid_resp = self.middleware._deny_request('InvalidURI')
+ o.side_effect = s3_invalid_resp
+
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ resp = req.get_response(self.middleware)
+ self.assertEqual(resp.body, s3_invalid_resp.body)
+ self.assertEqual(
+ resp.status_int, # pylint: disable-msg=E1101
+ s3_invalid_resp.status_int) # pylint: disable-msg=E1101
+ self.assertEqual(0, self.middleware._app.calls)
+
+ def _test_bad_reply(self, response_body):
+ self.requests_mock.post(self.TEST_URL,
+ status_code=201,
+ text=response_body)
+
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ resp = req.get_response(self.middleware)
+ s3_invalid_resp = self.middleware._deny_request('InvalidURI')
+ self.assertEqual(resp.body, s3_invalid_resp.body)
+ self.assertEqual(
+ resp.status_int, # pylint: disable-msg=E1101
+ s3_invalid_resp.status_int) # pylint: disable-msg=E1101
+ self.assertEqual(0, self.middleware._app.calls)
+
+ def test_bad_reply_not_json(self):
+ self._test_bad_reply('<badreply>')
+
+ def _test_bad_reply_missing_parts(self, *parts):
+ resp = copy.deepcopy(GOOD_RESPONSE_V2)
+ part_dict = resp
+ for part in parts[:-1]:
+ part_dict = part_dict[part]
+ del part_dict[parts[-1]]
+ self._test_bad_reply(json.dumps(resp))
+
+ def test_bad_reply_missing_token_dict(self):
+ self._test_bad_reply_missing_parts('access', 'token')
+
+ def test_bad_reply_missing_user_dict(self):
+ self._test_bad_reply_missing_parts('access', 'user')
+
+ def test_bad_reply_missing_user_roles(self):
+ self._test_bad_reply_missing_parts('access', 'user', 'roles')
+
+ def test_bad_reply_missing_user_name(self):
+ self._test_bad_reply_missing_parts('access', 'user', 'name')
+
+ def test_bad_reply_missing_user_id(self):
+ self._test_bad_reply_missing_parts('access', 'user', 'id')
+
+ def test_bad_reply_missing_tenant_dict(self):
+ self._test_bad_reply_missing_parts('access', 'token', 'tenant')
+
+ def test_bad_reply_missing_tenant_id(self):
+ self._test_bad_reply_missing_parts('access', 'token', 'tenant', 'id')
+
+ def test_bad_reply_missing_tenant_name(self):
+ self._test_bad_reply_missing_parts('access', 'token', 'tenant', 'name')
+
+ def test_bad_reply_valid_but_bad_json(self):
+ self._test_bad_reply('{}')
+ self._test_bad_reply('[]')
+ self._test_bad_reply('null')
+ self._test_bad_reply('"foo"')
+ self._test_bad_reply('1')
+ self._test_bad_reply('true')
+
+
+class S3TokenMiddlewareTestDeferredAuth(S3TokenMiddlewareTestBase):
+ def setUp(self):
+ super(S3TokenMiddlewareTestDeferredAuth, self).setUp()
+ self.conf['delay_auth_decision'] = 'yes'
+ self.middleware = s3token.S3Token(FakeApp(), self.conf)
+
+ def test_unauthorized_token(self):
+ ret = {"error":
+ {"message": "EC2 access key not found.",
+ "code": 401,
+ "title": "Unauthorized"}}
+ self.requests_mock.post(self.TEST_URL, status_code=403, json=ret)
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ resp = req.get_response(self.middleware)
+ self.assertEqual(
+ resp.status_int, # pylint: disable-msg=E1101
+ 200)
+ self.assertNotIn('X-Auth-Token', req.headers)
+ self.assertEqual(1, self.middleware._app.calls)
+
+ self.assertEqual(1, self.requests_mock.call_count)
+ request_call = self.requests_mock.request_history[0]
+ self.assertEqual(json.loads(request_call.body), {'credentials': {
+ 'access': 'access',
+ 'signature': 'signature',
+ 'token': base64.urlsafe_b64encode(b'token').decode('ascii')}})
+
+ def test_fail_to_connect_to_keystone(self):
+ with mock.patch.object(self.middleware, '_json_request') as o:
+ o.side_effect = self.middleware._deny_request('InvalidURI')
+
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ resp = req.get_response(self.middleware)
+ self.assertEqual(
+ resp.status_int, # pylint: disable-msg=E1101
+ 200)
+ self.assertNotIn('X-Auth-Token', req.headers)
+ self.assertEqual(1, self.middleware._app.calls)
+
+ def test_bad_reply(self):
+ self.requests_mock.post(self.TEST_URL,
+ status_code=201,
+ text="<badreply>")
+
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ resp = req.get_response(self.middleware)
+ self.assertEqual(
+ resp.status_int, # pylint: disable-msg=E1101
+ 200)
+ self.assertNotIn('X-Auth-Token', req.headers)
+ self.assertEqual(1, self.middleware._app.calls)
+
+
+class S3TokenMiddlewareTestV3(S3TokenMiddlewareTestBase):
+
+ def setUp(self):
+ super(S3TokenMiddlewareTestV3, self).setUp()
+
+ self.requests_mock.post(self.TEST_URL,
+ status_code=200,
+ json=GOOD_RESPONSE_V3)
+
+ def _assert_authorized(self, req,
+ account_path='/v1/AUTH_PROJECT_ID/'):
+ self.assertTrue(req.path.startswith(account_path))
+ expected_headers = {
+ 'X-Identity-Status': 'Confirmed',
+ 'X-Roles': 'swift-user,_member_',
+ 'X-User-Id': 'USER_ID',
+ 'X-User-Name': 'S3_USER',
+ 'X-User-Domain-Id': 'default',
+ 'X-User-Domain-Name': 'Default',
+ 'X-Tenant-Id': 'PROJECT_ID',
+ 'X-Tenant-Name': 'PROJECT_NAME',
+ 'X-Project-Id': 'PROJECT_ID',
+ 'X-Project-Name': 'PROJECT_NAME',
+ 'X-Project-Domain-Id': 'PROJECT_DOMAIN_ID',
+ 'X-Project-Domain-Name': 'PROJECT_DOMAIN_NAME',
+ }
+ for header, value in expected_headers.items():
+ self.assertIn(header, req.headers)
+ self.assertEqual(value, req.headers[header])
+ # WSGI wants native strings for headers
+ self.assertIsInstance(req.headers[header], str)
+ self.assertNotIn('X-Auth-Token', req.headers)
+ self.assertEqual(1, self.middleware._app.calls)
+
+ def test_authorized(self):
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorized_bytes(self):
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': b'access',
+ 'signature': b'signature',
+ 'string_to_sign': b'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorized_http(self):
+ # Following https://github.com/openstack/keystone/commit/3ec1aa4
+ # even v2 URLs would respond with a v3-format response
+ auth_uri = 'http://fakehost:35357/v2.0/'
+ self.requests_mock.post(
+ auth_uri + 's3tokens',
+ status_code=201, json=GOOD_RESPONSE_V3)
+
+ self.middleware = s3token.filter_factory({
+ 'auth_uri': auth_uri})(self.app)
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorized_v3(self):
+ auth_uri = 'http://fakehost:35357/v3/'
+ self.requests_mock.post(
+ auth_uri + 's3tokens',
+ status_code=201, json=GOOD_RESPONSE_V3)
+
+ self.middleware = s3token.filter_factory({
+ 'auth_uri': auth_uri})(self.app)
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorized_trailing_slash(self):
+ self.middleware = s3token.filter_factory({
+ 'auth_uri': self.TEST_AUTH_URI + '/'})(self.app)
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req)
+
+ def test_authorization_nova_toconnect(self):
+ req = Request.blank('/v1/AUTH_swiftint/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access:FORCED_TENANT_ID',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ req.get_response(self.middleware)
+ self._assert_authorized(req, account_path='/v1/AUTH_FORCED_TENANT_ID/')
+
+ def _test_bad_reply_missing_parts(self, *parts):
+ resp = copy.deepcopy(GOOD_RESPONSE_V3)
+ part_dict = resp
+ for part in parts[:-1]:
+ part_dict = part_dict[part]
+ del part_dict[parts[-1]]
+ self.requests_mock.post(self.TEST_URL,
+ status_code=201,
+ text=json.dumps(resp))
+
+ req = Request.blank('/v1/AUTH_cfa/c/o')
+ req.environ['s3api.auth_details'] = {
+ 'access_key': u'access',
+ 'signature': u'signature',
+ 'string_to_sign': u'token',
+ }
+ resp = req.get_response(self.middleware)
+ s3_invalid_resp = self.middleware._deny_request('InvalidURI')
+ self.assertEqual(resp.body, s3_invalid_resp.body)
+ self.assertEqual(
+ resp.status_int, # pylint: disable-msg=E1101
+ s3_invalid_resp.status_int) # pylint: disable-msg=E1101
+ self.assertEqual(0, self.middleware._app.calls)
+
+ def test_bad_reply_missing_parts(self):
+ self._test_bad_reply_missing_parts('token', 'user', 'id')
+ self._test_bad_reply_missing_parts('token', 'user', 'name')
+ self._test_bad_reply_missing_parts('token', 'user', 'domain', 'id')
+ self._test_bad_reply_missing_parts('token', 'user', 'domain', 'name')
+ self._test_bad_reply_missing_parts('token', 'user', 'domain')
+ self._test_bad_reply_missing_parts('token', 'user')
+ self._test_bad_reply_missing_parts('token', 'project', 'id')
+ self._test_bad_reply_missing_parts('token', 'project', 'name')
+ self._test_bad_reply_missing_parts('token', 'project', 'domain', 'id')
+ self._test_bad_reply_missing_parts('token', 'project', 'domain',
+ 'name')
+ self._test_bad_reply_missing_parts('token', 'project', 'domain')
+ self._test_bad_reply_missing_parts('token', 'project')
+ self._test_bad_reply_missing_parts('token', 'roles')
diff --git a/test/unit/common/middleware/s3api/test_service.py b/test/unit/common/middleware/s3api/test_service.py
new file mode 100644
index 000000000..5571e56a1
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_service.py
@@ -0,0 +1,235 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common import swob
+from swift.common.swob import Request
+from swift.common.utils import json
+
+from test.unit.common.middleware.s3api.test_s3_acl import s3acl
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from swift.common.middleware.s3api.etree import fromstring
+from swift.common.middleware.s3api.subresource import ACL, Owner, encode_acl
+
+
+def create_bucket_list_json(buckets):
+ """
+ Create a json from bucket list
+    :param buckets: a list of tuples (or lists) consisting of elements ordered as
+ name, count, bytes
+ """
+ bucket_list = map(
+ lambda item: {'name': item[0], 'count': item[1], 'bytes': item[2]},
+ list(buckets))
+ return json.dumps(bucket_list)
+
+
+class TestS3ApiService(S3ApiTestCase):
+ def setup_buckets(self):
+ self.buckets = (('apple', 1, 200), ('orange', 3, 430))
+ bucket_list = create_bucket_list_json(self.buckets)
+ self.swift.register('GET', '/v1/AUTH_test', swob.HTTPOk, {},
+ bucket_list)
+
+ def setUp(self):
+ super(TestS3ApiService, self).setUp()
+
+ self.setup_buckets()
+
+ def test_service_GET_error(self):
+ code = self._test_method_error('GET', '', swob.HTTPUnauthorized)
+ self.assertEqual(code, 'SignatureDoesNotMatch')
+ code = self._test_method_error('GET', '', swob.HTTPForbidden)
+ self.assertEqual(code, 'AccessDenied')
+ code = self._test_method_error('GET', '', swob.HTTPServerError)
+ self.assertEqual(code, 'InternalError')
+
+ @s3acl
+ def test_service_GET(self):
+ req = Request.blank('/',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListAllMyBucketsResult')
+
+ all_buckets = elem.find('./Buckets')
+ buckets = all_buckets.iterchildren('Bucket')
+ listing = list(list(buckets)[0])
+ self.assertEqual(len(listing), 2)
+
+ names = []
+ for b in all_buckets.iterchildren('Bucket'):
+ names.append(b.find('./Name').text)
+
+ self.assertEqual(len(names), len(self.buckets))
+ for i in self.buckets:
+ self.assertTrue(i[0] in names)
+
+ @s3acl
+ def test_service_GET_subresource(self):
+ req = Request.blank('/?acl',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListAllMyBucketsResult')
+
+ all_buckets = elem.find('./Buckets')
+ buckets = all_buckets.iterchildren('Bucket')
+ listing = list(list(buckets)[0])
+ self.assertEqual(len(listing), 2)
+
+ names = []
+ for b in all_buckets.iterchildren('Bucket'):
+ names.append(b.find('./Name').text)
+
+ self.assertEqual(len(names), len(self.buckets))
+ for i in self.buckets:
+ self.assertTrue(i[0] in names)
+
+ def test_service_GET_with_blind_resource(self):
+ buckets = (('apple', 1, 200), ('orange', 3, 430),
+ ('apple+segment', 1, 200))
+ expected = buckets[:-1]
+ bucket_list = create_bucket_list_json(buckets)
+ self.swift.register('GET', '/v1/AUTH_test', swob.HTTPOk, {},
+ bucket_list)
+
+ req = Request.blank('/',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+
+ status, headers, body = self.call_s3api(req)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListAllMyBucketsResult')
+ all_buckets = elem.find('./Buckets')
+ buckets = all_buckets.iterchildren('Bucket')
+ listing = list(list(buckets)[0])
+ self.assertEqual(len(listing), 2)
+
+ names = []
+ for b in all_buckets.iterchildren('Bucket'):
+ names.append(b.find('./Name').text)
+
+ self.assertEqual(len(names), len(expected))
+ for i in expected:
+ self.assertTrue(i[0] in names)
+
+ def _test_service_GET_for_check_bucket_owner(self, buckets):
+ self.s3api.conf.check_bucket_owner = True
+ bucket_list = create_bucket_list_json(buckets)
+ self.swift.register('GET', '/v1/AUTH_test', swob.HTTPOk, {},
+ bucket_list)
+
+ req = Request.blank('/',
+ environ={'REQUEST_METHOD': 'GET'},
+ headers={'Authorization': 'AWS test:tester:hmac',
+ 'Date': self.get_date_header()})
+ return self.call_s3api(req)
+
+ @s3acl(s3acl_only=True)
+ def test_service_GET_without_bucket(self):
+ bucket_list = []
+ for var in range(0, 10):
+ bucket = 'bucket%s' % var
+ self.swift.register('HEAD', '/v1/AUTH_test/%s' % bucket,
+ swob.HTTPNotFound, {}, None)
+ bucket_list.append((bucket, var, 300 + var))
+
+ status, headers, body = \
+ self._test_service_GET_for_check_bucket_owner(bucket_list)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListAllMyBucketsResult')
+
+ resp_buckets = elem.find('./Buckets')
+ buckets = resp_buckets.iterchildren('Bucket')
+ self.assertEqual(len(list(buckets)), 0)
+
+ @s3acl(s3acl_only=True)
+ def test_service_GET_without_owner_bucket(self):
+ bucket_list = []
+ for var in range(0, 10):
+ user_id = 'test:other'
+ bucket = 'bucket%s' % var
+ owner = Owner(user_id, user_id)
+ headers = encode_acl('container', ACL(owner, []))
+ self.swift.register('HEAD', '/v1/AUTH_test/%s' % bucket,
+ swob.HTTPNoContent, headers, None)
+ bucket_list.append((bucket, var, 300 + var))
+
+ status, headers, body = \
+ self._test_service_GET_for_check_bucket_owner(bucket_list)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListAllMyBucketsResult')
+
+ resp_buckets = elem.find('./Buckets')
+ buckets = resp_buckets.iterchildren('Bucket')
+ self.assertEqual(len(list(buckets)), 0)
+
+ @s3acl(s3acl_only=True)
+ def test_service_GET_bucket_list(self):
+ bucket_list = []
+ for var in range(0, 10):
+ if var % 3 == 0:
+ user_id = 'test:tester'
+ else:
+ user_id = 'test:other'
+ bucket = 'bucket%s' % var
+ owner = Owner(user_id, user_id)
+ headers = encode_acl('container', ACL(owner, []))
+ # set register to get owner of buckets
+ if var % 3 == 2:
+ self.swift.register('HEAD', '/v1/AUTH_test/%s' % bucket,
+ swob.HTTPNotFound, {}, None)
+ else:
+ self.swift.register('HEAD', '/v1/AUTH_test/%s' % bucket,
+ swob.HTTPNoContent, headers, None)
+ bucket_list.append((bucket, var, 300 + var))
+
+ status, headers, body = \
+ self._test_service_GET_for_check_bucket_owner(bucket_list)
+ self.assertEqual(status.split()[0], '200')
+
+ elem = fromstring(body, 'ListAllMyBucketsResult')
+ resp_buckets = elem.find('./Buckets')
+ buckets = resp_buckets.iterchildren('Bucket')
+ listing = list(list(buckets)[0])
+ self.assertEqual(len(listing), 2)
+
+ names = []
+ for b in resp_buckets.iterchildren('Bucket'):
+ names.append(b.find('./Name').text)
+
+        # Check that only the buckets at indices that are multiples of 3 in
+        # bucket_list are returned, i.e. those owned by the requesting user.
+ expected_buckets = [b for i, b in enumerate(bucket_list)
+ if i % 3 == 0]
+ self.assertEqual(len(names), len(expected_buckets))
+ for i in expected_buckets:
+ self.assertTrue(i[0] in names)
+ self.assertEqual(len(self.swift.calls_with_headers), 11)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_subresource.py b/test/unit/common/middleware/s3api/test_subresource.py
new file mode 100644
index 000000000..05526971b
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_subresource.py
@@ -0,0 +1,367 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.utils import json
+
+from swift.common.middleware.s3api.s3response import AccessDenied, \
+ InvalidArgument, S3NotImplemented
+from swift.common.middleware.s3api.subresource import User, \
+ AuthenticatedUsers, AllUsers, \
+ ACLPrivate, ACLPublicRead, ACLPublicReadWrite, ACLAuthenticatedRead, \
+ ACLBucketOwnerRead, ACLBucketOwnerFullControl, Owner, ACL, encode_acl, \
+ decode_acl, canned_acl_grantees, Grantee
+from swift.common.middleware.s3api.utils import sysmeta_header
+from swift.common.middleware.s3api.exception import InvalidSubresource
+
+
+class TestS3ApiSubresource(unittest.TestCase):
+    """Tests for the s3api ACL subresource model: grantee membership,
+    canned-ACL permission checks, XML (de)serialization of ACLs, and the
+    encode/decode round-trip between ACL objects and sysmeta headers.
+    """
+
+    def setUp(self):
+        self.s3_acl = True
+        self.allow_no_owner = False
+
+    def test_acl_canonical_user(self):
+        grantee = User('test:tester')
+
+        # membership is tested with the `in` operator on the grantee
+        self.assertTrue('test:tester' in grantee)
+        self.assertTrue('test:tester2' not in grantee)
+        self.assertEqual(str(grantee), 'test:tester')
+        self.assertEqual(grantee.elem().find('./ID').text, 'test:tester')
+
+    def test_acl_authenticated_users(self):
+        grantee = AuthenticatedUsers()
+
+        # every user id is a member of the AuthenticatedUsers group
+        self.assertTrue('test:tester' in grantee)
+        self.assertTrue('test:tester2' in grantee)
+        uri = 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
+        self.assertEqual(grantee.elem().find('./URI').text, uri)
+
+    def test_acl_all_users(self):
+        grantee = AllUsers()
+
+        self.assertTrue('test:tester' in grantee)
+        self.assertTrue('test:tester2' in grantee)
+        uri = 'http://acs.amazonaws.com/groups/global/AllUsers'
+        self.assertEqual(grantee.elem().find('./URI').text, uri)
+
+    def check_permission(self, acl, user_id, permission):
+        # Helper: turn check_permission's raise-on-denied contract into a
+        # boolean so tests can assertTrue/assertFalse on it.
+        try:
+            acl.check_permission(user_id, permission)
+            return True
+        except AccessDenied:
+            return False
+
+    def test_acl_private(self):
+        # private: owner has FULL_CONTROL, everyone else has nothing
+        acl = ACLPrivate(Owner(id='test:tester',
+                               name='test:tester'),
+                         s3_acl=self.s3_acl,
+                         allow_no_owner=self.allow_no_owner)
+
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'READ'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'READ_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'WRITE_ACP'))
+
+    def test_acl_public_read(self):
+        # public-read: non-owners gain READ only
+        acl = ACLPublicRead(Owner(id='test:tester',
+                                  name='test:tester'),
+                            s3_acl=self.s3_acl,
+                            allow_no_owner=self.allow_no_owner)
+
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'READ_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'WRITE_ACP'))
+
+    def test_acl_public_read_write(self):
+        # public-read-write: non-owners gain READ and WRITE
+        acl = ACLPublicReadWrite(Owner(id='test:tester',
+                                       name='test:tester'),
+                                 s3_acl=self.s3_acl,
+                                 allow_no_owner=self.allow_no_owner)
+
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2', 'WRITE'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'READ_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'WRITE_ACP'))
+
+    def test_acl_authenticated_read(self):
+        # authenticated-read: any authenticated user gains READ
+        acl = ACLAuthenticatedRead(Owner(id='test:tester',
+                                         name='test:tester'),
+                                   s3_acl=self.s3_acl,
+                                   allow_no_owner=self.allow_no_owner)
+
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'READ_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'WRITE_ACP'))
+
+    def test_acl_bucket_owner_read(self):
+        # bucket-owner-read: the bucket owner (tester2) gains READ on the
+        # object owned by tester
+        acl = ACLBucketOwnerRead(
+            bucket_owner=Owner('test:tester2', 'test:tester2'),
+            object_owner=Owner('test:tester', 'test:tester'),
+            s3_acl=self.s3_acl,
+            allow_no_owner=self.allow_no_owner)
+
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'READ_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'WRITE_ACP'))
+
+    def test_acl_bucket_owner_full_control(self):
+        # bucket-owner-full-control: the bucket owner gains all permissions
+        acl = ACLBucketOwnerFullControl(
+            bucket_owner=Owner('test:tester2', 'test:tester2'),
+            object_owner=Owner('test:tester', 'test:tester'),
+            s3_acl=self.s3_acl,
+            allow_no_owner=self.allow_no_owner)
+
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester2',
+                                              'WRITE_ACP'))
+
+    def test_acl_elem(self):
+        # serialization to an AccessControlPolicy XML element
+        acl = ACLPrivate(Owner(id='test:tester',
+                               name='test:tester'),
+                         s3_acl=self.s3_acl,
+                         allow_no_owner=self.allow_no_owner)
+        elem = acl.elem()
+        self.assertTrue(elem.find('./Owner') is not None)
+        self.assertTrue(elem.find('./AccessControlList') is not None)
+        grants = [e for e in elem.findall('./AccessControlList/Grant')]
+        self.assertEqual(len(grants), 1)
+        self.assertEqual(grants[0].find('./Grantee/ID').text, 'test:tester')
+        self.assertEqual(
+            grants[0].find('./Grantee/DisplayName').text, 'test:tester')
+
+    def test_acl_from_elem(self):
+        # check translation from element
+        acl = ACLPrivate(Owner(id='test:tester',
+                               name='test:tester'),
+                         s3_acl=self.s3_acl,
+                         allow_no_owner=self.allow_no_owner)
+        elem = acl.elem()
+        acl = ACL.from_elem(elem, self.s3_acl, self.allow_no_owner)
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'READ'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'READ_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'WRITE_ACP'))
+
+    def test_acl_from_elem_by_id_only(self):
+        # parsing must still work when the Owner element carries only an
+        # ID (DisplayName removed below)
+        elem = ACLPrivate(Owner(id='test:tester',
+                                name='test:tester'),
+                          s3_acl=self.s3_acl,
+                          allow_no_owner=self.allow_no_owner).elem()
+        elem.find('./Owner').remove(elem.find('./Owner/DisplayName'))
+        acl = ACL.from_elem(elem, self.s3_acl, self.allow_no_owner)
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
+        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'READ'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'READ_ACP'))
+        self.assertFalse(self.check_permission(acl, 'test:tester2',
+                                               'WRITE_ACP'))
+
+    def test_decode_acl_container(self):
+        # ACLs are stored as JSON in a sysmeta header; decode_acl turns
+        # that header back into an ACL instance
+        access_control_policy = \
+            {'Owner': 'test:tester',
+             'Grant': [{'Permission': 'FULL_CONTROL',
+                        'Grantee': 'test:tester'}]}
+        headers = {sysmeta_header('container', 'acl'):
+                   json.dumps(access_control_policy)}
+        acl = decode_acl('container', headers, self.allow_no_owner)
+
+        self.assertEqual(type(acl), ACL)
+        self.assertEqual(acl.owner.id, 'test:tester')
+        self.assertEqual(len(acl.grants), 1)
+        self.assertEqual(str(acl.grants[0].grantee), 'test:tester')
+        self.assertEqual(acl.grants[0].permission, 'FULL_CONTROL')
+
+    def test_decode_acl_object(self):
+        access_control_policy = \
+            {'Owner': 'test:tester',
+             'Grant': [{'Permission': 'FULL_CONTROL',
+                        'Grantee': 'test:tester'}]}
+        headers = {sysmeta_header('object', 'acl'):
+                   json.dumps(access_control_policy)}
+        acl = decode_acl('object', headers, self.allow_no_owner)
+
+        self.assertEqual(type(acl), ACL)
+        self.assertEqual(acl.owner.id, 'test:tester')
+        self.assertEqual(len(acl.grants), 1)
+        self.assertEqual(str(acl.grants[0].grantee), 'test:tester')
+        self.assertEqual(acl.grants[0].permission, 'FULL_CONTROL')
+
+    def test_decode_acl_undefined(self):
+        # no sysmeta header at all -> an ACL with no owner and no grants
+        headers = {}
+        acl = decode_acl('container', headers, self.allow_no_owner)
+
+        self.assertEqual(type(acl), ACL)
+        self.assertIsNone(acl.owner.id)
+        self.assertEqual(len(acl.grants), 0)
+
+    def test_decode_acl_empty_list(self):
+        # a header holding an empty JSON list is treated like no ACL
+        headers = {sysmeta_header('container', 'acl'): '[]'}
+        acl = decode_acl('container', headers, self.allow_no_owner)
+        self.assertEqual(type(acl), ACL)
+        self.assertIsNone(acl.owner.id)
+        self.assertEqual(len(acl.grants), 0)
+
+    def test_decode_acl_with_invalid_json(self):
+        # malformed JSON in the sysmeta header raises InvalidSubresource
+        headers = {sysmeta_header('container', 'acl'): '['}
+        self.assertRaises(
+            InvalidSubresource, decode_acl, 'container',
+            headers, self.allow_no_owner)
+
+    def test_encode_acl_container(self):
+        acl = ACLPrivate(Owner(id='test:tester',
+                               name='test:tester'))
+        acp = encode_acl('container', acl)
+        header_value = json.loads(acp[sysmeta_header('container', 'acl')])
+
+        self.assertTrue('Owner' in header_value)
+        self.assertTrue('Grant' in header_value)
+        self.assertEqual('test:tester', header_value['Owner'])
+        self.assertEqual(len(header_value['Grant']), 1)
+
+    def test_encode_acl_object(self):
+        acl = ACLPrivate(Owner(id='test:tester',
+                               name='test:tester'))
+        acp = encode_acl('object', acl)
+        header_value = json.loads(acp[sysmeta_header('object', 'acl')])
+
+        self.assertTrue('Owner' in header_value)
+        self.assertTrue('Grant' in header_value)
+        self.assertEqual('test:tester', header_value['Owner'])
+        self.assertEqual(len(header_value['Grant']), 1)
+
+    def test_encode_acl_many_grant(self):
+        # 99 grantees given via x-amz-grant-read must all survive the
+        # header round-trip
+        headers = {}
+        users = []
+        for i in range(0, 99):
+            users.append('id=test:tester%s' % str(i))
+        users = ','.join(users)
+        headers['x-amz-grant-read'] = users
+        acl = ACL.from_headers(headers, Owner('test:tester', 'test:tester'))
+        acp = encode_acl('container', acl)
+
+        header_value = acp[sysmeta_header('container', 'acl')]
+        header_value = json.loads(header_value)
+
+        self.assertTrue('Owner' in header_value)
+        self.assertTrue('Grant' in header_value)
+        self.assertEqual('test:tester', header_value['Owner'])
+        self.assertEqual(len(header_value['Grant']), 99)
+
+    def test_from_headers_x_amz_acl(self):
+        # each canned ACL name must expand to the grants declared in
+        # canned_acl_grantees for the same owner
+        canned_acls = ['public-read', 'public-read-write',
+                       'authenticated-read', 'bucket-owner-read',
+                       'bucket-owner-full-control', 'log-delivery-write']
+
+        owner = Owner('test:tester', 'test:tester')
+        grantee_map = canned_acl_grantees(owner)
+
+        for acl_str in canned_acls:
+            acl = ACL.from_headers({'x-amz-acl': acl_str}, owner)
+            expected = grantee_map[acl_str]
+
+            self.assertEqual(len(acl.grants), len(expected))  # sanity
+
+            # parse Grant object to permission and grantee
+            actual_grants = [(grant.permission, grant.grantee)
+                             for grant in acl.grants]
+
+            assertions = zip(sorted(expected), sorted(actual_grants))
+
+            for (expected_permission, expected_grantee), \
+                    (permission, grantee) in assertions:
+                self.assertEqual(expected_permission, permission)
+                self.assertTrue(
+                    isinstance(grantee, expected_grantee.__class__))
+                if isinstance(grantee, User):
+                    self.assertEqual(expected_grantee.id, grantee.id)
+                    self.assertEqual(expected_grantee.display_name,
+                                     grantee.display_name)
+
+    def test_from_headers_x_amz_acl_invalid(self):
+        # an unknown canned ACL name raises InvalidArgument carrying the
+        # offending header name and value
+        with self.assertRaises(InvalidArgument) as cm:
+            ACL.from_headers({'x-amz-acl': 'invalid'},
+                             Owner('test:tester', 'test:tester'))
+        self.assertTrue('argument_name' in cm.exception.info)
+        self.assertEqual(cm.exception.info['argument_name'], 'x-amz-acl')
+        self.assertTrue('argument_value' in cm.exception.info)
+        self.assertEqual(cm.exception.info['argument_value'], 'invalid')
+
+    def test_canned_acl_grantees(self):
+        grantee_map = canned_acl_grantees(Owner('test:tester', 'test:tester'))
+        canned_acls = ['private', 'public-read', 'public-read-write',
+                       'authenticated-read', 'bucket-owner-read',
+                       'bucket-owner-full-control', 'log-delivery-write']
+        for canned_acl in canned_acls:
+            self.assertTrue(canned_acl in grantee_map)
+        self.assertEqual(len(canned_acls), len(grantee_map))  # sanity
+
+    def test_base_grantee(self):
+        # the abstract Grantee base class must refuse membership tests
+        grantee = Grantee()
+        func = lambda: '' in grantee
+        self.assertRaises(S3NotImplemented, func)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_utils.py b/test/unit/common/middleware/s3api/test_utils.py
new file mode 100644
index 000000000..ca67d0675
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_utils.py
@@ -0,0 +1,133 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import time
+import unittest
+
+from swift.common.middleware.s3api import utils, s3request
+
+# (CamelCase, snake_case) pairs exercised by both conversion tests below.
+strs = [
+    ('Owner', 'owner'),
+    ('DisplayName', 'display_name'),
+    ('AccessControlPolicy', 'access_control_policy'),
+]
+
+
+class TestS3ApiUtils(unittest.TestCase):
+    """Tests for s3api utility helpers: name-case conversion, bucket-name
+    validation, S3-style timestamps, and date-header parsing."""
+
+    def test_camel_to_snake(self):
+        for s1, s2 in strs:
+            self.assertEqual(utils.camel_to_snake(s1), s2)
+
+    def test_snake_to_camel(self):
+        for s1, s2 in strs:
+            self.assertEqual(s1, utils.snake_to_camel(s2))
+
+    def test_validate_bucket_name(self):
+        # second argument True = enforce DNS-compliant bucket names
+        # good cases
+        self.assertTrue(utils.validate_bucket_name('bucket', True))
+        self.assertTrue(utils.validate_bucket_name('bucket1', True))
+        self.assertTrue(utils.validate_bucket_name('bucket-1', True))
+        self.assertTrue(utils.validate_bucket_name('b.u.c.k.e.t', True))
+        self.assertTrue(utils.validate_bucket_name('a' * 63, True))
+        # bad cases
+        self.assertFalse(utils.validate_bucket_name('a', True))
+        self.assertFalse(utils.validate_bucket_name('aa', True))
+        self.assertFalse(utils.validate_bucket_name('a+a', True))
+        self.assertFalse(utils.validate_bucket_name('a_a', True))
+        self.assertFalse(utils.validate_bucket_name('Bucket', True))
+        self.assertFalse(utils.validate_bucket_name('BUCKET', True))
+        self.assertFalse(utils.validate_bucket_name('bucket-', True))
+        self.assertFalse(utils.validate_bucket_name('bucket.', True))
+        self.assertFalse(utils.validate_bucket_name('bucket_', True))
+        self.assertFalse(utils.validate_bucket_name('bucket.-bucket', True))
+        self.assertFalse(utils.validate_bucket_name('bucket-.bucket', True))
+        self.assertFalse(utils.validate_bucket_name('bucket..bucket', True))
+        self.assertFalse(utils.validate_bucket_name('a' * 64, True))
+
+    def test_validate_bucket_name_with_dns_compliant_bucket_names_false(self):
+        # second argument False = relaxed (US standard style) rules:
+        # underscores, upper case and up to 255 chars become acceptable
+        # good cases
+        self.assertTrue(utils.validate_bucket_name('bucket', False))
+        self.assertTrue(utils.validate_bucket_name('bucket1', False))
+        self.assertTrue(utils.validate_bucket_name('bucket-1', False))
+        self.assertTrue(utils.validate_bucket_name('b.u.c.k.e.t', False))
+        self.assertTrue(utils.validate_bucket_name('a' * 63, False))
+        self.assertTrue(utils.validate_bucket_name('a' * 255, False))
+        self.assertTrue(utils.validate_bucket_name('a_a', False))
+        self.assertTrue(utils.validate_bucket_name('Bucket', False))
+        self.assertTrue(utils.validate_bucket_name('BUCKET', False))
+        self.assertTrue(utils.validate_bucket_name('bucket-', False))
+        self.assertTrue(utils.validate_bucket_name('bucket_', False))
+        self.assertTrue(utils.validate_bucket_name('bucket.-bucket', False))
+        self.assertTrue(utils.validate_bucket_name('bucket-.bucket', False))
+        self.assertTrue(utils.validate_bucket_name('bucket..bucket', False))
+        # bad cases
+        self.assertFalse(utils.validate_bucket_name('a', False))
+        self.assertFalse(utils.validate_bucket_name('aa', False))
+        self.assertFalse(utils.validate_bucket_name('a+a', False))
+        # ending with dot seems invalid in US standard, too
+        self.assertFalse(utils.validate_bucket_name('bucket.', False))
+        self.assertFalse(utils.validate_bucket_name('a' * 256, False))
+
+    def test_s3timestamp(self):
+        expected = '1970-01-01T00:00:01.000Z'
+        # integer
+        ts = utils.S3Timestamp(1)
+        self.assertEqual(expected, ts.s3xmlformat)
+        # milliseconds unit should be floored
+        ts = utils.S3Timestamp(1.1)
+        self.assertEqual(expected, ts.s3xmlformat)
+        # float (microseconds) should be floored too
+        ts = utils.S3Timestamp(1.000001)
+        self.assertEqual(expected, ts.s3xmlformat)
+        # Bigger float (milliseconds) should be floored too
+        ts = utils.S3Timestamp(1.9)
+        self.assertEqual(expected, ts.s3xmlformat)
+
+    def test_mktime(self):
+        # all of these spell the Unix epoch in different RFC 2822 forms
+        date_headers = [
+            'Thu, 01 Jan 1970 00:00:00 -0000',
+            'Thu, 01 Jan 1970 00:00:00 GMT',
+            'Thu, 01 Jan 1970 00:00:00 UTC',
+            'Thu, 01 Jan 1970 08:00:00 +0800',
+            'Wed, 31 Dec 1969 16:00:00 -0800',
+            'Wed, 31 Dec 1969 16:00:00 PST',
+        ]
+        for header in date_headers:
+            ts = utils.mktime(header)
+            self.assertEqual(0, ts, 'Got %r for header %s' % (ts, header))
+
+        # Last-Modified response style
+        self.assertEqual(0, utils.mktime('1970-01-01T00:00:00'))
+
+        # X-Amz-Date style
+        self.assertEqual(0, utils.mktime('19700101T000000Z',
+                                         s3request.SIGV4_X_AMZ_DATE_FORMAT))
+
+    def test_mktime_weird_tz(self):
+        # rerun test_mktime under a stale TZ setting (changed without
+        # tzset, as Swift itself would leave it); results must not change.
+        # The original TZ is restored in the finally clause.
+        orig_tz = os.environ.get('TZ', '')
+        try:
+            os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
+            time.tzset()
+            os.environ['TZ'] = '+0000'
+            # No tzset! Simulating what Swift would do.
+            self.assertNotEqual(0, time.timezone)
+            self.test_mktime()
+        finally:
+            os.environ['TZ'] = orig_tz
+            time.tzset()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/s3api/test_versioning.py b/test/unit/common/middleware/s3api/test_versioning.py
new file mode 100644
index 000000000..edee6e841
--- /dev/null
+++ b/test/unit/common/middleware/s3api/test_versioning.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+
+from swift.common.swob import Request
+
+from test.unit.common.middleware.s3api import S3ApiTestCase
+from swift.common.middleware.s3api.etree import fromstring
+
+
+class TestS3ApiVersioning(S3ApiTestCase):
+    """Tests for the (not yet implemented) S3 versioning subresource:
+    GET returns an empty VersioningConfiguration, PUT is rejected."""
+
+    def setUp(self):
+        super(TestS3ApiVersioning, self).setUp()
+
+    def test_object_versioning_GET(self):
+        req = Request.blank('/bucket/object?versioning',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200')
+        # body must parse as a VersioningConfiguration document
+        fromstring(body, 'VersioningConfiguration')
+
+    def test_object_versioning_PUT(self):
+        # enabling versioning is not supported; expect NotImplemented
+        req = Request.blank('/bucket/object?versioning',
+                            environ={'REQUEST_METHOD': 'PUT'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(self._get_error_code(body), 'NotImplemented')
+
+    def test_bucket_versioning_GET(self):
+        req = Request.blank('/bucket?versioning',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        # Bug fix: the status was previously never checked here; assert
+        # 200 for parity with test_object_versioning_GET.
+        self.assertEqual(status.split()[0], '200')
+        fromstring(body, 'VersioningConfiguration')
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/test/unit/common/middleware/test_tempauth.py b/test/unit/common/middleware/test_tempauth.py
index 4a87b8421..317da713f 100644
--- a/test/unit/common/middleware/test_tempauth.py
+++ b/test/unit/common/middleware/test_tempauth.py
@@ -277,7 +277,7 @@ class TestAuth(unittest.TestCase):
local_auth = auth.filter_factory(
{'user_s3_s3': 'secret .admin'})(local_app)
req = self._make_request('/v1/s3:s3', environ={
- 'swift3.auth_details': {
+ 's3api.auth_details': {
'access_key': 's3:s3',
'signature': b64encode('sig'),
'string_to_sign': 't',
@@ -295,7 +295,7 @@ class TestAuth(unittest.TestCase):
local_auth = auth.filter_factory(
{'user_s3_s3': 'secret .admin'})(local_app)
req = self._make_request('/v1/s3:s3', environ={
- 'swift3.auth_details': {
+ 's3api.auth_details': {
'access_key': 's3:s3',
'signature': b64encode('sig'),
'string_to_sign': 't',
@@ -313,7 +313,7 @@ class TestAuth(unittest.TestCase):
local_auth = auth.filter_factory(
{'user_s3_s3': 'secret .admin'})(local_app)
req = self._make_request('/v1/s3:s3', environ={
- 'swift3.auth_details': {
+ 's3api.auth_details': {
'access_key': 's3:s3',
'signature': b64encode('sig'),
'string_to_sign': 't'}})
diff --git a/tox.ini b/tox.ini
index 64dbaed7d..547a6bf13 100644
--- a/tox.ini
+++ b/tox.ini
@@ -85,6 +85,11 @@ commands = ./.functests {posargs}
setenv = SWIFT_TEST_IN_PROCESS=1
SWIFT_TEST_IN_PROCESS_CONF_LOADER=ec
+[testenv:func-s3api]
+commands = ./.functests {posargs}
+setenv = SWIFT_TEST_IN_PROCESS=1
+ SWIFT_TEST_IN_PROCESS_CONF_LOADER=s3api
+
[testenv:venv]
commands = {posargs}