-rw-r--r--  .zuul.yaml | 26
-rw-r--r--  api-ref/source/v2/cache-manage.inc | 84
-rw-r--r--  api-ref/source/v2/discovery-parameters.yaml | 25
-rw-r--r--  api-ref/source/v2/discovery.inc | 59
-rw-r--r--  api-ref/source/v2/images-parameters.yaml | 8
-rw-r--r--  api-ref/source/v2/index.rst | 1
-rw-r--r--  api-ref/source/v2/samples/stores-list-detail-response.json | 19
-rw-r--r--  api-ref/source/v2/samples/usage-response.json | 20
-rw-r--r--  doc/source/configuration/configuring.rst | 9
-rw-r--r--  doc/source/contributor/releasecycle.rst | 12
-rw-r--r--  glance/api/middleware/version_negotiation.py | 4
-rw-r--r--  glance/api/v2/cached_images.py | 134
-rw-r--r--  glance/api/v2/discovery.py | 85
-rw-r--r--  glance/api/v2/policy.py | 24
-rw-r--r--  glance/api/v2/router.py | 42
-rw-r--r--  glance/api/versions.py | 14
-rw-r--r--  glance/common/property_utils.py | 5
-rw-r--r--  glance/db/migration.py | 2
-rw-r--r--  glance/db/sqlalchemy/api.py | 10
-rw-r--r--  glance/policies/__init__.py | 4
-rw-r--r--  glance/policies/base.py | 1
-rw-r--r--  glance/policies/cache.py | 75
-rw-r--r--  glance/policies/discovery.py | 33
-rw-r--r--  glance/quota/keystone.py | 26
-rw-r--r--  glance/tests/functional/__init__.py | 2
-rw-r--r--  glance/tests/functional/test_api.py | 15
-rw-r--r--  glance/tests/functional/v2/test_cache_api.py | 360
-rw-r--r--  glance/tests/functional/v2/test_discovery.py | 98
-rw-r--r--  glance/tests/functional/v2/test_images.py | 8
-rw-r--r--  glance/tests/unit/base.py | 9
-rw-r--r--  glance/tests/unit/test_cached_images.py | 365
-rw-r--r--  glance/tests/unit/test_versions.py | 103
-rw-r--r--  glance/tests/unit/v2/test_cache_management_api.py | 123
-rw-r--r--  glance/tests/unit/v2/test_discovery_stores.py | 31
-rw-r--r--  glance/tests/unit/v2/test_v2_policy.py | 49
-rw-r--r--  playbooks/enable-fips.yaml | 3
-rw-r--r--  releasenotes/notes/added-quota-usage-api-f1914054132f2021.yaml | 9
-rw-r--r--  releasenotes/notes/added-store-detail-api-215810aa85dfbb99.yaml | 6
-rw-r--r--  releasenotes/notes/cache-api-b806ccfb8c5d9bb6.yaml | 9
-rw-r--r--  requirements.txt | 2
-rw-r--r--  setup.cfg | 1
-rwxr-xr-x  tools/test-setup.sh | 28
42 files changed, 1845 insertions, 98 deletions
diff --git a/.zuul.yaml b/.zuul.yaml
index 80d690c3e..f74cc8af4 100644
--- a/.zuul.yaml
+++ b/.zuul.yaml
@@ -268,6 +268,27 @@
GLANCE_CINDER_DEFAULT_BACKEND: lvmdriver-1
- job:
+ name: glance-multistore-cinder-import-fips
+ parent: tempest-integrated-storage-import
+ description: |
+ The regular import workflow job to test with multiple cinder stores with
+ fips enabled
+ nodeset: devstack-single-node-centos-8-stream
+ pre-run: playbooks/enable-fips.yaml
+ vars:
+ configure_swap_size: 4096
+ devstack_localrc:
+ USE_CINDER_FOR_GLANCE: True
+ GLANCE_ENABLE_MULTIPLE_STORES: True
+ CINDER_ENABLED_BACKENDS: lvm:lvmdriver-1,lvm:lvmdriver-2
+ GLANCE_CINDER_DEFAULT_BACKEND: lvmdriver-1
+ devstack_local_conf:
+ test-config:
+ "$TEMPEST_CONFIG":
+ validation:
+ ssh_key_type: 'ecdsa'
+
+- job:
name: tempest-integrated-storage-import-standalone
parent: tempest-integrated-storage-import
description: |
@@ -303,7 +324,7 @@
- release-notes-jobs-python3
check:
jobs:
- - openstack-tox-functional-py36
+ - openstack-tox-functional-py36-fips
- openstack-tox-functional-py39
- glance-tox-functional-py39-rbac-defaults
- glance-code-constants-check
@@ -329,6 +350,9 @@
irrelevant-files: *tempest-irrelevant-files
- glance-multistore-cinder-import:
irrelevant-files: *tempest-irrelevant-files
+ - glance-multistore-cinder-import-fips:
+ voting: false
+ irrelevant-files: *tempest-irrelevant-files
- grenade:
irrelevant-files: *tempest-irrelevant-files
- tempest-ipv6-only:
diff --git a/api-ref/source/v2/cache-manage.inc b/api-ref/source/v2/cache-manage.inc
new file mode 100644
index 000000000..1993cb191
--- /dev/null
+++ b/api-ref/source/v2/cache-manage.inc
@@ -0,0 +1,84 @@
+.. -*- rst -*-
+
+Cache Manage
+************
+
+List and manage the cache.
+
+
+Query cache status
+~~~~~~~~~~~~~~~~~~
+
+.. rest_method:: GET /v2/cache/
+
+Lists all images in cache or queue.
+*(Since Image API v2.14)*
+
+Normal response codes: 200
+
+Error response codes: 400, 401, 403
+
+
+Request
+-------
+
+No request parameters.
+
+
+Queue image
+~~~~~~~~~~~
+
+.. rest_method:: PUT /v2/cache/{image_id}/
+
+Queues image for caching.
+*(Since Image API v2.14)*
+
+Normal response codes: 200
+
+Error response codes: 400, 401, 403, 404
+
+
+Request
+-------
+
+ - image_id: image_id-in-path
+
+
+Delete image from cache
+~~~~~~~~~~~~~~~~~~~~~~~
+
+.. rest_method:: DELETE /v2/cache/{image_id}/
+
+Deletes an image from the cache.
+*(Since Image API v2.14)*
+
+Normal response codes: 204
+
+Error response codes: 400, 401, 403, 404
+
+
+Request
+-------
+
+ - image_id: image_id-in-path
+
+
+Clear images from cache
+~~~~~~~~~~~~~~~~~~~~~~~
+
+.. rest_method:: DELETE /v2/cache/
+
+Clears the cache and its queue.
+*(Since Image API v2.14)*
+
+Normal response codes: 204
+
+Error response codes: 400, 401, 403
+
+
+Request
+-------
+
+.. rest_parameters:: images-parameters.yaml
+
+ - x-image-cache-clear-target: cache-clear-header
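To illustrate the endpoints documented above, here is a minimal client-side sketch using the Python 'requests' library. The endpoint URL, token and image id are placeholders and not part of this change; the GET /v2/cache response keys follow the controller added in glance/api/v2/cached_images.py further down.

    # Minimal sketch of the cache API above (placeholder endpoint, token and
    # image id; not values taken from this change).
    import requests

    GLANCE = 'http://controller:9292/v2'                 # placeholder endpoint
    HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN'}            # placeholder token
    IMAGE_ID = '11111111-2222-3333-4444-555555555555'    # placeholder image id

    # Queue an image for caching (PUT /v2/cache/{image_id})
    requests.put('%s/cache/%s' % (GLANCE, IMAGE_ID), headers=HEADERS)

    # List cached and queued images (GET /v2/cache)
    state = requests.get('%s/cache' % GLANCE, headers=HEADERS).json()
    print(state['queued_images'], state['cached_images'])

    # Remove a single image from the cache/queue (DELETE /v2/cache/{image_id})
    requests.delete('%s/cache/%s' % (GLANCE, IMAGE_ID), headers=HEADERS)

    # Clear only the queue; omit the header to clear both cache and queue
    requests.delete('%s/cache' % GLANCE,
                    headers=dict(HEADERS, **{'x-image-cache-clear-target': 'queue'}))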
diff --git a/api-ref/source/v2/discovery-parameters.yaml b/api-ref/source/v2/discovery-parameters.yaml
index b4b4d3d6e..205d6378e 100644
--- a/api-ref/source/v2/discovery-parameters.yaml
+++ b/api-ref/source/v2/discovery-parameters.yaml
@@ -18,3 +18,28 @@ stores:
in: body
required: true
type: array
+stores-detail:
+ description: |
+ A list of store objects, where each store object may contain the
+ following fields:
+
+ ``id``
+ Operator-defined identifier for the store.
+ ``type``
+ The type of the store.
+ ``description``
+ Operator-supplied description of this store.
+ ``default`` (optional)
+ Only present on the default store. This is the store where image
+ data is placed if you do not indicate a specific store when supplying
+ data to the Image Service. (See the :ref:`Image data <image-data>`
+ and :ref:`Interoperable image import <image-import-process>` sections
+ for more information.)
+ ``read-only`` (optional)
+ Included only when the store is read only.
+ ``properties``
+ Contains store-specific properties.
+ in: body
+ required: true
+ type: array
+
diff --git a/api-ref/source/v2/discovery.inc b/api-ref/source/v2/discovery.inc
index dff7a7f40..e1401bd83 100644
--- a/api-ref/source/v2/discovery.inc
+++ b/api-ref/source/v2/discovery.inc
@@ -103,3 +103,62 @@ Response Example
.. literalinclude:: samples/stores-list-response.json
:language: json
+
+Quota usage
+~~~~~~~~~~~
+
+.. rest_method:: GET /v2/info/usage
+
+The user's quota and current usage are displayed, if enabled by
+server-side configuration.
+
+Normal response codes: 200
+
+Request
+-------
+
+There are no request parameters.
+
+This call does not allow a request body.
+
+Response Example
+----------------
+
+.. literalinclude:: samples/usage-response.json
+ :language: json
+
+List stores detail
+~~~~~~~~~~~~~~~~~~
+
+.. rest_method:: GET /v2/info/stores/detail
+
+Lists all the backend stores, with detail. This call is admin-only by
+default; a non-admin user will receive a 403 (Forbidden) response.
+
+Normal response codes: 200
+
+Error response codes: 403, 404
+
+
+Request
+-------
+
+There are no request parameters.
+
+This call does not allow a request body.
+
+
+Response Parameters
+-------------------
+
+.. rest_parameters:: discovery-parameters.yaml
+
+ - stores: stores-detail
+
+
+Response Example
+----------------
+
+.. literalinclude:: samples/stores-list-detail-response.json
+ :language: json
+
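As a usage illustration for the two discovery calls documented above, a short sketch with the Python 'requests' library; the endpoint and token are placeholders. Note that /v2/info/usage only returns data when keystone limits are enabled, and /v2/info/stores/detail is admin-only by default.

    # Query the quota usage and detailed store listing described above. The
    # endpoint and token are placeholders; response shapes match the samples.
    import requests

    GLANCE = 'http://controller:9292/v2'          # placeholder endpoint
    HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN'}     # placeholder token

    usage = requests.get('%s/info/usage' % GLANCE, headers=HEADERS).json()['usage']
    for name, record in usage.items():
        print('%s: %s of %s used' % (name, record['usage'], record['limit']))

    detail = requests.get('%s/info/stores/detail' % GLANCE, headers=HEADERS).json()
    for store in detail['stores']:
        print(store['id'], store['type'], store.get('properties', {}))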
diff --git a/api-ref/source/v2/images-parameters.yaml b/api-ref/source/v2/images-parameters.yaml
index 459a20b1c..a99bbd601 100644
--- a/api-ref/source/v2/images-parameters.yaml
+++ b/api-ref/source/v2/images-parameters.yaml
@@ -1,4 +1,12 @@
# variables in header
+cache-clear-header:
+ description: |
+ A keyword, either 'cache', 'queue' or an empty string, telling the delete
+ API whether to remove images from the cache, from the queue, or from both.
+ If this header is missing, all cached and all queued images are deleted.
+ in: header
+ required: false
+ type: string
Content-Length:
description: |
The length of the body in octets (8-bit bytes)
diff --git a/api-ref/source/v2/index.rst b/api-ref/source/v2/index.rst
index f18dfbf31..61b2eb7cd 100644
--- a/api-ref/source/v2/index.rst
+++ b/api-ref/source/v2/index.rst
@@ -33,3 +33,4 @@ Image Service API v2 (CURRENT)
.. include:: discovery.inc
.. include:: tasks.inc
.. include:: tasks-schemas.inc
+.. include:: cache-manage.inc
diff --git a/api-ref/source/v2/samples/stores-list-detail-response.json b/api-ref/source/v2/samples/stores-list-detail-response.json
new file mode 100644
index 000000000..f377518f5
--- /dev/null
+++ b/api-ref/source/v2/samples/stores-list-detail-response.json
@@ -0,0 +1,19 @@
+{
+ "stores": [
+ {
+ "id":"reliable",
+ "type": "rbd",
+ "description": "More expensive store with data redundancy",
+ "default": true,
+ "properties": {
+ "pool": "pool1"
+ }
+ },
+ {
+ "id":"cheap",
+ "type": "file",
+ "description": "Less expensive store for seldom-used images",
+ "properties": {}
+ }
+ ]
+}
diff --git a/api-ref/source/v2/samples/usage-response.json b/api-ref/source/v2/samples/usage-response.json
new file mode 100644
index 000000000..961d8153c
--- /dev/null
+++ b/api-ref/source/v2/samples/usage-response.json
@@ -0,0 +1,20 @@
+{
+ "usage": {
+ "image_size_total": {
+ "limit": 1024,
+ "usage": 256
+ },
+ "image_count_total": {
+ "limit": 10,
+ "usage": 2
+ },
+ "image_stage_total": {
+ "limit": 512,
+ "usage": 0
+ },
+ "image_count_uploading": {
+ "limit": 2,
+ "usage": 0
+ }
+ }
+}
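To make the numbers concrete, the headroom left under each limit can be computed directly from a response shaped like the sample above; the size and stage figures are in MiB, matching the // units.Mi conversion in glance/quota/keystone.py later in this change.

    # Compute remaining headroom for each tracked resource; the values are
    # copied from the sample response above.
    sample = {
        "image_size_total": {"limit": 1024, "usage": 256},
        "image_count_total": {"limit": 10, "usage": 2},
        "image_stage_total": {"limit": 512, "usage": 0},
        "image_count_uploading": {"limit": 2, "usage": 0},
    }

    for name, record in sample.items():
        remaining = record["limit"] - record["usage"]
        print("%s: %d used, %d remaining of %d" % (
            name, record["usage"], remaining, record["limit"]))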
diff --git a/doc/source/configuration/configuring.rst b/doc/source/configuration/configuring.rst
index 9cb26f74a..8d02775d2 100644
--- a/doc/source/configuration/configuring.rst
+++ b/doc/source/configuration/configuring.rst
@@ -1390,8 +1390,8 @@ configuration file, select the appropriate deployment flavor like so::
[paste_deploy]
flavor = caching
-Enabling the Image Cache Management Middleware
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Enabling the Image Cache Management Middleware (DEPRECATED)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There is an optional ``cachemanage`` middleware that allows you to
directly interact with cache images. Use this flavor in place of the
@@ -1402,6 +1402,11 @@ can chose: ``cachemanagement``, ``keystone+cachemanagement`` and
[paste_deploy]
flavor = keystone+cachemanagement
+The new cache management endpoints were introduced in Images API v2.14.
+If the cache middleware is configured, the new endpoints are active and
+there is no need to use the cachemanagement middleware unless you still
+want to use the old `glance-cache-manage` tooling.
+
Configuration Options Affecting the Image Cache
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/doc/source/contributor/releasecycle.rst b/doc/source/contributor/releasecycle.rst
index 7e9dd5bb1..9fd46cf2a 100644
--- a/doc/source/contributor/releasecycle.rst
+++ b/doc/source/contributor/releasecycle.rst
@@ -24,6 +24,18 @@ Between Summit and Milestone-1
#. Add any Glance-specific schedule information to the release calendar
(https://review.opendev.org/#/c/505425/)
+#. Update the ``CURRENT_RELEASE`` constant in ``glance/db/migration.py``.
+ Include a ``Sem-Ver`` pseudo-header in the commit message so that
+ PBR will increment the glance version number to match the release
+ name.
+
+ * The value of the ``Sem-Ver`` pseudo-header must be ``api-break``
+ (which is a little disconcerting) because we need to increment the
+ major digit in the **Glance** version number (we aren't signalling
+ anything about the **Images** API), and that's the constant
+ that pbr recognizes for this purpose.
+ * Example patch: https://review.opendev.org/c/openstack/glance/+/827919
+
#. Focus on spec reviews to get them approved and updated early in
the cycle to allow enough time for implementation.
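For illustration, a release-open patch that bumps CURRENT_RELEASE (as this change does for 'yoga' in glance/db/migration.py below) might carry a commit message like the following; the subject line is made up, only the Sem-Ver value is prescribed by the instructions above.

    Bump CURRENT_RELEASE for the Yoga development cycle

    Sem-Ver: api-break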
diff --git a/glance/api/middleware/version_negotiation.py b/glance/api/middleware/version_negotiation.py
index d645b0ca3..d5ab5e2d1 100644
--- a/glance/api/middleware/version_negotiation.py
+++ b/glance/api/middleware/version_negotiation.py
@@ -82,6 +82,10 @@ class VersionNegotiationFilter(wsgi.Middleware):
allowed_versions['v2.6'] = 2
allowed_versions['v2.7'] = 2
allowed_versions['v2.9'] = 2
+ allowed_versions['v2.13'] = 2
+ if CONF.image_cache_dir:
+ allowed_versions['v2.14'] = 2
+ allowed_versions['v2.15'] = 2
if CONF.enabled_backends:
allowed_versions['v2.8'] = 2
allowed_versions['v2.10'] = 2
diff --git a/glance/api/v2/cached_images.py b/glance/api/v2/cached_images.py
index 93663df20..002b8cf9b 100644
--- a/glance/api/v2/cached_images.py
+++ b/glance/api/v2/cached_images.py
@@ -17,6 +17,8 @@
Controller for Image Cache Management API
"""
+import glance_store
+from oslo_config import cfg
from oslo_log import log as logging
import webob.exc
@@ -24,8 +26,14 @@ from glance.api import policy
from glance.api.v2 import policy as api_policy
from glance.common import exception
from glance.common import wsgi
+import glance.db
+import glance.gateway
+from glance.i18n import _
from glance import image_cache
+import glance.notifier
+
+CONF = cfg.CONF
LOG = logging.getLogger(__name__)
@@ -34,19 +42,36 @@ class CacheController(object):
A controller for managing cached images.
"""
- def __init__(self):
- self.cache = image_cache.ImageCache()
- self.policy = policy.Enforcer()
-
- def _enforce(self, req):
- """Authorize request against 'manage_image_cache' policy"""
+ def __init__(self, db_api=None, policy_enforcer=None, notifier=None,
+ store_api=None):
+ if not CONF.image_cache_dir:
+ self.cache = None
+ else:
+ self.cache = image_cache.ImageCache()
+
+ self.policy = policy_enforcer or policy.Enforcer()
+ self.db_api = db_api or glance.db.get_api()
+ self.notifier = notifier or glance.notifier.Notifier()
+ self.store_api = store_api or glance_store
+ self.gateway = glance.gateway.Gateway(self.db_api, self.store_api,
+ self.notifier, self.policy)
+
+ def _enforce(self, req, image=None, new_policy=None):
+ """Authorize request against given policy"""
+ if not new_policy:
+ new_policy = 'manage_image_cache'
try:
api_policy.CacheImageAPIPolicy(
- req.context, enforcer=self.policy).manage_image_cache()
+ req.context, image=image, enforcer=self.policy,
+ policy_str=new_policy).manage_image_cache()
except exception.Forbidden:
- LOG.debug("User not permitted to manage the image cache")
+ LOG.debug("User not permitted by '%s' policy" % new_policy)
raise webob.exc.HTTPForbidden()
+ if not CONF.image_cache_dir:
+ msg = _("Caching via API is not supported at this site.")
+ raise webob.exc.HTTPNotFound(explanation=msg)
+
def get_cached_images(self, req):
"""
GET /cached_images
@@ -114,6 +139,99 @@ class CacheController(object):
self._enforce(req)
return dict(num_deleted=self.cache.delete_all_queued_images())
+ def delete_cache_entry(self, req, image_id):
+ """
+ DELETE /cache/<IMAGE_ID> - Remove image from cache
+
+ Removes the image from cache or queue.
+ """
+ image_repo = self.gateway.get_repo(
+ req.context, authorization_layer=False)
+ try:
+ image = image_repo.get(image_id)
+ except exception.NotFound:
+ # We are going to raise this error only if image is
+ # not present in cache or queue list
+ image = None
+ if not self.image_exists_in_cache(image_id):
+ msg = _("Image %s not found.") % image_id
+ LOG.warning(msg)
+ raise webob.exc.HTTPNotFound(explanation=msg)
+
+ self._enforce(req, new_policy='cache_delete', image=image)
+ self.cache.delete_cached_image(image_id)
+ self.cache.delete_queued_image(image_id)
+
+ def image_exists_in_cache(self, image_id):
+ queued_images = self.cache.get_queued_images()
+ if image_id in queued_images:
+ return True
+
+ cached_images = self.cache.get_cached_images()
+ if image_id in [image['image_id'] for image in cached_images]:
+ return True
+
+ return False
+
+ def clear_cache(self, req):
+ """
+ DELETE /cache - Clear cache and queue
+
+ Removes all images from cache and queue.
+ """
+ self._enforce(req, new_policy='cache_delete')
+ target = req.headers.get('x-image-cache-clear-target', '').lower()
+ if target == '':
+ res = dict(cache_deleted=self.cache.delete_all_cached_images(),
+ queue_deleted=self.cache.delete_all_queued_images())
+ elif target == 'cache':
+ res = dict(cache_deleted=self.cache.delete_all_cached_images())
+ elif target == 'queue':
+ res = dict(queue_deleted=self.cache.delete_all_queued_images())
+ else:
+ reason = (_("If provided 'x-image-cache-clear-target' must be "
+ "'cache', 'queue' or empty string."))
+ raise webob.exc.HTTPBadRequest(explanation=reason,
+ request=req,
+ content_type='text/plain')
+ return res
+
+ def get_cache_state(self, req):
+ """
+ GET /cache/ - Get currently cached and queued images
+
+ Returns dict of cached and queued images
+ """
+ self._enforce(req, new_policy='cache_list')
+ return dict(cached_images=self.cache.get_cached_images(),
+ queued_images=self.cache.get_queued_images())
+
+ def queue_image_from_api(self, req, image_id):
+ """
+ PUT /cache/<IMAGE_ID>
+
+ Queues an image for caching. We do not check to see if
+ the image is in the registry here. That is done by the
+ prefetcher...
+ """
+ image_repo = self.gateway.get_repo(
+ req.context, authorization_layer=False)
+ try:
+ image = image_repo.get(image_id)
+ except exception.NotFound:
+ msg = _("Image %s not found.") % image_id
+ LOG.warning(msg)
+ raise webob.exc.HTTPNotFound(explanation=msg)
+
+ self._enforce(req, new_policy='cache_image', image=image)
+
+ if image.status != 'active':
+ msg = _("Only images with status active can be targeted for "
+ "queueing")
+ raise webob.exc.HTTPBadRequest(explanation=msg)
+
+ self.cache.queue_image(image_id)
+
class CachedImageDeserializer(wsgi.JSONRequestDeserializer):
pass
diff --git a/glance/api/v2/discovery.py b/glance/api/v2/discovery.py
index 1002c8a5d..c0aa477dd 100644
--- a/glance/api/v2/discovery.py
+++ b/glance/api/v2/discovery.py
@@ -13,17 +13,31 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import copy
+
+import glance_store as g_store
from oslo_config import cfg
+from oslo_log import log as logging
+import oslo_serialization.jsonutils as json
import webob.exc
+from glance.api import policy
+from glance.api.v2 import policy as api_policy
+from glance.common import exception
from glance.common import wsgi
+import glance.db
from glance.i18n import _
-
+from glance.quota import keystone as ks_quota
CONF = cfg.CONF
+LOG = logging.getLogger(__name__)
+
class InfoController(object):
+ def __init__(self, policy_enforcer=None):
+ self.policy = policy_enforcer or policy.Enforcer()
+
def get_image_import(self, req):
# TODO(jokke): All the rest of the boundaries should be implemented.
import_methods = {
@@ -63,6 +77,73 @@ class InfoController(object):
return {'stores': backends}
+ def get_stores_detail(self, req):
+ enabled_backends = CONF.enabled_backends
+ stores = self.get_stores(req).get('stores')
+ try:
+ api_policy.DiscoveryAPIPolicy(
+ req.context,
+ enforcer=self.policy).stores_info_detail()
+ for store in stores:
+ store['type'] = enabled_backends[store['id']]
+ store['properties'] = {}
+ if store['type'] == 'rbd':
+ store_detail = g_store.get_store_from_store_identifier(
+ store['id'])
+ store['properties'] = {'chunk_size':
+ store_detail.chunk_size,
+ 'pool': store_detail.pool,
+ 'thin_provisioning':
+ store_detail.thin_provisioning}
+ except exception.Forbidden as e:
+ LOG.debug("User not permitted to view details")
+ raise webob.exc.HTTPForbidden(explanation=e.msg)
+
+ return {'stores': stores}
+
+ def get_usage(self, req):
+ project_usage = ks_quota.get_usage(req.context)
+ return {'usage':
+ {name: {'usage': usage.usage,
+ 'limit': usage.limit}
+ for name, usage in project_usage.items()}}
+
+
+class ResponseSerializer(wsgi.JSONResponseSerializer):
+ def __init__(self, usage_schema=None):
+ super(ResponseSerializer, self).__init__()
+ self.schema = usage_schema or get_usage_schema()
+
+ def get_usage(self, response, usage):
+ body = json.dumps(self.schema.filter(usage), ensure_ascii=False)
+ response.unicode_body = str(body)
+ response.content_type = 'application/json'
+
+
+_USAGE_SCHEMA = {
+ 'usage': {
+ 'type': 'array',
+ 'items': {
+ 'type': 'object',
+ 'additionalProperties': True,
+ 'validation_data': {
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'properties': {
+ 'usage': {'type': 'integer'},
+ 'limit': {'type': 'integer'},
+ },
+ },
+ },
+ },
+}
+
+
+def get_usage_schema():
+ return glance.schema.Schema('usage', copy.deepcopy(_USAGE_SCHEMA))
+
def create_resource():
- return wsgi.Resource(InfoController())
+ usage_schema = get_usage_schema()
+ serializer = ResponseSerializer(usage_schema)
+ return wsgi.Resource(InfoController(), None, serializer)
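Note that for an rbd-backed store the entry produced by get_stores_detail() carries more properties than the earlier sample response shows; an illustrative entry follows, where the values are placeholders and only the keys match the code above.

    # Illustrative /v2/info/stores/detail entry for an rbd store. The values
    # are placeholders; the property keys are the ones collected above.
    rbd_store_entry = {
        'id': 'reliable',
        'type': 'rbd',
        'description': 'More expensive store with data redundancy',
        'properties': {
            'chunk_size': 8388608,         # placeholder
            'pool': 'pool1',
            'thin_provisioning': False,
        },
    }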
diff --git a/glance/api/v2/policy.py b/glance/api/v2/policy.py
index 78f7a23d9..8ad404647 100644
--- a/glance/api/v2/policy.py
+++ b/glance/api/v2/policy.py
@@ -105,14 +105,32 @@ class APIPolicyBase(object):
class CacheImageAPIPolicy(APIPolicyBase):
- def __init__(self, context, target=None, enforcer=None):
+ def __init__(self, context, image=None, policy_str=None,
+ target=None, enforcer=None):
self._context = context
- self._target = target or {}
+ target = {}
+ self._image = image
+ if self._image:
+ target = policy.ImageTarget(self._image)
+
+ self._target = target
self.enforcer = enforcer or policy.Enforcer()
+ self.policy_str = policy_str
super(CacheImageAPIPolicy, self).__init__(context, target, enforcer)
def manage_image_cache(self):
- self._enforce('manage_image_cache')
+ self._enforce(self.policy_str)
+
+
+class DiscoveryAPIPolicy(APIPolicyBase):
+ def __init__(self, context, target=None, enforcer=None):
+ self._context = context
+ self._target = target or {}
+ self.enforcer = enforcer or policy.Enforcer()
+ super(DiscoveryAPIPolicy, self).__init__(context, target, enforcer)
+
+ def stores_info_detail(self):
+ self._enforce('stores_info_detail')
class ImageAPIPolicy(APIPolicyBase):
diff --git a/glance/api/v2/router.py b/glance/api/v2/router.py
index 2055d5cbd..d8ebdf398 100644
--- a/glance/api/v2/router.py
+++ b/glance/api/v2/router.py
@@ -13,6 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.
+from glance.api.v2 import cached_images
from glance.api.v2 import discovery
from glance.api.v2 import image_actions
from glance.api.v2 import image_data
@@ -588,5 +589,46 @@ class API(wsgi.Router):
controller=reject_method_resource,
action='reject',
allowed_methods='GET')
+ mapper.connect('/info/usage',
+ controller=info_resource,
+ action='get_usage',
+ conditions={'method': ['GET']})
+ mapper.connect('/info/stores/detail',
+ controller=info_resource,
+ action='get_stores_detail',
+ conditions={'method': ['GET']},
+ body_reject=True)
+ mapper.connect('/info/stores/detail',
+ controller=reject_method_resource,
+ action='reject',
+ allowed_methods='GET')
+
+ # Cache Management API
+ cache_manage_resource = cached_images.create_resource()
+ mapper.connect('/cache',
+ controller=cache_manage_resource,
+ action='get_cache_state',
+ conditions={'method': ['GET']},
+ body_reject=True)
+ mapper.connect('/cache',
+ controller=cache_manage_resource,
+ action='clear_cache',
+ conditions={'method': ['DELETE']})
+ mapper.connect('/cache',
+ controller=reject_method_resource,
+ action='reject',
+ allowed_methods='GET, DELETE')
+ mapper.connect('/cache/{image_id}',
+ controller=cache_manage_resource,
+ action='delete_cache_entry',
+ conditions={'method': ['DELETE']})
+ mapper.connect('/cache/{image_id}',
+ controller=cache_manage_resource,
+ action='queue_image_from_api',
+ conditions={'method': ['PUT']})
+ mapper.connect('/cache/{image_id}',
+ controller=reject_method_resource,
+ action='reject',
+ allowed_methods='DELETE, PUT')
super(API, self).__init__(mapper)
diff --git a/glance/api/versions.py b/glance/api/versions.py
index f9278a2a4..15b048751 100644
--- a/glance/api/versions.py
+++ b/glance/api/versions.py
@@ -77,9 +77,19 @@ class Controller(object):
}
version_objs = []
+ if CONF.image_cache_dir:
+ version_objs.extend([
+ build_version_object(2.15, 'v2', 'CURRENT'),
+ build_version_object(2.14, 'v2', 'SUPPORTED'),
+ ])
+ else:
+ version_objs.extend([
+ build_version_object(2.15, 'v2', 'CURRENT'),
+ ])
if CONF.enabled_backends:
version_objs.extend([
- build_version_object(2.12, 'v2', 'CURRENT'),
+ build_version_object(2.13, 'v2', 'SUPPORTED'),
+ build_version_object(2.12, 'v2', 'SUPPORTED'),
build_version_object(2.11, 'v2', 'SUPPORTED'),
build_version_object('2.10', 'v2', 'SUPPORTED'),
build_version_object(2.9, 'v2', 'SUPPORTED'),
@@ -87,7 +97,7 @@ class Controller(object):
])
else:
version_objs.extend([
- build_version_object(2.9, 'v2', 'CURRENT'),
+ build_version_object(2.9, 'v2', 'SUPPORTED'),
])
version_objs.extend([
build_version_object(2.7, 'v2', 'SUPPORTED'),
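Because v2.14 is only advertised when image_cache_dir is set (see the version negotiation change above), a client can probe for cache API support via the version document. A minimal sketch, assuming the usual {'versions': [{'id': 'v2.x', ...}, ...]} shape and a placeholder endpoint:

    # Detect whether the cache management API (v2.14) is available.
    import requests

    GLANCE_ROOT = 'http://controller:9292/'   # placeholder endpoint

    ids = {v['id'] for v in requests.get(GLANCE_ROOT).json()['versions']}
    if 'v2.14' in ids:
        print('Cache management API is available')
    else:
        print('image_cache_dir is not configured; cache endpoints return 404')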
diff --git a/glance/common/property_utils.py b/glance/common/property_utils.py
index 6345f164a..fc4c8ea14 100644
--- a/glance/common/property_utils.py
+++ b/glance/common/property_utils.py
@@ -134,7 +134,10 @@ class PropertyRules(object):
compiled_rule = self._compile_rule(property_exp)
for operation in operations:
- permissions = CONFIG.get(property_exp, operation)
+ try:
+ permissions = CONFIG.get(property_exp, operation)
+ except configparser.NoOptionError:
+ raise InvalidPropProtectConf()
if permissions:
if self.prop_prot_rule_format == 'policies':
if ',' in permissions:
diff --git a/glance/db/migration.py b/glance/db/migration.py
index 9b2151bbb..9db8a67f7 100644
--- a/glance/db/migration.py
+++ b/glance/db/migration.py
@@ -29,5 +29,5 @@ db_options.set_defaults(cfg.CONF)
# Migration-related constants
EXPAND_BRANCH = 'expand'
CONTRACT_BRANCH = 'contract'
-CURRENT_RELEASE = 'xena'
+CURRENT_RELEASE = 'yoga'
ALEMBIC_INIT_VERSION = 'liberty'
diff --git a/glance/db/sqlalchemy/api.py b/glance/db/sqlalchemy/api.py
index 2099d378e..ff8c6b1a3 100644
--- a/glance/db/sqlalchemy/api.py
+++ b/glance/db/sqlalchemy/api.py
@@ -870,13 +870,13 @@ def image_set_property_atomic(image_id, name, value):
table = models.ImageProperty.__table__
# This should be:
- # UPDATE image_properties SET value=$value, deleted=0
- # WHERE name=$name AND deleted!=0
+ # UPDATE image_properties SET value=$value, deleted=False
+ # WHERE name=$name AND deleted!=False
result = connection.execute(table.update().where(
sa_sql.and_(table.c.name == name,
table.c.image_id == image_id,
- table.c.deleted != 0)).values(
- value=value, deleted=0))
+ table.c.deleted != False)).values(
+ value=value, deleted=False))
if result.rowcount == 1:
# Found and updated a deleted property, so we win
return
@@ -921,7 +921,7 @@ def image_delete_property_atomic(image_id, name, value):
sa_sql.and_(table.c.name == name,
table.c.value == value,
table.c.image_id == image_id,
- table.c.deleted == 0)))
+ table.c.deleted == False)))
if result.rowcount == 1:
return
diff --git a/glance/policies/__init__.py b/glance/policies/__init__.py
index 67b9dfc07..55ceda7c6 100644
--- a/glance/policies/__init__.py
+++ b/glance/policies/__init__.py
@@ -13,6 +13,8 @@
import itertools
from glance.policies import base
+from glance.policies import cache
+from glance.policies import discovery
from glance.policies import image
from glance.policies import metadef
from glance.policies import tasks
@@ -24,4 +26,6 @@ def list_rules():
image.list_rules(),
tasks.list_rules(),
metadef.list_rules(),
+ cache.list_rules(),
+ discovery.list_rules(),
)
diff --git a/glance/policies/base.py b/glance/policies/base.py
index 397896614..ef908eae2 100644
--- a/glance/policies/base.py
+++ b/glance/policies/base.py
@@ -90,6 +90,7 @@ ADMIN_OR_PROJECT_READER_OR_SHARED_MEMBER = (
f'role:reader and (project_id:%(project_id)s or {IMAGE_MEMBER_CHECK})'
)
+ADMIN = 'role:admin'
rules = [
policy.RuleDefault(name='default', check_str='',
diff --git a/glance/policies/cache.py b/glance/policies/cache.py
new file mode 100644
index 000000000..ec8c1ccb1
--- /dev/null
+++ b/glance/policies/cache.py
@@ -0,0 +1,75 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from oslo_log import versionutils
+from oslo_policy import policy
+
+from glance.policies import base
+
+
+DEPRECATED_REASON = """
+The image API now supports roles.
+"""
+
+
+cache_policies = [
+ policy.DocumentedRuleDefault(
+ name="cache_image",
+ check_str=base.ADMIN,
+ scope_types=['project'],
+ description='Queue image for caching',
+ operations=[
+ {'path': '/v2/cache/{image_id}',
+ 'method': 'PUT'}
+ ],
+ deprecated_rule=policy.DeprecatedRule(
+ name="cache_image", check_str="rule:manage_image_cache",
+ deprecated_reason=DEPRECATED_REASON,
+ deprecated_since=versionutils.deprecated.XENA
+ ),
+ ),
+ policy.DocumentedRuleDefault(
+ name="cache_list",
+ check_str=base.ADMIN,
+ scope_types=['project'],
+ description='List cache status',
+ operations=[
+ {'path': '/v2/cache',
+ 'method': 'GET'}
+ ],
+ deprecated_rule=policy.DeprecatedRule(
+ name="cache_list", check_str="rule:manage_image_cache",
+ deprecated_reason=DEPRECATED_REASON,
+ deprecated_since=versionutils.deprecated.XENA
+ ),
+ ),
+ policy.DocumentedRuleDefault(
+ name="cache_delete",
+ check_str=base.ADMIN,
+ scope_types=['project'],
+ description='Delete image(s) from cache and/or queue',
+ operations=[
+ {'path': '/v2/cache',
+ 'method': 'DELETE'},
+ {'path': '/v2/cache/{image_id}',
+ 'method': 'DELETE'}
+ ],
+ deprecated_rule=policy.DeprecatedRule(
+ name="cache_delete", check_str="rule:manage_image_cache",
+ deprecated_reason=DEPRECATED_REASON,
+ deprecated_since=versionutils.deprecated.XENA
+ ),
+ ),
+]
+
+
+def list_rules():
+ return cache_policies
diff --git a/glance/policies/discovery.py b/glance/policies/discovery.py
new file mode 100644
index 000000000..3273a48ec
--- /dev/null
+++ b/glance/policies/discovery.py
@@ -0,0 +1,33 @@
+# Copyright 2021 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from oslo_policy import policy
+
+
+discovery_policies = [
+ policy.DocumentedRuleDefault(
+ name="stores_info_detail",
+ check_str='role:admin',
+ scope_types=['system', 'project'],
+ description='Expose store specific information',
+ operations=[
+ {'path': '/v2/info/stores/detail',
+ 'method': 'GET'}
+ ]
+ ),
+]
+
+
+def list_rules():
+ return discovery_policies
diff --git a/glance/quota/keystone.py b/glance/quota/keystone.py
index fa8e8c3e1..67b48d328 100644
--- a/glance/quota/keystone.py
+++ b/glance/quota/keystone.py
@@ -142,3 +142,29 @@ def enforce_image_count_uploading(context, project_id):
context, project_id, QUOTA_IMAGE_COUNT_UPLOADING,
lambda: db.user_get_uploading_count(context, project_id),
delta=0)
+
+
+def get_usage(context, project_id=None):
+ if not CONF.use_keystone_limits:
+ return {}
+
+ if not project_id:
+ project_id = context.project_id
+
+ usages = {
+ QUOTA_IMAGE_SIZE_TOTAL: lambda: db.user_get_storage_usage(
+ context, project_id) // units.Mi,
+ QUOTA_IMAGE_STAGING_TOTAL: lambda: db.user_get_staging_usage(
+ context, project_id) // units.Mi,
+ QUOTA_IMAGE_COUNT_TOTAL: lambda: db.user_get_image_count(
+ context, project_id),
+ QUOTA_IMAGE_COUNT_UPLOADING: lambda: db.user_get_uploading_count(
+ context, project_id),
+ }
+
+ def callback(project_id, resource_names):
+ return {name: usages[name]()
+ for name in resource_names}
+
+ enforcer = limit.Enforcer(callback)
+ return enforcer.calculate_usage(project_id, list(usages.keys()))
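To make the callback contract used by get_usage() concrete, here is a standalone sketch of the pattern; the numbers are invented for illustration, whereas the real lambdas query the database and oslo.limit invokes the callback with only the resource names it needs.

    # Sketch of the usage-callback pattern above: the caller asks for a subset
    # of resource names and gets back only those current counts.
    usages = {
        'image_size_total': lambda: 256,       # invented value (MiB in use)
        'image_count_total': lambda: 2,        # invented value (active images)
    }

    def callback(project_id, resource_names):
        return {name: usages[name]() for name in resource_names}

    print(callback('my-project', ['image_count_total']))
    # -> {'image_count_total': 2}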
diff --git a/glance/tests/functional/__init__.py b/glance/tests/functional/__init__.py
index 14dab2d6c..a0e03af39 100644
--- a/glance/tests/functional/__init__.py
+++ b/glance/tests/functional/__init__.py
@@ -1550,6 +1550,8 @@ class SynchronousAPIBase(test_utils.BaseTestCase):
CacheManageFilter.factory
[pipeline:glance-api-cachemanagement]
pipeline = context cache cachemanage rootapp
+ [pipeline:glance-api-caching]
+ pipeline = context cache rootapp
[pipeline:glance-api]
pipeline = context rootapp
[composite:rootapp]
diff --git a/glance/tests/functional/test_api.py b/glance/tests/functional/test_api.py
index fa5608d64..cf93517a8 100644
--- a/glance/tests/functional/test_api.py
+++ b/glance/tests/functional/test_api.py
@@ -30,7 +30,8 @@ class TestApiVersions(functional.FunctionalTest):
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d' % self.api_port
- versions = {'versions': tv.get_versions_list(url)}
+ versions = {'versions': tv.get_versions_list(url,
+ enabled_cache=True)}
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
@@ -44,7 +45,8 @@ class TestApiVersions(functional.FunctionalTest):
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d' % self.api_port
- versions = {'versions': tv.get_versions_list(url)}
+ versions = {'versions': tv.get_versions_list(url,
+ enabled_cache=True)}
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
@@ -62,7 +64,8 @@ class TestApiVersionsMultistore(functional.MultipleBackendFunctionalTest):
url = 'http://127.0.0.1:%d' % self.api_port
versions = {'versions': tv.get_versions_list(url,
- enabled_backends=True)}
+ enabled_backends=True,
+ enabled_cache=True)}
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
@@ -77,7 +80,8 @@ class TestApiVersionsMultistore(functional.MultipleBackendFunctionalTest):
url = 'http://127.0.0.1:%d' % self.api_port
versions = {'versions': tv.get_versions_list(url,
- enabled_backends=True)}
+ enabled_backends=True,
+ enabled_cache=True)}
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
@@ -94,7 +98,8 @@ class TestApiPaths(functional.FunctionalTest):
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d' % self.api_port
- self.versions = {'versions': tv.get_versions_list(url)}
+ self.versions = {'versions': tv.get_versions_list(url,
+ enabled_cache=True)}
images = {'images': []}
self.images_json = jsonutils.dumps(images)
diff --git a/glance/tests/functional/v2/test_cache_api.py b/glance/tests/functional/v2/test_cache_api.py
new file mode 100644
index 000000000..bd3048aef
--- /dev/null
+++ b/glance/tests/functional/v2/test_cache_api.py
@@ -0,0 +1,360 @@
+# Copyright 2021 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from unittest import mock
+
+import oslo_policy.policy
+
+from glance.api import policy
+from glance.image_cache import prefetcher
+from glance.tests import functional
+
+
+class TestImageCache(functional.SynchronousAPIBase):
+ # ToDo(abhishekk): Once system scope is enabled and RBAC is fully
+ # supported, enable these tests for RBAC as well
+ def setUp(self):
+ super(TestImageCache, self).setUp()
+ self.policy = policy.Enforcer(suppress_deprecation_warnings=True)
+
+ def set_policy_rules(self, rules):
+ self.policy.set_rules(
+ oslo_policy.policy.Rules.from_dict(rules),
+ overwrite=True)
+
+ def start_server(self, enable_cache=True):
+ with mock.patch.object(policy, 'Enforcer') as mock_enf:
+ mock_enf.return_value = self.policy
+ super(TestImageCache, self).start_server(enable_cache=enable_cache)
+
+ def load_data(self):
+ output = {}
+ # Create 1 queued image as well for testing
+ path = "/v2/images"
+ data = {
+ 'name': 'queued-image',
+ 'container_format': 'bare',
+ 'disk_format': 'raw'
+ }
+ response = self.api_post(path, json=data)
+ self.assertEqual(201, response.status_code)
+ image_id = response.json['id']
+ output['queued'] = image_id
+
+ for visibility in ['public', 'private', 'community', 'shared']:
+ data = {
+ 'name': '%s-image' % visibility,
+ 'visibility': visibility,
+ 'container_format': 'bare',
+ 'disk_format': 'raw'
+ }
+ response = self.api_post(path, json=data)
+ self.assertEqual(201, response.status_code)
+ image_id = response.json['id']
+ # Upload some data to image
+ response = self.api_put(
+ '/v2/images/%s/file' % image_id,
+ headers={'Content-Type': 'application/octet-stream'},
+ data=b'IMAGEDATA')
+ self.assertEqual(204, response.status_code)
+ output[visibility] = image_id
+
+ return output
+
+ def list_cache(self, expected_code=200):
+ path = '/v2/cache'
+ response = self.api_get(path)
+ self.assertEqual(expected_code, response.status_code)
+ if response.status_code == 200:
+ return response.json
+
+ def cache_queue(self, image_id, expected_code=200):
+ # Queue image for prefetching
+ path = '/v2/cache/%s' % image_id
+ response = self.api_put(path)
+ self.assertEqual(expected_code, response.status_code)
+
+ def cache_delete(self, image_id, expected_code=200):
+ path = '/v2/cache/%s' % image_id
+ response = self.api_delete(path)
+ self.assertEqual(expected_code, response.status_code)
+
+ def cache_clear(self, target='', expected_code=200):
+ path = '/v2/cache'
+ headers = {}
+ if target:
+ headers['x-image-cache-clear-target'] = target
+ response = self.api_delete(path, headers=headers)
+ self.assertEqual(expected_code, response.status_code)
+
+ def cache_image(self):
+ # NOTE(abhishekk): We are not running the periodic job which caches
+ # queued images here, as precaching is not part of this patch, so to
+ # test all caching operations we cache the images this way.
+ cache_prefetcher = prefetcher.Prefetcher()
+ cache_prefetcher.run()
+
+ def test_cache_api_lifecycle(self):
+ self.start_server(enable_cache=True)
+ images = self.load_data()
+
+ # Ensure that nothing is cached and nothing is queued for caching
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ # Try non-existing image to queue for caching
+ self.cache_queue('non-existing-image-id', expected_code=404)
+
+ # Verify that you can not queue non-active image
+ self.cache_queue(images['queued'], expected_code=400)
+
+ # Queue 1 image for caching
+ self.cache_queue(images['public'])
+ # Now verify that we have 1 image queued for caching and 0
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(1, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+ # Verify same image is queued for caching
+ self.assertIn(images['public'], output['queued_images'])
+
+ # Cache the image
+ self.cache_image()
+ # Now verify that we have 0 queued image and 1 cached image
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(1, len(output['cached_images']))
+ # Verify the same image is now cached
+ self.assertIn(images['public'], output['cached_images'][0]['image_id'])
+
+ # Queue 2nd image for caching
+ self.cache_queue(images['community'])
+ # Now verify that we have 1 image queued for caching and 1
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(1, len(output['queued_images']))
+ self.assertEqual(1, len(output['cached_images']))
+ # Verify the community image is queued and the public image is cached
+ self.assertIn(images['community'], output['queued_images'])
+ self.assertIn(images['public'], output['cached_images'][0]['image_id'])
+
+ # Queue 3rd image for caching
+ self.cache_queue(images['private'])
+ # Now verify that we have 2 images queued for caching and 1
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(2, len(output['queued_images']))
+ self.assertEqual(1, len(output['cached_images']))
+ # Verify same image is queued for caching
+ self.assertIn(images['private'], output['queued_images'])
+
+ # Try to delete non-existing image from cache
+ self.cache_delete('non-existing-image-id', expected_code=404)
+
+ # Delete public image from cache
+ self.cache_delete(images['public'])
+ # Now verify that we have 2 image queued for caching and no
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(2, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ # Verify clearing cache fails with 400 if invalid header is passed
+ self.cache_clear(target='both', expected_code=400)
+
+ # Delete all queued images
+ self.cache_clear(target='queue')
+ # Now verify that we have 0 image queued for caching and 0
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ # Queue and cache image so we have something to clear
+ self.cache_queue(images['public'])
+ # Now verify that we have 1 queued image
+ output = self.list_cache()
+ self.assertEqual(1, len(output['queued_images']))
+ self.cache_image()
+ # Now verify that we have 0 queued image and 1 cached image
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(1, len(output['cached_images']))
+
+ # Delete all cached images
+ self.cache_clear(target='cache')
+ # Now verify that we have 0 image queued for caching and 0
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ # Now we need 2 queued images and 2 cached images in order
+ # to delete both of them together
+ self.cache_queue(images['public'])
+ self.cache_queue(images['private'])
+ # Now verify that we have 2 queued images
+ output = self.list_cache()
+ self.assertEqual(2, len(output['queued_images']))
+
+ self.cache_image()
+ # Now verify that we have 0 queued images and 2 cached images
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(2, len(output['cached_images']))
+
+ self.cache_queue(images['community'])
+ self.cache_queue(images['shared'])
+ # Verify we have 2 queued and 2 cached images
+ output = self.list_cache()
+ self.assertEqual(2, len(output['queued_images']))
+ self.assertEqual(2, len(output['cached_images']))
+
+ # Now delete all queued and all cached images at once
+ self.cache_clear()
+ # Now verify that we have 0 image queued for caching and 0
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ # Try to cache image again to validate nothing will be cached
+ self.cache_image()
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ def test_cache_image_queue_delete(self):
+ # This test verifies that if an image is queued for caching and
+ # the user deletes the original image, it remains in the queued
+ # list and can be removed with the cache-delete API.
+ self.start_server(enable_cache=True)
+ images = self.load_data()
+
+ # Ensure that nothing is cached and nothing is queued for caching
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ self.cache_queue(images['public'])
+ # Now verify that we have 1 image queued for caching and 0
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(1, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+ # Verify same image is queued for caching
+ self.assertIn(images['public'], output['queued_images'])
+
+ # Delete image and verify that it is still present
+ # in queued list
+ path = '/v2/images/%s' % images['public']
+ response = self.api_delete(path)
+ self.assertEqual(204, response.status_code)
+
+ output = self.list_cache()
+ self.assertEqual(1, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+ self.assertIn(images['public'], output['queued_images'])
+
+ # Deleted the image from queued list
+ self.cache_delete(images['public'])
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ def test_cache_image_cache_delete(self):
+ # This test verifies that if an image is cached and the user
+ # deletes the original image, the image is also removed from
+ # the cache.
+ self.start_server(enable_cache=True)
+ images = self.load_data()
+
+ # Ensure that nothing is cached and nothing is queued for caching
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ self.cache_queue(images['public'])
+ # Now verify that we have 1 image queued for caching and 0
+ # cached images
+ output = self.list_cache()
+ self.assertEqual(1, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+ # Verify same image is queued for caching
+ self.assertIn(images['public'], output['queued_images'])
+
+ # Cache the image
+ self.cache_image()
+ # Now verify that we have 0 queued image and 1 cached image
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(1, len(output['cached_images']))
+ # Verify the same image is now cached
+ self.assertIn(images['public'], output['cached_images'][0]['image_id'])
+
+ # Delete image and verify that it is deleted from
+ # cache as well
+ path = '/v2/images/%s' % images['public']
+ response = self.api_delete(path)
+ self.assertEqual(204, response.status_code)
+
+ output = self.list_cache()
+ self.assertEqual(0, len(output['queued_images']))
+ self.assertEqual(0, len(output['cached_images']))
+
+ def test_cache_api_cache_disabled(self):
+ self.start_server(enable_cache=False)
+ images = self.load_data()
+ # As cache is not enabled each API call should return 404 response
+ self.list_cache(expected_code=404)
+ self.cache_queue(images['public'], expected_code=404)
+ self.cache_delete(images['public'], expected_code=404)
+ self.cache_clear(expected_code=404)
+ self.cache_clear(target='both', expected_code=404)
+
+ # Now disable cache policies and ensure that you will get 403
+ self.set_policy_rules({
+ 'cache_list': '!',
+ 'cache_delete': '!',
+ 'cache_image': '!',
+ 'add_image': '',
+ 'upload_image': ''
+ })
+ self.list_cache(expected_code=403)
+ self.cache_queue(images['public'], expected_code=403)
+ self.cache_delete(images['public'], expected_code=403)
+ self.cache_clear(expected_code=403)
+ self.cache_clear(target='both', expected_code=403)
+
+ def test_cache_api_not_allowed(self):
+ self.start_server(enable_cache=True)
+ images = self.load_data()
+ # As cache operations are not allowed each API call should return
+ # 403 response
+ self.set_policy_rules({
+ 'cache_list': '!',
+ 'cache_delete': '!',
+ 'cache_image': '!',
+ 'add_image': '',
+ 'upload_image': ''
+ })
+ self.list_cache(expected_code=403)
+ self.cache_queue(images['public'], expected_code=403)
+ self.cache_delete(images['public'], expected_code=403)
+ self.cache_clear(expected_code=403)
+ self.cache_clear(target='both', expected_code=403)
diff --git a/glance/tests/functional/v2/test_discovery.py b/glance/tests/functional/v2/test_discovery.py
new file mode 100644
index 000000000..537f19558
--- /dev/null
+++ b/glance/tests/functional/v2/test_discovery.py
@@ -0,0 +1,98 @@
+# Copyright 2021 Red Hat, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import fixtures
+
+from oslo_utils import units
+
+from glance.quota import keystone as ks_quota
+from glance.tests import functional
+from glance.tests.functional.v2.test_images import get_enforcer_class
+from glance.tests import utils as test_utils
+
+
+class TestDiscovery(functional.SynchronousAPIBase):
+ def setUp(self):
+ super(TestDiscovery, self).setUp()
+ self.config(use_keystone_limits=True)
+
+ self.enforcer_mock = self.useFixture(
+ fixtures.MockPatchObject(ks_quota, 'limit')).mock
+
+ def set_limit(self, limits):
+ self.enforcer_mock.Enforcer = get_enforcer_class(limits)
+
+ def _assert_usage(self, expected):
+ usage = self.api_get('/v2/info/usage')
+ usage = usage.json['usage']
+ for item in ('count', 'size', 'stage'):
+ key = 'image_%s_total' % item
+ self.assertEqual(expected[key], usage[key],
+ 'Mismatch in %s' % key)
+ self.assertEqual(expected['image_count_uploading'],
+ usage['image_count_uploading'])
+
+ def test_quota_with_usage(self):
+ self.set_limit({'image_size_total': 5,
+ 'image_count_total': 10,
+ 'image_stage_total': 15,
+ 'image_count_uploading': 20})
+
+ self.start_server()
+
+ # Initially we expect no usage, but our limits in place.
+ expected = {
+ 'image_size_total': {'limit': 5, 'usage': 0},
+ 'image_count_total': {'limit': 10, 'usage': 0},
+ 'image_stage_total': {'limit': 15, 'usage': 0},
+ 'image_count_uploading': {'limit': 20, 'usage': 0},
+ }
+ self._assert_usage(expected)
+
+ # Stage 1MiB and see our total count, uploading count, and
+ # staging area usage increase.
+ data = test_utils.FakeData(1 * units.Mi)
+ image_id = self._create_and_stage(data_iter=data)
+ expected['image_count_uploading']['usage'] = 1
+ expected['image_count_total']['usage'] = 1
+ expected['image_stage_total']['usage'] = 1
+ self._assert_usage(expected)
+
+ # Doing the import does not change anything (since we are
+ # synchronous and the task will not have run yet).
+ self._import_direct(image_id, ['store1'])
+ self._assert_usage(expected)
+
+ # After the import is complete, our usage of the staging area
+ # drops to zero, and our consumption of actual store space
+ # reflects the new active image.
+ self._wait_for_import(image_id)
+ expected['image_count_uploading']['usage'] = 0
+ expected['image_stage_total']['usage'] = 0
+ expected['image_size_total']['usage'] = 1
+ self._assert_usage(expected)
+
+ # Upload also yields a new active image and store usage.
+ data = test_utils.FakeData(1 * units.Mi)
+ image_id = self._create_and_upload(data_iter=data)
+ expected['image_count_total']['usage'] = 2
+ expected['image_size_total']['usage'] = 2
+ self._assert_usage(expected)
+
+ # Deleting an image drops the usage down.
+ self.api_delete('/v2/images/%s' % image_id)
+ expected['image_count_total']['usage'] = 1
+ expected['image_size_total']['usage'] = 1
+ self._assert_usage(expected)
diff --git a/glance/tests/functional/v2/test_images.py b/glance/tests/functional/v2/test_images.py
index 32a2e5818..8b117c8f0 100644
--- a/glance/tests/functional/v2/test_images.py
+++ b/glance/tests/functional/v2/test_images.py
@@ -24,6 +24,7 @@ import uuid
import fixtures
from oslo_limit import exception as ol_exc
+from oslo_limit import limit
from oslo_serialization import jsonutils
from oslo_utils.secretutils import md5
from oslo_utils import units
@@ -7018,6 +7019,13 @@ def get_enforcer_class(limits):
over_limit_info_list=[ol_exc.OverLimitInfo(
name, limits.get(name), current.get(name), delta)])
+ def calculate_usage(self, project_id, names):
+ return {
+ name: limit.ProjectUsage(
+ limits.get(name, 0),
+ self._callback(project_id, [name])[name])
+ for name in names}
+
return FakeEnforcer
diff --git a/glance/tests/unit/base.py b/glance/tests/unit/base.py
index d859dbea4..2cdd8fa55 100644
--- a/glance/tests/unit/base.py
+++ b/glance/tests/unit/base.py
@@ -17,6 +17,7 @@ import os
from unittest import mock
import glance_store as store
+from glance_store._drivers import rbd as rbd_store
from glance_store import location
from oslo_concurrency import lockutils
from oslo_config import cfg
@@ -70,9 +71,13 @@ class MultiStoreClearingUnitTest(test_utils.BaseTestCase):
:param passing_config: making store driver passes basic configurations.
:returns: the number of how many store drivers been loaded.
"""
+ rbd_store.rados = mock.MagicMock()
+ rbd_store.rbd = mock.MagicMock()
+ rbd_store.Store._set_url_prefix = mock.MagicMock()
self.config(enabled_backends={'fast': 'file', 'cheap': 'file',
'readonly_store': 'http',
- 'fast-cinder': 'cinder'})
+ 'fast-cinder': 'cinder',
+ 'fast-rbd': 'rbd'})
store.register_store_opts(CONF)
self.config(default_backend='fast',
@@ -82,6 +87,8 @@ class MultiStoreClearingUnitTest(test_utils.BaseTestCase):
group='fast')
self.config(filesystem_store_datadir=self.test_dir2,
group='cheap')
+ self.config(rbd_store_chunk_size=8688388, rbd_store_pool='images',
+ rbd_thin_provisioning=False, group='fast-rbd')
store.create_multi_stores(CONF)
diff --git a/glance/tests/unit/test_cached_images.py b/glance/tests/unit/test_cached_images.py
index 4617bbdaa..5be3df5ab 100644
--- a/glance/tests/unit/test_cached_images.py
+++ b/glance/tests/unit/test_cached_images.py
@@ -13,30 +13,51 @@
# License for the specific language governing permissions and limitations
# under the License.
-import testtools
+from unittest import mock
+
import webob
-from glance.api import policy
from glance.api.v2 import cached_images
-from glance.common import exception
+import glance.gateway
from glance import image_cache
-
-
-class FakePolicyEnforcer(policy.Enforcer):
- def __init__(self):
- self.default_rule = ''
- self.policy_path = ''
- self.policy_file_mtime = None
- self.policy_file_contents = None
-
- def enforce(self, context, action, target):
- return 'pass'
-
- def check(rule, target, creds, exc=None, *args, **kwargs):
- return 'pass'
-
- def _check(self, context, rule, target, *args, **kwargs):
- return 'pass'
+from glance import notifier
+import glance.tests.unit.utils as unit_test_utils
+import glance.tests.utils as test_utils
+
+
+UUID4 = '6bbe7cc2-eae7-4c0f-b50d-a7160b0c6a86'
+
+
+class FakeImage(object):
+ def __init__(self, id=None, status='active', container_format='ami',
+ disk_format='ami', locations=None):
+ self.id = id or UUID4
+ self.status = status
+ self.container_format = container_format
+ self.disk_format = disk_format
+ self.locations = locations
+ self.owner = unit_test_utils.TENANT1
+ self.created_at = ''
+ self.updated_at = ''
+ self.min_disk = ''
+ self.min_ram = ''
+ self.protected = False
+ self.checksum = ''
+ self.os_hash_algo = ''
+ self.os_hash_value = ''
+ self.size = 0
+ self.virtual_size = 0
+ self.visibility = 'public'
+ self.os_hidden = False
+ self.name = 'foo'
+ self.tags = []
+ self.extra_properties = {}
+ self.member = self.owner
+
+ # NOTE(danms): This fixture looks more like the db object than
+ # the proxy model. This needs fixing all through the tests
+ # below.
+ self.image_id = self.id
class FakeCache(image_cache.ImageCache):
@@ -48,13 +69,14 @@ class FakeCache(image_cache.ImageCache):
pass
def get_cached_images(self):
- return {'id': 'test'}
+ return [{'image_id': 'test'}]
def delete_cached_image(self, image_id):
self.deleted_images.append(image_id)
def delete_all_cached_images(self):
- self.delete_cached_image(self.get_cached_images().get('id'))
+ self.delete_cached_image(
+ self.get_cached_images()[0].get('image_id'))
return 1
def get_queued_images(self):
@@ -74,72 +96,315 @@ class FakeCache(image_cache.ImageCache):
class FakeController(cached_images.CacheController):
def __init__(self):
self.cache = FakeCache()
- self.policy = FakePolicyEnforcer()
+ self.db = unit_test_utils.FakeDB(initialize=False)
+ self.policy = unit_test_utils.FakePolicyEnforcer()
+ self.notifier = unit_test_utils.FakeNotifier()
+ self.store = unit_test_utils.FakeStoreAPI()
+ self.gateway = glance.gateway.Gateway(self.db, self.store,
+ self.notifier, self.policy)
-class TestController(testtools.TestCase):
+class TestController(test_utils.BaseTestCase):
def test_initialization_without_conf(self):
- self.assertRaises(exception.BadDriverConfiguration,
- cached_images.CacheController)
+ # NOTE(abhishekk): Since we are initializing cache driver only
+ # if image_cache_dir is set, here we are checking that cache
+ # object is None when it is not set
+ caching_controller = cached_images.CacheController()
+ self.assertIsNone(caching_controller.cache)
-class TestCachedImages(testtools.TestCase):
+class TestCachedImages(test_utils.BaseTestCase):
def setUp(self):
super(TestCachedImages, self).setUp()
test_controller = FakeController()
self.controller = test_controller
def test_get_cached_images(self):
+ self.config(image_cache_dir='fake_cache_directory')
req = webob.Request.blank('')
req.context = 'test'
result = self.controller.get_cached_images(req)
- self.assertEqual({'cached_images': {'id': 'test'}}, result)
+ self.assertEqual({'cached_images': [{'image_id': 'test'}]}, result)
def test_delete_cached_image(self):
- req = webob.Request.blank('')
- req.context = 'test'
- self.controller.delete_cached_image(req, image_id='test')
- self.assertEqual(['test'], self.controller.cache.deleted_images)
+ self.config(image_cache_dir='fake_cache_directory')
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.controller.delete_cached_image(req, image_id=UUID4)
+ self.assertEqual([UUID4], self.controller.cache.deleted_images)
def test_delete_cached_images(self):
+ self.config(image_cache_dir='fake_cache_directory')
req = webob.Request.blank('')
req.context = 'test'
self.assertEqual({'num_deleted': 1},
self.controller.delete_cached_images(req))
self.assertEqual(['test'], self.controller.cache.deleted_images)
- def test_policy_enforce_forbidden(self):
- def fake_enforce(context, action, target):
- raise exception.Forbidden()
-
- self.controller.policy.enforce = fake_enforce
- req = webob.Request.blank('')
- req.context = 'test'
- self.assertRaises(webob.exc.HTTPForbidden,
- self.controller.get_cached_images, req)
-
def test_get_queued_images(self):
+ self.config(image_cache_dir='fake_cache_directory')
req = webob.Request.blank('')
req.context = 'test'
result = self.controller.get_queued_images(req)
self.assertEqual({'queued_images': {'test': 'passed'}}, result)
def test_queue_image(self):
- req = webob.Request.blank('')
- req.context = 'test'
- self.controller.queue_image(req, image_id='test1')
+ self.config(image_cache_dir='fake_cache_directory')
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.controller.queue_image(req, image_id=UUID4)
def test_delete_queued_image(self):
- req = webob.Request.blank('')
- req.context = 'test'
- self.controller.delete_queued_image(req, 'deleted_img')
- self.assertEqual(['deleted_img'],
- self.controller.cache.deleted_images)
+ self.config(image_cache_dir='fake_cache_directory')
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.controller.delete_queued_image(req, UUID4)
+ self.assertEqual([UUID4],
+ self.controller.cache.deleted_images)
def test_delete_queued_images(self):
+ self.config(image_cache_dir='fake_cache_directory')
req = webob.Request.blank('')
req.context = 'test'
self.assertEqual({'num_deleted': 1},
self.controller.delete_queued_images(req))
self.assertEqual(['deleted_img'],
self.controller.cache.deleted_images)
+
+
+class TestCachedImagesNegative(test_utils.BaseTestCase):
+ def setUp(self):
+ super(TestCachedImagesNegative, self).setUp()
+ test_controller = FakeController()
+ self.controller = test_controller
+
+ def test_get_cached_images_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.get_cached_images, req)
+
+ def test_get_cached_images_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"manage_image_cache": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.get_cached_images,
+ req)
+
+ def test_delete_cached_image_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.delete_cached_image, req,
+ image_id='test')
+
+ def test_delete_cached_image_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"manage_image_cache": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.delete_cached_image,
+ req, image_id=UUID4)
+
+ def test_delete_cached_images_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.delete_cached_images, req)
+
+ def test_delete_cached_images_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"manage_image_cache": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.delete_cached_images,
+ req)
+
+ def test_get_queued_images_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.get_queued_images, req)
+
+ def test_get_queued_images_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"manage_image_cache": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.get_queued_images,
+ req)
+
+ def test_queue_image_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.queue_image,
+ req, image_id='test1')
+
+ def test_queue_image_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"manage_image_cache": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.queue_image,
+ req, image_id=UUID4)
+
+ def test_delete_queued_image_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.delete_queued_image,
+ req, image_id='test1')
+
+ def test_delete_queued_image_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"manage_image_cache": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.delete_queued_image,
+ req, image_id=UUID4)
+
+ def test_delete_queued_images_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.delete_queued_images, req)
+
+ def test_delete_queued_images_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"manage_image_cache": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.delete_queued_images,
+ req)
+
+ def test_delete_cache_entry_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"cache_delete": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.delete_cache_entry,
+ req, image_id=UUID4)
+
+ def test_delete_cache_entry_disabled(self):
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.delete_cache_entry,
+ req, image_id=UUID4)
+
+ def test_delete_non_existing_cache_entries(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ req = unit_test_utils.get_fake_request()
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.delete_cache_entry,
+ req, image_id='non-existing-queued-image')
+
+ def test_clear_cache_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"cache_delete": False}
+ req = unit_test_utils.get_fake_request()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.clear_cache,
+ req)
+
+ def test_clear_cache_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.clear_cache, req)
+
+ def test_cache_clear_invalid_target(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ req = unit_test_utils.get_fake_request()
+ req.headers.update({'x-image-cache-clear-target': 'invalid'})
+ self.assertRaises(webob.exc.HTTPBadRequest,
+ self.controller.clear_cache,
+ req)
+
+ def test_get_cache_state_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.get_cache_state, req)
+
+ def test_get_cache_state_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"cache_list": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.get_cache_state,
+ req)
+
+ def test_queue_image_from_api_disabled(self):
+ req = webob.Request.blank('')
+ req.context = 'test'
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.queue_image_from_api,
+ req, image_id='test1')
+
+ def test_queue_image_from_api_forbidden(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ self.controller.policy.rules = {"cache_image": False}
+ req = unit_test_utils.get_fake_request()
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.queue_image_from_api,
+ req, image_id=UUID4)
+
+ def test_non_active_image_for_queue_api(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ req = unit_test_utils.get_fake_request()
+ for status in ('saving', 'queued', 'pending_delete',
+ 'deactivated', 'importing', 'uploading'):
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ mock_get.return_value = FakeImage(status=status)
+ self.assertRaises(webob.exc.HTTPBadRequest,
+ self.controller.queue_image_from_api,
+ req, image_id=UUID4)
+
+ def test_queue_api_non_existing_image_(self):
+ self.config(image_cache_dir='fake_cache_directory')
+ req = unit_test_utils.get_fake_request()
+ self.assertRaises(webob.exc.HTTPNotFound,
+ self.controller.queue_image_from_api,
+ req, image_id='non-existing-image-id')
diff --git a/glance/tests/unit/test_versions.py b/glance/tests/unit/test_versions.py
index e296bc7b6..4acda1fa8 100644
--- a/glance/tests/unit/test_versions.py
+++ b/glance/tests/unit/test_versions.py
@@ -28,9 +28,22 @@ from glance.tests.unit import base
# make this public so it doesn't need to be repeated for the
# functional tests
-def get_versions_list(url, enabled_backends=False):
+def get_versions_list(url, enabled_backends=False,
+ enabled_cache=False):
image_versions = [
{
+ 'id': 'v2.15',
+ 'status': 'CURRENT',
+ 'links': [{'rel': 'self',
+ 'href': '%s/v2/' % url}],
+ },
+ {
+ 'id': 'v2.9',
+ 'status': 'SUPPORTED',
+ 'links': [{'rel': 'self',
+ 'href': '%s/v2/' % url}],
+ },
+ {
'id': 'v2.7',
'status': 'SUPPORTED',
'links': [{'rel': 'self',
@@ -82,12 +95,24 @@ def get_versions_list(url, enabled_backends=False):
if enabled_backends:
image_versions = [
{
- 'id': 'v2.12',
+ 'id': 'v2.15',
'status': 'CURRENT',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
},
{
+ 'id': 'v2.13',
+ 'status': 'SUPPORTED',
+ 'links': [{'rel': 'self',
+ 'href': '%s/v2/' % url}],
+ },
+ {
+ 'id': 'v2.12',
+ 'status': 'SUPPORTED',
+ 'links': [{'rel': 'self',
+ 'href': '%s/v2/' % url}],
+ },
+ {
'id': 'v2.11',
'status': 'SUPPORTED',
'links': [{'rel': 'self',
@@ -111,11 +136,12 @@ def get_versions_list(url, enabled_backends=False):
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
}
- ] + image_versions
- else:
- image_versions.insert(0, {
- 'id': 'v2.9',
- 'status': 'CURRENT',
+ ] + image_versions[2:]
+
+ if enabled_cache:
+ image_versions.insert(1, {
+ 'id': 'v2.14',
+ 'status': 'SUPPORTED',
'links': [{'rel': 'self',
'href': '%s/v2/' % url}],
})
@@ -145,6 +171,14 @@ class VersionsTest(base.IsolatedUnitTest):
enabled_backends=True)
self.assertEqual(expected, results)
+ self.config(image_cache_dir='/tmp/cache')
+ res = versions.Controller().index(req)
+ results = jsonutils.loads(res.body)['versions']
+ expected = get_versions_list('http://127.0.0.1:9292',
+ enabled_backends=True,
+ enabled_cache=True)
+ self.assertEqual(expected, results)
+
def test_get_version_list_public_endpoint(self):
req = webob.Request.blank('/', base_url='http://127.0.0.1:9292/')
req.accept = 'application/json'
@@ -164,6 +198,14 @@ class VersionsTest(base.IsolatedUnitTest):
enabled_backends=True)
self.assertEqual(expected, results)
+ self.config(image_cache_dir='/tmp/cache')
+ res = versions.Controller().index(req)
+ results = jsonutils.loads(res.body)['versions']
+ expected = get_versions_list('https://example.com:9292',
+ enabled_backends=True,
+ enabled_cache=True)
+ self.assertEqual(expected, results)
+
def test_get_version_list_secure_proxy_ssl_header(self):
self.config(secure_proxy_ssl_header='HTTP_X_FORWARDED_PROTO')
url = 'http://localhost:9292'
@@ -182,6 +224,14 @@ class VersionsTest(base.IsolatedUnitTest):
expected = get_versions_list(url, enabled_backends=True)
self.assertEqual(expected, results)
+ self.config(image_cache_dir='/tmp/cache')
+ res = versions.Controller().index(req)
+ results = jsonutils.loads(res.body)['versions']
+ expected = get_versions_list(url,
+ enabled_backends=True,
+ enabled_cache=True)
+ self.assertEqual(expected, results)
+
def test_get_version_list_secure_proxy_ssl_header_https(self):
self.config(secure_proxy_ssl_header='HTTP_X_FORWARDED_PROTO')
url = 'http://localhost:9292'
@@ -202,6 +252,14 @@ class VersionsTest(base.IsolatedUnitTest):
expected = get_versions_list(ssl_url, enabled_backends=True)
self.assertEqual(expected, results)
+ self.config(image_cache_dir='/tmp/cache')
+ res = versions.Controller().index(req)
+ results = jsonutils.loads(res.body)['versions']
+ expected = get_versions_list(ssl_url,
+ enabled_backends=True,
+ enabled_cache=True)
+ self.assertEqual(expected, results)
+
def test_get_version_list_for_external_app(self):
url = 'http://customhost:9292/app/api'
req = webob.Request.blank('/', base_url=url)
@@ -219,6 +277,13 @@ class VersionsTest(base.IsolatedUnitTest):
expected = get_versions_list(url, enabled_backends=True)
self.assertEqual(expected, results)
+ self.config(image_cache_dir='/tmp/cache')
+ res = versions.Controller().index(req)
+ results = jsonutils.loads(res.body)['versions']
+ expected = get_versions_list(url,
+ enabled_backends=True,
+ enabled_cache=True)
+
class VersionNegotiationTest(base.IsolatedUnitTest):
@@ -322,15 +387,31 @@ class VersionNegotiationTest(base.IsolatedUnitTest):
self.middleware.process_request(request)
self.assertEqual('/v2/images', request.path_info)
- # version 2.13 does not exist
- def test_request_url_v2_13_default_unsupported(self):
+ def test_request_url_v2_13_enabled_supported(self):
request = webob.Request.blank('/v2.13/images')
+ self.middleware.process_request(request)
+ self.assertEqual('/v2/images', request.path_info)
+
+ def test_request_url_v2_14_enabled_supported(self):
+ self.config(image_cache_dir='/tmp/cache')
+ request = webob.Request.blank('/v2.14/images')
+ self.middleware.process_request(request)
+ self.assertEqual('/v2/images', request.path_info)
+
+ def test_request_url_v2_15_enabled_supported(self):
+ request = webob.Request.blank('/v2.15/images')
+ self.middleware.process_request(request)
+ self.assertEqual('/v2/images', request.path_info)
+
+ # version 2.16 does not exist
+ def test_request_url_v2_16_default_unsupported(self):
+ request = webob.Request.blank('/v2.16/images')
resp = self.middleware.process_request(request)
self.assertIsInstance(resp, versions.Controller)
- def test_request_url_v2_13_enabled_unsupported(self):
+ def test_request_url_v2_16_enabled_unsupported(self):
self.config(enabled_backends='slow:one,fast:two')
- request = webob.Request.blank('/v2.13/images')
+ request = webob.Request.blank('/v2.16/images')
resp = self.middleware.process_request(request)
self.assertIsInstance(resp, versions.Controller)
diff --git a/glance/tests/unit/v2/test_cache_management_api.py b/glance/tests/unit/v2/test_cache_management_api.py
new file mode 100644
index 000000000..ea45f6848
--- /dev/null
+++ b/glance/tests/unit/v2/test_cache_management_api.py
@@ -0,0 +1,123 @@
+# Copyright 2021 Red Hat Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from unittest import mock
+
+from glance.api.v2 import cached_images
+from glance import notifier
+import glance.tests.unit.utils as unit_test_utils
+import glance.tests.utils as test_utils
+
+
+UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
+
+
+class FakeImage(object):
+ def __init__(self, id=None, status='active', container_format='ami',
+ disk_format='ami', locations=None):
+ self.id = id or UUID1
+ self.status = status
+ self.container_format = container_format
+ self.disk_format = disk_format
+ self.locations = locations
+ self.owner = unit_test_utils.TENANT1
+ self.created_at = ''
+ self.updated_at = ''
+ self.min_disk = ''
+ self.min_ram = ''
+ self.protected = False
+ self.checksum = ''
+ self.os_hash_algo = ''
+ self.os_hash_value = ''
+ self.size = 0
+ self.virtual_size = 0
+ self.visibility = 'public'
+ self.os_hidden = False
+ self.name = 'foo'
+ self.tags = []
+ self.extra_properties = {}
+ self.member = self.owner
+
+ # NOTE(danms): This fixture looks more like the db object than
+ # the proxy model. This needs fixing all through the tests
+ # below.
+ self.image_id = self.id
+
+
+class TestCacheManageAPI(test_utils.BaseTestCase):
+
+ def setUp(self):
+ super(TestCacheManageAPI, self).setUp()
+ self.req = unit_test_utils.get_fake_request()
+
+ def _main_test_helper(self, argv, status='active', image_mock=True):
+ with mock.patch.object(notifier.ImageRepoProxy,
+ 'get') as mock_get:
+ image = FakeImage(status=status)
+ mock_get.return_value = image
+ with mock.patch.object(cached_images.CacheController,
+ '_enforce') as e:
+ with mock.patch('glance.image_cache.ImageCache') as ic:
+ cc = cached_images.CacheController()
+ cc.cache = ic
+ c_calls = []
+ c_calls += argv[0].split(',')
+ for call in c_calls:
+ mock.patch.object(ic, call)
+ test_call = getattr(cc, argv[1])
+ new_policy = argv[2]
+ args = []
+ if len(argv) == 4:
+ args = argv[3:]
+ test_call(self.req, *args)
+ if image_mock:
+ e.assert_called_once_with(self.req, image=image,
+ new_policy=new_policy)
+ else:
+ e.assert_called_once_with(self.req,
+ new_policy=new_policy)
+ mcs = []
+ for method in ic.method_calls:
+ mcs.append(str(method))
+ for call in c_calls:
+ if args == []:
+ args.append("")
+ elif args[0] and not args[0].endswith("'"):
+ args[0] = "'" + args[0] + "'"
+ self.assertIn("call." + call + "(" + args[0] + ")",
+ mcs)
+ self.assertEqual(len(c_calls), len(mcs))
+
+ def test_delete_cache_entry(self):
+ self._main_test_helper(['delete_cached_image,delete_queued_image',
+ 'delete_cache_entry',
+ 'cache_delete',
+ UUID1])
+
+ def test_clear_cache(self):
+ self._main_test_helper(
+ ['delete_all_cached_images,delete_all_queued_images',
+ 'clear_cache',
+ 'cache_delete'], image_mock=False)
+
+ def test_get_cache_state(self):
+ self._main_test_helper(['get_cached_images,get_queued_images',
+ 'get_cache_state',
+ 'cache_list'], image_mock=False)
+
+ def test_queue_image_from_api(self):
+ self._main_test_helper(['queue_image',
+ 'queue_image_from_api',
+ 'cache_image',
+ UUID1])
diff --git a/glance/tests/unit/v2/test_discovery_stores.py b/glance/tests/unit/v2/test_discovery_stores.py
index 0f1fad05d..111064767 100644
--- a/glance/tests/unit/v2/test_discovery_stores.py
+++ b/glance/tests/unit/v2/test_discovery_stores.py
@@ -39,7 +39,8 @@ class TestInfoControllers(base.MultiStoreClearingUnitTest):
req)
def test_get_stores(self):
- available_stores = ['cheap', 'fast', 'readonly_store', 'fast-cinder']
+ available_stores = ['cheap', 'fast', 'readonly_store', 'fast-cinder',
+ 'fast-rbd']
req = unit_test_utils.get_fake_request()
output = self.controller.get_stores(req)
self.assertIn('stores', output)
@@ -48,7 +49,8 @@ class TestInfoControllers(base.MultiStoreClearingUnitTest):
self.assertIn(stores['id'], available_stores)
def test_get_stores_read_only_store(self):
- available_stores = ['cheap', 'fast', 'readonly_store', 'fast-cinder']
+ available_stores = ['cheap', 'fast', 'readonly_store', 'fast-cinder',
+ 'fast-rbd']
req = unit_test_utils.get_fake_request()
output = self.controller.get_stores(req)
self.assertIn('stores', output)
@@ -72,3 +74,28 @@ class TestInfoControllers(base.MultiStoreClearingUnitTest):
self.assertEqual(2, len(output['stores']))
for stores in output["stores"]:
self.assertFalse(stores["id"].startswith("os_glance_"))
+
+ def test_get_stores_detail(self):
+ available_stores = ['cheap', 'fast', 'readonly_store', 'fast-cinder',
+ 'fast-rbd']
+ available_store_type = ['file', 'file', 'http', 'cinder', 'rbd']
+ req = unit_test_utils.get_fake_request(roles=['admin'])
+ output = self.controller.get_stores_detail(req)
+ self.assertIn('stores', output)
+ for stores in output['stores']:
+ self.assertIn('id', stores)
+ self.assertIn(stores['id'], available_stores)
+ self.assertIn(stores['type'], available_store_type)
+ self.assertIsNotNone(stores['properties'])
+ if stores['id'] == 'fast-rbd':
+ self.assertIn('chunk_size', stores['properties'])
+ self.assertIn('pool', stores['properties'])
+ self.assertIn('thin_provisioning', stores['properties'])
+ else:
+ self.assertEqual({}, stores['properties'])
+
+ def test_get_stores_detail_non_admin(self):
+ req = unit_test_utils.get_fake_request()
+ self.assertRaises(webob.exc.HTTPForbidden,
+ self.controller.get_stores_detail,
+ req)
diff --git a/glance/tests/unit/v2/test_v2_policy.py b/glance/tests/unit/v2/test_v2_policy.py
index 9bb296a55..ae04c7de8 100644
--- a/glance/tests/unit/v2/test_v2_policy.py
+++ b/glance/tests/unit/v2/test_v2_policy.py
@@ -780,16 +780,59 @@ class TestTasksAPIPolicy(APIPolicyBase):
mock.ANY)
-class TestCacheImageAPIPolicy(APIPolicyBase):
+class TestCacheImageAPIPolicy(utils.BaseTestCase):
def setUp(self):
super(TestCacheImageAPIPolicy, self).setUp()
self.enforcer = mock.MagicMock()
self.context = mock.MagicMock()
- self.policy = policy.CacheImageAPIPolicy(
- self.context, enforcer=self.enforcer)
def test_manage_image_cache(self):
+ self.policy = policy.CacheImageAPIPolicy(
+ self.context, enforcer=self.enforcer,
+ policy_str='manage_image_cache')
self.policy.manage_image_cache()
self.enforcer.enforce.assert_called_once_with(self.context,
'manage_image_cache',
mock.ANY)
+
+ def test_manage_image_cache_with_cache_delete(self):
+ self.policy = policy.CacheImageAPIPolicy(
+ self.context, enforcer=self.enforcer,
+ policy_str='cache_delete')
+ self.policy.manage_image_cache()
+ self.enforcer.enforce.assert_called_once_with(self.context,
+ 'cache_delete',
+ mock.ANY)
+
+ def test_manage_image_cache_with_cache_list(self):
+ self.policy = policy.CacheImageAPIPolicy(
+ self.context, enforcer=self.enforcer,
+ policy_str='cache_list')
+ self.policy.manage_image_cache()
+ self.enforcer.enforce.assert_called_once_with(self.context,
+ 'cache_list',
+ mock.ANY)
+
+ def test_manage_image_cache_with_cache_image(self):
+ self.policy = policy.CacheImageAPIPolicy(
+ self.context, enforcer=self.enforcer,
+ policy_str='cache_image')
+ self.policy.manage_image_cache()
+ self.enforcer.enforce.assert_called_once_with(self.context,
+ 'cache_image',
+ mock.ANY)
+
+
+class TestDiscoveryAPIPolicy(APIPolicyBase):
+ def setUp(self):
+ super(TestDiscoveryAPIPolicy, self).setUp()
+ self.enforcer = mock.MagicMock()
+ self.context = mock.MagicMock()
+ self.policy = policy.DiscoveryAPIPolicy(
+ self.context, enforcer=self.enforcer)
+
+ def test_stores_info_detail(self):
+ self.policy.stores_info_detail()
+ self.enforcer.enforce.assert_called_once_with(self.context,
+ 'stores_info_detail',
+ mock.ANY)
diff --git a/playbooks/enable-fips.yaml b/playbooks/enable-fips.yaml
new file mode 100644
index 000000000..bc1dc04ea
--- /dev/null
+++ b/playbooks/enable-fips.yaml
@@ -0,0 +1,3 @@
+- hosts: all
+ roles:
+ - enable-fips
diff --git a/releasenotes/notes/added-quota-usage-api-f1914054132f2021.yaml b/releasenotes/notes/added-quota-usage-api-f1914054132f2021.yaml
new file mode 100644
index 000000000..35da85ee8
--- /dev/null
+++ b/releasenotes/notes/added-quota-usage-api-f1914054132f2021.yaml
@@ -0,0 +1,9 @@
+---
+features:
+ - |
+ This release brings additional functionality to the unified quota
+ work done in the previous release. A usage API is now available,
+ which provides a way for users to see their current quota limits
+ and their resource usage against those limits. For more
+ information, see the discovery section in the `api-ref
+ <https://developer.openstack.org/api-ref/image/v2/index.html#image-service-info-discovery>`_.
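A rough, non-authoritative sketch of calling the usage API described in the note above; the endpoint path (GET /v2/info/usage), the endpoint URL, and the token handling are assumptions to verify against the api-ref linked in the note:

    # Hedged sketch only; confirm the path and the response shape in the api-ref.
    import requests

    GLANCE_URL = 'http://controller:9292'   # assumption: your Glance endpoint
    TOKEN = 'gAAAA...'                       # assumption: a valid Keystone token

    resp = requests.get(GLANCE_URL + '/v2/info/usage',
                        headers={'X-Auth-Token': TOKEN})
    resp.raise_for_status()
    # Expected: a JSON body mapping each quota to its limit and current usage.
    print(resp.json())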
diff --git a/releasenotes/notes/added-store-detail-api-215810aa85dfbb99.yaml b/releasenotes/notes/added-store-detail-api-215810aa85dfbb99.yaml
new file mode 100644
index 000000000..ef67e48cc
--- /dev/null
+++ b/releasenotes/notes/added-store-detail-api-215810aa85dfbb99.yaml
@@ -0,0 +1,6 @@
+---
+features:
+ - |
+ This release brings additional functionality to the stores API.
+ The new stores detail API exposes store-specific configuration
+ information for each enabled store.
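As a hedged illustration of the stores detail API mentioned above: the admin-only behaviour is inferred from test_get_stores_detail_non_admin earlier in this change, while the path GET /v2/info/stores/detail and the endpoint/token values are assumptions to verify against the api-ref:

    import requests

    GLANCE_URL = 'http://controller:9292'   # assumption: your Glance endpoint
    ADMIN_TOKEN = 'gAAAA...'                 # assumption: an admin-scoped token

    resp = requests.get(GLANCE_URL + '/v2/info/stores/detail',
                        headers={'X-Auth-Token': ADMIN_TOKEN})
    resp.raise_for_status()
    for store in resp.json().get('stores', []):
        # Per test_get_stores_detail above, each entry carries an 'id',
        # a 'type' (file, rbd, cinder, ...) and a 'properties' mapping.
        print(store['id'], store['type'], store.get('properties', {}))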
diff --git a/releasenotes/notes/cache-api-b806ccfb8c5d9bb6.yaml b/releasenotes/notes/cache-api-b806ccfb8c5d9bb6.yaml
new file mode 100644
index 000000000..d061fb9f1
--- /dev/null
+++ b/releasenotes/notes/cache-api-b806ccfb8c5d9bb6.yaml
@@ -0,0 +1,9 @@
+---
+features:
+ - |
+ This release introduces new APIs for cache-related operations. The new
+ version of the cache API also lets administrators manage the image cache
+ on dedicated glance nodes. For more information, see the
+ ``Cache Manage`` section in the `api-ref-guide
+ <https://developer.openstack.org/api-ref/image/v2/index.html#cache-manage>`_.
+
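A minimal sketch of the cache-manage calls this note refers to; the paths below are assumptions based on the ``Cache Manage`` api-ref section linked in the note, and the endpoint, token, and image id are placeholders to adapt to your deployment:

    import requests

    GLANCE_URL = 'http://controller:9292'   # assumption: your Glance endpoint
    TOKEN = 'gAAAA...'                       # assumption: a cache-admin token
    IMAGE_ID = '6bbe7cc2-eae7-4c0f-b50d-a7160b0c6a86'  # assumption: an active image

    headers = {'X-Auth-Token': TOKEN}
    # Queue an image for caching.
    requests.put(GLANCE_URL + '/v2/cache/' + IMAGE_ID, headers=headers)
    # List cached and queued images.
    print(requests.get(GLANCE_URL + '/v2/cache', headers=headers).json())
    # Evict a single entry, then clear the whole cache.
    requests.delete(GLANCE_URL + '/v2/cache/' + IMAGE_ID, headers=headers)
    requests.delete(GLANCE_URL + '/v2/cache', headers=headers)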
diff --git a/requirements.txt b/requirements.txt
index 42778574c..d14c82561 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -36,7 +36,7 @@ pyOpenSSL>=17.1.0 # Apache-2.0
oslo.db>=5.0.0 # Apache-2.0
oslo.i18n>=5.0.0 # Apache-2.0
-oslo.limit>=1.0.0 # Apache-2.0
+oslo.limit>=1.4.0 # Apache-2.0
oslo.log>=4.5.0 # Apache-2.0
oslo.messaging>=5.29.0,!=9.0.0 # Apache-2.0
oslo.middleware>=3.31.0 # Apache-2.0
diff --git a/setup.cfg b/setup.cfg
index 0925b8c44..32b88a939 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,6 +14,7 @@ classifier =
License :: OSI Approved :: Apache Software License
Operating System :: POSIX :: Linux
Programming Language :: Python
+ Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
diff --git a/tools/test-setup.sh b/tools/test-setup.sh
index 505a58cb7..2d8ed73c1 100755
--- a/tools/test-setup.sh
+++ b/tools/test-setup.sh
@@ -15,6 +15,34 @@ DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave}
DB_USER=openstack_citest
DB_PW=openstack_citest
+function set_conf_line {
+ # parameters: file regex value
+ # check if the regex occurs in the file
+ # If so, replace with the value.
+ # If not, append the value to the end of the file.
+ sudo sh -c "grep -q -e '$2' $1 && \
+ sed -i 's|$2|$3|g' $1 || \
+ echo '$3' >> $1"
+}
+
+if $(egrep -q "^.*(centos:centos:|cloudlinux:cloudlinux:|redhat:enterprise_linux:)[78].*$" /etc/*release); then
+ # mysql needs to be started on centos/rhel
+ sudo systemctl restart mariadb.service
+
+ # postgres setup for centos
+ # make sure to use scram-sha-256 instead of md5 for fips!
+ sudo postgresql-setup --initdb
+ PG_CONF=/var/lib/pgsql/data/postgresql.conf
+ set_conf_line $PG_CONF '^password_encryption =.*' 'password_encryption = scram-sha-256'
+
+ PG_HBA=/var/lib/pgsql/data/pg_hba.conf
+ set_conf_line $PG_HBA '^local[ \t]*all[ \t]*all.*' 'local all all peer'
+ set_conf_line $PG_HBA '^host[ \t]*all[ \t]*all[ \t]*127.0.0.1\/32.*' 'host all all 127.0.0.1/32 scram-sha-256'
+ set_conf_line $PG_HBA '^host[ \t]*all[ \t]*all[ \t]*::1\/128.*' 'host all all ::1/128 scram-sha-256'
+
+ sudo systemctl restart postgresql.service
+fi
+
sudo -H mysqladmin -u root password $DB_ROOT_PW
# It's best practice to remove anonymous users from the database. If