Migrate object_store to resource2/proxy2

This is the last thing we need to migrate before deleting the old
resource/proxy classes.

There is still a good pile of upload- and download-related functionality
that we should port over from the openstack.cloud code. One piece of it,
streaming the object data, is added here. We can come back around later
and get all of the large-object upload code supported.
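
For illustration, a rough usage sketch of the streaming call added here
(the connection object, container and object names are placeholders, not
part of this change):

    # Hypothetical usage of the new stream_object proxy call.
    # Assumes an authenticated Connection as `conn` and an existing
    # container/object; the names are illustrative only.
    with open('downloaded.img', 'wb') as out:
        # stream_object yields the object data in chunk_size-byte pieces
        # via the underlying response.iter_content().
        for chunk in conn.object_store.stream_object(
                'my-object', container='my-container', chunk_size=1024):
            out.write(chunk)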

Add alias support to _BaseComponent, much like prop used to have,
because Swift needs it.
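
Roughly, the alias behavior looks like this (a simplified sketch, not the
full _BaseComponent implementation, which also handles typing and
alternate_id):

    # Simplified sketch of the alias fallback described above.
    class Component(object):
        def __init__(self, name, default=None, alias=None):
            self.name = name
            self.default = default
            self.alias = alias

        def __get__(self, instance, owner):
            if instance is None:
                return self
            try:
                # `_attrs` stands in for the real per-instance storage.
                return instance._attrs[self.name]
            except KeyError:
                # Swift reports e.g. object counts as 'count' in listing
                # bodies but as 'x-container-object-count' on HEAD, so an
                # attribute can fall back to its aliased sibling.
                if self.alias:
                    return getattr(instance, self.alias)
                return self.default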

Port some of the unit tests to requests-mock, because it was just easier
to do that. Doing so uncovered some issues with case sensitivity and
headers, so there are some changes to resource2.Resource to get case
sensitivity sorted out. TODO comments have been left indicating a few
places for further cleanup, but those are internal and non-essential.
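
The header handling leans on requests' CaseInsensitiveDict; a small
sketch of the behavior being relied on (not the resource2 code itself):

    from requests import structures

    # CaseInsensitiveDict treats 'Content-Type' and 'content-type' as the
    # same key, which is what HTTP header comparisons need.
    headers = structures.CaseInsensitiveDict(
        {'X-Container-Bytes-Used': '630666'})
    assert headers['x-container-bytes-used'] == '630666'
    assert 'X-CONTAINER-BYTES-USED' in headers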

The default value of has_body is changed to False for HEAD calls,
because a HEAD response has, by definition, no body.
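
Condensed, the mechanics in resource2 look like this (a sketch of the
change, with the request plumbing elided):

    # Sketch: has_body becomes a class-level default consulted by
    # _translate_response, and head() forces it off.
    class Resource(object):
        #: Do responses for this resource have bodies
        has_body = True

        def _translate_response(self, response, has_body=None):
            if has_body is None:
                has_body = self.has_body
            if has_body:
                body = response.json()  # only parse a body when expected
            # ... header handling elided ...

        def head(self, session):
            response = session.head(self.base_path)  # real code builds the URL
            # HEAD responses have no body, so never try to parse one.
            self._translate_response(response, has_body=False)
            return self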

Change-Id: I8c4f18f78a77149e23b98f78af82b1d25ab7c4cf
Author: Monty Taylor
Date:   2018-01-12 14:59:47 -06:00
parent  536f347a6c
commit  34bae5a192

15 changed files with 646 additions and 455 deletions

View File

@@ -29,7 +29,7 @@ from openstack import exceptions
 from openstack import task_manager as _task_manager


-def _extract_name(url):
+def _extract_name(url, service_type=None):
     '''Produce a key name to use in logging/metrics from the URL path.

     We want to be able to logic/metric sane general things, so we pull
@@ -81,7 +81,10 @@ def _extract_name(url):
     # Getting the root of an endpoint is doing version discovery
     if not name_parts:
-        name_parts = ['discovery']
+        if service_type == 'object-store':
+            name_parts = ['account']
+        else:
+            name_parts = ['discovery']

     # Strip out anything that's empty or None
     return [part for part in name_parts if part]
@@ -124,8 +127,14 @@ class OpenStackSDKAdapter(adapter.Adapter):
     def request(
             self, url, method, run_async=False, error_message=None,
             raise_exc=False, connect_retries=1, *args, **kwargs):
-        name_parts = _extract_name(url)
-        name = '.'.join([self.service_type, method] + name_parts)
+        name_parts = _extract_name(url, self.service_type)
+        # TODO(mordred) This if is in service of unit tests that are making
+        # calls without a service_type. It should be fixable once we shift
+        # to requests-mock and stop mocking internals.
+        if self.service_type:
+            name = '.'.join([self.service_type, method] + name_parts)
+        else:
+            name = '.'.join([method] + name_parts)

         request_method = functools.partial(
             super(OpenStackSDKAdapter, self).request, url, method)

View File

@@ -11,13 +11,17 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+from openstack import exceptions
 from openstack.object_store import object_store_service
-from openstack import resource
+from openstack import resource2 as resource


 class BaseResource(resource.Resource):

     service = object_store_service.ObjectStoreService()

+    update_method = 'POST'
+    create_method = 'PUT'
+
     #: Metadata stored for this resource. *Type: dict*
     metadata = dict()
@@ -25,7 +29,7 @@ class BaseResource(resource.Resource):
     _system_metadata = dict()

     def _calculate_headers(self, metadata):
-        headers = dict()
+        headers = {}
         for key in metadata:
             if key in self._system_metadata.keys():
                 header = self._system_metadata[key]
@@ -40,52 +44,34 @@ class BaseResource(resource.Resource):
         return headers

     def set_metadata(self, session, metadata):
-        url = self._get_url(self, self.id)
-        session.post(url,
-                     headers=self._calculate_headers(metadata))
+        request = self._prepare_request()
+        response = session.post(
+            request.url,
+            headers=self._calculate_headers(metadata))
+        self._translate_response(response, has_body=False)
+        response = session.head(request.url)
+        self._translate_response(response, has_body=False)
+        return self

     def delete_metadata(self, session, keys):
-        url = self._get_url(self, self.id)
+        request = self._prepare_request()
         headers = {key: '' for key in keys}
-        session.post(url,
-                     headers=self._calculate_headers(headers))
+        response = session.post(
+            request.url,
+            headers=self._calculate_headers(headers))
+        exceptions.raise_from_response(
+            response, error_message="Error deleting metadata keys")
+        return self

-    def _set_metadata(self):
+    def _set_metadata(self, headers):
         self.metadata = dict()

-        headers = self.get_headers()
         for header in headers:
             if header.startswith(self._custom_metadata_prefix):
                 key = header[len(self._custom_metadata_prefix):].lower()
                 self.metadata[key] = headers[header]

-    def get(self, session, include_headers=False, args=None):
-        super(BaseResource, self).get(session, include_headers, args)
-        self._set_metadata()
-        return self
-
-    def head(self, session):
-        super(BaseResource, self).head(session)
-        self._set_metadata()
-        return self
-
-    @classmethod
-    def update_by_id(cls, session, resource_id, attrs, path_args=None):
-        """Update a Resource with the given attributes.
-
-        :param session: The session to use for making this request.
-        :type session: :class:`~keystoneauth1.adapter.Adapter`
-        :param resource_id: This resource's identifier, if needed by
-                            the request. The default is ``None``.
-        :param dict attrs: The attributes to be sent in the body
-                           of the request.
-        :param dict path_args: This parameter is sent by the base
-                               class but is ignored for this method.
-
-        :return: A ``dict`` representing the response headers.
-        """
-        url = cls._get_url(None, resource_id)
-        headers = attrs.get(resource.HEADERS, dict())
-        headers['Accept'] = ''
-        return session.post(url,
-                            headers=headers).headers
+    def _translate_response(self, response, has_body=None, error_message=None):
+        super(BaseResource, self)._translate_response(
+            response, has_body=has_body, error_message=error_message)
+        self._set_metadata(response.headers)

View File

@@ -13,11 +13,15 @@
from openstack.object_store.v1 import account as _account from openstack.object_store.v1 import account as _account
from openstack.object_store.v1 import container as _container from openstack.object_store.v1 import container as _container
from openstack.object_store.v1 import obj as _obj from openstack.object_store.v1 import obj as _obj
from openstack import proxy from openstack import proxy2 as proxy
class Proxy(proxy.BaseProxy): class Proxy(proxy.BaseProxy):
Account = _account.Account
Container = _container.Container
Object = _obj.Object
def get_account_metadata(self): def get_account_metadata(self):
"""Get metadata for this account. """Get metadata for this account.
@@ -54,11 +58,12 @@ class Proxy(proxy.BaseProxy):
:rtype: A generator of :rtype: A generator of
:class:`~openstack.object_store.v1.container.Container` objects. :class:`~openstack.object_store.v1.container.Container` objects.
""" """
return _container.Container.list(self, **query) return self._list(_container.Container, paginated=True, **query)
def create_container(self, **attrs): def create_container(self, name, **attrs):
"""Create a new container from attributes """Create a new container from attributes
:param container: Name of the container to create.
:param dict attrs: Keyword arguments which will be used to create :param dict attrs: Keyword arguments which will be used to create
a :class:`~openstack.object_store.v1.container.Container`, a :class:`~openstack.object_store.v1.container.Container`,
comprised of the properties on the Container class. comprised of the properties on the Container class.
@@ -66,7 +71,7 @@ class Proxy(proxy.BaseProxy):
:returns: The results of container creation :returns: The results of container creation
:rtype: :class:`~openstack.object_store.v1.container.Container` :rtype: :class:`~openstack.object_store.v1.container.Container`
""" """
return self._create(_container.Container, **attrs) return self._create(_container.Container, name=name, **attrs)
def delete_container(self, container, ignore_missing=True): def delete_container(self, container, ignore_missing=True):
"""Delete a container """Delete a container
@@ -122,6 +127,7 @@ class Proxy(proxy.BaseProxy):
""" """
res = self._get_resource(_container.Container, container) res = self._get_resource(_container.Container, container)
res.set_metadata(self, metadata) res.set_metadata(self, metadata)
return res
def delete_container_metadata(self, container, keys): def delete_container_metadata(self, container, keys):
"""Delete metadata for a container. """Delete metadata for a container.
@@ -133,6 +139,7 @@ class Proxy(proxy.BaseProxy):
""" """
res = self._get_resource(_container.Container, container) res = self._get_resource(_container.Container, container)
res.delete_metadata(self, keys) res.delete_metadata(self, keys)
return res
def objects(self, container, **query): def objects(self, container, **query):
"""Return a generator that yields the Container's objects. """Return a generator that yields the Container's objects.
@@ -147,21 +154,21 @@ class Proxy(proxy.BaseProxy):
:rtype: A generator of :rtype: A generator of
:class:`~openstack.object_store.v1.obj.Object` objects. :class:`~openstack.object_store.v1.obj.Object` objects.
""" """
container = _container.Container.from_id(container) container = self._get_container_name(container=container)
objs = _obj.Object.list(self, for obj in self._list(
path_args={"container": container.name}, _obj.Object, container=container,
**query) paginated=True, **query):
for obj in objs: obj.container = container
obj.container = container.name
yield obj yield obj
def _get_container_name(self, obj, container): def _get_container_name(self, obj=None, container=None):
if isinstance(obj, _obj.Object): if obj is not None:
obj = self._get_resource(_obj.Object, obj)
if obj.container is not None: if obj.container is not None:
return obj.container return obj.container
if container is not None: if container is not None:
container = _container.Container.from_id(container) container = self._get_resource(_container.Container, container)
return container.name return container.name
raise ValueError("container must be specified") raise ValueError("container must be specified")
@@ -181,52 +188,69 @@ class Proxy(proxy.BaseProxy):
:raises: :class:`~openstack.exceptions.ResourceNotFound` :raises: :class:`~openstack.exceptions.ResourceNotFound`
when no resource can be found. when no resource can be found.
""" """
# TODO(briancurtin): call this download_object and make sure it's container_name = self._get_container_name(
# just returning the raw data, like download_image does obj=obj, container=container)
container_name = self._get_container_name(obj, container) return self._get(_obj.Object, obj, container=container_name)
return self._get(_obj.Object, obj, def download_object(self, obj, container=None, **attrs):
path_args={"container": container_name}) """Download the data contained inside an object.
def download_object(self, obj, container=None, path=None):
"""Download the data contained inside an object to disk.
:param obj: The value can be the name of an object or a :param obj: The value can be the name of an object or a
:class:`~openstack.object_store.v1.obj.Object` instance. :class:`~openstack.object_store.v1.obj.Object` instance.
:param container: The value can be the name of a container or a :param container: The value can be the name of a container or a
:class:`~openstack.object_store.v1.container.Container` :class:`~openstack.object_store.v1.container.Container`
instance. instance.
:param path str: Location to write the object contents.
:raises: :class:`~openstack.exceptions.ResourceNotFound` :raises: :class:`~openstack.exceptions.ResourceNotFound`
when no resource can be found. when no resource can be found.
""" """
# TODO(briancurtin): download_object should really have the behavior container_name = self._get_container_name(
# of get_object, and this writing to a file should not exist. obj=obj, container=container)
# TODO(briancurtin): This method should probably offload the get obj = self._get_resource(
# operation into another thread or something of that nature. _obj.Object, obj, container=container_name, **attrs)
with open(path, "w") as out: return obj.download(self)
out.write(self.get_object(obj, container))
def upload_object(self, **attrs): def stream_object(self, obj, container=None, chunk_size=1024, **attrs):
"""Stream the data contained inside an object.
:param obj: The value can be the name of an object or a
:class:`~openstack.object_store.v1.obj.Object` instance.
:param container: The value can be the name of a container or a
:class:`~openstack.object_store.v1.container.Container`
instance.
:raises: :class:`~openstack.exceptions.ResourceNotFound`
when no resource can be found.
:returns: An iterator that iterates over chunk_size bytes
"""
container_name = self._get_container_name(
obj=obj, container=container)
container_name = self._get_container_name(container=container)
obj = self._get_resource(
_obj.Object, obj, container=container_name, **attrs)
return obj.stream(self, chunk_size=chunk_size)
def create_object(self, container, name, **attrs):
"""Upload a new object from attributes """Upload a new object from attributes
:param container: The value can be the name of a container or a
:class:`~openstack.object_store.v1.container.Container`
instance.
:param name: Name of the object to create.
:param dict attrs: Keyword arguments which will be used to create :param dict attrs: Keyword arguments which will be used to create
a :class:`~openstack.object_store.v1.obj.Object`, a :class:`~openstack.object_store.v1.obj.Object`,
comprised of the properties on the Object class. comprised of the properties on the Object class.
**Required**: A `container` argument must be specified,
which is either the ID of a container or a
:class:`~openstack.object_store.v1.container.Container`
instance.
:returns: The results of object creation :returns: The results of object creation
:rtype: :class:`~openstack.object_store.v1.container.Container` :rtype: :class:`~openstack.object_store.v1.container.Container`
""" """
container = attrs.pop("container", None) # TODO(mordred) Add ability to stream data from a file
container_name = self._get_container_name(None, container) # TODO(mordred) Use create_object from OpenStackCloud
container_name = self._get_container_name(container=container)
return self._create(_obj.Object, return self._create(
path_args={"container": container_name}, **attrs) _obj.Object, container=container_name, name=name, **attrs)
# Backwards compat
upload_object = create_object
def copy_object(self): def copy_object(self):
"""Copy an object.""" """Copy an object."""
@@ -252,7 +276,7 @@ class Proxy(proxy.BaseProxy):
container_name = self._get_container_name(obj, container) container_name = self._get_container_name(obj, container)
self._delete(_obj.Object, obj, ignore_missing=ignore_missing, self._delete(_obj.Object, obj, ignore_missing=ignore_missing,
path_args={"container": container_name}) container=container_name)
def get_object_metadata(self, obj, container=None): def get_object_metadata(self, obj, container=None):
"""Get metadata for an object. """Get metadata for an object.
@@ -269,8 +293,7 @@ class Proxy(proxy.BaseProxy):
""" """
container_name = self._get_container_name(obj, container) container_name = self._get_container_name(obj, container)
return self._head(_obj.Object, obj, return self._head(_obj.Object, obj, container=container_name)
path_args={"container": container_name})
def set_object_metadata(self, obj, container=None, **metadata): def set_object_metadata(self, obj, container=None, **metadata):
"""Set metadata for an object. """Set metadata for an object.
@@ -298,9 +321,9 @@ class Proxy(proxy.BaseProxy):
- `is_content_type_detected` - `is_content_type_detected`
""" """
container_name = self._get_container_name(obj, container) container_name = self._get_container_name(obj, container)
res = self._get_resource(_obj.Object, obj, res = self._get_resource(_obj.Object, obj, container=container_name)
path_args={"container": container_name})
res.set_metadata(self, metadata) res.set_metadata(self, metadata)
return res
def delete_object_metadata(self, obj, container=None, keys=None): def delete_object_metadata(self, obj, container=None, keys=None):
"""Delete metadata for an object. """Delete metadata for an object.
@@ -313,6 +336,6 @@ class Proxy(proxy.BaseProxy):
:param keys: The keys of metadata to be deleted. :param keys: The keys of metadata to be deleted.
""" """
container_name = self._get_container_name(obj, container) container_name = self._get_container_name(obj, container)
res = self._get_resource(_obj.Object, obj, res = self._get_resource(_obj.Object, obj, container=container_name)
path_args={"container": container_name})
res.delete_metadata(self, keys) res.delete_metadata(self, keys)
return res

View File

@@ -12,7 +12,7 @@
 # under the License.

 from openstack.object_store.v1 import _base
-from openstack import resource
+from openstack import resource2 as resource


 class Account(_base.BaseResource):
@@ -20,23 +20,26 @@ class Account(_base.BaseResource):
     base_path = "/"

-    allow_retrieve = True
+    allow_get = True
     allow_update = True
     allow_head = True

     #: The total number of bytes that are stored in Object Storage for
     #: the account.
-    account_bytes_used = resource.header("x-account-bytes-used", type=int)
+    account_bytes_used = resource.Header("x-account-bytes-used", type=int)
     #: The number of containers.
-    account_container_count = resource.header("x-account-container-count",
-                                               type=int)
+    account_container_count = resource.Header("x-account-container-count",
+                                               type=int)
     #: The number of objects in the account.
-    account_object_count = resource.header("x-account-object-count", type=int)
+    account_object_count = resource.Header("x-account-object-count", type=int)
     #: The secret key value for temporary URLs. If not set,
     #: this header is not returned by this operation.
-    meta_temp_url_key = resource.header("x-account-meta-temp-url-key")
+    meta_temp_url_key = resource.Header("x-account-meta-temp-url-key")
     #: A second secret key value for temporary URLs. If not set,
     #: this header is not returned by this operation.
-    meta_temp_url_key_2 = resource.header("x-account-meta-temp-url-key-2")
+    meta_temp_url_key_2 = resource.Header("x-account-meta-temp-url-key-2")
     #: The timestamp of the transaction.
-    timestamp = resource.header("x-timestamp")
+    timestamp = resource.Header("x-timestamp")
+
+    has_body = False
+    requires_id = False

View File

@@ -12,7 +12,7 @@
# under the License. # under the License.
from openstack.object_store.v1 import _base from openstack.object_store.v1 import _base
from openstack import resource from openstack import resource2 as resource
class Container(_base.BaseResource): class Container(_base.BaseResource):
@@ -28,10 +28,10 @@ class Container(_base.BaseResource):
} }
base_path = "/" base_path = "/"
id_attribute = "name" pagination_key = 'X-Account-Container-Count'
allow_create = True allow_create = True
allow_retrieve = True allow_get = True
allow_update = True allow_update = True
allow_delete = True allow_delete = True
allow_list = True allow_list = True
@@ -39,20 +39,22 @@ class Container(_base.BaseResource):
# Container body data (when id=None) # Container body data (when id=None)
#: The name of the container. #: The name of the container.
name = resource.prop("name") name = resource.Body("name", alternate_id=True, alias='id')
#: The number of objects in the container. #: The number of objects in the container.
count = resource.prop("count") count = resource.Body("count", type=int, alias='object_count')
#: The total number of bytes that are stored in Object Storage #: The total number of bytes that are stored in Object Storage
#: for the container. #: for the container.
bytes = resource.prop("bytes") bytes = resource.Body("bytes", type=int, alias='bytes_used')
# Container metadata (when id=name) # Container metadata (when id=name)
#: The number of objects. #: The number of objects.
object_count = resource.header("x-container-object-count", type=int) object_count = resource.Header(
"x-container-object-count", type=int, alias='count')
#: The count of bytes used in total. #: The count of bytes used in total.
bytes_used = resource.header("x-container-bytes-used", type=int) bytes_used = resource.Header(
"x-container-bytes-used", type=int, alias='bytes')
#: The timestamp of the transaction. #: The timestamp of the transaction.
timestamp = resource.header("x-timestamp") timestamp = resource.Header("x-timestamp")
# Request headers (when id=None) # Request headers (when id=None)
#: If set to True, Object Storage queries all replicas to return the #: If set to True, Object Storage queries all replicas to return the
@@ -60,66 +62,66 @@ class Container(_base.BaseResource):
#: faster after it finds one valid replica. Because setting this #: faster after it finds one valid replica. Because setting this
#: header to True is more expensive for the back end, use it only #: header to True is more expensive for the back end, use it only
#: when it is absolutely needed. *Type: bool* #: when it is absolutely needed. *Type: bool*
is_newest = resource.header("x-newest", type=bool) is_newest = resource.Header("x-newest", type=bool)
# Request headers (when id=name) # Request headers (when id=name)
#: The ACL that grants read access. If not set, this header is not #: The ACL that grants read access. If not set, this header is not
#: returned by this operation. #: returned by this operation.
read_ACL = resource.header("x-container-read") read_ACL = resource.Header("x-container-read")
#: The ACL that grants write access. If not set, this header is not #: The ACL that grants write access. If not set, this header is not
#: returned by this operation. #: returned by this operation.
write_ACL = resource.header("x-container-write") write_ACL = resource.Header("x-container-write")
#: The destination for container synchronization. If not set, #: The destination for container synchronization. If not set,
#: this header is not returned by this operation. #: this header is not returned by this operation.
sync_to = resource.header("x-container-sync-to") sync_to = resource.Header("x-container-sync-to")
#: The secret key for container synchronization. If not set, #: The secret key for container synchronization. If not set,
#: this header is not returned by this operation. #: this header is not returned by this operation.
sync_key = resource.header("x-container-sync-key") sync_key = resource.Header("x-container-sync-key")
#: Enables versioning on this container. The value is the name #: Enables versioning on this container. The value is the name
#: of another container. You must UTF-8-encode and then URL-encode #: of another container. You must UTF-8-encode and then URL-encode
#: the name before you include it in the header. To disable #: the name before you include it in the header. To disable
#: versioning, set the header to an empty string. #: versioning, set the header to an empty string.
versions_location = resource.header("x-versions-location") versions_location = resource.Header("x-versions-location")
#: The MIME type of the list of names. #: The MIME type of the list of names.
content_type = resource.header("content-type") content_type = resource.Header("content-type")
#: If set to true, Object Storage guesses the content type based #: If set to true, Object Storage guesses the content type based
#: on the file extension and ignores the value sent in the #: on the file extension and ignores the value sent in the
#: Content-Type header, if present. *Type: bool* #: Content-Type header, if present. *Type: bool*
is_content_type_detected = resource.header("x-detect-content-type", is_content_type_detected = resource.Header("x-detect-content-type",
type=bool) type=bool)
# TODO(mordred) Shouldn't if-none-match be handled more systemically?
#: In combination with Expect: 100-Continue, specify an #: In combination with Expect: 100-Continue, specify an
#: "If-None-Match: \*" header to query whether the server already #: "If-None-Match: \*" header to query whether the server already
#: has a copy of the object before any data is sent. #: has a copy of the object before any data is sent.
if_none_match = resource.header("if-none-match") if_none_match = resource.Header("if-none-match")
@classmethod @classmethod
def create_by_id(cls, session, attrs, resource_id=None): def new(cls, **kwargs):
"""Create a Resource from its attributes. # Container uses name as id. Proxy._get_resource calls
# Resource.new(id=name) but then we need to do container.name
# It's the same thing for Container - make it be the same.
name = kwargs.pop('id', None)
if name:
kwargs.setdefault('name', name)
return Container(_synchronized=True, **kwargs)
def create(self, session, prepend_key=True):
"""Create a remote resource based on this instance.
:param session: The session to use for making this request. :param session: The session to use for making this request.
:type session: :class:`~keystoneauth1.adapter.Adapter` :type session: :class:`~keystoneauth1.adapter.Adapter`
:param dict attrs: The attributes to be sent in the body :param prepend_key: A boolean indicating whether the resource_key
of the request. should be prepended in a resource creation
:param resource_id: This resource's identifier, if needed by request. Default to True.
the request. The default is ``None``.
:return: A ``dict`` representing the response headers. :return: This :class:`Resource` instance.
:raises: :exc:`~openstack.exceptions.MethodNotSupported` if
:data:`Resource.allow_create` is not set to ``True``.
""" """
url = cls._get_url(None, resource_id) request = self._prepare_request(
headers = attrs.get(resource.HEADERS, dict()) requires_id=True, prepend_key=prepend_key)
headers['Accept'] = '' response = session.put(
return session.put(url, request.url, json=request.body, headers=request.headers)
headers=headers).headers
def create(self, session): self._translate_response(response, has_body=False)
"""Create a Resource from this instance.
:param session: The session to use for making this request.
:type session: :class:`~keystoneauth1.adapter.Adapter`
:return: This instance.
"""
resp = self.create_by_id(session, self._attrs, self.id)
self.set_headers(resp)
self._reset_dirty()
return self return self

View File

@@ -13,9 +13,10 @@
import copy import copy
from openstack import exceptions
from openstack.object_store import object_store_service from openstack.object_store import object_store_service
from openstack.object_store.v1 import _base from openstack.object_store.v1 import _base
from openstack import resource from openstack import resource2 as resource
class Object(_base.BaseResource): class Object(_base.BaseResource):
@@ -30,28 +31,36 @@ class Object(_base.BaseResource):
} }
base_path = "/%(container)s" base_path = "/%(container)s"
pagination_key = 'X-Container-Object-Count'
service = object_store_service.ObjectStoreService() service = object_store_service.ObjectStoreService()
id_attribute = "name"
allow_create = True allow_create = True
allow_retrieve = True allow_get = True
allow_update = True allow_update = True
allow_delete = True allow_delete = True
allow_list = True allow_list = True
allow_head = True allow_head = True
# Data to be passed during a POST call to create an object on the server. # Data to be passed during a POST call to create an object on the server.
# TODO(mordred) Make a base class BaseDataResource that can be used here
# and with glance images that has standard overrides for dealing with
# binary data.
data = None data = None
# URL parameters # URL parameters
#: The unique name for the container. #: The unique name for the container.
container = resource.prop("container") container = resource.URI("container")
#: The unique name for the object. #: The unique name for the object.
name = resource.prop("name") name = resource.Body("name", alternate_id=True)
# Object details # Object details
hash = resource.prop("hash") # Make these private because they should only matter in the case where
bytes = resource.prop("bytes") # we have a Body with no headers (like if someone programmatically is
# creating an Object)
_hash = resource.Body("hash")
_bytes = resource.Body("bytes", type=int)
_last_modified = resource.Body("last_modified")
_content_type = resource.Body("content_type")
# Headers for HEAD and GET requests # Headers for HEAD and GET requests
#: If set to True, Object Storage queries all replicas to return #: If set to True, Object Storage queries all replicas to return
@@ -59,46 +68,49 @@ class Object(_base.BaseResource):
#: responds faster after it finds one valid replica. Because #: responds faster after it finds one valid replica. Because
#: setting this header to True is more expensive for the back end, #: setting this header to True is more expensive for the back end,
#: use it only when it is absolutely needed. *Type: bool* #: use it only when it is absolutely needed. *Type: bool*
is_newest = resource.header("x-newest", type=bool) is_newest = resource.Header("x-newest", type=bool)
#: TODO(briancurtin) there's a lot of content here... #: TODO(briancurtin) there's a lot of content here...
range = resource.header("range", type=dict) range = resource.Header("range", type=dict)
#: See http://www.ietf.org/rfc/rfc2616.txt. #: See http://www.ietf.org/rfc/rfc2616.txt.
if_match = resource.header("if-match", type=dict) # TODO(mordred) We need a string-or-list formatter. type=list with a string
# value results in a list containing the characters.
if_match = resource.Header("if-match", type=list)
#: In combination with Expect: 100-Continue, specify an #: In combination with Expect: 100-Continue, specify an
#: "If-None-Match: \*" header to query whether the server already #: "If-None-Match: \*" header to query whether the server already
#: has a copy of the object before any data is sent. #: has a copy of the object before any data is sent.
if_none_match = resource.header("if-none-match", type=dict) if_none_match = resource.Header("if-none-match", type=list)
#: See http://www.ietf.org/rfc/rfc2616.txt. #: See http://www.ietf.org/rfc/rfc2616.txt.
if_modified_since = resource.header("if-modified-since", type=dict) if_modified_since = resource.Header("if-modified-since", type=str)
#: See http://www.ietf.org/rfc/rfc2616.txt. #: See http://www.ietf.org/rfc/rfc2616.txt.
if_unmodified_since = resource.header("if-unmodified-since", type=dict) if_unmodified_since = resource.Header("if-unmodified-since", type=str)
# Query parameters # Query parameters
#: Used with temporary URLs to sign the request. For more #: Used with temporary URLs to sign the request. For more
#: information about temporary URLs, see OpenStack Object Storage #: information about temporary URLs, see OpenStack Object Storage
#: API v1 Reference. #: API v1 Reference.
signature = resource.header("signature") signature = resource.Header("signature")
#: Used with temporary URLs to specify the expiry time of the #: Used with temporary URLs to specify the expiry time of the
#: signature. For more information about temporary URLs, see #: signature. For more information about temporary URLs, see
#: OpenStack Object Storage API v1 Reference. #: OpenStack Object Storage API v1 Reference.
expires_at = resource.header("expires") expires_at = resource.Header("expires")
#: If you include the multipart-manifest=get query parameter and #: If you include the multipart-manifest=get query parameter and
#: the object is a large object, the object contents are not #: the object is a large object, the object contents are not
#: returned. Instead, the manifest is returned in the #: returned. Instead, the manifest is returned in the
#: X-Object-Manifest response header for dynamic large objects #: X-Object-Manifest response header for dynamic large objects
#: or in the response body for static large objects. #: or in the response body for static large objects.
multipart_manifest = resource.header("multipart-manifest") multipart_manifest = resource.Header("multipart-manifest")
# Response headers from HEAD and GET # Response headers from HEAD and GET
#: HEAD operations do not return content. However, in this #: HEAD operations do not return content. However, in this
#: operation the value in the Content-Length header is not the #: operation the value in the Content-Length header is not the
#: size of the response body. Instead it contains the size of #: size of the response body. Instead it contains the size of
#: the object, in bytes. #: the object, in bytes.
content_length = resource.header("content-length") content_length = resource.Header(
"content-length", type=int, alias='_bytes')
#: The MIME type of the object. #: The MIME type of the object.
content_type = resource.header("content-type") content_type = resource.Header("content-type", alias="_content_type")
#: The type of ranges that the object accepts. #: The type of ranges that the object accepts.
accept_ranges = resource.header("accept-ranges") accept_ranges = resource.Header("accept-ranges")
#: For objects smaller than 5 GB, this value is the MD5 checksum #: For objects smaller than 5 GB, this value is the MD5 checksum
#: of the object content. The value is not quoted. #: of the object content. The value is not quoted.
#: For manifest objects, this value is the MD5 checksum of the #: For manifest objects, this value is the MD5 checksum of the
@@ -110,46 +122,46 @@ class Object(_base.BaseResource):
#: the response body as it is received and compare this value #: the response body as it is received and compare this value
#: with the one in the ETag header. If they differ, the content #: with the one in the ETag header. If they differ, the content
#: was corrupted, so retry the operation. #: was corrupted, so retry the operation.
etag = resource.header("etag") etag = resource.Header("etag", alias='_hash')
#: Set to True if this object is a static large object manifest object. #: Set to True if this object is a static large object manifest object.
#: *Type: bool* #: *Type: bool*
is_static_large_object = resource.header("x-static-large-object", is_static_large_object = resource.Header("x-static-large-object",
type=bool) type=bool)
#: If set, the value of the Content-Encoding metadata. #: If set, the value of the Content-Encoding metadata.
#: If not set, this header is not returned by this operation. #: If not set, this header is not returned by this operation.
content_encoding = resource.header("content-encoding") content_encoding = resource.Header("content-encoding")
#: If set, specifies the override behavior for the browser. #: If set, specifies the override behavior for the browser.
#: For example, this header might specify that the browser use #: For example, this header might specify that the browser use
#: a download program to save this file rather than show the file, #: a download program to save this file rather than show the file,
#: which is the default. #: which is the default.
#: If not set, this header is not returned by this operation. #: If not set, this header is not returned by this operation.
content_disposition = resource.header("content-disposition") content_disposition = resource.Header("content-disposition")
#: Specifies the number of seconds after which the object is #: Specifies the number of seconds after which the object is
#: removed. Internally, the Object Storage system stores this #: removed. Internally, the Object Storage system stores this
#: value in the X-Delete-At metadata item. #: value in the X-Delete-At metadata item.
delete_after = resource.header("x-delete-after", type=int) delete_after = resource.Header("x-delete-after", type=int)
#: If set, the time when the object will be deleted by the system #: If set, the time when the object will be deleted by the system
#: in the format of a UNIX Epoch timestamp. #: in the format of a UNIX Epoch timestamp.
#: If not set, this header is not returned by this operation. #: If not set, this header is not returned by this operation.
delete_at = resource.header("x-delete-at") delete_at = resource.Header("x-delete-at")
#: If set, to this is a dynamic large object manifest object. #: If set, to this is a dynamic large object manifest object.
#: The value is the container and object name prefix of the #: The value is the container and object name prefix of the
#: segment objects in the form container/prefix. #: segment objects in the form container/prefix.
object_manifest = resource.header("x-object-manifest") object_manifest = resource.Header("x-object-manifest")
#: The timestamp of the transaction. #: The timestamp of the transaction.
timestamp = resource.header("x-timestamp") timestamp = resource.Header("x-timestamp")
#: The date and time that the object was created or the last #: The date and time that the object was created or the last
#: time that the metadata was changed. #: time that the metadata was changed.
last_modified_at = resource.header("last_modified", alias="last-modified") last_modified_at = resource.Header("last-modified", alias='_last_modified')
# Headers for PUT and POST requests # Headers for PUT and POST requests
#: Set to chunked to enable chunked transfer encoding. If used, #: Set to chunked to enable chunked transfer encoding. If used,
#: do not set the Content-Length header to a non-zero value. #: do not set the Content-Length header to a non-zero value.
transfer_encoding = resource.header("transfer-encoding") transfer_encoding = resource.Header("transfer-encoding")
#: If set to true, Object Storage guesses the content type based #: If set to true, Object Storage guesses the content type based
#: on the file extension and ignores the value sent in the #: on the file extension and ignores the value sent in the
#: Content-Type header, if present. *Type: bool* #: Content-Type header, if present. *Type: bool*
is_content_type_detected = resource.header("x-detect-content-type", is_content_type_detected = resource.Header("x-detect-content-type",
type=bool) type=bool)
#: If set, this is the name of an object used to create the new #: If set, this is the name of an object used to create the new
#: object by copying the X-Copy-From object. The value is in form #: object by copying the X-Copy-From object. The value is in form
@@ -158,7 +170,13 @@ class Object(_base.BaseResource):
#: in the header. #: in the header.
#: Using PUT with X-Copy-From has the same effect as using the #: Using PUT with X-Copy-From has the same effect as using the
#: COPY operation to copy an object. #: COPY operation to copy an object.
copy_from = resource.header("x-copy-from") copy_from = resource.Header("x-copy-from")
has_body = False
def __init__(self, data=None, **attrs):
super(_base.BaseResource, self).__init__(**attrs)
self.data = data
# The Object Store treats the metadata for its resources inconsistently so # The Object Store treats the metadata for its resources inconsistently so
# Object.set_metadata must override the BaseResource.set_metadata to # Object.set_metadata must override the BaseResource.set_metadata to
@@ -169,66 +187,111 @@ class Object(_base.BaseResource):
filtered_metadata = \ filtered_metadata = \
{key: value for key, value in metadata.items() if value} {key: value for key, value in metadata.items() if value}
# Update from remote if we only have locally created information
if not self.last_modified_at:
self.head(session)
# Get a copy of the original metadata so it doesn't get erased on POST # Get a copy of the original metadata so it doesn't get erased on POST
# and update it with the new metadata values. # and update it with the new metadata values.
obj = self.head(session) metadata = copy.deepcopy(self.metadata)
metadata2 = copy.deepcopy(obj.metadata) metadata.update(filtered_metadata)
metadata2.update(filtered_metadata)
# Include any original system metadata so it doesn't get erased on POST # Include any original system metadata so it doesn't get erased on POST
for key in self._system_metadata: for key in self._system_metadata:
value = getattr(obj, key) value = getattr(self, key)
if value and key not in metadata2: if value and key not in metadata:
metadata2[key] = value metadata[key] = value
super(Object, self).set_metadata(session, metadata2) request = self._prepare_request()
headers = self._calculate_headers(metadata)
response = session.post(request.url, headers=headers)
self._translate_response(response, has_body=False)
self.metadata.update(metadata)
return self
# The Object Store treats the metadata for its resources inconsistently so # The Object Store treats the metadata for its resources inconsistently so
# Object.delete_metadata must override the BaseResource.delete_metadata to # Object.delete_metadata must override the BaseResource.delete_metadata to
# account for it. # account for it.
def delete_metadata(self, session, keys): def delete_metadata(self, session, keys):
# Get a copy of the original metadata so it doesn't get erased on POST if not keys:
# and update it with the new metadata values. return
obj = self.head(session) # If we have an empty object, update it from the remote side so that
metadata = copy.deepcopy(obj.metadata) # we have a copy of the original metadata. Deleting metadata requires
# POSTing and overwriting all of the metadata. If we already have
# metadata locally, assume this is an existing object.
if not self.metadata:
self.head(session)
metadata = copy.deepcopy(self.metadata)
# Include any original system metadata so it doesn't get erased on POST # Include any original system metadata so it doesn't get erased on POST
for key in self._system_metadata: for key in self._system_metadata:
value = getattr(obj, key) value = getattr(self, key)
if value: if value:
metadata[key] = value metadata[key] = value
# Remove the metadata # Remove the requested metadata keys
# TODO(mordred) Why don't we just look at self._header_mapping()
# instead of having system_metadata?
deleted = False
attr_keys_to_delete = set()
for key in keys: for key in keys:
if key == 'delete_after': if key == 'delete_after':
del(metadata['delete_at']) del(metadata['delete_at'])
else: else:
del(metadata[key]) if key in metadata:
del(metadata[key])
# Delete the attribute from the local copy of the object.
# Metadata that doesn't have Component attributes is
# handled by self.metadata being reset when we run
# self.head
if hasattr(self, key):
attr_keys_to_delete.add(key)
deleted = True
url = self._get_url(self, self.id) # Nothing to delete, skip the POST
session.post(url, if not deleted:
headers=self._calculate_headers(metadata)) return self
def get(self, session, include_headers=False, args=None, request = self._prepare_request()
error_message=None): response = session.post(
url = self._get_url(self, self.id) request.url, headers=self._calculate_headers(metadata))
headers = {'Accept': 'bytes'} exceptions.raise_from_response(
resp = session.get(url, headers=headers, error_message=error_message) response, error_message="Error deleting metadata keys")
resp = resp.content
self._set_metadata() # Only delete from local object if the remote delete was successful
return resp for key in attr_keys_to_delete:
delattr(self, key)
# Just update ourselves from remote again.
return self.head(session)
def _download(self, session, error_message=None, stream=False):
request = self._prepare_request()
request.headers['Accept'] = 'bytes'
response = session.get(
request.url, headers=request.headers, stream=stream)
exceptions.raise_from_response(response, error_message=error_message)
return response
def download(self, session, error_message=None):
response = self._download(session, error_message=error_message)
return response.content
def stream(self, session, error_message=None, chunk_size=1024):
response = self._download(
session, error_message=error_message, stream=True)
return response.iter_content(chunk_size, decode_unicode=False)
def create(self, session): def create(self, session):
url = self._get_url(self, self.id) request = self._prepare_request()
request.headers['Accept'] = ''
headers = self.get_headers() response = session.put(
headers['Accept'] = '' request.url,
if self.data is not None: data=self.data,
resp = session.put(url, headers=request.headers)
data=self.data, self._translate_response(response, has_body=False)
headers=headers).headers
else:
resp = session.post(url, data=None,
headers=headers).headers
self.set_headers(resp)
return self return self

View File

@@ -34,6 +34,8 @@ and then returned to the caller.
import collections import collections
import itertools import itertools
from requests import structures
from openstack import exceptions from openstack import exceptions
from openstack import format from openstack import format
from openstack import utils from openstack import utils
@@ -44,7 +46,8 @@ class _BaseComponent(object):
# The name this component is being tracked as in the Resource # The name this component is being tracked as in the Resource
key = None key = None
def __init__(self, name, type=None, default=None, alternate_id=False): def __init__(self, name, type=None, default=None, alias=None,
alternate_id=False, **kwargs):
"""A typed descriptor for a component that makes up a Resource """A typed descriptor for a component that makes up a Resource
:param name: The name this component exists as on the server :param name: The name this component exists as on the server
@@ -53,6 +56,7 @@ class _BaseComponent(object):
will work. If you specify type=dict and then set a will work. If you specify type=dict and then set a
component to a string, __set__ will fail, for example. component to a string, __set__ will fail, for example.
:param default: Typically None, but any other default can be set. :param default: Typically None, but any other default can be set.
:param alias: If set, alternative attribute on object to return.
:param alternate_id: When `True`, this property is known :param alternate_id: When `True`, this property is known
internally as a value that can be sent internally as a value that can be sent
with requests that require an ID but with requests that require an ID but
@@ -63,6 +67,7 @@ class _BaseComponent(object):
self.name = name self.name = name
self.type = type self.type = type
self.default = default self.default = default
self.alias = alias
self.alternate_id = alternate_id self.alternate_id = alternate_id
def __get__(self, instance, owner): def __get__(self, instance, owner):
@@ -74,6 +79,8 @@ class _BaseComponent(object):
try: try:
value = attributes[self.name] value = attributes[self.name]
except KeyError: except KeyError:
if self.alias:
return getattr(instance, self.alias)
return self.default return self.default
# self.type() should not be called on None objects. # self.type() should not be called on None objects.
@@ -253,6 +260,11 @@ class Resource(object):
#: Method for creating a resource (POST, PUT) #: Method for creating a resource (POST, PUT)
create_method = "POST" create_method = "POST"
#: Do calls for this resource require an id
requires_id = True
#: Do responses for this resource have bodies
has_body = True
def __init__(self, _synchronized=False, **attrs): def __init__(self, _synchronized=False, **attrs):
"""The base resource """The base resource
@@ -331,12 +343,13 @@ class Resource(object):
attributes that exist on this class. attributes that exist on this class.
""" """
body = self._consume_attrs(self._body_mapping(), attrs) body = self._consume_attrs(self._body_mapping(), attrs)
header = self._consume_attrs(self._header_mapping(), attrs) header = self._consume_attrs(
self._header_mapping(), attrs, insensitive=True)
uri = self._consume_attrs(self._uri_mapping(), attrs) uri = self._consume_attrs(self._uri_mapping(), attrs)
return body, header, uri return body, header, uri
def _consume_attrs(self, mapping, attrs): def _consume_attrs(self, mapping, attrs, insensitive=False):
"""Given a mapping and attributes, return relevant matches """Given a mapping and attributes, return relevant matches
This method finds keys in attrs that exist in the mapping, then This method finds keys in attrs that exist in the mapping, then
@@ -347,16 +360,29 @@ class Resource(object):
same source dict several times. same source dict several times.
""" """
relevant_attrs = {} relevant_attrs = {}
if insensitive:
relevant_attrs = structures.CaseInsensitiveDict()
consumed_keys = [] consumed_keys = []
nonce = object()
# TODO(mordred) Invert the loop - loop over mapping, look in attrs
# and we should be able to simplify the logic, since CID should
# handle the case matching
for key in attrs: for key in attrs:
if key in mapping: value = mapping.get(key, nonce)
if value is not nonce:
# Convert client-side key names into server-side. # Convert client-side key names into server-side.
relevant_attrs[mapping[key]] = attrs[key] relevant_attrs[mapping[key]] = attrs[key]
consumed_keys.append(key) consumed_keys.append(key)
elif key in mapping.values(): else:
# Server-side names can be stored directly. # Server-side names can be stored directly.
relevant_attrs[key] = attrs[key] search_key = key
consumed_keys.append(key) values = mapping.values()
if insensitive:
search_key = search_key.lower()
values = [v.lower() for v in values]
if search_key in values:
relevant_attrs[key] = attrs[key]
consumed_keys.append(key)
for key in consumed_keys: for key in consumed_keys:
attrs.pop(key) attrs.pop(key)
@@ -366,6 +392,10 @@ class Resource(object):
@classmethod @classmethod
def _get_mapping(cls, component): def _get_mapping(cls, component):
"""Return a dict of attributes of a given component on the class""" """Return a dict of attributes of a given component on the class"""
# TODO(mordred) Invert this mapping, it should be server-side to local.
# The reason for that is that headers are case insensitive, whereas
# our local values are case sensitive. If we invert this dict, we can
# rely on CaseInsensitiveDict when doing comparisons.
mapping = {} mapping = {}
# Since we're looking at class definitions we need to include # Since we're looking at class definitions we need to include
# subclasses, so check the whole MRO. # subclasses, so check the whole MRO.
@@ -386,7 +416,8 @@ class Resource(object):
@classmethod @classmethod
def _header_mapping(cls): def _header_mapping(cls):
"""Return all Header members of this class""" """Return all Header members of this class"""
return cls._get_mapping(Header) # TODO(mordred) this isn't helpful until we invert the dict
return structures.CaseInsensitiveDict(cls._get_mapping(Header))
@classmethod @classmethod
def _uri_mapping(cls): def _uri_mapping(cls):
@@ -501,7 +532,7 @@ class Resource(object):
return mapping return mapping
def _prepare_request(self, requires_id=True, prepend_key=False): def _prepare_request(self, requires_id=None, prepend_key=False):
"""Prepare a request to be sent to the server """Prepare a request to be sent to the server
Create operations don't require an ID, but all others do, Create operations don't require an ID, but all others do,
@@ -515,11 +546,20 @@ class Resource(object):
as well a body and headers that are ready to send. as well a body and headers that are ready to send.
Only dirty body and header contents will be returned. Only dirty body and header contents will be returned.
""" """
if requires_id is None:
requires_id = self.requires_id
body = self._body.dirty body = self._body.dirty
if prepend_key and self.resource_key is not None: if prepend_key and self.resource_key is not None:
body = {self.resource_key: body} body = {self.resource_key: body}
headers = self._header.dirty # TODO(mordred) Ensure headers have string values better than this
headers = {}
for k, v in self._header.dirty.items():
if isinstance(v, list):
headers[k] = ", ".join(v)
else:
headers[k] = str(v)
uri = self.base_path % self._uri.attributes uri = self.base_path % self._uri.attributes
if requires_id: if requires_id:
@@ -539,7 +579,7 @@ class Resource(object):
""" """
return {k: v for k, v in component.items() if k in mapping.values()} return {k: v for k, v in component.items() if k in mapping.values()}
def _translate_response(self, response, has_body=True, error_message=None): def _translate_response(self, response, has_body=None, error_message=None):
"""Given a KSA response, inflate this instance with its data """Given a KSA response, inflate this instance with its data
DELETE operations don't return a body, so only try to work DELETE operations don't return a body, so only try to work
@@ -548,6 +588,8 @@ class Resource(object):
This method updates attributes that correspond to headers This method updates attributes that correspond to headers
and body on this instance and clears the dirty set. and body on this instance and clears the dirty set.
""" """
if has_body is None:
has_body = self.has_body
exceptions.raise_from_response(response, error_message=error_message) exceptions.raise_from_response(response, error_message=error_message)
if has_body: if has_body:
body = response.json() body = response.json()
@@ -560,6 +602,8 @@ class Resource(object):
headers = self._filter_component(response.headers, headers = self._filter_component(response.headers,
self._header_mapping()) self._header_mapping())
headers = self._consume_attrs(
self._header_mapping(), response.headers.copy(), insensitive=True)
self._header.attributes.update(headers) self._header.attributes.update(headers)
self._header.clean() self._header.clean()
@@ -637,7 +681,7 @@ class Resource(object):
response = session.head(request.url, response = session.head(request.url,
headers={"Accept": ""}) headers={"Accept": ""})
self._translate_response(response) self._translate_response(response, has_body=False)
return self return self
def update(self, session, prepend_key=True, has_body=True): def update(self, session, prepend_key=True, has_body=True):

View File

@@ -36,11 +36,11 @@ class TestObject(base.BaseFunctionalTest):
                  in self.conn.object_store.objects(container=self.FOLDER)]
         self.assertIn(self.FILE, names)

-    def test_get_object(self):
-        result = self.conn.object_store.get_object(
+    def test_download_object(self):
+        result = self.conn.object_store.download_object(
             self.FILE, container=self.FOLDER)
         self.assertEqual(self.DATA, result)
-        result = self.conn.object_store.get_object(self.sot)
+        result = self.conn.object_store.download_object(self.sot)
         self.assertEqual(self.DATA, result)

     def test_system_metadata(self):

View File

@@ -611,6 +611,13 @@ class RequestsMockTestCase(BaseTestCase):
             mock_method, mock_uri, params['response_list'],
             **params['kw_params'])

+    def assert_no_calls(self):
+        # TODO(mordred) For now, creating the adapter for self.conn is
+        # triggering catalog lookups. Make sure no_calls is only 2.
+        # When we can make that on-demand through a descriptor object,
+        # drop this to 0.
+        self.assertEqual(2, len(self.adapter.request_history))
+
     def assert_calls(self, stop_after=None, do_count=True):
         for (x, (call, history)) in enumerate(
                 zip(self.calls, self.adapter.request_history)):


@@ -32,20 +32,20 @@ ACCOUNT_EXAMPLE = {
 class TestAccount(testtools.TestCase):

     def test_basic(self):
-        sot = account.Account.new(**ACCOUNT_EXAMPLE)
+        sot = account.Account(**ACCOUNT_EXAMPLE)
         self.assertIsNone(sot.resources_key)
         self.assertIsNone(sot.id)
         self.assertEqual('/', sot.base_path)
         self.assertEqual('object-store', sot.service.service_type)
         self.assertTrue(sot.allow_update)
         self.assertTrue(sot.allow_head)
-        self.assertTrue(sot.allow_retrieve)
+        self.assertTrue(sot.allow_get)
         self.assertFalse(sot.allow_delete)
         self.assertFalse(sot.allow_list)
         self.assertFalse(sot.allow_create)

     def test_make_it(self):
-        sot = account.Account.new(**{'headers': ACCOUNT_EXAMPLE})
+        sot = account.Account(**ACCOUNT_EXAMPLE)
         self.assertIsNone(sot.id)
         self.assertEqual(int(ACCOUNT_EXAMPLE['x-account-bytes-used']),
                          sot.account_bytes_used)


@@ -10,125 +10,123 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-import mock
-import testtools
-
 from openstack.object_store.v1 import container
+from openstack.tests.unit import base

-CONTAINER_NAME = "mycontainer"
-
-CONT_EXAMPLE = {
-    "count": 999,
-    "bytes": 12345,
-    "name": CONTAINER_NAME
-}
-
-HEAD_EXAMPLE = {
-    'content-length': '346',
-    'x-container-object-count': '2',
-    'accept-ranges': 'bytes',
-    'id': 'tx1878fdc50f9b4978a3fdc-0053c31462',
-    'date': 'Sun, 13 Jul 2014 23:21:06 GMT',
-    'x-container-read': 'read-settings',
-    'x-container-write': 'write-settings',
-    'x-container-sync-to': 'sync-to',
-    'x-container-sync-key': 'sync-key',
-    'x-container-bytes-used': '630666',
-    'x-versions-location': 'versions-location',
-    'content-type': 'application/json; charset=utf-8',
-    'x-timestamp': '1453414055.48672'
-}
-
-LIST_EXAMPLE = [
-    {
-        "count": 999,
-        "bytes": 12345,
-        "name": "container1"
-    },
-    {
-        "count": 888,
-        "bytes": 54321,
-        "name": "container2"
-    }
-]
-
-
-class TestContainer(testtools.TestCase):
+
+class TestContainer(base.RequestsMockTestCase):

     def setUp(self):
         super(TestContainer, self).setUp()
-        self.resp = mock.Mock()
-        self.resp.body = {}
-        self.resp.json = mock.Mock(return_value=self.resp.body)
-        self.resp.headers = {"X-Trans-Id": "abcdef"}
-        self.sess = mock.Mock()
-        self.sess.put = mock.Mock(return_value=self.resp)
-        self.sess.post = mock.Mock(return_value=self.resp)
+        self.container = self.getUniqueString()
+        self.endpoint = self.conn.object_store.get_endpoint() + '/'
+        self.container_endpoint = '{endpoint}{container}'.format(
+            endpoint=self.endpoint, container=self.container)
+
+        self.body = {
+            "count": 2,
+            "bytes": 630666,
+            "name": self.container,
+        }
+        self.headers = {
+            'x-container-object-count': '2',
+            'x-container-read': 'read-settings',
+            'x-container-write': 'write-settings',
+            'x-container-sync-to': 'sync-to',
+            'x-container-sync-key': 'sync-key',
+            'x-container-bytes-used': '630666',
+            'x-versions-location': 'versions-location',
+            'content-type': 'application/json; charset=utf-8',
+            'x-timestamp': '1453414055.48672'
+        }
+        self.body_plus_headers = dict(self.body, **self.headers)

     def test_basic(self):
-        sot = container.Container.new(**CONT_EXAMPLE)
+        sot = container.Container.new(**self.body)
         self.assertIsNone(sot.resources_key)
-        self.assertEqual('name', sot.id_attribute)
+        self.assertEqual('name', sot._alternate_id())
         self.assertEqual('/', sot.base_path)
         self.assertEqual('object-store', sot.service.service_type)
         self.assertTrue(sot.allow_update)
         self.assertTrue(sot.allow_create)
-        self.assertTrue(sot.allow_retrieve)
+        self.assertTrue(sot.allow_get)
         self.assertTrue(sot.allow_delete)
         self.assertTrue(sot.allow_list)
         self.assertTrue(sot.allow_head)
+        self.assert_no_calls()

     def test_make_it(self):
-        sot = container.Container.new(**CONT_EXAMPLE)
-        self.assertEqual(CONT_EXAMPLE['name'], sot.id)
-        self.assertEqual(CONT_EXAMPLE['name'], sot.name)
-        self.assertEqual(CONT_EXAMPLE['count'], sot.count)
-        self.assertEqual(CONT_EXAMPLE['bytes'], sot.bytes)
+        sot = container.Container.new(**self.body)
+        self.assertEqual(self.body['name'], sot.id)
+        self.assertEqual(self.body['name'], sot.name)
+        self.assertEqual(self.body['count'], sot.count)
+        self.assertEqual(self.body['count'], sot.object_count)
+        self.assertEqual(self.body['bytes'], sot.bytes)
+        self.assertEqual(self.body['bytes'], sot.bytes_used)
+        self.assert_no_calls()

     def test_create_and_head(self):
-        sot = container.Container(CONT_EXAMPLE)
-        # Update container with HEAD data
-        sot._attrs.update({'headers': HEAD_EXAMPLE})
+        sot = container.Container(**self.body_plus_headers)

         # Attributes from create
-        self.assertEqual(CONT_EXAMPLE['name'], sot.id)
-        self.assertEqual(CONT_EXAMPLE['name'], sot.name)
-        self.assertEqual(CONT_EXAMPLE['count'], sot.count)
-        self.assertEqual(CONT_EXAMPLE['bytes'], sot.bytes)
+        self.assertEqual(self.body_plus_headers['name'], sot.id)
+        self.assertEqual(self.body_plus_headers['name'], sot.name)
+        self.assertEqual(self.body_plus_headers['count'], sot.count)
+        self.assertEqual(self.body_plus_headers['bytes'], sot.bytes)

         # Attributes from header
-        self.assertEqual(int(HEAD_EXAMPLE['x-container-object-count']),
-                         sot.object_count)
-        self.assertEqual(int(HEAD_EXAMPLE['x-container-bytes-used']),
-                         sot.bytes_used)
-        self.assertEqual(HEAD_EXAMPLE['x-container-read'],
-                         sot.read_ACL)
-        self.assertEqual(HEAD_EXAMPLE['x-container-write'],
-                         sot.write_ACL)
-        self.assertEqual(HEAD_EXAMPLE['x-container-sync-to'],
-                         sot.sync_to)
-        self.assertEqual(HEAD_EXAMPLE['x-container-sync-key'],
-                         sot.sync_key)
-        self.assertEqual(HEAD_EXAMPLE['x-versions-location'],
-                         sot.versions_location)
-        self.assertEqual(HEAD_EXAMPLE['x-timestamp'], sot.timestamp)
+        self.assertEqual(
+            int(self.body_plus_headers['x-container-object-count']),
+            sot.object_count)
+        self.assertEqual(
+            int(self.body_plus_headers['x-container-bytes-used']),
+            sot.bytes_used)
+        self.assertEqual(
+            self.body_plus_headers['x-container-read'],
+            sot.read_ACL)
+        self.assertEqual(
+            self.body_plus_headers['x-container-write'],
+            sot.write_ACL)
+        self.assertEqual(
+            self.body_plus_headers['x-container-sync-to'],
+            sot.sync_to)
+        self.assertEqual(
+            self.body_plus_headers['x-container-sync-key'],
+            sot.sync_key)
+        self.assertEqual(
+            self.body_plus_headers['x-versions-location'],
+            sot.versions_location)
+        self.assertEqual(self.body_plus_headers['x-timestamp'], sot.timestamp)

-    @mock.patch("openstack.resource.Resource.list")
-    def test_list(self, fake_list):
-        fake_val = [container.Container.existing(**ex) for ex in LIST_EXAMPLE]
-        fake_list.return_value = fake_val
+    def test_list(self):
+        containers = [
+            {
+                "count": 999,
+                "bytes": 12345,
+                "name": "container1"
+            },
+            {
+                "count": 888,
+                "bytes": 54321,
+                "name": "container2"
+            }
+        ]
+        self.register_uris([
+            dict(method='GET', uri=self.endpoint,
+                 json=containers)
+        ])

-        # Since the list method is mocked out, just pass None for the session.
-        response = container.Container.list(None)
+        response = container.Container.list(self.conn.object_store)

-        self.assertEqual(len(LIST_EXAMPLE), len(response))
-        for item in range(len(response)):
-            self.assertEqual(container.Container, type(response[item]))
-            self.assertEqual(LIST_EXAMPLE[item]["name"], response[item].name)
-            self.assertEqual(LIST_EXAMPLE[item]["count"], response[item].count)
-            self.assertEqual(LIST_EXAMPLE[item]["bytes"], response[item].bytes)
+        self.assertEqual(len(containers), len(list(response)))
+        for index, item in enumerate(response):
+            self.assertEqual(container.Container, type(item))
+            self.assertEqual(containers[index]["name"], item.name)
+            self.assertEqual(containers[index]["count"], item.count)
+            self.assertEqual(containers[index]["bytes"], item.bytes)
+
+        self.assert_calls()

     def _test_create_update(self, sot, sot_call, sess_method):
         sot.read_ACL = "some ACL"
@@ -137,35 +135,43 @@ class TestContainer(testtools.TestCase):
         headers = {
             "x-container-read": "some ACL",
             "x-container-write": "another ACL",
-            "x-detect-content-type": True,
+            "x-detect-content-type": 'True',
+            "Accept": "",
         }
-        sot_call(self.sess)
+        self.register_uris([
+            dict(method=sess_method, uri=self.container_endpoint,
+                 json=self.body,
+                 validate=dict(headers=headers)),
+        ])
+        sot_call(self.conn.object_store)

-        url = "/%s" % CONTAINER_NAME
-        sess_method.assert_called_with(url,
-                                       headers=headers)
+        self.assert_calls()

     def test_create(self):
-        sot = container.Container.new(name=CONTAINER_NAME)
-        self._test_create_update(sot, sot.create, self.sess.put)
+        sot = container.Container.new(name=self.container)
+        self._test_create_update(sot, sot.create, 'PUT')

     def test_update(self):
-        sot = container.Container.new(name=CONTAINER_NAME)
-        self._test_create_update(sot, sot.update, self.sess.post)
+        sot = container.Container.new(name=self.container)
+        self._test_create_update(sot, sot.update, 'POST')

     def _test_no_headers(self, sot, sot_call, sess_method):
-        sot = container.Container.new(name=CONTAINER_NAME)
-        sot.create(self.sess)
-        url = "/%s" % CONTAINER_NAME
-        headers = {'Accept': ''}
-        self.sess.put.assert_called_with(url,
-                                         headers=headers)
+        headers = {}
+        data = {}
+        self.register_uris([
+            dict(method=sess_method, uri=self.container_endpoint,
+                 json=self.body,
+                 validate=dict(
+                     headers=headers,
+                     json=data))
+        ])
+        sot_call(self.conn.object_store)

     def test_create_no_headers(self):
-        sot = container.Container.new(name=CONTAINER_NAME)
-        self._test_no_headers(sot, sot.create, self.sess.put)
+        sot = container.Container.new(name=self.container)
+        self._test_no_headers(sot, sot.create, 'PUT')
+        self.assert_calls()

     def test_update_no_headers(self):
-        sot = container.Container.new(name=CONTAINER_NAME)
-        self._test_no_headers(sot, sot.update, self.sess.post)
+        sot = container.Container.new(name=self.container)
+        self._test_no_headers(sot, sot.update, 'POST')
+        self.assert_no_calls()
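
The register_uris/validate/assert_calls pattern the rewritten container tests rely on, condensed into one hedged sketch (the test name and registered response body are illustrative; the base class is the RequestsMockTestCase used above):

    from openstack.object_store.v1 import container
    from openstack.tests.unit import base

    class TestContainerSketch(base.RequestsMockTestCase):

        def test_create_sends_acl_header(self):
            name = self.getUniqueString()
            endpoint = self.conn.object_store.get_endpoint() + '/' + name
            # Register the expected PUT and validate the header it sends.
            self.register_uris([
                dict(method='PUT', uri=endpoint,
                     json={'name': name},
                     validate=dict(headers={'x-container-read': 'some ACL'})),
            ])

            sot = container.Container.new(name=name)
            sot.read_ACL = 'some ACL'
            sot.create(self.conn.object_store)

            # Every registered request must have been made, in order.
            self.assert_calls()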


@@ -10,14 +10,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-import mock
-import testtools
-
 from openstack.object_store.v1 import obj
-
-CONTAINER_NAME = "mycontainer"
-OBJECT_NAME = "myobject"
+from openstack.tests.unit.cloud import test_object as base_test_object

 # Object can receive both last-modified in headers and last_modified in
 # the body. However, originally, only last-modified was handled as an
@@ -30,109 +24,127 @@ OBJECT_NAME = "myobject"
 # attribute which would follow the same pattern.
 # This example should represent the body values returned by a GET, so the keys
 # must be underscores.
-OBJ_EXAMPLE = {
-    "hash": "243f87b91224d85722564a80fd3cb1f1",
-    "last_modified": "2014-07-13T18:41:03.319240",
-    "bytes": 252466,
-    "name": OBJECT_NAME,
-    "content_type": "application/octet-stream"
-}
-
-DICT_EXAMPLE = {
-    'container': CONTAINER_NAME,
-    'name': OBJECT_NAME,
-    'content_type': 'application/octet-stream',
-    'headers': {
-        'content-length': '252466',
-        'accept-ranges': 'bytes',
-        'last-modified': 'Sun, 13 Jul 2014 18:41:04 GMT',
-        'etag': '243f87b91224d85722564a80fd3cb1f1',
-        'x-timestamp': '1453414256.28112',
-        'date': 'Thu, 28 Aug 2014 14:41:59 GMT',
-        'id': 'tx5fb5ad4f4d0846c6b2bc7-0053ff3fb7',
-        'x-delete-at': '1453416226.16744'
-    }
-}


-class TestObject(testtools.TestCase):
+class TestObject(base_test_object.BaseTestObject):

     def setUp(self):
         super(TestObject, self).setUp()
-        self.resp = mock.Mock()
-        self.resp.content = "lol here's some content"
-        self.resp.headers = {"X-Trans-Id": "abcdef"}
-        self.sess = mock.Mock()
-        self.sess.get = mock.Mock(return_value=self.resp)
-        self.sess.put = mock.Mock(return_value=self.resp)
-        self.sess.post = mock.Mock(return_value=self.resp)
+        self.the_data = b'test body'
+        self.the_data_length = len(self.the_data)
+        # TODO(mordred) Make the_data be from getUniqueString and then
+        # have hash and etag be actual md5 sums of that string
+        self.body = {
+            "hash": "243f87b91224d85722564a80fd3cb1f1",
+            "last_modified": "2014-07-13T18:41:03.319240",
+            "bytes": self.the_data_length,
+            "name": self.object,
+            "content_type": "application/octet-stream"
+        }
+        self.headers = {
+            'Content-Length': str(len(self.the_data)),
+            'Content-Type': 'application/octet-stream',
+            'Accept-Ranges': 'bytes',
+            'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
+            'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
+            'X-Timestamp': '1481808853.65009',
+            'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
+            'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
+            'X-Static-Large-Object': 'True',
+            'X-Object-Meta-Mtime': '1481513709.168512',
+            'X-Delete-At': '1453416226.16744',
+        }

     def test_basic(self):
-        sot = obj.Object.new(**OBJ_EXAMPLE)
+        sot = obj.Object.new(**self.body)
+        self.assert_no_calls()
         self.assertIsNone(sot.resources_key)
-        self.assertEqual("name", sot.id_attribute)
+        self.assertEqual('name', sot._alternate_id())
         self.assertEqual('/%(container)s', sot.base_path)
         self.assertEqual('object-store', sot.service.service_type)
         self.assertTrue(sot.allow_update)
         self.assertTrue(sot.allow_create)
-        self.assertTrue(sot.allow_retrieve)
+        self.assertTrue(sot.allow_get)
         self.assertTrue(sot.allow_delete)
         self.assertTrue(sot.allow_list)
         self.assertTrue(sot.allow_head)

     def test_new(self):
-        sot = obj.Object.new(container=CONTAINER_NAME, name=OBJECT_NAME)
-        self.assertEqual(OBJECT_NAME, sot.name)
-        self.assertEqual(CONTAINER_NAME, sot.container)
+        sot = obj.Object.new(container=self.container, name=self.object)
+        self.assert_no_calls()
+        self.assertEqual(self.object, sot.name)
+        self.assertEqual(self.container, sot.container)

-    def test_head(self):
-        sot = obj.Object.existing(**DICT_EXAMPLE)
+    def test_from_body(self):
+        sot = obj.Object.existing(container=self.container, **self.body)
+        self.assert_no_calls()

         # Attributes from header
-        self.assertEqual(DICT_EXAMPLE['container'], sot.container)
-        headers = DICT_EXAMPLE['headers']
-        self.assertEqual(headers['content-length'], sot.content_length)
-        self.assertEqual(headers['accept-ranges'], sot.accept_ranges)
-        self.assertEqual(headers['last-modified'], sot.last_modified_at)
-        self.assertEqual(headers['etag'], sot.etag)
-        self.assertEqual(headers['x-timestamp'], sot.timestamp)
-        self.assertEqual(headers['content-type'], sot.content_type)
-        self.assertEqual(headers['x-delete-at'], sot.delete_at)
+        self.assertEqual(self.container, sot.container)
+        self.assertEqual(
+            int(self.body['bytes']), sot.content_length)
+        self.assertEqual(self.body['last_modified'], sot.last_modified_at)
+        self.assertEqual(self.body['hash'], sot.etag)
+        self.assertEqual(self.body['content_type'], sot.content_type)

-    def test_get(self):
-        sot = obj.Object.new(container=CONTAINER_NAME, name=OBJECT_NAME)
+    def test_from_headers(self):
+        sot = obj.Object.existing(container=self.container, **self.headers)
+        self.assert_no_calls()
+
+        # Attributes from header
+        self.assertEqual(self.container, sot.container)
+        self.assertEqual(
+            int(self.headers['Content-Length']), sot.content_length)
+        self.assertEqual(self.headers['Accept-Ranges'], sot.accept_ranges)
+        self.assertEqual(self.headers['Last-Modified'], sot.last_modified_at)
+        self.assertEqual(self.headers['Etag'], sot.etag)
+        self.assertEqual(self.headers['X-Timestamp'], sot.timestamp)
+        self.assertEqual(self.headers['Content-Type'], sot.content_type)
+        self.assertEqual(self.headers['X-Delete-At'], sot.delete_at)
+
+    def test_download(self):
+        headers = {
+            'X-Newest': 'True',
+            'If-Match': self.headers['Etag'],
+            'Accept': 'bytes'
+        }
+        self.register_uris([
+            dict(method='GET', uri=self.object_endpoint,
+                 headers=self.headers,
+                 content=self.the_data,
+                 validate=dict(
+                     headers=headers
+                 ))
+        ])
+        sot = obj.Object.new(container=self.container, name=self.object)
         sot.is_newest = True
-        sot.if_match = {"who": "what"}
+        sot.if_match = [self.headers['Etag']]

-        rv = sot.get(self.sess)
+        rv = sot.download(self.conn.object_store)

-        url = "%s/%s" % (CONTAINER_NAME, OBJECT_NAME)
-        # TODO(thowe): Should allow filtering bug #1488269
-        # headers = {
-        #     "x-newest": True,
-        #     "if-match": {"who": "what"}
-        # }
-        headers = {'Accept': 'bytes'}
-        self.sess.get.assert_called_with(url,
-                                         headers=headers,
-                                         error_message=None)
-        self.assertEqual(self.resp.content, rv)
+        self.assertEqual(self.the_data, rv)
+        self.assert_calls()

-    def _test_create(self, method, data, accept):
-        sot = obj.Object.new(container=CONTAINER_NAME, name=OBJECT_NAME,
+    def _test_create(self, method, data):
+        sot = obj.Object.new(container=self.container, name=self.object,
                              data=data)
         sot.is_newest = True
-        headers = {"x-newest": True, "Accept": ""}
+        sent_headers = {"x-newest": 'True', "Accept": ""}
+        self.register_uris([
+            dict(method=method, uri=self.object_endpoint,
+                 headers=self.headers,
+                 validate=dict(
+                     headers=sent_headers))
+        ])

-        rv = sot.create(self.sess)
+        rv = sot.create(self.conn.object_store)
+        self.assertEqual(rv.etag, self.headers['Etag'])

-        url = "%s/%s" % (CONTAINER_NAME, OBJECT_NAME)
-        method.assert_called_with(url, data=data,
-                                  headers=headers)
-        self.assertEqual(self.resp.headers, rv.get_headers())
+        self.assert_calls()

     def test_create_data(self):
-        self._test_create(self.sess.put, "data", "bytes")
+        self._test_create('PUT', self.the_data)

     def test_create_no_data(self):
-        self._test_create(self.sess.post, None, None)
+        self._test_create('PUT', None)
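
Outside the test harness, the create path exercised above maps to a couple of user-facing calls; a sketch assuming an authenticated Connection named conn and an existing container (names are illustrative):

    from openstack.object_store.v1 import obj

    # Resource-level: build the Object and PUT it through the proxy.
    sot = obj.Object.new(container='my-folder', name='my-file',
                         data=b'test body')
    sot = sot.create(conn.object_store)

    # Proxy-level equivalent, matching the upload_object proxy tests below.
    ob = conn.object_store.upload_object(
        container='my-folder', name='my-file', data=b'test body')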


@@ -10,17 +10,19 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-import mock
 import six

 from openstack.object_store.v1 import _proxy
 from openstack.object_store.v1 import account
 from openstack.object_store.v1 import container
 from openstack.object_store.v1 import obj
-from openstack.tests.unit import test_proxy_base
+from openstack.tests.unit.cloud import test_object as base_test_object
+from openstack.tests.unit import test_proxy_base2


-class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
+class TestObjectStoreProxy(test_proxy_base2.TestProxyBase):
+
+    kwargs_to_path_args = False

     def setUp(self):
         super(TestObjectStoreProxy, self).setUp()
@@ -42,21 +44,26 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
                            container.Container, True)

     def test_container_create_attrs(self):
-        self.verify_create(self.proxy.create_container, container.Container)
+        self.verify_create(
+            self.proxy.create_container,
+            container.Container,
+            method_args=['container_name'],
+            expected_kwargs={'name': 'container_name', "x": 1, "y": 2, "z": 3})

     def test_object_metadata_get(self):
         self.verify_head(self.proxy.get_object_metadata, obj.Object,
                          value="object", container="container")

     def _test_object_delete(self, ignore):
-        expected_kwargs = {"path_args": {"container": "name"}}
-        expected_kwargs["ignore_missing"] = ignore
+        expected_kwargs = {
+            "ignore_missing": ignore,
+            "container": "name",
+        }

-        self._verify2("openstack.proxy.BaseProxy._delete",
+        self._verify2("openstack.proxy2.BaseProxy._delete",
                       self.proxy.delete_object,
                       method_args=["resource"],
-                      method_kwargs={"container": "name",
-                                     "ignore_missing": ignore},
+                      method_kwargs=expected_kwargs,
                       expected_args=[obj.Object, "resource"],
                       expected_kwargs=expected_kwargs)
@@ -67,25 +74,24 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
         self._test_object_delete(True)

     def test_object_create_attrs(self):
-        path_args = {"path_args": {"container": "name"}}
-        method_kwargs = {"name": "test", "data": "data", "container": "name"}
-        expected_kwargs = path_args.copy()
-        expected_kwargs.update(method_kwargs)
-        expected_kwargs.pop("container")
-        self._verify2("openstack.proxy.BaseProxy._create",
+        kwargs = {"name": "test", "data": "data", "container": "name"}
+        self._verify2("openstack.proxy2.BaseProxy._create",
                       self.proxy.upload_object,
-                      method_kwargs=method_kwargs,
+                      method_kwargs=kwargs,
                       expected_args=[obj.Object],
-                      expected_kwargs=expected_kwargs)
+                      expected_kwargs=kwargs)

     def test_object_create_no_container(self):
-        self.assertRaises(ValueError, self.proxy.upload_object)
+        self.assertRaises(TypeError, self.proxy.upload_object)

     def test_object_get(self):
-        self.verify_get(self.proxy.get_object, obj.Object,
-                        value=["object"], container="container")
+        kwargs = dict(container="container")
+        self.verify_get(
+            self.proxy.get_object, obj.Object,
+            value=["object"],
+            method_kwargs=kwargs,
+            expected_kwargs=kwargs)


 class Test_containers(TestObjectStoreProxy):
@@ -252,23 +258,45 @@ class Test_objects(TestObjectStoreProxy):
     #                  httpretty.last_request().path)


-class Test_download_object(TestObjectStoreProxy):
+class Test_download_object(base_test_object.BaseTestObject):

-    @mock.patch("openstack.object_store.v1._proxy.Proxy.get_object")
-    def test_download(self, mock_get):
-        the_data = "here's some data"
-        mock_get.return_value = the_data
-        ob = mock.Mock()
+    def setUp(self):
+        super(Test_download_object, self).setUp()
+        self.the_data = b'test body'
+        self.register_uris([
+            dict(method='GET', uri=self.object_endpoint,
+                 headers={
+                     'Content-Length': str(len(self.the_data)),
+                     'Content-Type': 'application/octet-stream',
+                     'Accept-Ranges': 'bytes',
+                     'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
+                     'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
+                     'X-Timestamp': '1481808853.65009',
+                     'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
+                     'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
+                     'X-Static-Large-Object': 'True',
+                     'X-Object-Meta-Mtime': '1481513709.168512',
+                 },
+                 content=self.the_data)])

-        fake_open = mock.mock_open()
-        file_path = "blarga/somefile"
-        with mock.patch("openstack.object_store.v1._proxy.open",
-                        fake_open, create=True):
-            self.proxy.download_object(ob, container="tainer", path=file_path)
+    def test_download(self):
+        data = self.conn.object_store.download_object(
+            self.object, container=self.container)

-        fake_open.assert_called_once_with(file_path, "w")
-        fake_handle = fake_open()
-        fake_handle.write.assert_called_once_with(the_data)
+        self.assertEqual(data, self.the_data)
+        self.assert_calls()
+
+    def test_stream(self):
+        chunk_size = 2
+        for index, chunk in enumerate(self.conn.object_store.stream_object(
+                self.object, container=self.container,
+                chunk_size=chunk_size)):
+            chunk_len = len(chunk)
+            start = index * chunk_size
+            end = start + chunk_len
+            self.assertLessEqual(chunk_len, chunk_size)
+            self.assertEqual(chunk, self.the_data[start:end])
+        self.assert_calls()


 class Test_copy_object(TestObjectStoreProxy):
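
The streaming path added in Test_download_object.test_stream looks like this from user code; a sketch assuming an authenticated Connection named conn, with an illustrative container, object, and output path:

    # Stream an object in fixed-size chunks instead of buffering it whole.
    with open('/tmp/my-file', 'wb') as out:
        for chunk in conn.object_store.stream_object(
                'my-file', container='my-folder', chunk_size=64 * 1024):
            out.write(chunk)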


@@ -16,6 +16,11 @@ from openstack.tests.unit import base

 class TestProxyBase(base.TestCase):

+    # object_store makes calls with container= rather than
+    # path_args=dict(container= because container needs to wind up
+    # in the uri components.
+    kwargs_to_path_args = True
+
     def setUp(self):
         super(TestProxyBase, self).setUp()
         self.session = mock.Mock()
@@ -131,7 +136,7 @@ class TestProxyBase(base.TestCase):
         method_kwargs = kwargs.pop("method_kwargs", kwargs)
         if args:
             expected_kwargs["args"] = args
-        if kwargs:
+        if kwargs and self.kwargs_to_path_args:
             expected_kwargs["path_args"] = kwargs
         if not expected_args:
             expected_args = [resource_type] + the_value
@@ -145,7 +150,10 @@ class TestProxyBase(base.TestCase):
                     mock_method="openstack.proxy2.BaseProxy._head",
                     value=None, **kwargs):
         the_value = [value] if value is not None else []
-        expected_kwargs = {"path_args": kwargs} if kwargs else {}
+        if self.kwargs_to_path_args:
+            expected_kwargs = {"path_args": kwargs} if kwargs else {}
+        else:
+            expected_kwargs = kwargs or {}
         self._verify2(mock_method, test_method,
                       method_args=the_value,
                       method_kwargs=kwargs,


@@ -852,10 +852,9 @@ class TestResource(base.TestCase):
         class Test(resource2.Resource):
             attr = resource2.Header("attr")

-        response = FakeResponse({})
+        response = FakeResponse({}, headers={"attr": "value"})

         sot = Test()
-        sot._filter_component = mock.Mock(return_value={"attr": "value"})

         sot._translate_response(response, has_body=False)
@@ -1036,7 +1035,8 @@ class TestResourceActions(base.TestCase):
             self.request.url,
             headers={"Accept": ""})
-        self.sot._translate_response.assert_called_once_with(self.response)
+        self.sot._translate_response.assert_called_once_with(
+            self.response, has_body=False)
         self.assertEqual(result, self.sot)

     def _test_update(self, update_method='PUT', prepend_key=True,