Migrate object_store to resource2/proxy2
This is the last thing we need to migrate before deleting the old
resource/proxy class. There's a good pile of things related to uploading
and downloading we should port over from the openstack.cloud code. One of
them, streaming the object data, was added here. We can come back around
and get all of the large-object upload code supported.

Adds alias support into _BaseComponent, much like prop used to have,
because Swift needs it.

Port some of the unittests to requests-mock because it was just easier to
do that. Doing so uncovered some issues with case-sensitivity and headers.
As a result, there are some changes to resource2.Resource to get case
sensitivity sorted out.

TODO comments have been left indicating a few places for further cleanup,
but those are internal and non-essential.

The default value of has_body is changed to False for head calls.
Because. Well. Let's be honest. It's HEAD. There is no body. By definition.

Change-Id: I8c4f18f78a77149e23b98f78af82b1d25ab7c4cf
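For reference, a rough end-to-end sketch of how the reworked proxy calls
are meant to be used (the cloud name, container and object names are
illustrative; create_object and download_object are the methods added or
renamed in this change, and the payload rides on the Object resource's
data attribute):

    import openstack.connection

    # 'example' refers to a clouds.yaml entry; nothing has to be named
    # this way.
    conn = openstack.connection.from_config(cloud_name='example')
    ostore = conn.object_store

    # PUT a new object; upload_object remains as a backwards-compat alias.
    ostore.create_object('backups', 'notes.txt', data=b'some bytes')

    # Fetch the whole payload back into memory. stream_object() (shown
    # further down) is the chunked alternative.
    payload = ostore.download_object('notes.txt', container='backups')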
@@ -29,7 +29,7 @@ from openstack import exceptions
from openstack import task_manager as _task_manager


def _extract_name(url):
def _extract_name(url, service_type=None):
'''Produce a key name to use in logging/metrics from the URL path.

We want to be able to logic/metric sane general things, so we pull
@@ -81,7 +81,10 @@ def _extract_name(url):

# Getting the root of an endpoint is doing version discovery
if not name_parts:
name_parts = ['discovery']
if service_type == 'object-store':
name_parts = ['account']
else:
name_parts = ['discovery']

# Strip out anything that's empty or None
return [part for part in name_parts if part]
@@ -124,8 +127,14 @@ class OpenStackSDKAdapter(adapter.Adapter):
def request(
self, url, method, run_async=False, error_message=None,
raise_exc=False, connect_retries=1, *args, **kwargs):
name_parts = _extract_name(url)
name = '.'.join([self.service_type, method] + name_parts)
name_parts = _extract_name(url, self.service_type)
# TODO(mordred) This if is in service of unit tests that are making
# calls without a service_type. It should be fixable once we shift
# to requests-mock and stop mocking internals.
if self.service_type:
name = '.'.join([self.service_type, method] + name_parts)
else:
name = '.'.join([method] + name_parts)

request_method = functools.partial(
super(OpenStackSDKAdapter, self).request, url, method)
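To make the motivation for threading service_type through concrete, the
new branch should report roughly the following (inferred from the code
above, not captured output):

    # GET on the bare endpoint of most services is version discovery:
    #   _extract_name('/', service_type='compute') -> ['discovery']
    #   metric/log name: 'compute.GET.discovery'
    #
    # Swift's endpoint root is the account itself, so object-store now
    # reports:
    #   _extract_name('/', service_type='object-store') -> ['account']
    #   metric/log name: 'object-store.GET.account'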
@@ -11,13 +11,17 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from openstack import exceptions
|
||||
from openstack.object_store import object_store_service
|
||||
from openstack import resource
|
||||
from openstack import resource2 as resource
|
||||
|
||||
|
||||
class BaseResource(resource.Resource):
|
||||
service = object_store_service.ObjectStoreService()
|
||||
|
||||
update_method = 'POST'
|
||||
create_method = 'PUT'
|
||||
|
||||
#: Metadata stored for this resource. *Type: dict*
|
||||
metadata = dict()
|
||||
|
||||
@@ -25,7 +29,7 @@ class BaseResource(resource.Resource):
|
||||
_system_metadata = dict()
|
||||
|
||||
def _calculate_headers(self, metadata):
|
||||
headers = dict()
|
||||
headers = {}
|
||||
for key in metadata:
|
||||
if key in self._system_metadata.keys():
|
||||
header = self._system_metadata[key]
|
||||
@@ -40,52 +44,34 @@ class BaseResource(resource.Resource):
|
||||
return headers
|
||||
|
||||
def set_metadata(self, session, metadata):
|
||||
url = self._get_url(self, self.id)
|
||||
session.post(url,
|
||||
headers=self._calculate_headers(metadata))
|
||||
request = self._prepare_request()
|
||||
response = session.post(
|
||||
request.url,
|
||||
headers=self._calculate_headers(metadata))
|
||||
self._translate_response(response, has_body=False)
|
||||
response = session.head(request.url)
|
||||
self._translate_response(response, has_body=False)
|
||||
return self
|
||||
|
||||
def delete_metadata(self, session, keys):
|
||||
url = self._get_url(self, self.id)
|
||||
request = self._prepare_request()
|
||||
headers = {key: '' for key in keys}
|
||||
session.post(url,
|
||||
headers=self._calculate_headers(headers))
|
||||
response = session.post(
|
||||
request.url,
|
||||
headers=self._calculate_headers(headers))
|
||||
exceptions.raise_from_response(
|
||||
response, error_message="Error deleting metadata keys")
|
||||
return self
|
||||
|
||||
def _set_metadata(self):
|
||||
def _set_metadata(self, headers):
|
||||
self.metadata = dict()
|
||||
headers = self.get_headers()
|
||||
|
||||
for header in headers:
|
||||
if header.startswith(self._custom_metadata_prefix):
|
||||
key = header[len(self._custom_metadata_prefix):].lower()
|
||||
self.metadata[key] = headers[header]
|
||||
|
||||
def get(self, session, include_headers=False, args=None):
|
||||
super(BaseResource, self).get(session, include_headers, args)
|
||||
self._set_metadata()
|
||||
return self
|
||||
|
||||
def head(self, session):
|
||||
super(BaseResource, self).head(session)
|
||||
self._set_metadata()
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def update_by_id(cls, session, resource_id, attrs, path_args=None):
|
||||
"""Update a Resource with the given attributes.
|
||||
|
||||
:param session: The session to use for making this request.
|
||||
:type session: :class:`~keystoneauth1.adapter.Adapter`
|
||||
:param resource_id: This resource's identifier, if needed by
|
||||
the request. The default is ``None``.
|
||||
:param dict attrs: The attributes to be sent in the body
|
||||
of the request.
|
||||
:param dict path_args: This parameter is sent by the base
|
||||
class but is ignored for this method.
|
||||
|
||||
:return: A ``dict`` representing the response headers.
|
||||
"""
|
||||
url = cls._get_url(None, resource_id)
|
||||
headers = attrs.get(resource.HEADERS, dict())
|
||||
headers['Accept'] = ''
|
||||
return session.post(url,
|
||||
headers=headers).headers
|
||||
def _translate_response(self, response, has_body=None, error_message=None):
|
||||
super(BaseResource, self)._translate_response(
|
||||
response, has_body=has_body, error_message=error_message)
|
||||
self._set_metadata(response.headers)
|
||||
|
@@ -13,11 +13,15 @@
|
||||
from openstack.object_store.v1 import account as _account
|
||||
from openstack.object_store.v1 import container as _container
|
||||
from openstack.object_store.v1 import obj as _obj
|
||||
from openstack import proxy
|
||||
from openstack import proxy2 as proxy
|
||||
|
||||
|
||||
class Proxy(proxy.BaseProxy):
|
||||
|
||||
Account = _account.Account
|
||||
Container = _container.Container
|
||||
Object = _obj.Object
|
||||
|
||||
def get_account_metadata(self):
|
||||
"""Get metadata for this account.
|
||||
|
||||
@@ -54,11 +58,12 @@ class Proxy(proxy.BaseProxy):
|
||||
:rtype: A generator of
|
||||
:class:`~openstack.object_store.v1.container.Container` objects.
|
||||
"""
|
||||
return _container.Container.list(self, **query)
|
||||
return self._list(_container.Container, paginated=True, **query)
|
||||
|
||||
def create_container(self, **attrs):
|
||||
def create_container(self, name, **attrs):
|
||||
"""Create a new container from attributes
|
||||
|
||||
:param container: Name of the container to create.
|
||||
:param dict attrs: Keyword arguments which will be used to create
|
||||
a :class:`~openstack.object_store.v1.container.Container`,
|
||||
comprised of the properties on the Container class.
|
||||
@@ -66,7 +71,7 @@ class Proxy(proxy.BaseProxy):
|
||||
:returns: The results of container creation
|
||||
:rtype: :class:`~openstack.object_store.v1.container.Container`
|
||||
"""
|
||||
return self._create(_container.Container, **attrs)
|
||||
return self._create(_container.Container, name=name, **attrs)
|
||||
|
||||
def delete_container(self, container, ignore_missing=True):
|
||||
"""Delete a container
|
||||
@@ -122,6 +127,7 @@ class Proxy(proxy.BaseProxy):
|
||||
"""
|
||||
res = self._get_resource(_container.Container, container)
|
||||
res.set_metadata(self, metadata)
|
||||
return res
|
||||
|
||||
def delete_container_metadata(self, container, keys):
|
||||
"""Delete metadata for a container.
|
||||
@@ -133,6 +139,7 @@ class Proxy(proxy.BaseProxy):
|
||||
"""
|
||||
res = self._get_resource(_container.Container, container)
|
||||
res.delete_metadata(self, keys)
|
||||
return res
|
||||
|
||||
def objects(self, container, **query):
|
||||
"""Return a generator that yields the Container's objects.
|
||||
@@ -147,21 +154,21 @@ class Proxy(proxy.BaseProxy):
|
||||
:rtype: A generator of
|
||||
:class:`~openstack.object_store.v1.obj.Object` objects.
|
||||
"""
|
||||
container = _container.Container.from_id(container)
|
||||
container = self._get_container_name(container=container)
|
||||
|
||||
objs = _obj.Object.list(self,
|
||||
path_args={"container": container.name},
|
||||
**query)
|
||||
for obj in objs:
|
||||
obj.container = container.name
|
||||
for obj in self._list(
|
||||
_obj.Object, container=container,
|
||||
paginated=True, **query):
|
||||
obj.container = container
|
||||
yield obj
|
||||
|
||||
def _get_container_name(self, obj, container):
|
||||
if isinstance(obj, _obj.Object):
|
||||
def _get_container_name(self, obj=None, container=None):
|
||||
if obj is not None:
|
||||
obj = self._get_resource(_obj.Object, obj)
|
||||
if obj.container is not None:
|
||||
return obj.container
|
||||
if container is not None:
|
||||
container = _container.Container.from_id(container)
|
||||
container = self._get_resource(_container.Container, container)
|
||||
return container.name
|
||||
|
||||
raise ValueError("container must be specified")
|
||||
@@ -181,52 +188,69 @@ class Proxy(proxy.BaseProxy):
|
||||
:raises: :class:`~openstack.exceptions.ResourceNotFound`
|
||||
when no resource can be found.
|
||||
"""
|
||||
# TODO(briancurtin): call this download_object and make sure it's
|
||||
# just returning the raw data, like download_image does
|
||||
container_name = self._get_container_name(obj, container)
|
||||
container_name = self._get_container_name(
|
||||
obj=obj, container=container)
|
||||
return self._get(_obj.Object, obj, container=container_name)
|
||||
|
||||
return self._get(_obj.Object, obj,
|
||||
path_args={"container": container_name})
|
||||
|
||||
def download_object(self, obj, container=None, path=None):
|
||||
"""Download the data contained inside an object to disk.
|
||||
def download_object(self, obj, container=None, **attrs):
|
||||
"""Download the data contained inside an object.
|
||||
|
||||
:param obj: The value can be the name of an object or a
|
||||
:class:`~openstack.object_store.v1.obj.Object` instance.
|
||||
:param container: The value can be the name of a container or a
|
||||
:class:`~openstack.object_store.v1.container.Container`
|
||||
instance.
|
||||
:param path str: Location to write the object contents.
|
||||
|
||||
:raises: :class:`~openstack.exceptions.ResourceNotFound`
|
||||
when no resource can be found.
|
||||
"""
|
||||
# TODO(briancurtin): download_object should really have the behavior
|
||||
# of get_object, and this writing to a file should not exist.
|
||||
# TODO(briancurtin): This method should probably offload the get
|
||||
# operation into another thread or something of that nature.
|
||||
with open(path, "w") as out:
|
||||
out.write(self.get_object(obj, container))
|
||||
container_name = self._get_container_name(
|
||||
obj=obj, container=container)
|
||||
obj = self._get_resource(
|
||||
_obj.Object, obj, container=container_name, **attrs)
|
||||
return obj.download(self)
|
||||
|
||||
def upload_object(self, **attrs):
|
||||
def stream_object(self, obj, container=None, chunk_size=1024, **attrs):
|
||||
"""Stream the data contained inside an object.
|
||||
|
||||
:param obj: The value can be the name of an object or a
|
||||
:class:`~openstack.object_store.v1.obj.Object` instance.
|
||||
:param container: The value can be the name of a container or a
|
||||
:class:`~openstack.object_store.v1.container.Container`
|
||||
instance.
|
||||
|
||||
:raises: :class:`~openstack.exceptions.ResourceNotFound`
|
||||
when no resource can be found.
|
||||
:returns: An iterator that iterates over chunk_size bytes
|
||||
"""
|
||||
container_name = self._get_container_name(
|
||||
obj=obj, container=container)
|
||||
container_name = self._get_container_name(container=container)
|
||||
obj = self._get_resource(
|
||||
_obj.Object, obj, container=container_name, **attrs)
|
||||
return obj.stream(self, chunk_size=chunk_size)
|
||||
|
||||
def create_object(self, container, name, **attrs):
|
||||
"""Upload a new object from attributes
|
||||
|
||||
:param container: The value can be the name of a container or a
|
||||
:class:`~openstack.object_store.v1.container.Container`
|
||||
instance.
|
||||
:param name: Name of the object to create.
|
||||
:param dict attrs: Keyword arguments which will be used to create
|
||||
a :class:`~openstack.object_store.v1.obj.Object`,
|
||||
comprised of the properties on the Object class.
|
||||
**Required**: A `container` argument must be specified,
|
||||
which is either the ID of a container or a
|
||||
:class:`~openstack.object_store.v1.container.Container`
|
||||
instance.
|
||||
|
||||
:returns: The results of object creation
|
||||
:rtype: :class:`~openstack.object_store.v1.container.Container`
|
||||
"""
|
||||
container = attrs.pop("container", None)
|
||||
container_name = self._get_container_name(None, container)
|
||||
|
||||
return self._create(_obj.Object,
|
||||
path_args={"container": container_name}, **attrs)
|
||||
# TODO(mordred) Add ability to stream data from a file
|
||||
# TODO(mordred) Use create_object from OpenStackCloud
|
||||
container_name = self._get_container_name(container=container)
|
||||
return self._create(
|
||||
_obj.Object, container=container_name, name=name, **attrs)
|
||||
# Backwards compat
|
||||
upload_object = create_object
|
||||
|
||||
def copy_object(self):
|
||||
"""Copy an object."""
|
||||
@@ -252,7 +276,7 @@ class Proxy(proxy.BaseProxy):
|
||||
container_name = self._get_container_name(obj, container)
|
||||
|
||||
self._delete(_obj.Object, obj, ignore_missing=ignore_missing,
|
||||
path_args={"container": container_name})
|
||||
container=container_name)
|
||||
|
||||
def get_object_metadata(self, obj, container=None):
|
||||
"""Get metadata for an object.
|
||||
@@ -269,8 +293,7 @@ class Proxy(proxy.BaseProxy):
|
||||
"""
|
||||
container_name = self._get_container_name(obj, container)
|
||||
|
||||
return self._head(_obj.Object, obj,
|
||||
path_args={"container": container_name})
|
||||
return self._head(_obj.Object, obj, container=container_name)
|
||||
|
||||
def set_object_metadata(self, obj, container=None, **metadata):
|
||||
"""Set metadata for an object.
|
||||
@@ -298,9 +321,9 @@ class Proxy(proxy.BaseProxy):
|
||||
- `is_content_type_detected`
|
||||
"""
|
||||
container_name = self._get_container_name(obj, container)
|
||||
res = self._get_resource(_obj.Object, obj,
|
||||
path_args={"container": container_name})
|
||||
res = self._get_resource(_obj.Object, obj, container=container_name)
|
||||
res.set_metadata(self, metadata)
|
||||
return res
|
||||
|
||||
def delete_object_metadata(self, obj, container=None, keys=None):
|
||||
"""Delete metadata for an object.
|
||||
@@ -313,6 +336,6 @@ class Proxy(proxy.BaseProxy):
|
||||
:param keys: The keys of metadata to be deleted.
|
||||
"""
|
||||
container_name = self._get_container_name(obj, container)
|
||||
res = self._get_resource(_obj.Object, obj,
|
||||
path_args={"container": container_name})
|
||||
res = self._get_resource(_obj.Object, obj, container=container_name)
|
||||
res.delete_metadata(self, keys)
|
||||
return res
|
||||
|
@@ -12,7 +12,7 @@
|
||||
# under the License.
|
||||
|
||||
from openstack.object_store.v1 import _base
|
||||
from openstack import resource
|
||||
from openstack import resource2 as resource
|
||||
|
||||
|
||||
class Account(_base.BaseResource):
|
||||
@@ -20,23 +20,26 @@ class Account(_base.BaseResource):
|
||||
|
||||
base_path = "/"
|
||||
|
||||
allow_retrieve = True
|
||||
allow_get = True
|
||||
allow_update = True
|
||||
allow_head = True
|
||||
|
||||
#: The total number of bytes that are stored in Object Storage for
|
||||
#: the account.
|
||||
account_bytes_used = resource.header("x-account-bytes-used", type=int)
|
||||
account_bytes_used = resource.Header("x-account-bytes-used", type=int)
|
||||
#: The number of containers.
|
||||
account_container_count = resource.header("x-account-container-count",
|
||||
account_container_count = resource.Header("x-account-container-count",
|
||||
type=int)
|
||||
#: The number of objects in the account.
|
||||
account_object_count = resource.header("x-account-object-count", type=int)
|
||||
account_object_count = resource.Header("x-account-object-count", type=int)
|
||||
#: The secret key value for temporary URLs. If not set,
|
||||
#: this header is not returned by this operation.
|
||||
meta_temp_url_key = resource.header("x-account-meta-temp-url-key")
|
||||
meta_temp_url_key = resource.Header("x-account-meta-temp-url-key")
|
||||
#: A second secret key value for temporary URLs. If not set,
|
||||
#: this header is not returned by this operation.
|
||||
meta_temp_url_key_2 = resource.header("x-account-meta-temp-url-key-2")
|
||||
meta_temp_url_key_2 = resource.Header("x-account-meta-temp-url-key-2")
|
||||
#: The timestamp of the transaction.
|
||||
timestamp = resource.header("x-timestamp")
|
||||
timestamp = resource.Header("x-timestamp")
|
||||
|
||||
has_body = False
|
||||
requires_id = False
|
||||
|
@@ -12,7 +12,7 @@
|
||||
# under the License.
|
||||
|
||||
from openstack.object_store.v1 import _base
|
||||
from openstack import resource
|
||||
from openstack import resource2 as resource
|
||||
|
||||
|
||||
class Container(_base.BaseResource):
|
||||
@@ -28,10 +28,10 @@ class Container(_base.BaseResource):
|
||||
}
|
||||
|
||||
base_path = "/"
|
||||
id_attribute = "name"
|
||||
pagination_key = 'X-Account-Container-Count'
|
||||
|
||||
allow_create = True
|
||||
allow_retrieve = True
|
||||
allow_get = True
|
||||
allow_update = True
|
||||
allow_delete = True
|
||||
allow_list = True
|
||||
@@ -39,20 +39,22 @@ class Container(_base.BaseResource):
|
||||
|
||||
# Container body data (when id=None)
|
||||
#: The name of the container.
|
||||
name = resource.prop("name")
|
||||
name = resource.Body("name", alternate_id=True, alias='id')
|
||||
#: The number of objects in the container.
|
||||
count = resource.prop("count")
|
||||
count = resource.Body("count", type=int, alias='object_count')
|
||||
#: The total number of bytes that are stored in Object Storage
|
||||
#: for the container.
|
||||
bytes = resource.prop("bytes")
|
||||
bytes = resource.Body("bytes", type=int, alias='bytes_used')
|
||||
|
||||
# Container metadata (when id=name)
|
||||
#: The number of objects.
|
||||
object_count = resource.header("x-container-object-count", type=int)
|
||||
object_count = resource.Header(
|
||||
"x-container-object-count", type=int, alias='count')
|
||||
#: The count of bytes used in total.
|
||||
bytes_used = resource.header("x-container-bytes-used", type=int)
|
||||
bytes_used = resource.Header(
|
||||
"x-container-bytes-used", type=int, alias='bytes')
|
||||
#: The timestamp of the transaction.
|
||||
timestamp = resource.header("x-timestamp")
|
||||
timestamp = resource.Header("x-timestamp")
|
||||
|
||||
# Request headers (when id=None)
|
||||
#: If set to True, Object Storage queries all replicas to return the
|
||||
@@ -60,66 +62,66 @@ class Container(_base.BaseResource):
|
||||
#: faster after it finds one valid replica. Because setting this
|
||||
#: header to True is more expensive for the back end, use it only
|
||||
#: when it is absolutely needed. *Type: bool*
|
||||
is_newest = resource.header("x-newest", type=bool)
|
||||
is_newest = resource.Header("x-newest", type=bool)
|
||||
|
||||
# Request headers (when id=name)
|
||||
#: The ACL that grants read access. If not set, this header is not
|
||||
#: returned by this operation.
|
||||
read_ACL = resource.header("x-container-read")
|
||||
read_ACL = resource.Header("x-container-read")
|
||||
#: The ACL that grants write access. If not set, this header is not
|
||||
#: returned by this operation.
|
||||
write_ACL = resource.header("x-container-write")
|
||||
write_ACL = resource.Header("x-container-write")
|
||||
#: The destination for container synchronization. If not set,
|
||||
#: this header is not returned by this operation.
|
||||
sync_to = resource.header("x-container-sync-to")
|
||||
sync_to = resource.Header("x-container-sync-to")
|
||||
#: The secret key for container synchronization. If not set,
|
||||
#: this header is not returned by this operation.
|
||||
sync_key = resource.header("x-container-sync-key")
|
||||
sync_key = resource.Header("x-container-sync-key")
|
||||
#: Enables versioning on this container. The value is the name
|
||||
#: of another container. You must UTF-8-encode and then URL-encode
|
||||
#: the name before you include it in the header. To disable
|
||||
#: versioning, set the header to an empty string.
|
||||
versions_location = resource.header("x-versions-location")
|
||||
versions_location = resource.Header("x-versions-location")
|
||||
#: The MIME type of the list of names.
|
||||
content_type = resource.header("content-type")
|
||||
content_type = resource.Header("content-type")
|
||||
#: If set to true, Object Storage guesses the content type based
|
||||
#: on the file extension and ignores the value sent in the
|
||||
#: Content-Type header, if present. *Type: bool*
|
||||
is_content_type_detected = resource.header("x-detect-content-type",
|
||||
is_content_type_detected = resource.Header("x-detect-content-type",
|
||||
type=bool)
|
||||
# TODO(mordred) Shouldn't if-none-match be handled more systemically?
|
||||
#: In combination with Expect: 100-Continue, specify an
|
||||
#: "If-None-Match: \*" header to query whether the server already
|
||||
#: has a copy of the object before any data is sent.
|
||||
if_none_match = resource.header("if-none-match")
|
||||
if_none_match = resource.Header("if-none-match")
|
||||
|
||||
@classmethod
|
||||
def create_by_id(cls, session, attrs, resource_id=None):
|
||||
"""Create a Resource from its attributes.
|
||||
def new(cls, **kwargs):
|
||||
# Container uses name as id. Proxy._get_resource calls
|
||||
# Resource.new(id=name) but then we need to do container.name
|
||||
# It's the same thing for Container - make it be the same.
|
||||
name = kwargs.pop('id', None)
|
||||
if name:
|
||||
kwargs.setdefault('name', name)
|
||||
return Container(_synchronized=True, **kwargs)
|
||||
|
||||
def create(self, session, prepend_key=True):
|
||||
"""Create a remote resource based on this instance.
|
||||
|
||||
:param session: The session to use for making this request.
|
||||
:type session: :class:`~keystoneauth1.adapter.Adapter`
|
||||
:param dict attrs: The attributes to be sent in the body
|
||||
of the request.
|
||||
:param resource_id: This resource's identifier, if needed by
|
||||
the request. The default is ``None``.
|
||||
:param prepend_key: A boolean indicating whether the resource_key
|
||||
should be prepended in a resource creation
|
||||
request. Default to True.
|
||||
|
||||
:return: A ``dict`` representing the response headers.
|
||||
:return: This :class:`Resource` instance.
|
||||
:raises: :exc:`~openstack.exceptions.MethodNotSupported` if
|
||||
:data:`Resource.allow_create` is not set to ``True``.
|
||||
"""
|
||||
url = cls._get_url(None, resource_id)
|
||||
headers = attrs.get(resource.HEADERS, dict())
|
||||
headers['Accept'] = ''
|
||||
return session.put(url,
|
||||
headers=headers).headers
|
||||
request = self._prepare_request(
|
||||
requires_id=True, prepend_key=prepend_key)
|
||||
response = session.put(
|
||||
request.url, json=request.body, headers=request.headers)
|
||||
|
||||
def create(self, session):
|
||||
"""Create a Resource from this instance.
|
||||
|
||||
:param session: The session to use for making this request.
|
||||
:type session: :class:`~keystoneauth1.adapter.Adapter`
|
||||
|
||||
:return: This instance.
|
||||
"""
|
||||
resp = self.create_by_id(session, self._attrs, self.id)
|
||||
self.set_headers(resp)
|
||||
self._reset_dirty()
|
||||
self._translate_response(response, has_body=False)
|
||||
return self
|
||||
|
@@ -13,9 +13,10 @@
|
||||
|
||||
import copy
|
||||
|
||||
from openstack import exceptions
|
||||
from openstack.object_store import object_store_service
|
||||
from openstack.object_store.v1 import _base
|
||||
from openstack import resource
|
||||
from openstack import resource2 as resource
|
||||
|
||||
|
||||
class Object(_base.BaseResource):
|
||||
@@ -30,28 +31,36 @@ class Object(_base.BaseResource):
|
||||
}
|
||||
|
||||
base_path = "/%(container)s"
|
||||
pagination_key = 'X-Container-Object-Count'
|
||||
service = object_store_service.ObjectStoreService()
|
||||
id_attribute = "name"
|
||||
|
||||
allow_create = True
|
||||
allow_retrieve = True
|
||||
allow_get = True
|
||||
allow_update = True
|
||||
allow_delete = True
|
||||
allow_list = True
|
||||
allow_head = True
|
||||
|
||||
# Data to be passed during a POST call to create an object on the server.
|
||||
# TODO(mordred) Make a base class BaseDataResource that can be used here
|
||||
# and with glance images that has standard overrides for dealing with
|
||||
# binary data.
|
||||
data = None
|
||||
|
||||
# URL parameters
|
||||
#: The unique name for the container.
|
||||
container = resource.prop("container")
|
||||
container = resource.URI("container")
|
||||
#: The unique name for the object.
|
||||
name = resource.prop("name")
|
||||
name = resource.Body("name", alternate_id=True)
|
||||
|
||||
# Object details
|
||||
hash = resource.prop("hash")
|
||||
bytes = resource.prop("bytes")
|
||||
# Make these private because they should only matter in the case where
|
||||
# we have a Body with no headers (like if someone programmatically is
|
||||
# creating an Object)
|
||||
_hash = resource.Body("hash")
|
||||
_bytes = resource.Body("bytes", type=int)
|
||||
_last_modified = resource.Body("last_modified")
|
||||
_content_type = resource.Body("content_type")
|
||||
|
||||
# Headers for HEAD and GET requests
|
||||
#: If set to True, Object Storage queries all replicas to return
|
||||
@@ -59,46 +68,49 @@ class Object(_base.BaseResource):
|
||||
#: responds faster after it finds one valid replica. Because
|
||||
#: setting this header to True is more expensive for the back end,
|
||||
#: use it only when it is absolutely needed. *Type: bool*
|
||||
is_newest = resource.header("x-newest", type=bool)
|
||||
is_newest = resource.Header("x-newest", type=bool)
|
||||
#: TODO(briancurtin) there's a lot of content here...
|
||||
range = resource.header("range", type=dict)
|
||||
range = resource.Header("range", type=dict)
|
||||
#: See http://www.ietf.org/rfc/rfc2616.txt.
|
||||
if_match = resource.header("if-match", type=dict)
|
||||
# TODO(mordred) We need a string-or-list formatter. type=list with a string
|
||||
# value results in a list containing the characters.
|
||||
if_match = resource.Header("if-match", type=list)
|
||||
#: In combination with Expect: 100-Continue, specify an
|
||||
#: "If-None-Match: \*" header to query whether the server already
|
||||
#: has a copy of the object before any data is sent.
|
||||
if_none_match = resource.header("if-none-match", type=dict)
|
||||
if_none_match = resource.Header("if-none-match", type=list)
|
||||
#: See http://www.ietf.org/rfc/rfc2616.txt.
|
||||
if_modified_since = resource.header("if-modified-since", type=dict)
|
||||
if_modified_since = resource.Header("if-modified-since", type=str)
|
||||
#: See http://www.ietf.org/rfc/rfc2616.txt.
|
||||
if_unmodified_since = resource.header("if-unmodified-since", type=dict)
|
||||
if_unmodified_since = resource.Header("if-unmodified-since", type=str)
|
||||
|
||||
# Query parameters
|
||||
#: Used with temporary URLs to sign the request. For more
|
||||
#: information about temporary URLs, see OpenStack Object Storage
|
||||
#: API v1 Reference.
|
||||
signature = resource.header("signature")
|
||||
signature = resource.Header("signature")
|
||||
#: Used with temporary URLs to specify the expiry time of the
|
||||
#: signature. For more information about temporary URLs, see
|
||||
#: OpenStack Object Storage API v1 Reference.
|
||||
expires_at = resource.header("expires")
|
||||
expires_at = resource.Header("expires")
|
||||
#: If you include the multipart-manifest=get query parameter and
|
||||
#: the object is a large object, the object contents are not
|
||||
#: returned. Instead, the manifest is returned in the
|
||||
#: X-Object-Manifest response header for dynamic large objects
|
||||
#: or in the response body for static large objects.
|
||||
multipart_manifest = resource.header("multipart-manifest")
|
||||
multipart_manifest = resource.Header("multipart-manifest")
|
||||
|
||||
# Response headers from HEAD and GET
|
||||
#: HEAD operations do not return content. However, in this
|
||||
#: operation the value in the Content-Length header is not the
|
||||
#: size of the response body. Instead it contains the size of
|
||||
#: the object, in bytes.
|
||||
content_length = resource.header("content-length")
|
||||
content_length = resource.Header(
|
||||
"content-length", type=int, alias='_bytes')
|
||||
#: The MIME type of the object.
|
||||
content_type = resource.header("content-type")
|
||||
content_type = resource.Header("content-type", alias="_content_type")
|
||||
#: The type of ranges that the object accepts.
|
||||
accept_ranges = resource.header("accept-ranges")
|
||||
accept_ranges = resource.Header("accept-ranges")
|
||||
#: For objects smaller than 5 GB, this value is the MD5 checksum
|
||||
#: of the object content. The value is not quoted.
|
||||
#: For manifest objects, this value is the MD5 checksum of the
|
||||
@@ -110,46 +122,46 @@ class Object(_base.BaseResource):
|
||||
#: the response body as it is received and compare this value
|
||||
#: with the one in the ETag header. If they differ, the content
|
||||
#: was corrupted, so retry the operation.
|
||||
etag = resource.header("etag")
|
||||
etag = resource.Header("etag", alias='_hash')
|
||||
#: Set to True if this object is a static large object manifest object.
|
||||
#: *Type: bool*
|
||||
is_static_large_object = resource.header("x-static-large-object",
|
||||
is_static_large_object = resource.Header("x-static-large-object",
|
||||
type=bool)
|
||||
#: If set, the value of the Content-Encoding metadata.
|
||||
#: If not set, this header is not returned by this operation.
|
||||
content_encoding = resource.header("content-encoding")
|
||||
content_encoding = resource.Header("content-encoding")
|
||||
#: If set, specifies the override behavior for the browser.
|
||||
#: For example, this header might specify that the browser use
|
||||
#: a download program to save this file rather than show the file,
|
||||
#: which is the default.
|
||||
#: If not set, this header is not returned by this operation.
|
||||
content_disposition = resource.header("content-disposition")
|
||||
content_disposition = resource.Header("content-disposition")
|
||||
#: Specifies the number of seconds after which the object is
|
||||
#: removed. Internally, the Object Storage system stores this
|
||||
#: value in the X-Delete-At metadata item.
|
||||
delete_after = resource.header("x-delete-after", type=int)
|
||||
delete_after = resource.Header("x-delete-after", type=int)
|
||||
#: If set, the time when the object will be deleted by the system
|
||||
#: in the format of a UNIX Epoch timestamp.
|
||||
#: If not set, this header is not returned by this operation.
|
||||
delete_at = resource.header("x-delete-at")
|
||||
delete_at = resource.Header("x-delete-at")
|
||||
#: If set, to this is a dynamic large object manifest object.
|
||||
#: The value is the container and object name prefix of the
|
||||
#: segment objects in the form container/prefix.
|
||||
object_manifest = resource.header("x-object-manifest")
|
||||
object_manifest = resource.Header("x-object-manifest")
|
||||
#: The timestamp of the transaction.
|
||||
timestamp = resource.header("x-timestamp")
|
||||
timestamp = resource.Header("x-timestamp")
|
||||
#: The date and time that the object was created or the last
|
||||
#: time that the metadata was changed.
|
||||
last_modified_at = resource.header("last_modified", alias="last-modified")
|
||||
last_modified_at = resource.Header("last-modified", alias='_last_modified')
|
||||
|
||||
# Headers for PUT and POST requests
|
||||
#: Set to chunked to enable chunked transfer encoding. If used,
|
||||
#: do not set the Content-Length header to a non-zero value.
|
||||
transfer_encoding = resource.header("transfer-encoding")
|
||||
transfer_encoding = resource.Header("transfer-encoding")
|
||||
#: If set to true, Object Storage guesses the content type based
|
||||
#: on the file extension and ignores the value sent in the
|
||||
#: Content-Type header, if present. *Type: bool*
|
||||
is_content_type_detected = resource.header("x-detect-content-type",
|
||||
is_content_type_detected = resource.Header("x-detect-content-type",
|
||||
type=bool)
|
||||
#: If set, this is the name of an object used to create the new
|
||||
#: object by copying the X-Copy-From object. The value is in form
|
||||
@@ -158,7 +170,13 @@ class Object(_base.BaseResource):
|
||||
#: in the header.
|
||||
#: Using PUT with X-Copy-From has the same effect as using the
|
||||
#: COPY operation to copy an object.
|
||||
copy_from = resource.header("x-copy-from")
|
||||
copy_from = resource.Header("x-copy-from")
|
||||
|
||||
has_body = False
|
||||
|
||||
def __init__(self, data=None, **attrs):
|
||||
super(_base.BaseResource, self).__init__(**attrs)
|
||||
self.data = data
|
||||
|
||||
# The Object Store treats the metadata for its resources inconsistently so
|
||||
# Object.set_metadata must override the BaseResource.set_metadata to
|
||||
@@ -169,66 +187,111 @@ class Object(_base.BaseResource):
|
||||
filtered_metadata = \
|
||||
{key: value for key, value in metadata.items() if value}
|
||||
|
||||
# Update from remote if we only have locally created information
|
||||
if not self.last_modified_at:
|
||||
self.head(session)
|
||||
|
||||
# Get a copy of the original metadata so it doesn't get erased on POST
|
||||
# and update it with the new metadata values.
|
||||
obj = self.head(session)
|
||||
metadata2 = copy.deepcopy(obj.metadata)
|
||||
metadata2.update(filtered_metadata)
|
||||
metadata = copy.deepcopy(self.metadata)
|
||||
metadata.update(filtered_metadata)
|
||||
|
||||
# Include any original system metadata so it doesn't get erased on POST
|
||||
for key in self._system_metadata:
|
||||
value = getattr(obj, key)
|
||||
if value and key not in metadata2:
|
||||
metadata2[key] = value
|
||||
value = getattr(self, key)
|
||||
if value and key not in metadata:
|
||||
metadata[key] = value
|
||||
|
||||
super(Object, self).set_metadata(session, metadata2)
|
||||
request = self._prepare_request()
|
||||
headers = self._calculate_headers(metadata)
|
||||
response = session.post(request.url, headers=headers)
|
||||
self._translate_response(response, has_body=False)
|
||||
self.metadata.update(metadata)
|
||||
|
||||
return self
|
||||
|
||||
# The Object Store treats the metadata for its resources inconsistently so
|
||||
# Object.delete_metadata must override the BaseResource.delete_metadata to
|
||||
# account for it.
|
||||
def delete_metadata(self, session, keys):
|
||||
# Get a copy of the original metadata so it doesn't get erased on POST
|
||||
# and update it with the new metadata values.
|
||||
obj = self.head(session)
|
||||
metadata = copy.deepcopy(obj.metadata)
|
||||
if not keys:
|
||||
return
|
||||
# If we have an empty object, update it from the remote side so that
|
||||
# we have a copy of the original metadata. Deleting metadata requires
|
||||
# POSTing and overwriting all of the metadata. If we already have
|
||||
# metadata locally, assume this is an existing object.
|
||||
if not self.metadata:
|
||||
self.head(session)
|
||||
|
||||
metadata = copy.deepcopy(self.metadata)
|
||||
|
||||
# Include any original system metadata so it doesn't get erased on POST
|
||||
for key in self._system_metadata:
|
||||
value = getattr(obj, key)
|
||||
value = getattr(self, key)
|
||||
if value:
|
||||
metadata[key] = value
|
||||
|
||||
# Remove the metadata
|
||||
# Remove the requested metadata keys
|
||||
# TODO(mordred) Why don't we just look at self._header_mapping()
|
||||
# instead of having system_metadata?
|
||||
deleted = False
|
||||
attr_keys_to_delete = set()
|
||||
for key in keys:
|
||||
if key == 'delete_after':
|
||||
del(metadata['delete_at'])
|
||||
else:
|
||||
del(metadata[key])
|
||||
if key in metadata:
|
||||
del(metadata[key])
|
||||
# Delete the attribute from the local copy of the object.
|
||||
# Metadata that doesn't have Component attributes is
|
||||
# handled by self.metadata being reset when we run
|
||||
# self.head
|
||||
if hasattr(self, key):
|
||||
attr_keys_to_delete.add(key)
|
||||
deleted = True
|
||||
|
||||
url = self._get_url(self, self.id)
|
||||
session.post(url,
|
||||
headers=self._calculate_headers(metadata))
|
||||
# Nothing to delete, skip the POST
|
||||
if not deleted:
|
||||
return self
|
||||
|
||||
def get(self, session, include_headers=False, args=None,
|
||||
error_message=None):
|
||||
url = self._get_url(self, self.id)
|
||||
headers = {'Accept': 'bytes'}
|
||||
resp = session.get(url, headers=headers, error_message=error_message)
|
||||
resp = resp.content
|
||||
self._set_metadata()
|
||||
return resp
|
||||
request = self._prepare_request()
|
||||
response = session.post(
|
||||
request.url, headers=self._calculate_headers(metadata))
|
||||
exceptions.raise_from_response(
|
||||
response, error_message="Error deleting metadata keys")
|
||||
|
||||
# Only delete from local object if the remote delete was successful
|
||||
for key in attr_keys_to_delete:
|
||||
delattr(self, key)
|
||||
|
||||
# Just update ourselves from remote again.
|
||||
return self.head(session)

def _download(self, session, error_message=None, stream=False):
request = self._prepare_request()
request.headers['Accept'] = 'bytes'

response = session.get(
request.url, headers=request.headers, stream=stream)
exceptions.raise_from_response(response, error_message=error_message)
return response

def download(self, session, error_message=None):
response = self._download(session, error_message=error_message)
return response.content

def stream(self, session, error_message=None, chunk_size=1024):
response = self._download(
session, error_message=error_message, stream=True)
return response.iter_content(chunk_size, decode_unicode=False)

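At the resource level the split is: download() buffers the whole payload,
stream() hands back the underlying requests iterator. A minimal usage
sketch (the proxy/session variable, container and object names are
illustrative, not from the patch):

    # 'proxy' is an object-store proxy/adapter, e.g. conn.object_store
    obj = Object.new(container='backups', name='backup.tar.gz')

    whole_thing = obj.download(proxy)   # bytes, fully buffered in memory

    # Chunked download with roughly constant memory use.
    with open('backup.tar.gz', 'wb') as out:
        for chunk in obj.stream(proxy, chunk_size=64 * 1024):
            out.write(chunk)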
def create(self, session):
|
||||
url = self._get_url(self, self.id)
|
||||
request = self._prepare_request()
|
||||
request.headers['Accept'] = ''
|
||||
|
||||
headers = self.get_headers()
|
||||
headers['Accept'] = ''
|
||||
if self.data is not None:
|
||||
resp = session.put(url,
|
||||
data=self.data,
|
||||
headers=headers).headers
|
||||
else:
|
||||
resp = session.post(url, data=None,
|
||||
headers=headers).headers
|
||||
self.set_headers(resp)
|
||||
response = session.put(
|
||||
request.url,
|
||||
data=self.data,
|
||||
headers=request.headers)
|
||||
self._translate_response(response, has_body=False)
|
||||
return self
|
||||
|
@@ -34,6 +34,8 @@ and then returned to the caller.
import collections
import itertools

from requests import structures

from openstack import exceptions
from openstack import format
from openstack import utils
@@ -44,7 +46,8 @@ class _BaseComponent(object):
# The name this component is being tracked as in the Resource
key = None

def __init__(self, name, type=None, default=None, alternate_id=False):
def __init__(self, name, type=None, default=None, alias=None,
alternate_id=False, **kwargs):
"""A typed descriptor for a component that makes up a Resource

:param name: The name this component exists as on the server
@@ -53,6 +56,7 @@ class _BaseComponent(object):
will work. If you specify type=dict and then set a
component to a string, __set__ will fail, for example.
:param default: Typically None, but any other default can be set.
:param alias: If set, alternative attribute on object to return.
:param alternate_id: When `True`, this property is known
internally as a value that can be sent
with requests that require an ID but
@@ -63,6 +67,7 @@ class _BaseComponent(object):
self.name = name
self.type = type
self.default = default
self.alias = alias
self.alternate_id = alternate_id

def __get__(self, instance, owner):
@@ -74,6 +79,8 @@ class _BaseComponent(object):
try:
value = attributes[self.name]
except KeyError:
if self.alias:
return getattr(instance, self.alias)
return self.default

# self.type() should not be called on None objects.
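The alias fallback is what lets the Swift resources answer with the same
value whether it arrived in a listing body or in a HEAD response. A
trimmed-down sketch mirroring the Container definitions later in this
change:

    from openstack import resource2 as resource

    class Container(resource.Resource):
        # Populated when the container shows up in an account listing...
        count = resource.Body("count", type=int, alias='object_count')
        # ...or when the container itself is HEADed.
        object_count = resource.Header(
            "x-container-object-count", type=int, alias='count')

    # A listing-populated container should still answer object_count:
    # Container(count=2).object_count == 2
    # and a HEAD-populated one should still answer count.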
@@ -253,6 +260,11 @@ class Resource(object):
#: Method for creating a resource (POST, PUT)
create_method = "POST"

#: Do calls for this resource require an id
requires_id = True
#: Do responses for this resource have bodies
has_body = True

def __init__(self, _synchronized=False, **attrs):
"""The base resource

@@ -331,12 +343,13 @@
attributes that exist on this class.
"""
body = self._consume_attrs(self._body_mapping(), attrs)
header = self._consume_attrs(self._header_mapping(), attrs)
header = self._consume_attrs(
self._header_mapping(), attrs, insensitive=True)
uri = self._consume_attrs(self._uri_mapping(), attrs)

return body, header, uri

def _consume_attrs(self, mapping, attrs):
def _consume_attrs(self, mapping, attrs, insensitive=False):
"""Given a mapping and attributes, return relevant matches

This method finds keys in attrs that exist in the mapping, then
@@ -347,16 +360,29 @@
same source dict several times.
"""
relevant_attrs = {}
if insensitive:
relevant_attrs = structures.CaseInsensitiveDict()
consumed_keys = []
nonce = object()
# TODO(mordred) Invert the loop - loop over mapping, look in attrs
# and we should be able to simplify the logic, since CID should
# handle the case matching
for key in attrs:
if key in mapping:
value = mapping.get(key, nonce)
if value is not nonce:
# Convert client-side key names into server-side.
relevant_attrs[mapping[key]] = attrs[key]
consumed_keys.append(key)
elif key in mapping.values():
else:
# Server-side names can be stored directly.
relevant_attrs[key] = attrs[key]
consumed_keys.append(key)
search_key = key
values = mapping.values()
if insensitive:
search_key = search_key.lower()
values = [v.lower() for v in values]
if search_key in values:
relevant_attrs[key] = attrs[key]
consumed_keys.append(key)

for key in consumed_keys:
attrs.pop(key)
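The reason for reaching for requests' CaseInsensitiveDict here instead of
lower-casing keys by hand, in a nutshell (standalone illustration, not
part of the patch):

    from requests import structures

    headers = structures.CaseInsensitiveDict()
    headers['X-Container-Object-Count'] = '2'

    # Lookups ignore case, which is how HTTP headers behave on the wire...
    assert headers['x-container-object-count'] == '2'
    # ...while the originally stored casing is kept for iteration.
    assert list(headers) == ['X-Container-Object-Count']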
@@ -366,6 +392,10 @@ class Resource(object):
|
||||
@classmethod
|
||||
def _get_mapping(cls, component):
|
||||
"""Return a dict of attributes of a given component on the class"""
|
||||
# TODO(mordred) Invert this mapping, it should be server-side to local.
|
||||
# The reason for that is that headers are case insensitive, whereas
|
||||
# our local values are case sensitive. If we invert this dict, we can
|
||||
# rely on CaseInsensitiveDict when doing comparisons.
|
||||
mapping = {}
|
||||
# Since we're looking at class definitions we need to include
|
||||
# subclasses, so check the whole MRO.
|
||||
@@ -386,7 +416,8 @@ class Resource(object):
|
||||
@classmethod
|
||||
def _header_mapping(cls):
|
||||
"""Return all Header members of this class"""
|
||||
return cls._get_mapping(Header)
|
||||
# TODO(mordred) this isn't helpful until we invert the dict
|
||||
return structures.CaseInsensitiveDict(cls._get_mapping(Header))
|
||||
|
||||
@classmethod
|
||||
def _uri_mapping(cls):
|
||||
@@ -501,7 +532,7 @@ class Resource(object):
|
||||
|
||||
return mapping
|
||||
|
||||
def _prepare_request(self, requires_id=True, prepend_key=False):
|
||||
def _prepare_request(self, requires_id=None, prepend_key=False):
|
||||
"""Prepare a request to be sent to the server
|
||||
|
||||
Create operations don't require an ID, but all others do,
|
||||
@@ -515,11 +546,20 @@ class Resource(object):
|
||||
as well a body and headers that are ready to send.
|
||||
Only dirty body and header contents will be returned.
|
||||
"""
|
||||
if requires_id is None:
|
||||
requires_id = self.requires_id
|
||||
|
||||
body = self._body.dirty
|
||||
if prepend_key and self.resource_key is not None:
|
||||
body = {self.resource_key: body}
|
||||
|
||||
headers = self._header.dirty
|
||||
# TODO(mordred) Ensure headers have string values better than this
|
||||
headers = {}
|
||||
for k, v in self._header.dirty.items():
|
||||
if isinstance(v, list):
|
||||
headers[k] = ", ".join(v)
|
||||
else:
|
||||
headers[k] = str(v)
|
||||
|
||||
uri = self.base_path % self._uri.attributes
|
||||
if requires_id:
|
||||
@@ -539,7 +579,7 @@ class Resource(object):
|
||||
"""
|
||||
return {k: v for k, v in component.items() if k in mapping.values()}
|
||||
|
||||
def _translate_response(self, response, has_body=True, error_message=None):
|
||||
def _translate_response(self, response, has_body=None, error_message=None):
|
||||
"""Given a KSA response, inflate this instance with its data
|
||||
|
||||
DELETE operations don't return a body, so only try to work
|
||||
@@ -548,6 +588,8 @@ class Resource(object):
|
||||
This method updates attributes that correspond to headers
|
||||
and body on this instance and clears the dirty set.
|
||||
"""
|
||||
if has_body is None:
|
||||
has_body = self.has_body
|
||||
exceptions.raise_from_response(response, error_message=error_message)
|
||||
if has_body:
|
||||
body = response.json()
|
||||
@@ -560,6 +602,8 @@ class Resource(object):
|
||||
|
||||
headers = self._filter_component(response.headers,
|
||||
self._header_mapping())
|
||||
headers = self._consume_attrs(
|
||||
self._header_mapping(), response.headers.copy(), insensitive=True)
|
||||
self._header.attributes.update(headers)
|
||||
self._header.clean()
|
||||
|
||||
@@ -637,7 +681,7 @@ class Resource(object):
|
||||
response = session.head(request.url,
|
||||
headers={"Accept": ""})
|
||||
|
||||
self._translate_response(response)
|
||||
self._translate_response(response, has_body=False)
|
||||
return self
|
||||
|
||||
def update(self, session, prepend_key=True, has_body=True):
|
||||
|
@@ -36,11 +36,11 @@ class TestObject(base.BaseFunctionalTest):
in self.conn.object_store.objects(container=self.FOLDER)]
self.assertIn(self.FILE, names)

def test_get_object(self):
result = self.conn.object_store.get_object(
def test_download_object(self):
result = self.conn.object_store.download_object(
self.FILE, container=self.FOLDER)
self.assertEqual(self.DATA, result)
result = self.conn.object_store.get_object(self.sot)
result = self.conn.object_store.download_object(self.sot)
self.assertEqual(self.DATA, result)

def test_system_metadata(self):
@@ -611,6 +611,13 @@ class RequestsMockTestCase(BaseTestCase):
mock_method, mock_uri, params['response_list'],
**params['kw_params'])

def assert_no_calls(self):
# TODO(mordred) For now, creating the adapter for self.conn is
# triggering catalog lookups. Make sure no_calls is only 2.
# When we can make that on-demand through a descriptor object,
# drop this to 0.
self.assertEqual(2, len(self.adapter.request_history))

def assert_calls(self, stop_after=None, do_count=True):
for (x, (call, history)) in enumerate(
zip(self.calls, self.adapter.request_history)):
@@ -32,20 +32,20 @@ ACCOUNT_EXAMPLE = {
|
||||
class TestAccount(testtools.TestCase):
|
||||
|
||||
def test_basic(self):
|
||||
sot = account.Account.new(**ACCOUNT_EXAMPLE)
|
||||
sot = account.Account(**ACCOUNT_EXAMPLE)
|
||||
self.assertIsNone(sot.resources_key)
|
||||
self.assertIsNone(sot.id)
|
||||
self.assertEqual('/', sot.base_path)
|
||||
self.assertEqual('object-store', sot.service.service_type)
|
||||
self.assertTrue(sot.allow_update)
|
||||
self.assertTrue(sot.allow_head)
|
||||
self.assertTrue(sot.allow_retrieve)
|
||||
self.assertTrue(sot.allow_get)
|
||||
self.assertFalse(sot.allow_delete)
|
||||
self.assertFalse(sot.allow_list)
|
||||
self.assertFalse(sot.allow_create)
|
||||
|
||||
def test_make_it(self):
|
||||
sot = account.Account.new(**{'headers': ACCOUNT_EXAMPLE})
|
||||
sot = account.Account(**ACCOUNT_EXAMPLE)
|
||||
self.assertIsNone(sot.id)
|
||||
self.assertEqual(int(ACCOUNT_EXAMPLE['x-account-bytes-used']),
|
||||
sot.account_bytes_used)
|
||||
|
@@ -10,125 +10,123 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
import testtools
|
||||
|
||||
from openstack.object_store.v1 import container
|
||||
from openstack.tests.unit import base
|
||||
|
||||
|
||||
CONTAINER_NAME = "mycontainer"
|
||||
|
||||
CONT_EXAMPLE = {
|
||||
"count": 999,
|
||||
"bytes": 12345,
|
||||
"name": CONTAINER_NAME
|
||||
}
|
||||
|
||||
HEAD_EXAMPLE = {
|
||||
'content-length': '346',
|
||||
'x-container-object-count': '2',
|
||||
'accept-ranges': 'bytes',
|
||||
'id': 'tx1878fdc50f9b4978a3fdc-0053c31462',
|
||||
'date': 'Sun, 13 Jul 2014 23:21:06 GMT',
|
||||
'x-container-read': 'read-settings',
|
||||
'x-container-write': 'write-settings',
|
||||
'x-container-sync-to': 'sync-to',
|
||||
'x-container-sync-key': 'sync-key',
|
||||
'x-container-bytes-used': '630666',
|
||||
'x-versions-location': 'versions-location',
|
||||
'content-type': 'application/json; charset=utf-8',
|
||||
'x-timestamp': '1453414055.48672'
|
||||
}
|
||||
|
||||
LIST_EXAMPLE = [
|
||||
{
|
||||
"count": 999,
|
||||
"bytes": 12345,
|
||||
"name": "container1"
|
||||
},
|
||||
{
|
||||
"count": 888,
|
||||
"bytes": 54321,
|
||||
"name": "container2"
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
class TestContainer(testtools.TestCase):
|
||||
class TestContainer(base.RequestsMockTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestContainer, self).setUp()
|
||||
self.resp = mock.Mock()
|
||||
self.resp.body = {}
|
||||
self.resp.json = mock.Mock(return_value=self.resp.body)
|
||||
self.resp.headers = {"X-Trans-Id": "abcdef"}
|
||||
self.sess = mock.Mock()
|
||||
self.sess.put = mock.Mock(return_value=self.resp)
|
||||
self.sess.post = mock.Mock(return_value=self.resp)
|
||||
self.container = self.getUniqueString()
|
||||
self.endpoint = self.conn.object_store.get_endpoint() + '/'
|
||||
self.container_endpoint = '{endpoint}{container}'.format(
|
||||
endpoint=self.endpoint, container=self.container)
|
||||
|
||||
self.body = {
|
||||
"count": 2,
|
||||
"bytes": 630666,
|
||||
"name": self.container,
|
||||
}
|
||||
|
||||
self.headers = {
|
||||
'x-container-object-count': '2',
|
||||
'x-container-read': 'read-settings',
|
||||
'x-container-write': 'write-settings',
|
||||
'x-container-sync-to': 'sync-to',
|
||||
'x-container-sync-key': 'sync-key',
|
||||
'x-container-bytes-used': '630666',
|
||||
'x-versions-location': 'versions-location',
|
||||
'content-type': 'application/json; charset=utf-8',
|
||||
'x-timestamp': '1453414055.48672'
|
||||
}
|
||||
self.body_plus_headers = dict(self.body, **self.headers)
|
||||
|
||||
def test_basic(self):
|
||||
sot = container.Container.new(**CONT_EXAMPLE)
|
||||
sot = container.Container.new(**self.body)
|
||||
self.assertIsNone(sot.resources_key)
|
||||
self.assertEqual('name', sot.id_attribute)
|
||||
self.assertEqual('name', sot._alternate_id())
|
||||
self.assertEqual('/', sot.base_path)
|
||||
self.assertEqual('object-store', sot.service.service_type)
|
||||
self.assertTrue(sot.allow_update)
|
||||
self.assertTrue(sot.allow_create)
|
||||
self.assertTrue(sot.allow_retrieve)
|
||||
self.assertTrue(sot.allow_get)
|
||||
self.assertTrue(sot.allow_delete)
        self.assertTrue(sot.allow_list)
        self.assertTrue(sot.allow_head)
        self.assert_no_calls()

    def test_make_it(self):
        sot = container.Container.new(**CONT_EXAMPLE)
        self.assertEqual(CONT_EXAMPLE['name'], sot.id)
        self.assertEqual(CONT_EXAMPLE['name'], sot.name)
        self.assertEqual(CONT_EXAMPLE['count'], sot.count)
        self.assertEqual(CONT_EXAMPLE['bytes'], sot.bytes)
        sot = container.Container.new(**self.body)
        self.assertEqual(self.body['name'], sot.id)
        self.assertEqual(self.body['name'], sot.name)
        self.assertEqual(self.body['count'], sot.count)
        self.assertEqual(self.body['count'], sot.object_count)
        self.assertEqual(self.body['bytes'], sot.bytes)
        self.assertEqual(self.body['bytes'], sot.bytes_used)
        self.assert_no_calls()

    def test_create_and_head(self):
        sot = container.Container(CONT_EXAMPLE)

        # Update container with HEAD data
        sot._attrs.update({'headers': HEAD_EXAMPLE})
        sot = container.Container(**self.body_plus_headers)

        # Attributes from create
        self.assertEqual(CONT_EXAMPLE['name'], sot.id)
        self.assertEqual(CONT_EXAMPLE['name'], sot.name)
        self.assertEqual(CONT_EXAMPLE['count'], sot.count)
        self.assertEqual(CONT_EXAMPLE['bytes'], sot.bytes)
        self.assertEqual(self.body_plus_headers['name'], sot.id)
        self.assertEqual(self.body_plus_headers['name'], sot.name)
        self.assertEqual(self.body_plus_headers['count'], sot.count)
        self.assertEqual(self.body_plus_headers['bytes'], sot.bytes)

        # Attributes from header
        self.assertEqual(int(HEAD_EXAMPLE['x-container-object-count']),
                         sot.object_count)
        self.assertEqual(int(HEAD_EXAMPLE['x-container-bytes-used']),
                         sot.bytes_used)
        self.assertEqual(HEAD_EXAMPLE['x-container-read'],
                         sot.read_ACL)
        self.assertEqual(HEAD_EXAMPLE['x-container-write'],
                         sot.write_ACL)
        self.assertEqual(HEAD_EXAMPLE['x-container-sync-to'],
                         sot.sync_to)
        self.assertEqual(HEAD_EXAMPLE['x-container-sync-key'],
                         sot.sync_key)
        self.assertEqual(HEAD_EXAMPLE['x-versions-location'],
                         sot.versions_location)
        self.assertEqual(HEAD_EXAMPLE['x-timestamp'], sot.timestamp)
        self.assertEqual(
            int(self.body_plus_headers['x-container-object-count']),
            sot.object_count)
        self.assertEqual(
            int(self.body_plus_headers['x-container-bytes-used']),
            sot.bytes_used)
        self.assertEqual(
            self.body_plus_headers['x-container-read'],
            sot.read_ACL)
        self.assertEqual(
            self.body_plus_headers['x-container-write'],
            sot.write_ACL)
        self.assertEqual(
            self.body_plus_headers['x-container-sync-to'],
            sot.sync_to)
        self.assertEqual(
            self.body_plus_headers['x-container-sync-key'],
            sot.sync_key)
        self.assertEqual(
            self.body_plus_headers['x-versions-location'],
            sot.versions_location)
        self.assertEqual(self.body_plus_headers['x-timestamp'], sot.timestamp)

    @mock.patch("openstack.resource.Resource.list")
    def test_list(self, fake_list):
        fake_val = [container.Container.existing(**ex) for ex in LIST_EXAMPLE]
        fake_list.return_value = fake_val
    def test_list(self):
        containers = [
            {
                "count": 999,
                "bytes": 12345,
                "name": "container1"
            },
            {
                "count": 888,
                "bytes": 54321,
                "name": "container2"
            }
        ]
        self.register_uris([
            dict(method='GET', uri=self.endpoint,
                 json=containers)
        ])

        # Since the list method is mocked out, just pass None for the session.
        response = container.Container.list(None)
        response = container.Container.list(self.conn.object_store)

        self.assertEqual(len(LIST_EXAMPLE), len(response))
        for item in range(len(response)):
            self.assertEqual(container.Container, type(response[item]))
            self.assertEqual(LIST_EXAMPLE[item]["name"], response[item].name)
            self.assertEqual(LIST_EXAMPLE[item]["count"], response[item].count)
            self.assertEqual(LIST_EXAMPLE[item]["bytes"], response[item].bytes)
        self.assertEqual(len(containers), len(list(response)))
        for index, item in enumerate(response):
            self.assertEqual(container.Container, type(item))
            self.assertEqual(containers[index]["name"], item.name)
            self.assertEqual(containers[index]["count"], item.count)
            self.assertEqual(containers[index]["bytes"], item.bytes)

        self.assert_calls()

    def _test_create_update(self, sot, sot_call, sess_method):
        sot.read_ACL = "some ACL"
@@ -137,35 +135,43 @@ class TestContainer(testtools.TestCase):
        headers = {
            "x-container-read": "some ACL",
            "x-container-write": "another ACL",
            "x-detect-content-type": True,
            "Accept": "",
            "x-detect-content-type": 'True',
        }
        sot_call(self.sess)
        self.register_uris([
            dict(method=sess_method, uri=self.container_endpoint,
                 json=self.body,
                 validate=dict(headers=headers)),
        ])
        sot_call(self.conn.object_store)

        url = "/%s" % CONTAINER_NAME
        sess_method.assert_called_with(url,
                                       headers=headers)
        self.assert_calls()

    def test_create(self):
        sot = container.Container.new(name=CONTAINER_NAME)
        self._test_create_update(sot, sot.create, self.sess.put)
        sot = container.Container.new(name=self.container)
        self._test_create_update(sot, sot.create, 'PUT')

    def test_update(self):
        sot = container.Container.new(name=CONTAINER_NAME)
        self._test_create_update(sot, sot.update, self.sess.post)
        sot = container.Container.new(name=self.container)
        self._test_create_update(sot, sot.update, 'POST')

    def _test_no_headers(self, sot, sot_call, sess_method):
        sot = container.Container.new(name=CONTAINER_NAME)
        sot.create(self.sess)
        url = "/%s" % CONTAINER_NAME
        headers = {'Accept': ''}
        self.sess.put.assert_called_with(url,
                                         headers=headers)
        headers = {}
        data = {}
        self.register_uris([
            dict(method=sess_method, uri=self.container_endpoint,
                 json=self.body,
                 validate=dict(
                     headers=headers,
                     json=data))
        ])
        sot_call(self.conn.object_store)

    def test_create_no_headers(self):
        sot = container.Container.new(name=CONTAINER_NAME)
        self._test_no_headers(sot, sot.create, self.sess.put)
        sot = container.Container.new(name=self.container)
        self._test_no_headers(sot, sot.create, 'PUT')
        self.assert_calls()

    def test_update_no_headers(self):
        sot = container.Container.new(name=CONTAINER_NAME)
        self._test_no_headers(sot, sot.update, self.sess.post)
        sot = container.Container.new(name=self.container)
        self._test_no_headers(sot, sot.update, 'POST')
        self.assert_no_calls()

@@ -10,14 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.

import mock
import testtools

from openstack.object_store.v1 import obj


CONTAINER_NAME = "mycontainer"
OBJECT_NAME = "myobject"
from openstack.tests.unit.cloud import test_object as base_test_object

# Object can receive both last-modified in headers and last_modified in
# the body. However, originally, only last-modified was handled as an
@@ -30,109 +24,127 @@ OBJECT_NAME = "myobject"
# attribute which would follow the same pattern.
# This example should represent the body values returned by a GET, so the keys
# must be underscores.
OBJ_EXAMPLE = {
    "hash": "243f87b91224d85722564a80fd3cb1f1",
    "last_modified": "2014-07-13T18:41:03.319240",
    "bytes": 252466,
    "name": OBJECT_NAME,
    "content_type": "application/octet-stream"
}

DICT_EXAMPLE = {
    'container': CONTAINER_NAME,
    'name': OBJECT_NAME,
    'content_type': 'application/octet-stream',
    'headers': {
        'content-length': '252466',
        'accept-ranges': 'bytes',
        'last-modified': 'Sun, 13 Jul 2014 18:41:04 GMT',
        'etag': '243f87b91224d85722564a80fd3cb1f1',
        'x-timestamp': '1453414256.28112',
        'date': 'Thu, 28 Aug 2014 14:41:59 GMT',
        'id': 'tx5fb5ad4f4d0846c6b2bc7-0053ff3fb7',
        'x-delete-at': '1453416226.16744'
    }
}
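
The comment above describes the situation the new alias support in _BaseComponent is meant to cover: the same logical value arrives as a body key in listings and as a header on HEAD/GET. A minimal sketch of how a resource might declare such an attribute, assuming the alias keyword added in this change; the class and attribute names here are illustrative only and are not part of this diff:

    from openstack import resource2 as resource

    class ExampleObject(resource.Resource):
        base_path = '/%(container)s'

        # Listing bodies return 'last_modified'; HEAD/GET responses return a
        # 'last-modified' header. The alias lets both land on one attribute.
        last_modified_at = resource.Header('last-modified',
                                           alias='last_modified')
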


class TestObject(testtools.TestCase):
class TestObject(base_test_object.BaseTestObject):

    def setUp(self):
        super(TestObject, self).setUp()
        self.resp = mock.Mock()
        self.resp.content = "lol here's some content"
        self.resp.headers = {"X-Trans-Id": "abcdef"}
        self.sess = mock.Mock()
        self.sess.get = mock.Mock(return_value=self.resp)
        self.sess.put = mock.Mock(return_value=self.resp)
        self.sess.post = mock.Mock(return_value=self.resp)
        self.the_data = b'test body'
        self.the_data_length = len(self.the_data)
        # TODO(mordred) Make the_data be from getUniqueString and then
        # have hash and etag be actual md5 sums of that string
        self.body = {
            "hash": "243f87b91224d85722564a80fd3cb1f1",
            "last_modified": "2014-07-13T18:41:03.319240",
            "bytes": self.the_data_length,
            "name": self.object,
            "content_type": "application/octet-stream"
        }
        self.headers = {
            'Content-Length': str(len(self.the_data)),
            'Content-Type': 'application/octet-stream',
            'Accept-Ranges': 'bytes',
            'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
            'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
            'X-Timestamp': '1481808853.65009',
            'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
            'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
            'X-Static-Large-Object': 'True',
            'X-Object-Meta-Mtime': '1481513709.168512',
            'X-Delete-At': '1453416226.16744',
        }

    def test_basic(self):
        sot = obj.Object.new(**OBJ_EXAMPLE)
        sot = obj.Object.new(**self.body)
        self.assert_no_calls()
        self.assertIsNone(sot.resources_key)
        self.assertEqual("name", sot.id_attribute)
        self.assertEqual('name', sot._alternate_id())
        self.assertEqual('/%(container)s', sot.base_path)
        self.assertEqual('object-store', sot.service.service_type)
        self.assertTrue(sot.allow_update)
        self.assertTrue(sot.allow_create)
        self.assertTrue(sot.allow_retrieve)
        self.assertTrue(sot.allow_get)
        self.assertTrue(sot.allow_delete)
        self.assertTrue(sot.allow_list)
        self.assertTrue(sot.allow_head)

    def test_new(self):
        sot = obj.Object.new(container=CONTAINER_NAME, name=OBJECT_NAME)
        self.assertEqual(OBJECT_NAME, sot.name)
        self.assertEqual(CONTAINER_NAME, sot.container)
        sot = obj.Object.new(container=self.container, name=self.object)
        self.assert_no_calls()
        self.assertEqual(self.object, sot.name)
        self.assertEqual(self.container, sot.container)

    def test_head(self):
        sot = obj.Object.existing(**DICT_EXAMPLE)
    def test_from_body(self):
        sot = obj.Object.existing(container=self.container, **self.body)
        self.assert_no_calls()

        # Attributes from header
        self.assertEqual(DICT_EXAMPLE['container'], sot.container)
        headers = DICT_EXAMPLE['headers']
        self.assertEqual(headers['content-length'], sot.content_length)
        self.assertEqual(headers['accept-ranges'], sot.accept_ranges)
        self.assertEqual(headers['last-modified'], sot.last_modified_at)
        self.assertEqual(headers['etag'], sot.etag)
        self.assertEqual(headers['x-timestamp'], sot.timestamp)
        self.assertEqual(headers['content-type'], sot.content_type)
        self.assertEqual(headers['x-delete-at'], sot.delete_at)
        self.assertEqual(self.container, sot.container)
        self.assertEqual(
            int(self.body['bytes']), sot.content_length)
        self.assertEqual(self.body['last_modified'], sot.last_modified_at)
        self.assertEqual(self.body['hash'], sot.etag)
        self.assertEqual(self.body['content_type'], sot.content_type)

    def test_get(self):
        sot = obj.Object.new(container=CONTAINER_NAME, name=OBJECT_NAME)
    def test_from_headers(self):
        sot = obj.Object.existing(container=self.container, **self.headers)
        self.assert_no_calls()

        # Attributes from header
        self.assertEqual(self.container, sot.container)
        self.assertEqual(
            int(self.headers['Content-Length']), sot.content_length)
        self.assertEqual(self.headers['Accept-Ranges'], sot.accept_ranges)
        self.assertEqual(self.headers['Last-Modified'], sot.last_modified_at)
        self.assertEqual(self.headers['Etag'], sot.etag)
        self.assertEqual(self.headers['X-Timestamp'], sot.timestamp)
        self.assertEqual(self.headers['Content-Type'], sot.content_type)
        self.assertEqual(self.headers['X-Delete-At'], sot.delete_at)

    def test_download(self):
        headers = {
            'X-Newest': 'True',
            'If-Match': self.headers['Etag'],
            'Accept': 'bytes'
        }
        self.register_uris([
            dict(method='GET', uri=self.object_endpoint,
                 headers=self.headers,
                 content=self.the_data,
                 validate=dict(
                     headers=headers
                 ))
        ])
        sot = obj.Object.new(container=self.container, name=self.object)
        sot.is_newest = True
        sot.if_match = {"who": "what"}
        sot.if_match = [self.headers['Etag']]

        rv = sot.get(self.sess)
        rv = sot.download(self.conn.object_store)

        url = "%s/%s" % (CONTAINER_NAME, OBJECT_NAME)
        # TODO(thowe): Should allow filtering bug #1488269
        # headers = {
        #     "x-newest": True,
        #     "if-match": {"who": "what"}
        # }
        headers = {'Accept': 'bytes'}
        self.sess.get.assert_called_with(url,
                                         headers=headers,
                                         error_message=None)
        self.assertEqual(self.resp.content, rv)
        self.assertEqual(self.the_data, rv)

    def _test_create(self, method, data, accept):
        sot = obj.Object.new(container=CONTAINER_NAME, name=OBJECT_NAME,
        self.assert_calls()

    def _test_create(self, method, data):
        sot = obj.Object.new(container=self.container, name=self.object,
                             data=data)
        sot.is_newest = True
        headers = {"x-newest": True, "Accept": ""}
        sent_headers = {"x-newest": 'True', "Accept": ""}
        self.register_uris([
            dict(method=method, uri=self.object_endpoint,
                 headers=self.headers,
                 validate=dict(
                     headers=sent_headers))
        ])

        rv = sot.create(self.sess)
        rv = sot.create(self.conn.object_store)
        self.assertEqual(rv.etag, self.headers['Etag'])

        url = "%s/%s" % (CONTAINER_NAME, OBJECT_NAME)
        method.assert_called_with(url, data=data,
                                  headers=headers)
        self.assertEqual(self.resp.headers, rv.get_headers())
        self.assert_calls()

    def test_create_data(self):
        self._test_create(self.sess.put, "data", "bytes")
        self._test_create('PUT', self.the_data)

    def test_create_no_data(self):
        self._test_create(self.sess.post, None, None)
        self._test_create('PUT', None)

@@ -10,17 +10,19 @@
# License for the specific language governing permissions and limitations
# under the License.

import mock
import six

from openstack.object_store.v1 import _proxy
from openstack.object_store.v1 import account
from openstack.object_store.v1 import container
from openstack.object_store.v1 import obj
from openstack.tests.unit import test_proxy_base
from openstack.tests.unit.cloud import test_object as base_test_object
from openstack.tests.unit import test_proxy_base2


class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
class TestObjectStoreProxy(test_proxy_base2.TestProxyBase):

    kwargs_to_path_args = False

    def setUp(self):
        super(TestObjectStoreProxy, self).setUp()
@@ -42,21 +44,26 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
                          container.Container, True)

    def test_container_create_attrs(self):
        self.verify_create(self.proxy.create_container, container.Container)
        self.verify_create(
            self.proxy.create_container,
            container.Container,
            method_args=['container_name'],
            expected_kwargs={'name': 'container_name', "x": 1, "y": 2, "z": 3})

    def test_object_metadata_get(self):
        self.verify_head(self.proxy.get_object_metadata, obj.Object,
                         value="object", container="container")

    def _test_object_delete(self, ignore):
        expected_kwargs = {"path_args": {"container": "name"}}
        expected_kwargs["ignore_missing"] = ignore
        expected_kwargs = {
            "ignore_missing": ignore,
            "container": "name",
        }

        self._verify2("openstack.proxy.BaseProxy._delete",
        self._verify2("openstack.proxy2.BaseProxy._delete",
                      self.proxy.delete_object,
                      method_args=["resource"],
                      method_kwargs={"container": "name",
                                     "ignore_missing": ignore},
                      method_kwargs=expected_kwargs,
                      expected_args=[obj.Object, "resource"],
                      expected_kwargs=expected_kwargs)

@@ -67,25 +74,24 @@ class TestObjectStoreProxy(test_proxy_base.TestProxyBase):
        self._test_object_delete(True)

    def test_object_create_attrs(self):
        path_args = {"path_args": {"container": "name"}}
        method_kwargs = {"name": "test", "data": "data", "container": "name"}
        kwargs = {"name": "test", "data": "data", "container": "name"}

        expected_kwargs = path_args.copy()
        expected_kwargs.update(method_kwargs)
        expected_kwargs.pop("container")

        self._verify2("openstack.proxy.BaseProxy._create",
        self._verify2("openstack.proxy2.BaseProxy._create",
                      self.proxy.upload_object,
                      method_kwargs=method_kwargs,
                      method_kwargs=kwargs,
                      expected_args=[obj.Object],
                      expected_kwargs=expected_kwargs)
                      expected_kwargs=kwargs)
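
For context, the kwargs exercised by test_object_create_attrs above correspond to a straightforward proxy call. A hedged usage sketch, assuming an authenticated openstack.connection.Connection named conn and an existing container; the container, object name, and payload are illustrative, not part of this diff:

    # Upload a small object through the object_store proxy.
    obj_resource = conn.object_store.upload_object(
        container='mycontainer', name='greeting.txt', data=b'hello world')
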

    def test_object_create_no_container(self):
        self.assertRaises(ValueError, self.proxy.upload_object)
        self.assertRaises(TypeError, self.proxy.upload_object)

    def test_object_get(self):
        self.verify_get(self.proxy.get_object, obj.Object,
                        value=["object"], container="container")
        kwargs = dict(container="container")
        self.verify_get(
            self.proxy.get_object, obj.Object,
            value=["object"],
            method_kwargs=kwargs,
            expected_kwargs=kwargs)


class Test_containers(TestObjectStoreProxy):
@@ -252,23 +258,45 @@ class Test_objects(TestObjectStoreProxy):
        # httpretty.last_request().path)


class Test_download_object(TestObjectStoreProxy):
class Test_download_object(base_test_object.BaseTestObject):

    @mock.patch("openstack.object_store.v1._proxy.Proxy.get_object")
    def test_download(self, mock_get):
        the_data = "here's some data"
        mock_get.return_value = the_data
        ob = mock.Mock()
    def setUp(self):
        super(Test_download_object, self).setUp()
        self.the_data = b'test body'
        self.register_uris([
            dict(method='GET', uri=self.object_endpoint,
                 headers={
                     'Content-Length': str(len(self.the_data)),
                     'Content-Type': 'application/octet-stream',
                     'Accept-Ranges': 'bytes',
                     'Last-Modified': 'Thu, 15 Dec 2016 13:34:14 GMT',
                     'Etag': '"b5c454b44fbd5344793e3fb7e3850768"',
                     'X-Timestamp': '1481808853.65009',
                     'X-Trans-Id': 'tx68c2a2278f0c469bb6de1-005857ed80dfw1',
                     'Date': 'Mon, 19 Dec 2016 14:24:00 GMT',
                     'X-Static-Large-Object': 'True',
                     'X-Object-Meta-Mtime': '1481513709.168512',
                 },
                 content=self.the_data)])

        fake_open = mock.mock_open()
        file_path = "blarga/somefile"
        with mock.patch("openstack.object_store.v1._proxy.open",
                        fake_open, create=True):
            self.proxy.download_object(ob, container="tainer", path=file_path)
    def test_download(self):
        data = self.conn.object_store.download_object(
            self.object, container=self.container)

        fake_open.assert_called_once_with(file_path, "w")
        fake_handle = fake_open()
        fake_handle.write.assert_called_once_with(the_data)
        self.assertEqual(data, self.the_data)
        self.assert_calls()
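
A quick usage sketch of the download path the new test_download exercises; this is illustrative only and assumes an authenticated openstack.connection.Connection named conn plus an existing container and object:

    # Fetch the whole object into memory as bytes, then persist it locally.
    data = conn.object_store.download_object('myobject', container='mycontainer')
    with open('myobject.bin', 'wb') as out:
        out.write(data)
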

    def test_stream(self):
        chunk_size = 2
        for index, chunk in enumerate(self.conn.object_store.stream_object(
                self.object, container=self.container,
                chunk_size=chunk_size)):
            chunk_len = len(chunk)
            start = index * chunk_size
            end = start + chunk_len
            self.assertLessEqual(chunk_len, chunk_size)
            self.assertEqual(chunk, self.the_data[start:end])
        self.assert_calls()
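
The streaming variant added in this change avoids holding the whole object in memory. A minimal sketch, assuming the same conn connection as above; the file name and chunk size are illustrative:

    # Stream the object in fixed-size chunks and write each one as it arrives.
    with open('myobject.bin', 'wb') as out:
        for chunk in conn.object_store.stream_object(
                'myobject', container='mycontainer', chunk_size=64 * 1024):
            out.write(chunk)
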


class Test_copy_object(TestObjectStoreProxy):

@@ -16,6 +16,11 @@ from openstack.tests.unit import base


class TestProxyBase(base.TestCase):
    # object_store makes calls with container= rather than
    # path_args=dict(container= because container needs to wind up
    # in the uri components.
    kwargs_to_path_args = True

    def setUp(self):
        super(TestProxyBase, self).setUp()
        self.session = mock.Mock()
@@ -131,7 +136,7 @@ class TestProxyBase(base.TestCase):
        method_kwargs = kwargs.pop("method_kwargs", kwargs)
        if args:
            expected_kwargs["args"] = args
        if kwargs:
        if kwargs and self.kwargs_to_path_args:
            expected_kwargs["path_args"] = kwargs
        if not expected_args:
            expected_args = [resource_type] + the_value
@@ -145,7 +150,10 @@ class TestProxyBase(base.TestCase):
                    mock_method="openstack.proxy2.BaseProxy._head",
                    value=None, **kwargs):
        the_value = [value] if value is not None else []
        expected_kwargs = {"path_args": kwargs} if kwargs else {}
        if self.kwargs_to_path_args:
            expected_kwargs = {"path_args": kwargs} if kwargs else {}
        else:
            expected_kwargs = kwargs or {}
        self._verify2(mock_method, test_method,
                      method_args=the_value,
                      method_kwargs=kwargs,
@@ -852,10 +852,9 @@ class TestResource(base.TestCase):
        class Test(resource2.Resource):
            attr = resource2.Header("attr")

        response = FakeResponse({})
        response = FakeResponse({}, headers={"attr": "value"})

        sot = Test()
        sot._filter_component = mock.Mock(return_value={"attr": "value"})

        sot._translate_response(response, has_body=False)

@@ -1036,7 +1035,8 @@ class TestResourceActions(base.TestCase):
            self.request.url,
            headers={"Accept": ""})

        self.sot._translate_response.assert_called_once_with(self.response)
        self.sot._translate_response.assert_called_once_with(
            self.response, has_body=False)
        self.assertEqual(result, self.sot)

    def _test_update(self, update_method='PUT', prepend_key=True,