hacking: upgrade to 0.9.x series
Change-Id: I252758fd633662de9659a402c5e3d7e3ce1fae0f
parent 353ae24d34
commit fadbef8511
@ -787,8 +787,8 @@ class MetadefPropertyFactoryProxy(glance.domain.proxy.MetadefPropertyFactory):
"owned by '%s'")
raise exception.Forbidden(message % (owner))

return super(MetadefPropertyFactoryProxy, self).\
new_namespace_property(**kwargs)
return super(MetadefPropertyFactoryProxy, self).new_namespace_property(
**kwargs)


class MetadefPropertyRepoProxy(glance.domain.proxy.MetadefPropertyRepo):
@ -93,9 +93,11 @@ def image_send_notification(bytes_written, expected_size, image_meta, request,


def get_remaining_quota(context, db_api, image_id=None):
"""
This method is called to see if the user is allowed to store an image
of the given size in glance based on their quota and current usage.
"""Method called to see if the user is allowed to store an image.

Checks if it is allowed based on the given size in glance based on their
quota and current usage.

:param context:
:param db_api: The db_api in use for this configuration
:param image_id: The image that will be replaced with this new data size
@ -135,9 +137,11 @@ def get_remaining_quota(context, db_api, image_id=None):


def check_quota(context, image_size, db_api, image_id=None):
"""
This method is called to see if the user is allowed to store an image
of the given size in glance based on their quota and current usage.
"""Method called to see if the user is allowed to store an image.

Checks if it is allowed based on the given size in glance based on their
quota and current usage.

:param context:
:param image_size: The size of the image we hope to store
:param db_api: The db_api in use for this configuration
@ -39,8 +39,8 @@ class ProtectedImageFactoryProxy(glance.domain.proxy.ImageFactory):
extra_properties[key] = extra_props[key]
else:
raise exception.ReservedProperty(property=key)
return super(ProtectedImageFactoryProxy, self).\
new_image(extra_properties=extra_properties, **kwargs)
return super(ProtectedImageFactoryProxy, self).new_image(
extra_properties=extra_properties, **kwargs)


class ProtectedImageRepoProxy(glance.domain.proxy.Repo):
@ -13,9 +13,8 @@
# License for the specific language governing permissions and limitations
# under the License.

import webob.exc

import glance_store as store
import webob.exc

from glance.common import exception
import glance.openstack.common.log as logging
@ -268,8 +268,8 @@ class Controller(controller.BaseController):
key, 'read', req.context) is False):
# NOTE(bourke): if read protected, re-add to image_meta to
# prevent deletion
image_meta['properties'][key] = \
orig_meta['properties'][key]
image_meta['properties'][key] = orig_meta[
'properties'][key]
elif (self.prop_enforcer.check_property_rules(
key, 'delete', req.context) is False):
msg = "Property '%s' is protected" % key
@ -1073,7 +1073,7 @@ class Controller(controller.BaseController):
content_type="text/plain")
except exception.InUseByStore as e:
msg = (_LI("Image %s could not be deleted because it is in use: "
"%s") % (id, utils.exception_to_str(e)))
"%s") % (id, utils.exception_to_str(e))) # noqa
for line in msg.split('\n'):
LOG.info(line)
raise HTTPConflict(explanation=msg,
@ -12,12 +12,10 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import glance_store as store_api
from oslo.config import cfg
import webob.exc

import glance_store as store_api

from glance.common import exception
from glance.common import store_utils
from glance.common import utils
@ -12,10 +12,8 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import webob.exc

import glance_store
import webob.exc

import glance.api.policy
from glance.common import exception
@ -95,8 +93,8 @@ class ImageDataController(object):
LOG.debug("Cannot save data for image %(id)s: %(e)s",
{'id': image_id, 'e': utils.exception_to_str(e)})
self._restore(image_repo, image)
raise webob.exc.HTTPBadRequest(explanation=
utils.exception_to_str(e))
raise webob.exc.HTTPBadRequest(
explanation=utils.exception_to_str(e))

except exception.InvalidImageStatusTransition as e:
msg = utils.exception_to_str(e)
@ -14,10 +14,10 @@
# under the License.

import copy
import six
import webob

import glance_store
import six
import webob

from glance.api import policy
from glance.common import exception
@ -58,8 +58,8 @@ class ImageMembersController(object):

"""
image_repo = self.gateway.get_repo(req.context)
image_member_factory = self.gateway\
.get_image_member_factory(req.context)
image_member_factory = self.gateway.get_image_member_factory(
req.context)
try:
image = image_repo.get(image_id)
member_repo = image.get_member_repo()
@ -106,8 +106,8 @@ class ImageMembersController(object):
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except ValueError as e:
raise webob.exc.HTTPBadRequest(explanation=
utils.exception_to_str(e))
raise webob.exc.HTTPBadRequest(
explanation=utils.exception_to_str(e))

def index(self, req, image_id):
"""
@ -12,10 +12,8 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import webob.exc
|
||||
|
||||
import glance_store
|
||||
import webob.exc
|
||||
|
||||
from glance.api import policy
|
||||
from glance.common import exception
|
||||
|
@ -211,7 +211,7 @@ class ImagesController(object):
|
||||
raise webob.exc.HTTPNotFound(explanation=msg)
|
||||
except exception.InUseByStore as e:
|
||||
msg = (_LI("Image %s could not be deleted "
|
||||
"because it is in use: %s") % (image_id, e.msg))
|
||||
"because it is in use: %s") % (image_id, e.msg)) # noqa
|
||||
LOG.info(msg)
|
||||
raise webob.exc.HTTPConflict(explanation=msg)
|
||||
|
||||
@ -246,8 +246,8 @@ class ImagesController(object):
|
||||
except (exception.BadStoreUri, exception.DuplicateLocation) as bse:
|
||||
raise webob.exc.HTTPBadRequest(explanation=bse.msg)
|
||||
except ValueError as ve: # update image status failed.
|
||||
raise webob.exc.HTTPBadRequest(explanation=
|
||||
utils.exception_to_str(ve))
|
||||
raise webob.exc.HTTPBadRequest(
|
||||
explanation=utils.exception_to_str(ve))
|
||||
|
||||
def _do_add_locations(self, image, path_pos, value):
|
||||
pos = self._get_locations_op_pos(path_pos,
|
||||
@ -262,8 +262,8 @@ class ImagesController(object):
|
||||
except (exception.BadStoreUri, exception.DuplicateLocation) as bse:
|
||||
raise webob.exc.HTTPBadRequest(explanation=bse.msg)
|
||||
except ValueError as ve: # update image status failed.
|
||||
raise webob.exc.HTTPBadRequest(explanation=
|
||||
utils.exception_to_str(ve))
|
||||
raise webob.exc.HTTPBadRequest(
|
||||
explanation=utils.exception_to_str(ve))
|
||||
|
||||
def _do_remove_locations(self, image, path_pos):
|
||||
pos = self._get_locations_op_pos(path_pos,
|
||||
@ -276,8 +276,8 @@ class ImagesController(object):
|
||||
# from the backend store.
|
||||
image.locations.pop(pos)
|
||||
except Exception as e:
|
||||
raise webob.exc.HTTPInternalServerError(explanation=
|
||||
utils.exception_to_str(e))
|
||||
raise webob.exc.HTTPInternalServerError(
|
||||
explanation=utils.exception_to_str(e))
|
||||
if (len(image.locations) == 0) and (image.status == 'active'):
|
||||
image.status = 'queued'
|
||||
|
||||
@ -312,8 +312,8 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
|
||||
for key in cls._disallowed_properties:
|
||||
if key in image:
|
||||
msg = _("Attribute '%s' is read-only.") % key
|
||||
raise webob.exc.HTTPForbidden(explanation=
|
||||
utils.exception_to_str(msg))
|
||||
raise webob.exc.HTTPForbidden(
|
||||
explanation=utils.exception_to_str(msg))
|
||||
|
||||
def create(self, request):
|
||||
body = self._get_request_body(request)
|
||||
|
@ -15,12 +15,12 @@
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import webob.exc
|
||||
|
||||
import glance_store
|
||||
from oslo.config import cfg
|
||||
import six
|
||||
import six.moves.urllib.parse as urlparse
|
||||
import webob.exc
|
||||
|
||||
from glance.api import policy
|
||||
from glance.common import exception
|
||||
@ -228,8 +228,8 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
|
||||
def __init__(self, task_schema=None, partial_task_schema=None):
|
||||
super(ResponseSerializer, self).__init__()
|
||||
self.task_schema = task_schema or get_task_schema()
|
||||
self.partial_task_schema = partial_task_schema \
|
||||
or _get_partial_task_schema()
|
||||
self.partial_task_schema = (partial_task_schema
|
||||
or _get_partial_task_schema())
|
||||
|
||||
def _inject_location_header(self, response, task):
|
||||
location = self._get_task_location(task)
|
||||
|
@ -21,10 +21,11 @@
|
||||
Glance API Server
|
||||
"""
|
||||
|
||||
import eventlet
|
||||
import os
|
||||
import sys
|
||||
|
||||
import eventlet
|
||||
|
||||
from glance.common import utils
|
||||
|
||||
# Monkey patch socket, time, select, threads
|
||||
|
@ -65,8 +65,8 @@ And command is one of:
|
||||
|
||||
{1}
|
||||
|
||||
And CONFPATH is the optional configuration file to use.""".\
|
||||
format(', '.join(ALL_SERVERS), ', '.join(ALL_COMMANDS))
|
||||
And CONFPATH is the optional configuration file to use.""".format(
|
||||
', '.join(ALL_SERVERS), ', '.join(ALL_COMMANDS))
|
||||
|
||||
exitcode = 0
|
||||
|
||||
|
@ -21,10 +21,11 @@
|
||||
Reference implementation server for Glance Registry
|
||||
"""
|
||||
|
||||
import eventlet
|
||||
import os
|
||||
import sys
|
||||
|
||||
import eventlet
|
||||
|
||||
# Monkey patch socket and time
|
||||
eventlet.patcher.monkey_patch(all=False, socket=True, time=True, thread=True)
|
||||
|
||||
|
@ -444,7 +444,7 @@ def replication_load(options, args):
|
||||
updated.append(meta['id'])
|
||||
except ImageAlreadyPresentException:
|
||||
LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE)
|
||||
% image_uuid)
|
||||
% image_uuid) # noqa
|
||||
|
||||
return updated
|
||||
|
||||
@ -515,7 +515,7 @@ def replication_livecopy(options, args):
|
||||
_check_upload_response_headers(headers, body)
|
||||
updated.append(image['id'])
|
||||
except ImageAlreadyPresentException:
|
||||
LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE) % image['id'])
|
||||
LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE) % image['id']) # noqa
|
||||
|
||||
return updated
|
||||
|
||||
@ -736,10 +736,10 @@ def main():
|
||||
try:
|
||||
command(options, args)
|
||||
except TypeError as e:
|
||||
LOG.error(_LE(command.__doc__) % {'prog': command.__name__})
|
||||
LOG.error(_LE(command.__doc__) % {'prog': command.__name__}) # noqa
|
||||
sys.exit("ERROR: %s" % e)
|
||||
except ValueError as e:
|
||||
LOG.error(_LE(command.__doc__) % {'prog': command.__name__})
|
||||
LOG.error(_LE(command.__doc__) % {'prog': command.__name__}) # noqa
|
||||
sys.exit("ERROR: %s" % e)
|
||||
|
||||
|
||||
|
@ -29,7 +29,6 @@ Keystone (an identity management system).
|
||||
http://service_endpoint/
|
||||
"""
|
||||
import httplib2
|
||||
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
from glance.common import exception
|
||||
|
@ -70,10 +70,10 @@ def _execute(t_id, task_repo, image_repo, image_factory):
|
||||
# necessary
|
||||
err_msg = ("Error: " + six.text_type(type(e)) + ': ' +
|
||||
common_utils.exception_to_str(e))
|
||||
log_msg = _LE(err_msg + ("Task ID %s" % task.task_id))
|
||||
log_msg = _LE(err_msg + ("Task ID %s" % task.task_id)) # noqa
|
||||
LOG.exception(log_msg)
|
||||
|
||||
task.fail(_LE(err_msg))
|
||||
task.fail(_LE(err_msg)) # noqa
|
||||
finally:
|
||||
task_repo.save(task)
|
||||
|
||||
|
@ -67,8 +67,8 @@ class SwiftParams(object):
|
||||
|
||||
def _form_default_params(self):
|
||||
default = {}
|
||||
if CONF.swift_store_user and CONF.swift_store_key \
|
||||
and CONF.swift_store_auth_address:
|
||||
if (CONF.swift_store_user and CONF.swift_store_key
|
||||
and CONF.swift_store_auth_address):
|
||||
default['user'] = CONF.swift_store_user
|
||||
default['key'] = CONF.swift_store_key
|
||||
default['auth_address'] = CONF.swift_store_auth_address
|
||||
|
@ -38,9 +38,8 @@ import uuid
|
||||
import netaddr
|
||||
from OpenSSL import crypto
|
||||
from oslo.config import cfg
|
||||
from webob import exc
|
||||
|
||||
import six
|
||||
from webob import exc
|
||||
|
||||
from glance.common import exception
|
||||
from glance.openstack.common import excutils
|
||||
|
@ -433,7 +433,7 @@ class Debug(Middleware):
|
||||
sys.stdout.write(part)
|
||||
sys.stdout.flush()
|
||||
yield part
|
||||
print
|
||||
print()
|
||||
|
||||
|
||||
class APIMapper(routes.Mapper):
|
||||
|
@ -20,9 +20,11 @@ from glance.openstack.common import local
|
||||
|
||||
|
||||
class RequestContext(object):
|
||||
"""
|
||||
Stores information about the security context under which the user
|
||||
accesses the system, as well as additional request information.
|
||||
"""Stores information about the security context.
|
||||
|
||||
Stores how the user accesses the system, as well as additional request
|
||||
information.
|
||||
|
||||
"""
|
||||
|
||||
user_idt_format = '{user} {tenant} {domain} {user_domain} {p_domain}'
|
||||
@ -47,8 +49,7 @@ class RequestContext(object):
|
||||
self.user_domain = user_domain
|
||||
self.project_domain = project_domain
|
||||
if not self.is_admin:
|
||||
self.is_admin = \
|
||||
self.policy_enforcer.check_is_admin(self)
|
||||
self.is_admin = self.policy_enforcer.check_is_admin(self)
|
||||
|
||||
if not hasattr(local.store, 'context'):
|
||||
self.update_store()
|
||||
|
@ -34,11 +34,6 @@ import sqlalchemy.orm as sa_orm
|
||||
import sqlalchemy.sql as sa_sql
|
||||
|
||||
from glance.common import exception
|
||||
from glance.db.sqlalchemy import models
|
||||
from glance import i18n
|
||||
import glance.openstack.common.log as os_logging
|
||||
from glance.openstack.common import timeutils
|
||||
|
||||
from glance.db.sqlalchemy.metadef_api import namespace as metadef_namespace_api
|
||||
from glance.db.sqlalchemy.metadef_api import object as metadef_object_api
|
||||
from glance.db.sqlalchemy.metadef_api import property as metadef_property_api
|
||||
@ -46,6 +41,10 @@ from glance.db.sqlalchemy.metadef_api\
|
||||
import resource_type as metadef_resource_type_api
|
||||
from glance.db.sqlalchemy.metadef_api\
|
||||
import resource_type_association as metadef_association_api
|
||||
from glance.db.sqlalchemy import models
|
||||
from glance import i18n
|
||||
import glance.openstack.common.log as os_logging
|
||||
from glance.openstack.common import timeutils
|
||||
|
||||
BASE = models.BASE
|
||||
sa_logger = None
|
||||
@ -201,8 +200,8 @@ def _check_image_id(image_id):
|
||||
:param image_id: The id of the image we want to check
|
||||
:return: Raise NoFound exception if given image id is invalid
|
||||
"""
|
||||
if image_id and \
|
||||
len(image_id) > models.Image.id.property.columns[0].type.length:
|
||||
if (image_id and
|
||||
len(image_id) > models.Image.id.property.columns[0].type.length):
|
||||
raise exception.NotFound()
|
||||
|
||||
|
||||
@ -212,10 +211,10 @@ def _image_get(context, image_id, session=None, force_show_deleted=False):
|
||||
session = session or get_session()
|
||||
|
||||
try:
|
||||
query = session.query(models.Image)\
|
||||
.options(sa_orm.joinedload(models.Image.properties))\
|
||||
.options(sa_orm.joinedload(models.Image.locations))\
|
||||
.filter_by(id=image_id)
|
||||
query = session.query(models.Image).options(
|
||||
sa_orm.joinedload(models.Image.properties)).options(
|
||||
sa_orm.joinedload(
|
||||
models.Image.locations)).filter_by(id=image_id)
|
||||
|
||||
# filter out deleted images if context disallows it
|
||||
if not force_show_deleted and not _can_show_deleted(context):
|
||||
@ -396,7 +395,8 @@ def _paginate_query(query, model, limit, sort_keys, marker=None,
|
||||
|
||||
|
||||
def _make_conditions_from_filters(filters, is_public=None):
|
||||
#NOTE(venkatesh) make copy of the filters are to be altered in this method.
|
||||
# NOTE(venkatesh) make copy of the filters are to be altered in this
|
||||
# method.
|
||||
filters = filters.copy()
|
||||
|
||||
image_conditions = []
|
||||
@ -485,9 +485,8 @@ def _select_images_query(context, image_conditions, admin_as_user,
|
||||
|
||||
regular_user = (not context.is_admin) or admin_as_user
|
||||
|
||||
query_member = session.query(models.Image) \
|
||||
.join(models.Image.members) \
|
||||
.filter(img_conditional_clause)
|
||||
query_member = session.query(models.Image).join(
|
||||
models.Image.members).filter(img_conditional_clause)
|
||||
if regular_user:
|
||||
member_filters = [models.ImageMember.deleted == False]
|
||||
if context.owner is not None:
|
||||
@ -502,15 +501,14 @@ def _select_images_query(context, image_conditions, admin_as_user,
|
||||
if visibility is not None and visibility == 'shared':
|
||||
return query_member
|
||||
|
||||
query_image = session.query(models.Image)\
|
||||
.filter(img_conditional_clause)
|
||||
query_image = session.query(models.Image).filter(img_conditional_clause)
|
||||
if regular_user:
|
||||
query_image = query_image.filter(models.Image.is_public == True)
|
||||
query_image_owner = None
|
||||
if context.owner is not None:
|
||||
query_image_owner = session.query(models.Image) \
|
||||
.filter(models.Image.owner == context.owner) \
|
||||
.filter(img_conditional_clause)
|
||||
query_image_owner = session.query(models.Image).filter(
|
||||
models.Image.owner == context.owner).filter(
|
||||
img_conditional_clause)
|
||||
if query_image_owner is not None:
|
||||
query = query_image.union(query_image_owner, query_member)
|
||||
else:
|
||||
@ -552,11 +550,11 @@ def image_get_all(context, filters=None, marker=None, limit=None,
|
||||
showing_deleted = 'changes-since' in filters or filters.get('deleted',
|
||||
False)
|
||||
|
||||
img_conditions, prop_conditions, tag_conditions = \
|
||||
_make_conditions_from_filters(filters, is_public)
|
||||
img_cond, prop_cond, tag_cond = _make_conditions_from_filters(
|
||||
filters, is_public)
|
||||
|
||||
query = _select_images_query(context,
|
||||
img_conditions,
|
||||
img_cond,
|
||||
admin_as_user,
|
||||
member_status,
|
||||
visibility)
|
||||
@ -567,15 +565,15 @@ def image_get_all(context, filters=None, marker=None, limit=None,
|
||||
elif visibility == 'private':
|
||||
query = query.filter(models.Image.is_public == False)
|
||||
|
||||
if prop_conditions:
|
||||
for prop_condition in prop_conditions:
|
||||
query = query.join(models.ImageProperty, aliased=True)\
|
||||
.filter(sa_sql.and_(*prop_condition))
|
||||
if prop_cond:
|
||||
for prop_condition in prop_cond:
|
||||
query = query.join(models.ImageProperty, aliased=True).filter(
|
||||
sa_sql.and_(*prop_condition))
|
||||
|
||||
if tag_conditions:
|
||||
for tag_condition in tag_conditions:
|
||||
query = query.join(models.ImageTag, aliased=True)\
|
||||
.filter(sa_sql.and_(*tag_condition))
|
||||
if tag_cond:
|
||||
for tag_condition in tag_cond:
|
||||
query = query.join(models.ImageTag, aliased=True).filter(
|
||||
sa_sql.and_(*tag_condition))
|
||||
|
||||
marker_image = None
|
||||
if marker is not None:
|
||||
@ -591,8 +589,9 @@ def image_get_all(context, filters=None, marker=None, limit=None,
|
||||
marker=marker_image,
|
||||
sort_dir=sort_dir)
|
||||
|
||||
query = query.options(sa_orm.joinedload(models.Image.properties))\
|
||||
.options(sa_orm.joinedload(models.Image.locations))
|
||||
query = query.options(sa_orm.joinedload(
|
||||
models.Image.properties)).options(
|
||||
sa_orm.joinedload(models.Image.locations))
|
||||
if return_tag:
|
||||
query = query.options(sa_orm.joinedload(models.Image.tags))
|
||||
|
||||
@ -792,10 +791,8 @@ def image_location_update(context, image_id, location, session=None):
|
||||
|
||||
try:
|
||||
session = session or get_session()
|
||||
location_ref = session.query(models.ImageLocation)\
|
||||
.filter_by(id=loc_id)\
|
||||
.filter_by(image_id=image_id)\
|
||||
.one()
|
||||
location_ref = session.query(models.ImageLocation).filter_by(
|
||||
id=loc_id).filter_by(image_id=image_id).one()
|
||||
|
||||
deleted = location['status'] in ('deleted', 'pending_delete')
|
||||
updated_time = timeutils.utcnow()
|
||||
@ -824,10 +821,8 @@ def image_location_delete(context, image_id, location_id, status,
|
||||
|
||||
try:
|
||||
session = session or get_session()
|
||||
location_ref = session.query(models.ImageLocation)\
|
||||
.filter_by(id=location_id)\
|
||||
.filter_by(image_id=image_id)\
|
||||
.one()
|
||||
location_ref = session.query(models.ImageLocation).filter_by(
|
||||
id=location_id).filter_by(image_id=image_id).one()
|
||||
|
||||
delete_time = delete_time or timeutils.utcnow()
|
||||
|
||||
@ -846,10 +841,10 @@ def image_location_delete(context, image_id, location_id, status,
|
||||
def _image_locations_set(context, image_id, locations, session=None):
|
||||
# NOTE(zhiyan): 1. Remove records from DB for deleted locations
|
||||
session = session or get_session()
|
||||
query = session.query(models.ImageLocation) \
|
||||
.filter_by(image_id=image_id) \
|
||||
.filter_by(deleted=False) \
|
||||
.filter(~models.ImageLocation.id.in_([loc['id']
|
||||
query = session.query(models.ImageLocation).filter_by(
|
||||
image_id=image_id).filter_by(
|
||||
deleted=False).filter(~models.ImageLocation.id.in_(
|
||||
[loc['id']
|
||||
for loc in locations
|
||||
if loc.get('id')]))
|
||||
for loc_id in [loc_ref.id for loc_ref in query.all()]:
|
||||
@ -868,10 +863,8 @@ def _image_locations_delete_all(context, image_id,
|
||||
delete_time=None, session=None):
|
||||
"""Delete all image locations for given image"""
|
||||
session = session or get_session()
|
||||
location_refs = session.query(models.ImageLocation) \
|
||||
.filter_by(image_id=image_id) \
|
||||
.filter_by(deleted=False) \
|
||||
.all()
|
||||
location_refs = session.query(models.ImageLocation).filter_by(
|
||||
image_id=image_id).filter_by(deleted=False).all()
|
||||
|
||||
for loc_id in [loc_ref.id for loc_ref in location_refs]:
|
||||
image_location_delete(context, image_id, loc_id, 'deleted',
|
||||
@ -933,9 +926,8 @@ def _image_child_entry_delete_all(child_model_cls, image_id, delete_time=None,
|
||||
"""
|
||||
session = session or get_session()
|
||||
|
||||
query = session.query(child_model_cls) \
|
||||
.filter_by(image_id=image_id) \
|
||||
.filter_by(deleted=False)
|
||||
query = session.query(child_model_cls).filter_by(
|
||||
image_id=image_id).filter_by(deleted=False)
|
||||
|
||||
delete_time = delete_time or timeutils.utcnow()
|
||||
|
||||
@ -1143,10 +1135,9 @@ def image_tag_delete(context, image_id, value, session=None):
|
||||
"""Delete an image tag."""
|
||||
_check_image_id(image_id)
|
||||
session = session or get_session()
|
||||
query = session.query(models.ImageTag)\
|
||||
.filter_by(image_id=image_id)\
|
||||
.filter_by(value=value)\
|
||||
.filter_by(deleted=False)
|
||||
query = session.query(models.ImageTag).filter_by(
|
||||
image_id=image_id).filter_by(
|
||||
value=value).filter_by(deleted=False)
|
||||
try:
|
||||
tag_ref = query.one()
|
||||
except sa_orm.exc.NoResultFound:
|
||||
@ -1168,10 +1159,8 @@ def image_tag_get_all(context, image_id, session=None):
|
||||
"""Get a list of tags for a specific image."""
|
||||
_check_image_id(image_id)
|
||||
session = session or get_session()
|
||||
tags = session.query(models.ImageTag.value)\
|
||||
.filter_by(image_id=image_id)\
|
||||
.filter_by(deleted=False)\
|
||||
.all()
|
||||
tags = session.query(models.ImageTag.value).filter_by(
|
||||
image_id=image_id).filter_by(deleted=False).all()
|
||||
return [tag[0] for tag in tags]
|
||||
|
||||
|
||||
@ -1319,8 +1308,8 @@ def task_get_all(context, filters=None, marker=None, limit=None,
|
||||
session = get_session()
|
||||
query = session.query(models.Task)
|
||||
|
||||
if not (context.is_admin or admin_as_user == True) and \
|
||||
context.owner is not None:
|
||||
if (not (context.is_admin or admin_as_user == True)
|
||||
and context.owner is not None):
|
||||
query = query.filter(models.Task.owner == context.owner)
|
||||
|
||||
showing_deleted = False
|
||||
|
@ -287,8 +287,9 @@ def _export_data_to_file(meta, path):
|
||||
json_file.write(json.dumps(values))
|
||||
except Exception as e:
|
||||
LOG.exception(utils.exception_to_str(e))
|
||||
LOG.info(_LI("Namespace %s saved in %s"),
|
||||
namespace_file_name, file_name)
|
||||
msg = _LI("Namespace %(namespace_file_name)s saved in %(file_name)s")
|
||||
LOG.info(msg % {'namespace_file_name': namespace_file_name,
|
||||
'file_name': file_name})
|
||||
|
||||
|
||||
def db_load_metadefs(engine, metadata_path=None):
|
||||
|
@ -86,8 +86,8 @@ def _get(context, namespace_id, session):
|
||||
.filter_by(id=namespace_id)
|
||||
namespace_rec = query.one()
|
||||
except sa_orm.exc.NoResultFound:
|
||||
LOG.warn(_LW("Metadata definition namespace not found for id=%s",
|
||||
namespace_id))
|
||||
msg = _LW("Metadata definition namespace not found for id=%s")
|
||||
LOG.warn(msg % namespace_id)
|
||||
raise exc.MetadefRecordNotFound(record_type='namespace',
|
||||
id=namespace_id)
|
||||
|
||||
|
@ -34,8 +34,8 @@ def _get(context, object_id, session):
|
||||
.filter_by(id=object_id)
|
||||
metadef_object = query.one()
|
||||
except sa_orm.exc.NoResultFound:
|
||||
LOG.warn(_LW("Metadata definition object not found for id %s",
|
||||
object_id))
|
||||
msg = _LW("Metadata definition object not found for id %s")
|
||||
LOG.warn(msg % object_id)
|
||||
raise exc.MetadefRecordNotFound(record_type='object', id=object_id)
|
||||
|
||||
return metadef_object
|
||||
|
@ -36,8 +36,8 @@ def _get(context, property_id, session):
|
||||
property_rec = query.one()
|
||||
|
||||
except sa_orm.exc.NoResultFound:
|
||||
LOG.warn(_LW("Metadata definition property not found for id=%s",
|
||||
property_id))
|
||||
msg = _LW("Metadata definition property not found for id=%s")
|
||||
LOG.warn(msg % property_id)
|
||||
raise exc.MetadefRecordNotFound(
|
||||
record_type='property', id=property_id)
|
||||
|
||||
|
@ -1 +0,0 @@
|
||||
# template repository default module
|
@ -251,7 +251,7 @@ def _add_db2_constraints():
|
||||
|
||||
|
||||
def _remove_db2_constraints():
|
||||
#remove the foreign keys constraints
|
||||
# Remove the foreign keys constraints
|
||||
sql_commands = [
|
||||
"""ALTER TABLE image_members DROP CONSTRAINT member_image_id;""",
|
||||
"""ALTER TABLE image_properties DROP CONSTRAINT property_image_id;"""
|
||||
@ -536,23 +536,22 @@ def _update_all_ids_to_uuids(t_images, t_image_members, t_image_properties):
|
||||
old_id = image["id"]
|
||||
new_id = str(uuid.uuid4())
|
||||
|
||||
t_images.update().\
|
||||
where(t_images.c.id == old_id).\
|
||||
values(id=new_id).execute()
|
||||
t_images.update().where(
|
||||
t_images.c.id == old_id).values(id=new_id).execute()
|
||||
|
||||
t_image_members.update().\
|
||||
where(t_image_members.c.image_id == old_id).\
|
||||
values(image_id=new_id).execute()
|
||||
t_image_members.update().where(
|
||||
t_image_members.c.image_id == old_id).values(
|
||||
image_id=new_id).execute()
|
||||
|
||||
t_image_properties.update().\
|
||||
where(t_image_properties.c.image_id == old_id).\
|
||||
values(image_id=new_id).execute()
|
||||
t_image_properties.update().where(
|
||||
t_image_properties.c.image_id == old_id).values(
|
||||
image_id=new_id).execute()
|
||||
|
||||
t_image_properties.update().\
|
||||
where(and_(or_(t_image_properties.c.name == 'kernel_id',
|
||||
t_image_properties.update().where(
|
||||
and_(or_(t_image_properties.c.name == 'kernel_id',
|
||||
t_image_properties.c.name == 'ramdisk_id'),
|
||||
t_image_properties.c.value == old_id)).\
|
||||
values(value=new_id).execute()
|
||||
t_image_properties.c.value == old_id)).values(
|
||||
value=new_id).execute()
|
||||
|
||||
|
||||
def _update_all_uuids_to_ids(t_images, t_image_members, t_image_properties):
|
||||
@ -563,22 +562,22 @@ def _update_all_uuids_to_ids(t_images, t_image_members, t_image_properties):
|
||||
for image in images:
|
||||
old_id = image["id"]
|
||||
|
||||
t_images.update().\
|
||||
where(t_images.c.id == old_id).\
|
||||
values(id=str(new_id)).execute()
|
||||
t_images.update().where(
|
||||
t_images.c.id == old_id).values(
|
||||
id=str(new_id)).execute()
|
||||
|
||||
t_image_members.update().\
|
||||
where(t_image_members.c.image_id == old_id).\
|
||||
values(image_id=str(new_id)).execute()
|
||||
t_image_members.update().where(
|
||||
t_image_members.c.image_id == old_id).values(
|
||||
image_id=str(new_id)).execute()
|
||||
|
||||
t_image_properties.update().\
|
||||
where(t_image_properties.c.image_id == old_id).\
|
||||
values(image_id=str(new_id)).execute()
|
||||
t_image_properties.update().where(
|
||||
t_image_properties.c.image_id == old_id).values(
|
||||
image_id=str(new_id)).execute()
|
||||
|
||||
t_image_properties.update().\
|
||||
where(and_(or_(t_image_properties.c.name == 'kernel_id',
|
||||
t_image_properties.update().where(
|
||||
and_(or_(t_image_properties.c.name == 'kernel_id',
|
||||
t_image_properties.c.name == 'ramdisk_id'),
|
||||
t_image_properties.c.value == old_id)).\
|
||||
values(value=str(new_id)).execute()
|
||||
t_image_properties.c.value == old_id)).values(
|
||||
value=str(new_id)).execute()
|
||||
|
||||
new_id += 1
|
||||
|
@ -54,9 +54,9 @@ def migrate_location_credentials(migrate_engine, to_quoted):
|
||||
for image in images:
|
||||
try:
|
||||
fixed_uri = legacy_parse_uri(image['location'], to_quoted)
|
||||
images_table.update()\
|
||||
.where(images_table.c.id == image['id'])\
|
||||
.values(location=fixed_uri).execute()
|
||||
images_table.update().where(
|
||||
images_table.c.id == image['id']).values(
|
||||
location=fixed_uri).execute()
|
||||
except exception.BadStoreUri as e:
|
||||
reason = utils.exception_to_str(e)
|
||||
msg = _LE("Invalid store uri for image: %(image_id)s. "
|
||||
|
@ -84,9 +84,9 @@ def migrate_location_credentials(migrate_engine, to_quoted):
|
||||
for image in images:
|
||||
try:
|
||||
fixed_uri = fix_uri_credentials(image['location'], to_quoted)
|
||||
images_table.update()\
|
||||
.where(images_table.c.id == image['id'])\
|
||||
.values(location=fixed_uri).execute()
|
||||
images_table.update().where(
|
||||
images_table.c.id == image['id']).values(
|
||||
location=fixed_uri).execute()
|
||||
except exception.Invalid:
|
||||
msg = _LW("Failed to decrypt location value for image"
|
||||
" %(image_id)s") % {'image_id': image['id']}
|
||||
|
@ -53,6 +53,6 @@ def downgrade(migrate_engine):
|
||||
image_records = image_locations_table.select().execute().fetchall()
|
||||
|
||||
for image_location in image_records:
|
||||
images_table.update(values={'location': image_location.value})\
|
||||
.where(images_table.c.id == image_location.image_id)\
|
||||
.execute()
|
||||
images_table.update(
|
||||
values={'location': image_location.value}).where(
|
||||
images_table.c.id == image_location.image_id).execute()
|
||||
|
@ -29,8 +29,8 @@ def upgrade(migrate_engine):
|
||||
new_meta_data.create(image_locations)
|
||||
|
||||
noe = pickle.dumps({})
|
||||
s = sqlalchemy.sql.select([image_locations]).\
|
||||
where(image_locations.c.meta_data != noe)
|
||||
s = sqlalchemy.sql.select([image_locations]).where(
|
||||
image_locations.c.meta_data != noe)
|
||||
conn = migrate_engine.connect()
|
||||
res = conn.execute(s)
|
||||
|
||||
@ -38,9 +38,8 @@ def upgrade(migrate_engine):
|
||||
meta_data = row['meta_data']
|
||||
x = pickle.loads(meta_data)
|
||||
if x != {}:
|
||||
stmt = image_locations.update().\
|
||||
where(image_locations.c.id == row['id']).\
|
||||
values(storage_meta_data=x)
|
||||
stmt = image_locations.update().where(
|
||||
image_locations.c.id == row['id']).values(storage_meta_data=x)
|
||||
conn.execute(stmt)
|
||||
conn.close()
|
||||
image_locations.columns['meta_data'].drop()
|
||||
@ -55,8 +54,8 @@ def downgrade(migrate_engine):
|
||||
old_meta_data.create(image_locations)
|
||||
|
||||
noj = json.dumps({})
|
||||
s = sqlalchemy.sql.select([image_locations]).\
|
||||
where(image_locations.c.meta_data != noj)
|
||||
s = sqlalchemy.sql.select([image_locations]).where(
|
||||
image_locations.c.meta_data != noj)
|
||||
conn = migrate_engine.connect()
|
||||
res = conn.execute(s)
|
||||
|
||||
@ -64,9 +63,9 @@ def downgrade(migrate_engine):
|
||||
x = row['meta_data']
|
||||
meta_data = json.loads(x)
|
||||
if meta_data != {}:
|
||||
stmt = image_locations.update().\
|
||||
where(image_locations.c.id == row['id']).\
|
||||
values(old_meta_data=meta_data)
|
||||
stmt = image_locations.update().where(
|
||||
image_locations.c.id == row['id']).values(
|
||||
old_meta_data=meta_data)
|
||||
conn.execute(stmt)
|
||||
conn.close()
|
||||
image_locations.columns['meta_data'].drop()
|
||||
|
@ -86,9 +86,7 @@ def downgrade(migrate_engine):
|
||||
'message': task_info.message
|
||||
}
|
||||
|
||||
tasks_table\
|
||||
.update(values=values)\
|
||||
.where(tasks_table.c.id == task_info.task_id)\
|
||||
.execute()
|
||||
tasks_table.update(values=values).where(
|
||||
tasks_table.c.id == task_info.task_id).execute()
|
||||
|
||||
drop_tables([task_info_table])
|
||||
|
@ -35,11 +35,10 @@ def upgrade(migrate_engine):
|
||||
mapping = {'active': 'active', 'pending_delete': 'pending_delete',
|
||||
'deleted': 'deleted', 'killed': 'deleted'}
|
||||
for src, dst in six.iteritems(mapping):
|
||||
subq = sqlalchemy.sql.select([images_table.c.id])\
|
||||
.where(images_table.c.status == src)
|
||||
image_locations_table.update(values={'status': dst})\
|
||||
.where(image_locations_table.c.image_id.in_(subq))\
|
||||
.execute()
|
||||
subq = sqlalchemy.sql.select([images_table.c.id]).where(
|
||||
images_table.c.status == src)
|
||||
image_locations_table.update(values={'status': dst}).where(
|
||||
image_locations_table.c.image_id.in_(subq)).execute()
|
||||
|
||||
|
||||
def downgrade(migrate_engine):
|
||||
|
@ -1 +0,0 @@
|
||||
# template repository default versions module
|
@ -13,6 +13,7 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import glance_store
|
||||
|
||||
from glance.api import authorization
|
||||
from glance.api import policy
|
||||
@ -24,7 +25,6 @@ import glance.domain
|
||||
import glance.location
|
||||
import glance.notifier
|
||||
import glance.quota
|
||||
import glance_store
|
||||
|
||||
|
||||
class Gateway(object):
|
||||
@ -48,11 +48,10 @@ class Gateway(object):
|
||||
policy_image_factory, context, self.notifier)
|
||||
if property_utils.is_property_protection_enabled():
|
||||
property_rules = property_utils.PropertyRules(self.policy)
|
||||
protected_image_factory = property_protections.\
|
||||
ProtectedImageFactoryProxy(notifier_image_factory, context,
|
||||
property_rules)
|
||||
pif = property_protections.ProtectedImageFactoryProxy(
|
||||
notifier_image_factory, context, property_rules)
|
||||
authorized_image_factory = authorization.ImageFactoryProxy(
|
||||
protected_image_factory, context)
|
||||
pif, context)
|
||||
else:
|
||||
authorized_image_factory = authorization.ImageFactoryProxy(
|
||||
notifier_image_factory, context)
|
||||
@ -80,11 +79,10 @@ class Gateway(object):
|
||||
policy_image_repo, context, self.notifier)
|
||||
if property_utils.is_property_protection_enabled():
|
||||
property_rules = property_utils.PropertyRules(self.policy)
|
||||
protected_image_repo = property_protections.\
|
||||
ProtectedImageRepoProxy(notifier_image_repo, context,
|
||||
property_rules)
|
||||
pir = property_protections.ProtectedImageRepoProxy(
|
||||
notifier_image_repo, context, property_rules)
|
||||
authorized_image_repo = authorization.ImageRepoProxy(
|
||||
protected_image_repo, context)
|
||||
pir, context)
|
||||
else:
|
||||
authorized_image_repo = authorization.ImageRepoProxy(
|
||||
notifier_image_repo, context)
|
||||
@ -165,20 +163,16 @@ class Gateway(object):
|
||||
resource_type_factory = glance.domain.MetadefResourceTypeFactory()
|
||||
policy_resource_type_factory = policy.MetadefResourceTypeFactoryProxy(
|
||||
resource_type_factory, context, self.policy)
|
||||
authorized_resource_type_factory = \
|
||||
authorization.MetadefResourceTypeFactoryProxy(
|
||||
return authorization.MetadefResourceTypeFactoryProxy(
|
||||
policy_resource_type_factory, context)
|
||||
return authorized_resource_type_factory
|
||||
|
||||
def get_metadef_resource_type_repo(self, context):
|
||||
resource_type_repo = glance.db.MetadefResourceTypeRepo(
|
||||
context, self.db_api)
|
||||
policy_object_repo = policy.MetadefResourceTypeRepoProxy(
|
||||
resource_type_repo, context, self.policy)
|
||||
authorized_resource_type_repo = \
|
||||
authorization.MetadefResourceTypeRepoProxy(policy_object_repo,
|
||||
return authorization.MetadefResourceTypeRepoProxy(policy_object_repo,
|
||||
context)
|
||||
return authorized_resource_type_repo
|
||||
|
||||
def get_metadef_property_factory(self, context):
|
||||
prop_factory = glance.domain.MetadefPropertyFactory()
|
||||
|
@ -20,13 +20,13 @@ Cache driver that uses SQLite to store information about cached images
|
||||
from __future__ import absolute_import
|
||||
from contextlib import contextmanager
|
||||
import os
|
||||
import sqlite3
|
||||
import stat
|
||||
import time
|
||||
|
||||
from eventlet import sleep
|
||||
from eventlet import timeout
|
||||
from oslo.config import cfg
|
||||
import sqlite3
|
||||
|
||||
from glance.common import exception
|
||||
from glance.image_cache.drivers import base
|
||||
@ -473,7 +473,7 @@ class Driver(base.Driver):
|
||||
items.append((mtime, os.path.basename(path)))
|
||||
|
||||
items.sort()
|
||||
return [image_id for (mtime, image_id) in items]
|
||||
return [image_id for (modtime, image_id) in items]
|
||||
|
||||
def get_cache_files(self, basepath):
|
||||
"""
|
||||
|
@ -376,7 +376,7 @@ class Driver(base.Driver):
|
||||
items.append((mtime, os.path.basename(path)))
|
||||
|
||||
items.sort()
|
||||
return [image_id for (mtime, image_id) in items]
|
||||
return [image_id for (modtime, image_id) in items]
|
||||
|
||||
def _reap_old_files(self, dirpath, entry_type, grace=None):
|
||||
now = time.time()
|
||||
|
@ -16,9 +16,7 @@
|
||||
"""
|
||||
Prefetches images into the Image Cache
|
||||
"""
|
||||
|
||||
import eventlet
|
||||
|
||||
import glance_store
|
||||
|
||||
from glance.common import exception
|
||||
|
@ -211,8 +211,8 @@ class ImageProxy(glance.domain.proxy.Image):
|
||||
{'image_id': self.image.image_id,
|
||||
'error': utils.exception_to_str(e)})
|
||||
self.notifier.error('image.upload', msg)
|
||||
raise webob.exc.HTTPBadRequest(explanation=
|
||||
utils.exception_to_str(e))
|
||||
raise webob.exc.HTTPBadRequest(
|
||||
explanation=utils.exception_to_str(e))
|
||||
except exception.Duplicate as e:
|
||||
msg = (_("Unable to upload duplicate image data for image"
|
||||
"%(image_id)s: %(error)s") %
|
||||
@ -260,8 +260,7 @@ class TaskRepoProxy(glance.domain.proxy.TaskRepo):
|
||||
self.context = context
|
||||
self.notifier = notifier
|
||||
proxy_kwargs = {'context': self.context, 'notifier': self.notifier}
|
||||
super(TaskRepoProxy, self) \
|
||||
.__init__(task_repo,
|
||||
super(TaskRepoProxy, self).__init__(task_repo,
|
||||
task_proxy_class=TaskProxy,
|
||||
task_proxy_kwargs=proxy_kwargs)
|
||||
|
||||
@ -285,8 +284,8 @@ class TaskStubRepoProxy(glance.domain.proxy.TaskStubRepo):
|
||||
self.context = context
|
||||
self.notifier = notifier
|
||||
proxy_kwargs = {'context': self.context, 'notifier': self.notifier}
|
||||
super(TaskStubRepoProxy, self) \
|
||||
.__init__(task_stub_repo,
|
||||
super(TaskStubRepoProxy, self).__init__(
|
||||
task_stub_repo,
|
||||
task_stub_proxy_class=TaskStubProxy,
|
||||
task_stub_proxy_kwargs=proxy_kwargs)
|
||||
|
||||
|
@ -23,12 +23,6 @@ __all__ = [
|
||||
import copy
|
||||
import itertools
|
||||
|
||||
from glance.openstack.common import gettextutils
|
||||
|
||||
# TODO(zhiyan): Remove translation from in-line
|
||||
# help message of option definition code.
|
||||
gettextutils.install('glance', lazy=False)
|
||||
|
||||
import glance.api.middleware.context
|
||||
import glance.api.policy
|
||||
import glance.common.config
|
||||
@ -40,6 +34,7 @@ import glance.common.wsgi
|
||||
import glance.image_cache
|
||||
import glance.image_cache.drivers.sqlite
|
||||
import glance.notifier
|
||||
from glance.openstack.common import gettextutils
|
||||
import glance.openstack.common.lockutils
|
||||
import glance.openstack.common.log
|
||||
import glance.registry
|
||||
@ -47,6 +42,10 @@ import glance.registry.client
|
||||
import glance.registry.client.v1.api
|
||||
import glance.scrubber
|
||||
|
||||
# TODO(zhiyan): Remove translation from in-line
|
||||
# help message of option definition code.
|
||||
gettextutils.install('glance', lazy=False)
|
||||
|
||||
|
||||
_global_opt_lists = [
|
||||
glance.openstack.common.log.common_cli_opts,
|
||||
|
@ -14,10 +14,9 @@
|
||||
|
||||
import copy
|
||||
|
||||
import six
|
||||
|
||||
import glance_store as store
|
||||
from oslo.config import cfg
|
||||
import six
|
||||
|
||||
import glance.api.common
|
||||
import glance.common.exception as exception
|
||||
|
@ -15,10 +15,10 @@
|
||||
|
||||
import abc
|
||||
import calendar
|
||||
import eventlet
|
||||
import os
|
||||
import time
|
||||
|
||||
import eventlet
|
||||
from oslo.config import cfg
|
||||
import six
|
||||
|
||||
@ -125,6 +125,7 @@ class ScrubQueue(object):
|
||||
@abc.abstractmethod
|
||||
def has_image(self, image_id):
|
||||
"""Returns whether the queue contains an image or not.
|
||||
|
||||
:param image_id: The opaque image identifier
|
||||
|
||||
:retval a boolean value to inform including or not
|
||||
|
@ -549,8 +549,7 @@ class FunctionalTest(test_utils.BaseTestCase):
|
||||
|
||||
self.api_protocol = 'http'
|
||||
self.api_port, api_sock = test_utils.get_unused_port_and_socket()
|
||||
self.registry_port, registry_sock = \
|
||||
test_utils.get_unused_port_and_socket()
|
||||
self.registry_port, reg_sock = test_utils.get_unused_port_and_socket()
|
||||
|
||||
conf_dir = os.path.join(self.test_dir, 'etc')
|
||||
utils.safe_mkdirs(conf_dir)
|
||||
@ -572,7 +571,7 @@ class FunctionalTest(test_utils.BaseTestCase):
|
||||
|
||||
self.registry_server = RegistryServer(self.test_dir,
|
||||
self.registry_port,
|
||||
sock=registry_sock)
|
||||
sock=reg_sock)
|
||||
|
||||
self.scrubber_daemon = ScrubberDaemon(self.test_dir)
|
||||
|
||||
|
@ -894,7 +894,8 @@ class DriverTests(object):
|
||||
auth_tok='user:%s:user' % TENANT2,
|
||||
owner_is_tenant=False)
|
||||
UUIDX = str(uuid.uuid4())
|
||||
#we need private image and context.owner should not match image owner
|
||||
# We need private image and context.owner should not match image
|
||||
# owner
|
||||
self.db_api.image_create(ctxt1, {'id': UUIDX,
|
||||
'status': 'queued',
|
||||
'is_public': False,
|
||||
@ -946,7 +947,8 @@ class DriverTests(object):
|
||||
auth_tok='user:%s:user' % TENANT2,
|
||||
owner_is_tenant=False)
|
||||
UUIDX = str(uuid.uuid4())
|
||||
#we need private image and context.owner should not match image owner
|
||||
# We need private image and context.owner should not match image
|
||||
# owner
|
||||
image = self.db_api.image_create(ctxt1, {'id': UUIDX,
|
||||
'status': 'queued',
|
||||
'is_public': False,
|
||||
@ -1699,7 +1701,7 @@ class TestVisibility(test_utils.BaseTestCase):
|
||||
'is_public': is_public,
|
||||
}
|
||||
fixtures.append(fixture)
|
||||
return [build_image_fixture(**fixture) for fixture in fixtures]
|
||||
return [build_image_fixture(**f) for f in fixtures]
|
||||
|
||||
def create_images(self, images):
|
||||
for fixture in images:
|
||||
|
@ -14,11 +14,10 @@
|
||||
|
||||
import copy
|
||||
|
||||
from glance import context
|
||||
import glance.tests.functional.db as db_tests
|
||||
|
||||
from glance.common import config
|
||||
from glance.common import exception
|
||||
from glance import context
|
||||
import glance.tests.functional.db as db_tests
|
||||
from glance.tests import utils as test_utils
|
||||
|
||||
|
||||
|
@ -21,7 +21,7 @@ Utility methods to set testcases up for Swift and/or S3 tests.
|
||||
from __future__ import print_function
|
||||
|
||||
import BaseHTTPServer
|
||||
import thread
|
||||
import threading
|
||||
|
||||
from glance.openstack.common import units
|
||||
|
||||
@ -76,7 +76,7 @@ def setup_http(test):
|
||||
def serve_requests(httpd):
|
||||
httpd.serve_forever()
|
||||
|
||||
thread.start_new_thread(serve_requests, (remote_server,))
|
||||
threading.Thread(target=serve_requests, args=(remote_server,)).start()
|
||||
test.http_server = remote_server
|
||||
test.http_ip = remote_ip
|
||||
test.http_port = remote_port
|
||||
|
@ -17,10 +17,10 @@
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
import httplib2
|
||||
import os
|
||||
import sys
|
||||
|
||||
import httplib2
|
||||
from six.moves import xrange
|
||||
|
||||
from glance.openstack.common import jsonutils
|
||||
|
@ -32,14 +32,13 @@ from six.moves import xrange
|
||||
from glance.openstack.common import jsonutils
|
||||
from glance.openstack.common import units
|
||||
from glance.tests import functional
|
||||
from glance.tests.functional.store_utils import get_http_uri
|
||||
from glance.tests.functional.store_utils import setup_http
|
||||
from glance.tests.utils import execute
|
||||
from glance.tests.utils import minimal_headers
|
||||
from glance.tests.utils import skip_if_disabled
|
||||
from glance.tests.utils import xattr_writes_supported
|
||||
|
||||
from glance.tests.functional.store_utils import get_http_uri
|
||||
from glance.tests.functional.store_utils import setup_http
|
||||
|
||||
FIVE_KB = 5 * units.Ki
|
||||
|
||||
|
||||
|
@ -15,10 +15,11 @@
|
||||
|
||||
"""Functional test case that tests logging output"""
|
||||
|
||||
import httplib2
|
||||
import os
|
||||
import stat
|
||||
|
||||
import httplib2
|
||||
|
||||
from glance.tests import functional
|
||||
|
||||
|
||||
|
@ -433,8 +433,8 @@ class TestApi(functional.FunctionalTest):
|
||||
|
||||
# 21. GET /images with filter on user-defined property 'distro'.
|
||||
# Verify both images are returned
|
||||
path = "http://%s:%d/v1/images?property-distro=Ubuntu" % \
|
||||
("127.0.0.1", self.api_port)
|
||||
path = "http://%s:%d/v1/images?property-distro=Ubuntu" % (
|
||||
"127.0.0.1", self.api_port)
|
||||
http = httplib2.Http()
|
||||
response, content = http.request(path, 'GET')
|
||||
self.assertEqual(response.status, 200)
|
||||
@ -445,8 +445,8 @@ class TestApi(functional.FunctionalTest):
|
||||
|
||||
# 22. GET /images with filter on user-defined property 'distro' but
|
||||
# with non-existent value. Verify no images are returned
|
||||
path = "http://%s:%d/v1/images?property-distro=fedora" % \
|
||||
("127.0.0.1", self.api_port)
|
||||
path = "http://%s:%d/v1/images?property-distro=fedora" % (
|
||||
"127.0.0.1", self.api_port)
|
||||
http = httplib2.Http()
|
||||
response, content = http.request(path, 'GET')
|
||||
self.assertEqual(response.status, 200)
|
||||
|
@ -20,10 +20,10 @@ based storage backend.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import httplib2
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
import httplib2
|
||||
from six.moves import xrange
|
||||
|
||||
from glance.openstack.common import jsonutils
|
||||
|
@ -12,11 +12,11 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import hashlib
|
||||
import httplib2
|
||||
import os
|
||||
|
||||
import httplib2
|
||||
|
||||
from glance.openstack.common import jsonutils
|
||||
from glance.openstack.common import units
|
||||
from glance.tests import functional
|
||||
|
@ -27,18 +27,17 @@ test accounts only.
|
||||
If a connection cannot be established, all the test cases are
|
||||
skipped.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
import httplib2
|
||||
import os
|
||||
import tempfile
|
||||
import uuid
|
||||
|
||||
import httplib2
|
||||
|
||||
from glance.openstack.common import jsonutils
|
||||
from glance.openstack.common import timeutils
|
||||
from glance.openstack.common import units
|
||||
|
||||
from glance.tests import functional
|
||||
from glance.tests.utils import minimal_headers
|
||||
from glance.tests.utils import skip_if_disabled
|
||||
|
@ -887,7 +887,7 @@ class TestImages(functional.FunctionalTest):
|
||||
images = jsonutils.loads(response.text)['images']
|
||||
self.assertEqual(0, len(images))
|
||||
|
||||
## Create an image for role member with extra props
|
||||
# Create an image for role member with extra props
|
||||
# Raises 403 since user is not allowed to set 'foo'
|
||||
path = self._url('/v2/images')
|
||||
headers = self._headers({'content-type': 'application/json',
|
||||
@ -899,7 +899,7 @@ class TestImages(functional.FunctionalTest):
|
||||
response = requests.post(path, headers=headers, data=data)
|
||||
self.assertEqual(403, response.status_code)
|
||||
|
||||
## Create an image for role member without 'foo'
|
||||
# Create an image for role member without 'foo'
|
||||
path = self._url('/v2/images')
|
||||
headers = self._headers({'content-type': 'application/json',
|
||||
'X-Roles': 'member'})
|
||||
@ -1037,7 +1037,7 @@ class TestImages(functional.FunctionalTest):
|
||||
images = jsonutils.loads(response.text)['images']
|
||||
self.assertEqual(0, len(images))
|
||||
|
||||
## Create an image for role member with extra props
|
||||
# Create an image for role member with extra props
|
||||
# Raises 403 since user is not allowed to set 'foo'
|
||||
path = self._url('/v2/images')
|
||||
headers = self._headers({'content-type': 'application/json',
|
||||
@ -1049,7 +1049,7 @@ class TestImages(functional.FunctionalTest):
|
||||
response = requests.post(path, headers=headers, data=data)
|
||||
self.assertEqual(403, response.status_code)
|
||||
|
||||
## Create an image for role member without 'foo'
|
||||
# Create an image for role member without 'foo'
|
||||
path = self._url('/v2/images')
|
||||
headers = self._headers({'content-type': 'application/json',
|
||||
'X-Roles': 'member'})
|
||||
|
@ -154,8 +154,8 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
|
||||
for key in cls._disallowed_properties:
|
||||
if key in image:
|
||||
msg = _("Attribute '%s' is read-only.") % key
|
||||
raise webob.exc.HTTPForbidden(explanation=
|
||||
utils.exception_to_str(msg))
|
||||
raise webob.exc.HTTPForbidden(
|
||||
explanation=utils.exception_to_str(msg))
|
||||
|
||||
def create(self, request):
|
||||
body = self._get_request_body(request)
|
||||
|
@ -14,6 +14,7 @@ import datetime
|
||||
import hashlib
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import testtools
|
||||
|
||||
from glance.openstack.common import jsonutils
|
||||
|
@ -78,9 +78,9 @@ class FakeRegistryConnection(object):
|
||||
|
||||
|
||||
def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):
|
||||
"""
|
||||
Mocks calls to 127.0.0.1 on 9191 and 9292 for testing so
|
||||
that a real Glance server does not need to be up and
|
||||
"""Mocks calls to 127.0.0.1 on 9191 and 9292 for testing.
|
||||
|
||||
Done so that a real Glance server does not need to be up and
|
||||
running
|
||||
"""
|
||||
|
||||
@ -132,9 +132,10 @@ def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):
|
||||
|
||||
def endheaders(self):
|
||||
hl = [i.lower() for i in self.req.headers.keys()]
|
||||
assert not ('content-length' in hl and
|
||||
'transfer-encoding' in hl), \
|
||||
'Content-Length and Transfer-Encoding are mutually exclusive'
|
||||
assert(not ('content-length' in hl and
|
||||
'transfer-encoding' in hl),
|
||||
'Content-Length and Transfer-Encoding '
|
||||
'are mutually exclusive')
|
||||
|
||||
def send(self, data):
|
||||
# send() is called during chunked-transfer encoding, and
|
||||
@ -163,9 +164,7 @@ def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):
|
||||
return res
|
||||
|
||||
def fake_get_connection_type(client):
|
||||
"""
|
||||
Returns the proper connection type
|
||||
"""
|
||||
"""Returns the proper connection type."""
|
||||
DEFAULT_REGISTRY_PORT = 9191
|
||||
DEFAULT_API_PORT = 9292
|
||||
|
||||
@ -199,15 +198,13 @@ def stub_out_registry_and_store_server(stubs, base_dir, **kwargs):
|
||||
|
||||
|
||||
def stub_out_registry_server(stubs, **kwargs):
|
||||
"""
|
||||
Mocks calls to 127.0.0.1 on 9191 for testing so
|
||||
that a real Glance Registry server does not need to be up and
|
||||
running
|
||||
"""Mocks calls to 127.0.0.1 on 9191 for testing.
|
||||
|
||||
Done so that a real Glance Registry server does not need to be up and
|
||||
running.
|
||||
"""
|
||||
def fake_get_connection_type(client):
|
||||
"""
|
||||
Returns the proper connection type
|
||||
"""
|
||||
"""Returns the proper connection type."""
|
||||
DEFAULT_REGISTRY_PORT = 9191
|
||||
|
||||
if (client.port == DEFAULT_REGISTRY_PORT and
|
||||
|
@ -9,11 +9,10 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
import sys
|
||||
|
||||
import glance_store as store
|
||||
import mock
|
||||
from oslo.config import cfg
|
||||
import six
|
||||
|
||||
|
@ -14,7 +14,6 @@
# under the License.

import testtools

import webob

import glance.api.common

@ -12,9 +12,9 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib2

import mock
import urllib2

from glance.common.scripts.image_import import main as image_import_script
import glance.tests.utils as test_utils

@ -12,9 +12,9 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib2

import mock
import urllib2

from glance.common import exception
from glance.common.scripts import utils as script_utils

@ -15,12 +15,12 @@
# under the License.

import datetime
import gettext
import socket

from babel import localedata
import eventlet.patcher
import fixtures
import gettext
import mock
import routes
import six

@ -29,7 +29,6 @@ from glance.db.sqlalchemy import api
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils


CONF = cfg.CONF
CONF.import_opt('metadata_encryption_key', 'glance.common.config')

@ -547,8 +546,8 @@ class TestImageMemberRepo(test_utils.BaseTestCase):
def test_remove_image_member_does_not_exist(self):
fake_uuid = str(uuid.uuid4())
image = self.image_repo.get(UUID2)
fake_member = glance.domain.ImageMemberFactory()\
.new_image_member(image, TENANT4)
fake_member = glance.domain.ImageMemberFactory().new_image_member(
image, TENANT4)
fake_member.id = fake_uuid
exc = self.assertRaises(exception.NotFound,
self.image_member_repo.remove,

@ -423,6 +423,6 @@ class TestMetadefRepo(test_utils.BaseTestCase):
object)

def test_list_resource_type(self):
resource_type = self.resource_type_repo.list(filters=
{'namespace': NAMESPACE1})
resource_type = self.resource_type_repo.list(
filters={'namespace': NAMESPACE1})
self.assertEqual(len(resource_type), 0)

@ -195,8 +195,8 @@ class TestImageMember(test_utils.BaseTestCase):
self.image_member_factory = domain.ImageMemberFactory()
self.image_factory = domain.ImageFactory()
self.image = self.image_factory.new_image()
self.image_member = self.image_member_factory\
.new_image_member(image=self.image,
self.image_member = self.image_member_factory.new_image_member(
image=self.image,
member_id=TENANT1)

def test_status_enumerated(self):

@ -215,8 +215,8 @@ class ImageServiceTestCase(test_utils.BaseTestCase):
image_meta_with_proto['Content-Length'] = len(image_body)

for key in IMG_RESPONSE_ACTIVE:
image_meta_with_proto['x-image-meta-%s' % key] = \
IMG_RESPONSE_ACTIVE[key]
image_meta_with_proto[
'x-image-meta-%s' % key] = IMG_RESPONSE_ACTIVE[key]

c.conn.prime_request('POST', 'v1/images',
image_body, image_meta_with_proto,
@ -230,8 +230,8 @@ class ImageServiceTestCase(test_utils.BaseTestCase):
c = glance_replicator.ImageService(FakeHTTPConnection(), 'noauth')

image_meta = {'id': '5dcddce0-cba5-4f18-9cf4-9853c7b207a6'}
image_meta_headers = \
glance_replicator.ImageService._dict_to_headers(image_meta)
image_meta_headers = glance_replicator.ImageService._dict_to_headers(
image_meta)
image_meta_headers['x-auth-token'] = 'noauth'
image_meta_headers['Content-Type'] = 'application/octet-stream'
c.conn.prime_request('PUT', 'v1/images/%s' % image_meta['id'],

@ -152,8 +152,8 @@ class ImageCacheTestCase(object):
incomplete_file.close()

mtime = os.path.getmtime(incomplete_file_path_1)
pastday = datetime.datetime.fromtimestamp(mtime) - \
datetime.timedelta(days=1)
pastday = (datetime.datetime.fromtimestamp(mtime) -
datetime.timedelta(days=1))
atime = int(time.mktime(pastday.timetuple()))
mtime = atime
os.utime(incomplete_file_path_1, (atime, mtime))

@ -12,9 +12,9 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os

import mock
import os

from glance.common import exception
from glance.image_cache import client
@ -35,22 +35,22 @@ class CacheClientTestCase(utils.BaseTestCase):

def test_get_cached_images(self):
expected_data = '{"cached_images": "some_images"}'
self.client.do_request.return_value = \
utils.FakeHTTPResponse(data=expected_data)
self.client.do_request.return_value = utils.FakeHTTPResponse(
data=expected_data)
self.assertEqual(self.client.get_cached_images(), "some_images")
self.client.do_request.assert_called_with("GET", "/cached_images")

def test_get_queued_images(self):
expected_data = '{"queued_images": "some_images"}'
self.client.do_request.return_value = \
utils.FakeHTTPResponse(data=expected_data)
self.client.do_request.return_value = utils.FakeHTTPResponse(
data=expected_data)
self.assertEqual(self.client.get_queued_images(), "some_images")
self.client.do_request.assert_called_with("GET", "/queued_images")

def test_delete_all_cached_images(self):
expected_data = '{"num_deleted": 4}'
self.client.do_request.return_value = \
utils.FakeHTTPResponse(data=expected_data)
self.client.do_request.return_value = utils.FakeHTTPResponse(
data=expected_data)
self.assertEqual(self.client.delete_all_cached_images(), 4)
self.client.do_request.assert_called_with("DELETE", "/cached_images")

@ -68,8 +68,8 @@ class CacheClientTestCase(utils.BaseTestCase):

def test_delete_all_queued_images(self):
expected_data = '{"num_deleted": 4}'
self.client.do_request.return_value = \
utils.FakeHTTPResponse(data=expected_data)
self.client.do_request.return_value = utils.FakeHTTPResponse(
data=expected_data)
self.assertEqual(self.client.delete_all_queued_images(), 4)
self.client.do_request.assert_called_with("DELETE", "/queued_images")

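The cache-client tests above all rely on the same stub pattern: the client's do_request attribute is a Mock whose return_value is a canned HTTP response, and the call is then asserted. A minimal self-contained sketch of that pattern, using the stdlib unittest.mock here (the tree itself imports the external mock package):

from unittest import mock

client = mock.Mock()
# Stub the transport call with a canned payload.
client.do_request.return_value = {"num_deleted": 4}

assert client.do_request("DELETE", "/cached_images") == {"num_deleted": 4}
# Verify the stub was invoked with the expected method and path.
client.do_request.assert_called_with("DELETE", "/cached_images")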
@ -49,6 +49,7 @@ from glance.db.sqlalchemy import models
from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils


CONF = cfg.CONF
CONF.import_opt('metadata_encryption_key', 'glance.common.config')

@ -319,9 +320,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
uuids = {}
for name in ('kernel', 'ramdisk', 'normal'):
image_name = '%s migration 012 test' % name
rows = images.select()\
.where(images.c.name == image_name)\
.execute().fetchall()
rows = images.select().where(
images.c.name == image_name).execute().fetchall()

self.assertEqual(len(rows), 1)

@ -331,19 +331,16 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
uuids[name] = row['id']

# Find all image_members to ensure image_id has been updated
results = image_members.select()\
.where(image_members.c.image_id ==
uuids['normal'])\
.execute().fetchall()
results = image_members.select().where(
image_members.c.image_id == uuids['normal']).execute().fetchall()
self.assertEqual(len(results), 1)

# Find all image_properties to ensure image_id has been updated
# as well as ensure kernel_id and ramdisk_id values have been
# updated too
results = image_properties.select()\
.where(image_properties.c.image_id ==
uuids['normal'])\
.execute().fetchall()
results = image_properties.select().where(
image_properties.c.image_id == uuids['normal']
).execute().fetchall()
self.assertEqual(len(results), 2)
for row in results:
self.assertIn(row['name'], ('kernel_id', 'ramdisk_id'))
@ -363,9 +360,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
ids = {}
for name in ('kernel', 'ramdisk', 'normal'):
image_name = '%s migration 012 test' % name
rows = images.select()\
.where(images.c.name == image_name)\
.execute().fetchall()
rows = images.select().where(
images.c.name == image_name).execute().fetchall()
self.assertEqual(len(rows), 1)

row = rows[0]
@ -374,19 +370,15 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
ids[name] = row['id']

# Find all image_members to ensure image_id has been updated
results = image_members.select()\
.where(image_members.c.image_id ==
ids['normal'])\
.execute().fetchall()
results = image_members.select().where(
image_members.c.image_id == ids['normal']).execute().fetchall()
self.assertEqual(len(results), 1)

# Find all image_properties to ensure image_id has been updated
# as well as ensure kernel_id and ramdisk_id values have been
# updated too
results = image_properties.select()\
.where(image_properties.c.image_id ==
ids['normal'])\
.execute().fetchall()
results = image_properties.select().where(
image_properties.c.image_id == ids['normal']).execute().fetchall()
self.assertEqual(len(results), 2)
for row in results:
self.assertIn(row['name'], ('kernel_id', 'ramdisk_id'))
@ -670,8 +662,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):

def _check_026(self, engine, data):
image_locations = db_utils.get_table(engine, 'image_locations')
results = image_locations.select()\
.where(image_locations.c.image_id == data).execute()
results = image_locations.select().where(
image_locations.c.image_id == data).execute()

r = list(results)
self.assertEqual(len(r), 1)
@ -756,8 +748,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
image_id = data[1]
image_locations = db_utils.get_table(engine, 'image_locations')

records = image_locations.select().\
where(image_locations.c.image_id == image_id).execute().fetchall()
records = image_locations.select().where(
image_locations.c.image_id == image_id).execute().fetchall()

for r in records:
d = jsonutils.loads(r['meta_data'])
@ -768,8 +760,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):

image_locations = db_utils.get_table(engine, 'image_locations')

records = image_locations.select().\
where(image_locations.c.image_id == image_id).execute().fetchall()
records = image_locations.select().where(
image_locations.c.image_id == image_id).execute().fetchall()

for r in records:
md = r['meta_data']
@ -855,9 +847,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):

def _check_031(self, engine, image_id):
locations_table = db_utils.get_table(engine, 'image_locations')
result = locations_table.select()\
.where(locations_table.c.image_id == image_id)\
.execute().fetchall()
result = locations_table.select().where(
locations_table.c.image_id == image_id).execute().fetchall()

locations = set([(x['value'], x['meta_data']) for x in result])
actual_locations = set([
@ -980,8 +971,8 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
'deleted', 'pending_delete', 'deleted']

for (idx, image_id) in enumerate(data):
results = image_locations.select()\
.where(image_locations.c.image_id == image_id).execute()
results = image_locations.select().where(
image_locations.c.image_id == image_id).execute()
r = list(results)
self.assertEqual(len(r), 1)
self.assertIn('status', r[0])
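The migration-test rewrites above all replace backslash line continuation with a line break inside the call's own parentheses, which is what hacking 0.9.x's H904 check asks for. A tiny stand-in for the SQLAlchemy-style fluent interface, just to show the shape (not Glance code):

class Query(object):
    """Minimal stand-in for a fluent select()/where() interface."""

    def __init__(self, rows):
        self.rows = rows

    def where(self, predicate):
        return Query([r for r in self.rows if predicate(r)])

    def fetchall(self):
        return self.rows


# The old style relied on trailing backslashes; the new style breaks the
# line inside the parentheses of the call instead.
rows = Query([1, 2, 3]).where(
    lambda r: r > 1).fetchall()
assert rows == [2, 3]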
@ -12,11 +12,10 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid

import mock
from mock import patch
import uuid

import six

from glance.common import exception
@ -592,16 +591,16 @@ class TestImageMemberQuotas(test_utils.BaseTestCase):

self.image_member_factory.new_image_member(self.image,
'fake_id')
self.base_image_member_factory.new_image_member\
.assert_called_once_with(self.image.base, 'fake_id')
nim = self.base_image_member_factory.new_image_member
nim.assert_called_once_with(self.image.base, 'fake_id')

def test_new_image_member_unlimited_members(self):
self.config(image_member_quota=-1)

self.image_member_factory.new_image_member(self.image,
'fake_id')
self.base_image_member_factory.new_image_member\
.assert_called_once_with(self.image.base, 'fake_id')
nim = self.base_image_member_factory.new_image_member
nim.assert_called_once_with(self.image.base, 'fake_id')

def test_new_image_member_too_many_members(self):
self.config(image_member_quota=0)
@ -12,9 +12,8 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mox

import glance_store
import mox

from glance.common import exception
import glance.location
@ -12,10 +12,8 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

import glance_store
import mock

import glance.location
from glance.tests.unit import base
@ -38,7 +38,6 @@ from glance.db.sqlalchemy import api as db_api
|
||||
from glance.db.sqlalchemy import models as db_models
|
||||
from glance.openstack.common import jsonutils
|
||||
from glance.openstack.common import timeutils
|
||||
|
||||
import glance.registry.client.v1.api as registry
|
||||
from glance.tests.unit import base
|
||||
import glance.tests.unit.utils as unit_test_utils
|
||||
@ -1406,20 +1405,19 @@ class TestGlanceAPI(base.IsolatedUnitTest):
|
||||
|
||||
req = webob.Request.blank("/images/%s" % image_id)
|
||||
req.method = 'PUT'
|
||||
req.headers['Content-Type'] = \
|
||||
'application/octet-stream'
|
||||
req.headers['Content-Type'] = 'application/octet-stream'
|
||||
req.body = "chunk00000remainder"
|
||||
|
||||
with mock.patch.object(upload_utils, 'initiate_deletion') as \
|
||||
mock_init_del:
|
||||
with mock.patch.object(
|
||||
upload_utils, 'initiate_deletion') as mock_init_del:
|
||||
mock_init_del.side_effect = mock_initiate_deletion
|
||||
with mock.patch.object(registry, 'get_image_metadata') as \
|
||||
mock_get_meta:
|
||||
with mock.patch.object(
|
||||
registry, 'get_image_metadata') as mock_get_meta:
|
||||
mock_get_meta.side_effect = mock_get_image_metadata
|
||||
with mock.patch.object(db_api, '_image_get') as mock_db_get:
|
||||
mock_db_get.side_effect = mock_image_get
|
||||
with mock.patch.object(db_api, '_image_update') as \
|
||||
mock_db_update:
|
||||
with mock.patch.object(
|
||||
db_api, '_image_update') as mock_db_update:
|
||||
mock_db_update.side_effect = mock_image_update
|
||||
|
||||
# Expect a 409 Conflict.
|
||||
|
@ -32,7 +32,6 @@ from glance.db.sqlalchemy import api as db_api
|
||||
from glance.db.sqlalchemy import models as db_models
|
||||
from glance.openstack.common import jsonutils
|
||||
from glance.openstack.common import timeutils
|
||||
|
||||
from glance.registry.api import v1 as rserver
|
||||
from glance.tests.unit import base
|
||||
from glance.tests import utils as test_utils
|
||||
@ -1287,8 +1286,8 @@ class TestRegistryAPI(base.IsolatedUnitTest, test_utils.RegistryAPIMixIn):
|
||||
self.log_image_id = False
|
||||
|
||||
def fake_log_info(msg):
|
||||
if 'Successfully created image ' \
|
||||
'0564c64c-3545-4e34-abfb-9d18e5f2f2f9' in msg:
|
||||
if ('Successfully created image '
|
||||
'0564c64c-3545-4e34-abfb-9d18e5f2f2f9' in msg):
|
||||
self.log_image_id = True
|
||||
self.stubs.Set(rserver.images.LOG, 'info', fake_log_info)
|
||||
|
||||
@ -1940,7 +1939,8 @@ class TestSharability(test_utils.BaseTestCase):
|
||||
auth_tok='user:%s:admin' % TENANT2,
|
||||
owner_is_tenant=False)
|
||||
UUIDX = str(uuid.uuid4())
|
||||
#we need private image and context.owner should not match image owner
|
||||
# We need private image and context.owner should not match image
|
||||
# owner
|
||||
image = db_api.image_create(ctxt1, {'id': UUIDX,
|
||||
'status': 'queued',
|
||||
'is_public': False,
|
||||
@ -1955,7 +1955,8 @@ class TestSharability(test_utils.BaseTestCase):
|
||||
auth_tok='user:%s:user' % TENANT1,
|
||||
owner_is_tenant=True)
|
||||
UUIDX = str(uuid.uuid4())
|
||||
#we need private image and context.owner should not match image owner
|
||||
# We need private image and context.owner should not match image
|
||||
# owner
|
||||
image = db_api.image_create(ctxt1, {'id': UUIDX,
|
||||
'status': 'queued',
|
||||
'is_public': False,
|
||||
@ -1974,7 +1975,8 @@ class TestSharability(test_utils.BaseTestCase):
|
||||
auth_tok='user:%s:user' % TENANT2,
|
||||
owner_is_tenant=False)
|
||||
UUIDX = str(uuid.uuid4())
|
||||
#we need private image and context.owner should not match image owner
|
||||
# We need private image and context.owner should not match image
|
||||
# owner
|
||||
image = db_api.image_create(ctxt1, {'id': UUIDX,
|
||||
'status': 'queued',
|
||||
'is_public': False,
|
||||
@ -1993,7 +1995,8 @@ class TestSharability(test_utils.BaseTestCase):
|
||||
auth_tok='user:%s:user' % TENANT2,
|
||||
owner_is_tenant=False)
|
||||
UUIDX = str(uuid.uuid4())
|
||||
#we need private image and context.owner should not match image owner
|
||||
# We need private image and context.owner should not match image
|
||||
# owner
|
||||
image = db_api.image_create(ctxt1, {'id': UUIDX,
|
||||
'status': 'queued',
|
||||
'is_public': False,
|
||||
@ -2018,7 +2021,8 @@ class TestSharability(test_utils.BaseTestCase):
|
||||
auth_tok='user:%s:user' % TENANT2,
|
||||
owner_is_tenant=False)
|
||||
UUIDX = str(uuid.uuid4())
|
||||
#we need private image and context.owner should not match image owner
|
||||
# We need private image and context.owner should not match image
|
||||
# owner
|
||||
image = db_api.image_create(ctxt1, {'id': UUIDX,
|
||||
'status': 'queued',
|
||||
'is_public': False,
|
||||
@ -2042,7 +2046,8 @@ class TestSharability(test_utils.BaseTestCase):
|
||||
auth_tok='user:%s:user' % TENANT1,
|
||||
owner_is_tenant=True)
|
||||
UUIDX = str(uuid.uuid4())
|
||||
#we need private image and context.owner should not match image owner
|
||||
# We need private image and context.owner should not match image
|
||||
# owner
|
||||
image = db_api.image_create(ctxt1, {'id': UUIDX,
|
||||
'status': 'queued',
|
||||
'is_public': False,
|
||||
|
@ -14,10 +14,10 @@
# under the License.

from contextlib import contextmanager
import mock
from mock import patch

import glance_store
import mock
from mock import patch
import webob.exc

from glance.api.v1 import upload_utils

@ -12,18 +12,16 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
import uuid

import glance_store
import mock
import six
import webob

import glance.api.v2.image_data
from glance.common import exception
from glance.common import wsgi

from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils
@ -24,7 +24,6 @@ from glance.openstack.common import jsonutils
import glance.tests.unit.utils as unit_test_utils
import glance.tests.utils as test_utils


DATETIME = datetime.datetime(2012, 5, 16, 15, 27, 36, 325355)
ISOTIME = '2012-05-16T15:27:36Z'

@ -97,8 +96,8 @@ class TestImageMembersController(test_utils.BaseTestCase):
self.notifier = unit_test_utils.FakeNotifier()
self._create_images()
self._create_image_members()
self.controller = glance.api.v2.image_members\
.ImageMembersController(self.db,
self.controller = glance.api.v2.image_members.ImageMembersController(
self.db,
self.policy,
self.notifier,
self.store)
|
||||
self.assertEqual(resp.container_format, 'bare')
|
||||
|
||||
def test_update_remove_property_while_over_limit(self):
|
||||
"""
|
||||
Ensure that image properties can be removed.
|
||||
"""Ensure that image properties can be removed.
|
||||
|
||||
Image properties should be able to be removed as long as the image has
|
||||
fewer than the limited number of image properties after the
|
||||
@ -879,8 +878,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
self.assertNotEqual(output.created_at, output.updated_at)
|
||||
|
||||
def test_update_add_and_remove_property_under_limit(self):
|
||||
"""
|
||||
Ensure that image properties can be removed.
|
||||
"""Ensure that image properties can be removed.
|
||||
|
||||
Image properties should be able to be added and removed simultaneously
|
||||
as long as the image has fewer than the limited number of image
|
||||
@ -1132,10 +1130,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
another_request, created_image.image_id, changes)
|
||||
|
||||
def test_create_non_protected_prop(self):
|
||||
"""
|
||||
Verify property marked with special char '@' is creatable by an unknown
|
||||
role
|
||||
"""
|
||||
"""Property marked with special char @ creatable by an unknown role"""
|
||||
self.set_property_protections()
|
||||
request = unit_test_utils.get_fake_request(roles=['admin'])
|
||||
image = {'name': 'image-1'}
|
||||
@ -1154,10 +1149,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
'2')
|
||||
|
||||
def test_read_non_protected_prop(self):
|
||||
"""
|
||||
Verify property marked with special char '@' is readable by an unknown
|
||||
role
|
||||
"""
|
||||
"""Property marked with special char @ readable by an unknown role"""
|
||||
self.set_property_protections()
|
||||
request = unit_test_utils.get_fake_request(roles=['admin'])
|
||||
image = {'name': 'image-1'}
|
||||
@ -1170,10 +1162,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
self.assertEqual(output.extra_properties['x_all_permitted'], '1')
|
||||
|
||||
def test_update_non_protected_prop(self):
|
||||
"""
|
||||
Verify property marked with special char '@' is updatable by an unknown
|
||||
role
|
||||
"""
|
||||
"""Property marked with special char @ updatable by an unknown role"""
|
||||
self.set_property_protections()
|
||||
request = unit_test_utils.get_fake_request(roles=['admin'])
|
||||
image = {'name': 'image-1'}
|
||||
@ -1190,10 +1179,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
self.assertEqual(output.extra_properties['x_all_permitted'], 'baz')
|
||||
|
||||
def test_delete_non_protected_prop(self):
|
||||
"""
|
||||
Verify property marked with special char '@' is deletable by an unknown
|
||||
role
|
||||
"""
|
||||
"""Property marked with special char @ deletable by an unknown role"""
|
||||
self.set_property_protections()
|
||||
request = unit_test_utils.get_fake_request(roles=['admin'])
|
||||
image = {'name': 'image-1'}
|
||||
@ -1211,9 +1197,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
'x_all_permitted')
|
||||
|
||||
def test_create_locked_down_protected_prop(self):
|
||||
"""
|
||||
Verify a property protected by special char '!' is creatable by no one
|
||||
"""
|
||||
"""Property marked with special char ! creatable by no one"""
|
||||
self.set_property_protections()
|
||||
request = unit_test_utils.get_fake_request(roles=['admin'])
|
||||
image = {'name': 'image-1'}
|
||||
@ -1230,9 +1214,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
created_image.image_id, changes)
|
||||
|
||||
def test_read_locked_down_protected_prop(self):
|
||||
"""
|
||||
Verify a property protected by special char '!' is readable by no one
|
||||
"""
|
||||
"""Property marked with special char ! readable by no one"""
|
||||
self.set_property_protections()
|
||||
request = unit_test_utils.get_fake_request(roles=['member'])
|
||||
image = {'name': 'image-1'}
|
||||
@ -1246,9 +1228,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
'x_none_read')
|
||||
|
||||
def test_update_locked_down_protected_prop(self):
|
||||
"""
|
||||
Verify a property protected by special char '!' is updatable by no one
|
||||
"""
|
||||
"""Property marked with special char ! updatable by no one"""
|
||||
self.set_property_protections()
|
||||
request = unit_test_utils.get_fake_request(roles=['admin'])
|
||||
image = {'name': 'image-1'}
|
||||
@ -1264,9 +1244,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
another_request, created_image.image_id, changes)
|
||||
|
||||
def test_delete_locked_down_protected_prop(self):
|
||||
"""
|
||||
Verify a property protected by special char '!' is deletable by no one
|
||||
"""
|
||||
"""Property marked with special char ! deletable by no one"""
|
||||
self.set_property_protections()
|
||||
request = unit_test_utils.get_fake_request(roles=['admin'])
|
||||
image = {'name': 'image-1'}
|
||||
@ -1559,8 +1537,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
self.assertNotEqual(output.created_at, output.updated_at)
|
||||
|
||||
def test_update_remove_location_while_over_limit(self):
|
||||
"""
|
||||
Ensure that image locations can be removed.
|
||||
"""Ensure that image locations can be removed.
|
||||
|
||||
Image locations should be able to be removed as long as the image has
|
||||
fewer than the limited number of image locations after the
|
||||
@ -1592,8 +1569,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
self.assertNotEqual(output.created_at, output.updated_at)
|
||||
|
||||
def test_update_add_and_remove_location_under_limit(self):
|
||||
"""
|
||||
Ensure that image locations can be removed.
|
||||
"""Ensure that image locations can be removed.
|
||||
|
||||
Image locations should be able to be added and removed simultaneously
|
||||
as long as the image has fewer than the limited number of image
|
||||
@ -1787,9 +1763,9 @@ class TestImagesController(base.IsolatedUnitTest):
self.assertEqual(image['status'], 'deleted')

def test_delete_queued_updates_status_delayed_delete(self):
"""
Ensure status of queued image is updated (LP bug #1048851)
to 'deleted' when delayed_delete isenabled
"""Ensure status of queued image is updated (LP bug #1048851).

Must be set to 'deleted' when delayed_delete is enabled.
"""
scrubber_dir = os.path.join(self.test_dir, 'scrubber')
self.config(delayed_delete=True, scrubber_datadir=scrubber_dir)
@ -2054,13 +2030,7 @@ class TestImagesDeserializer(test_utils.BaseTestCase):

def test_create_readonly_attributes_forbidden(self):
bodies = [
#{'created_at': ISOTIME},
#{'updated_at': ISOTIME},
#{'status': 'saving'},
{'direct_url': 'http://example.com'},
#{'size': 10},
#{'virtual_size': 10},
#{'checksum': 'asdf'},
{'self': 'http://example.com'},
{'file': 'http://example.com'},
{'schema': 'http://example.com'},
@ -2779,9 +2749,11 @@ class TestImagesSerializer(test_utils.BaseTestCase):
unit_test_utils.sort_url_by_qs_keys(output['next']))

def test_index_forbidden_get_image_location(self):
"""Make sure the serializer works fine no mater if current user is
authorized to get image location if the show_multiple_locations is
False.
"""Make sure the serializer works fine.

No matter if current user is authorized to get image location if the
show_multiple_locations is False.

"""
class ImageLocations(object):
def __len__(self):
@ -3173,8 +3145,10 @@ class TestImagesSerializerWithAdditionalProperties(test_utils.BaseTestCase):
self.assertEqual(expected, jsonutils.loads(response.body))

def test_show_invalid_additional_property(self):
"""Ensure that the serializer passes through invalid additional
properties (i.e. non-string) without complaining.
"""Ensure the serializer passes through invalid additional properties.

It must not complain about non-string values, for example.
"""
serializer = glance.api.v2.images.ResponseSerializer()
self.fixture.extra_properties['marx'] = 123
@ -30,7 +30,6 @@ from glance.db.sqlalchemy import api as db_api
from glance.db.sqlalchemy import models as db_models
from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils

from glance.registry.api import v2 as rserver
from glance.tests.unit import base
from glance.tests import utils as test_utils
@ -114,10 +113,7 @@ class TestRegistryRPC(base.IsolatedUnitTest):
db_models.register_models(db_api.get_engine())

def test_show(self):
"""
Tests that registry API endpoint
returns the expected image
"""
"""Tests that registry API endpoint returns the expected image."""
fixture = {'id': UUID2,
'name': 'fake image #2',
'size': 19,
@ -139,10 +135,7 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(v, image[k])

def test_show_unknown(self):
"""
Tests that the registry API endpoint
returns a 404 for an unknown image id
"""
"""Tests the registry API endpoint returns 404 for an unknown id."""
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
@ -156,10 +149,7 @@ class TestRegistryRPC(base.IsolatedUnitTest):
'glance.common.exception.NotFound')

def test_get_index(self):
"""
Tests that the image_get_all command returns list of
images
"""
"""Tests that the image_get_all command returns list of images."""
fixture = {'id': UUID2,
'name': 'fake image #2',
'size': 19,
@ -183,9 +173,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(v, images[0][k])

def test_get_index_marker(self):
"""
Tests that the registry API returns list of
public images that conforms to a marker query param
"""Tests that the registry API returns list of public images.

Must conform to a marker query param.
"""
uuid5_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid4_time = uuid5_time + datetime.timedelta(seconds=5)
@ -450,10 +440,7 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(len(images), 0)

def test_get_index_unknown_marker(self):
"""
Tests that the registry API returns a NotFound
when an unknown marker is provided
"""
"""Tests the registry API returns a NotFound with unknown marker."""
req = webob.Request.blank('/rpc')
req.method = "POST"
cmd = [{
@ -468,9 +455,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertIn("NotFound", result["_error"]["cls"])

def test_get_index_limit(self):
"""
Tests that the registry API returns list of
public images that conforms to a limit query param
"""Tests that the registry API returns list of public images.

Must conform to a limit query param.
"""
uuid3_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid4_time = uuid3_time + datetime.timedelta(seconds=5)
@ -521,9 +508,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[0]['id'], UUID4)

def test_get_index_limit_marker(self):
"""
Tests that the registry API returns list of
public images that conforms to limit and marker query params
"""Tests that the registry API returns list of public images.

Must conform to limit and marker query params.
"""
uuid3_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid4_time = uuid3_time + datetime.timedelta(seconds=5)
@ -573,10 +560,11 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[0]['id'], UUID2)

def test_get_index_filter_name(self):
"""
Tests that the registry API returns list of
public images that have a specific name. This is really a sanity
check, filtering is tested more in-depth using /images/detail
"""Tests that the registry API returns list of public images.

Use a specific name. This is really a sanity check, filtering is
tested more in-depth using /images/detail

"""
extra_fixture = {'id': _gen_uuid(),
'status': 'active',
@ -618,9 +606,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual('new name! #123', image['name'])

def test_get_index_filter_on_user_defined_properties(self):
"""
Tests that the registry API returns list of
public images that have a specific user-defined properties.
"""Tests that the registry API returns list of public images.

Use specific user-defined properties.
"""
properties = {'distro': 'ubuntu', 'arch': 'i386', 'type': 'kernel'}
extra_id = _gen_uuid()
@ -728,9 +716,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(len(images), 0)

def test_get_index_sort_default_created_at_desc(self):
"""
Tests that the registry API returns list of
public images that conforms to a default sort key/dir
"""Tests that the registry API returns list of public images.

Must conform to a default sort key/dir.
"""
uuid5_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid4_time = uuid5_time + datetime.timedelta(seconds=5)
@ -800,10 +788,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[4]['id'], UUID1)

def test_get_index_sort_name_asc(self):
"""
Tests that the registry API returns list of
public images sorted alphabetically by name in
ascending order.
"""Tests that the registry API returns list of public images.

Must be sorted alphabetically by name in ascending order.
"""
UUID3 = _gen_uuid()
extra_fixture = {'id': UUID3,
@ -860,10 +847,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[4]['id'], UUID4)

def test_get_index_sort_status_desc(self):
"""
Tests that the registry API returns list of
public images sorted alphabetically by status in
descending order.
"""Tests that the registry API returns list of public images.

Must be sorted alphabetically by status in descending order.
"""
uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)

@ -912,10 +898,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[3]['id'], UUID3)

def test_get_index_sort_disk_format_asc(self):
"""
Tests that the registry API returns list of
public images sorted alphabetically by disk_format in
ascending order.
"""Tests that the registry API returns list of public images.

Must be sorted alphabetically by disk_format in ascending order.
"""
uuid3_time = timeutils.utcnow() + datetime.timedelta(seconds=5)

@ -964,10 +949,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[3]['id'], UUID2)

def test_get_index_sort_container_format_desc(self):
"""
Tests that the registry API returns list of
public images sorted alphabetically by container_format in
descending order.
"""Tests that the registry API returns list of public images.

Must be sorted alphabetically by container_format in descending order.
"""
uuid3_time = timeutils.utcnow() + datetime.timedelta(seconds=5)

@ -1017,9 +1001,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[3]['id'], UUID1)

def test_get_index_sort_size_asc(self):
"""
Tests that the registry API returns list of
public images sorted by size in ascending order.
"""Tests that the registry API returns list of public images.

Must be sorted by size in ascending order.
"""
UUID3 = _gen_uuid()
extra_fixture = {'id': UUID3,
@ -1065,9 +1049,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[3]['id'], UUID3)

def test_get_index_sort_created_at_asc(self):
"""
Tests that the registry API returns list of
public images sorted by created_at in ascending order.
"""Tests that the registry API returns list of public images.

Must be sorted by created_at in ascending order.
"""
uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid3_time = uuid4_time + datetime.timedelta(seconds=5)
@ -1120,9 +1104,9 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertEqual(images[3]['id'], UUID3)

def test_get_index_sort_updated_at_desc(self):
"""
Tests that the registry API returns list of
public images sorted by updated_at in descending order.
"""Tests that the registry API returns list of public images.

Must be sorted by updated_at in descending order.
"""
uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid3_time = uuid4_time + datetime.timedelta(seconds=5)
@ -1383,9 +1367,7 @@ class TestRegistryRPC(base.IsolatedUnitTest):
self.assertTrue(deleted_image['deleted_at'])

def test_get_image_members(self):
"""
Tests members listing for existing images
"""
"""Tests members listing for existing images."""
req = webob.Request.blank('/rpc')
req.method = 'POST'
cmd = [{
@ -31,7 +31,6 @@ from glance.common import exception
from glance import context
from glance.db.sqlalchemy import api as db_api
from glance.openstack.common import timeutils

from glance.registry.api import v2 as rserver
import glance.registry.client.v2.api as rapi
from glance.registry.client.v2.api import client as rclient
@ -50,9 +49,9 @@ config.parse_args(args=[])

class TestRegistryV2Client(base.IsolatedUnitTest,
test_utils.RegistryAPIMixIn):
"""
Test proper actions made for both valid and invalid requests
against a Registry service
"""Test proper actions made against a registry service.

Test for both valid and invalid requests.
"""

# Registry server to user
@ -101,10 +100,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,
self.assertEqual(0, image["min_disk"])

def test_get_index_sort_name_asc(self):
"""
Tests that the registry API returns list of
public images sorted alphabetically by name in
ascending order.
"""Tests that the registry API returns list of public images.

Must be sorted alphabetically by name in ascending order.
"""
UUID3 = _gen_uuid()
extra_fixture = self.get_fixture(id=UUID3, name='asdf')
@ -122,10 +120,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,
unjsonify=False)

def test_get_index_sort_status_desc(self):
"""
Tests that the registry API returns list of
public images sorted alphabetically by status in
descending order.
"""Tests that the registry API returns list of public images.

Must be sorted alphabetically by status in descending order.
"""
uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)

@ -147,10 +144,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,
unjsonify=False)

def test_get_index_sort_disk_format_asc(self):
"""
Tests that the registry API returns list of
public images sorted alphabetically by disk_format in
ascending order.
"""Tests that the registry API returns list of public images.

Must be sorted alphabetically by disk_format in ascending order.
"""
UUID3 = _gen_uuid()
extra_fixture = self.get_fixture(id=UUID3, name='asdf',
@ -172,10 +168,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,
unjsonify=False)

def test_get_index_sort_container_format_desc(self):
"""
Tests that the registry API returns list of
public images sorted alphabetically by container_format in
descending order.
"""Tests that the registry API returns list of public images.

Must be sorted alphabetically by container_format in descending order.
"""
UUID3 = _gen_uuid()
extra_fixture = self.get_fixture(id=UUID3, name='asdf',
@ -198,9 +193,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,
unjsonify=False)

def test_get_index_sort_size_asc(self):
"""
Tests that the registry API returns list of
public images sorted by size in ascending order.
"""Tests that the registry API returns list of public images.

Must be sorted by size in ascending order.
"""
UUID3 = _gen_uuid()
extra_fixture = self.get_fixture(id=UUID3, name='asdf',
@ -224,9 +219,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,
unjsonify=False)

def test_get_index_sort_created_at_asc(self):
"""
Tests that the registry API returns list of
public images sorted by created_at in ascending order.
"""Tests that the registry API returns list of public images.

Must be sorted by created_at in ascending order.
"""
uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid3_time = uuid4_time + datetime.timedelta(seconds=5)
@ -248,9 +243,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,
unjsonify=False)

def test_get_index_sort_updated_at_desc(self):
"""
Tests that the registry API returns list of
public images sorted by updated_at in descending order.
"""Tests that the registry API returns list of public images.

Must be sorted by updated_at in descending order.
"""
uuid4_time = timeutils.utcnow() + datetime.timedelta(seconds=10)
uuid3_time = uuid4_time + datetime.timedelta(seconds=5)
@ -354,9 +349,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,
self.assertEqual(len(images), 4)

def test_image_get_index_by_name(self):
"""
Test correct set of public, name-filtered image returned. This
is just a sanity check, we test the details call more in-depth.
"""Test correct set of public, name-filtered image returned.

This is just a sanity check, we test the details call more in-depth.
"""
extra_fixture = self.get_fixture(id=_gen_uuid(),
name='new name! #123')
@ -577,10 +572,9 @@ class TestRegistryV2Client(base.IsolatedUnitTest,


class TestRegistryV2ClientApi(base.IsolatedUnitTest):
"""
Test proper actions made for both valid and invalid requests
against a Registry service
"""Test proper actions made against a registry service.

Test for both valid and invalid requests.
"""

def setUp(self):
@ -294,9 +294,9 @@ def execute(cmd,
exitcode = 0

if exitcode != expected_exitcode and raise_error:
msg = "Command %(cmd)s did not succeed. Returned an exit "\
"code of %(exitcode)d."\
"\n\nSTDOUT: %(out)s"\
msg = "Command %(cmd)s did not succeed. Returned an exit "
"code of %(exitcode)d."
"\n\nSTDOUT: %(out)s"
"\n\nSTDERR: %(err)s" % {'cmd': cmd, 'exitcode': exitcode,
'out': out, 'err': err}
if context:
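As rendered above, the new version of this message simply drops the trailing backslashes. Adjacent string literals on separate lines only concatenate when the whole expression sits inside parentheses, which is the form H904 points at; a sketch of that parenthesized shape with placeholder values (not the file's exact final text):

cmd, exitcode, out, err = 'true', 1, '', ''
# Wrapping the whole literal in parentheses lets the lines continue
# without backslashes and keeps the implicit string concatenation valid.
msg = ("Command %(cmd)s did not succeed. Returned an exit "
       "code of %(exitcode)d."
       "\n\nSTDOUT: %(out)s"
       "\n\nSTDERR: %(err)s" % {'cmd': cmd, 'exitcode': exitcode,
                                'out': out, 'err': err})
print(msg)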
@ -3,7 +3,7 @@
# process, which may cause wedges in the gate later.

# Hacking already pins down pep8, pyflakes and flake8
hacking>=0.8.0,<0.9
hacking>=0.9.2,<0.10

# For translations processing
Babel>=1.3

|
||||
"""Display a subunit stream through a colorized unittest test runner."""
|
||||
|
||||
import heapq
|
||||
import subunit
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
import subunit
|
||||
import testtools
|
||||
|
||||
|
||||
class _AnsiColorizer(object):
|
||||
"""
|
||||
A colorizer is an object that loosely wraps around a stream, allowing
|
||||
callers to write text to the stream in a particular color.
|
||||
"""A colorizer is an object that loosely wraps around a stream.
|
||||
|
||||
That allows callers to write text to the stream in a particular color.
|
||||
Colorizer classes must implement C{supported()} and C{write(text, color)}.
|
||||
"""
|
||||
_colors = dict(black=30, red=31, green=32, yellow=33,
|
||||
@ -64,9 +63,9 @@ class _AnsiColorizer(object):
|
||||
|
||||
@staticmethod
|
||||
def supported(stream=sys.stdout):
|
||||
"""
|
||||
A method that returns True if the current platform supports
|
||||
coloring terminal output using this method. Returns False otherwise.
|
||||
"""Method that checks if the current terminal supports coloring.
|
||||
|
||||
Returns True or False.
|
||||
"""
|
||||
if not stream.isatty():
|
||||
return False # auto color only on TTYs
|
||||
@ -86,21 +85,19 @@ class _AnsiColorizer(object):
|
||||
return False
|
||||
|
||||
def write(self, text, color):
|
||||
"""
|
||||
Write the given text to the stream in the given color.
|
||||
"""Write the given text to the stream in the given color.
|
||||
|
||||
@param text: Text to be written to the stream.
|
||||
|
||||
@param color: A string label for a color. e.g. 'red', 'white'.
|
||||
|
||||
"""
|
||||
color = self._colors[color]
|
||||
self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text))
|
||||
|
||||
|
||||
class _Win32Colorizer(object):
|
||||
"""
|
||||
See _AnsiColorizer docstring.
|
||||
"""
|
||||
"""See _AnsiColorizer docstring."""
|
||||
def __init__(self, stream):
|
||||
import win32console
|
||||
red, green, blue, bold = (win32console.FOREGROUND_RED,
|
||||
@ -148,9 +145,7 @@ class _Win32Colorizer(object):
|
||||
|
||||
|
||||
class _NullColorizer(object):
|
||||
"""
|
||||
See _AnsiColorizer docstring.
|
||||
"""
|
||||
"""See _AnsiColorizer docstring."""
|
||||
def __init__(self, stream):
|
||||
self.stream = stream
|
||||
|
||||
|
tox.ini
@ -47,7 +47,9 @@ commands = python setup.py build_sphinx
# H302 import only modules
# H402 one line docstring needs punctuation.
# H404 multi line docstring should start with a summary
ignore = E711,E712,H302,H402,H404
# H405 multi line docstring summary not separated with an empty line
# H904 Wrap long lines in parentheses instead of a backslash
ignore = E711,E712,H302,H402,H404,H405,H904
builtins = _
exclude = .venv,.git,.tox,dist,doc,etc,*glance/locale*,*openstack/common*,*lib/python*,*egg,build