Glare-ectomy

Glare became a separate project during Newton.  The code was copied
out of the Glance tree, but remained in the Glance repository.  It
is no longer being maintained, and that has begun to cause problems;
for example, it blocked a recent stevedore upper-constraints change
(see I141b17f9dd2acebe2b23f8fc93206e23bc70b568).

This patch removes the Glare code from the Glance repository.  It
includes Alembic database migration scripts, in both the all-in-one
and E-M-C (expand-migrate-contract) format.  It also includes release
notes.

Implements: blueprint glare-ectomy
Change-Id: I3026ca6287a65ab5287bf3843f2a9d756ce15139
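
For reference, the contract-phase migration that drops the Glare tables
would be shaped roughly like the sketch below. This is illustrative only:
the revision identifiers are placeholders, and the table list assumes the
six artifact* tables of the Glare schema.

# Hypothetical sketch of a contract migration removing the Glare tables;
# revision ids and the exact table set are assumptions, not taken from
# the patch itself.
from alembic import op

revision = 'glare_ectomy_contract'   # placeholder id
down_revision = None                 # placeholder id

def upgrade():
    # Drop child tables before 'artifacts' so FK constraints do not block.
    for table in ('artifact_blob_locations', 'artifact_blobs',
                  'artifact_dependencies', 'artifact_properties',
                  'artifact_tags', 'artifacts'):
        op.drop_table(table)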
Brian Rosmaita 2017-02-01 02:53:18 +00:00
commit 4b8026714f (parent 867ae562c3)
73 changed files with 313 additions and 12729 deletions


@@ -60,8 +60,6 @@ config_generator_config_file = [
      '_static/glance-api'),
     ('../../etc/oslo-config-generator/glance-cache.conf',
      '_static/glance-cache'),
-    ('../../etc/oslo-config-generator/glance-glare.conf',
-     '_static/glance-glare'),
     ('../../etc/oslo-config-generator/glance-manage.conf',
      '_static/glance-manage'),
     ('../../etc/oslo-config-generator/glance-registry.conf',


@@ -50,12 +50,3 @@ This sample configuration can also be viewed in `glance-cache.conf.sample
 <_static/glance-cache.conf.sample>`_.
 .. literalinclude:: _static/glance-cache.conf.sample
-Sample configuration for Glare
-------------------------------
-This sample configuration can also be viewed in `glance-glare.conf.sample
-<_static/glance-glare.conf.sample>`_.
-.. literalinclude:: _static/glance-glare.conf.sample

File diff suppressed because it is too large.


@@ -1,10 +0,0 @@
[DEFAULT]
wrap_width = 80
output_file = etc/glance-glare.conf.sample
namespace = glance.glare
namespace = glance.store
namespace = oslo.db
namespace = oslo.db.concurrency
namespace = keystonemiddleware.auth_token
namespace = oslo.log
namespace = oslo.middleware.cors
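
Each namespace listed above is resolved by oslo-config-generator through an
oslo.config.opts entry point naming a callable that returns option groups. A
minimal sketch of such a hook, assuming a hypothetical list_glare_opts in
glance/opts.py:

# Hypothetical list_opts-style hook for the 'glance.glare' namespace; the
# entry point would be declared in setup.cfg under [entry_points]
# oslo.config.opts as:  glance.glare = glance.opts:list_glare_opts
from oslo_config import cfg

_glare_opts = [
    (None, [cfg.StrOpt('public_endpoint',
                       help='Public url endpoint for versions response.')]),
]

def list_glare_opts():
    # oslo-config-generator expects a list of (group, [opts]) pairs.
    return [(group, list(opts)) for group, opts in _glare_opts]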


@@ -1,922 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
import glance_store
import jsonschema
from oslo_config import cfg
from oslo_serialization import jsonutils as json
from oslo_utils import encodeutils
from oslo_utils import excutils
import semantic_version
import six
import six.moves.urllib.parse as urlparse
import webob.exc
from glance.common import exception
from glance.common.glare import loader
from glance.common.glare import serialization
from glance.common import jsonpatchvalidator
from glance.common import utils
from glance.common import wsgi
import glance.db
from glance.glare import gateway
from glance.glare import Showlevel
from glance.i18n import _, _LE
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')):
sys.path.insert(0, possible_topdir)
CONF = cfg.CONF
CONF.import_group("profiler", "glance.common.wsgi")
class ArtifactsController(object):
def __init__(self, db_api=None, store_api=None, plugins=None):
self.db_api = db_api or glance.db.get_api()
self.store_api = store_api or glance_store
self.plugins = plugins or loader.ArtifactsPluginLoader(
'glance.artifacts.types')
self.gateway = gateway.Gateway(self.db_api,
self.store_api, self.plugins)
@staticmethod
def _do_update_op(artifact, change):
"""Call corresponding method of the updater proxy.
Here 'change' is a typical jsonpatch request dict:
* 'path' - a json-pointer string;
* 'op' - one of the allowed operation types;
* 'value' - value to set (omitted when op = remove)
"""
update_op = getattr(artifact, change['op'])
update_op(change['path'], change.get('value'))
return artifact
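# Illustrative change dicts for the method above:
#   {'op': 'replace', 'path': '/name', 'value': 'new-name'}
#       -> artifact.replace('/name', 'new-name')
#   {'op': 'remove', 'path': '/tags/0'}
#       -> artifact.remove('/tags/0', None)  ('value' absent, .get() -> None)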
@staticmethod
def _get_artifact_with_dependencies(repo, art_id,
type_name=None, type_version=None):
"""Retrieves an artifact with dependencies from db by its id.
Show level is direct (only direct dependencies are shown).
"""
return repo.get(art_id, show_level=Showlevel.DIRECT,
type_name=type_name, type_version=type_version)
def show(self, req, type_name, type_version,
show_level=Showlevel.TRANSITIVE, **kwargs):
"""Retrieves one artifact by id with its dependencies"""
artifact_repo = self.gateway.get_artifact_repo(req.context)
try:
art_id = kwargs.get('id')
artifact = artifact_repo.get(art_id, type_name=type_name,
type_version=type_version,
show_level=show_level)
return artifact
except exception.ArtifactNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
def list(self, req, type_name, type_version, state, **kwargs):
"""Retrieves a list of artifacts that match some params"""
artifact_repo = self.gateway.get_artifact_repo(req.context)
filters = kwargs.pop('filters', {})
filters.update(type_name={'value': type_name},
state={'value': state})
if type_version is not None:
filters['type_version'] = {'value': type_version}
if 'version' in filters:
for filter in filters['version']:
if filter['value'] == 'latest':
if 'name' not in filters:
raise webob.exc.HTTPBadRequest(
'Filtering by latest version without specifying'
' a name is not supported.')
filter['value'] = self._get_latest_version(
req, filters['name'][0]['value'], type_name,
type_version)
else:
try:
semantic_version.Version(filter['value'], partial=True)
except ValueError:
msg = (_('The format of the version %s is not valid. '
'Use semver notation') % filter['value'])
raise webob.exc.HTTPBadRequest(explanation=msg)
res = artifact_repo.list(filters=filters,
show_level=Showlevel.BASIC,
**kwargs)
result = {'artifacts': res}
limit = kwargs.get("limit")
if limit is not None and len(res) != 0 and len(res) == limit:
result['next_marker'] = res[-1].id
return result
def _get_latest_version(self, req, name, type_name, type_version=None,
state='creating'):
artifact_repo = self.gateway.get_artifact_repo(req.context)
filters = dict(name=[{"value": name}],
type_name={"value": type_name},
state={"value": state})
if type_version is not None:
filters["type_version"] = {"value": type_version}
result = artifact_repo.list(filters=filters,
show_level=Showlevel.NONE,
sort_keys=[('version', None)])
if len(result):
return result[0].version
msg = "No artifacts have been found"
raise exception.ArtifactNotFound(message=msg)
@utils.mutating
def create(self, req, artifact_type, artifact_data, **kwargs):
try:
artifact_factory = self.gateway.get_artifact_type_factory(
req.context, artifact_type)
new_artifact = artifact_factory.new_artifact(**artifact_data)
artifact_repo = self.gateway.get_artifact_repo(req.context)
artifact_repo.add(new_artifact)
# retrieve artifact from db
return self._get_artifact_with_dependencies(artifact_repo,
new_artifact.id)
except (TypeError,
exception.ArtifactNotFound,
exception.Invalid,
exception.DuplicateLocation) as e:
raise webob.exc.HTTPBadRequest(explanation=e)
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.LimitExceeded as e:
raise webob.exc.HTTPRequestEntityTooLarge(
explanation=e.msg, request=req, content_type='text/plain')
except exception.Duplicate as e:
raise webob.exc.HTTPConflict(explanation=e.msg)
except exception.NotAuthenticated as e:
raise webob.exc.HTTPUnauthorized(explanation=e.msg)
@utils.mutating
def update_property(self, req, id, type_name, type_version, path, data,
**kwargs):
"""Updates a single property specified by request url."""
artifact_repo = self.gateway.get_artifact_repo(req.context)
try:
artifact = self._get_artifact_with_dependencies(artifact_repo, id,
type_name,
type_version)
self._ensure_write_access(artifact, req.context)
if artifact.metadata.attributes.blobs.get(path) is not None:
msg = _('Invalid Content-Type for work with %s') % path
raise webob.exc.HTTPBadRequest(explanation=msg)
# use updater mixin to perform updates: generate update path
if req.method == "PUT":
# replaces existing value or creates a new one
if getattr(artifact, kwargs["attr"]):
artifact.replace(path=path, value=data)
else:
artifact.add(path=path, value=data)
else:
# append to an existing value or create a new one
artifact.add(path=path, value=data)
artifact_repo.save(artifact)
return self._get_artifact_with_dependencies(artifact_repo, id)
except (exception.InvalidArtifactPropertyValue,
exception.ArtifactInvalidProperty,
exception.InvalidJsonPatchPath,
exception.ArtifactCircularDependency) as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.NotAuthenticated as e:
raise webob.exc.HTTPUnauthorized(explanation=e.msg)
@utils.mutating
def update(self, req, id, type_name, type_version, changes, **kwargs):
"""Performs an update via json patch request"""
artifact_repo = self.gateway.get_artifact_repo(req.context)
try:
artifact = self._get_artifact_with_dependencies(artifact_repo, id,
type_name,
type_version)
self._ensure_write_access(artifact, req.context)
updated = artifact
for change in changes:
if artifact.metadata.attributes.blobs.get(change['path']):
msg = _('Invalid request PATCH for work with blob')
raise webob.exc.HTTPBadRequest(explanation=msg)
else:
updated = self._do_update_op(updated, change)
artifact_repo.save(updated)
return self._get_artifact_with_dependencies(artifact_repo, id)
except (exception.InvalidJsonPatchPath,
exception.Invalid) as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.StorageQuotaFull as e:
msg = (_("Denying attempt to upload artifact because it exceeds "
"the quota: %s") % encodeutils.exception_to_unicode(e))
raise webob.exc.HTTPRequestEntityTooLarge(
explanation=msg, request=req, content_type='text/plain')
except exception.LimitExceeded as e:
raise webob.exc.HTTPRequestEntityTooLarge(
explanation=e.msg, request=req, content_type='text/plain')
except exception.NotAuthenticated as e:
raise webob.exc.HTTPUnauthorized(explanation=e.msg)
@utils.mutating
def delete(self, req, id, type_name, type_version, **kwargs):
artifact_repo = self.gateway.get_artifact_repo(req.context)
try:
artifact = self._get_artifact_with_dependencies(
artifact_repo, id, type_name=type_name,
type_version=type_version)
self._ensure_write_access(artifact, req.context)
artifact_repo.remove(artifact)
except exception.Invalid as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
except (glance_store.Forbidden, exception.Forbidden) as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except (glance_store.NotFound, exception.NotFound) as e:
msg = (_("Failed to find artifact %(artifact_id)s to delete") %
{'artifact_id': id})
raise webob.exc.HTTPNotFound(explanation=msg)
except glance_store.exceptions.InUseByStore as e:
msg = (_("Artifact %s could not be deleted "
"because it is in use: %s") % (id, e.msg)) # noqa
raise webob.exc.HTTPConflict(explanation=msg)
except exception.NotAuthenticated as e:
raise webob.exc.HTTPUnauthorized(explanation=e.msg)
@utils.mutating
def publish(self, req, id, type_name, type_version, **kwargs):
artifact_repo = self.gateway.get_artifact_repo(req.context)
try:
artifact = self._get_artifact_with_dependencies(
artifact_repo, id, type_name=type_name,
type_version=type_version)
self._ensure_write_access(artifact, req.context)
return artifact_repo.publish(artifact, context=req.context)
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Invalid as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
except exception.NotAuthenticated as e:
raise webob.exc.HTTPUnauthorized(explanation=e.msg)
def _upload_list_property(self, method, blob_list, index, data, size):
if method == 'PUT' and not index and len(blob_list) > 0:
# PUT replaces everything, so PUT to non-empty collection is
# forbidden
raise webob.exc.HTTPMethodNotAllowed(
explanation=_("Unable to PUT to non-empty collection"))
if index is not None and index > len(blob_list):
raise webob.exc.HTTPBadRequest(
explanation=_("Index is out of range"))
if index is None:
# both POST and PUT create a new blob list
blob_list.append((data, size))
elif method == 'POST':
blob_list.insert(index, (data, size))
else:
blob_list[index] = (data, size)
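# Illustrative outcomes for the helper above, given a non-empty blob list:
#   PUT  index=None -> 405, PUT may not replace a non-empty collection
#   PUT  index=0    -> also 405, since 'not index' is true for 0
#   PUT  index=1    -> blob_list[1] is replaced
#   POST index=None -> appended at the end
#   POST index=1    -> inserted before the current second element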
@utils.mutating
def upload(self, req, id, type_name, type_version, attr, size, data,
index, **kwargs):
artifact_repo = self.gateway.get_artifact_repo(req.context)
artifact = None
try:
artifact = self._get_artifact_with_dependencies(artifact_repo,
id,
type_name,
type_version)
self._ensure_write_access(artifact, req.context)
blob_prop = artifact.metadata.attributes.blobs.get(attr)
if blob_prop is None:
raise webob.exc.HTTPBadRequest(
explanation=_("Not a blob property '%s'") % attr)
if isinstance(blob_prop, list):
blob_list = getattr(artifact, attr)
self._upload_list_property(req.method, blob_list,
index, data, size)
else:
if index is not None:
raise webob.exc.HTTPBadRequest(
explanation=_("Not a list property '%s'") % attr)
setattr(artifact, attr, (data, size))
artifact_repo.save(artifact)
return artifact
except ValueError as e:
exc_message = encodeutils.exception_to_unicode(e)
LOG.debug("Cannot save data for artifact %(id)s: %(e)s",
{'id': id, 'e': exc_message})
self._restore(artifact_repo, artifact)
raise webob.exc.HTTPBadRequest(
explanation=exc_message)
except glance_store.StoreAddDisabled:
msg = _("Error in store configuration. Adding artifacts to store "
"is disabled.")
LOG.exception(msg)
self._restore(artifact_repo, artifact)
raise webob.exc.HTTPGone(explanation=msg, request=req,
content_type='text/plain')
except (glance_store.Duplicate,
exception.InvalidImageStatusTransition) as e:
LOG.exception(encodeutils.exception_to_unicode(e))
raise webob.exc.HTTPConflict(explanation=e.msg, request=req)
except exception.Forbidden as e:
msg = ("Not allowed to upload data for artifact %s" %
id)
LOG.debug(msg)
raise webob.exc.HTTPForbidden(explanation=msg, request=req)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except glance_store.StorageFull as e:
msg = _("Artifact storage media "
"is full: %s") % encodeutils.exception_to_unicode(e)
LOG.error(msg)
self._restore(artifact_repo, artifact)
raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg,
request=req)
except exception.StorageQuotaFull as e:
msg = _("Artifact exceeds the storage "
"quota: %s") % encodeutils.exception_to_unicode(e)
LOG.error(msg)
self._restore(artifact_repo, artifact)
raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg,
request=req)
except exception.ImageSizeLimitExceeded as e:
msg = _("The incoming artifact blob is "
"too large: %s") % encodeutils.exception_to_unicode(e)
LOG.error(msg)
self._restore(artifact_repo, artifact)
raise webob.exc.HTTPRequestEntityTooLarge(explanation=msg,
request=req)
except glance_store.StorageWriteDenied as e:
msg = _("Insufficient permissions on artifact "
"storage media: %s") % encodeutils.exception_to_unicode(e)
LOG.error(msg)
self._restore(artifact_repo, artifact)
raise webob.exc.HTTPServiceUnavailable(explanation=msg,
request=req)
except webob.exc.HTTPGone as e:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed to upload artifact blob data due to"
" HTTP error"))
except webob.exc.HTTPError as e:
with excutils.save_and_reraise_exception():
LOG.error(_LE("Failed to upload artifact blob data due to HTTP"
" error"))
self._restore(artifact_repo, artifact)
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Failed to upload artifact blob data due to "
"internal error"))
self._restore(artifact_repo, artifact)
def download(self, req, id, type_name, type_version, attr, index,
**kwargs):
artifact_repo = self.gateway.get_artifact_repo(req.context)
try:
artifact = artifact_repo.get(id, type_name, type_version)
if attr in artifact.metadata.attributes.blobs:
if isinstance(artifact.metadata.attributes.blobs[attr], list):
if index is None:
raise webob.exc.HTTPBadRequest(
explanation=_("Index is required"))
blob_list = getattr(artifact, attr)
try:
return blob_list[index]
except IndexError as e:
raise webob.exc.HTTPBadRequest(explanation=e.message)
else:
if index is not None:
raise webob.exc.HTTPBadRequest(_("Not a list "
"property"))
return getattr(artifact, attr)
else:
message = _("Not a downloadable entity")
raise webob.exc.HTTPBadRequest(explanation=message)
except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg)
except (glance_store.NotFound, exception.NotFound) as e:
raise webob.exc.HTTPNotFound(explanation=e.msg)
except exception.Invalid as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
def _restore(self, artifact_repo, artifact):
"""Restore the artifact to queued status.
:param artifact_repo: The instance of ArtifactRepo
:param artifact: The artifact to be restored
"""
try:
if artifact_repo and artifact:
artifact.state = 'creating'
artifact_repo.save(artifact)
except Exception as e:
msg = (_LE("Unable to restore artifact %(artifact_id)s: %(e)s") %
{'artifact_id': artifact.id,
'e': encodeutils.exception_to_unicode(e)})
LOG.exception(msg)
def list_artifact_types(self, req):
plugins = self.plugins.plugin_map
response = []
base_link = "%s/v0.1/artifacts" % (CONF.public_endpoint or
req.host_url)
for type_name, plugin in six.iteritems(plugins.get("by_typename")):
metadata = dict(
type_name=type_name,
displayed_name=plugin[0].metadata.type_display_name,
versions=[]
)
for version in plugin:
endpoint = version.metadata.endpoint
type_version = "v" + version.metadata.type_version
version_metadata = dict(
id=type_version,
link="%s/%s/%s" % (base_link, endpoint, type_version)
)
type_description = version.metadata.type_description
if type_description is not None:
version_metadata['description'] = type_description
metadata['versions'].append(version_metadata)
response.append(metadata)
return {"artifact_types": response}
@staticmethod
def _ensure_write_access(artifact, context):
if context.is_admin:
return
if context.owner is None or context.owner != artifact.owner:
raise exception.ArtifactForbidden(id=artifact.id)
class RequestDeserializer(wsgi.JSONRequestDeserializer,
jsonpatchvalidator.JsonPatchValidatorMixin):
_available_sort_keys = ('name', 'status', 'container_format',
'disk_format', 'size', 'id', 'created_at',
'updated_at', 'version')
_default_sort_dir = 'desc'
_max_limit_number = 1000
def __init__(self, schema=None, plugins=None):
super(RequestDeserializer, self).__init__(
methods_allowed=["replace", "remove", "add"])
self.plugins = plugins or loader.ArtifactsPluginLoader(
'glance.artifacts.types')
def _validate_show_level(self, show_level):
try:
return Showlevel.from_str(show_level.strip().lower())
except exception.ArtifactUnsupportedShowLevel as e:
raise webob.exc.HTTPBadRequest(explanation=e.message)
def show(self, req):
res = self._process_type_from_request(req, True)
params = req.params.copy()
show_level = params.pop('show_level', None)
if show_level is not None:
res['show_level'] = self._validate_show_level(show_level)
return res
def _get_request_body(self, req):
output = super(RequestDeserializer, self).default(req)
if 'body' not in output:
msg = _('Body expected in request.')
raise webob.exc.HTTPBadRequest(explanation=msg)
return output['body']
def validate_body(self, request):
try:
body = self._get_request_body(request)
return super(RequestDeserializer, self).validate_body(body)
except exception.JsonPatchException as e:
raise webob.exc.HTTPBadRequest(explanation=e)
def default(self, request):
return self._process_type_from_request(request)
def _check_type_version(self, type_version):
try:
semantic_version.Version(type_version, partial=True)
except ValueError as e:
raise webob.exc.HTTPBadRequest(explanation=e)
def _process_type_from_request(self, req,
allow_implicit_version=False):
try:
type_name = req.urlvars.get('type_name')
type_version = req.urlvars.get('type_version')
if type_version is not None:
self._check_type_version(type_version)
# Even if the type_version is not specified and
# 'allow_implicit_version' is False, this call is still needed to
# ensure that at least one version of this type exists.
artifact_type = self.plugins.get_class_by_endpoint(type_name,
type_version)
res = {
'type_name': artifact_type.metadata.type_name,
'type_version':
artifact_type.metadata.type_version
if type_version is not None else None
}
if allow_implicit_version:
res['artifact_type'] = artifact_type
return res
except exception.ArtifactPluginNotFound as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
def _validate_headers(self, req, content_type='application/json'):
header = req.headers.get('Content-Type')
if header != content_type:
msg = _('Invalid headers "Content-Type": %s') % header
raise webob.exc.HTTPBadRequest(explanation=msg)
def create(self, req):
self._validate_headers(req)
res = self._process_type_from_request(req, True)
res["artifact_data"] = self._get_request_body(req)
return res
def update(self, req):
self._validate_headers(req)
res = self._process_type_from_request(req)
res["changes"] = self.validate_body(req)
return res
def update_property(self, req):
"""Data is expected in form {'data': ...}"""
self._validate_headers(req)
res = self._process_type_from_request(req)
data_schema = {
"type": "object",
"properties": {"data": {}},
"required": ["data"],
"$schema": "http://json-schema.org/draft-04/schema#"}
try:
json_body = json.loads(req.body)
jsonschema.validate(json_body, data_schema)
# TODO(ivasilevskaya):
# by now the deepest nesting level == 1 (ex. some_list/3),
# has to be fixed for dict properties
attr = req.urlvars["attr"]
path_left = req.urlvars["path_left"]
path = (attr if not path_left
else "%(attr)s/%(path_left)s" % {'attr': attr,
'path_left': path_left})
res.update(data=json_body["data"], path=path)
return res
except (ValueError, jsonschema.ValidationError) as e:
msg = _("Invalid json body: %s") % e.message
raise webob.exc.HTTPBadRequest(explanation=msg)
def upload(self, req):
self._validate_headers(req, content_type='application/octet-stream')
res = self._process_type_from_request(req)
index = req.urlvars.get('path_left')
try:
# for blobs only one level of indexing is supported
# (ex. bloblist/0)
if index is not None:
index = int(index)
except ValueError:
msg = _("Only list indexes are allowed for blob lists")
raise webob.exc.HTTPBadRequest(explanation=msg)
artifact_size = req.content_length or None
res.update(size=artifact_size, data=req.body_file,
index=index)
return res
def download(self, req):
res = self._process_type_from_request(req)
index = req.urlvars.get('index')
if index is not None:
index = int(index)
res.update(index=index)
return res
def _validate_limit(self, limit):
if limit is None:
return self._max_limit_number
try:
limit = int(limit)
except ValueError:
msg = _("Limit param must be an integer")
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit < 0:
msg = _("Limit param must be positive")
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit > self._max_limit_number:
msg = _("Limit param"
" must not be higher than %d") % self._max_limit_number
raise webob.exc.HTTPBadRequest(explanation=msg)
return limit
def _validate_sort_key(self, sort_key, artifact_type, type_version=None):
if sort_key in self._available_sort_keys:
return sort_key, None
elif type_version is None:
msg = (_('Invalid sort key: %(sort_key)s. '
'If type version is not set it must be one of'
' the following: %(available)s.') %
{'sort_key': sort_key,
'available': ', '.join(self._available_sort_keys)})
raise webob.exc.HTTPBadRequest(explanation=msg)
prop_type = artifact_type.metadata.attributes.all.get(sort_key)
if prop_type is None or prop_type.DB_TYPE not in ['string',
'numeric',
'int',
'bool']:
msg = (_('Invalid sort key: %(sort_key)s. '
'You cannot sort by this property') %
{'sort_key': sort_key})
raise webob.exc.HTTPBadRequest(explanation=msg)
return sort_key, prop_type.DB_TYPE
def _validate_sort_dir(self, sort_dir):
if sort_dir not in ['asc', 'desc']:
msg = _('Invalid sort direction: %s') % sort_dir
raise webob.exc.HTTPBadRequest(explanation=msg)
return sort_dir
def _get_sorting_params(self, params, artifact_type, type_version=None):
sort_keys = []
sort_dirs = []
if 'sort' in params:
for sort_param in params.pop('sort').strip().split(','):
key, _sep, dir = sort_param.partition(':')
if not dir:
dir = self._default_sort_dir
sort_keys.append(self._validate_sort_key(key.strip(),
artifact_type,
type_version))
sort_dirs.append(self._validate_sort_dir(dir.strip()))
if not sort_keys:
sort_keys = [('created_at', None)]
if not sort_dirs:
sort_dirs = [self._default_sort_dir]
return sort_keys, sort_dirs
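# Illustrative parsing by _get_sorting_params (both keys here are in
# _available_sort_keys, so the DB type resolves to None):
#   ?sort=name:asc,version -> sort_keys [('name', None), ('version', None)]
#                             sort_dirs ['asc', 'desc']   (default dir)
#   no 'sort' param        -> sort_keys [('created_at', None)],
#                             sort_dirs ['desc']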
def _bring_to_type(self, type_name, value):
mapper = {'int': int,
'string': str,
'text': str,
'bool': bool,
'numeric': float}
return mapper[type_name](value)
def _get_filters(self, artifact_type, params):
error_msg = 'Unexpected filter property'
filters = dict()
for filter, raw_value in params.items():
# first, get the comparison operator
left, sep, right = raw_value.strip().partition(':')
if not sep:
op = "default"
value = left.strip()
else:
op = left.strip().upper()
value = right.strip()
# then, understand what's the property to filter and its value
if '.' in filter: # Indicates a dict-valued property with a key
prop_name, key = filter.split('.', 1)
else:
prop_name = filter
key = None
prop_type = artifact_type.metadata.attributes.all.get(prop_name)
if prop_type is None:
raise webob.exc.HTTPBadRequest(error_msg)
key_only_check = False
position = None
if isinstance(prop_type, dict):
if key is None:
key = value
val = None
key_only_check = True
else:
val = value
if isinstance(prop_type.properties, dict):
# This one is to handle the case of composite dict, having
# different types of values at different keys, i.e. object
prop_type = prop_type.properties.get(key)
if prop_type is None:
raise webob.exc.HTTPBadRequest(error_msg)
else:
prop_type = prop_type.properties
property_name = prop_name + '.' + key
property_value = val
else:
if key is not None:
raise webob.exc.HTTPBadRequest(error_msg)
property_name = prop_name
property_value = value
# now detect the value DB type
if prop_type.DB_TYPE is not None:
str_type = prop_type.DB_TYPE
elif isinstance(prop_type, list):
if not isinstance(prop_type.item_type, list):
position = "any"
str_type = prop_type.item_type.DB_TYPE
else:
raise webob.exc.HTTPBadRequest('Filtering by tuple-like'
' fields is not supported')
else:
raise webob.exc.HTTPBadRequest(error_msg)
if property_value is not None:
property_value = self._bring_to_type(str_type, property_value)
# convert the default operation to NE, EQ or IN
if key_only_check:
if op == 'default':
op = 'NE'
else:
raise webob.exc.HTTPBadRequest('Comparison not supported '
'for key-only filtering')
else:
if op == 'default':
op = 'IN' if isinstance(prop_type, list) else 'EQ'
filters.setdefault(property_name, [])
filters[property_name].append(dict(operator=op, position=position,
value=property_value,
type=str_type))
return filters
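# Illustrative query-string handling by _get_filters ('name' is assumed to
# be a plain string property; property names are hypothetical):
#   ?name=foo    -> {'name': [{'operator': 'EQ', 'position': None,
#                              'value': 'foo', 'type': 'string'}]}
#   ?name=NE:foo -> same shape with operator 'NE'
# Dict-valued properties are filtered as '<prop>.<key>=<value>'; passing only
# '?<prop>=<key>' is a key-only check and the default operator becomes 'NE'.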
def list(self, req):
res = self._process_type_from_request(req, True)
params = req.params.copy()
show_level = params.pop('show_level', None)
if show_level is not None:
res['show_level'] = self._validate_show_level(show_level.strip())
limit = params.pop('limit', None)
marker = params.pop('marker', None)
query_params = dict()
query_params['sort_keys'], query_params['sort_dirs'] = (
self._get_sorting_params(params, res['artifact_type'],
res['type_version']))
if marker is not None:
query_params['marker'] = marker
query_params['limit'] = self._validate_limit(limit)
query_params['filters'] = self._get_filters(res['artifact_type'],
params)
query_params['type_name'] = res['artifact_type'].metadata.type_name
return query_params
def list_artifact_types(self, req):
return {}
class ResponseSerializer(wsgi.JSONResponseSerializer):
# TODO(ivasilevskaya): ideally this should be autogenerated/loaded
ARTIFACTS_ENDPOINT = '/v0.1/artifacts'
fields = ['id', 'name', 'version', 'type_name', 'type_version',
'visibility', 'state', 'owner', 'scope', 'created_at',
'updated_at', 'tags', 'dependencies', 'blobs', 'properties']
def __init__(self, schema=None):
super(ResponseSerializer, self).__init__()
def default(self, response, res):
artifact = serialization.serialize_for_client(
res, show_level=Showlevel.DIRECT)
body = json.dumps(artifact, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def create(self, response, artifact):
response.status_int = 201
self.default(response, artifact)
response.location = (
'%(root_url)s/%(type_name)s/v%(type_version)s/%(id)s' % dict(
root_url=ResponseSerializer.ARTIFACTS_ENDPOINT,
type_name=artifact.metadata.endpoint,
type_version=artifact.metadata.type_version,
id=artifact.id))
def list(self, response, res):
params = dict(response.request.params)
params.pop('marker', None)
query = urlparse.urlencode(params)
type_name = response.request.urlvars.get('type_name')
type_version = response.request.urlvars.get('type_version')
if response.request.urlvars.get('state') == 'creating':
drafts = "/drafts"
else:
drafts = ""
artifacts_list = [
serialization.serialize_for_client(a, show_level=Showlevel.NONE)
for a in res['artifacts']]
url = "/v0.1/artifacts"
if type_name:
url += "/" + type_name
if type_version:
url += "/v" + type_version
url += drafts
if query:
first_url = url + "?" + query
else:
first_url = url
body = {
"artifacts": artifacts_list,
"first": first_url
}
if 'next_marker' in res:
params['marker'] = res['next_marker']
next_query = urlparse.urlencode(params)
body['next'] = url + '?' + next_query
content = json.dumps(body, ensure_ascii=False)
response.unicode_body = six.text_type(content)
response.content_type = 'application/json'
def delete(self, response, result):
response.status_int = 204
def download(self, response, blob):
response.headers['Content-Type'] = 'application/octet-stream'
response.app_iter = iter(blob.data_stream)
if blob.checksum:
response.headers['Content-MD5'] = blob.checksum
response.headers['Content-Length'] = str(blob.size)
def list_artifact_types(self, response, res):
body = json.dumps(res, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.content_type = 'application/json'
def create_resource():
"""Images resource factory method"""
plugins = loader.ArtifactsPluginLoader('glance.artifacts.types')
deserializer = RequestDeserializer(plugins=plugins)
serializer = ResponseSerializer()
controller = ArtifactsController(plugins=plugins)
return wsgi.Resource(controller, deserializer, serializer)


@@ -1,98 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.api.glare.v0_1 import glare
from glance.common import wsgi
UUID_REGEX = (
R'[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}')
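# The regex only admits version-4 UUIDs: the third group must begin with '4'
# and the fourth with one of '89ab'. For example:
#   6f2c9d1e-3b4a-4c5d-8e7f-0a1b2c3d4e5f   matches
#   6f2c9d1e-3b4a-1c5d-8e7f-0a1b2c3d4e5f   does not (version nibble is not 4)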
class API(wsgi.Router):
def _get_artifacts_resource(self):
if not self.artifacts_resource:
self.artifacts_resource = glare.create_resource()
return self.artifacts_resource
def __init__(self, mapper):
self.artifacts_resource = None
artifacts_resource = self._get_artifacts_resource()
reject_method_resource = wsgi.Resource(wsgi.RejectMethodController())
def _check_json_content_type(environ, result):
return "application/json" in environ["CONTENT_TYPE"]
def _check_octet_stream_content_type(environ, result):
return "application/octet-stream" in environ["CONTENT_TYPE"]
def connect_routes(m, read_only):
with m.submapper(resource_name="artifact_operations",
path_prefix="/{id}",
requirements={'id': UUID_REGEX}) as art:
art.show()
if not read_only:
art.delete()
art.action('update', method='PATCH')
art.link('publish', method='POST')
def connect_attr_action(attr):
if not read_only:
attr.action("upload", conditions={
'method': ["POST", "PUT"],
'function': _check_octet_stream_content_type})
attr.action("update_property",
conditions={
'method': ["POST", "PUT"],
'function': _check_json_content_type})
attr.link("download", method="GET")
attr_map = art.submapper(resource_name="attr_operations",
path_prefix="/{attr}", path_left=None)
attr_items = art.submapper(
resource_name="attr_item_ops",
path_prefix="/{attr}/{path_left:.*}")
connect_attr_action(attr_map)
connect_attr_action(attr_items)
m.connect("", action='list', conditions={'method': 'GET'},
state='active')
m.connect("/drafts", action='list', conditions={'method': 'GET'},
state='creating')
if not read_only:
m.connect("/drafts", action='create',
conditions={'method': 'POST'})
mapper.connect('/artifacts',
controller=artifacts_resource,
action='list_artifact_types',
conditions={'method': ['GET']})
versioned = mapper.submapper(path_prefix='/artifacts/{type_name}/'
'v{type_version}',
controller=artifacts_resource)
non_versioned = mapper.submapper(path_prefix='/artifacts/{type_name}',
type_version=None,
controller=artifacts_resource)
connect_routes(versioned, False)
connect_routes(non_versioned, True)
mapper.connect('/artifacts',
controller=reject_method_resource,
action='reject',
allowed_methods='GET')
super(API, self).__init__(mapper)
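# Illustrative URL layout wired up above for the versioned mapper (the
# non-versioned form is identical but read-only):
#   GET       /artifacts                              list_artifact_types
#   GET       /artifacts/{type}/v{ver}                list (state='active')
#   GET       /artifacts/{type}/v{ver}/drafts         list (state='creating')
#   POST      /artifacts/{type}/v{ver}/drafts         create
#   GET       /artifacts/{type}/v{ver}/{id}           show
#   DELETE    /artifacts/{type}/v{ver}/{id}           delete
#   PATCH     /artifacts/{type}/v{ver}/{id}           update
#   POST      /artifacts/{type}/v{ver}/{id}/publish   publish
#   GET       /artifacts/{type}/v{ver}/{id}/{attr}    download
#   POST/PUT  /artifacts/{type}/v{ver}/{id}/{attr}    upload (octet-stream)
#                                                     or update_property (json)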


@@ -1,93 +0,0 @@
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_serialization import jsonutils
from six.moves import http_client
import webob.dec
from glance.common import wsgi
from glance import i18n
_ = i18n._
versions_opts = [
# Note: Since both glance-api and glare-api have the same name for the
# option public_endpoint, oslo.config generator throws a DuplicateError
# exception during the conf file generation in case of differing help
# texts. Hence we have to have identical help texts for glance-api and
# glare-api's public_endpoint if not for changing the conf opt name.
cfg.StrOpt('public_endpoint',
help=_("""
Public url endpoint to use for Glance/Glare versions response.
This is the public url endpoint that will appear in the Glance/Glare
"versions" response. If no value is specified, the endpoint that is
displayed in the version's response is that of the host running the
API service. Change the endpoint to represent the proxy URL if the
API service is running behind a proxy. If the service is running
behind a load balancer, add the load balancer's URL for this value.
Possible values:
* None
* Proxy URL
* Load balancer URL
Related options:
* None
""")),
]
CONF = cfg.CONF
CONF.register_opts(versions_opts)
class Controller(object):
"""A wsgi controller that reports which API versions are supported."""
def index(self, req, explicit=False):
"""Respond to a request for all OpenStack API versions."""
def build_version_object(version, path, status):
url = CONF.public_endpoint or req.host_url
return {
'id': 'v%s' % version,
'status': status,
'links': [
{
'rel': 'self',
'href': '%s/%s/' % (url, path),
},
],
}
version_objs = [build_version_object(0.1, 'v0.1', 'EXPERIMENTAL')]
status = explicit and http_client.OK or http_client.MULTIPLE_CHOICES
response = webob.Response(request=req,
status=status,
content_type='application/json')
response.body = jsonutils.dump_as_bytes(dict(versions=version_objs))
return response
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
return self.index(req)
def create_resource(conf):
return wsgi.Resource(Controller())
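
Since the only version this controller ever registers is the experimental
v0.1, the body assembled by build_version_object always has the following
shape (the endpoint URL is illustrative):

# Shape of the Glare 'versions' response; host and port are illustrative.
{
    "versions": [
        {
            "id": "v0.1",
            "status": "EXPERIMENTAL",
            "links": [{"rel": "self",
                       "href": "http://glare.example.com:9494/v0.1/"}],
        }
    ]
}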


@@ -22,7 +22,6 @@ return
 from oslo_config import cfg
 from oslo_log import log as logging
-from glance.api.glare import versions as artifacts_versions
 from glance.api import versions
 from glance.common import wsgi
@@ -119,15 +118,3 @@ class VersionNegotiationFilter(wsgi.Middleware):
         r = path[:idx]
         req.path_info = path[idx:]
         return r
-class GlareVersionNegotiationFilter(VersionNegotiationFilter):
-    def __init__(self, app):
-        super(GlareVersionNegotiationFilter, self).__init__(app)
-        self.versions_app = artifacts_versions.Controller()
-        self.vnd_mime_type = 'application/vnd.openstack.artifacts-'
-    def _get_allowed_versions(self):
-        return {
-            'v0.1': 0.1
-        }


@@ -24,18 +24,11 @@ from glance.i18n import _, _LW
 versions_opts = [
-    # Note: Since both glance-api and glare-api have the same name for the
-    # option public_endpoint, oslo.config generator throws a DuplicateError
-    # exception during the conf file generation in case of differing help
-    # texts. Hence we have to have identical help texts for glance-api and
-    # glare-api's public_endpoint if not for changing the conf opt name.
     cfg.StrOpt('public_endpoint',
                help=_("""
-Public url endpoint to use for Glance/Glare versions response.
-This is the public url endpoint that will appear in the Glance/Glare
+Public url endpoint to use for Glance versions response.
+This is the public url endpoint that will appear in the Glance
 "versions" response. If no value is specified, the endpoint that is
 displayed in the version's response is that of the host running the
 API service. Change the endpoint to represent the proxy URL if the


@@ -1,87 +0,0 @@
#!/usr/bin/env python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Glare (Glance Artifact Repository) API service
"""
import sys
import eventlet
from oslo_utils import encodeutils
eventlet.patcher.monkey_patch(all=False, socket=True, time=True,
select=True, thread=True, os=True)
import glance_store
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
import osprofiler.notifier
import osprofiler.web
from glance.common import config
from glance.common import exception
from glance.common import wsgi
from glance import notifier
CONF = cfg.CONF
CONF.import_group("profiler", "glance.common.wsgi")
logging.register_options(CONF)
KNOWN_EXCEPTIONS = (RuntimeError,
exception.WorkerCreationFailure,
glance_store.exceptions.BadStoreConfiguration)
def fail(e):
global KNOWN_EXCEPTIONS
return_code = KNOWN_EXCEPTIONS.index(type(e)) + 1
sys.stderr.write("ERROR: %s\n" % encodeutils.exception_to_unicode(e))
sys.exit(return_code)
def main():
try:
config.parse_args()
wsgi.set_eventlet_hub()
logging.setup(CONF, 'glare')
if cfg.CONF.profiler.enabled:
_notifier = osprofiler.notifier.create("Messaging",
oslo_messaging, {},
notifier.get_transport(),
"glance", "artifacts",
cfg.CONF.bind_host)
osprofiler.notifier.set(_notifier)
else:
osprofiler.web.disable()
server = wsgi.Server(initialize_glance_store=True)
server.start(config.load_paste_app('glare-api'), default_port=9494)
server.wait()
except KNOWN_EXCEPTIONS as e:
fail(e)
if __name__ == '__main__':
main()
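# Illustrative exit codes produced by fail(), i.e.
# KNOWN_EXCEPTIONS.index(type(e)) + 1:
#   RuntimeError                                  -> 1
#   exception.WorkerCreationFailure               -> 2
#   glance_store.exceptions.BadStoreConfiguration -> 3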


@@ -452,101 +452,6 @@ class MetadefTagNotFound(NotFound):
                 " namespace=%(namespace_name)s.")
-class InvalidVersion(Invalid):
-    message = _("Version is invalid: %(reason)s")
-class InvalidArtifactTypePropertyDefinition(Invalid):
-    message = _("Invalid property definition")
-class InvalidArtifactTypeDefinition(Invalid):
-    message = _("Invalid type definition")
-class InvalidArtifactPropertyValue(Invalid):
-    message = _("Property '%(name)s' may not have value '%(val)s': %(msg)s")
-    def __init__(self, message=None, *args, **kwargs):
-        super(InvalidArtifactPropertyValue, self).__init__(message, *args,
-                                                           **kwargs)
-        self.name = kwargs.get('name')
-        self.value = kwargs.get('val')
-class ArtifactNotFound(NotFound):
-    message = _("Artifact with id=%(id)s was not found")
-class ArtifactForbidden(Forbidden):
-    message = _("Artifact with id=%(id)s is not accessible")
-class ArtifactDuplicateNameTypeVersion(Duplicate):
-    message = _("Artifact with the specified type, name and version"
-                " already exists")
-class InvalidArtifactStateTransition(Invalid):
-    message = _("Artifact cannot change state from %(source)s to %(target)s")
-class ArtifactDuplicateDirectDependency(Duplicate):
-    message = _("Artifact with the specified type, name and version"
-                " already has the direct dependency=%(dep)s")
-class ArtifactDuplicateTransitiveDependency(Duplicate):
-    message = _("Artifact with the specified type, name and version"
-                " already has the transitive dependency=%(dep)s")
-class ArtifactCircularDependency(Invalid):
-    message = _("Artifact with a circular dependency can not be created")
-class ArtifactUnsupportedPropertyOperator(Invalid):
-    message = _("Operator %(op)s is not supported")
-class ArtifactUnsupportedShowLevel(Invalid):
-    message = _("Show level %(shl)s is not supported in this operation")
-class ArtifactPropertyValueNotFound(NotFound):
-    message = _("Property's %(prop)s value has not been found")
-class ArtifactInvalidProperty(Invalid):
-    message = _("Artifact has no property %(prop)s")
-class ArtifactInvalidPropertyParameter(Invalid):
-    message = _("Cannot use this parameter with the operator %(op)s")
-class ArtifactLoadError(GlanceException):
-    message = _("Cannot load artifact '%(name)s'")
-class ArtifactNonMatchingTypeName(ArtifactLoadError):
-    message = _("Plugin name '%(plugin)s' should match "
-                "artifact typename '%(name)s'")
-class ArtifactPluginNotFound(NotFound):
-    message = _("No plugin for '%(name)s' has been loaded")
-class UnknownArtifactType(NotFound):
-    message = _("Artifact type with name '%(name)s' and version '%(version)s' "
-                "is not known")
-class ArtifactInvalidStateTransition(Invalid):
-    message = _("Artifact state cannot be changed from %(curr)s to %(to)s")
 class JsonPatchException(GlanceException):
     message = _("Invalid jsonpatch request")


@@ -1,743 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import re
import semantic_version
import six
from glance.common import exception as exc
from glance.i18n import _
class AttributeDefinition(object):
"""A base class for the attribute definitions which may be added to
declaratively defined artifact types
"""
ALLOWED_TYPES = (object,)
def __init__(self,
display_name=None,
description=None,
readonly=False,
mutable=True,
required=False,
default=None):
"""Initializes attribute definition
:param display_name: Display name of the attribute
:param description: Description of the attribute
:param readonly: Flag indicating if the value of attribute may not be
changed once an artifact is created
:param mutable: Flag indicating if the value of attribute may not be
changed once an artifact is published
:param required: Flag indicating if the value of attribute is required
:param default: default value of the attribute
"""
self.name = None
self.display_name = display_name
self.description = description
self.readonly = readonly
self.required = required
self.mutable = mutable
self.default = default
self._add_validator('type',
lambda v: isinstance(v, self.ALLOWED_TYPES),
_("Not a valid value type"))
self._validate_default()
def _set_name(self, value):
self.name = value
if self.display_name is None:
self.display_name = value
def _add_validator(self, name, func, message):
if not hasattr(self, '_validators'):
self._validators = []
self._validators_index = {}
pair = (func, message)
self._validators.append(pair)
self._validators_index[name] = pair
def _get_validator(self, name):
return self._validators_index.get(name)
def _remove_validator(self, name):
pair = self._validators_index.pop(name, None)
if pair is not None:
self._validators.remove(pair)
def _check_definition(self):
self._validate_default()
def _validate_default(self):
if self.default:
try:
self.validate(self.default, 'default')
except exc.InvalidArtifactPropertyValue:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Default value is invalid"))
def get_value(self, obj):
return getattr(obj, self.name)
def set_value(self, obj, value):
return setattr(obj, self.name, value)
def validate(self, value, name=None):
if value is None:
if self.required:
raise exc.InvalidArtifactPropertyValue(
name=name or self.name,
val=value,
msg=_('Value is required'))
else:
return
first_error = next((msg for v_func, msg in self._validators
if not v_func(value)), None)
if first_error:
raise exc.InvalidArtifactPropertyValue(name=name or self.name,
val=value,
msg=first_error)
class ListAttributeDefinition(AttributeDefinition):
"""A base class for Attribute definitions having List-semantics
Is inherited by Array, ArtifactReferenceList and BinaryObjectList
"""
ALLOWED_TYPES = (list,)
ALLOWED_ITEM_TYPES = (AttributeDefinition, )
def _check_item_type(self, item):
if not isinstance(item, self.ALLOWED_ITEM_TYPES):
raise exc.InvalidArtifactTypePropertyDefinition(
_('Invalid item type specification'))
if item.default is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_('List definitions may not have defaults'))
def __init__(self, item_type, min_size=0, max_size=None, unique=False,
**kwargs):
super(ListAttributeDefinition, self).__init__(**kwargs)
if isinstance(item_type, list):
for it in item_type:
self._check_item_type(it)
# we need to copy the item_type collection
self.item_type = item_type[:]
if min_size != 0:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Cannot specify 'min_size' explicitly")
)
if max_size is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Cannot specify 'max_size' explicitly")
)
# setting max_size and min_size to the length of item_type,
# as tuple-semantic assumes that the number of elements is set
# by the type spec
min_size = max_size = len(item_type)
else:
self._check_item_type(item_type)
self.item_type = item_type
if min_size:
self.min_size(min_size)
if max_size:
self.max_size(max_size)
if unique:
self.unique()
def min_size(self, value):
self._min_size = value
if value is not None:
self._add_validator('min_size',
lambda v: len(v) >= self._min_size,
_('List size is less than minimum'))
else:
self._remove_validator('min_size')
def max_size(self, value):
self._max_size = value
if value is not None:
self._add_validator('max_size',
lambda v: len(v) <= self._max_size,
_('List size is greater than maximum'))
else:
self._remove_validator('max_size')
def unique(self, value=True):
self._unique = value
if value:
def _unique(items):
seen = set()
for item in items:
if item in seen:
return False
seen.add(item)
return True
self._add_validator('unique',
_unique, _('Items have to be unique'))
else:
self._remove_validator('unique')
def _set_name(self, value):
super(ListAttributeDefinition, self)._set_name(value)
if isinstance(self.item_type, list):
for i, item in enumerate(self.item_type):
item._set_name("%s[%i]" % (value, i))
else:
self.item_type._set_name("%s[*]" % value)
def validate(self, value, name=None):
super(ListAttributeDefinition, self).validate(value, name)
if value is not None:
for i, item in enumerate(value):
self._validate_item_at(item, i)
def get_item_definition_at_index(self, index):
if isinstance(self.item_type, list):
if index < len(self.item_type):
return self.item_type[index]
else:
return None
return self.item_type
def _validate_item_at(self, item, index):
item_type = self.get_item_definition_at_index(index)
# set name if none has been given to the list element at given index
if (isinstance(self.item_type, list) and item_type and
not item_type.name):
item_type.name = "%s[%i]" % (self.name, index)
if item_type:
item_type.validate(item)
class DictAttributeDefinition(AttributeDefinition):
"""A base class for Attribute definitions having Map-semantics
Is inherited by Dict
"""
ALLOWED_TYPES = (dict,)
ALLOWED_PROPERTY_TYPES = (AttributeDefinition,)
def _check_prop(self, key, item):
if (not isinstance(item, self.ALLOWED_PROPERTY_TYPES) or
(key is not None and not isinstance(key, six.string_types))):
raise exc.InvalidArtifactTypePropertyDefinition(
_('Invalid dict property type specification'))
@staticmethod
def _validate_key(key):
if not isinstance(key, six.string_types):
raise exc.InvalidArtifactPropertyValue(
_('Invalid dict property type'))
def __init__(self, properties, min_properties=0, max_properties=0,
**kwargs):
super(DictAttributeDefinition, self).__init__(**kwargs)
if isinstance(properties, dict):
for key, value in six.iteritems(properties):
self._check_prop(key, value)
# copy the properties dict
self.properties = properties.copy()
self._add_validator('keys',
lambda v: set(v.keys()) <= set(
self.properties.keys()),
_('Dictionary contains unexpected key(s)'))
else:
self._check_prop(None, properties)
self.properties = properties
if min_properties:
self.min_properties(min_properties)
if max_properties:
self.max_properties(max_properties)
def min_properties(self, value):
self._min_properties = value
if value is not None:
self._add_validator('min_properties',
lambda v: len(v) >= self._min_properties,
_('Dictionary size is less than '
'minimum'))
else:
self._remove_validator('min_properties')
def max_properties(self, value):
self._max_properties = value
if value is not None:
self._add_validator('max_properties',
lambda v: len(v) <= self._max_properties,
_('Dictionary size is '
'greater than maximum'))
else:
self._remove_validator('max_properties')
def _set_name(self, value):
super(DictAttributeDefinition, self)._set_name(value)
if isinstance(self.properties, dict):
for k, v in six.iteritems(self.properties):
v._set_name(value)
else:
self.properties._set_name(value)
def validate(self, value, name=None):
super(DictAttributeDefinition, self).validate(value, name)
if value is not None:
for k, v in six.iteritems(value):
self._validate_item_with_key(v, k)
def _validate_item_with_key(self, value, key):
self._validate_key(key)
if isinstance(self.properties, dict):
prop_def = self.properties.get(key)
if prop_def is not None:
name = "%s[%s]" % (prop_def.name, key)
prop_def.validate(value, name=name)
else:
name = "%s[%s]" % (self.properties.name, key)
self.properties.validate(value, name=name)
def get_prop_definition_at_key(self, key):
if isinstance(self.properties, dict):
return self.properties.get(key)
else:
return self.properties
class PropertyDefinition(AttributeDefinition):
"""A base class for Attributes defining generic or type-specific metadata
properties
"""
DB_TYPE = None
def __init__(self,
internal=False,
allowed_values=None,
validators=None,
**kwargs):
"""Defines a metadata property
:param internal: a flag indicating that the property is internal, i.e.
not returned to client
:param allowed_values: specifies a list of values allowed for the
property
:param validators: specifies a list of custom validators for the
property
"""
super(PropertyDefinition, self).__init__(**kwargs)
self.internal = internal
self._allowed_values = None
if validators is not None:
try:
for i, (f, m) in enumerate(validators):
self._add_validator("custom_%i" % i, f, m)
except ValueError:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Custom validators list should contain tuples "
"'(function, message)'"))
if allowed_values is not None:
# copy the allowed_values, as this is going to create a
# closure, and we need to make sure that external modification of
# this list does not affect the created validator
self.allowed_values(allowed_values)
self._check_definition()
def _validate_allowed_values(self):
if self._allowed_values:
try:
for allowed_value in self._allowed_values:
self.validate(allowed_value, 'allowed_value')
except exc.InvalidArtifactPropertyValue:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Allowed values %s are invalid under given validators") %
self._allowed_values)
def allowed_values(self, values):
self._allowed_values = values[:]
if values is not None:
self._add_validator('allowed', lambda v: v in self._allowed_values,
_("Is not allowed value"))
else:
self._remove_validator('allowed')
self._check_definition()
def _check_definition(self):
self._validate_allowed_values()
super(PropertyDefinition, self)._check_definition()
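# Illustrative use of PropertyDefinition (the property is hypothetical):
#   visibility = PropertyDefinition(default='private',
#                                   allowed_values=['private', 'public'],
#                                   validators=[(lambda v: len(v) <= 32,
#                                                'Value is too long')])
# Custom validators are (function, message) tuples; allowed_values() copies
# the list and installs a membership validator on top of them.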
class RelationDefinition(AttributeDefinition):
"""A base class for Attributes defining cross-artifact relations"""
def __init__(self, internal=False, **kwargs):
self.internal = internal
kwargs.setdefault('mutable', False)
# if mutable=True has been passed -> raise an exception
if kwargs['mutable'] is True:
raise exc.InvalidArtifactTypePropertyDefinition(
_("Dependency relations cannot be mutable"))
super(RelationDefinition, self).__init__(**kwargs)
class BlobDefinition(AttributeDefinition):
"""A base class for Attributes defining binary objects"""
pass
class ArtifactTypeMetaclass(type):
"""A metaclass to build Artifact Types. Not intended to be used directly
Use `get_declarative_base` to get the base class instead
"""
def __init__(cls, class_name, bases, attributes):
if '_declarative_artifact_type' not in cls.__dict__:
_build_declarative_meta(cls)
super(ArtifactTypeMetaclass, cls).__init__(class_name, bases,
attributes)
class ArtifactPropertyDescriptor(object):
"""A descriptor object for working with artifact attributes"""
def __init__(self, prop, collection_wrapper_class=None):
self.prop = prop
self.collection_wrapper_class = collection_wrapper_class
def __get__(self, instance, owner):
if instance is None:
# accessed via owner class
return self.prop
else:
v = getattr(instance, '_' + self.prop.name, None)
if v is None and self.prop.default is not None:
v = copy.copy(self.prop.default)
self.__set__(instance, v, ignore_mutability=True)
return self.__get__(instance, owner)
else:
if v is not None and self.collection_wrapper_class:
if self.prop.readonly:
readonly = True
elif (not self.prop.mutable and
hasattr(instance, '__is_mutable__') and
not hasattr(instance,
'__suspend_mutability_checks__')):
readonly = not instance.__is_mutable__()
else:
readonly = False
if readonly:
v = v.__make_immutable__()
return v
def __set__(self, instance, value, ignore_mutability=False):
if instance:
if self.prop.readonly:
if hasattr(instance, '_' + self.prop.name):
raise exc.InvalidArtifactPropertyValue(
_('Attempt to set readonly property'))
if not self.prop.mutable:
if (hasattr(instance, '__is_mutable__') and
not hasattr(instance,
'__suspend_mutability_checks__')):
mutable = instance.__is_mutable__() or ignore_mutability
if not mutable:
raise exc.InvalidArtifactPropertyValue(
_('Attempt to set value of immutable property'))
if value is not None and self.collection_wrapper_class:
value = self.collection_wrapper_class(value)
value.property = self.prop
self.prop.validate(value)
setattr(instance, '_' + self.prop.name, value)
class ArtifactAttributes(object):
"""A container class storing description of Artifact Type attributes"""
def __init__(self):
self.properties = {}
self.dependencies = {}
self.blobs = {}
self.all = {}
@property
def default_dependency(self):
"""Returns the default dependency relation for an artifact type"""
if len(self.dependencies) == 1:
return list(self.dependencies.values())[0]
@property
def default_blob(self):
"""Returns the default blob object for an artifact type"""
if len(self.blobs) == 1:
return list(self.blobs.values())[0]
@property
def default_properties_dict(self):
"""Returns a default properties dict for an artifact type"""
dict_props = [v for v in self.properties.values() if
isinstance(v, DictAttributeDefinition)]
if len(dict_props) == 1:
return dict_props[0]
@property
def tags(self):
"""Returns tags property for an artifact type"""
return self.properties.get('tags')
def add(self, attribute):
self.all[attribute.name] = attribute
if isinstance(attribute, PropertyDefinition):
self.properties[attribute.name] = attribute
elif isinstance(attribute, BlobDefinition):
self.blobs[attribute.name] = attribute
elif isinstance(attribute, RelationDefinition):
self.dependencies[attribute.name] = attribute
class ArtifactTypeMetadata(object):
"""A container to store the meta-information about an artifact type"""
def __init__(self, type_name, type_display_name, type_version,
type_description, endpoint):
"""Initializes the Artifact Type metadata
:param type_name: name of the artifact type
:param type_display_name: display name of the artifact type
:param type_version: version of the artifact type
:param type_description: description of the artifact type
:param endpoint: REST API URI suffix to call the artifacts of this type
"""
self.attributes = ArtifactAttributes()
# These are going to be defined by third-party plugin
# developers, so we need to do some validations on these values and
# raise InvalidArtifactTypeDefinition if they are violated
self.type_name = type_name
self.type_display_name = type_display_name or type_name
self.type_version = type_version or '1.0'
self.type_description = type_description
self.endpoint = endpoint or type_name.lower()
self._validate_string(self.type_name, 'Type name', min_length=1,
max_length=255)
self._validate_string(self.type_display_name, 'Type display name',
max_length=255)
self._validate_string(self.type_description, 'Type description')
self._validate_string(self.endpoint, 'endpoint', min_length=1)
try:
semantic_version.Version(self.type_version, partial=True)
except ValueError:
raise exc.InvalidArtifactTypeDefinition(
message=_("Type version has to be a valid semver string"))
@staticmethod
def _validate_string(value, name, min_length=0, max_length=None,
pattern=None):
if value is None:
if min_length > 0:
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s is required"), attribute=name)
else:
return
if not isinstance(value, six.string_types):
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s have to be string"), attribute=name)
if max_length and len(value) > max_length:
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s may not be longer than %(length)i"),
attribute=name, length=max_length)
if min_length and len(value) < min_length:
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s may not be shorter than %(length)i"),
attribute=name, length=min_length)
if pattern and not re.match(pattern, value):
raise exc.InvalidArtifactTypeDefinition(
message=_("%(attribute)s should match pattern %(pattern)s"),
attribute=name, pattern=pattern.pattern)
def _build_declarative_meta(cls):
attrs = dict(cls.__dict__)
type_name = None
type_display_name = None
type_version = None
type_description = None
endpoint = None
for base in cls.__mro__:
for name, value in six.iteritems(vars(base)):
if name == '__type_name__':
if not type_name:
type_name = cls.__type_name__
elif name == '__type_version__':
if not type_version:
type_version = cls.__type_version__
elif name == '__type_description__':
if not type_description:
type_description = cls.__type_description__
elif name == '__endpoint__':
if not endpoint:
endpoint = cls.__endpoint__
elif name == '__type_display_name__':
if not type_display_name:
type_display_name = cls.__type_display_name__
elif base is not cls and name not in attrs:
if isinstance(value, AttributeDefinition):
attrs[name] = value
elif isinstance(value, ArtifactPropertyDescriptor):
attrs[name] = value.prop
meta = ArtifactTypeMetadata(type_name=type_name or cls.__name__,
type_display_name=type_display_name,
type_version=type_version,
type_description=type_description,
endpoint=endpoint)
setattr(cls, 'metadata', meta)
for k, v in attrs.items():
if k == 'metadata':
raise exc.InvalidArtifactTypePropertyDefinition(
_("Cannot declare artifact property with reserved name "
"'metadata'"))
if isinstance(v, AttributeDefinition):
v._set_name(k)
wrapper_class = None
if isinstance(v, ListAttributeDefinition):
wrapper_class = type("ValidatedList", (list,), {})
_add_validation_to_list(wrapper_class)
if isinstance(v, DictAttributeDefinition):
wrapper_class = type("ValidatedDict", (dict,), {})
_add_validation_to_dict(wrapper_class)
prop_descr = ArtifactPropertyDescriptor(v, wrapper_class)
setattr(cls, k, prop_descr)
meta.attributes.add(v)
def _validating_method(method, klass):
def wrapper(self, *args, **kwargs):
instance_copy = klass(self)
method(instance_copy, *args, **kwargs)
self.property.validate(instance_copy)
method(self, *args, **kwargs)
return wrapper
def _immutable_method(method):
def substitution(*args, **kwargs):
raise exc.InvalidArtifactPropertyValue(
_("Unable to modify collection in "
"immutable or readonly property"))
return substitution
def _add_immutable_wrappers(class_to_add, wrapped_methods):
for method_name in wrapped_methods:
method = getattr(class_to_add, method_name, None)
if method:
setattr(class_to_add, method_name, _immutable_method(method))
def _add_validation_wrappers(class_to_validate, base_class, validated_methods):
for method_name in validated_methods:
method = getattr(class_to_validate, method_name, None)
if method:
setattr(class_to_validate, method_name,
_validating_method(method, base_class))
readonly_class = type("Readonly" + class_to_validate.__name__,
(class_to_validate,), {})
_add_immutable_wrappers(readonly_class, validated_methods)
def __make_immutable__(self):
return readonly_class(self)
class_to_validate.__make_immutable__ = __make_immutable__
def _add_validation_to_list(list_based_class):
validated_methods = ['append', 'extend', 'insert', 'pop', 'remove',
'reverse', 'sort', '__setitem__', '__delitem__',
'__delslice__']
_add_validation_wrappers(list_based_class, list, validated_methods)
def _add_validation_to_dict(dict_based_class):
validated_methods = ['pop', 'popitem', 'setdefault', 'update',
'__delitem__', '__setitem__', 'clear']
_add_validation_wrappers(dict_based_class, dict, validated_methods)
def _kwarg_init_constructor(self, **kwargs):
self.__suspend_mutability_checks__ = True
try:
for k in kwargs:
if not hasattr(type(self), k):
raise exc.ArtifactInvalidProperty(prop=k)
setattr(self, k, kwargs[k])
self._validate_required(self.metadata.attributes.properties)
finally:
del self.__suspend_mutability_checks__
def _validate_required(self, attribute_dict):
for k, v in six.iteritems(attribute_dict):
if v.required and (not hasattr(self, k) or getattr(self, k) is None):
raise exc.InvalidArtifactPropertyValue(name=k, val=None,
msg=_('Value is required'))
def _update(self, values):
for k in values:
if hasattr(type(self), k):
setattr(self, k, values[k])
else:
raise exc.ArtifactInvalidProperty(prop=k)
def _pre_publish_validator(self, *args, **kwargs):
self._validate_required(self.metadata.attributes.blobs)
self._validate_required(self.metadata.attributes.dependencies)
_kwarg_init_constructor.__name__ = '__init__'
_pre_publish_validator.__name__ = '__pre_publish__'
_update.__name__ = 'update'
def get_declarative_base(name='base', base_class=object):
"""Returns a base class which should be inherited to construct Artifact
Type object using the declarative syntax of attribute definition
"""
bases = base_class if isinstance(base_class, tuple) else (base_class,)
class_dict = {'__init__': _kwarg_init_constructor,
'_validate_required': _validate_required,
'__pre_publish__': _pre_publish_validator,
'_declarative_artifact_type': True,
'update': _update}
return ArtifactTypeMetaclass(name, bases, class_dict)
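# A minimal sketch (editorial, not part of the removed file) of how the
# declarative machinery above is consumed; `HypotheticalType` and its
# single property are illustrative names only:
#
#     BASE = get_declarative_base()
#
#     class HypotheticalType(BASE):
#         __type_name__ = 'HypotheticalType'
#         __type_version__ = '1.0'
#         title = PropertyDefinition(required=True, mutable=False)
#
# ArtifactTypeMetaclass then runs _build_declarative_meta, replaces
# `title` with an ArtifactPropertyDescriptor and registers it in
# HypotheticalType.metadata.attributes.properties.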


@ -1,571 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import numbers
import re
import semantic_version
import six
import glance.common.exception as exc
from glance.common.glare import declarative
from glance.i18n import _
class Text(declarative.PropertyDefinition):
"""A text metadata property of arbitrary length
Maps to TEXT columns in database, does not support sorting or filtering
"""
ALLOWED_TYPES = (six.string_types,)
DB_TYPE = 'text'
# noinspection PyAttributeOutsideInit
class String(Text):
"""A string metadata property of limited length
Maps to VARCHAR columns in database, supports filtering and sorting.
May have constraints on length and regexp patterns.
The maximum length is limited to 255 characters.
"""
DB_TYPE = 'string'
def __init__(self, max_length=255, min_length=0, pattern=None, **kwargs):
"""Defines a String metadata property.
:param max_length: maximum value length
:param min_length: minimum value length
:param pattern: regexp pattern to match
"""
super(String, self).__init__(**kwargs)
self.max_length(max_length)
self.min_length(min_length)
if pattern:
self.pattern(pattern)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def max_length(self, value):
"""Sets the maximum value length"""
self._max_length = value
if value is not None:
if value > 255:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Max string length may not exceed 255 characters'))
self._add_validator('max_length',
lambda v: len(v) <= self._max_length,
_('Length is greater than maximum'))
else:
self._remove_validator('max_length')
self._check_definition()
def min_length(self, value):
"""Sets the minimum value length"""
self._min_length = value
if value is not None:
if value < 0:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Min string length may not be negative'))
self._add_validator('min_length',
lambda v: len(v) >= self._min_length,
_('Length is less than minimum'))
else:
self._remove_validator('min_length')
self._check_definition()
def pattern(self, value):
"""Sets the regexp pattern to match"""
self._pattern = value
if value is not None:
self._add_validator('pattern',
lambda v: re.match(self._pattern,
v) is not None,
_('Does not match pattern'))
else:
self._remove_validator('pattern')
self._check_definition()
class SemVerString(String):
"""A String metadata property matching semver pattern"""
def __init__(self, **kwargs):
def validate(value):
try:
semantic_version.Version(value, partial=True)
except ValueError:
return False
return True
super(SemVerString,
self).__init__(validators=[(validate,
"Invalid semver string")],
**kwargs)
# noinspection PyAttributeOutsideInit
class Integer(declarative.PropertyDefinition):
"""An Integer metadata property
Maps to INT columns in Database, supports filtering and sorting.
May have constraints on value
"""
ALLOWED_TYPES = (six.integer_types,)
DB_TYPE = 'int'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines an Integer metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(Integer, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
# noinspection PyAttributeOutsideInit
class DateTime(declarative.PropertyDefinition):
"""A DateTime metadata property
Maps to DATETIME columns in the database.
Not supported as a type-specific property; may be used only as a generic one.
May have constraints on value.
"""
ALLOWED_TYPES = (datetime.datetime,)
DB_TYPE = 'datetime'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines a DateTime metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(DateTime, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
# noinspection PyAttributeOutsideInit
class Numeric(declarative.PropertyDefinition):
"""A Numeric metadata property
Maps to floating point number columns in Database, supports filtering and
sorting. May have constraints on value
"""
ALLOWED_TYPES = numbers.Number
DB_TYPE = 'numeric'
def __init__(self, min_value=None, max_value=None, **kwargs):
"""Defines a Numeric metadata property
:param min_value: minimum allowed value
:param max_value: maximum allowed value
"""
super(Numeric, self).__init__(**kwargs)
if min_value is not None:
self.min_value(min_value)
if max_value is not None:
self.max_value(max_value)
# if default and/or allowed_values are specified (in base classes)
# then we need to validate them against the newly added validators
self._check_definition()
def min_value(self, value):
"""Sets the minimum allowed value"""
self._min_value = value
if value is not None:
self._add_validator('min_value',
lambda v: v >= self._min_value,
_('Value is less than minimum'))
else:
self._remove_validator('min_value')
self._check_definition()
def max_value(self, value):
"""Sets the maximum allowed value"""
self._max_value = value
if value is not None:
self._add_validator('max_value',
lambda v: v <= self._max_value,
_('Value is greater than maximum'))
else:
self._remove_validator('max_value')
self._check_definition()
class Boolean(declarative.PropertyDefinition):
"""A Boolean metadata property
Maps to Boolean columns in database. Supports filtering and sorting.
"""
ALLOWED_TYPES = (bool,)
DB_TYPE = 'bool'
class Array(declarative.ListAttributeDefinition,
declarative.PropertyDefinition, list):
"""An array metadata property
May contain elements of any other PropertyDefinition types except Dict and
Array. Each element maps to the appropriate column type in the database.
Preserves order. Allows filtering based on "Array contains Value" semantics.
May specify constraints on element types, their number and uniqueness.
"""
ALLOWED_ITEM_TYPES = (declarative.PropertyDefinition,)
def __init__(self, item_type=String(), min_size=0, max_size=None,
unique=False, extra_items=True, **kwargs):
"""Defines an Array metadata property
:param item_type: defines the types of elements in Array. If set to an
instance of PropertyDefinition then all the elements have to be of that
type. If set to a list of such instances, then the elements at the
corresponding positions have to be of the appropriate type.
:param min_size: minimum size of the Array
:param max_size: maximum size of the Array
:param unique: if set to true, all the elements in the Array have to be
unique
"""
if isinstance(item_type, Array):
msg = _("Array property can't have item_type=Array")
raise exc.InvalidArtifactTypePropertyDefinition(msg)
declarative.ListAttributeDefinition.__init__(self,
item_type=item_type,
min_size=min_size,
max_size=max_size,
unique=unique)
declarative.PropertyDefinition.__init__(self, **kwargs)
class Dict(declarative.DictAttributeDefinition,
declarative.PropertyDefinition, dict):
"""A dictionary metadata property
May contain elements of any other PropertyDefinition types except Dict.
Each element maps to the appropriate column type in the database. Allows
filtering and sorting by the values of each key, except the ones mapping
to Text fields.
May specify constraints on element types and their number.
"""
ALLOWED_PROPERTY_TYPES = (declarative.PropertyDefinition,)
def __init__(self, properties=String(), min_properties=0,
max_properties=None, **kwargs):
"""Defines a dictionary metadata property
:param properties: defines the types of dictionary values. If set to an
instance of PropertyDefinition then all the values have to be of that
type. If set to a dictionary with string keys and values of
PropertyDefinition type, then the elements mapped by the corresponding
keys have to be of the appropriate type.
:param min_properties: minimum allowed amount of properties in the dict
:param max_properties: maximum allowed amount of properties in the dict
"""
declarative.DictAttributeDefinition.__init__(
self,
properties=properties,
min_properties=min_properties,
max_properties=max_properties)
declarative.PropertyDefinition.__init__(self, **kwargs)
class ArtifactType(declarative.get_declarative_base()): # noqa
"""A base class for all the Artifact Type definitions
Defines the Generic metadata properties as attributes.
"""
id = String(required=True, readonly=True)
type_name = String(required=True, readonly=True)
type_version = SemVerString(required=True, readonly=True)
name = String(required=True, mutable=False)
version = SemVerString(required=True, mutable=False)
description = Text()
tags = Array(unique=True, default=[])
visibility = String(required=True,
allowed_values=["private", "public", "shared",
"community"],
default="private")
state = String(required=True, readonly=True, allowed_values=["creating",
"active",
"deactivated",
"deleted"])
owner = String(required=True, readonly=True)
created_at = DateTime(required=True, readonly=True)
updated_at = DateTime(required=True, readonly=True)
published_at = DateTime(readonly=True)
deleted_at = DateTime(readonly=True)
def __init__(self, **kwargs):
if "type_name" in kwargs:
raise exc.InvalidArtifactPropertyValue(
_("Unable to specify artifact type explicitly"))
if "type_version" in kwargs:
raise exc.InvalidArtifactPropertyValue(
_("Unable to specify artifact type version explicitly"))
super(ArtifactType,
self).__init__(type_name=self.metadata.type_name,
type_version=self.metadata.type_version, **kwargs)
def __eq__(self, other):
if not isinstance(other, ArtifactType):
return False
return self.id == other.id
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.id)
def __is_mutable__(self):
return self.state == "creating"
class ArtifactReference(declarative.RelationDefinition):
"""An artifact reference definition
Allows defining constraints by the name and version of the target artifact
"""
ALLOWED_TYPES = ArtifactType
def __init__(self, type_name=None, type_version=None, **kwargs):
"""Defines an artifact reference
:param type_name: type name of the target artifact
:param type_version: type version of the target artifact
"""
super(ArtifactReference, self).__init__(**kwargs)
if type_name is not None:
if isinstance(type_name, list):
type_names = list(type_name)
if type_version is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Unable to specify version '
'if multiple types are possible'))
else:
type_names = [type_name]
def validate_reference(artifact):
if artifact.type_name not in type_names:
return False
if (type_version is not None and
artifact.type_version != type_version):
return False
return True
self._add_validator('referenced_type',
validate_reference,
_("Invalid referenced type"))
elif type_version is not None:
raise exc.InvalidArtifactTypePropertyDefinition(
_('Unable to specify version '
'if type is not specified'))
self._check_definition()
class ArtifactReferenceList(declarative.ListAttributeDefinition,
declarative.RelationDefinition, list):
"""A list of Artifact References
Allows defining a collection of references to other artifacts, each
optionally constrained by type name and type version
"""
ALLOWED_ITEM_TYPES = (ArtifactReference,)
def __init__(self, references=ArtifactReference(), min_size=0,
max_size=None, **kwargs):
if isinstance(references, list):
raise exc.InvalidArtifactTypePropertyDefinition(
_("Invalid reference list specification"))
declarative.RelationDefinition.__init__(self, **kwargs)
declarative.ListAttributeDefinition.__init__(self,
item_type=references,
min_size=min_size,
max_size=max_size,
unique=True,
default=[]
if min_size == 0 else
None)
class Blob(object):
"""A Binary object being part of the Artifact"""
def __init__(self, size=0, locations=None, checksum=None, item_key=None):
"""Initializes a new Binary Object for an Artifact
:param size: the size of Binary Data
:param locations: a list of data locations in backing stores
:param checksum: a checksum for the data
"""
if locations is None:
locations = []
self.size = size
self.checksum = checksum
self.locations = locations
self.item_key = item_key
def to_dict(self):
return {
"size": self.size,
"checksum": self.checksum,
}
class BinaryObject(declarative.BlobDefinition, Blob):
"""A definition of BinaryObject binding
Adds a BinaryObject to an Artifact Type, optionally constrained by file
size and number of locations
"""
ALLOWED_TYPES = (Blob,)
def __init__(self,
max_file_size=None,
min_file_size=None,
min_locations=None,
max_locations=None,
**kwargs):
"""Defines a binary object as part of Artifact Type
:param max_file_size: maximum size of the associated Blob
:param min_file_size: minimum size of the associated Blob
:param min_locations: minimum number of locations in the associated
Blob
:param max_locations: maximum number of locations in the associated
Blob
"""
mutable = kwargs.pop('mutable', False)
if mutable:
raise exc.InvalidArtifactTypePropertyDefinition(
_("BinaryObject property cannot be declared mutable"))
super(BinaryObject, self).__init__(default=None, readonly=False,
mutable=mutable, **kwargs)
self._max_file_size = max_file_size
self._min_file_size = min_file_size
self._min_locations = min_locations
self._max_locations = max_locations
self._add_validator('size_not_empty',
lambda v: v.size is not None,
_('Blob size is not set'))
if max_file_size:
self._add_validator('max_size',
lambda v: v.size <= self._max_file_size,
_("File too large"))
if min_file_size:
self._add_validator('min_size',
lambda v: v.size >= self._min_file_size,
_("File too small"))
if min_locations:
self._add_validator('min_locations',
lambda v: len(
v.locations) >= self._min_locations,
_("Too few locations"))
if max_locations:
self._add_validator(
'max_locations',
lambda v: len(v.locations) <= self._max_locations,
_("Too many locations"))
class BinaryObjectList(declarative.ListAttributeDefinition,
declarative.BlobDefinition, list):
"""A definition of binding to the list of BinaryObject
Adds a list of BinaryObject's to an artifact type, optionally constrained
by the number of objects in the list and their uniqueness
"""
ALLOWED_ITEM_TYPES = (BinaryObject,)
def __init__(self, objects=BinaryObject(), min_count=0, max_count=None,
**kwargs):
declarative.BlobDefinition.__init__(self, **kwargs)
declarative.ListAttributeDefinition.__init__(self,
item_type=objects,
min_size=min_count,
max_size=max_count,
unique=True)
self.default = [] if min_count == 0 else None
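# A minimal sketch (editorial, not part of the removed file) combining the
# property definitions above; the type and property names are hypothetical:
#
#     class HypotheticalPackage(ArtifactType):
#         display_name = String(required=True, max_length=80)
#         build_number = Integer(min_value=0)
#         keywords = Array(item_type=String(max_length=32), unique=True)
#         metrics = Dict(properties=Numeric())
#         payload = BinaryObject(max_file_size=1 << 30)
#         parent = ArtifactReference(type_name='HypotheticalPackage')
#
# Every assignment goes through ArtifactPropertyDescriptor.__set__, so,
# for example, setting a negative build_number raises
# InvalidArtifactPropertyValue.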


@ -1,190 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo_config import cfg
import semantic_version
from stevedore import enabled
from glance.common import exception
from glance.common.glare import definitions
from glance.i18n import _, _LE, _LI, _LW
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
plugins_opts = [
cfg.BoolOpt('load_enabled', default=True,
help=_('When false, no artifacts can be loaded regardless of'
' available_plugins. When true, artifacts can be'
' loaded.')),
cfg.ListOpt('available_plugins', default=[],
help=_('A list of artifacts that are allowed in the'
' format name or name-version. Empty list means that'
' any artifact can be loaded.'))
]
CONF = cfg.CONF
CONF.register_opts(plugins_opts)
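# A hypothetical configuration fragment exercising the options above
# (a sketch; only MyArtifact at type_version 1.0.0 would then load):
#
#     [DEFAULT]
#     load_enabled = true
#     available_plugins = MyArtifact-1.0.0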
class ArtifactsPluginLoader(object):
def __init__(self, namespace, test_plugins=None):
self.mgr = test_plugins or enabled.EnabledExtensionManager(
check_func=self._gen_check_func(),
namespace=namespace,
propagate_map_exceptions=True,
on_load_failure_callback=self._on_load_failure)
self.plugin_map = {'by_typename': {},
'by_endpoint': {}}
def _add_extension(ext):
"""
Plugins can be loaded either as a single plugin per entry point or as
entry_point=PLUGIN_LIST, where PLUGIN_LIST is a Python variable
holding a list of plugins
"""
def _load_one(plugin):
if issubclass(plugin, definitions.ArtifactType):
# make sure the plugin has the correct name
art_name = plugin.metadata.type_name
if art_name != ext.name:
raise exception.ArtifactNonMatchingTypeName(
name=art_name, plugin=ext.name)
# make sure that no plugin with the same name and version
# already exists
exists = self._get_plugins(ext.name)
new_tv = plugin.metadata.type_version
if any(e.metadata.type_version == new_tv for e in exists):
raise exception.ArtifactDuplicateNameTypeVersion()
self._add_plugin("by_endpoint", plugin.metadata.endpoint,
plugin)
self._add_plugin("by_typename", plugin.metadata.type_name,
plugin)
if isinstance(ext.plugin, list):
for p in ext.plugin:
_load_one(p)
else:
_load_one(ext.plugin)
# (ivasilevskaya) this looks pretty bad, as RuntimeError is too general,
# but stevedore's exception wrapping provides no specific class for this
# very case (no extensions found for the given namespace)
try:
self.mgr.map(_add_extension)
except RuntimeError as e:
LOG.error(_LE("Unable to load artifacts: %s"), e)
def _version(self, artifact):
return semantic_version.Version.coerce(artifact.metadata.type_version)
def _add_plugin(self, spec, name, plugin):
"""
Inserts a new plugin into the list of existing plugins, which is kept
sorted by descending type_version so that the latest one can be
retrieved with next()
"""
def _add(name, value):
self.plugin_map[spec][name] = value
old_order = copy.copy(self._get_plugins(name, spec=spec))
for i, p in enumerate(old_order):
if self._version(p) < self._version(plugin):
_add(name, old_order[0:i] + [plugin] + old_order[i:])
return
_add(name, old_order + [plugin])
def _get_plugins(self, name, spec="by_typename"):
if spec not in self.plugin_map.keys():
return []
return self.plugin_map[spec].get(name, [])
def _gen_check_func(self):
"""generates check_func for EnabledExtensionManager"""
def _all_forbidden(ext):
LOG.warn(_LW("Can't load artifact %s: load disabled in config") %
ext.name)
raise exception.ArtifactLoadError(name=ext.name)
def _all_allowed(ext):
LOG.info(
_LI("Artifact %s has been successfully loaded"), ext.name)
return True
if not CONF.load_enabled:
return _all_forbidden
if len(CONF.available_plugins) == 0:
return _all_allowed
available = []
for name in CONF.available_plugins:
type_name, version = (name.split('-', 1)
if '-' in name else (name, None))
available.append((type_name, version))
def _check_ext(ext):
try:
next(n for n, v in available
if n == ext.plugin.metadata.type_name and
(v is None or v == ext.plugin.metadata.type_version))
except StopIteration:
LOG.warn(_LW("Can't load artifact %s: not in"
" available_plugins list") % ext.name)
raise exception.ArtifactLoadError(name=ext.name)
LOG.info(
_LI("Artifact %s has been successfully loaded"), ext.name)
return True
return _check_ext
# this has to be done explicitly, as stevedore swallows exceptions and
# prints something irrelevant instead of the expected error message
def _on_load_failure(self, manager, ep, exc):
msg = (_LE("Could not load plugin from %(module)s") %
{"module": ep.module_name})
LOG.exception(msg)
raise exc
def _find_class_in_collection(self, collection, name, version=None):
try:
def _cmp_version(plugin, version):
ver = semantic_version.Version.coerce
return (ver(plugin.metadata.type_version) ==
ver(version))
if version:
return next((p for p in collection
if _cmp_version(p, version)))
return next((p for p in collection))
except StopIteration:
raise exception.ArtifactPluginNotFound(
name="%s %s" % (name, "v %s" % version if version else ""))
def get_class_by_endpoint(self, name, version=None):
if version is None:
classlist = self._get_plugins(name, spec="by_endpoint")
if not classlist:
raise exception.ArtifactPluginNotFound(name=name)
return self._find_class_in_collection(classlist, name)
return self._find_class_in_collection(
self._get_plugins(name, spec="by_endpoint"), name, version)
def get_class_by_typename(self, name, version=None):
return self._find_class_in_collection(
self._get_plugins(name, spec="by_typename"), name, version)
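# A minimal usage sketch (editorial); the namespace matches the sample
# plugins' setup.cfg, the version string is hypothetical:
#
#     loader = ArtifactsPluginLoader('glance.artifacts.types')
#     cls = loader.get_class_by_typename('MyArtifact', version='2.0')
#
# When `version` is omitted, the newest type_version wins, because
# _add_plugin keeps each plugin list sorted by descending type_version
# and _find_class_in_collection returns the first match.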


@ -1,328 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import six
from glance.common import exception
from glance.common.glare import declarative
from glance.common.glare import definitions
from glance import glare as ga
from glance.i18n import _
COMMON_ARTIFACT_PROPERTIES = ['id',
'type_name',
'type_version',
'name',
'version',
'description',
'visibility',
'state',
'tags',
'owner',
'created_at',
'updated_at',
'published_at',
'deleted_at']
def _serialize_list_prop(prop, values):
"""
A helper func called to correctly serialize an Array property.
Returns a list of {'type': some_supported_db_type, 'value': item} dicts,
one per serializable Array element.
"""
# FIXME: due to a potential bug in the declarative framework, for Arrays
# that are values of dict items (Dict(properties={"foo": Array()})),
# prop.get_value(artifact) returns the whole dict instead of the real
# list of items. So we can't rely on prop.get_value(artifact) and instead
# pass the correctly retrieved values to this function.
serialized_value = []
for i, val in enumerate(values or []):
db_type = prop.get_item_definition_at_index(i).DB_TYPE
if db_type is None:
continue
serialized_value.append({
'type': db_type,
'value': val
})
return serialized_value
def _serialize_dict_prop(artifact, prop, key, value, save_prop_func):
key_to_save = prop.name + '.' + key
dict_key_prop = prop.get_prop_definition_at_key(key)
db_type = dict_key_prop.DB_TYPE
if (db_type is None and
not isinstance(dict_key_prop,
declarative.ListAttributeDefinition)):
# nothing to do here, don't know how to deal with this type
return
elif isinstance(dict_key_prop,
declarative.ListAttributeDefinition):
serialized = _serialize_list_prop(
dict_key_prop,
# FIXME(see comment for _serialize_list_prop func)
values=(dict_key_prop.get_value(artifact) or {}).get(key, []))
save_prop_func(key_to_save, 'array', serialized)
else:
save_prop_func(key_to_save, db_type, value)
def _serialize_dependencies(artifact):
"""Returns a dict of serialized dependencies for given artifact"""
dependencies = {}
for relation in artifact.metadata.attributes.dependencies.values():
serialized_dependency = []
if isinstance(relation, declarative.ListAttributeDefinition):
for dep in relation.get_value(artifact):
serialized_dependency.append(dep.id)
else:
relation_data = relation.get_value(artifact)
if relation_data:
serialized_dependency.append(relation.get_value(artifact).id)
dependencies[relation.name] = serialized_dependency
return dependencies
def _serialize_blobs(artifact):
"""Return a dict of serialized blobs for given artifact"""
blobs = {}
for blob in artifact.metadata.attributes.blobs.values():
serialized_blob = []
if isinstance(blob, declarative.ListAttributeDefinition):
for b in blob.get_value(artifact) or []:
serialized_blob.append({
'size': b.size,
'locations': b.locations,
'checksum': b.checksum,
'item_key': b.item_key
})
else:
b = blob.get_value(artifact)
# if no value for blob has been set -> continue
if not b:
continue
serialized_blob.append({
'size': b.size,
'locations': b.locations,
'checksum': b.checksum,
'item_key': b.item_key
})
blobs[blob.name] = serialized_blob
return blobs
def serialize_for_db(artifact):
result = {}
custom_properties = {}
def _save_prop(prop_key, prop_type, value):
custom_properties[prop_key] = {
'type': prop_type,
'value': value
}
for prop in artifact.metadata.attributes.properties.values():
if prop.name in COMMON_ARTIFACT_PROPERTIES:
result[prop.name] = prop.get_value(artifact)
continue
if isinstance(prop, declarative.ListAttributeDefinition):
serialized_value = _serialize_list_prop(prop,
prop.get_value(artifact))
_save_prop(prop.name, 'array', serialized_value)
elif isinstance(prop, declarative.DictAttributeDefinition):
fields_to_set = prop.get_value(artifact) or {}
# if some keys are not present (as in prop == {}), their values have
# to be set to None.
# XXX FIXME prop.properties may be a dict ({'foo': '', 'bar': ''})
# or a String/Integer/etc. definition limiting the possible dict
# values. In the latter case there is no obvious way to remove old
# values during serialization.
if isinstance(prop.properties, dict):
for key in [k for k in prop.properties
if k not in fields_to_set.keys()]:
_serialize_dict_prop(artifact, prop, key, None, _save_prop)
# serialize values of properties present
for key, value in six.iteritems(fields_to_set):
_serialize_dict_prop(artifact, prop, key, value, _save_prop)
elif prop.DB_TYPE is not None:
_save_prop(prop.name, prop.DB_TYPE, prop.get_value(artifact))
result['properties'] = custom_properties
result['dependencies'] = _serialize_dependencies(artifact)
result['blobs'] = _serialize_blobs(artifact)
return result
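# Rough shape of the returned dict for a hypothetical artifact with one
# custom string property, one dependency and one blob (values elided):
#
#     {'id': ..., 'name': ..., 'version': ...,          # common props
#      'properties': {'display_name': {'type': 'string',
#                                      'value': 'demo'}},
#      'dependencies': {'parent': ['<artifact-id>']},
#      'blobs': {'payload': [{'size': 1024, 'locations': [...],
#                             'checksum': '...', 'item_key': '...'}]}}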
def _deserialize_blobs(artifact_type, blobs_from_db, artifact_properties):
"""Retrieves blobs from database"""
for blob_name, blob_value in six.iteritems(blobs_from_db):
if not blob_value:
continue
if isinstance(artifact_type.metadata.attributes.blobs.get(blob_name),
declarative.ListAttributeDefinition):
val = []
for v in blob_value:
b = definitions.Blob(size=v['size'],
locations=v['locations'],
checksum=v['checksum'],
item_key=v['item_key'])
val.append(b)
elif len(blob_value) == 1:
val = definitions.Blob(size=blob_value[0]['size'],
locations=blob_value[0]['locations'],
checksum=blob_value[0]['checksum'],
item_key=blob_value[0]['item_key'])
else:
raise exception.InvalidArtifactPropertyValue(
message=_('Blob %(name)s may not have multiple values'),
name=blob_name)
artifact_properties[blob_name] = val
def _deserialize_dependencies(artifact_type, deps_from_db,
artifact_properties, plugins):
"""Retrieves dependencies from database"""
for dep_name, dep_value in six.iteritems(deps_from_db):
if not dep_value:
continue
if isinstance(
artifact_type.metadata.attributes.dependencies.get(dep_name),
declarative.ListAttributeDefinition):
val = []
for v in dep_value:
val.append(deserialize_from_db(v, plugins))
elif len(dep_value) == 1:
val = deserialize_from_db(dep_value[0], plugins)
else:
raise exception.InvalidArtifactPropertyValue(
message=_('Relation %(name)s may not have multiple values'),
name=dep_name)
artifact_properties[dep_name] = val
def deserialize_from_db(db_dict, plugins):
artifact_properties = {}
type_name = None
type_version = None
for prop_name in COMMON_ARTIFACT_PROPERTIES:
prop_value = db_dict.pop(prop_name, None)
if prop_name == 'type_name':
type_name = prop_value
elif prop_name == 'type_version':
type_version = prop_value
else:
artifact_properties[prop_name] = prop_value
try:
artifact_type = plugins.get_class_by_typename(type_name, type_version)
except exception.ArtifactPluginNotFound:
raise exception.UnknownArtifactType(name=type_name,
version=type_version)
type_specific_properties = db_dict.pop('properties', {})
for prop_name, prop_value in six.iteritems(type_specific_properties):
prop_type = prop_value.get('type')
prop_value = prop_value.get('value')
if prop_value is None:
continue
if '.' in prop_name: # dict-based property
name, key = prop_name.split('.', 1)
artifact_properties.setdefault(name, {})
if prop_type == 'array':
artifact_properties[name][key] = [item.get('value') for item in
prop_value]
else:
artifact_properties[name][key] = prop_value
elif prop_type == 'array': # list-based property
artifact_properties[prop_name] = [item.get('value') for item in
prop_value]
else:
artifact_properties[prop_name] = prop_value
blobs = db_dict.pop('blobs', {})
_deserialize_blobs(artifact_type, blobs, artifact_properties)
dependencies = db_dict.pop('dependencies', {})
_deserialize_dependencies(artifact_type, dependencies,
artifact_properties, plugins)
return artifact_type(**artifact_properties)
def _process_blobs_for_client(artifact, result):
"""Processes artifact's blobs: adds download links and pretty-printed data.
The result is stored in 'result' dict.
"""
def build_uri(blob_attr, position=None):
"""A helper func to build download uri"""
template = "/artifacts/%(type)s/v%(version)s/%(id)s/%(prop)s/download"
format_dict = {
"type": artifact.metadata.endpoint,
"version": artifact.type_version,
"id": artifact.id,
"prop": blob_attr.name
}
if position is not None:
template = ("/artifacts/%(type)s/v%(version)s/"
"%(id)s/%(prop)s/%(position)s/download")
format_dict["position"] = position
return template % format_dict
for blob_attr in artifact.metadata.attributes.blobs.values():
value = blob_attr.get_value(artifact)
if value is None:
result[blob_attr.name] = None
elif isinstance(value, collections.Iterable):
res_list = []
for pos, blob in enumerate(value):
blob_dict = blob.to_dict()
blob_dict["download_link"] = build_uri(blob_attr, pos)
res_list.append(blob_dict)
result[blob_attr.name] = res_list
else:
result[blob_attr.name] = value.to_dict()
result[blob_attr.name]["download_link"] = build_uri(blob_attr)
def serialize_for_client(artifact, show_level=ga.Showlevel.NONE):
# use serialize_for_db and modify some fields
# (like properties, show only value, not type)
result = {}
for prop in artifact.metadata.attributes.properties.values():
result[prop.name] = prop.get_value(artifact)
if show_level > ga.Showlevel.NONE:
for dep in artifact.metadata.attributes.dependencies.values():
inner_show_level = (ga.Showlevel.DIRECT
if show_level == ga.Showlevel.DIRECT
else ga.Showlevel.NONE)
value = dep.get_value(artifact)
if value is None:
result[dep.name] = None
elif isinstance(value, list):
result[dep.name] = [serialize_for_client(v, inner_show_level)
for v in value]
else:
result[dep.name] = serialize_for_client(value,
inner_show_level)
_process_blobs_for_client(artifact, result)
return result
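# For a hypothetical artifact exposed under the 'images' endpoint, a blob
# attribute named 'payload' would be rendered for the client roughly as:
#
#     'payload': {'size': 1024,
#                 'checksum': '...',
#                 'download_link': '/artifacts/images/v2.0/<id>'
#                                  '/payload/download'}
#
# with one such dict per position when the attribute is a blob list.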


@ -1,175 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import operator
import semantic_version
from sqlalchemy.orm.properties import CompositeProperty
from sqlalchemy import sql
from glance.common import exception
from glance.i18n import _
MAX_COMPONENT_LENGTH = pow(2, 16) - 1
MAX_NUMERIC_PRERELEASE_LENGTH = 6
class DBVersion(object):
def __init__(self, components_long, prerelease, build):
"""
Creates a DBVersion object out of 3 component fields. This initializer
is supposed to be called from SQLAlchemy if 3 database columns are
mapped to this composite field.
:param components_long: a 64-bit long value, containing numeric
components of the version
:param prerelease: a prerelease label of the version, optionally
preformatted with leading zeroes in numeric-only parts of the label
:param build: a build label of the version
"""
version_string = '%s.%s.%s' % _long_to_components(components_long)
if prerelease:
version_string += '-' + _strip_leading_zeroes_from_prerelease(
prerelease)
if build:
version_string += '+' + build
self.version = semantic_version.Version(version_string)
def __repr__(self):
return str(self.version)
def __eq__(self, other):
return (isinstance(other, DBVersion) and
other.version == self.version)
def __ne__(self, other):
return (not isinstance(other, DBVersion)
or self.version != other.version)
def __composite_values__(self):
long_version = _version_to_long(self.version)
prerelease = _add_leading_zeroes_to_prerelease(self.version.prerelease)
build = '.'.join(self.version.build) if self.version.build else None
return long_version, prerelease, build
def parse(version_string):
version = semantic_version.Version.coerce(version_string)
return DBVersion(_version_to_long(version),
'.'.join(version.prerelease),
'.'.join(version.build))
def _check_limit(value):
if value > MAX_COMPONENT_LENGTH:
reason = _("Version component is too "
"large (%d max)") % MAX_COMPONENT_LENGTH
raise exception.InvalidVersion(reason=reason)
def _version_to_long(version):
"""
Converts the numeric part of the semver version into the 64-bit long value
using the following logic:
* major version is stored in first 16 bits of the value
* minor version is stored in next 16 bits
* patch version is stored in following 16 bits
* next 2 bits store a flag: if the version has a pre-release label these
bits are 00, otherwise they are 10, so that a release sorts after its
own pre-releases. The remaining flag values (01 and 11) are reserved.
* last 14 bits of the value are reserved for future usage
The numeric components of version are checked so their value does not
exceed 16 bits.
:param version: a semantic_version.Version object
"""
_check_limit(version.major)
_check_limit(version.minor)
_check_limit(version.patch)
major = version.major << 48
minor = version.minor << 32
patch = version.patch << 16
flag = 0 if version.prerelease else 2
flag <<= 14
return major | minor | patch | flag
def _long_to_components(value):
major = value >> 48
minor = (value - (major << 48)) >> 32
patch = (value - (major << 48) - (minor << 32)) >> 16
return str(major), str(minor), str(patch)
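# Worked example of the packing above for version 1.2.3 with no
# pre-release label (flag bits 10):
#
#     (1 << 48) | (2 << 32) | (3 << 16) | (2 << 14) == 0x1000200038000
#
# and _long_to_components(0x1000200038000) yields ('1', '2', '3'); the
# flag bits only influence ordering against pre-releases of the same
# numeric triple.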
def _add_leading_zeroes_to_prerelease(label_tuple):
if label_tuple is None:
return None
res = []
for component in label_tuple:
if component.isdigit():
if len(component) > MAX_NUMERIC_PRERELEASE_LENGTH:
reason = _("Prerelease numeric component is too large "
"(%d characters "
"max)") % MAX_NUMERIC_PRERELEASE_LENGTH
raise exception.InvalidVersion(reason=reason)
res.append(component.rjust(MAX_NUMERIC_PRERELEASE_LENGTH, '0'))
else:
res.append(component)
return '.'.join(res)
def _strip_leading_zeroes_from_prerelease(string_value):
res = []
for component in string_value.split('.'):
if component.isdigit():
val = component.lstrip('0')
if len(val) == 0: # Corner case: when the component is just '0'
val = '0' # it will be stripped completely, so restore it
res.append(val)
else:
res.append(component)
return '.'.join(res)
strict_op_map = {
operator.ge: operator.gt,
operator.le: operator.lt
}
class VersionComparator(CompositeProperty.Comparator):
def _get_comparison(self, values, op):
columns = self.__clause_element__().clauses
if op in strict_op_map:
stricter_op = strict_op_map[op]
else:
stricter_op = op
return sql.or_(stricter_op(columns[0], values[0]),
sql.and_(columns[0] == values[0],
op(columns[1], values[1])))
def __gt__(self, other):
return self._get_comparison(other.__composite_values__(), operator.gt)
def __ge__(self, other):
return self._get_comparison(other.__composite_values__(), operator.ge)
def __lt__(self, other):
return self._get_comparison(other.__composite_values__(), operator.lt)
def __le__(self, other):
return self._get_comparison(other.__composite_values__(), operator.le)
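# Sketch of the SQL emitted by _get_comparison for `version >= other`
# (column and bind names are illustrative):
#
#     components_long > :long
#     OR (components_long = :long AND prerelease >= :prerelease)
#
# The strict `>` on the first column ensures that numeric ties are
# decided by the pre-release label rather than accepted outright.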


@ -1,5 +0,0 @@
from glance.contrib.plugins.artifacts_sample.v1 import artifact as art1
from glance.contrib.plugins.artifacts_sample.v2 import artifact as art2
MY_ARTIFACT = [art1.MyArtifact, art2.MyArtifact]


@ -1,29 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common.glare import definitions
class BaseArtifact(definitions.ArtifactType):
__type_version__ = "1.0"
prop1 = definitions.String()
prop2 = definitions.Integer()
int_list = definitions.Array(item_type=definitions.Integer(max_value=10,
min_value=1))
depends_on = definitions.ArtifactReference(type_name='MyArtifact')
references = definitions.ArtifactReferenceList()
image_file = definitions.BinaryObject()
screenshots = definitions.BinaryObjectList()


@ -1,25 +0,0 @@
[metadata]
name = artifact
version = 0.0.1
description = A sample plugin for artifact loading
author = Inessa Vasilevskaya
author-email = ivasilevskaya@mirantis.com
classifier =
Development Status :: 3 - Alpha
License :: OSI Approved :: Apache Software License
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Intended Audience :: Developers
Environment :: Console
[global]
setup-hooks =
pbr.hooks.setup_hook
[entry_points]
glance.artifacts.types =
MyArtifact = glance.contrib.plugins.artifacts_sample:MY_ARTIFACT


@ -1,20 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import setuptools
# all other params will be taken from setup.cfg
setuptools.setup(packages=setuptools.find_packages(),
setup_requires=['pbr'], pbr=True)


@ -1 +0,0 @@
python-glanceclient


@ -1,25 +0,0 @@
[metadata]
name = image_artifact_plugin
version = 2.0
description = An artifact plugin for Imaging functionality
author = Alexander Tivelkov
author-email = ativelkov@mirantis.com
classifier =
Development Status :: 3 - Alpha
License :: OSI Approved :: Apache Software License
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Intended Audience :: Developers
Environment :: Console
[global]
setup-hooks =
pbr.hooks.setup_hook
[entry_points]
glance.artifacts.types =
Image = glance.contrib.plugins.image_artifact.version_selector:versions


@ -1,38 +0,0 @@
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common.glare import definitions
class ImageAsAnArtifact(definitions.ArtifactType):
__type_name__ = 'Image'
__endpoint__ = 'images'
file = definitions.BinaryObject(required=True)
disk_format = definitions.String(allowed_values=['ami', 'ari', 'aki',
'vhd', 'vhdx', 'vmdk',
'raw', 'qcow2', 'vdi',
'iso'],
required=True,
mutable=False)
container_format = definitions.String(allowed_values=['ami', 'ari',
'aki', 'bare',
'ovf', 'ova',
'docker'],
required=True,
mutable=False)
min_disk = definitions.Integer(min_value=0, default=0)
min_ram = definitions.Integer(min_value=0, default=0)
virtual_size = definitions.Integer(min_value=0)


@ -1,27 +0,0 @@
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common.glare import definitions
import glance.contrib.plugins.image_artifact.v1.image as v1
class ImageAsAnArtifact(v1.ImageAsAnArtifact):
__type_version__ = '1.1'
icons = definitions.BinaryObjectList()
similar_images = definitions.ArtifactReferenceList(
references=definitions.ArtifactReference('Image'))


@ -1,83 +0,0 @@
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common import exception
from glance.common.glare import definitions
import glance.contrib.plugins.image_artifact.v1_1.image as v1_1
# Since glanceclient is not in test-requirements.txt and the class below,
# ImageAsAnArtifact, is pending removal, a try/except is added to prevent
# an ImportError when module docs are generated
try:
import glanceclient
except ImportError:
glanceclient = None
from glance.i18n import _
class ImageAsAnArtifact(v1_1.ImageAsAnArtifact):
__type_version__ = '2.0'
file = definitions.BinaryObject(required=False)
legacy_image_id = definitions.String(required=False, mutable=False,
pattern=R'[0-9a-f]{8}-[0-9a-f]{4}'
R'-4[0-9a-f]{3}-[89ab]'
R'[0-9a-f]{3}-[0-9a-f]{12}')
def __pre_publish__(self, context, *args, **kwargs):
super(ImageAsAnArtifact, self).__pre_publish__(*args, **kwargs)
if self.file is None and self.legacy_image_id is None:
raise exception.InvalidArtifactPropertyValue(
message=_("Either a file or a legacy_image_id has to be "
"specified")
)
if self.file is not None and self.legacy_image_id is not None:
raise exception.InvalidArtifactPropertyValue(
message=_("Both file and legacy_image_id may not be "
"specified at the same time"))
if self.legacy_image_id:
glance_endpoint = next(service['endpoints'][0]['publicURL']
for service in context.service_catalog
if service['name'] == 'glance')
# Ensure glanceclient is imported correctly since we are catching
# the ImportError on initialization
if glanceclient is None:
raise ImportError(_("Glance client not installed"))
try:
client = glanceclient.Client(version=2,
endpoint=glance_endpoint,
token=context.auth_token)
legacy_image = client.images.get(self.legacy_image_id)
except Exception:
raise exception.InvalidArtifactPropertyValue(
message=_('Unable to get legacy image')
)
if legacy_image is not None:
self.file = definitions.Blob(size=legacy_image.size,
locations=[
{
"status": "active",
"value":
legacy_image.direct_url
}],
checksum=legacy_image.checksum,
item_key=legacy_image.id)
else:
raise exception.InvalidArtifactPropertyValue(
message=_("Legacy image was not found")
)


@ -1,19 +0,0 @@
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.contrib.plugins.image_artifact.v1 import image as v1
from glance.contrib.plugins.image_artifact.v1_1 import image as v1_1
from glance.contrib.plugins.image_artifact.v2 import image as v2
versions = [v1.ImageAsAnArtifact, v1_1.ImageAsAnArtifact, v2.ImageAsAnArtifact]


@ -24,11 +24,9 @@ from wsme.rest import json
from glance.api.v2.model.metadef_property_type import PropertyType
from glance.common import crypt
from glance.common import exception
from glance.common.glare import serialization
from glance.common import location_strategy
import glance.domain
import glance.domain.proxy
from glance import glare as ga
from glance.i18n import _
CONF = cfg.CONF
@ -59,99 +57,6 @@ IMAGE_ATTRS = BASE_MODEL_ATTRS | set(['name', 'status', 'size', 'virtual_size',
'protected'])
class ArtifactRepo(object):
fields = ['id', 'name', 'version', 'type_name', 'type_version',
'visibility', 'state', 'owner', 'scope', 'created_at',
'updated_at', 'tags', 'dependencies', 'blobs', 'properties']
def __init__(self, context, db_api, plugins):
self.context = context
self.db_api = db_api
self.plugins = plugins
def get(self, artifact_id, type_name=None, type_version=None,
show_level=None, include_deleted=False):
if show_level is None:
show_level = ga.Showlevel.BASIC
try:
db_api_artifact = self.db_api.artifact_get(self.context,
artifact_id,
type_name,
type_version,
show_level)
if db_api_artifact["state"] == 'deleted' and not include_deleted:
raise exception.ArtifactNotFound(artifact_id)
except (exception.ArtifactNotFound, exception.ArtifactForbidden):
msg = _("No artifact found with ID %s") % artifact_id
raise exception.ArtifactNotFound(msg)
return serialization.deserialize_from_db(db_api_artifact, self.plugins)
def list(self, marker=None, limit=None,
sort_keys=None, sort_dirs=None, filters=None,
show_level=None):
sort_keys = ['created_at'] if sort_keys is None else sort_keys
sort_dirs = ['desc'] if sort_dirs is None else sort_dirs
if show_level is None:
show_level = ga.Showlevel.NONE
db_api_artifacts = self.db_api.artifact_get_all(
self.context, filters=filters, marker=marker, limit=limit,
sort_keys=sort_keys, sort_dirs=sort_dirs, show_level=show_level)
artifacts = []
for db_api_artifact in db_api_artifacts:
artifact = serialization.deserialize_from_db(db_api_artifact,
self.plugins)
artifacts.append(artifact)
return artifacts
def _format_artifact_from_db(self, db_artifact):
kwargs = {k: db_artifact.get(k, None) for k in self.fields}
return glance.domain.Artifact(**kwargs)
def add(self, artifact):
artifact_values = serialization.serialize_for_db(artifact)
artifact_values['updated_at'] = artifact.updated_at
self.db_api.artifact_create(self.context, artifact_values,
artifact.type_name, artifact.type_version)
def save(self, artifact):
artifact_values = serialization.serialize_for_db(artifact)
try:
db_api_artifact = self.db_api.artifact_update(
self.context,
artifact_values,
artifact.id,
artifact.type_name,
artifact.type_version)
except (exception.ArtifactNotFound,
exception.ArtifactForbidden):
msg = _("No artifact found with ID %s") % artifact.id
raise exception.ArtifactNotFound(msg)
return serialization.deserialize_from_db(db_api_artifact, self.plugins)
def remove(self, artifact):
try:
self.db_api.artifact_delete(self.context, artifact.id,
artifact.type_name,
artifact.type_version)
except (exception.NotFound, exception.Forbidden):
msg = _("No artifact found with ID %s") % artifact.id
raise exception.ArtifactNotFound(msg)
def publish(self, artifact):
try:
artifact_changed = (
self.db_api.artifact_publish(
self.context,
artifact.id,
artifact.type_name,
artifact.type_version))
return serialization.deserialize_from_db(artifact_changed,
self.plugins)
except (exception.NotFound, exception.Forbidden):
msg = _("No artifact found with ID %s") % artifact.id
raise exception.ArtifactNotFound(msg)
class ImageRepo(object):
    def __init__(self, context, db_api):

View File

@ -49,7 +49,7 @@ EXPAND_BRANCH = 'expand'
CONTRACT_BRANCH = 'contract'
CURRENT_RELEASE = 'pike'
ALEMBIC_INIT_VERSION = 'liberty'
LATEST_REVISION = 'ocata01'
LATEST_REVISION = 'pike01'
INIT_VERSION = 0
MIGRATE_REPO_PATH = os.path.join(

View File

@ -32,7 +32,6 @@ database back-end.
import functools
from glance.db import utils as db_utils
from glance import glare
from glance.registry.client.v2 import api
@ -545,53 +544,3 @@ def metadef_tag_delete_namespace_content(
@_get_client
def metadef_tag_count(client, namespace_name, session=None):
    return client.metadef_tag_count(namespace_name=namespace_name)
@_get_client
def artifact_create(client, values,
type_name, type_version=None, session=None):
return client.artifact_create(values=values,
type_name=type_name,
type_version=type_version)
@_get_client
def artifact_update(client, values, artifact_id,
type_name, type_version=None, session=None):
return client.artifact_update(values=values, artifact_id=artifact_id,
type_name=type_name,
type_version=type_version)
@_get_client
def artifact_delete(client, artifact_id,
type_name, type_version=None, session=None):
return client.artifact_delete(artifact_id=artifact_id,
type_name=type_name,
type_version=type_version)
@_get_client
def artifact_get(client, artifact_id,
type_name, type_version=None, session=None):
return client.artifact_get(artifact_id=artifact_id,
type_name=type_name,
type_version=type_version)
@_get_client
def artifact_get_all(client, marker=None, limit=None, sort_key=None,
sort_dir=None, filters=None,
show_level=glare.Showlevel.NONE, session=None):
if filters is None:
filters = {}
    return client.artifact_get_all(marker, limit, sort_key,
                                   sort_dir, filters, show_level)
@_get_client
def artifact_publish(client, artifact_id,
type_name, type_version=None, session=None):
return client.artifact_publish(artifact_id=artifact_id,
type_name=type_name,
type_version=type_version)
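
For readers skimming the removal: every wrapper above relies on this module's @_get_client decorator, which turns a context-first call into a client-first call. Roughly the following sketch, reconstructed from memory rather than from this diff, so the helper name is an assumption:

import functools

from glance.registry.client.v2 import api

def _get_client(func):
    # Sketch only: build a registry client from the request context and pass
    # it as the first argument, so callers never construct a client
    # themselves. get_registry_client is the assumed helper name.
    @functools.wraps(func)
    def wrapper(context, *args, **kwargs):
        client = api.get_registry_client(context)
        return func(client, *args, **kwargs)
    return wrapper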

View File

@ -43,12 +43,6 @@ DATA = {
    'locations': [],
    'tasks': {},
    'task_info': {},
    'artifacts': {},
    'artifact_properties': {},
    'artifact_tags': {},
    'artifact_dependencies': {},
    'artifact_blobs': {},
    'artifact_blob_locations': {}
}
INDEX = 0
@ -85,7 +79,6 @@ def reset():
    'locations': [],
    'tasks': {},
    'task_info': {},
    'artifacts': {}
}
@ -1939,96 +1932,6 @@ def metadef_tag_count(context, namespace_name):
    return count
def _artifact_format(artifact_id, **values):
dt = timeutils.utcnow()
artifact = {
'id': artifact_id,
'type_name': None,
'type_version_prefix': None,
'type_version_suffix': None,
'type_version_meta': None,
'version_prefix': None,
'version_suffix': None,
'version_meta': None,
'description': None,
'visibility': None,
'state': None,
'owner': None,
'scope': None,
'tags': [],
'properties': {},
'blobs': [],
'created_at': dt,
'updated_at': dt,
'deleted_at': None,
'deleted': False,
}
artifact.update(values)
return artifact
@log_call
def artifact_create(context, values, type_name, type_version):
global DATA
artifact_id = values.get('id', str(uuid.uuid4()))
if artifact_id in DATA['artifacts']:
raise exception.Duplicate()
if 'state' not in values:
raise exception.Invalid('state is a required attribute')
allowed_keys = set(['id',
'type_name',
'type_version',
'name',
'version',
'description',
'visibility',
'state',
'owner',
'scope'])
incorrect_keys = set(values.keys()) - allowed_keys
if incorrect_keys:
raise exception.Invalid(
'The keys %s are not valid' % str(incorrect_keys))
artifact = _artifact_format(artifact_id, **values)
DATA['artifacts'][artifact_id] = artifact
return copy.deepcopy(artifact)
def _artifact_get(context, artifact_id, type_name,
type_version=None):
try:
artifact = DATA['artifacts'][artifact_id]
if (artifact['type_name'] != type_name or
(type_version is not None and
artifact['type_version'] != type_version)):
raise KeyError
except KeyError:
LOG.info(_LI('Could not find artifact %s'), artifact_id)
raise exception.NotFound()
if artifact['deleted_at']:
        LOG.info(_LI('Unable to get deleted artifact'))
raise exception.NotFound()
return artifact
@log_call
def artifact_get(context, artifact_id,
type_name,
type_version=None, session=None):
artifact = _artifact_get(context, artifact_id, type_name,
type_version)
return copy.deepcopy(artifact)
def _format_association(namespace, resource_type, association_values):
    association = {
        'namespace_id': namespace['id'],

View File

@ -1,6 +1,3 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@ -13,9 +10,17 @@
# License for the specific language governing permissions and limitations
# under the License.
from glance.contrib.plugins.artifacts_sample import base
class MyArtifact(base.BaseArtifact):
    __type_version__ = "1.0.1"
# NOTE(rosmaita): This file implements the migration interface, but doesn't
# migrate any data. The pike01 migration is contract-only.
def has_migrations(engine):
    """Returns true if at least one data row can be migrated."""
    return False
def migrate(engine):
    """Return the number of rows migrated."""
    return 0

View File

@ -20,7 +20,6 @@ from alembic import context
from sqlalchemy import engine_from_config, pool
from glance.db.sqlalchemy import models
from glance.db.sqlalchemy import models_glare
from glance.db.sqlalchemy import models_metadef
# this is the Alembic Config object, which provides
@ -39,8 +38,6 @@ log_config.fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = models.BASE.metadata
for table in models_glare.BASE.metadata.sorted_tables:
    target_metadata._add_table(table.name, table.schema, table)
for table in models_metadef.BASE_DICT.metadata.sorted_tables:
    target_metadata._add_table(table.name, table.schema, table)

View File

@ -0,0 +1,41 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""drop glare artifacts tables
Revision ID: pike01
Revises: ocata01
Create Date: 2017-02-08 20:32:51.200867
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'pike01'
down_revision = 'ocata01'
branch_labels = None
depends_on = None
def upgrade():
# create list of artifact tables in reverse order of their creation
table_names = []
table_names.append('artifact_blob_locations')
table_names.append('artifact_properties')
table_names.append('artifact_blobs')
table_names.append('artifact_dependencies')
table_names.append('artifact_tags')
table_names.append('artifacts')
for table_name in table_names:
op.drop_table(table_name=table_name)
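
The revision above is normally applied through glance-manage, but the plain Alembic command API can drive it as well; a sketch, assuming an alembic.ini configured with Glance's script_location and database URL:

from alembic import command
from alembic.config import Config

# Assumption: alembic.ini points at the Glance migration environment;
# 'pike01' is the all-in-one revision added above.
cfg = Config('alembic.ini')
command.upgrade(cfg, 'pike01')  # runs the drop-table migration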

View File

@ -0,0 +1,41 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""drop glare artifacts tables
Revision ID: pike_contract01
Revises: ocata_contract01
Create Date: 2017-02-09 20:32:51.222867
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'pike_contract01'
down_revision = 'ocata_contract01'
branch_labels = None
depends_on = 'pike_expand01'
def upgrade():
# create list of artifact tables in reverse order of their creation
table_names = []
table_names.append('artifact_blob_locations')
table_names.append('artifact_properties')
table_names.append('artifact_blobs')
table_names.append('artifact_dependencies')
table_names.append('artifact_tags')
table_names.append('artifacts')
for table_name in table_names:
op.drop_table(table_name=table_name)

View File

@ -1,6 +1,3 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@ -13,11 +10,20 @@
# License for the specific language governing permissions and limitations
# under the License.
from glance.common.glare import definitions
from glance.contrib.plugins.artifacts_sample import base
class MyArtifact(base.BaseArtifact):
    __type_version__ = "2.0"
    depends_on = definitions.ArtifactReference(type_name="MyArtifact")
"""empty expand for symmetry with pike_contract01
Revision ID: pike_expand01
Revises: ocata_expand01
Create Date: 2017-02-09 19:55:16.657499
"""
# revision identifiers, used by Alembic.
revision = 'pike_expand01'
down_revision = 'ocata_expand01'
branch_labels = None
depends_on = None
def upgrade():
    pass

View File

@ -43,7 +43,6 @@ import sqlalchemy.sql as sa_sql
from glance.common import exception
from glance.common import timeutils
from glance.common import utils
from glance.db.sqlalchemy import glare
from glance.db.sqlalchemy.metadef_api import (resource_type
                                              as metadef_resource_type_api)
from glance.db.sqlalchemy.metadef_api import (resource_type_association
@ -54,7 +53,6 @@ from glance.db.sqlalchemy.metadef_api import property as metadef_property_api
from glance.db.sqlalchemy.metadef_api import tag as metadef_tag_api
from glance.db.sqlalchemy import models
from glance.db import utils as db_utils
from glance import glare as ga
from glance.i18n import _, _LW, _LI
sa_logger = None
@ -1873,58 +1871,3 @@ def metadef_tag_count(context, namespace_name, session=None):
    """Get count of tags for a namespace, raise if ns doesn't exist."""
    session = session or get_session()
    return metadef_tag_api.count(context, namespace_name, session)
def artifact_create(context, values, type_name,
type_version=None, session=None):
session = session or get_session()
artifact = glare.create(context, values, session, type_name,
type_version)
return artifact
def artifact_delete(context, artifact_id, type_name,
type_version=None, session=None):
session = session or get_session()
artifact = glare.delete(context, artifact_id, session, type_name,
type_version)
return artifact
def artifact_update(context, values, artifact_id, type_name,
type_version=None, session=None):
session = session or get_session()
artifact = glare.update(context, values, artifact_id, session,
type_name, type_version)
return artifact
def artifact_get(context, artifact_id,
type_name=None,
type_version=None,
show_level=ga.Showlevel.BASIC,
session=None):
session = session or get_session()
return glare.get(context, artifact_id, session, type_name,
type_version, show_level)
def artifact_publish(context,
artifact_id,
type_name,
type_version=None,
session=None):
session = session or get_session()
return glare.publish(context,
artifact_id,
session,
type_name,
type_version)
def artifact_get_all(context, marker=None, limit=None, sort_keys=None,
sort_dirs=None, filters=None,
show_level=ga.Showlevel.NONE, session=None):
session = session or get_session()
return glare.get_all(context, session, marker, limit, sort_keys,
sort_dirs, filters, show_level)

View File

@ -1,784 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import operator
import uuid
from enum import Enum
from oslo_db import exception as db_exc
import sqlalchemy
from sqlalchemy import and_
from sqlalchemy import case
from sqlalchemy import or_
import sqlalchemy.orm as orm
from sqlalchemy.orm import joinedload
from glance.common import exception
from glance.common import semver_db
from glance.common import timeutils
from glance.db.sqlalchemy import models_glare as models
import glance.glare as ga
from glance.i18n import _LE, _LW
from oslo_log import log as os_logging
LOG = os_logging.getLogger(__name__)
class Visibility(Enum):
PRIVATE = 'private'
PUBLIC = 'public'
SHARED = 'shared'
class State(Enum):
CREATING = 'creating'
ACTIVE = 'active'
DEACTIVATED = 'deactivated'
DELETED = 'deleted'
TRANSITIONS = {
State.CREATING: [State.ACTIVE, State.DELETED],
State.ACTIVE: [State.DEACTIVATED, State.DELETED],
State.DEACTIVATED: [State.ACTIVE, State.DELETED],
State.DELETED: []
}
def create(context, values, session, type_name, type_version=None):
return _out(_create_or_update(context, values, None, session,
type_name, type_version))
def update(context, values, artifact_id, session,
type_name, type_version=None):
return _out(_create_or_update(context, values, artifact_id, session,
type_name, type_version))
def delete(context, artifact_id, session, type_name, type_version=None):
values = {'state': 'deleted'}
return _out(_create_or_update(context, values, artifact_id, session,
type_name, type_version))
def _create_or_update(context, values, artifact_id, session, type_name,
type_version=None):
values = copy.deepcopy(values)
with session.begin():
_set_version_fields(values)
_validate_values(values)
_drop_protected_attrs(models.Artifact, values)
if artifact_id:
# update existing artifact
state = values.get('state')
show_level = ga.Showlevel.BASIC
if state is not None:
if state == 'active':
show_level = ga.Showlevel.DIRECT
values['published_at'] = timeutils.utcnow()
if state == 'deleted':
values['deleted_at'] = timeutils.utcnow()
artifact = _get(context, artifact_id, session, type_name,
type_version, show_level=show_level)
_validate_transition(artifact.state,
values.get('state') or artifact.state)
else:
# create new artifact
artifact = models.Artifact()
if 'id' not in values:
artifact.id = str(uuid.uuid4())
else:
artifact.id = values['id']
if 'tags' in values:
tags = values.pop('tags')
artifact.tags = _do_tags(artifact, tags)
if 'properties' in values:
properties = values.pop('properties', {})
artifact.properties = _do_properties(artifact, properties)
if 'blobs' in values:
blobs = values.pop('blobs')
artifact.blobs = _do_blobs(artifact, blobs)
if 'dependencies' in values:
dependencies = values.pop('dependencies')
_do_dependencies(artifact, dependencies, session)
if values.get('state', None) == 'publish':
artifact.dependencies.extend(
_do_transitive_dependencies(artifact, session))
artifact.update(values)
try:
artifact.save(session=session)
except db_exc.DBDuplicateEntry:
LOG.warn(_LW("Artifact with the specified type, name and version "
"already exists"))
raise exception.ArtifactDuplicateNameTypeVersion()
return artifact
def get(context, artifact_id, session, type_name=None, type_version=None,
show_level=ga.Showlevel.BASIC):
artifact = _get(context, artifact_id, session, type_name, type_version,
show_level)
return _out(artifact, show_level)
def publish(context, artifact_id, session, type_name,
type_version=None):
"""
    Transitive dependencies are not created initially, so they have to be
    created manually by calling this function.
It creates transitive dependencies for the given artifact_id and saves
them in DB.
:returns: artifact dict with Transitive show level
"""
values = {'state': 'active'}
return _out(_create_or_update(context, values, artifact_id, session,
type_name, type_version))
def _validate_transition(source_state, target_state):
if target_state == source_state:
return
try:
source_state = State(source_state)
target_state = State(target_state)
except ValueError:
raise exception.InvalidArtifactStateTransition(source=source_state,
target=target_state)
if (source_state not in TRANSITIONS or
target_state not in TRANSITIONS[source_state]):
raise exception.InvalidArtifactStateTransition(source=source_state,
target=target_state)
def _out(artifact, show_level=ga.Showlevel.BASIC, show_text_properties=True):
"""
    Transform a SQLAlchemy artifact object into a dict, depending on the
    show level.
    :param artifact: SQLAlchemy artifact model object
:param show_level: constant from Showlevel class
:param show_text_properties: for performance optimization it's possible
to disable loading of massive text properties
:returns: generated dict
"""
res = artifact.to_dict(show_level=show_level,
show_text_properties=show_text_properties)
if show_level >= ga.Showlevel.DIRECT:
dependencies = artifact.dependencies
dependencies.sort(key=lambda elem: (elem.artifact_origin,
elem.name, elem.position))
res['dependencies'] = {}
if show_level == ga.Showlevel.DIRECT:
new_show_level = ga.Showlevel.BASIC
else:
new_show_level = ga.Showlevel.TRANSITIVE
for dep in dependencies:
if dep.artifact_origin == artifact.id:
# make array
for p in res['dependencies'].keys():
if p == dep.name:
# add value to array
res['dependencies'][p].append(
_out(dep.dest, new_show_level))
break
else:
# create new array
deparr = [_out(dep.dest, new_show_level)]
res['dependencies'][dep.name] = deparr
return res
def _get(context, artifact_id, session, type_name=None, type_version=None,
show_level=ga.Showlevel.BASIC):
values = dict(id=artifact_id)
if type_name is not None:
values['type_name'] = type_name
if type_version is not None:
values['type_version'] = type_version
_set_version_fields(values)
try:
if show_level == ga.Showlevel.NONE:
query = (
session.query(models.Artifact).
options(joinedload(models.Artifact.tags)).
filter_by(**values))
else:
query = (
session.query(models.Artifact).
options(joinedload(models.Artifact.properties)).
options(joinedload(models.Artifact.tags)).
options(joinedload(models.Artifact.blobs).
joinedload(models.ArtifactBlob.locations)).
filter_by(**values))
artifact = query.one()
except orm.exc.NoResultFound:
LOG.warn(_LW("Artifact with id=%s not found") % artifact_id)
raise exception.ArtifactNotFound(id=artifact_id)
if not _check_visibility(context, artifact):
LOG.warn(_LW("Artifact with id=%s is not accessible") % artifact_id)
raise exception.ArtifactForbidden(id=artifact_id)
return artifact
def get_all(context, session, marker=None, limit=None,
sort_keys=None, sort_dirs=None, filters=None,
show_level=ga.Showlevel.NONE):
"""List all visible artifacts"""
filters = filters or {}
artifacts = _get_all(
context, session, filters, marker,
limit, sort_keys, sort_dirs, show_level)
return [_out(ns, show_level, show_text_properties=False)
for ns in artifacts]
def _get_all(context, session, filters=None, marker=None,
limit=None, sort_keys=None, sort_dirs=None,
show_level=ga.Showlevel.NONE):
"""Get all namespaces that match zero or more filters.
:param filters: dict of filter keys and values.
:param marker: namespace id after which to start page
:param limit: maximum number of namespaces to return
:param sort_keys: namespace attributes by which results should be sorted
:param sort_dirs: directions in which results should be sorted (asc, desc)
"""
filters = filters or {}
query = _do_artifacts_query(context, session, show_level)
basic_conds, tag_conds, prop_conds = _do_query_filters(filters)
if basic_conds:
for basic_condition in basic_conds:
query = query.filter(and_(*basic_condition))
if tag_conds:
for tag_condition in tag_conds:
query = query.join(models.ArtifactTag, aliased=True).filter(
and_(*tag_condition))
if prop_conds:
for prop_condition in prop_conds:
query = query.join(models.ArtifactProperty, aliased=True).filter(
and_(*prop_condition))
marker_artifact = None
if marker is not None:
marker_artifact = _get(context, marker, session, None, None)
if sort_keys is None:
sort_keys = [('created_at', None), ('id', None)]
sort_dirs = ['desc', 'desc']
else:
for key in [('created_at', None), ('id', None)]:
if key not in sort_keys:
sort_keys.append(key)
sort_dirs.append('desc')
# Note(mfedosin): Workaround to deal with situation that sqlalchemy cannot
# work with composite keys correctly
if ('version', None) in sort_keys:
i = sort_keys.index(('version', None))
version_sort_dir = sort_dirs[i]
sort_keys[i:i + 1] = [('version_prefix', None),
('version_suffix', None),
('version_meta', None)]
sort_dirs[i:i + 1] = [version_sort_dir] * 3
query = _do_paginate_query(query=query,
limit=limit,
sort_keys=sort_keys,
marker=marker_artifact,
sort_dirs=sort_dirs)
return query.all()
def _do_paginate_query(query, sort_keys=None, sort_dirs=None,
marker=None, limit=None):
# Default the sort direction to ascending
sort_dir = 'asc'
# Ensure a per-column sort direction
if sort_dirs is None:
sort_dirs = [sort_dir] * len(sort_keys)
assert(len(sort_dirs) == len(sort_keys)) # nosec
# nosec: This function runs safely if the assertion fails.
if len(sort_dirs) < len(sort_keys):
sort_dirs += [sort_dir] * (len(sort_keys) - len(sort_dirs))
# Add sorting
for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
try:
sort_dir_func = {
'asc': sqlalchemy.asc,
'desc': sqlalchemy.desc,
}[current_sort_dir]
except KeyError:
raise ValueError(_LE("Unknown sort direction, "
"must be 'desc' or 'asc'"))
if current_sort_key[1] is None:
# sort by generic property
query = query.order_by(sort_dir_func(getattr(
models.Artifact,
current_sort_key[0])))
else:
# sort by custom property
prop_type = current_sort_key[1] + "_value"
query = (
query.join(models.ArtifactProperty).
filter(models.ArtifactProperty.name == current_sort_key[0]).
order_by(sort_dir_func(getattr(models.ArtifactProperty,
prop_type))))
default = ''
# Add pagination
if marker is not None:
marker_values = []
for sort_key in sort_keys:
v = getattr(marker, sort_key[0])
if v is None:
v = default
marker_values.append(v)
# Build up an array of sort criteria as in the docstring
criteria_list = []
for i in range(len(sort_keys)):
crit_attrs = []
if marker_values[i] is None:
continue
for j in range(i):
if sort_keys[j][1] is None:
model_attr = getattr(models.Artifact, sort_keys[j][0])
else:
model_attr = getattr(models.ArtifactProperty,
sort_keys[j][1] + "_value")
default = None if isinstance(
model_attr.property.columns[0].type,
sqlalchemy.DateTime) else ''
attr = case([(model_attr != None,
model_attr), ],
else_=default)
crit_attrs.append((attr == marker_values[j]))
if sort_keys[i][1] is None:
model_attr = getattr(models.Artifact, sort_keys[i][0])
else:
model_attr = getattr(models.ArtifactProperty,
sort_keys[i][1] + "_value")
default = None if isinstance(model_attr.property.columns[0].type,
sqlalchemy.DateTime) else ''
attr = case([(model_attr != None,
model_attr), ],
else_=default)
if sort_dirs[i] == 'desc':
crit_attrs.append((attr < marker_values[i]))
else:
crit_attrs.append((attr > marker_values[i]))
criteria = and_(*crit_attrs)
criteria_list.append(criteria)
f = or_(*criteria_list)
query = query.filter(f)
if limit is not None:
query = query.limit(limit)
return query
def _do_artifacts_query(context, session, show_level=ga.Showlevel.NONE):
"""Build the query to get all artifacts based on the context"""
LOG.debug("context.is_admin=%(is_admin)s; context.owner=%(owner)s",
{'is_admin': context.is_admin, 'owner': context.owner})
if show_level == ga.Showlevel.NONE:
query = session.query(models.Artifact).options(
joinedload(models.Artifact.tags))
elif show_level == ga.Showlevel.BASIC:
query = (
session.query(models.Artifact).
options(joinedload(
models.Artifact.properties).
defer(models.ArtifactProperty.text_value)).
options(joinedload(models.Artifact.tags)).
options(joinedload(models.Artifact.blobs).
joinedload(models.ArtifactBlob.locations)))
else:
# other show_levels aren't supported
msg = _LW("Show level %s is not supported in this "
"operation") % ga.Showlevel.to_str(show_level)
LOG.warn(msg)
raise exception.ArtifactUnsupportedShowLevel(shl=show_level)
# If admin, return everything.
if context.is_admin:
return query
else:
# If regular user, return only public artifacts.
# However, if context.owner has a value, return both
# public and private artifacts of the context.owner.
if context.owner is not None:
query = query.filter(
or_(models.Artifact.owner == context.owner,
models.Artifact.visibility == 'public'))
else:
query = query.filter(
models.Artifact.visibility == 'public')
return query
op_mappings = {
'EQ': operator.eq,
'GT': operator.gt,
'GE': operator.ge,
'LT': operator.lt,
'LE': operator.le,
'NE': operator.ne,
'IN': operator.eq # it must be eq
}
def _do_query_filters(filters):
basic_conds = []
tag_conds = []
prop_conds = []
# don't show deleted artifacts
basic_conds.append([models.Artifact.state != 'deleted'])
visibility = filters.pop('visibility', None)
if visibility is not None:
# ignore operator. always consider it EQ
basic_conds.append(
[models.Artifact.visibility == visibility[0]['value']])
type_name = filters.pop('type_name', None)
if type_name is not None:
# ignore operator. always consider it EQ
basic_conds.append([models.Artifact.type_name == type_name['value']])
type_version = filters.pop('type_version', None)
if type_version is not None:
# ignore operator. always consider it EQ
# TODO(mfedosin) add support of LIKE operator
type_version = semver_db.parse(type_version['value'])
basic_conds.append([models.Artifact.type_version == type_version])
name = filters.pop('name', None)
if name is not None:
# ignore operator. always consider it EQ
basic_conds.append([models.Artifact.name == name[0]['value']])
versions = filters.pop('version', None)
if versions is not None:
for version in versions:
value = semver_db.parse(version['value'])
op = version['operator']
fn = op_mappings[op]
basic_conds.append([fn(models.Artifact.version, value)])
state = filters.pop('state', None)
if state is not None:
# ignore operator. always consider it EQ
basic_conds.append([models.Artifact.state == state['value']])
owner = filters.pop('owner', None)
if owner is not None:
# ignore operator. always consider it EQ
basic_conds.append([models.Artifact.owner == owner[0]['value']])
id_list = filters.pop('id_list', None)
if id_list is not None:
basic_conds.append([models.Artifact.id.in_(id_list['value'])])
name_list = filters.pop('name_list', None)
if name_list is not None:
basic_conds.append([models.Artifact.name.in_(name_list['value'])])
tags = filters.pop('tags', None)
if tags is not None:
for tag in tags:
tag_conds.append([models.ArtifactTag.value == tag['value']])
# process remaining filters
for filtername, filtervalues in filters.items():
for filtervalue in filtervalues:
db_prop_op = filtervalue['operator']
db_prop_value = filtervalue['value']
db_prop_type = filtervalue['type'] + "_value"
db_prop_position = filtervalue.get('position')
conds = [models.ArtifactProperty.name == filtername]
if db_prop_op in op_mappings:
fn = op_mappings[db_prop_op]
result = fn(getattr(models.ArtifactProperty, db_prop_type),
db_prop_value)
cond = [result]
                if db_prop_position != 'any':
cond.append(
models.ArtifactProperty.position == db_prop_position)
if db_prop_op == 'IN':
if (db_prop_position is not None and
                            db_prop_position != 'any'):
msg = _LE("Cannot use this parameter with "
"the operator IN")
LOG.error(msg)
raise exception.ArtifactInvalidPropertyParameter(
op='IN')
cond = [result,
models.ArtifactProperty.position >= 0]
else:
msg = _LE("Operator %s is not supported") % db_prop_op
LOG.error(msg)
raise exception.ArtifactUnsupportedPropertyOperator(
op=db_prop_op)
conds.extend(cond)
prop_conds.append(conds)
return basic_conds, tag_conds, prop_conds
def _do_tags(artifact, new_tags):
tags_to_update = []
# don't touch existing tags
for tag in artifact.tags:
if tag.value in new_tags:
tags_to_update.append(tag)
new_tags.remove(tag.value)
# add new tags
for tag in new_tags:
db_tag = models.ArtifactTag()
db_tag.value = tag
tags_to_update.append(db_tag)
return tags_to_update
def _do_property(propname, prop, position=None):
db_prop = models.ArtifactProperty()
db_prop.name = propname
setattr(db_prop,
(prop['type'] + "_value"),
prop['value'])
db_prop.position = position
return db_prop
def _do_properties(artifact, new_properties):
props_to_update = []
# don't touch existing properties
for prop in artifact.properties:
if prop.name not in new_properties:
props_to_update.append(prop)
for propname, prop in new_properties.items():
if prop['type'] == 'array':
for pos, arrprop in enumerate(prop['value']):
props_to_update.append(
_do_property(propname, arrprop, pos)
)
else:
props_to_update.append(
_do_property(propname, prop)
)
return props_to_update
def _do_blobs(artifact, new_blobs):
blobs_to_update = []
# don't touch existing blobs
for blob in artifact.blobs:
if blob.name not in new_blobs:
blobs_to_update.append(blob)
for blobname, blobs in new_blobs.items():
for pos, blob in enumerate(blobs):
for db_blob in artifact.blobs:
if db_blob.name == blobname and db_blob.position == pos:
# update existing blobs
db_blob.size = blob['size']
db_blob.checksum = blob['checksum']
db_blob.item_key = blob['item_key']
db_blob.locations = _do_locations(db_blob,
blob['locations'])
blobs_to_update.append(db_blob)
break
else:
# create new blob
db_blob = models.ArtifactBlob()
db_blob.name = blobname
db_blob.size = blob['size']
db_blob.checksum = blob['checksum']
db_blob.item_key = blob['item_key']
db_blob.position = pos
db_blob.locations = _do_locations(db_blob, blob['locations'])
blobs_to_update.append(db_blob)
return blobs_to_update
def _do_locations(blob, new_locations):
locs_to_update = []
for pos, loc in enumerate(new_locations):
for db_loc in blob.locations:
if db_loc.value == loc['value']:
# update existing location
db_loc.position = pos
db_loc.status = loc['status']
locs_to_update.append(db_loc)
break
else:
# create new location
db_loc = models.ArtifactBlobLocation()
db_loc.value = loc['value']
db_loc.status = loc['status']
db_loc.position = pos
locs_to_update.append(db_loc)
return locs_to_update
def _do_dependencies(artifact, new_dependencies, session):
deps_to_update = []
# small check that all dependencies are new
if artifact.dependencies is not None:
for db_dep in artifact.dependencies:
for dep in new_dependencies.keys():
if db_dep.name == dep:
msg = _LW("Artifact with the specified type, name "
"and versions already has the direct "
"dependency=%s") % dep
LOG.warn(msg)
# change values of former dependency
for dep in artifact.dependencies:
session.delete(dep)
artifact.dependencies = []
for depname, depvalues in new_dependencies.items():
for pos, depvalue in enumerate(depvalues):
db_dep = models.ArtifactDependency()
db_dep.name = depname
db_dep.artifact_source = artifact.id
db_dep.artifact_dest = depvalue
db_dep.artifact_origin = artifact.id
db_dep.is_direct = True
db_dep.position = pos
deps_to_update.append(db_dep)
artifact.dependencies = deps_to_update
def _do_transitive_dependencies(artifact, session):
deps_to_update = []
for dependency in artifact.dependencies:
depvalue = dependency.artifact_dest
transitdeps = session.query(models.ArtifactDependency).filter_by(
artifact_source=depvalue).all()
for transitdep in transitdeps:
if not transitdep.is_direct:
# transitive dependencies are already created
msg = _LW("Artifact with the specified type, "
"name and version already has the "
"direct dependency=%d") % transitdep.id
LOG.warn(msg)
raise exception.ArtifactDuplicateTransitiveDependency(
dep=transitdep.id)
db_dep = models.ArtifactDependency()
db_dep.name = transitdep['name']
db_dep.artifact_source = artifact.id
db_dep.artifact_dest = transitdep.artifact_dest
db_dep.artifact_origin = transitdep.artifact_source
db_dep.is_direct = False
db_dep.position = transitdep.position
deps_to_update.append(db_dep)
return deps_to_update
def _check_visibility(context, artifact):
if context.is_admin:
return True
if not artifact.owner:
return True
if artifact.visibility == Visibility.PUBLIC.value:
return True
if artifact.visibility == Visibility.PRIVATE.value:
if context.owner and context.owner == artifact.owner:
return True
else:
return False
if artifact.visibility == Visibility.SHARED.value:
return False
return False
def _set_version_fields(values):
if 'type_version' in values:
values['type_version'] = semver_db.parse(values['type_version'])
if 'version' in values:
values['version'] = semver_db.parse(values['version'])
def _validate_values(values):
if 'state' in values:
try:
State(values['state'])
except ValueError:
msg = "Invalid artifact state '%s'" % values['state']
raise exception.Invalid(msg)
if 'visibility' in values:
try:
Visibility(values['visibility'])
except ValueError:
msg = "Invalid artifact visibility '%s'" % values['visibility']
raise exception.Invalid(msg)
# TODO(mfedosin): it's an idea to validate tags someday
# (check that all tags match the regexp)
def _drop_protected_attrs(model_class, values):
"""
    Remove protected attributes from the values dictionary using the model's
    __protected_attributes__ field.
"""
for attr in model_class.__protected_attributes__:
if attr in values:
del values[attr]
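
The TRANSITIONS table near the top of this file is the whole of the artifact lifecycle; _validate_transition simply consults it. A self-contained sketch of the same state machine, for reference:

from enum import Enum

class State(Enum):
    CREATING = 'creating'
    ACTIVE = 'active'
    DEACTIVATED = 'deactivated'
    DELETED = 'deleted'

TRANSITIONS = {
    State.CREATING: [State.ACTIVE, State.DELETED],
    State.ACTIVE: [State.DEACTIVATED, State.DELETED],
    State.DEACTIVATED: [State.ACTIVE, State.DELETED],
    State.DELETED: [],
}

def is_valid_transition(source, target):
    # Same rule as _validate_transition above, minus the exception plumbing.
    if source == target:
        return True
    return State(target) in TRANSITIONS[State(source)]

assert is_valid_transition('creating', 'active')
assert not is_valid_transition('deleted', 'active')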

View File

@ -1,337 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo_db.sqlalchemy import models
from sqlalchemy import BigInteger
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy.ext import declarative
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import Numeric
from sqlalchemy.orm import backref
from sqlalchemy.orm import composite
from sqlalchemy.orm import relationship
from sqlalchemy import String
from sqlalchemy import Text
from glance.common import semver_db
from glance.common import timeutils
import glance.glare as ga
BASE = declarative.declarative_base()
class ArtifactBase(models.ModelBase, models.TimestampMixin):
"""Base class for Artifact Models."""
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
__table_initialized__ = False
__protected_attributes__ = set([
"created_at", "updated_at"])
created_at = Column(DateTime, default=lambda: timeutils.utcnow(),
nullable=False)
updated_at = Column(DateTime, default=lambda: timeutils.utcnow(),
nullable=False, onupdate=lambda: timeutils.utcnow())
def save(self, session=None):
from glance.db.sqlalchemy import api as db_api
super(ArtifactBase, self).save(session or db_api.get_session())
def keys(self):
return self.__dict__.keys()
def values(self):
return self.__dict__.values()
def items(self):
return self.__dict__.items()
def to_dict(self):
d = {}
for c in self.__table__.columns:
d[c.name] = self[c.name]
return d
def _parse_property_type_value(prop, show_text_properties=True):
columns = [
'int_value',
'string_value',
'bool_value',
'numeric_value']
if show_text_properties:
columns.append('text_value')
for prop_type in columns:
if getattr(prop, prop_type) is not None:
return prop_type.rpartition('_')[0], getattr(prop, prop_type)
return None, None
class Artifact(BASE, ArtifactBase):
__tablename__ = 'artifacts'
__table_args__ = (
Index('ix_artifact_name_and_version', 'name', 'version_prefix',
'version_suffix'),
Index('ix_artifact_type', 'type_name', 'type_version_prefix',
'type_version_suffix'),
Index('ix_artifact_state', 'state'),
Index('ix_artifact_owner', 'owner'),
Index('ix_artifact_visibility', 'visibility'),
{'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})
__protected_attributes__ = ArtifactBase.__protected_attributes__.union(
set(['published_at', 'deleted_at']))
id = Column(String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
name = Column(String(255), nullable=False)
type_name = Column(String(255), nullable=False)
type_version_prefix = Column(BigInteger().with_variant(Integer, "sqlite"),
nullable=False)
type_version_suffix = Column(String(255))
type_version_meta = Column(String(255))
type_version = composite(semver_db.DBVersion, type_version_prefix,
type_version_suffix, type_version_meta,
comparator_factory=semver_db.VersionComparator)
version_prefix = Column(BigInteger().with_variant(Integer, "sqlite"),
nullable=False)
version_suffix = Column(String(255))
version_meta = Column(String(255))
version = composite(semver_db.DBVersion, version_prefix,
version_suffix, version_meta,
comparator_factory=semver_db.VersionComparator)
description = Column(Text)
visibility = Column(String(32), nullable=False)
state = Column(String(32), nullable=False)
owner = Column(String(255), nullable=False)
published_at = Column(DateTime)
deleted_at = Column(DateTime)
def to_dict(self, show_level=ga.Showlevel.BASIC,
show_text_properties=True):
d = super(Artifact, self).to_dict()
d.pop('type_version_prefix')
d.pop('type_version_suffix')
d.pop('type_version_meta')
d.pop('version_prefix')
d.pop('version_suffix')
d.pop('version_meta')
d['type_version'] = str(self.type_version)
d['version'] = str(self.version)
tags = []
for tag in self.tags:
tags.append(tag.value)
d['tags'] = tags
if show_level == ga.Showlevel.NONE:
return d
properties = {}
# sort properties
self.properties.sort(key=lambda elem: (elem.name, elem.position))
for prop in self.properties:
proptype, propvalue = _parse_property_type_value(
prop, show_text_properties)
if proptype is None:
continue
if prop.position is not None:
# make array
for p in properties.keys():
if p == prop.name:
# add value to array
properties[p]['value'].append(dict(type=proptype,
value=propvalue))
break
else:
# create new array
p = dict(type='array',
value=[])
p['value'].append(dict(type=proptype,
value=propvalue))
properties[prop.name] = p
else:
# make scalar
properties[prop.name] = dict(type=proptype,
value=propvalue)
d['properties'] = properties
blobs = {}
# sort blobs
self.blobs.sort(key=lambda elem: elem.position)
for blob in self.blobs:
locations = []
# sort locations
blob.locations.sort(key=lambda elem: elem.position)
for loc in blob.locations:
locations.append(dict(value=loc.value,
status=loc.status))
if blob.name in blobs:
blobs[blob.name].append(dict(size=blob.size,
checksum=blob.checksum,
locations=locations,
item_key=blob.item_key))
else:
blobs[blob.name] = []
blobs[blob.name].append(dict(size=blob.size,
checksum=blob.checksum,
locations=locations,
item_key=blob.item_key))
d['blobs'] = blobs
return d
class ArtifactDependency(BASE, ArtifactBase):
__tablename__ = 'artifact_dependencies'
__table_args__ = (Index('ix_artifact_dependencies_source_id',
'artifact_source'),
Index('ix_artifact_dependencies_origin_id',
'artifact_origin'),
Index('ix_artifact_dependencies_dest_id',
'artifact_dest'),
Index('ix_artifact_dependencies_direct_dependencies',
'artifact_source', 'is_direct'),
{'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})
id = Column(String(36), primary_key=True, nullable=False,
default=lambda: str(uuid.uuid4()))
artifact_source = Column(String(36), ForeignKey('artifacts.id'),
nullable=False)
artifact_dest = Column(String(36), ForeignKey('artifacts.id'),
nullable=False)
artifact_origin = Column(String(36), ForeignKey('artifacts.id'),
nullable=False)
is_direct = Column(Boolean, nullable=False)
position = Column(Integer)
name = Column(String(36))
source = relationship('Artifact',
backref=backref('dependencies', cascade="all, "
"delete"),
foreign_keys="ArtifactDependency.artifact_source")
dest = relationship('Artifact',
foreign_keys="ArtifactDependency.artifact_dest")
origin = relationship('Artifact',
foreign_keys="ArtifactDependency.artifact_origin")
class ArtifactTag(BASE, ArtifactBase):
__tablename__ = 'artifact_tags'
__table_args__ = (Index('ix_artifact_tags_artifact_id', 'artifact_id'),
Index('ix_artifact_tags_artifact_id_tag_value',
'artifact_id', 'value'),
{'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},)
id = Column(String(36), primary_key=True, nullable=False,
default=lambda: str(uuid.uuid4()))
artifact_id = Column(String(36), ForeignKey('artifacts.id'),
nullable=False)
artifact = relationship(Artifact,
backref=backref('tags',
cascade="all, delete-orphan"))
value = Column(String(255), nullable=False)
class ArtifactProperty(BASE, ArtifactBase):
__tablename__ = 'artifact_properties'
__table_args__ = (
Index('ix_artifact_properties_artifact_id', 'artifact_id'),
Index('ix_artifact_properties_name', 'name'),
{'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},)
id = Column(String(36), primary_key=True, nullable=False,
default=lambda: str(uuid.uuid4()))
artifact_id = Column(String(36), ForeignKey('artifacts.id'),
nullable=False)
artifact = relationship(Artifact,
backref=backref('properties',
cascade="all, delete-orphan"))
name = Column(String(255), nullable=False)
string_value = Column(String(255))
int_value = Column(Integer)
numeric_value = Column(Numeric)
bool_value = Column(Boolean)
text_value = Column(Text)
position = Column(Integer)
class ArtifactBlob(BASE, ArtifactBase):
__tablename__ = 'artifact_blobs'
__table_args__ = (
Index('ix_artifact_blobs_artifact_id', 'artifact_id'),
Index('ix_artifact_blobs_name', 'name'),
{'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'},)
id = Column(String(36), primary_key=True, nullable=False,
default=lambda: str(uuid.uuid4()))
artifact_id = Column(String(36), ForeignKey('artifacts.id'),
nullable=False)
name = Column(String(255), nullable=False)
item_key = Column(String(329))
size = Column(BigInteger().with_variant(Integer, "sqlite"),
nullable=False)
checksum = Column(String(32))
position = Column(Integer)
artifact = relationship(Artifact,
backref=backref('blobs',
cascade="all, delete-orphan"))
class ArtifactBlobLocation(BASE, ArtifactBase):
__tablename__ = 'artifact_blob_locations'
__table_args__ = (Index('ix_artifact_blob_locations_blob_id',
'blob_id'),
{'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'})
id = Column(String(36), primary_key=True, nullable=False,
default=lambda: str(uuid.uuid4()))
blob_id = Column(String(36), ForeignKey('artifact_blobs.id'),
nullable=False)
value = Column(Text, nullable=False)
position = Column(Integer)
status = Column(String(36), default='active', nullable=True)
blob = relationship(ArtifactBlob,
backref=backref('locations',
cascade="all, delete-orphan"))
def register_models(engine):
"""Create database tables for all models with the given engine."""
models = (Artifact, ArtifactTag, ArtifactProperty,
ArtifactBlob, ArtifactBlobLocation, ArtifactDependency)
for model in models:
model.metadata.create_all(engine)
def unregister_models(engine):
"""Drop database tables for all models with the given engine."""
models = (ArtifactDependency, ArtifactBlobLocation, ArtifactBlob,
ArtifactProperty, ArtifactTag, Artifact)
for model in models:
model.metadata.drop_all(engine)
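
register_models and unregister_models exist mainly for tests; standing the schema up against an in-memory SQLite engine looked roughly like this (a sketch that assumes a pre-removal glance tree on the path):

from sqlalchemy import create_engine

from glance.db.sqlalchemy import models_glare

engine = create_engine('sqlite:///:memory:')
models_glare.register_models(engine)    # creates all six artifact tables
models_glare.unregister_models(engine)  # drops them again, FK-safe order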

View File

@ -1,46 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from glance.common import exception
class Showlevel(object):
# None - do not show additional properties and blobs with locations;
# Basic - show all artifact fields except dependencies;
# Direct - show all artifact fields with only direct dependencies;
# Transitive - show all artifact fields with all of dependencies.
NONE = 0
BASIC = 1
DIRECT = 2
TRANSITIVE = 3
_level_map = {'none': NONE, 'basic': BASIC, 'direct': DIRECT,
'transitive': TRANSITIVE}
_inverted_level_map = {v: k for k, v in six.iteritems(_level_map)}
@staticmethod
def to_str(n):
try:
return Showlevel._inverted_level_map[n]
except KeyError:
raise exception.ArtifactUnsupportedShowLevel()
@staticmethod
def from_str(str_value):
try:
return Showlevel._level_map[str_value]
except KeyError:
raise exception.ArtifactUnsupportedShowLevel()
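
Showlevel round-trips between the int constants and their string names; typical use, assuming a pre-removal tree is importable:

from glance.glare import Showlevel

assert Showlevel.from_str('direct') == Showlevel.DIRECT
assert Showlevel.to_str(Showlevel.TRANSITIVE) == 'transitive'
# Showlevel.from_str('bogus') raises ArtifactUnsupportedShowLevel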

View File

@ -1,126 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glance.common.exception as exc
import glance.common.glare.definitions as definitions
from glance.glare.domain import proxy
from glance.i18n import _
class ArtifactProxy(proxy.Artifact):
def __init__(self, artifact, repo):
super(ArtifactProxy, self).__init__(artifact)
self.artifact = artifact
self.repo = repo
def set_type_specific_property(self, prop_name, value):
if prop_name not in self.metadata.attributes.dependencies:
return super(ArtifactProxy, self).set_type_specific_property(
prop_name, value)
        # every dependency is passed as an id; resolve it to the artifact
        # itself before assignment
if value is None:
setattr(self.artifact, prop_name, None)
else:
if not isinstance(value, list):
setattr(self.artifact, prop_name,
self._fetch_dependency(value))
else:
setattr(self.artifact, prop_name,
[self._fetch_dependency(dep_id) for dep_id in value])
def _fetch_dependency(self, dep_id):
# check for circular dependency id -> id
if self.id == dep_id:
raise exc.ArtifactCircularDependency()
art = self.repo.get(artifact_id=dep_id)
# repo returns a proxy of some level.
# Need to find the base declarative artifact
while not isinstance(art, definitions.ArtifactType):
art = art.base
return art
class ArtifactRepo(proxy.ArtifactRepo):
def __init__(self, repo, plugins,
item_proxy_class=None, item_proxy_kwargs=None):
self.plugins = plugins
super(ArtifactRepo, self).__init__(repo,
item_proxy_class=ArtifactProxy,
item_proxy_kwargs={'repo': self})
def _check_dep_state(self, dep, state):
"""Raises an exception if dependency 'dep' is not in state 'state'"""
if dep.state != state:
raise exc.Invalid(_(
"Not all dependencies are in '%s' state") % state)
def publish(self, artifact, *args, **kwargs):
"""
Creates transitive dependencies,
checks that all dependencies are in active state and
transfers artifact from creating to active state
"""
# make sure that all required dependencies exist
artifact.__pre_publish__(*args, **kwargs)
# make sure that all dependencies are active
for param in artifact.metadata.attributes.dependencies:
dependency = getattr(artifact, param)
if isinstance(dependency, list):
for dep in dependency:
self._check_dep_state(dep, 'active')
elif dependency:
self._check_dep_state(dependency, 'active')
# as state is changed on db save, have to retrieve the freshly changed
# artifact (the one passed into the func will have old state value)
artifact = self.base.publish(self.helper.unproxy(artifact))
return self.helper.proxy(artifact)
def remove(self, artifact):
"""
Checks that artifact has no dependencies and removes it.
Otherwise an exception is raised
"""
for param in artifact.metadata.attributes.dependencies:
if getattr(artifact, param):
raise exc.Invalid(_(
"Dependency property '%s' has to be deleted first") %
param)
return self.base.remove(self.helper.unproxy(artifact))
class ArtifactFactory(proxy.ArtifactFactory):
def __init__(self, base, klass, repo):
self.klass = klass
self.repo = repo
super(ArtifactFactory, self).__init__(
base, artifact_proxy_class=ArtifactProxy,
artifact_proxy_kwargs={'repo': self.repo})
def new_artifact(self, *args, **kwargs):
"""
Creates an artifact without dependencies first
and then adds them to the newly created artifact
"""
# filter dependencies
no_deps = {p: kwargs[p] for p in kwargs
if p not in self.klass.metadata.attributes.dependencies}
deps = {p: kwargs[p] for p in kwargs
if p in self.klass.metadata.attributes.dependencies}
artifact = super(ArtifactFactory, self).new_artifact(*args, **no_deps)
# now set dependencies
for dep_param, dep_value in deps.items():
setattr(artifact, dep_param, dep_value)
return artifact

View File

@ -1,69 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from glance.common import timeutils
from glance.i18n import _
class Artifact(object):
def __init__(self, id, name, version, type_name, type_version,
state, owner, visibility='private', created_at=None,
updated_at=None, **kwargs):
self.id = id
self.name = name
self.type_name = type_name
self.version = version
self.type_version = type_version
self.visibility = visibility
self.state = state
self.owner = owner
self.created_at = created_at
self.updated_at = updated_at
self.description = kwargs.pop('description', None)
self.blobs = kwargs.pop('blobs', {})
self.properties = kwargs.pop('properties', {})
self.dependencies = kwargs.pop('dependencies', {})
self.tags = kwargs.pop('tags', [])
if kwargs:
message = _("__init__() got unexpected keyword argument '%s'")
raise TypeError(message % list(kwargs.keys())[0])
class ArtifactFactory(object):
def __init__(self, context, klass):
self.klass = klass
self.context = context
def new_artifact(self, name, version, **kwargs):
id = kwargs.pop('id', str(uuid.uuid4()))
tags = kwargs.pop('tags', [])
# pop reserved fields from kwargs dict
for param in ['owner', 'created_at', 'updated_at',
'deleted_at', 'state']:
kwargs.pop(param, '')
curr_timestamp = timeutils.utcnow()
base = self.klass(id=id,
name=name,
version=version,
state='creating',
owner=self.context.owner or '',
created_at=curr_timestamp,
updated_at=curr_timestamp,
tags=tags,
**kwargs)
return base
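
ArtifactFactory.new_artifact stamps id, state, owner, and the timestamps itself, so a caller supplies only type-specific data. A sketch of the call shape, assuming this file is glance/glare/domain/__init__.py and using SimpleNamespace as a stand-in for a real request context:

import types

from glance.glare import domain

ctx = types.SimpleNamespace(owner='tenant-1')
factory = domain.ArtifactFactory(ctx, domain.Artifact)
art = factory.new_artifact('demo', '1.0.0',
                           type_name='demo_type', type_version='1.0')
assert art.state == 'creating' and art.owner == 'tenant-1'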

View File

@ -1,200 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from glance.common import exception as exc
from glance.domain import proxy as image_proxy
def _proxy_artifact_property(attr):
def getter(self):
return self.get_type_specific_property(attr)
def setter(self, value):
return self.set_type_specific_property(attr, value)
return property(getter, setter)
class ArtifactHelper(image_proxy.Helper):
"""
Artifact-friendly proxy helper: does everything the regular helper does,
but also dynamically proxies all the type-specific attributes,
including properties, blobs and dependencies
"""
def proxy(self, obj):
if obj is None or self.proxy_class is None:
return obj
if not hasattr(obj, 'metadata'):
return super(ArtifactHelper, self).proxy(obj)
extra_attrs = {}
for att_name in obj.metadata.attributes.all.keys():
extra_attrs[att_name] = _proxy_artifact_property(att_name)
new_proxy_class = type("%s(%s)" % (obj.metadata.type_name,
self.proxy_class.__module__),
(self.proxy_class,),
extra_attrs)
return new_proxy_class(obj, **self.proxy_kwargs)
class ArtifactRepo(object):
def __init__(self, base, proxy_helper=None, item_proxy_class=None,
item_proxy_kwargs=None):
self.base = base
if proxy_helper is None:
proxy_helper = ArtifactHelper(item_proxy_class, item_proxy_kwargs)
self.helper = proxy_helper
def get(self, *args, **kwargs):
return self.helper.proxy(self.base.get(*args, **kwargs))
def list(self, *args, **kwargs):
items = self.base.list(*args, **kwargs)
return [self.helper.proxy(item) for item in items]
def add(self, item):
base_item = self.helper.unproxy(item)
result = self.base.add(base_item)
return self.helper.proxy(result)
def save(self, item):
base_item = self.helper.unproxy(item)
result = self.base.save(base_item)
return self.helper.proxy(result)
def remove(self, item):
base_item = self.helper.unproxy(item)
result = self.base.remove(base_item)
return self.helper.proxy(result)
def publish(self, item, *args, **kwargs):
base_item = self.helper.unproxy(item)
result = self.base.publish(base_item, *args, **kwargs)
return self.helper.proxy(result)
class Artifact(object):
def __init__(self, base, proxy_class=None, proxy_kwargs=None):
self.base = base
self.helper = ArtifactHelper(proxy_class, proxy_kwargs)
# it is enough to proxy metadata only, other properties will be proxied
# automatically by ArtifactHelper
metadata = _proxy_artifact_property('metadata')
def set_type_specific_property(self, prop_name, value):
setattr(self.base, prop_name, value)
def get_type_specific_property(self, prop_name):
try:
return getattr(self.base, prop_name)
except AttributeError:
raise exc.ArtifactInvalidProperty(prop=prop_name)
def __pre_publish__(self, *args, **kwargs):
self.base.__pre_publish__(*args, **kwargs)
class ArtifactFactory(object):
def __init__(self, base,
artifact_proxy_class=Artifact,
artifact_proxy_kwargs=None):
self.artifact_helper = ArtifactHelper(artifact_proxy_class,
artifact_proxy_kwargs)
self.base = base
def new_artifact(self, *args, **kwargs):
t = self.base.new_artifact(*args, **kwargs)
return self.artifact_helper.proxy(t)
class ArtifactBlob(object):
def __init__(self, base, artifact_blob_proxy_class=None,
artifact_blob_proxy_kwargs=None):
self.base = base
self.helper = image_proxy.Helper(artifact_blob_proxy_class,
artifact_blob_proxy_kwargs)
size = _proxy_artifact_property('size')
locations = _proxy_artifact_property('locations')
checksum = _proxy_artifact_property('checksum')
item_key = _proxy_artifact_property('item_key')
def set_type_specific_property(self, prop_name, value):
setattr(self.base, prop_name, value)
def get_type_specific_property(self, prop_name):
return getattr(self.base, prop_name)
def to_dict(self):
return self.base.to_dict()
class ArtifactProperty(object):
def __init__(self, base, proxy_class=None, proxy_kwargs=None):
self.base = base
self.helper = ArtifactHelper(proxy_class, proxy_kwargs)
def set_type_specific_property(self, prop_name, value):
setattr(self.base, prop_name, value)
def get_type_specific_property(self, prop_name):
return getattr(self.base, prop_name)
class List(collections.MutableSequence):
def __init__(self, base, item_proxy_class=None,
item_proxy_kwargs=None):
self.base = base
self.helper = image_proxy.Helper(item_proxy_class, item_proxy_kwargs)
def __len__(self):
return len(self.base)
def __delitem__(self, index):
del self.base[index]
def __getitem__(self, index):
item = self.base[index]
return self.helper.proxy(item)
def insert(self, index, value):
self.base.insert(index, self.helper.unproxy(value))
def __setitem__(self, index, value):
self.base[index] = self.helper.unproxy(value)
class Dict(collections.MutableMapping):
def __init__(self, base, item_proxy_class=None, item_proxy_kwargs=None):
self.base = base
self.helper = image_proxy.Helper(item_proxy_class, item_proxy_kwargs)
def __setitem__(self, key, value):
self.base[key] = self.helper.unproxy(value)
def __getitem__(self, key):
item = self.base[key]
return self.helper.proxy(item)
def __delitem__(self, key):
del self.base[key]
def __len__(self):
return len(self.base)
def __iter__(self):
for key in self.base.keys():
yield key
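ArtifactHelper above builds a per-type proxy class at runtime with type(), attaching one property per type-specific attribute. A self-contained sketch of that pattern, independent of the Glance base classes (all names hypothetical):

    def _proxy_property(attr):
        def getter(self):
            return getattr(self.base, attr)
        def setter(self, value):
            setattr(self.base, attr, value)
        return property(getter, setter)

    class BaseProxy(object):
        def __init__(self, base):
            self.base = base

    class Payload(object):
        color = 'red'

    # dynamically derive a proxy class, as ArtifactHelper.proxy() does
    DynamicProxy = type('Payload(proxy)', (BaseProxy,),
                        {'color': _proxy_property('color')})
    p = DynamicProxy(Payload())
    assert p.color == 'red'
    p.color = 'blue'
    assert p.base.color == 'blue'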

View File

@ -1,54 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glance_store
from glance.common import store_utils
import glance.db
from glance.glare import dependency
from glance.glare import domain
from glance.glare import location
from glance.glare import updater
class Gateway(object):
def __init__(self, db_api=None, store_api=None, plugins=None):
self.db_api = db_api or glance.db.get_api()
self.store_api = store_api or glance_store
self.store_utils = store_utils
self.plugins = plugins
def get_artifact_type_factory(self, context, klass):
declarative_factory = domain.ArtifactFactory(context, klass)
repo = self.get_artifact_repo(context)
dependencies_factory = dependency.ArtifactFactory(declarative_factory,
klass, repo)
factory = location.ArtifactFactoryProxy(dependencies_factory,
context,
self.store_api,
self.store_utils)
updater_factory = updater.ArtifactFactoryProxy(factory)
return updater_factory
def get_artifact_repo(self, context):
artifact_repo = glance.db.ArtifactRepo(context,
self.db_api,
self.plugins)
dependencies_repo = dependency.ArtifactRepo(artifact_repo,
self.plugins)
repo = location.ArtifactRepoProxy(dependencies_repo,
context,
self.store_api,
self.store_utils)
updater_repo = updater.ArtifactRepoProxy(repo)
return updater_repo
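The gateway composes the repository as an onion: the raw DB repo is wrapped by the dependency proxy, then the location proxy, then the updater proxy, and a call traverses the layers outside-in. A hedged sketch of the wrapping pattern with stand-in classes:

    class DbRepo(object):
        def get(self, artifact_id):
            return {'id': artifact_id}

    class TracingRepo(object):  # stand-in for the dependency/location/updater proxies
        def __init__(self, base):
            self.base = base
        def get(self, artifact_id):
            # each layer may pre/post-process before delegating inward
            return self.base.get(artifact_id)

    repo = TracingRepo(TracingRepo(DbRepo()))
    assert repo.get('uuid-1') == {'id': 'uuid-1'}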

View File

@ -1,198 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import uuid
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from glance.common.glare import definitions
from glance.common import utils
from glance.glare.domain import proxy
from glance.i18n import _LE, _LW
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class ArtifactFactoryProxy(proxy.ArtifactFactory):
def __init__(self, factory, context, store_api, store_utils):
self.context = context
self.store_api = store_api
self.store_utils = store_utils
proxy_kwargs = {'store_api': store_api,
'store_utils': store_utils,
'context': self.context}
super(ArtifactFactoryProxy, self).__init__(
factory,
artifact_proxy_class=ArtifactProxy,
artifact_proxy_kwargs=proxy_kwargs)
class ArtifactProxy(proxy.Artifact):
def __init__(self, artifact, context, store_api, store_utils):
self.artifact = artifact
self.context = context
self.store_api = store_api
self.store_utils = store_utils
super(ArtifactProxy,
self).__init__(artifact,
proxy_class=ArtifactBlobProxy,
proxy_kwargs={"context": self.context,
"store_api": self.store_api})
def set_type_specific_property(self, prop_name, value):
if prop_name not in self.artifact.metadata.attributes.blobs:
super(ArtifactProxy, self).set_type_specific_property(prop_name,
value)
return
item_key = "%s.%s" % (self.artifact.id, prop_name)
# XXX FIXME have to add support for BinaryObjectList properties
blob = definitions.Blob(item_key=item_key)
blob_proxy = self.helper.proxy(blob)
if value is None:
for location in blob_proxy.locations:
blob_proxy.delete_from_store(location)
else:
data = value[0]
size = value[1]
blob_proxy.upload_to_store(data, size)
setattr(self.artifact, prop_name, blob)
def get_type_specific_property(self, prop_name):
base = super(ArtifactProxy, self).get_type_specific_property(prop_name)
if base is None:
return None
if prop_name in self.artifact.metadata.attributes.blobs:
if isinstance(self.artifact.metadata.attributes.blobs[prop_name],
list):
return ArtifactBlobProxyList(self.artifact.id,
prop_name,
base,
self.context,
self.store_api)
else:
return self.helper.proxy(base)
else:
return base
class ArtifactRepoProxy(proxy.ArtifactRepo):
def __init__(self, artifact_repo, context, store_api, store_utils):
self.context = context
self.store_api = store_api
proxy_kwargs = {'context': context, 'store_api': store_api,
'store_utils': store_utils}
super(ArtifactRepoProxy, self).__init__(
artifact_repo,
proxy_helper=proxy.ArtifactHelper(ArtifactProxy, proxy_kwargs))
def get(self, *args, **kwargs):
return self.helper.proxy(self.base.get(*args, **kwargs))
class ArtifactBlobProxy(proxy.ArtifactBlob):
def __init__(self, blob, context, store_api):
self.context = context
self.store_api = store_api
self.blob = blob
super(ArtifactBlobProxy, self).__init__(blob)
def delete_from_store(self, location):
try:
ret = self.store_api.delete_from_backend(location['value'],
context=self.context)
location['status'] = 'deleted'
return ret
except self.store_api.NotFound:
msg = _LW('Failed to delete blob'
' %s in store from URI') % self.blob.id
LOG.warn(msg)
except self.store_api.StoreDeleteNotSupported as e:
LOG.warn(encodeutils.exception_to_unicode(e))
except self.store_api.UnsupportedBackend:
exc_type = sys.exc_info()[0].__name__
msg = (_LE('Failed to delete blob'
' %(blob_id)s from store: %(exc)s') %
dict(blob_id=self.blob.id, exc=exc_type))
LOG.error(msg)
def upload_to_store(self, data, size):
if size is None: # NOTE(ativelkov): None is "unknown size"
size = 0
location, ret_size, checksum, loc_meta = self.store_api.add_to_backend(
CONF,
self.blob.item_key,
utils.LimitingReader(utils.CooperativeReader(data),
CONF.image_size_cap),
size,
context=self.context)
self.blob.size = ret_size
self.blob.locations = [{'status': 'active', 'value': location}]
self.blob.checksum = checksum
@property
def data_stream(self):
if len(self.locations) > 0:
err = None
try:
for location in self.locations:
data, size = self.store_api.get_from_backend(
location['value'],
context=self.context)
return data
except Exception as e:
LOG.warn(_LW('Get blob %(name)s data failed: '
'%(err)s.')
% {'name': self.blob.item_key,
'err': encodeutils.exception_to_unicode(e)})
err = e
# tried all locations
LOG.error(_LE('Glance tried all active locations to get data '
'for blob %s '
'but all have failed.') % self.blob.item_key)
raise err
class ArtifactBlobProxyList(proxy.List):
def __init__(self, artifact_id, prop_name, bloblist, context, store_api):
self.artifact_id = artifact_id
self.prop_name = prop_name
self.context = context
self.store_api = store_api
super(ArtifactBlobProxyList,
self).__init__(bloblist,
item_proxy_class=ArtifactBlobProxy,
item_proxy_kwargs={'context': context,
'store_api': store_api})
def insert(self, index, value):
data = value[0]
size = value[1]
item_key = "%s.%s.%s" % (self.artifact_id, self.prop_name,
uuid.uuid4())
blob = definitions.Blob(item_key=item_key)
blob_proxy = self.helper.proxy(blob)
blob_proxy.upload_to_store(data, size)
super(ArtifactBlobProxyList, self).insert(index, blob_proxy)
def __setitem__(self, index, value):
blob = self[index]
data = value[0]
size = value[1]
blob.upload_to_store(data, size)
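Blob payloads are addressed in the backing store by a composite item_key: '<artifact_id>.<prop_name>' for a single blob, and '<artifact_id>.<prop_name>.<uuid>' for each element of a blob list. A short sketch (the property names are illustrative):

    import uuid

    artifact_id = '80cc6551-9db4-42aa-bb58-51c48757f285'
    scalar_key = '%s.%s' % (artifact_id, 'image_file')
    list_item_key = '%s.%s.%s' % (artifact_id, 'screenshots', uuid.uuid4())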

View File

@ -1,205 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common import exception as exc
from glance.glare.domain import proxy
from glance.i18n import _
class ArtifactProxy(proxy.Artifact):
"""A proxy that is capable of modifying an artifact via jsonpatch methods.
Currently supported methods are add, remove and replace.
"""
def __init__(self, artifact):
self.artifact = artifact
super(ArtifactProxy, self).__init__(artifact)
def __getattr__(self, name):
if not hasattr(self, name):
raise exc.ArtifactInvalidProperty(prop=name)
return super(ArtifactProxy, self).__getattr__(name)
def _perform_op(self, op, **kwargs):
path = kwargs.get("path")
value = kwargs.get("value")
prop_name, delimiter, path_left = path.lstrip('/').partition('/')
super(ArtifactProxy, self).get_type_specific_property(prop_name)
if not path_left:
return setattr(self, prop_name, value)
try:
prop = self._get_prop_to_update(prop_name, path_left)
# correct path_left and call corresponding update method
kwargs["path"] = path_left
getattr(prop, op)(path=kwargs["path"], value=kwargs.get("value"))
return setattr(self, prop_name, prop)
except exc.InvalidJsonPatchPath:
# NOTE(ivasilevskaya): the exception is re-raised here with the
# 'part of path' substituted with the 'full path' to form a
# more relevant message
raise exc.InvalidJsonPatchPath(
path=path, explanation=_("No property to access"))
def _get_prop_to_update(self, prop_name, path):
"""Proxies properties that can be modified via update request.
All properties can be updated save for 'metadata' and blobs.
Due to the fact that empty lists and dicts are represented with null
values, have to check precise type definition by consulting metadata.
"""
prop = super(ArtifactProxy, self).get_type_specific_property(
prop_name)
if (prop_name == "metadata" or
prop_name in self.artifact.metadata.attributes.blobs):
return prop
if not prop:
# get correct type for empty list/dict
klass = self.artifact.metadata.attributes.all[prop_name]
if isinstance(klass, list):
prop = []
elif isinstance(klass, dict):
prop = {}
return wrap_property(prop, path)
def replace(self, path, value):
self._perform_op("replace", path=path, value=value)
def remove(self, path, value=None):
self._perform_op("remove", path=path)
def add(self, path, value):
self._perform_op("add", path=path, value=value)
class ArtifactFactoryProxy(proxy.ArtifactFactory):
def __init__(self, factory):
super(ArtifactFactoryProxy, self).__init__(factory)
class ArtifactRepoProxy(proxy.ArtifactRepo):
def __init__(self, repo):
super(ArtifactRepoProxy, self).__init__(
repo, item_proxy_class=ArtifactProxy)
def wrap_property(prop_value, full_path):
if isinstance(prop_value, list):
return ArtifactListPropertyProxy(prop_value, full_path)
if isinstance(prop_value, dict):
return ArtifactDictPropertyProxy(prop_value, full_path)
# no other types are supported
raise exc.InvalidJsonPatchPath(path=full_path)
class ArtifactListPropertyProxy(proxy.List):
"""A class to wrap a list property.
Makes it possible to modify the property value via the supported
jsonpatch requests (add/remove/replace).
"""
def __init__(self, prop_value, path):
super(ArtifactListPropertyProxy, self).__init__(
prop_value)
def _proc_key(self, idx_str, should_exist=True):
"""JsonPatchUpdateMixin method overload.
Only integers less than current array length and '-' (last elem)
in path are allowed.
Raises an InvalidJsonPatchPath exception if any of the conditions above
are not met.
"""
if idx_str == '-':
return len(self) - 1
try:
idx = int(idx_str)
if not should_exist and len(self) == 0:
return 0
if len(self) < idx + 1:
msg = _("Array has no element at position %d") % idx
raise exc.InvalidJsonPatchPath(explanation=msg, path=idx)
return idx
except (ValueError, TypeError):
msg = _("Not an array idx '%s'") % idx_str
raise exc.InvalidJsonPatchPath(explanation=msg, path=idx_str)
def add(self, path, value):
# for now arrays can't contain complex structures (due to Declarative
# Framework limitations and the DB storage model),
# so 'path' == idx equality is implied.
idx = self._proc_key(path, False)
if idx == len(self) - 1:
self.append(value)
else:
self.insert(idx, value)
return self.base
def remove(self, path, value=None):
# for now arrays can't contain complex structures, so it is implied that
# 'path' == idx [see comment for add()]
del self[self._proc_key(path)]
return self.base
def replace(self, path, value):
# for now arrays can't contain complex structures, so it is implied that
# 'path' == idx [see comment for add()]
self[self._proc_key(path)] = value
return self.base
class ArtifactDictPropertyProxy(proxy.Dict):
"""A class to wrap a dict property.
Makes it possible to modify the property value via the supported
jsonpatch requests (add/remove/replace).
"""
def __init__(self, prop_value, path):
super(ArtifactDictPropertyProxy, self).__init__(
prop_value)
def _proc_key(self, key_str, should_exist=True):
"""JsonPatchUpdateMixin method overload"""
if should_exist and key_str not in self.keys():
msg = _("No such key '%s' in a dict") % key_str
raise exc.InvalidJsonPatchPath(path=key_str, explanation=msg)
return key_str
def replace(self, path, value):
start, delimiter, rest = path.partition('/')
# the full path MUST exist in replace operation, so let's check
# that such key exists
key = self._proc_key(start)
if not rest:
self[key] = value
else:
prop = wrap_property(self[key], rest)
self[key] = prop.replace(rest, value)
def remove(self, path, value=None):
start, delimiter, rest = path.partition('/')
key = self._proc_key(start)
if not rest:
del self[key]
else:
prop = wrap_property(self[key], rest)
prop.remove(rest)
def add(self, path, value):
start, delimiter, rest = path.partition('/')
if not rest:
self[start] = value
else:
key = self._proc_key(start)
prop = wrap_property(self[key], rest)
self[key] = prop.add(rest, value)
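_perform_op and the dict/list proxies above all walk a jsonpatch path the same way: strip the leading slash, split off the first segment, and recurse on the remainder. The split in isolation:

    path = '/depends_on/2'
    prop_name, _, path_left = path.lstrip('/').partition('/')
    assert (prop_name, path_left) == ('depends_on', '2')
    # a path with no remainder targets the property itself
    prop_name, _, path_left = '/name'.lstrip('/').partition('/')
    assert (prop_name, path_left) == ('name', '')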

View File

@ -18,7 +18,6 @@ __all__ = [
    'list_scrubber_opts',
    'list_cache_opts',
    'list_manage_opts',
    'list_artifacts_opts'
]
import copy
@ -108,17 +107,6 @@ _cache_opts = [
_manage_opts = [
    (None, [])
]
_artifacts_opts = [
(None, list(itertools.chain(
glance.api.middleware.context.context_opts,
glance.api.versions.versions_opts,
glance.common.wsgi.bind_opts,
glance.common.wsgi.eventlet_opts,
glance.common.wsgi.socket_opts,
glance.notifier.notifier_opts))),
profiler.list_opts()[0],
('paste_deploy', glance.common.config.paste_deploy_opts)
]
def list_api_opts():
@ -165,8 +153,3 @@ def list_cache_opts():
def list_manage_opts():
    """Return a list of oslo_config options available in Glance manage."""
    return [(g, copy.deepcopy(o)) for g, o in _manage_opts]
def list_artifacts_opts():
"""Return a list of oslo_config options available in Glance artifacts"""
return [(g, copy.deepcopy(o)) for g, o in _artifacts_opts]
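Each list_*_opts() helper deep-copies the option lists so the sample-config generator cannot mutate the module-level definitions. The pattern in isolation (strings stand in for oslo.config Opt objects):

    import copy

    _opts = [(None, ['opt_a', 'opt_b'])]

    def list_opts():
        return [(g, copy.deepcopy(o)) for g, o in _opts]

    listed = list_opts()
    listed[0][1].append('opt_c')
    assert _opts[0][1] == ['opt_a', 'opt_b']  # originals untouched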

View File

@ -1,907 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import six
from six.moves import range
from glance.common import exception as exc
from glance import context
import glance.glare as ga
import glance.tests.functional.db as db_tests
from glance.tests import utils as test_utils
UUID1, UUID2 = ('80cc6551-9db4-42aa-bb58-51c48757f285',
'f89c675a-e01c-436c-a384-7d2e784fb2d9')
TYPE_NAME = u'TestArtifactType'
TYPE_VERSION = u'1.0.0'
class ArtifactsTestDriver(test_utils.BaseTestCase):
def setUp(self):
super(ArtifactsTestDriver, self).setUp()
context_cls = context.RequestContext
self.adm_context = context_cls(is_admin=True,
auth_token='user:user:admin',
tenant='admin-tenant')
self.context = context_cls(is_admin=False,
auth_token='user:user:user',
tenant='test-tenant')
self.db_api = db_tests.get_db(self.config)
db_tests.reset_db(self.db_api)
self.create_test_artifacts()
def create_test_artifacts(self):
dependency = {'2->1': [UUID1]}
self.db_api.artifact_create(self.adm_context,
get_fixture(id=UUID1,
name="TestArtifact1",
visibility="public"),
TYPE_NAME,
TYPE_VERSION)
self.db_api.artifact_create(self.adm_context,
get_fixture(id=UUID2,
name="TestArtifact2",
visibility="public",
dependencies=dependency),
TYPE_NAME,
TYPE_VERSION)
self.art1 = self.db_api.artifact_get(self.context, UUID1, TYPE_NAME,
TYPE_VERSION)
self.art2 = self.db_api.artifact_get(self.context, UUID2, TYPE_NAME,
TYPE_VERSION)
class ArtifactTests(object):
def test_artifact_create(self):
artifact = get_fixture()
created = self.db_api.artifact_create(self.context, artifact,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(created)
self.assertEqual(artifact['name'], created['name'])
self.assertEqual(artifact['type_name'], created['type_name'])
self.assertEqual(artifact['type_version'], created['type_version'])
def test_artifact_create_none_valued_props(self):
artifact = get_fixture()
artifact['properties']['lylyly'] = dict(value=None, type='int')
artifact['properties']['hihihi'] = dict(value=5, type='int')
created = self.db_api.artifact_create(self.context, artifact,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(created)
self.assertIn('hihihi', created['properties'])
self.assertNotIn('lylyly', created['properties'])
def test_artifact_update(self):
fixture = {'name': 'UpdatedName'}
updated = self.db_api.artifact_update(self.context, fixture, UUID1,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(updated)
self.assertEqual('UpdatedName', updated['name'])
self.assertNotEqual(updated['created_at'], updated['updated_at'])
def test_artifact_create_same_version_different_users(self):
tenant1 = str(uuid.uuid4())
tenant2 = str(uuid.uuid4())
ctx1 = context.RequestContext(is_admin=False, tenant=tenant1)
ctx2 = context.RequestContext(is_admin=False, tenant=tenant2)
artifact1 = get_fixture(owner=tenant1)
artifact2 = get_fixture(owner=tenant2)
self.db_api.artifact_create(ctx1, artifact1,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(
self.db_api.artifact_create(ctx2, artifact2,
TYPE_NAME, TYPE_VERSION))
def test_artifact_create_same_version_deleted(self):
artifact1 = get_fixture()
artifact2 = get_fixture(state='deleted')
artifact3 = get_fixture(state='deleted')
self.db_api.artifact_create(self.context, artifact1,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(
self.db_api.artifact_create(self.context, artifact2,
TYPE_NAME, TYPE_VERSION))
self.assertIsNotNone(
self.db_api.artifact_create(self.context, artifact3,
TYPE_NAME, TYPE_VERSION))
def test_artifact_get(self):
res = self.db_api.artifact_get(self.context, UUID1,
TYPE_NAME, TYPE_VERSION)
self.assertEqual('TestArtifact1', res['name'])
self.assertEqual('TestArtifactType', res['type_name'])
self.assertEqual('1.0.0', res['type_version'])
self.assertEqual('10.0.3-alpha+some-date', res['version'])
self.assertEqual('creating', res['state'])
self.assertEqual('test-tenant', res['owner'])
def test_artifact_get_owned(self):
tenant1 = str(uuid.uuid4())
tenant2 = str(uuid.uuid4())
ctx1 = context.RequestContext(is_admin=False, tenant=tenant1)
ctx2 = context.RequestContext(is_admin=False, tenant=tenant2)
artifact = get_fixture(owner=tenant1)
created = self.db_api.artifact_create(ctx1, artifact,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(self.db_api.artifact_get(ctx1, created['id'],
TYPE_NAME, TYPE_VERSION))
self.assertRaises(exc.ArtifactForbidden, self.db_api.artifact_get,
ctx2, created['id'], TYPE_NAME, TYPE_VERSION)
def test_artifact_get_public(self):
tenant1 = str(uuid.uuid4())
tenant2 = str(uuid.uuid4())
ctx1 = context.RequestContext(is_admin=False, tenant=tenant1)
ctx2 = context.RequestContext(is_admin=False, tenant=tenant2)
artifact = get_fixture(owner=tenant1, visibility='public')
created = self.db_api.artifact_create(ctx1, artifact,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(self.db_api.artifact_get(ctx1, created['id'],
TYPE_NAME, TYPE_VERSION))
self.assertIsNotNone(self.db_api.artifact_get(ctx2, created['id'],
TYPE_NAME, TYPE_VERSION))
def test_artifact_update_state(self):
res = self.db_api.artifact_update(self.context, {'state': 'active'},
UUID1, TYPE_NAME, TYPE_VERSION)
self.assertEqual('active', res['state'])
self.assertRaises(exc.InvalidArtifactStateTransition,
self.db_api.artifact_update, self.context,
{'state': 'creating'}, UUID1,
TYPE_NAME, TYPE_VERSION)
res = self.db_api.artifact_update(self.context,
{'state': 'deactivated'}, UUID1,
TYPE_NAME, TYPE_VERSION)
self.assertEqual('deactivated', res['state'])
res = self.db_api.artifact_update(self.context, {'state': 'active'},
UUID1, TYPE_NAME, TYPE_VERSION)
self.assertEqual('active', res['state'])
res = self.db_api.artifact_update(self.context, {'state': 'deleted'},
UUID1, TYPE_NAME, TYPE_VERSION)
self.assertEqual('deleted', res['state'])
self.assertRaises(exc.InvalidArtifactStateTransition,
self.db_api.artifact_update, self.context,
{'state': 'active'}, UUID1,
TYPE_NAME, TYPE_VERSION)
self.assertRaises(exc.InvalidArtifactStateTransition,
self.db_api.artifact_update, self.context,
{'state': 'deactivated'}, UUID1,
TYPE_NAME, TYPE_VERSION)
self.assertRaises(exc.InvalidArtifactStateTransition,
self.db_api.artifact_update, self.context,
{'state': 'creating'}, UUID1,
TYPE_NAME, TYPE_VERSION)
def test_artifact_update_tags(self):
res = self.db_api.artifact_update(self.context,
{'tags': ['gagaga', 'lalala']},
UUID1, TYPE_NAME, TYPE_VERSION)
self.assertEqual(set(['gagaga', 'lalala']), set(res['tags']))
def test_artifact_update_properties(self):
new_properties = {'properties': {
'propname1': {
'type': 'string',
'value': 'qeqeqe'},
'propname2': {
'type': 'int',
'value': 6},
'propname3': {
'type': 'int',
'value': '5'},
'proparray': {
'type': 'string',
'value': 'notarray'
}}
}
res = self.db_api.artifact_update(self.context,
new_properties,
UUID1, TYPE_NAME, TYPE_VERSION)
bd_properties = res['properties']
self.assertEqual(4, len(bd_properties))
for prop in bd_properties:
self.assertIn(prop, new_properties['properties'])
def test_artifact_update_blobs(self):
new_blobs = {'blobs': {
'blob1': [{
'size': 2600000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL11',
'status': 'active'},
{'value': 'URL12',
'status': 'active'}]
}, {
'size': 200000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'newURL21',
'status': 'active'},
{'value': 'URL22',
'status': 'passive'}]
}
],
'blob2': [{
'size': 120000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'URL22',
'status': 'active'}]
}, {
'size': 300000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'bl1URL2',
'status': 'passive'}]
}
]
}
}
res = self.db_api.artifact_update(self.context,
new_blobs,
UUID1, TYPE_NAME, TYPE_VERSION)
bd_blobs = res['blobs']
self.assertEqual(2, len(bd_blobs))
for blob in bd_blobs:
self.assertIn(blob, new_blobs['blobs'])
def test_artifact_create_with_dependency(self):
dependencies = {"new->2": [UUID2]}
artifact = get_fixture(dependencies=dependencies)
res = self.db_api.artifact_create(self.context, artifact,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(res)
created = self.db_api.artifact_get(
self.context, res['id'], TYPE_NAME, TYPE_VERSION,
show_level=ga.Showlevel.DIRECT)
bd_dependencies = created['dependencies']
self.assertEqual(1, len(bd_dependencies))
# now try to update artifact with the same dependency
new_dependencies = {"dependencies": {"new->2": [UUID2],
"new->3": [UUID2]}}
res = self.db_api.artifact_update(self.context,
new_dependencies,
UUID1, TYPE_NAME, TYPE_VERSION)
retrieved = self.db_api.artifact_get(
self.context, res['id'],
TYPE_NAME, TYPE_VERSION, show_level=ga.Showlevel.DIRECT)
self.assertEqual(2, len(retrieved["dependencies"]))
def test_artifact_create_transitive_dependencies(self):
dependencies = {"new->2": [UUID2]}
artifact = get_fixture(dependencies=dependencies, id='new')
res = self.db_api.artifact_create(self.context, artifact,
TYPE_NAME, TYPE_VERSION)
self.assertIsNotNone(res)
created = self.db_api.artifact_get(
self.context, res['id'], TYPE_NAME, TYPE_VERSION,
show_level=ga.Showlevel.DIRECT)
bd_dependencies = created['dependencies']
self.assertEqual(1, len(bd_dependencies))
res = self.db_api.artifact_publish(
self.context,
res['id'], TYPE_NAME, TYPE_VERSION
)
res = self.db_api.artifact_get(
self.context, res['id'], TYPE_NAME, TYPE_VERSION,
show_level=ga.Showlevel.TRANSITIVE)
self.assertIsNotNone(res.pop('created_at'))
self.assertIsNotNone(res.pop('updated_at'))
# NOTE(mfedosin): tags is a set, so we have to check it separately
tags = res.pop('tags', None)
self.assertIsNotNone(tags)
self.assertEqual(set(['gugugu', 'lalala']), set(tags))
tags = res['dependencies']['new->2'][0].pop('tags', None)
self.assertIsNotNone(tags)
self.assertEqual(set(['gugugu', 'lalala']), set(tags))
tags = (res['dependencies']['new->2'][0]['dependencies']['2->1'][0].
pop('tags', None))
self.assertIsNotNone(tags)
self.assertEqual(set(['gugugu', 'lalala']), set(tags))
expected = {
'id': 'new',
'name': u'SomeArtifact',
'description': None,
'type_name': TYPE_NAME,
'type_version': TYPE_VERSION,
'version': u'10.0.3-alpha+some-date',
'visibility': u'private',
'state': u'active',
'owner': u'test-tenant',
'published_at': None,
'deleted_at': None,
'properties': {
'propname1': {
'type': 'string',
'value': 'tututu'},
'propname2': {
'type': 'int',
'value': 5},
'propname3': {
'type': 'string',
'value': 'vavava'},
'proparray': {
'type': 'array',
'value': [
{'type': 'int',
'value': 6},
{'type': 'string',
'value': 'rerere'}
]
}
},
'blobs': {
'blob1': [{
'size': 1600000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL11',
'status': 'active'},
{'value': 'URL12',
'status': 'active'}]
}, {
'size': 100000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'URL22',
'status': 'active'}]
}]
},
'dependencies': {
'new->2': [
{
'id': UUID2,
'created_at': self.art2['created_at'],
'updated_at': self.art2['updated_at'],
'published_at': None,
'deleted_at': None,
'name': u'TestArtifact2',
'description': None,
'type_name': TYPE_NAME,
'type_version': TYPE_VERSION,
'version': u'10.0.3-alpha+some-date',
'visibility': 'public',
'state': u'creating',
'owner': u'test-tenant',
'properties': {
'propname1': {
'type': 'string',
'value': 'tututu'},
'propname2': {
'type': 'int',
'value': 5},
'propname3': {
'type': 'string',
'value': 'vavava'},
'proparray': {
'type': 'array',
'value': [
{'type': 'int',
'value': 6},
{'type': 'string',
'value': 'rerere'}
]
}
},
'blobs': {
'blob1': [{
'size': 1600000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL11',
'status': 'active'},
{'value': 'URL12',
'status': 'active'}]
}, {
'size': 100000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'URL22',
'status': 'active'}]
}]
},
'dependencies': {
'2->1': [
{
'id': UUID1,
'created_at': self.art1['created_at'],
'updated_at': self.art1['updated_at'],
'published_at': None,
'deleted_at': None,
'dependencies': {},
'name': u'TestArtifact1',
'description': None,
'type_name': TYPE_NAME,
'type_version': TYPE_VERSION,
'version': u'10.0.3-alpha+some-date',
'visibility': 'public',
'state': u'creating',
'owner': u'test-tenant',
'properties': {
'propname1': {
'type': 'string',
'value': 'tututu'},
'propname2': {
'type': 'int',
'value': 5},
'propname3': {
'type': 'string',
'value': 'vavava'},
'proparray': {
'type': 'array',
'value': [
{'type': 'int',
'value': 6},
{'type': 'string',
'value': 'rerere'}
]
}
},
'blobs': {
'blob1': [{
'size': 1600000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL11',
'status': 'active'},
{'value': 'URL12',
'status': 'active'}]
}, {
'size': 100000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'URL22',
'status': 'active'}]
}]
}
}
]
}
}
]
}
}
self.assertIsNotNone(res['published_at'])
published_at = res['published_at']
expected['published_at'] = published_at
for key, value in six.iteritems(expected):
self.assertEqual(expected[key], res[key])
def test_artifact_get_all(self):
artifact = get_fixture(name='new_artifact')
self.db_api.artifact_create(self.context, artifact,
TYPE_NAME, TYPE_VERSION)
artifacts = self.db_api.artifact_get_all(self.context)
self.assertEqual(3, len(artifacts))
def test_artifact_sort_order(self):
arts = [get_fixture(version='1.2.3-alpha.4.df.00f'),
get_fixture(version='1.2.2'),
get_fixture(version='1.2.3+some-metadata'),
get_fixture(version='1.2.4'),
get_fixture(version='1.2.3-release.2'),
get_fixture(version='1.2.3-release.1+metadata'),
get_fixture(version='1.2.3-final'),
get_fixture(version='1.2.3-alpha.14.df.00f')]
for art in arts:
self.db_api.artifact_create(self.context, art, TYPE_NAME,
TYPE_VERSION)
artifacts = self.db_api.artifact_get_all(self.context,
sort_keys=[('version',
None)],
sort_dirs=['asc'])
expected_versions = [
'1.2.2',
'1.2.3-alpha.4.df.00f',
'1.2.3-alpha.14.df.00f',
'1.2.3-final',
'1.2.3-release.1+metadata',
'1.2.3-release.2',
'1.2.3+some-metadata',
'1.2.4']
for i in range(len(expected_versions)):
self.assertEqual(expected_versions[i], artifacts[i]['version'])
def test_artifact_get_all_show_level(self):
artifacts = self.db_api.artifact_get_all(self.context)
self.assertEqual(2, len(artifacts))
self.assertRaises(KeyError, lambda: artifacts[0]['properties'])
artifacts = self.db_api.artifact_get_all(
self.context, show_level=ga.Showlevel.BASIC)
self.assertEqual(2, len(artifacts))
self.assertEqual(4, len(artifacts[0]['properties']))
self.assertRaises(exc.ArtifactUnsupportedShowLevel,
self.db_api.artifact_get_all, self.context,
show_level=ga.Showlevel.DIRECT)
def test_artifact_get_all_tags(self):
artifact = get_fixture(name='new_artifact',
tags=['qwerty', 'uiop'])
self.db_api.artifact_create(self.context, artifact,
TYPE_NAME, TYPE_VERSION)
artifacts = self.db_api.artifact_get_all(self.context)
self.assertEqual(3, len(artifacts))
filters = {'tags': [{
'value': 'notag',
}]}
artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
self.assertEqual(0, len(artifacts))
filters = {'tags': [{
'value': 'lalala',
}]}
artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
self.assertEqual(2, len(artifacts))
for artifact in artifacts:
self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])
def test_artifact_get_all_properties(self):
artifact = get_fixture(
name='new_artifact',
properties={
'newprop2': {
'type': 'string',
'value': 'tututu'},
'propname2': {
'type': 'int',
'value': 3},
'propname3': {
'type': 'string',
'value': 'vavava'},
'proptext': {
'type': 'text',
'value': 'bebebe' * 100},
'proparray': {
'type': 'array',
'value': [
{'type': 'int',
'value': 17},
{'type': 'string',
'value': 'rerere'}
]
}})
self.db_api.artifact_create(self.context, artifact,
TYPE_NAME, TYPE_VERSION)
filters = {'propname2': [{
'value': 4,
'operator': 'GT',
'type': 'int'}]}
artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
self.assertEqual(2, len(artifacts))
for artifact in artifacts:
self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])
# position hasn't been set
filters = {'proparray': [{
'value': 6,
'operator': 'LE',
'type': 'int'}]}
artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
self.assertEqual(0, len(artifacts))
for artifact in artifacts:
self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])
# position has been set
filters = {'proparray': [{
'value': 6,
'position': 0,
'operator': 'LE',
'type': 'int'}]}
artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
self.assertEqual(2, len(artifacts))
for artifact in artifacts:
self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])
filters = {'proparray': [{
'value': 6,
'operator': 'IN',
'type': 'int'}]}
artifacts = self.db_api.artifact_get_all(self.context, filters=filters)
self.assertEqual(2, len(artifacts))
for artifact in artifacts:
self.assertIn(artifact['name'], ['TestArtifact1', 'TestArtifact2'])
filters = {'name': [{'value': 'new_artifact'}]}
artifacts = self.db_api.artifact_get_all(self.context,
filters=filters,
show_level=ga.Showlevel.BASIC)
self.assertEqual(1, len(artifacts))
artifact = artifacts[0]
self.assertEqual('new_artifact', artifact['name'])
for prop in artifact['properties'].keys():
self.assertNotEqual('proptext', prop)
filters = {'propname2': [{
'value': 4,
'operator': 'FOO',
'type': 'int'}]}
self.assertRaises(
exc.ArtifactUnsupportedPropertyOperator,
self.db_api.artifact_get_all, self.context, filters=filters)
def test_artifact_delete(self):
res = self.db_api.artifact_delete(self.context, UUID1,
TYPE_NAME, TYPE_VERSION)
self.assertEqual('TestArtifact1', res['name'])
self.assertEqual('deleted', res['state'])
self.assertIsNotNone(res['deleted_at'])
artifacts = self.db_api.artifact_get_all(self.context)
self.assertEqual(1, len(artifacts))
def test_artifact_delete_property(self):
new_properties = {'properties': {
'proparray': {'value': [],
'type': 'array'}
}
}
res = self.db_api.artifact_update(self.context,
new_properties,
UUID1, TYPE_NAME, TYPE_VERSION)
bd_properties = res['properties']
self.assertEqual(3, len(bd_properties))
expected = {
'propname1': {
'type': 'string',
'value': 'tututu'},
'propname2': {
'type': 'int',
'value': 5},
'propname3': {
'type': 'string',
'value': 'vavava'}
}
for prop in bd_properties:
self.assertIn(prop, expected)
def test_artifact_delete_blob(self):
new_blobs = {'blobs': {
'blob2': [{
'size': 2600000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL11',
'status': 'active'},
{'value': 'URL12',
'status': 'active'}]
}, {
'size': 200000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'newURL21',
'status': 'active'},
{'value': 'URL22',
'status': 'passive'}]
}
],
'blob3': [{
'size': 120000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'URL22',
'status': 'active'}]
}, {
'size': 300000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'bl1URL2',
'status': 'passive'}]
}
]
}
}
expected = {'blobs': {
'blob1': [{
'size': 1600000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL11',
'status': 'active'},
{'value': 'URL12',
'status': 'active'}]
}, {
'size': 100000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'URL22',
'status': 'active'}]
}
],
'blob2': [{
'size': 2600000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL11',
'status': 'active'},
{'value': 'URL12',
'status': 'active'}]
}, {
'size': 200000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'newURL21',
'status': 'active'},
{'value': 'URL22',
'status': 'passive'}]
}
],
'blob3': [{
'size': 120000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'URL22',
'status': 'active'}]
}, {
'size': 300000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'bl1URL2',
'status': 'passive'}]
}
]
}
}
res = self.db_api.artifact_update(self.context,
new_blobs,
UUID1, TYPE_NAME, TYPE_VERSION)
bd_blobs = res['blobs']
self.assertEqual(3, len(bd_blobs))
for blob in bd_blobs:
self.assertIn(blob, expected['blobs'])
del_blobs = {'blobs': {
'blob1': []}
}
res = self.db_api.artifact_update(self.context,
del_blobs,
UUID1, TYPE_NAME, TYPE_VERSION)
bd_blobs = res['blobs']
self.assertEqual(2, len(bd_blobs))
for blob in bd_blobs:
self.assertIn(blob, new_blobs['blobs'])
def get_fixture(**kwargs):
artifact = {
'name': u'SomeArtifact',
'type_name': TYPE_NAME,
'type_version': TYPE_VERSION,
'version': u'10.0.3-alpha+some-date',
'visibility': u'private',
'state': u'creating',
'owner': u'test-tenant',
'tags': ['lalala', 'gugugu'],
'properties': {
'propname1': {
'type': 'string',
'value': 'tututu'},
'propname2': {
'type': 'int',
'value': 5},
'propname3': {
'type': 'string',
'value': 'vavava'},
'proparray': {
'type': 'array',
'value': [
{'type': 'int',
'value': 6},
{'type': 'string',
'value': 'rerere'}
]
}
},
'blobs': {
'blob1': [{
'size': 1600000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL11',
'status': 'active'},
{'value': 'URL12',
'status': 'active'}]
}, {
'size': 100000,
'checksum': 'abc',
'item_key': 'some',
'locations': [
{'value': 'URL21',
'status': 'active'},
{'value': 'URL22',
'status': 'active'}]
}
]
}
}
artifact.update(kwargs)
return artifact
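Because get_fixture() folds keyword overrides into the defaults with dict.update(), the tests above tweak only the fields they exercise, e.g.:

    fixture = get_fixture(version='1.2.2', tags=['qwerty'])
    # fixture['version'] == '1.2.2'; every other key keeps its default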

View File

@ -0,0 +1,54 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import utils as db_utils
import sqlalchemy
from glance.tests.functional.db import test_migrations
class TestPike01Mixin(test_migrations.AlembicMigrationsMixin):
artifacts_table_names = [
'artifact_blob_locations',
'artifact_properties',
'artifact_blobs',
'artifact_dependencies',
'artifact_tags',
'artifacts'
]
def _pre_upgrade_pike01(self, engine):
# verify presence of the artifacts tables
for table_name in self.artifacts_table_names:
table = db_utils.get_table(engine, table_name)
self.assertIsNotNone(table)
def _check_pike01(self, engine, data):
# verify absence of the artifacts tables
for table_name in self.artifacts_table_names:
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
db_utils.get_table, engine, table_name)
class TestPike01MySQL(TestPike01Mixin, test_base.MySQLOpportunisticTestCase):
pass
class TestPike01PostgresSQL(TestPike01Mixin,
test_base.PostgreSQLOpportunisticTestCase):
pass
class TestPike01Sqlite(TestPike01Mixin, test_base.DbTestCase):
pass

View File

@ -0,0 +1,50 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import utils as db_utils
import sqlalchemy
from glance.tests.functional.db import test_migrations
class TestPikeContract01Mixin(test_migrations.AlembicMigrationsMixin):
artifacts_table_names = [
'artifact_blob_locations',
'artifact_properties',
'artifact_blobs',
'artifact_dependencies',
'artifact_tags',
'artifacts'
]
def _get_revisions(self, config):
return test_migrations.AlembicMigrationsMixin._get_revisions(
self, config, head='pike_contract01')
def _pre_upgrade_pike_contract01(self, engine):
# verify presence of the artifacts tables
for table_name in self.artifacts_table_names:
table = db_utils.get_table(engine, table_name)
self.assertIsNotNone(table)
def _check_pike_contract01(self, engine, data):
# verify absence of the artifacts tables
for table_name in self.artifacts_table_names:
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
db_utils.get_table, engine, table_name)
class TestPikeContract01MySQL(TestPikeContract01Mixin,
test_base.MySQLOpportunisticTestCase):
pass

View File

@ -0,0 +1,47 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import utils as db_utils
from glance.tests.functional.db import test_migrations
class TestPikeExpand01Mixin(test_migrations.AlembicMigrationsMixin):
artifacts_table_names = [
'artifact_blob_locations',
'artifact_properties',
'artifact_blobs',
'artifact_dependencies',
'artifact_tags',
'artifacts'
]
def _get_revisions(self, config):
return test_migrations.AlembicMigrationsMixin._get_revisions(
self, config, head='pike_expand01')
def _pre_upgrade_pike_expand01(self, engine):
# verify presence of the artifacts tables
for table_name in self.artifacts_table_names:
table = db_utils.get_table(engine, table_name)
self.assertIsNotNone(table)
def _check_pike_expand01(self, engine, data):
# should be no changes, so re-run pre-upgrade check
self._pre_upgrade_pike_expand01(engine)
class TestPikeExpand01MySQL(TestPikeExpand01Mixin,
test_base.MySQLOpportunisticTestCase):
pass

View File

@ -1,6 +1,3 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@ -13,8 +10,14 @@
# License for the specific language governing permissions and limitations
# under the License.
import setuptools
# all other params will be taken from setup.cfg
setuptools.setup(packages=setuptools.find_packages(),
                 setup_requires=['pbr'], pbr=True)
from oslo_db.sqlalchemy import test_base
import glance.tests.functional.db.migrations.test_pike_expand01 as tpe01
# no TestPikeMigrate01Mixin class needed, can use TestPikeExpand01Mixin instead
class TestPikeMigrate01MySQL(tpe01.TestPikeExpand01Mixin,
                             test_base.MySQLOpportunisticTestCase):
    pass

View File

@ -27,7 +27,6 @@ from glance.db import migration as db_migration
from glance.db.sqlalchemy import alembic_migrations
from glance.db.sqlalchemy.alembic_migrations import versions
from glance.db.sqlalchemy import models
from glance.db.sqlalchemy import models_glare
from glance.db.sqlalchemy import models_metadef
import glance.tests.utils as test_utils
@ -120,8 +119,6 @@ class ModelsMigrationSyncMixin(object):
    def get_metadata(self):
        for table in models_metadef.BASE_DICT.metadata.sorted_tables:
            models.BASE.metadata._add_table(table.name, table.schema, table)
        for table in models_glare.BASE.metadata.sorted_tables:
            models.BASE.metadata._add_table(table.name, table.schema, table)
        return models.BASE.metadata
    def get_engine(self):

View File

@ -20,11 +20,9 @@ from oslo_db import options
from glance.common import exception
import glance.db.sqlalchemy.api
from glance.db.sqlalchemy import models as db_models
from glance.db.sqlalchemy import models_glare as artifact_models
from glance.db.sqlalchemy import models_metadef as metadef_models
import glance.tests.functional.db as db_tests
from glance.tests.functional.db import base
from glance.tests.functional.db import base_glare
from glance.tests.functional.db import base_metadef
CONF = cfg.CONF
@ -47,11 +45,6 @@ def reset_db_metadef(db_api):
    metadef_models.register_models(db_api.get_engine())
def reset_db_artifacts(db_api):
    artifact_models.unregister_models(db_api.get_engine())
    artifact_models.register_models(db_api.get_engine())
class TestSqlAlchemyDriver(base.TestDriver,
                           base.DriverTests,
                           base.FunctionalInitWrapper):
@ -169,14 +162,6 @@ class TestDBPurge(base.DBPurgeTests,
        self.addCleanup(db_tests.reset)
class TestArtifacts(base_glare.ArtifactsTestDriver,
                    base_glare.ArtifactTests):
    def setUp(self):
        db_tests.load(get_db, reset_db_artifacts)
        super(TestArtifacts, self).setUp()
        self.addCleanup(db_tests.reset)
class TestMetadefSqlAlchemyDriver(base_metadef.TestMetadefDriver,
                                  base_metadef.MetadefDriverTests,
                                  base.FunctionalInitWrapper):

File diff suppressed because it is too large

View File

@ -1,77 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.common import exception
from glance.common import semver_db
from glance.tests import utils as test_utils
class SemVerTestCase(test_utils.BaseTestCase):
def test_long_conversion(self):
initial = '1.2.3-beta+07.17.2014'
v = semver_db.parse(initial)
l, prerelease, build = v.__composite_values__()
v2 = semver_db.DBVersion(l, prerelease, build)
self.assertEqual(initial, str(v2))
def test_major_comparison_as_long(self):
v1 = semver_db.parse("1.1.100")
v2 = semver_db.parse("2.0.0")
self.assertTrue(v2.__composite_values__()[0] >
v1.__composite_values__()[0])
def test_minor_comparison_as_long(self):
v1 = semver_db.parse("1.1.100")
v2 = semver_db.parse("2.0.0")
self.assertTrue(v2.__composite_values__()[0] >
v1.__composite_values__()[0])
def test_patch_comparison_as_long(self):
v1 = semver_db.parse("1.1.1")
v2 = semver_db.parse("1.1.100")
self.assertTrue(v2.__composite_values__()[0] >
v1.__composite_values__()[0])
def test_label_comparison_as_long(self):
v1 = semver_db.parse("1.1.1-alpha")
v2 = semver_db.parse("1.1.1")
self.assertTrue(v2.__composite_values__()[0] >
v1.__composite_values__()[0])
def test_label_comparison_as_string(self):
versions = [
semver_db.parse("1.1.1-0.10.a.23.y.255").__composite_values__()[1],
semver_db.parse("1.1.1-0.10.z.23.x.255").__composite_values__()[1],
semver_db.parse("1.1.1-0.10.z.23.y.255").__composite_values__()[1],
semver_db.parse("1.1.1-0.10.z.23.y.256").__composite_values__()[1],
semver_db.parse("1.1.1-0.10.z.24.y.255").__composite_values__()[1],
semver_db.parse("1.1.1-0.11.z.24.y.255").__composite_values__()[1],
semver_db.parse("1.1.1-1.11.z.24.y.255").__composite_values__()[1],
semver_db.parse("1.1.1-alp.1.2.3.4.5.6").__composite_values__()[1]]
for i in range(len(versions) - 1):
self.assertLess(versions[i], versions[i + 1])
def test_too_large_version(self):
version1 = '1.1.65536'
version2 = '1.65536.1'
version3 = '65536.1.1'
self.assertRaises(exception.InvalidVersion, semver_db.parse, version1)
self.assertRaises(exception.InvalidVersion, semver_db.parse, version2)
self.assertRaises(exception.InvalidVersion, semver_db.parse, version3)
def test_too_long_numeric_segments(self):
version = semver_db.parse('1.0.0-alpha.1234567')
self.assertRaises(exception.InvalidVersion,
version.__composite_values__)
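The bounds checked above (segments rejected at 65536, i.e. 2**16) suggest the numeric part of a version is packed into one integer, 16 bits per segment, so that plain integer comparison matches semver precedence for the major.minor.patch part. A hedged sketch of such packing (not the actual semver_db implementation):

    def pack_version(major, minor, patch):
        # assumes each segment < 2**16, as the tests above enforce
        for seg in (major, minor, patch):
            if seg >= 1 << 16:
                raise ValueError('version segment too large')
        return (major << 32) | (minor << 16) | patch

    assert pack_version(1, 1, 100) < pack_version(2, 0, 0)
    assert pack_version(1, 1, 1) < pack_version(1, 1, 100)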

View File

@ -24,10 +24,8 @@ import oslo_utils.importutils
import glance.async
from glance.async import taskflow_executor
from glance.common import exception
from glance.common.glare import definitions
from glance.common import timeutils
from glance import domain
from glance.glare import domain as artifacts_domain
import glance.tests.utils as test_utils
@ -575,22 +573,3 @@ class TestTaskExecutorFactory(test_utils.BaseTestCase):
        # NOTE(flaper87): "eventlet" executor. short name to avoid > 79.
        te_evnt = task_executor_factory.new_task_executor(context)
        self.assertIsInstance(te_evnt, taskflow_executor.TaskExecutor)
class TestArtifact(definitions.ArtifactType):
prop1 = definitions.Dict()
prop2 = definitions.Integer(min_value=10)
class TestArtifactTypeFactory(test_utils.BaseTestCase):
def setUp(self):
super(TestArtifactTypeFactory, self).setUp()
context = mock.Mock(owner='me')
self.factory = artifacts_domain.ArtifactFactory(context, TestArtifact)
def test_new_artifact_min_params(self):
artifact = self.factory.new_artifact("foo", "1.0.0-alpha")
self.assertEqual('creating', artifact.state)
self.assertEqual('me', artifact.owner)
self.assertIsNotNone(artifact.id)

View File

@ -1,169 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from stevedore import extension
from glance.common import exception
from glance.common.glare import loader
from glance.contrib.plugins.artifacts_sample.v1 import artifact as art1
from glance.contrib.plugins.artifacts_sample.v2 import artifact as art2
from glance.tests import utils
class MyArtifactDuplicate(art1.MyArtifact):
__type_version__ = '1.0.1'
__type_name__ = 'MyArtifact'
class MyArtifactOk(art1.MyArtifact):
__type_version__ = '1.0.2'
__type_name__ = 'MyArtifact'
class TestArtifactsLoader(utils.BaseTestCase):
def setUp(self):
self._setup_loader([('MyArtifact', art1.MyArtifact)])
super(TestArtifactsLoader, self).setUp()
def _setup_loader(self, artifacts):
self.loader = None
self.extensions = [
extension.Extension(
name=a[0],
entry_point=mock.Mock(),
plugin=a[1],
obj=None,
)
for a in artifacts
]
test_plugins = extension.ExtensionManager.make_test_instance(
extensions=self.extensions,
propagate_map_exceptions=True,
)
self.loader = loader.ArtifactsPluginLoader(
'glance.artifacts.types',
test_plugins=test_plugins,
)
    def test_load(self):
        """
        Plugins can be loaded as entrypoint=single plugin and
        entrypoint=[a, list, of, plugins]
        """
        # single version
        self.assertEqual(1, len(self.loader.mgr.extensions))
        self.assertEqual(art1.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact'))
        # entrypoint = [a, list]
        self._setup_loader([
            ('MyArtifact', MyArtifactOk),
            ('MyArtifact', art2.MyArtifact),
            ('MyArtifact', art1.MyArtifact),
        ])
        self.assertEqual(3, len(self.loader.mgr.extensions))
        # returns the plugin with the latest version
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact'))
        self.assertEqual(art1.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '1.0.1'))

    def test_basic_loader_func(self):
        """Test public methods of PluginLoader class here"""
        # type_version 2 == 2.0 == 2.0.0
        self._setup_loader([('MyArtifact', art2.MyArtifact)])
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact'))
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '2.0'))
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '2.0.0'))
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '2'))
        # now make sure that get_class_by_typename works as well
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_typename('MyArtifact'))
        self.assertEqual(art2.MyArtifact,
                         self.loader.get_class_by_typename('MyArtifact', '2'))
    def test_config_validation(self):
        """
        Plugins can be loaded only under certain conditions:
            * the entry point name equals the type_name
            * no plugin with the same type_name and version has already
              been loaded
        """
        # here the artifacts-specific validation is checked
        self.assertRaises(exception.ArtifactNonMatchingTypeName,
                          self._setup_loader,
                          [('non_matching_name', art1.MyArtifact)])
        # make sure this call is ok
        self._setup_loader([('MyArtifact', art1.MyArtifact)])
        art_type = self.loader.get_class_by_endpoint('myartifact')
        self.assertEqual('MyArtifact', art_type.metadata.type_name)
        self.assertEqual('1.0.1', art_type.metadata.type_version)
        # now try to add a duplicate artifact with the same type_name and
        # type_version as one that is already loaded
        self.assertEqual(art_type.metadata.type_version,
                         MyArtifactDuplicate.metadata.type_version)
        self.assertEqual(art_type.metadata.type_name,
                         MyArtifactDuplicate.metadata.type_name)
        # should raise an exception as (name, version) is not unique
        self.assertRaises(
            exception.ArtifactDuplicateNameTypeVersion, self._setup_loader,
            [('MyArtifact', art1.MyArtifact),
             ('MyArtifact', MyArtifactDuplicate)])
        # two artifacts with the same name but different versions coexist fine
        self.assertEqual('MyArtifact', MyArtifactOk.metadata.type_name)
        self.assertNotEqual(art_type.metadata.type_version,
                            MyArtifactOk.metadata.type_version)
        self._setup_loader([('MyArtifact', art1.MyArtifact),
                            ('MyArtifact', MyArtifactOk)])
    def test_check_function(self):
        """
        A test to show that the plugin-load specific options in
        artifacts.conf are correctly processed:
            * no plugins can be loaded if load_enabled = False
            * if an available_plugins list is given, only the plugins
              specified in it can be loaded
        """
        self.config(load_enabled=False)
        self._setup_loader([('MyArtifact', art1.MyArtifact)])
        checker = self.loader._gen_check_func()
        self.assertRaises(
            exception.ArtifactLoadError,
            checker,
            self.extensions[0],
        )
        self.config(load_enabled=True, available_plugins=['MyArtifact-1.0.2'])
        self._setup_loader([('MyArtifact', art1.MyArtifact)])
        checker = self.loader._gen_check_func()
        self.assertRaises(
            exception.ArtifactLoadError,
            checker,
            self.extensions[0],
        )
        self._setup_loader([('MyArtifact', MyArtifactOk)])
        # make sure that plugin_map has the expected plugin
        self.assertEqual(MyArtifactOk,
                         self.loader.get_class_by_endpoint('myartifact',
                                                           '1.0.2'))
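Taken together, these tests describe a version-aware registry: lookups without a version return the latest plugin, '2' and '2.0' are equivalent to '2.0.0', and a duplicate (type_name, type_version) pair is rejected. The toy model below reproduces those rules only — it is not the removed ArtifactsPluginLoader, and it leans on the semantic_version library whose dependency is dropped from requirements.txt later in this diff:

import semantic_version


class DuplicateNameTypeVersion(Exception):
    pass


class ToyPluginRegistry(object):
    """Reproduces the (name, version) rules the tests above exercise."""

    def __init__(self):
        self._by_endpoint = {}  # endpoint -> {Version: plugin class}

    def register(self, endpoint, version, plugin):
        versions = self._by_endpoint.setdefault(endpoint, {})
        key = semantic_version.Version.coerce(version)
        if key in versions:
            # (type_name, type_version) must be globally unique
            raise DuplicateNameTypeVersion(endpoint, str(key))
        versions[key] = plugin

    def get(self, endpoint, version=None):
        versions = self._by_endpoint[endpoint]
        if version is None:
            # no version requested: hand back the latest one
            return versions[max(versions)]
        # coerce() maps '2' and '2.0' to '2.0.0', which is the
        # equivalence test_basic_loader_func depends on
        return versions[semantic_version.Version.coerce(version)]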

File diff suppressed because it is too large


@@ -1,71 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from datetime import datetime

from glance.common.glare import definitions
import glance.context
from glance.glare.domain import proxy
from glance.glare import location
from glance.tests.unit import utils as unit_test_utils
from glance.tests import utils

BASE_URI = 'http://storeurl.com/container'
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
UUID2 = '971ec09a-8067-4bc8-a91f-ae3557f1c4c7'
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
TENANT3 = '228c6da5-29cd-4d67-9457-ed632e083fc0'


class ArtifactStub(definitions.ArtifactType):
    file = definitions.BinaryObject()
    file_list = definitions.BinaryObjectList()


class TestStoreArtifact(utils.BaseTestCase):
    def setUp(self):
        self.store_api = unit_test_utils.FakeStoreAPI()
        self.store_utils = unit_test_utils.FakeStoreUtils(self.store_api)
        ts = datetime.now()
        self.artifact_stub = ArtifactStub(id=UUID2, state='creating',
                                          created_at=ts, updated_at=ts,
                                          version='1.0', owner='me',
                                          name='foo')
        super(TestStoreArtifact, self).setUp()

    def test_set_blob_data(self):
        context = glance.context.RequestContext(user=USER1)
        helper = proxy.ArtifactHelper(location.ArtifactProxy,
                                      proxy_kwargs={
                                          'context': context,
                                          'store_api': self.store_api,
                                          'store_utils': self.store_utils
                                      })
        artifact = helper.proxy(self.artifact_stub)
        artifact.file = ('YYYY', 4)
        self.assertEqual(4, artifact.file.size)

    def test_set_bloblist_data(self):
        context = glance.context.RequestContext(user=USER1)
        helper = proxy.ArtifactHelper(location.ArtifactProxy,
                                      proxy_kwargs={
                                          'context': context,
                                          'store_api': self.store_api,
                                          'store_utils': self.store_utils
                                      })
        artifact = helper.proxy(self.artifact_stub)
        artifact.file_list.append(('YYYY', 4))
        self.assertEqual(4, artifact.file_list[0].size)
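What these two tests really check is the proxy pattern: assigning a (data, size) tuple to a blob attribute pushes the payload into the backing store and leaves behind a record holding the location and verified size. A minimal, self-contained sketch of that pattern — the class and store names here are invented for illustration, not the removed glance.glare proxies:

class FakeStore(object):
    """Stand-in store that just reports where data would have gone."""

    def add(self, data, size):
        return 'fake://container/%d' % size


class BlobRecord(object):
    """Holds where a blob landed and how big it was."""

    def __init__(self, loc, size):
        self.location = loc
        self.size = size


class StoreBackedArtifact(object):
    def __init__(self, store_api):
        self._store_api = store_api
        self._file = None

    @property
    def file(self):
        return self._file

    @file.setter
    def file(self, value):
        data, size = value
        # hand the payload to the store; keep only location and size
        loc = self._store_api.add(data, size)
        self._file = BlobRecord(loc, size)


artifact = StoreBackedArtifact(FakeStore())
artifact.file = ('YYYY', 4)
assert artifact.file.size == 4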


@@ -0,0 +1,41 @@
---
upgrade:
  - |
    Code for the OpenStack Artifacts Service (`Glare`_) and its EXPERIMENTAL
    API has been removed from the Glance codebase, as it was relocated into
    an independent `Glare`_ project repository during a previous release
    cycle. The database upgrade for the Glance Pike release drops the Glare
    tables (named 'artifacts' and 'artifact_*') from the Glance database.

    OpenStack deployments, packagers, and deployment projects which provided
    Glare should have begun to consume Glare from its own `Glare`_ repository
    during the Newton and Ocata releases. With the Pike release, it is no
    longer possible to consume Glare code from the Glance repository.

    .. _`Glare`: https://git.openstack.org/cgit/openstack/glare
other:
  - |
    Code for the OpenStack Artifacts Service (Glare) and its EXPERIMENTAL API
    has been `removed`_ from the Glance codebase.

    The Artifacts API was an EXPERIMENTAL API that ran on the Glance service
    endpoint as ``/v3`` in the Liberty release. In the Mitaka release, the
    Glance ``/v3`` EXPERIMENTAL API was deprecated and the Artifacts Service
    ran on its own endpoint (completely independent from the Glance service
    endpoint) as an EXPERIMENTAL API, versioned as ``v0.1``. In both the
    Liberty and Mitaka releases, Glare ran on code stored in the Glance code
    repository and used its own tables in the Glance database.

    In the Newton release, the Glare code was relocated into its own `Glare`_
    project repository. Also in the Newton release, Glare ran an EXPERIMENTAL
    Artifacts API versioned as ``v1.0`` on its own endpoint and used its own
    database.

    For the Pike release, the legacy Glare code has been removed from the
    Glance code repository and the legacy 'artifacts' and 'artifact_*'
    database tables are dropped from the Glance database. As the Artifacts
    service API was an EXPERIMENTAL API in Glance and has not used the Glance
    database since Mitaka, no provision is made for migrating data from the
    Glance database to the Glare database.

    .. _`removed`: http://specs.openstack.org/openstack/glance-specs/specs/mitaka/implemented/deprecate-v3-api.html
    .. _`Glare`: https://git.openstack.org/cgit/openstack/glare
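The table drop described in this note is the kind of change an Alembic upgrade script expresses in a few lines. A hedged sketch follows — the revision identifiers and the exact child-table list are assumptions for illustration, not the migration shipped with this change:

from alembic import op

# placeholder revision identifiers, for illustration only
revision = 'pike_drop_glare'
down_revision = 'ocata_contract'


def upgrade():
    # drop the child tables that carry foreign keys onto 'artifacts' first
    for table in ('artifact_blob_locations', 'artifact_blobs',
                  'artifact_dependencies', 'artifact_properties',
                  'artifact_tags'):
        op.drop_table(table)
    # then the parent table itself
    op.drop_table('artifacts')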


@@ -51,9 +51,6 @@ osprofiler>=1.4.0 # Apache-2.0
 glance-store>=0.18.0 # Apache-2.0
-
-# Artifact repository
-semantic-version>=2.3.1 # BSD
 debtcollector>=1.2.0 # Apache-2.0
 cryptography!=1.3.0,>=1.0 # BSD/Apache-2.0
 cursive>=0.1.1 # Apache-2.0