Merge "Add Share Affinity/Anti-Affinity Scheduler Filters"
commit c419a7bad0
@@ -2200,6 +2200,15 @@ revert_to_snapshot_support_share_capability:
  required: true
  type: boolean
  min_version: 2.27
scheduler_hints:
  description: |
    One or more scheduler_hints key and value pairs, as a dictionary of
    strings. Valid keys are same_host and different_host; the value of each
    key must be a comma-separated list of share IDs.
  in: body
  required: false
  type: object
  min_version: 2.65
security_service_dns_ip:
  description: |
    The DNS IP address that is used inside the project network.
@@ -12,6 +12,10 @@
        "metadata": {
            "project": "my_app",
            "aim": "doc"
        },
        "scheduler_hints": {
            "same_host": "d9c66489-cf02-4156-b0f2-527f3211b243,4ffee55f-ba98-42d2-a8ce-e7cecb169182",
            "different_host": "903685eb-f242-4105-903d-4bef2db94be4"
        }
    }
}
@@ -370,6 +370,7 @@ Request
   - metadata: metadata
   - share_network_id: share_network_id_request
   - availability_zone: availability_zone_request
   - scheduler_hints: scheduler_hints

Request example
---------------
@@ -171,13 +171,14 @@ REST_API_VERSION_HISTORY = """
        actions on the share network's endpoint:
        'update_security_service', 'update_security_service_check' and
        'add_security_service_check'.
    * 2.65 - Added ability to set scheduler hints via the share create API.
"""

# The minimum and maximum versions of the API supported
# The default api version request is defined to be the
# minimum version of the API supported.
_MIN_API_VERSION = "2.0"
_MAX_API_VERSION = "2.63"
_MAX_API_VERSION = "2.65"
DEFAULT_API_VERSION = _MIN_API_VERSION
@@ -358,3 +358,8 @@ user documentation.
  Added 'force' field to extend share api, which can extend a share directly
  without going through the share scheduler.

2.65
----
  Added the ability to specify "scheduler_hints" in the request body of the
  POST /shares request. These hints will invoke the Affinity/Anti-Affinity
  scheduler filters during share creation and share migration.
@@ -235,7 +235,8 @@ class ShareMixin(object):
    @wsgi.Controller.authorize('create')
    def _create(self, req, body,
                check_create_share_from_snapshot_support=False,
                check_availability_zones_extra_spec=False):
                check_availability_zones_extra_spec=False,
                scheduler_hints=None):
        """Creates a new share."""
        context = req.environ['manila.context']

@@ -412,6 +413,9 @@ class ShareMixin(object):
        kwargs['share_type'] = share_type
        if share_network_id:
            kwargs['share_network_id'] = share_network_id

        kwargs['scheduler_hints'] = scheduler_hints

        new_share = self.share_api.create(context,
                                          share_proto,
                                          size,
@@ -178,8 +178,20 @@ class ShareController(shares.ShareMixin,

        return data

    @wsgi.Controller.api_version("2.48")
    @wsgi.Controller.api_version("2.65")
    def create(self, req, body):
        if not self.is_valid_body(body, 'share'):
            raise exc.HTTPUnprocessableEntity()

        share = body['share']
        scheduler_hints = share.pop('scheduler_hints', None)
        return self._create(req, body,
                            check_create_share_from_snapshot_support=True,
                            check_availability_zones_extra_spec=True,
                            scheduler_hints=scheduler_hints)

    @wsgi.Controller.api_version("2.48", "2.64")  # noqa
    def create(self, req, body):  # pylint: disable=function-redefined  # noqa F811
        return self._create(req, body,
                            check_create_share_from_snapshot_support=True,
                            check_availability_zones_extra_spec=True)
@@ -766,13 +766,16 @@ def security_service_get_all_by_project(context, project_id):


####################


def share_metadata_get(context, share_id):
    """Get all metadata for a share."""
    return IMPL.share_metadata_get(context, share_id)


def share_metadata_get_item(context, share_id, key):
    """Get the metadata item for a given key and a given share."""
    return IMPL.share_metadata_get_item(context, share_id, key)


def share_metadata_delete(context, share_id, key):
    """Delete the given metadata item."""
    IMPL.share_metadata_delete(context, share_id, key)

@@ -783,6 +786,11 @@ def share_metadata_update(context, share, metadata, delete):
    IMPL.share_metadata_update(context, share, metadata, delete)


def share_metadata_update_item(context, share_id, item):
    """Update a metadata item containing key and value for the given share."""
    IMPL.share_metadata_update_item(context, share_id, item)


###################

def share_export_location_get_by_uuid(context, export_location_uuid,
@@ -3416,6 +3416,22 @@ def share_metadata_get(context, share_id):
    return _share_metadata_get(context, share_id)


@require_context
@require_share_exists
def share_metadata_get_item(context, share_id, key, session=None):
    session = session or get_session()
    try:
        row = _share_metadata_get_item(context, share_id, key,
                                       session=session)
    except exception.ShareMetadataNotFound:
        raise exception.ShareMetadataNotFound()

    result = {}
    result[row['key']] = row['value']

    return result


@require_context
@require_share_exists
def share_metadata_delete(context, share_id, key):

@@ -3429,6 +3445,12 @@ def share_metadata_update(context, share_id, metadata, delete):
    return _share_metadata_update(context, share_id, metadata, delete)


@require_context
@require_share_exists
def share_metadata_update_item(context, share_id, item, session=None):
    return _share_metadata_update(context, share_id, item, delete=False)


def _share_metadata_get_query(context, share_id, session=None):
    return (model_query(context, models.ShareMetadata, session=session,
                        read_deleted="no").
@@ -175,7 +175,7 @@ class InvalidParameterValue(Invalid):


class InvalidUUID(Invalid):
    message = _("Expected a uuid but received %(uuid)s.")
    message = _("%(uuid)s is not a valid uuid.")


class InvalidDriverMode(Invalid):

@@ -546,6 +546,10 @@ class ShareNotFound(NotFound):
    message = _("Share %(share_id)s could not be found.")


class ShareInstanceNotFound(NotFound):
    message = _("Share instance %(share_instance_id)s could not be found.")


class ShareSnapshotNotFound(NotFound):
    message = _("Snapshot %(snapshot_id)s could not be found.")
@@ -23,6 +23,7 @@ filters and weighing functions.
from oslo_config import cfg
from oslo_log import log

from manila.db import api as db_api
from manila import exception
from manila.i18n import _
from manila.message import api as message_api

@@ -34,6 +35,11 @@ from manila.share import share_types
CONF = cfg.CONF
LOG = log.getLogger(__name__)

AFFINITY_HINT = 'same_host'
ANTI_AFFINITY_HINT = 'different_host'
AFFINITY_KEY = "__affinity_same_host"
ANTI_AFFINITY_KEY = "__affinity_different_host"


class FilterScheduler(base.Scheduler):
    """Scheduler that can be used for filtering and weighing."""

@@ -214,7 +220,8 @@ class FilterScheduler(base.Scheduler):
            'replication_domain': replication_domain,
        })

        self.populate_filter_properties_share(request_spec, filter_properties)
        self.populate_filter_properties_share(context, request_spec,
                                              filter_properties)

        return filter_properties, share_properties

@@ -315,7 +322,17 @@ class FilterScheduler(base.Scheduler):
                "exc": "exc"
            })

    def populate_filter_properties_share(self, request_spec,
    def populate_filter_properties_share_scheduler_hint(self, context,
                                                        share_id, hints,
                                                        key, hint):
        try:
            result = db_api.share_metadata_get_item(context, share_id, key)
        except exception.ShareMetadataNotFound:
            pass
        else:
            hints.update({hint: result.get(key)})

    def populate_filter_properties_share(self, context, request_spec,
                                         filter_properties):
        """Stuff things into filter_properties.

@@ -331,6 +348,25 @@ class FilterScheduler(base.Scheduler):
        filter_properties['metadata'] = shr.get('metadata')
        filter_properties['snapshot_id'] = shr.get('snapshot_id')

        share_id = request_spec.get('share_id', None)
        if not share_id:
            filter_properties['scheduler_hints'] = {}
            return

        try:
            db_api.share_get(context, share_id)
        except exception.NotFound:
            filter_properties['scheduler_hints'] = {}
        else:
            hints = {}
            self.populate_filter_properties_share_scheduler_hint(
                context, share_id, hints,
                AFFINITY_KEY, AFFINITY_HINT)
            self.populate_filter_properties_share_scheduler_hint(
                context, share_id, hints,
                ANTI_AFFINITY_KEY, ANTI_AFFINITY_HINT)
            filter_properties['scheduler_hints'] = hints

    def schedule_create_share_group(self, context, share_group_id,
                                    request_spec, filter_properties):
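A standalone sketch (not part of this change; the metadata values are illustrative) of how the stored affinity metadata is translated back into scheduler hints when an existing share, e.g. one being migrated, passes through populate_filter_properties_share():

    # Hypothetical metadata previously stored on the share by the share API.
    AFFINITY_KEY = "__affinity_same_host"
    ANTI_AFFINITY_KEY = "__affinity_different_host"
    stored_metadata = {AFFINITY_KEY: "uuid-a,uuid-b"}

    # The scheduler copies each present key into the hint name the filters
    # understand ('same_host' / 'different_host').
    hints = {}
    if AFFINITY_KEY in stored_metadata:
        hints['same_host'] = stored_metadata[AFFINITY_KEY]
    if ANTI_AFFINITY_KEY in stored_metadata:
        hints['different_host'] = stored_metadata[ANTI_AFFINITY_KEY]

    filter_properties = {'scheduler_hints': hints}
    print(filter_properties)  # {'scheduler_hints': {'same_host': 'uuid-a,uuid-b'}}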
manila/scheduler/filters/affinity.py (new file, 121 lines)
@@ -0,0 +1,121 @@
# Copyright (c) 2021 SAP.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_log import log

from manila import exception
from manila.scheduler.filters import base_host
from manila.share import api

LOG = log.getLogger(__name__)

AFFINITY_FILTER = 'same_host'
ANTI_AFFINITY_FILTER = 'different_host'


class AffinityBaseFilter(base_host.BaseHostFilter):
    """Base class of affinity filters."""
    _filter_type = None

    def __init__(self):
        self.share_api = api.API()

    def filter_all(self, filter_obj_list, filter_properties):
        # _filter_type should be defined in subclass
        if self._filter_type is None:
            raise AffinityFilterTypeNotSetError

        try:
            filter_properties = self._validate(filter_properties)
        except SchedulerHintsNotSet:
            # AffinityFilter/AntiAffinityFilter is skipped if the
            # corresponding hint is not set. If "scheduler_hints" is not set
            # at all, both filters are skipped.
            return filter_obj_list
        except (exception.InvalidUUID,
                exception.ShareNotFound,
                exception.ShareInstanceNotFound) as e:
            # Stop scheduling the share when the above errors are caught
            LOG.error('%(filter_name)s: %(error)s', {
                'filter_name': self.__class__.__name__,
                'error': e})
            return None
        else:
            # Return the list of hosts which pass host_passes(), which is
            # overridden in AffinityFilter and AntiAffinityFilter.
            return [obj for obj in filter_obj_list
                    if self._filter_one(obj, filter_properties)]

    def _validate(self, filter_properties):
        context = filter_properties['context']
        hints = filter_properties.get('scheduler_hints')

        if hints is None:
            raise SchedulerHintsNotSet
        else:
            share_uuids = hints.get(self._filter_type)
            if share_uuids is None:
                raise SchedulerHintsNotSet

        share_uuids = share_uuids.split(",")

        filter_properties['scheduler_hints'][self._filter_type] = []

        filtered_hosts = []
        for uuid in share_uuids:
            try:
                share = self.share_api.get(context, uuid)
            except exception.NotFound:
                raise exception.ShareNotFound(uuid)
            instances = share.get('instances')
            if len(instances) == 0:
                raise exception.ShareInstanceNotFound(share_instance_id=uuid)
            filtered_hosts.append(
                [instance.get('host') for instance in instances])

        if self._filter_type == AFFINITY_FILTER:
            filter_properties['scheduler_hints'][self._filter_type] = list(
                set.intersection(*map(set, filtered_hosts)))
        else:
            filter_properties['scheduler_hints'][self._filter_type] = list(
                set.union(*map(set, filtered_hosts)))

        return filter_properties


class AffinityFilter(AffinityBaseFilter):
    _filter_type = AFFINITY_FILTER

    def host_passes(self, host_state, filter_properties):
        allowed_hosts = \
            filter_properties['scheduler_hints'][self._filter_type]
        return host_state.host in allowed_hosts


class AntiAffinityFilter(AffinityBaseFilter):
    _filter_type = ANTI_AFFINITY_FILTER

    def host_passes(self, host_state, filter_properties):
        forbidden_hosts = \
            filter_properties['scheduler_hints'][self._filter_type]
        return host_state.host not in forbidden_hosts


class SchedulerHintsNotSet(Exception):
    pass


class AffinityFilterTypeNotSetError(Exception):
    pass
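For intuition, a minimal standalone sketch (not part of the patch; hosts and shares are made up) of the set arithmetic _validate() applies: the affinity hint keeps only hosts common to every hinted share, while the anti-affinity hint forbids any host holding at least one of them:

    # Hosts currently holding instances of each share named in the hint.
    hosts_per_hinted_share = [
        {'host1', 'host2'},   # instances of hinted share A
        {'host1'},            # instances of hinted share B
    ]

    # AffinityFilter ('same_host'): candidates must hold *all* hinted shares.
    allowed = set.intersection(*hosts_per_hinted_share)    # {'host1'}

    # AntiAffinityFilter ('different_host'): drop hosts holding *any* of them.
    forbidden = set.union(*hosts_per_hinted_share)          # {'host1', 'host2'}

    candidates = {'host1', 'host2', 'host3'}
    print(candidates & allowed)      # affinity survivors: {'host1'}
    print(candidates - forbidden)    # anti-affinity survivors: {'host3'}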
@@ -48,6 +48,8 @@ host_manager_opts = [
                    'DriverFilter',
                    'ShareReplicationFilter',
                    'CreateFromSnapshotFilter',
                    'AffinityFilter',
                    'AntiAffinityFilter',
                ],
                help='Which filter class names to use for filtering hosts '
                     'when not specified in the request.'),
@@ -26,6 +26,7 @@ from oslo_log import log
from oslo_utils import excutils
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six

from manila.api import common as api_common

@@ -64,6 +65,11 @@ LOG = log.getLogger(__name__)
GB = 1048576 * 1024
QUOTAS = quota.QUOTAS

AFFINITY_HINT = 'same_host'
ANTI_AFFINITY_HINT = 'different_host'
AFFINITY_KEY = "__affinity_same_host"
ANTI_AFFINITY_KEY = "__affinity_different_host"


def locked_security_service_update_operation(operation):
    """Lock decorator for security service operation.

@@ -184,7 +190,7 @@ class API(base.Base):
               snapshot_id=None, availability_zone=None, metadata=None,
               share_network_id=None, share_type=None, is_public=False,
               share_group_id=None, share_group_snapshot_member=None,
               availability_zones=None):
               availability_zones=None, scheduler_hints=None):
        """Create new share."""

        self._check_metadata_properties(metadata)

@@ -367,6 +373,8 @@ class API(base.Base):
            QUOTAS.rollback(
                context, reservations, share_type_id=share_type_id)

        self.save_scheduler_hints(context, share, scheduler_hints)

        host = None
        snapshot_host = None
        if snapshot:

@@ -384,7 +392,7 @@ class API(base.Base):
            availability_zone=availability_zone, share_group=share_group,
            share_group_snapshot_member=share_group_snapshot_member,
            share_type_id=share_type_id, availability_zones=availability_zones,
            snapshot_host=snapshot_host)
            snapshot_host=snapshot_host, scheduler_hints=scheduler_hints)

        # Retrieve the share with instance details
        share = self.db.share_get(context, share['id'])

@@ -461,7 +469,7 @@ class API(base.Base):
                       host=None, availability_zone=None,
                       share_group=None, share_group_snapshot_member=None,
                       share_type_id=None, availability_zones=None,
                       snapshot_host=None):
                       snapshot_host=None, scheduler_hints=None):
        request_spec, share_instance = (
            self.create_share_instance_and_get_request_spec(
                context, share, availability_zone=availability_zone,

@@ -493,14 +501,17 @@ class API(base.Base):
                share_instance,
                host,
                request_spec=request_spec,
                filter_properties={},
                filter_properties={'scheduler_hints': scheduler_hints},
                snapshot_id=share['snapshot_id'],
            )
        else:
            # Create share instance from scratch or from snapshot could happen
            # on hosts other than the source host.
            self.scheduler_rpcapi.create_share_instance(
                context, request_spec=request_spec, filter_properties={})
                context,
                request_spec=request_spec,
                filter_properties={'scheduler_hints': scheduler_hints},
            )

        return share_instance

@@ -961,6 +972,7 @@ class API(base.Base):
                       'terminated_at': timeutils.utcnow()}
        share_ref = self.db.share_update(context, share['id'], update_data)

        self.delete_scheduler_hints(context, share)
        self.share_rpcapi.unmanage_share(context, share_ref)

        # NOTE(u_glide): We should update 'updated_at' timestamp of

@@ -1170,6 +1182,8 @@ class API(base.Base):
                       "members.") % share_group_snapshot_members_count)
                raise exception.InvalidShare(reason=msg)
        self._check_is_share_busy(share)
        self.delete_scheduler_hints(context, share)

        for share_instance in share.instances:
            if share_instance['host']:
                self.delete_instance(context, share_instance, force=force)

@@ -2052,6 +2066,81 @@ class API(base.Base):
        """Delete the given metadata item from a share."""
        self.db.share_metadata_delete(context, share['id'], key)

    def _validate_scheduler_hints(self, context, share, share_uuids):
        for uuid in share_uuids:
            if not uuidutils.is_uuid_like(uuid):
                raise exception.InvalidUUID(uuid=uuid)
            try:
                self.get(context, uuid)
            except (exception.NotFound, exception.PolicyNotAuthorized):
                raise exception.ShareNotFound(share_id=uuid)

    def _save_scheduler_hints(self, context, share, share_uuids, key):
        share_uuids = share_uuids.split(",")

        self._validate_scheduler_hints(context, share, share_uuids)
        val_uuids = None
        for uuid in share_uuids:
            try:
                result = self.db.share_metadata_get_item(context, uuid, key)
            except exception.ShareMetadataNotFound:
                item = {key: share['id']}
            else:
                existing_uuids = result.get(key, "")
                item = {key:
                        ','.join(existing_uuids.split(',') + [share['id']])}
            self.db.share_metadata_update_item(context, uuid, item)
            if not val_uuids:
                val_uuids = uuid
            else:
                val_uuids = val_uuids + "," + uuid

        if val_uuids:
            item = {key: val_uuids}
            self.db.share_metadata_update_item(context, share['id'], item)

    def save_scheduler_hints(self, context, share, scheduler_hints=None):
        if scheduler_hints is None:
            return

        same_host_uuids = scheduler_hints.get(AFFINITY_HINT, None)
        different_host_uuids = scheduler_hints.get(ANTI_AFFINITY_HINT, None)

        if same_host_uuids:
            self._save_scheduler_hints(context, share, same_host_uuids,
                                       AFFINITY_KEY)
        if different_host_uuids:
            self._save_scheduler_hints(context, share, different_host_uuids,
                                       ANTI_AFFINITY_KEY)

    def _delete_scheduler_hints(self, context, share, key):
        try:
            result = self.db.share_metadata_get_item(context, share['id'],
                                                     key)
        except exception.ShareMetadataNotFound:
            return

        share_uuids = result.get(key, "").split(",")
        for uuid in share_uuids:
            try:
                result = self.db.share_metadata_get_item(context, uuid, key)
            except exception.ShareMetadataNotFound:
                continue

            new_val_uuids = [val_uuid for val_uuid
                             in result.get(key, "").split(",")
                             if val_uuid != share['id']]
            if not new_val_uuids:
                self.db.share_metadata_delete(context, uuid, key)
            else:
                item = {key: ','.join(new_val_uuids)}
                self.db.share_metadata_update_item(context, uuid, item)
        self.db.share_metadata_delete(context, share['id'], key)

    def delete_scheduler_hints(self, context, share):
        self._delete_scheduler_hints(context, share, AFFINITY_KEY)
        self._delete_scheduler_hints(context, share, ANTI_AFFINITY_KEY)

    def _check_is_share_busy(self, share):
        """Raises an exception if share is busy with an active task."""
        if share.is_busy:
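A worked example (an editorial sketch using plain dicts, not code from the patch) of the bookkeeping _save_scheduler_hints() performs: when a new share C is created with same_host="A,B", each referenced share gets C appended under the __affinity_same_host key, and C records the referenced shares under the same key:

    # Simulated share metadata store: share_id -> {metadata key: value}.
    metadata = {
        'A': {},
        'B': {'__affinity_same_host': 'X'},  # B already had an affinity to X
    }

    new_share = 'C'
    key = '__affinity_same_host'
    hinted = ['A', 'B']

    # Append the new share to every hinted share's affinity list ...
    for uuid in hinted:
        existing = metadata[uuid].get(key)
        metadata[uuid][key] = (existing + ',' + new_share) if existing \
            else new_share

    # ... and record the hinted shares on the new share itself.
    metadata[new_share] = {key: ','.join(hinted)}

    print(metadata['A'])  # {'__affinity_same_host': 'C'}
    print(metadata['B'])  # {'__affinity_same_host': 'X,C'}
    print(metadata['C'])  # {'__affinity_same_host': 'A,B'}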
@@ -560,7 +560,8 @@ class ShareAPITest(test.TestCase):
            is_public=False,
            metadata=None,
            snapshot_id=None,
            availability_zone=az_name)
            availability_zone=az_name,
            scheduler_hints=None)

    def test_share_create_with_sg_and_different_availability_zone(self):
        sg_id = 'fake_sg_id'
@@ -152,7 +152,7 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
            'share_properties': {'project_id': 1, 'size': 1},
            'share_instance_properties': {},
            'share_type': {'name': 'NFS'},
            'share_id': ['fake-id1'],
            'share_id': 'fake-id1',
        }
        self.assertRaises(exception.NoValidHost, sched.schedule_create_share,
                          fake_context, request_spec, {})

@@ -177,7 +177,7 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
            'share_properties': {'project_id': 1, 'size': 1},
            'share_instance_properties': {},
            'share_type': {'name': 'NFS'},
            'share_id': ['fake-id1'],
            'share_id': 'fake-id1',
        }
        self.assertRaises(exception.NoValidHost, sched.schedule_create_share,
                          fake_context, request_spec, {})
manila/tests/scheduler/filters/test_affinity.py (new file, 132 lines)
@@ -0,0 +1,132 @@
# Copyright (c) 2021 SAP.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
from unittest import mock

from manila import exception
from manila.scheduler.filters import affinity
from manila import test
from manila.tests.scheduler import fakes


fake_hosts = [
    fakes.FakeHostState('host1', {}),
    fakes.FakeHostState('host2', {}),
    fakes.FakeHostState('host3', {}),
]

fake_shares_1 = {
    'abb6e0ac-7c3e-4ce0-8a69-5a166d246882': {
        'instances': [
            {'host': fake_hosts[0].host}
        ]
    },
    '4de0cc74-450c-4468-8159-52128cf03407': {
        'instances': [
            {'host': fake_hosts[0].host}
        ]
    },
}

fake_shares_2 = {
    'c920fb61-e250-4c3c-a25d-1fdd9ca7cbc3': {
        'instances': [
            {'host': fake_hosts[1].host}
        ]
    },
}

fake_shares_3 = {
    '3923bebf-9825-4a66-971e-6092a9fe2dbb': {
        'instances': [
            {'host': fake_hosts[2].host}
        ]
    },
}


@ddt.ddt
class AffinityFilterTestCase(test.TestCase):
    """Test case for AffinityFilter."""

    def setUp(self):
        super(AffinityFilterTestCase, self).setUp()
        self.filter = affinity.AffinityFilter()
        self.anti_filter = affinity.AntiAffinityFilter()

    def _make_filter_hints(self, *hints):
        return {
            'context': None,
            'scheduler_hints': {'same_host': ','.join(list(hints))},
        }

    def _make_anti_filter_hints(self, *hints):
        return {
            'context': None,
            'scheduler_hints': {'different_host': ','.join(list(hints))},
        }

    def _fake_get(self, context, uuid):
        if uuid in fake_shares_1.keys():
            return fake_shares_1[uuid]
        if uuid in fake_shares_2.keys():
            return fake_shares_2[uuid]
        if uuid in fake_shares_3.keys():
            return fake_shares_3[uuid]
        raise exception.ShareNotFound(uuid)

    @ddt.data('b5c207da-ac0b-43b0-8691-c6c9e860199d')
    @mock.patch('manila.share.api.API.get')
    def test_affinity_share_not_found(self, unknown_id, mock_share_get):
        mock_share_get.side_effect = self._fake_get
        self.assertRaises(exception.ShareNotFound,
                          self.filter._validate,
                          self._make_filter_hints(unknown_id))

    @ddt.data(
        {'context': None},
        {'context': None, 'scheduler_hints': None},
        {'context': None, 'scheduler_hints': {}},
    )
    def test_affinity_scheduler_hint_not_set(self, hints):
        self.assertRaises(affinity.SchedulerHintsNotSet,
                          self.filter._validate, hints)

    @mock.patch('manila.share.api.API.get')
    def test_affinity_filter(self, mock_share_get):
        mock_share_get.side_effect = self._fake_get

        share_ids = fake_shares_1.keys()
        hints = self._make_filter_hints(*share_ids)
        valid_hosts = self.filter.filter_all(fake_hosts, hints)
        valid_hosts = [h.host for h in valid_hosts]

        self.assertIn('host1', valid_hosts)
        self.assertNotIn('host2', valid_hosts)
        self.assertNotIn('host3', valid_hosts)

    @mock.patch('manila.share.api.API.get')
    def test_anti_affinity_filter(self, mock_share_get):
        mock_share_get.side_effect = self._fake_get

        share_ids = fake_shares_2.keys()
        hints = self._make_anti_filter_hints(*share_ids)
        valid_hosts = self.anti_filter.filter_all(fake_hosts, hints)
        valid_hosts = [h.host for h in valid_hosts]

        self.assertIn('host1', valid_hosts)
        self.assertIn('host3', valid_hosts)
        self.assertNotIn('host2', valid_hosts)
@@ -825,7 +825,8 @@ class ShareAPITestCase(test.TestCase):
            self.context, share, share_network_id=fake_share_network_id,
            host=None, availability_zone=None, share_group=None,
            share_group_snapshot_member=None, share_type_id=None,
            availability_zones=expected_azs, snapshot_host=None
            availability_zones=expected_azs, snapshot_host=None,
            scheduler_hints=None
        )
        db_api.share_get.assert_called_once()

@@ -980,7 +981,7 @@ class ShareAPITestCase(test.TestCase):
            share_instance,
            host,
            request_spec=mock.ANY,
            filter_properties={},
            filter_properties={'scheduler_hints': None},
            snapshot_id=share['snapshot_id'],
        )
        self.assertFalse(

@@ -993,7 +994,8 @@ class ShareAPITestCase(test.TestCase):

        (self.api.scheduler_rpcapi.create_share_instance.
            assert_called_once_with(
                self.context, request_spec=mock.ANY, filter_properties={}))
                self.context, request_spec=mock.ANY,
                filter_properties={'scheduler_hints': None}))
        self.assertFalse(self.api.share_rpcapi.create_share_instance.called)

    def test_create_share_instance_from_snapshot(self):

@@ -2273,7 +2275,8 @@ class ShareAPITestCase(test.TestCase):
            availability_zone=az,
            share_group=None, share_group_snapshot_member=None,
            availability_zones=None,
            snapshot_host=snapshot['share']['instance']['host'])
            snapshot_host=snapshot['share']['instance']['host'],
            scheduler_hints=None)
        share_api.policy.check_policy.assert_called_once_with(
            self.context, 'share_snapshot', 'get_snapshot')
        quota.QUOTAS.reserve.assert_called_once_with(
releasenotes/notes/affinity-filter-747d3d7c51157172.yaml (new file, 14 lines)
@@ -0,0 +1,14 @@
---
features:
  - Add AffinityFilter and AntiAffinityFilter to manila's scheduler.
    These hard affinity and anti-affinity filters require the user to specify
    affinity/anti-affinity share IDs in the field
    "share.scheduler_hints.same_host" or
    "share.scheduler_hints.different_host" of the request payload when
    creating a manila share. The hints are stored as share metadata. The
    filter properties are populated from this metadata during share
    migration, so the filters will also be applied when migrating a manila
    share.
upgrade:
  - To add AffinityFilter and AntiAffinityFilter to an active deployment,
    their references must be added to the manila.scheduler.filters section in
    setup.cfg and they must be enabled in manila.conf.
@@ -44,6 +44,8 @@ console_scripts =
wsgi_scripts =
    manila-wsgi = manila.wsgi.wsgi:initialize_application
manila.scheduler.filters =
    AffinityFilter = manila.scheduler.filters.affinity:AffinityFilter
    AntiAffinityFilter = manila.scheduler.filters.affinity:AntiAffinityFilter
    AvailabilityZoneFilter = manila.scheduler.filters.availability_zone:AvailabilityZoneFilter
    CapabilitiesFilter = manila.scheduler.filters.capabilities:CapabilitiesFilter
    CapacityFilter = manila.scheduler.filters.capacity:CapacityFilter