Merge "Reorganize scheduler and merge code from Oslo incubator"
commit 0ed23b2f3e
@@ -76,7 +76,7 @@ fi
 # Common opts
 SHARE_NAME_PREFIX=${SHARE_NAME_PREFIX:-share-}
 MANILA_ENABLED_SHARE_PROTOCOLS=${ENABLED_SHARE_PROTOCOLS:-"NFS,CIFS"}
-MANILA_SCHEDULER_DRIVER=${MANILA_SCHEDULER_DRIVER:-manila.scheduler.filter_scheduler.FilterScheduler}
+MANILA_SCHEDULER_DRIVER=${MANILA_SCHEDULER_DRIVER:-manila.scheduler.drivers.filter.FilterScheduler}
 MANILA_SERVICE_SECGROUP="manila-service"

 # Following env var defines whether to apply downgrade migrations setting up DB or not.
@@ -317,7 +317,7 @@ Open Manila configuration file `/etc/manila/manila.conf`::
 share_name_template = share-%s

 # Set scheduler driver with usage of filters. Recommended.
-scheduler_driver = manila.scheduler.filter_scheduler.FilterScheduler
+scheduler_driver = manila.scheduler.drivers.filter.FilterScheduler

 # Set following two opts to ‘True’ to get maximum info in logging.
 verbose = True
@@ -1,16 +0,0 @@
-oslo-incubator
---------------
-
-A number of modules from oslo-incubator are imported into this project.
-You can clone the oslo-incubator repository using the following url:
-
-git://git.openstack.org/openstack/oslo-incubator
-
-These modules are "incubating" in oslo-incubator and are kept in sync
-with the help of oslo-incubator's update.py script. See:
-
-https://wiki.openstack.org/wiki/Oslo#Syncing_Code_from_Incubator
-
-The copy of the code should never be directly modified here. Please
-always update oslo-incubator first and then run the script to copy
-the changes across.
@@ -1,71 +0,0 @@
-# Copyright (c) 2011-2012 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Filter support
-"""
-
-import inspect
-
-from stevedore import extension
-
-
-class BaseFilter(object):
-    """Base class for all filter classes."""
-    def _filter_one(self, obj, filter_properties):
-        """Return True if it passes the filter, False otherwise.
-        Override this in a subclass.
-        """
-        return True
-
-    def filter_all(self, filter_obj_list, filter_properties):
-        """Yield objects that pass the filter.
-
-        Can be overriden in a subclass, if you need to base filtering
-        decisions on all objects. Otherwise, one can just override
-        _filter_one() to filter a single object.
-        """
-        for obj in filter_obj_list:
-            if self._filter_one(obj, filter_properties):
-                yield obj
-
-
-class BaseFilterHandler(object):
-    """ Base class to handle loading filter classes.
-
-    This class should be subclassed where one needs to use filters.
-    """
-    def __init__(self, filter_class_type, filter_namespace):
-        self.namespace = filter_namespace
-        self.filter_class_type = filter_class_type
-        self.filter_manager = extension.ExtensionManager(filter_namespace)
-
-    def _is_correct_class(self, obj):
-        """Return whether an object is a class of the correct type and
-        is not prefixed with an underscore.
-        """
-        return (inspect.isclass(obj) and
-                not obj.__name__.startswith('_') and
-                issubclass(obj, self.filter_class_type))
-
-    def get_all_classes(self):
-        return [x.plugin for x in self.filter_manager
-                if self._is_correct_class(x.plugin)]
-
-    def get_filtered_objects(self, filter_classes, objs,
-                             filter_properties):
-        for filter_cls in filter_classes:
-            objs = filter_cls().filter_all(objs, filter_properties)
-        return list(objs)
@@ -1,91 +0,0 @@
-# Copyright (c) 2011-2012 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Pluggable Weighing support
-"""
-
-import inspect
-
-from stevedore import extension
-
-
-class WeighedObject(object):
-    """Object with weight information."""
-    def __init__(self, obj, weight):
-        self.obj = obj
-        self.weight = weight
-
-    def __repr__(self):
-        return "<WeighedObject '%s': %s>" % (self.obj, self.weight)
-
-
-class BaseWeigher(object):
-    """Base class for pluggable weighers."""
-    def _weight_multiplier(self):
-        """How weighted this weigher should be. Normally this would
-        be overriden in a subclass based on a config value.
-        """
-        return 1.0
-
-    def _weigh_object(self, obj, weight_properties):
-        """Override in a subclass to specify a weight for a specific
-        object.
-        """
-        return 0.0
-
-    def weigh_objects(self, weighed_obj_list, weight_properties):
-        """Weigh multiple objects. Override in a subclass if you need
-        need access to all objects in order to manipulate weights.
-        """
-        constant = self._weight_multiplier()
-        for obj in weighed_obj_list:
-            obj.weight += (constant *
-                           self._weigh_object(obj.obj, weight_properties))
-
-
-class BaseWeightHandler(object):
-    object_class = WeighedObject
-
-    def __init__(self, weighed_object_type, weight_namespace):
-        self.namespace = weight_namespace
-        self.weighed_object_type = weighed_object_type
-        self.weight_manager = extension.ExtensionManager(weight_namespace)
-
-    def _is_correct_class(self, obj):
-        """Return whether an object is a class of the correct type and
-        is not prefixed with an underscore.
-        """
-        return (inspect.isclass(obj) and
-                not obj.__name__.startswith('_') and
-                issubclass(obj, self.weighed_object_type))
-
-    def get_all_classes(self):
-        return [x.plugin for x in self.weight_manager
-                if self._is_correct_class(x.plugin)]
-
-    def get_weighed_objects(self, weigher_classes, obj_list,
-                            weighing_properties):
-        """Return a sorted (highest score first) list of WeighedObjects."""
-
-        if not obj_list:
-            return []
-
-        weighed_objs = [self.object_class(obj, 0.0) for obj in obj_list]
-        for weigher_cls in weigher_classes:
-            weigher = weigher_cls()
-            weigher.weigh_objects(weighed_objs, weighing_properties)
-
-        return sorted(weighed_objs, key=lambda x: x.weight, reverse=True)
@@ -39,14 +39,14 @@ import manila.network.neutron.neutron_network_plugin
 import manila.network.nova_network_plugin
 import manila.network.standalone_network_plugin
 import manila.quota
-import manila.scheduler.driver
+import manila.scheduler.drivers.base
+import manila.scheduler.drivers.simple
 import manila.scheduler.host_manager
 import manila.scheduler.manager
 import manila.scheduler.scheduler_options
-import manila.scheduler.simple
-import manila.scheduler.weights
-import manila.scheduler.weights.capacity
-import manila.scheduler.weights.pool
+import manila.scheduler.weighers
+import manila.scheduler.weighers.capacity
+import manila.scheduler.weighers.pool
 import manila.service
 import manila.share.api
 import manila.share.driver
@@ -99,13 +99,13 @@ _global_opt_lists = [
     manila.network.nova_network_plugin.nova_single_network_plugin_opts,
     manila.network.standalone_network_plugin.standalone_network_plugin_opts,
     manila.quota.quota_opts,
-    manila.scheduler.driver.scheduler_driver_opts,
+    manila.scheduler.drivers.base.scheduler_driver_opts,
     manila.scheduler.host_manager.host_manager_opts,
     [manila.scheduler.manager.scheduler_driver_opt],
     [manila.scheduler.scheduler_options.scheduler_json_config_location_opt],
-    manila.scheduler.simple.simple_scheduler_opts,
-    manila.scheduler.weights.capacity.capacity_weight_opts,
-    manila.scheduler.weights.pool.pool_weight_opts,
+    manila.scheduler.drivers.simple.simple_scheduler_opts,
+    manila.scheduler.weighers.capacity.capacity_weight_opts,
+    manila.scheduler.weighers.pool.pool_weight_opts,
     manila.service.service_opts,
     manila.share.api.share_api_opts,
     manila.share.driver.ganesha_opts,
@@ -1,25 +0,0 @@
-# Copyright (c) 2010 OpenStack, LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-:mod:`manila.scheduler` -- Scheduler Nodes
-=====================================================
-
-.. automodule:: manila.scheduler
-   :platform: Unix
-   :synopsis: Module that picks a volume node to create a volume.
-.. moduleauthor:: Sandy Walsh <sandy.walsh@rackspace.com>
-.. moduleauthor:: Ed Leafe <ed@leafe.com>
-.. moduleauthor:: Chris Behrens <cbehrens@codestud.com>
-"""
@@ -32,7 +32,9 @@ class BaseHandler(object):
         self.extension_manager = extension.ExtensionManager(modifier_namespace)

     def _is_correct_class(self, cls):
-        """Return whether an object is a class of the correct type and
+        """Check if an object is the correct type.
+
+        Return whether an object is a class of the correct type and
         is not prefixed with an underscore.
         """
         return (inspect.isclass(cls) and
@@ -25,12 +25,12 @@ from oslo_config import cfg

 from manila import exception
 from manila.i18n import _
-from manila.scheduler import driver
+from manila.scheduler.drivers import base

 CONF = cfg.CONF


-class ChanceScheduler(driver.Scheduler):
+class ChanceScheduler(base.Scheduler):
     """Implements Scheduler as a random node selector."""

     def _filter_hosts(self, request_spec, hosts, **kwargs):
@@ -65,7 +65,7 @@ class ChanceScheduler(driver.Scheduler):
         share_id = request_spec['share_id']
         snapshot_id = request_spec['snapshot_id']

-        updated_share = driver.share_update_db(context, share_id, host)
+        updated_share = base.share_update_db(context, share_id, host)
         self.share_rpcapi.create_share_instance(
             context,
             updated_share.instance,
@@ -26,7 +26,7 @@ from oslo_log import log
 from manila import exception
 from manila.i18n import _
 from manila.i18n import _LE, _LI
-from manila.scheduler import driver
+from manila.scheduler.drivers import base
 from manila.scheduler import scheduler_options
 from manila.share import share_types

@@ -34,7 +34,7 @@ CONF = cfg.CONF
 LOG = log.getLogger(__name__)


-class FilterScheduler(driver.Scheduler):
+class FilterScheduler(base.Scheduler):
     """Scheduler that can be used for filtering and weighing."""
     def __init__(self, *args, **kwargs):
         super(FilterScheduler, self).__init__(*args, **kwargs)
@@ -42,10 +42,6 @@ class FilterScheduler(driver.Scheduler):
         self.options = scheduler_options.SchedulerOptions()
         self.max_attempts = self._max_attempts()

-    def schedule(self, context, topic, method, *args, **kwargs):
-        """Return best-suited host for request."""
-        self._schedule(context, topic, *args, **kwargs)
-
     def _get_configuration_options(self):
         """Fetch options dictionary. Broken out for testing."""
         return self.options.get_configuration()
@@ -95,7 +91,7 @@ class FilterScheduler(driver.Scheduler):
         share_id = request_spec['share_id']
         snapshot_id = request_spec['snapshot_id']

-        updated_share = driver.share_update_db(context, share_id, host)
+        updated_share = base.share_update_db(context, share_id, host)
         self._post_select_populate_filter_properties(filter_properties,
                                                      weighed_host.obj)

@@ -296,7 +292,7 @@ class FilterScheduler(driver.Scheduler):
         msg = _LI("Chose host %(host)s for create_consistency_group %(cg_id)s")
         LOG.info(msg % {'host': host, 'cg_id': group_id})

-        updated_group = driver.cg_update_db(context, group_id, host)
+        updated_group = base.cg_update_db(context, group_id, host)

         self.share_rpcapi.create_consistency_group(context,
                                                    updated_group, host)
@@ -24,8 +24,8 @@ from oslo_config import cfg
 from manila import db
 from manila import exception
 from manila.i18n import _
-from manila.scheduler import chance
-from manila.scheduler import driver
+from manila.scheduler.drivers import base
+from manila.scheduler.drivers import chance
 from manila import utils

 simple_scheduler_opts = [
@@ -64,8 +64,9 @@ class SimpleScheduler(chance.ChanceScheduler):
                 msg = _("Not enough allocatable share gigabytes remaining")
                 raise exception.NoValidHost(reason=msg)
             if utils.service_is_up(service) and not service['disabled']:
-                updated_share = driver.share_update_db(context, share_id,
-                                                       service['host'])
+                updated_share = base.share_update_db(context,
+                                                     share_id,
+                                                     service['host'])
                 self.share_rpcapi.create_share_instance(
                     context,
                     updated_share.instance,
@@ -13,10 +13,10 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-from manila.openstack.common.scheduler import filters
+from manila.scheduler.filters import base_host


-class AvailabilityZoneFilter(filters.BaseHostFilter):
+class AvailabilityZoneFilter(base_host.BaseHostFilter):
     """Filters Hosts by availability zone."""

     # Availability zones do not change within a request
@@ -18,8 +18,8 @@ Filter support
 """
 import logging

-from manila.openstack.common._i18n import _LI
-from manila.openstack.common.scheduler import base_handler
+from manila.i18n import _LI
+from manila.scheduler import base_handler

 LOG = logging.getLogger(__name__)

@@ -27,7 +27,9 @@ LOG = logging.getLogger(__name__)
 class BaseFilter(object):
     """Base class for all filter classes."""
     def _filter_one(self, obj, filter_properties):
-        """Return True if it passes the filter, False otherwise.
+        """Check if an object passes a filter.
+
+        Return True if it passes the filter, False otherwise.
         Override this in a subclass.
         """
         return True
@@ -48,7 +50,9 @@ class BaseFilter(object):
     run_filter_once_per_request = False

     def run_filter_for_index(self, index):
-        """Return True if the filter needs to be run for the "index-th"
+        """Check if filter needs to be run for the "index-th" instance.
+
+        Return True if the filter needs to be run for the "index-th"
         instance in a request. Only need to override this if a filter
         needs anything other than "first only" or "all" behaviour.
         """
@@ -17,10 +17,10 @@
 Scheduler host filters
 """

-from manila.openstack.common.scheduler import base_filter
+from manila.scheduler.filters import base


-class BaseHostFilter(base_filter.BaseFilter):
+class BaseHostFilter(base.BaseFilter):
     """Base class for host filters."""
     def _filter_one(self, obj, filter_properties):
         """Return True if the object passes the filter, otherwise False."""
@@ -28,11 +28,12 @@ class BaseHostFilter(base_filter.BaseFilter):

     def host_passes(self, host_state, filter_properties):
         """Return True if the HostState passes the filter, otherwise False.
+
         Override this in a subclass.
         """
         raise NotImplementedError()


-class HostFilterHandler(base_filter.BaseFilterHandler):
+class HostFilterHandler(base.BaseFilterHandler):
     def __init__(self, namespace):
         super(HostFilterHandler, self).__init__(BaseHostFilter, namespace)
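Note: the relocated BaseHostFilter keeps the same contract as before the move — subclasses implement host_passes(). A hypothetical custom filter written against the new module path might look like the minimal sketch below; the import path and method signature come from the hunk above, while the filter class itself and the 'enabled' capability name are purely illustrative.

# Hypothetical example only: a toy filter built on the relocated base class.
from manila.scheduler.filters import base_host


class OnlyEnabledHostsFilter(base_host.BaseHostFilter):
    """Pass only hosts whose state reports an 'enabled' attribute."""

    def host_passes(self, host_state, filter_properties):
        # host_state carries whatever HostManager populated; 'enabled'
        # is an assumed attribute used for illustration.
        return bool(getattr(host_state, 'enabled', True))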
@@ -17,17 +17,19 @@ import logging

 import six

-from manila.openstack.common.scheduler import filters
-from manila.openstack.common.scheduler.filters import extra_specs_ops
+from manila.scheduler.filters import base_host
+from manila.scheduler.filters import extra_specs_ops

 LOG = logging.getLogger(__name__)


-class CapabilitiesFilter(filters.BaseHostFilter):
+class CapabilitiesFilter(base_host.BaseHostFilter):
     """HostFilter to work with resource (instance & volume) type records."""

     def _satisfies_extra_specs(self, capabilities, resource_type):
-        """Check that the capabilities provided by the services satisfy
+        """Compare capabilities against extra specs.
+
+        Check that the capabilities provided by the services satisfy
         the extra specs associated with the resource type.
         """
         extra_specs = resource_type.get('extra_specs', [])
@@ -23,12 +23,12 @@ from oslo_log import log

 from manila.i18n import _LE
 from manila.i18n import _LW
-from manila.openstack.common.scheduler import filters
+from manila.scheduler.filters import base_host

 LOG = log.getLogger(__name__)


-class CapacityFilter(filters.BaseHostFilter):
+class CapacityFilter(base_host.BaseHostFilter):
     """CapacityFilter filters based on share host's capacity utilization."""

     def host_passes(self, host_state, filter_properties):
@@ -16,13 +16,13 @@

 from oslo_log import log

-from manila.openstack.common.scheduler import filters
+from manila.scheduler.filters import base_host
 from manila.share import utils as share_utils

 LOG = log.getLogger(__name__)


-class ConsistencyGroupFilter(filters.BaseHostFilter):
+class ConsistencyGroupFilter(base_host.BaseHostFilter):
     """ConsistencyGroupFilter filters host based on compatibility with CG."""

     def host_passes(self, host_state, filter_properties):
@@ -15,12 +15,12 @@

 import logging

-from manila.openstack.common.scheduler import filters
+from manila.scheduler.filters import base_host

 LOG = logging.getLogger(__name__)


-class IgnoreAttemptedHostsFilter(filters.BaseHostFilter):
+class IgnoreAttemptedHostsFilter(base_host.BaseHostFilter):
     """Filter out previously attempted hosts

     A host passes this filter if it has not already been attempted for
@@ -18,15 +18,16 @@ import operator
 from oslo_serialization import jsonutils
 import six

-from manila.openstack.common.scheduler import filters
+from manila.scheduler.filters import base_host


-class JsonFilter(filters.BaseHostFilter):
-    """Host Filter to allow simple JSON-based grammar for
-    selecting hosts.
-    """
+class JsonFilter(base_host.BaseHostFilter):
+    """Host Filter to allow simple JSON-based grammar for selecting hosts."""
+
     def _op_compare(self, args, op):
-        """Returns True if the specified operator can successfully
+        """Check if operator can compare the first arg with the others.
+
+        Returns True if the specified operator can successfully
         compare the first item in the args with all the rest. Will
         return False if only one item is in the list.
         """
@@ -88,7 +89,9 @@ class JsonFilter(filters.BaseHostFilter):
     }

     def _parse_string(self, string, host_state):
-        """Strings prefixed with $ are capability lookups in the
+        """Parse string.
+
+        Strings prefixed with $ are capability lookups in the
         form '$variable' where 'variable' is an attribute in the
         HostState class. If $variable is a dictionary, you may
         use: $variable.dictkey
@@ -126,7 +129,9 @@ class JsonFilter(filters.BaseHostFilter):
         return result

     def host_passes(self, host_state, filter_properties):
-        """Return a list of hosts that can fulfill the requirements
+        """Filters hosts.
+
+        Return a list of hosts that can fulfill the requirements
         specified in the query.
         """
         # TODO(zhiteng) Add description for filter_properties structure
@@ -15,12 +15,12 @@

 from oslo_log import log

-from manila.openstack.common.scheduler import filters
+from manila.scheduler.filters import base_host

 LOG = log.getLogger(__name__)


-class RetryFilter(filters.BaseHostFilter):
+class RetryFilter(base_host.BaseHostFilter):
     """Filter out already tried nodes for scheduling purposes."""

     def host_passes(self, host_state, filter_properties):
@@ -34,8 +34,8 @@ import six
 from manila import db
 from manila import exception
 from manila.i18n import _LI, _LW
-from manila.openstack.common.scheduler import filters
-from manila.openstack.common.scheduler import weights
+from manila.scheduler.filters import base_host as base_host_filter
+from manila.scheduler.weighers import base_host as base_host_weigher
 from manila.share import utils as share_utils
 from manila import utils

@@ -379,11 +379,11 @@ class HostManager(object):
     def __init__(self):
         self.service_states = {}  # { <host>: {<service>: {cap k : v}}}
         self.host_state_map = {}
-        self.filter_handler = filters.HostFilterHandler('manila.scheduler.'
-                                                        'filters')
+        self.filter_handler = base_host_filter.HostFilterHandler(
+            'manila.scheduler.filters')
         self.filter_classes = self.filter_handler.get_all_classes()
-        self.weight_handler = weights.HostWeightHandler('manila.scheduler.'
-                                                        'weights')
+        self.weight_handler = base_host_weigher.HostWeightHandler(
+            'manila.scheduler.weighers')
         self.weight_classes = self.weight_handler.get_all_classes()

     def _choose_host_filters(self, filter_cls_names):
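Note: the strings passed to the handlers above are stevedore entry-point namespaces, renamed here from 'manila.scheduler.filters'/'manila.scheduler.weights' style oslo names to the new in-tree ones. A minimal sketch of how those namespaces are consumed, assuming the corresponding entry points are registered in the project's setup.cfg, could look like this; the printed class lists are illustrative only.

# Minimal sketch, assuming entry points exist under the renamed groups.
from manila.scheduler.filters import base_host as base_host_filter
from manila.scheduler.weighers import base_host as base_host_weigher

filter_handler = base_host_filter.HostFilterHandler('manila.scheduler.filters')
weight_handler = base_host_weigher.HostWeightHandler('manila.scheduler.weighers')

# get_all_classes() returns the plugin classes that stevedore discovered.
print([cls.__name__ for cls in filter_handler.get_all_classes()])
print([cls.__name__ for cls in weight_handler.get_all_classes()])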
@@ -37,13 +37,24 @@ from manila.share import rpcapi as share_rpcapi
 LOG = log.getLogger(__name__)

 scheduler_driver_opt = cfg.StrOpt('scheduler_driver',
-                                  default='manila.scheduler.filter_scheduler.'
-                                          'FilterScheduler',
+                                  default='manila.scheduler.drivers.'
+                                          'filter.FilterScheduler',
                                   help='Default scheduler driver to use.')

 CONF = cfg.CONF
 CONF.register_opt(scheduler_driver_opt)

+# Drivers that need to change module paths or class names can add their
+# old/new path here to maintain backward compatibility.
+MAPPING = {
+    'manila.scheduler.chance.ChanceScheduler':
+        'manila.scheduler.drivers.chance.ChanceScheduler',
+    'manila.scheduler.filter_scheduler.FilterScheduler':
+        'manila.scheduler.drivers.filter.FilterScheduler',
+    'manila.scheduler.simple.SimpleScheduler':
+        'manila.scheduler.drivers.simple.SimpleScheduler',
+}
+

 class SchedulerManager(manager.Manager):
     """Chooses a host to create shares."""
@@ -52,8 +63,19 @@ class SchedulerManager(manager.Manager):

     def __init__(self, scheduler_driver=None, service_name=None,
                  *args, **kwargs):
+
         if not scheduler_driver:
             scheduler_driver = CONF.scheduler_driver
+        if scheduler_driver in MAPPING:
+            msg_args = {
+                'old': scheduler_driver,
+                'new': MAPPING[scheduler_driver],
+            }
+            LOG.warning(_LW("Scheduler driver path %(old)s is deprecated, "
+                            "update your configuration to the new path "
+                            "%(new)s"), msg_args)
+            scheduler_driver = MAPPING[scheduler_driver]
+
         self.driver = importutils.import_object(scheduler_driver)
         super(SchedulerManager, self).__init__(*args, **kwargs)

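Note: a self-contained illustration of the deprecation mapping introduced above. The dictionary values are taken from the MAPPING in the earlier hunk; the lookup mirrors what SchedulerManager.__init__ now does before importing the driver, so operators with the old filter_scheduler path configured keep working (with a deprecation warning).

# Illustration only: resolving a deprecated scheduler_driver value.
MAPPING = {
    'manila.scheduler.chance.ChanceScheduler':
        'manila.scheduler.drivers.chance.ChanceScheduler',
    'manila.scheduler.filter_scheduler.FilterScheduler':
        'manila.scheduler.drivers.filter.FilterScheduler',
    'manila.scheduler.simple.SimpleScheduler':
        'manila.scheduler.drivers.simple.SimpleScheduler',
}

configured = 'manila.scheduler.filter_scheduler.FilterScheduler'  # old path
resolved = MAPPING.get(configured, configured)
assert resolved == 'manila.scheduler.drivers.filter.FilterScheduler'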
@@ -21,7 +21,7 @@ import abc

 import six

-from manila.openstack.common.scheduler import base_handler
+from manila.scheduler import base_handler


 def normalize(weight_list, minval=None, maxval=None):
@@ -70,7 +70,7 @@ class BaseWeigher(object):
     The attributes maxval and minval can be specified to set up the maximum
     and minimum values for the weighed objects. These values will then be
     taken into account in the normalization step, instead of taking the values
-    from the calculated weights.
+    from the calculated weighers.
     """

     minval = None
@@ -87,18 +87,16 @@

     @abc.abstractmethod
     def _weigh_object(self, obj, weight_properties):
-        """Override in a subclass to specify a weight for a specific
-        object.
-        """
+        """Override in a subclass to specify a weight for a specific object."""

     def weigh_objects(self, weighed_obj_list, weight_properties):
         """Weigh multiple objects.

         Override in a subclass if you need access to all objects in order
-        to calculate weights. Do not modify the weight of an object here,
-        just return a list of weights.
+        to calculate weighers. Do not modify the weight of an object here,
+        just return a list of weighers.
         """
-        # Calculate the weights
+        # Calculate the weighers
         weights = []
         for obj in weighed_obj_list:
             weight = self._weigh_object(obj.obj, weight_properties)
@@ -135,7 +133,7 @@ class BaseWeightHandler(base_handler.BaseHandler):
             weigher = weigher_cls()
             weights = weigher.weigh_objects(weighed_objs, weighing_properties)

-            # Normalize the weights
+            # Normalize the weighers
            weights = normalize(weights,
                                minval=weigher.minval,
                                maxval=weigher.maxval)
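Note: normalize() itself is not shown in this diff; a hedged sketch of the normalization step referenced above, assuming the usual linear (value - minval) / (maxval - minval) scaling, is given below. The real helper lives in manila.scheduler.weighers.base.normalize() and may differ in detail.

# Hedged sketch of the assumed normalization behaviour; illustration only.
def normalize_sketch(weight_list, minval=None, maxval=None):
    if not weight_list:
        return []
    maxval = max(weight_list) if maxval is None else maxval
    minval = min(weight_list) if minval is None else minval
    span = maxval - minval
    if span == 0:
        # All weights equal: every object normalizes to 0.0.
        return [0.0] * len(weight_list)
    return [(w - minval) / span for w in weight_list]


print(normalize_sketch([2.0, 4.0, 8.0]))  # [0.0, 0.333..., 1.0]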
@@ -14,14 +14,13 @@
 # under the License.

 """
-Scheduler host weights
+Scheduler host weighers
 """

-
-from manila.openstack.common.scheduler import base_weight
+from manila.scheduler.weighers import base


-class WeighedHost(base_weight.WeighedObject):
+class WeighedHost(base.WeighedObject):
     def to_dict(self):
         return {
             'weight': self.weight,
@@ -33,12 +32,12 @@ class WeighedHost(base_weight.WeighedObject):
                 (self.obj.host, self.weight))


-class BaseHostWeigher(base_weight.BaseWeigher):
-    """Base class for host weights."""
+class BaseHostWeigher(base.BaseWeigher):
+    """Base class for host weighers."""
     pass


-class HostWeightHandler(base_weight.BaseWeightHandler):
+class HostWeightHandler(base.BaseWeightHandler):
     object_class = WeighedHost

     def __init__(self, namespace):
|
|||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
|
|
||||||
|
|
||||||
from manila.openstack.common.scheduler import weights
|
from manila.scheduler.weighers import base_host
|
||||||
|
|
||||||
capacity_weight_opts = [
|
capacity_weight_opts = [
|
||||||
cfg.FloatOpt('capacity_weight_multiplier',
|
cfg.FloatOpt('capacity_weight_multiplier',
|
||||||
@@ -45,13 +45,13 @@ CONF = cfg.CONF
 CONF.register_opts(capacity_weight_opts)


-class CapacityWeigher(weights.BaseHostWeigher):
+class CapacityWeigher(base_host.BaseHostWeigher):
     def weight_multiplier(self):
         """Override the weight multiplier."""
         return CONF.capacity_weight_multiplier

     def _weigh_object(self, host_state, weight_properties):
-        """Higher weights win. We want spreading to be the default."""
+        """Higher weighers win. We want spreading to be the default."""
         reserved = float(host_state.reserved_percentage) / 100
         free_space = host_state.free_capacity_gb
         total_space = host_state.total_capacity_gb
@@ -81,7 +81,7 @@ class CapacityWeigher(weights.BaseHostWeigher):
                                           weight_properties)
         # NOTE(u_glide): Replace -inf with (minimum - 1) and
         # inf with (maximum + 1) to avoid errors in
-        # manila.openstack.common.scheduler.base_weight.normalize() method
+        # manila.scheduler.weighers.base.normalize() method
         if self.minval == float('-inf'):
             self.minval = self.maxval
         for val in weights:
@@ -17,7 +17,7 @@ from oslo_config import cfg

 from manila import context
 from manila.db import api as db_api
-from manila.openstack.common.scheduler import weights
+from manila.scheduler.weighers import base_host
 from manila.share import utils

 pool_weight_opts = [
@@ -32,7 +32,7 @@ CONF = cfg.CONF
 CONF.register_opts(pool_weight_opts)


-class PoolWeigher(weights.BaseHostWeigher):
+class PoolWeigher(base_host.BaseHostWeigher):
     def weight_multiplier(self):
         """Override the weight multiplier."""
         return CONF.pool_weight_multiplier
manila/tests/scheduler/drivers/test_base.py (new file, 105 lines)
@@ -0,0 +1,105 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Tests For Base Scheduler
+"""
+
+import mock
+from oslo_config import cfg
+from oslo_utils import timeutils
+
+from manila import context
+from manila import db
+from manila.scheduler.drivers import base
+from manila import test
+from manila import utils
+
+CONF = cfg.CONF
+
+
+class SchedulerTestCase(test.TestCase):
+    """Test case for base scheduler driver class."""
+
+    # So we can subclass this test and re-use tests if we need.
+    driver_cls = base.Scheduler
+
+    def setUp(self):
+        super(SchedulerTestCase, self).setUp()
+        self.driver = self.driver_cls()
+        self.context = context.RequestContext('fake_user', 'fake_project')
+        self.topic = 'fake_topic'
+
+    def test_update_service_capabilities(self):
+        service_name = 'fake_service'
+        host = 'fake_host'
+        capabilities = {'fake_capability': 'fake_value'}
+        with mock.patch.object(self.driver.host_manager,
+                               'update_service_capabilities', mock.Mock()):
+            self.driver.update_service_capabilities(
+                service_name, host, capabilities)
+            self.driver.host_manager.update_service_capabilities.\
+                assert_called_once_with(service_name, host, capabilities)
+
+    def test_hosts_up(self):
+        service1 = {'host': 'host1'}
+        service2 = {'host': 'host2'}
+        services = [service1, service2]
+
+        def fake_service_is_up(*args, **kwargs):
+            if args[0]['host'] == 'host1':
+                return False
+            return True
+
+        with mock.patch.object(db, 'service_get_all_by_topic',
+                               mock.Mock(return_value=services)):
+            with mock.patch.object(utils, 'service_is_up',
+                                   mock.Mock(side_effect=fake_service_is_up)):
+                result = self.driver.hosts_up(self.context, self.topic)
+                self.assertEqual(['host2'], result)
+            db.service_get_all_by_topic.assert_called_once_with(
+                self.context, self.topic)
+
+
+class SchedulerDriverBaseTestCase(SchedulerTestCase):
+    """Test cases for base scheduler driver class methods.
+
+    These can't fail if the driver is changed.
+    """
+
+    def test_unimplemented_schedule(self):
+        fake_args = (1, 2, 3)
+        fake_kwargs = {'cat': 'meow'}
+
+        self.assertRaises(NotImplementedError, self.driver.schedule,
+                          self.context, self.topic, 'schedule_something',
+                          *fake_args, **fake_kwargs)
+
+
+class SchedulerDriverModuleTestCase(test.TestCase):
+    """Test case for scheduler driver module methods."""
+
+    def setUp(self):
+        super(SchedulerDriverModuleTestCase, self).setUp()
+        self.context = context.RequestContext('fake_user', 'fake_project')
+
+    @mock.patch.object(db, 'share_update', mock.Mock())
+    def test_share_host_update_db(self):
+        with mock.patch.object(timeutils, 'utcnow',
+                               mock.Mock(return_value='fake-now')):
+            base.share_update_db(self.context, 31337, 'fake_host')
+            db.share_update.assert_called_once_with(
+                self.context, 31337,
+                {'host': 'fake_host', 'scheduled_at': 'fake-now'})
@@ -23,20 +23,20 @@ from oslo_utils import strutils
 from manila.common import constants
 from manila import context
 from manila import exception
-from manila.scheduler import driver
-from manila.scheduler import filter_scheduler
+from manila.scheduler.drivers import base
+from manila.scheduler.drivers import filter
 from manila.scheduler import host_manager
+from manila.tests.scheduler.drivers import test_base
 from manila.tests.scheduler import fakes
-from manila.tests.scheduler import test_scheduler

 SNAPSHOT_SUPPORT = constants.ExtraSpecs.SNAPSHOT_SUPPORT


 @ddt.ddt
-class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
+class FilterSchedulerTestCase(test_base.SchedulerTestCase):
     """Test case for Filter Scheduler."""

-    driver_cls = filter_scheduler.FilterScheduler
+    driver_cls = filter.FilterScheduler

     def test_create_share_no_hosts(self):
         # Ensure empty hosts/child_zones result in NoValidHosts exception.
@@ -315,7 +315,7 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
         self.mock_object(sched, "_get_best_host_for_consistency_group",
                          mock.Mock(return_value=fake_host))
         fake_updated_group = mock.Mock()
-        self.mock_object(driver, "cg_update_db", mock.Mock(
+        self.mock_object(base, "cg_update_db", mock.Mock(
             return_value=fake_updated_group))
         self.mock_object(sched.share_rpcapi, "create_consistency_group")

@@ -324,8 +324,8 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):

         sched._get_best_host_for_consistency_group.assert_called_once_with(
             fake_context, request_spec)
-        driver.cg_update_db.assert_called_once_with(fake_context, 'fake_id',
-                                                    fake_host)
+        base.cg_update_db.assert_called_once_with(
+            fake_context, 'fake_id', fake_host)
         sched.share_rpcapi.create_consistency_group.assert_called_once_with(
             fake_context, fake_updated_group, fake_host)

manila/tests/scheduler/drivers/test_simple.py (new file, 169 lines)
@@ -0,0 +1,169 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""
+Tests For Simple Scheduler
+"""
+
+import mock
+from oslo_config import cfg
+
+from manila import context
+from manila import db
+from manila import exception
+from manila.scheduler.drivers import base
+from manila.scheduler.drivers import simple
+from manila.share import rpcapi as share_rpcapi
+from manila import test
+from manila.tests import db_utils
+from manila import utils
+
+CONF = cfg.CONF
+
+
+class SimpleSchedulerSharesTestCase(test.TestCase):
+    """Test case for simple scheduler create share method."""
+
+    def setUp(self):
+        super(SimpleSchedulerSharesTestCase, self).setUp()
+        self.mock_object(share_rpcapi, 'ShareAPI')
+        self.driver = simple.SimpleScheduler()
+
+        self.context = context.RequestContext('fake_user', 'fake_project')
+        self.admin_context = context.RequestContext('fake_admin_user',
+                                                    'fake_project')
+        self.admin_context.is_admin = True
+
+    @mock.patch.object(utils, 'service_is_up', mock.Mock(return_value=True))
+    def test_create_share_if_two_services_up(self):
+        share_id = 'fake'
+        fake_share = {'id': share_id, 'size': 1}
+        fake_service_1 = {'disabled': False, 'host': 'fake_host1'}
+        fake_service_2 = {'disabled': False, 'host': 'fake_host2'}
+        fake_result = [(fake_service_1, 2), (fake_service_2, 1)]
+        fake_request_spec = {
+            'share_id': share_id,
+            'share_properties': fake_share,
+        }
+        self.mock_object(db, 'service_get_all_share_sorted',
+                         mock.Mock(return_value=fake_result))
+        self.mock_object(base, 'share_update_db',
+                         mock.Mock(return_value=db_utils.create_share()))
+
+        self.driver.schedule_create_share(self.context,
+                                          fake_request_spec, {})
+        utils.service_is_up.assert_called_once_with(utils.IsAMatcher(dict))
+        db.service_get_all_share_sorted.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext))
+        base.share_update_db.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext), share_id, 'fake_host1')
+
+    def test_create_share_if_services_not_available(self):
+        share_id = 'fake'
+        fake_share = {'id': share_id, 'size': 1}
+        fake_result = []
+        fake_request_spec = {
+            'share_id': share_id,
+            'share_properties': fake_share,
+        }
+        with mock.patch.object(db, 'service_get_all_share_sorted',
+                               mock.Mock(return_value=fake_result)):
+            self.assertRaises(exception.NoValidHost,
+                              self.driver.schedule_create_share,
+                              self.context, fake_request_spec, {})
+            db.service_get_all_share_sorted.assert_called_once_with(
+                utils.IsAMatcher(context.RequestContext))
+
+    def test_create_share_if_max_gigabytes_exceeded(self):
+        share_id = 'fake'
+        fake_share = {'id': share_id, 'size': 10001}
+        fake_service_1 = {'disabled': False, 'host': 'fake_host1'}
+        fake_service_2 = {'disabled': False, 'host': 'fake_host2'}
+        fake_result = [(fake_service_1, 5), (fake_service_2, 7)]
+        fake_request_spec = {
+            'share_id': share_id,
+            'share_properties': fake_share,
+        }
+        with mock.patch.object(db, 'service_get_all_share_sorted',
+                               mock.Mock(return_value=fake_result)):
+            self.assertRaises(exception.NoValidHost,
+                              self.driver.schedule_create_share,
+                              self.context, fake_request_spec, {})
+            db.service_get_all_share_sorted.assert_called_once_with(
+                utils.IsAMatcher(context.RequestContext))
+
+    @mock.patch.object(utils, 'service_is_up', mock.Mock(return_value=True))
+    def test_create_share_availability_zone(self):
+        share_id = 'fake'
+        fake_share = {
+            'id': share_id,
+            'size': 1,
+        }
+        fake_instance = {
+            'availability_zone_id': 'fake',
+        }
+        fake_service_1 = {
+            'disabled': False, 'host': 'fake_host1',
+            'availability_zone_id': 'fake',
+        }
+        fake_service_2 = {
+            'disabled': False, 'host': 'fake_host2',
+            'availability_zone_id': 'super_fake',
+        }
+        fake_result = [(fake_service_1, 0), (fake_service_2, 1)]
+        fake_request_spec = {
+            'share_id': share_id,
+            'share_properties': fake_share,
+            'share_instance_properties': fake_instance,
+        }
+        self.mock_object(db, 'service_get_all_share_sorted',
+                         mock.Mock(return_value=fake_result))
+        self.mock_object(base, 'share_update_db',
+                         mock.Mock(return_value=db_utils.create_share()))
+
+        self.driver.schedule_create_share(self.context,
+                                          fake_request_spec, {})
+        utils.service_is_up.assert_called_once_with(fake_service_1)
+        base.share_update_db.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext), share_id,
+            fake_service_1['host'])
+        db.service_get_all_share_sorted.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext))
+
+    @mock.patch.object(utils, 'service_is_up', mock.Mock(return_value=True))
+    def test_create_share_availability_zone_on_host(self):
+        share_id = 'fake'
+        fake_share = {
+            'id': share_id,
+            'availability_zone': 'fake:fake',
+            'size': 1,
+        }
+        fake_service = {'disabled': False, 'host': 'fake'}
+        fake_request_spec = {
+            'share_id': share_id,
+            'share_properties': fake_share,
+        }
+        self.mock_object(db, 'service_get_all_share_sorted',
+                         mock.Mock(return_value=[(fake_service, 1)]))
+        self.mock_object(base, 'share_update_db',
+                         mock.Mock(return_value=db_utils.create_share()))
+
+        self.driver.schedule_create_share(self.admin_context,
+                                          fake_request_spec, {})
+        utils.service_is_up.assert_called_once_with(fake_service)
+        db.service_get_all_share_sorted.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext))
+        base.share_update_db.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext), share_id, 'fake')
@@ -19,8 +19,9 @@ Fakes For Scheduler tests.
 from oslo_utils import timeutils
 import six

-from manila.scheduler import filter_scheduler
+from manila.scheduler.drivers import filter
 from manila.scheduler import host_manager
+from manila.scheduler.weighers import base_host as base_host_weigher

 SHARE_SERVICES_NO_POOLS = [
     dict(id=1, host='host1', topic='share', disabled=False,
@ -167,7 +168,7 @@ SHARE_SERVICE_STATES_WITH_POOLS = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
class FakeFilterScheduler(filter_scheduler.FilterScheduler):
|
class FakeFilterScheduler(filter.FilterScheduler):
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
super(FakeFilterScheduler, self).__init__(*args, **kwargs)
|
super(FakeFilterScheduler, self).__init__(*args, **kwargs)
|
||||||
self.host_manager = host_manager.HostManager()
|
self.host_manager = host_manager.HostManager()
|
||||||
@ -260,3 +261,18 @@ def mock_host_manager_db_calls(mock_obj, disabled=None):
|
|||||||
else:
|
else:
|
||||||
mock_obj.return_value = [service for service in services
|
mock_obj.return_value = [service for service in services
|
||||||
if service['disabled'] == disabled]
|
if service['disabled'] == disabled]
|
||||||
|
|
||||||
|
|
||||||
|
class FakeWeigher1(base_host_weigher.BaseHostWeigher):
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class FakeWeigher2(base_host_weigher.BaseHostWeigher):
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class FakeClass(object):
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
0 manila/tests/scheduler/filters/__init__.py Normal file
66 manila/tests/scheduler/filters/test_availability_zone.py Normal file
@ -0,0 +1,66 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Tests For AvailabilityZoneFilter.
"""

from oslo_context import context

from manila.scheduler.filters import availability_zone
from manila import test
from manila.tests.scheduler import fakes


class HostFiltersTestCase(test.TestCase):
    """Test case for AvailabilityZoneFilter."""

    def setUp(self):
        super(HostFiltersTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.filter = availability_zone.AvailabilityZoneFilter()

    @staticmethod
    def _make_zone_request(zone, is_admin=False):
        ctxt = context.RequestContext('fake', 'fake', is_admin=is_admin)
        return {
            'context': ctxt,
            'request_spec': {
                'resource_properties': {
                    'availability_zone_id': zone
                }
            }
        }

    def test_availability_zone_filter_same(self):
        service = {'availability_zone_id': 'nova'}
        request = self._make_zone_request('nova')
        host = fakes.FakeHostState('host1',
                                   {'service': service})
        self.assertTrue(self.filter.host_passes(host, request))

    def test_availability_zone_filter_different(self):
        service = {'availability_zone_id': 'nova'}
        request = self._make_zone_request('bad')
        host = fakes.FakeHostState('host1',
                                   {'service': service})
        self.assertFalse(self.filter.host_passes(host, request))

    def test_availability_zone_filter_empty(self):
        service = {'availability_zone_id': 'nova'}
        request = {}
        host = fakes.FakeHostState('host1',
                                   {'service': service})
        self.assertTrue(self.filter.host_passes(host, request))
159 manila/tests/scheduler/filters/test_base.py Normal file
@ -0,0 +1,159 @@
# Copyright (c) 2013 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock

from manila.scheduler.filters import base
from manila import test


class TestBaseFilter(test.TestCase):

    def setUp(self):
        super(TestBaseFilter, self).setUp()
        self.filter = base.BaseFilter()

    def test_filter_one_is_called(self):

        filters = [1, 2, 3, 4]
        filter_properties = {'x': 'y'}

        side_effect = lambda value, props: value in [2, 3]
        self.mock_object(self.filter,
                         '_filter_one',
                         mock.Mock(side_effect=side_effect))

        result = list(self.filter.filter_all(filters, filter_properties))

        self.assertEqual([2, 3], result)


class FakeExtension(object):

    def __init__(self, plugin):
        self.plugin = plugin


class BaseFakeFilter(base.BaseFilter):
    pass


class FakeFilter1(BaseFakeFilter):
    """Derives from BaseFakeFilter and has a fake entry point defined.

    Entry point is returned by fake ExtensionManager.
    Should be included in the output of all_classes.
    """


class FakeFilter2(BaseFakeFilter):
    """Derives from BaseFakeFilter but has no entry point.

    Should be not included in all_classes.
    """


class FakeFilter3(base.BaseFilter):
    """Does not derive from BaseFakeFilter.

    Should not be included.
    """


class FakeFilter4(BaseFakeFilter):
    """Derives from BaseFakeFilter and has an entry point.

    Should be included.
    """


class FakeFilter5(BaseFakeFilter):
    """Derives from BaseFakeFilter but has no entry point.

    Should not be included.
    """
    run_filter_once_per_request = True


class FakeExtensionManager(list):

    def __init__(self, namespace):
        classes = [FakeFilter1, FakeFilter3, FakeFilter4]
        exts = map(FakeExtension, classes)
        super(FakeExtensionManager, self).__init__(exts)
        self.namespace = namespace


class TestBaseFilterHandler(test.TestCase):

    def setUp(self):
        super(TestBaseFilterHandler, self).setUp()
        self.mock_object(base.base_handler.extension,
                         'ExtensionManager',
                         FakeExtensionManager)
        self.handler = base.BaseFilterHandler(BaseFakeFilter, 'fake_filters')

    def test_get_all_classes(self):
        # In order for a FakeFilter to be returned by get_all_classes, it has
        # to comply with these rules:
        # * It must be derived from BaseFakeFilter
        #   AND
        # * It must have a python entrypoint assigned (returned by
        #   FakeExtensionManager)
        expected = [FakeFilter1, FakeFilter4]
        result = self.handler.get_all_classes()
        self.assertEqual(expected, result)

    def _get_filtered_objects(self, filter_classes, index=0):
        filter_objs_initial = [1, 2, 3, 4]
        filter_properties = {'x': 'y'}
        return self.handler.get_filtered_objects(filter_classes,
                                                 filter_objs_initial,
                                                 filter_properties,
                                                 index)

    @mock.patch.object(FakeFilter4, 'filter_all')
    @mock.patch.object(FakeFilter3, 'filter_all', return_value=None)
    def test_get_filtered_objects_return_none(self, fake3_filter_all,
                                              fake4_filter_all):
        filter_classes = [FakeFilter1, FakeFilter2, FakeFilter3, FakeFilter4]
        result = self._get_filtered_objects(filter_classes)
        self.assertIsNone(result)
        self.assertFalse(fake4_filter_all.called)

    def test_get_filtered_objects(self):
        filter_objs_expected = [1, 2, 3, 4]
        filter_classes = [FakeFilter1, FakeFilter2, FakeFilter3, FakeFilter4]
        result = self._get_filtered_objects(filter_classes)
        self.assertEqual(filter_objs_expected, result)

    def test_get_filtered_objects_with_filter_run_once(self):
        filter_objs_expected = [1, 2, 3, 4]
        filter_classes = [FakeFilter5]

        with mock.patch.object(FakeFilter5, 'filter_all',
                               return_value=filter_objs_expected
                               ) as fake5_filter_all:
            result = self._get_filtered_objects(filter_classes)
            self.assertEqual(filter_objs_expected, result)
            self.assertEqual(1, fake5_filter_all.call_count)

            result = self._get_filtered_objects(filter_classes, index=1)
            self.assertEqual(filter_objs_expected, result)
            self.assertEqual(1, fake5_filter_all.call_count)

            result = self._get_filtered_objects(filter_classes, index=2)
            self.assertEqual(filter_objs_expected, result)
            self.assertEqual(1, fake5_filter_all.call_count)
56 manila/tests/scheduler/filters/test_base_host.py Normal file
@ -0,0 +1,56 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Tests For Scheduler Host Filters.
"""

from oslo_context import context
from oslo_serialization import jsonutils

from manila.scheduler.filters import base_host
from manila import test


class TestFilter(test.TestCase):
    pass


class TestBogusFilter(object):
    """Class that doesn't inherit from BaseHostFilter."""
    pass


class HostFiltersTestCase(test.TestCase):
    """Test case for host filters."""

    def setUp(self):
        super(HostFiltersTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.json_query = jsonutils.dumps(
            ['and', ['>=', '$free_ram_mb', 1024],
             ['>=', '$free_disk_mb', 200 * 1024]])
        namespace = 'manila.scheduler.filters'
        filter_handler = base_host.HostFilterHandler(namespace)
        classes = filter_handler.get_all_classes()
        self.class_map = {}
        for cls in classes:
            self.class_map[cls.__name__] = cls

    def test_all_filters(self):
        # Double check at least a couple of known filters exist
        self.assertTrue('JsonFilter' in self.class_map)
        self.assertTrue('CapabilitiesFilter' in self.class_map)
        self.assertTrue('AvailabilityZoneFilter' in self.class_map)
101 manila/tests/scheduler/filters/test_capabilities.py Normal file
@ -0,0 +1,101 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Tests For CapabilitiesFilter.
"""

from oslo_context import context

from manila.scheduler.filters import capabilities
from manila import test
from manila.tests.scheduler import fakes


class HostFiltersTestCase(test.TestCase):
    """Test case for CapabilitiesFilter."""

    def setUp(self):
        super(HostFiltersTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.filter = capabilities.CapabilitiesFilter()

    def _do_test_type_filter_extra_specs(self, ecaps, especs, passes):
        capabilities = {'enabled': True}
        capabilities.update(ecaps)
        service = {'disabled': False}
        filter_properties = {'resource_type': {'name': 'fake_type',
                                               'extra_specs': especs}}
        host = fakes.FakeHostState('host1',
                                   {'free_capacity_gb': 1024,
                                    'capabilities': capabilities,
                                    'service': service})
        assertion = self.assertTrue if passes else self.assertFalse
        assertion(self.filter.host_passes(host, filter_properties))

    def test_capability_filter_passes_extra_specs_simple(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'opt1': '1', 'opt2': '2'},
            especs={'opt1': '1', 'opt2': '2'},
            passes=True)

    def test_capability_filter_fails_extra_specs_simple(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'opt1': '1', 'opt2': '2'},
            especs={'opt1': '1', 'opt2': '222'},
            passes=False)

    def test_capability_filter_passes_extra_specs_complex(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'opt1': 10, 'opt2': 5},
            especs={'opt1': '>= 2', 'opt2': '<= 8'},
            passes=True)

    def test_capability_filter_fails_extra_specs_complex(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'opt1': 10, 'opt2': 5},
            especs={'opt1': '>= 2', 'opt2': '>= 8'},
            passes=False)

    def test_capability_filter_passes_scope_extra_specs(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'scope_lv1': {'opt1': 10}},
            especs={'capabilities:scope_lv1:opt1': '>= 2'},
            passes=True)

    def test_capability_filter_passes_fakescope_extra_specs(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'scope_lv1': {'opt1': 10}, 'opt2': 5},
            especs={'scope_lv1:opt1': '= 2', 'opt2': '>= 3'},
            passes=True)

    def test_capability_filter_fails_scope_extra_specs(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'scope_lv1': {'opt1': 10}},
            especs={'capabilities:scope_lv1:opt1': '<= 2'},
            passes=False)

    def test_capability_filter_passes_multi_level_scope_extra_specs(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'scope_lv0': {'scope_lv1':
                                 {'scope_lv2': {'opt1': 10}}}},
            especs={'capabilities:scope_lv0:scope_lv1:scope_lv2:opt1': '>= 2'},
            passes=True)

    def test_capability_filter_fails_wrong_scope_extra_specs(self):
        self._do_test_type_filter_extra_specs(
            ecaps={'scope_lv0': {'opt1': 10}},
            especs={'capabilities:scope_lv1:opt1': '>= 2'},
            passes=False)
@ -12,14 +12,13 @@
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler Host Filters.
Tests For CapacityFilter.
"""

import ddt
from oslo_serialization import jsonutils

from manila import context
from manila.openstack.common.scheduler import filters
from manila.scheduler.filters import capacity
from manila import test
from manila.tests.scheduler import fakes
from manila import utils
@ -27,21 +26,12 @@ from manila import utils

@ddt.ddt
class HostFiltersTestCase(test.TestCase):
    """Test case for host filters."""
    """Test case CapacityFilter."""

    def setUp(self):
        super(HostFiltersTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.json_query = jsonutils.dumps(
        self.filter = capacity.CapacityFilter()
            ['and', ['>=', '$free_capacity_gb', 1024],
             ['>=', '$total_capacity_gb', 10 * 1024]])
        # This has a side effect of testing 'get_filter_classes'
        # when specifying a method (in this case, our standard filters)
        filter_handler = filters.HostFilterHandler('manila.scheduler.filters')
        classes = filter_handler.get_all_classes()
        self.class_map = {}
        for cls in classes:
            self.class_map[cls.__name__] = cls

    def _stub_service_is_up(self, ret_value):
        def fake_service_is_up(service):
@ -54,7 +44,6 @@ class HostFiltersTestCase(test.TestCase):
    @ddt.unpack
    def test_capacity_filter_passes(self, size, share_on, host):
        self._stub_service_is_up(True)
        filt_cls = self.class_map['CapacityFilter']()
        filter_properties = {'size': size,
                             'share_exists_on': share_on}
        service = {'disabled': False}
@ -63,7 +52,7 @@ class HostFiltersTestCase(test.TestCase):
                                    'free_capacity_gb': 200,
                                    'updated_at': None,
                                    'service': service})
        self.assertTrue(filt_cls.host_passes(host, filter_properties))
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    @ddt.data(
        {'free_capacity': 120, 'total_capacity': 200,
@ -74,7 +63,6 @@ class HostFiltersTestCase(test.TestCase):
    def test_capacity_filter_fails(self, free_capacity, total_capacity,
                                   reserved):
        self._stub_service_is_up(True)
        filt_cls = self.class_map['CapacityFilter']()
        filter_properties = {'size': 100}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
@ -83,19 +71,18 @@ class HostFiltersTestCase(test.TestCase):
                                    'reserved_percentage': reserved,
                                    'updated_at': None,
                                    'service': service})
        self.assertFalse(filt_cls.host_passes(host, filter_properties))
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_capacity_filter_passes_unknown(self):
        free = 'unknown'
        self._stub_service_is_up(True)
        filt_cls = self.class_map['CapacityFilter']()
        filter_properties = {'size': 100}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
                                   {'free_capacity_gb': free,
                                    'updated_at': None,
                                    'service': service})
        self.assertTrue(filt_cls.host_passes(host, filter_properties))
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    @ddt.data(
        {'free_capacity': 'unknown', 'total_capacity': 'unknown'},
@ -104,7 +91,6 @@ class HostFiltersTestCase(test.TestCase):
    def test_capacity_filter_passes_total(self, free_capacity,
                                          total_capacity):
        self._stub_service_is_up(True)
        filt_cls = self.class_map['CapacityFilter']()
        filter_properties = {'size': 100}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
@ -113,7 +99,7 @@ class HostFiltersTestCase(test.TestCase):
                                    'reserved_percentage': 0,
                                    'updated_at': None,
                                    'service': service})
        self.assertTrue(filt_cls.host_passes(host, filter_properties))
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    @ddt.data(
        {'free': 200, 'total': 'unknown', 'reserved': 5},
@ -122,7 +108,6 @@ class HostFiltersTestCase(test.TestCase):
    @ddt.unpack
    def test_capacity_filter_fails_total(self, free, total, reserved):
        self._stub_service_is_up(True)
        filt_cls = self.class_map['CapacityFilter']()
        filter_properties = {'size': 100}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
@ -131,7 +116,7 @@ class HostFiltersTestCase(test.TestCase):
                                    'reserved_percentage': reserved,
                                    'updated_at': None,
                                    'service': service})
        self.assertFalse(filt_cls.host_passes(host, filter_properties))
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    @ddt.data(
        {'size': 100, 'cap_thin': '<is> True',
@ -159,7 +144,6 @@ class HostFiltersTestCase(test.TestCase):
    def test_filter_thin_passes(self, size, cap_thin, total, free, provisioned,
                                max_ratio, reserved, thin_prov):
        self._stub_service_is_up(True)
        filt_cls = self.class_map['CapacityFilter']()
        filter_properties = {'size': size,
                             'capabilities:thin_provisioning': cap_thin}
        service = {'disabled': False}
@ -172,7 +156,7 @@ class HostFiltersTestCase(test.TestCase):
                                    'thin_provisioning': thin_prov,
                                    'updated_at': None,
                                    'service': service})
        self.assertTrue(filt_cls.host_passes(host, filter_properties))
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    @ddt.data(
        {'size': 200, 'cap_thin': '<is> True',
@ -203,7 +187,6 @@ class HostFiltersTestCase(test.TestCase):
    def test_filter_thin_fails(self, size, cap_thin, total, free, provisioned,
                               max_ratio, reserved, thin_prov):
        self._stub_service_is_up(True)
        filt_cls = self.class_map['CapacityFilter']()
        filter_properties = {'size': size,
                             'capabilities:thin_provisioning': cap_thin}
        service = {'disabled': False}
@ -216,27 +199,4 @@ class HostFiltersTestCase(test.TestCase):
                                    'thin_provisioning': thin_prov,
                                    'updated_at': None,
                                    'service': service})
        self.assertFalse(filt_cls.host_passes(host, filter_properties))
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_retry_filter_disabled(self):
        # Test case where retry/re-scheduling is disabled.
        filt_cls = self.class_map['RetryFilter']()
        host = fakes.FakeHostState('host1', {})
        filter_properties = {}
        self.assertTrue(filt_cls.host_passes(host, filter_properties))

    def test_retry_filter_pass(self):
        # Node not previously tried.
        filt_cls = self.class_map['RetryFilter']()
        host = fakes.FakeHostState('host1', {})
        retry = dict(num_attempts=2, hosts=['host2'])
        filter_properties = dict(retry=retry)
        self.assertTrue(filt_cls.host_passes(host, filter_properties))

    def test_retry_filter_fail(self):
        # Node was already tried.
        filt_cls = self.class_map['RetryFilter']()
        host = fakes.FakeHostState('host1', {})
        retry = dict(num_attempts=1, hosts=['host1'])
        filter_properties = dict(retry=retry)
        self.assertFalse(filt_cls.host_passes(host, filter_properties))
237 manila/tests/scheduler/filters/test_extra_specs_ops.py Normal file
@ -0,0 +1,237 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Tests For Scheduler Host Filters.
"""

from manila.scheduler.filters import extra_specs_ops
from manila import test


class ExtraSpecsOpsTestCase(test.TestCase):
    def _do_extra_specs_ops_test(self, value, req, matches):
        assertion = self.assertTrue if matches else self.assertFalse
        assertion(extra_specs_ops.match(value, req))

    def test_extra_specs_matches_simple(self):
        self._do_extra_specs_ops_test(
            value='1',
            req='1',
            matches=True)

    def test_extra_specs_fails_simple(self):
        self._do_extra_specs_ops_test(
            value='',
            req='1',
            matches=False)

    def test_extra_specs_fails_simple2(self):
        self._do_extra_specs_ops_test(
            value='3',
            req='1',
            matches=False)

    def test_extra_specs_fails_simple3(self):
        self._do_extra_specs_ops_test(
            value='222',
            req='2',
            matches=False)

    def test_extra_specs_fails_with_bogus_ops(self):
        self._do_extra_specs_ops_test(
            value='4',
            req='> 2',
            matches=False)

    def test_extra_specs_matches_with_op_eq(self):
        self._do_extra_specs_ops_test(
            value='123',
            req='= 123',
            matches=True)

    def test_extra_specs_matches_with_op_eq2(self):
        self._do_extra_specs_ops_test(
            value='124',
            req='= 123',
            matches=True)

    def test_extra_specs_fails_with_op_eq(self):
        self._do_extra_specs_ops_test(
            value='34',
            req='= 234',
            matches=False)

    def test_extra_specs_fails_with_op_eq3(self):
        self._do_extra_specs_ops_test(
            value='34',
            req='=',
            matches=False)

    def test_extra_specs_matches_with_op_seq(self):
        self._do_extra_specs_ops_test(
            value='123',
            req='s== 123',
            matches=True)

    def test_extra_specs_fails_with_op_seq(self):
        self._do_extra_specs_ops_test(
            value='1234',
            req='s== 123',
            matches=False)

    def test_extra_specs_matches_with_op_sneq(self):
        self._do_extra_specs_ops_test(
            value='1234',
            req='s!= 123',
            matches=True)

    def test_extra_specs_fails_with_op_sneq(self):
        self._do_extra_specs_ops_test(
            value='123',
            req='s!= 123',
            matches=False)

    def test_extra_specs_fails_with_op_sge(self):
        self._do_extra_specs_ops_test(
            value='1000',
            req='s>= 234',
            matches=False)

    def test_extra_specs_fails_with_op_sle(self):
        self._do_extra_specs_ops_test(
            value='1234',
            req='s<= 1000',
            matches=False)

    def test_extra_specs_fails_with_op_sl(self):
        self._do_extra_specs_ops_test(
            value='2',
            req='s< 12',
            matches=False)

    def test_extra_specs_fails_with_op_sg(self):
        self._do_extra_specs_ops_test(
            value='12',
            req='s> 2',
            matches=False)

    def test_extra_specs_matches_with_op_in(self):
        self._do_extra_specs_ops_test(
            value='12311321',
            req='<in> 11',
            matches=True)

    def test_extra_specs_matches_with_op_in2(self):
        self._do_extra_specs_ops_test(
            value='12311321',
            req='<in> 12311321',
            matches=True)

    def test_extra_specs_matches_with_op_in3(self):
        self._do_extra_specs_ops_test(
            value='12311321',
            req='<in> 12311321 <in>',
            matches=True)

    def test_extra_specs_fails_with_op_in(self):
        self._do_extra_specs_ops_test(
            value='12310321',
            req='<in> 11',
            matches=False)

    def test_extra_specs_fails_with_op_in2(self):
        self._do_extra_specs_ops_test(
            value='12310321',
            req='<in> 11 <in>',
            matches=False)

    def test_extra_specs_matches_with_op_is(self):
        self._do_extra_specs_ops_test(
            value=True,
            req='<is> True',
            matches=True)

    def test_extra_specs_matches_with_op_is2(self):
        self._do_extra_specs_ops_test(
            value=False,
            req='<is> False',
            matches=True)

    def test_extra_specs_matches_with_op_is3(self):
        self._do_extra_specs_ops_test(
            value=False,
            req='<is> Nonsense',
            matches=True)

    def test_extra_specs_fails_with_op_is(self):
        self._do_extra_specs_ops_test(
            value=True,
            req='<is> False',
            matches=False)

    def test_extra_specs_fails_with_op_is2(self):
        self._do_extra_specs_ops_test(
            value=False,
            req='<is> True',
            matches=False)

    def test_extra_specs_matches_with_op_or(self):
        self._do_extra_specs_ops_test(
            value='12',
            req='<or> 11 <or> 12',
            matches=True)

    def test_extra_specs_matches_with_op_or2(self):
        self._do_extra_specs_ops_test(
            value='12',
            req='<or> 11 <or> 12 <or>',
            matches=True)

    def test_extra_specs_fails_with_op_or(self):
        self._do_extra_specs_ops_test(
            value='13',
            req='<or> 11 <or> 12',
            matches=False)

    def test_extra_specs_fails_with_op_or2(self):
        self._do_extra_specs_ops_test(
            value='13',
            req='<or> 11 <or> 12 <or>',
            matches=False)

    def test_extra_specs_matches_with_op_le(self):
        self._do_extra_specs_ops_test(
            value='2',
            req='<= 10',
            matches=True)

    def test_extra_specs_fails_with_op_le(self):
        self._do_extra_specs_ops_test(
            value='3',
            req='<= 2',
            matches=False)

    def test_extra_specs_matches_with_op_ge(self):
        self._do_extra_specs_ops_test(
            value='3',
            req='>= 1',
            matches=True)

    def test_extra_specs_fails_with_op_ge(self):
        self._do_extra_specs_ops_test(
            value='2',
            req='>= 3',
            matches=False)
@ -0,0 +1,53 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Tests For IgnoreAttemptedHost filter.
"""

from oslo_context import context

from manila.scheduler.filters import ignore_attempted_hosts
from manila import test
from manila.tests.scheduler import fakes


class HostFiltersTestCase(test.TestCase):
    """Test case for IgnoreAttemptedHost filter."""

    def setUp(self):
        super(HostFiltersTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.filter = ignore_attempted_hosts.IgnoreAttemptedHostsFilter()

    def test_ignore_attempted_hosts_filter_disabled(self):
        # Test case where re-scheduling is disabled.
        host = fakes.FakeHostState('host1', {})
        filter_properties = {}
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    def test_ignore_attempted_hosts_filter_pass(self):
        # Node not previously tried.
        host = fakes.FakeHostState('host1', {})
        attempted = dict(num_attempts=2, hosts=['host2'])
        filter_properties = dict(retry=attempted)
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    def test_ignore_attempted_hosts_filter_fail(self):
        # Node was already tried.
        host = fakes.FakeHostState('host1', {})
        attempted = dict(num_attempts=2, hosts=['host1'])
        filter_properties = dict(retry=attempted)
        self.assertFalse(self.filter.host_passes(host, filter_properties))
326 manila/tests/scheduler/filters/test_json.py Normal file
@ -0,0 +1,326 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Tests For JsonFilter.
"""

from oslo_context import context
from oslo_serialization import jsonutils

from manila.scheduler.filters import json
from manila import test
from manila.tests.scheduler import fakes


class HostFiltersTestCase(test.TestCase):
    """Test case for JsonFilter."""

    def setUp(self):
        super(HostFiltersTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.json_query = jsonutils.dumps(
            ['and', ['>=', '$free_ram_mb', 1024],
             ['>=', '$free_disk_mb', 200 * 1024]])
        self.filter = json.JsonFilter()

    def test_json_filter_passes(self):
        filter_properties = {'resource_type': {'memory_mb': 1024,
                                               'root_gb': 200,
                                               'ephemeral_gb': 0},
                             'scheduler_hints': {'query': self.json_query}}
        capabilities = {'enabled': True}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 1024,
                                    'free_disk_mb': 200 * 1024,
                                    'capabilities': capabilities})
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    def test_json_filter_passes_with_no_query(self):
        filter_properties = {'resource_type': {'memory_mb': 1024,
                                               'root_gb': 200,
                                               'ephemeral_gb': 0}}
        capabilities = {'enabled': True}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 0,
                                    'free_disk_mb': 0,
                                    'capabilities': capabilities})
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    def test_json_filter_fails_on_memory(self):
        filter_properties = {'resource_type': {'memory_mb': 1024,
                                               'root_gb': 200,
                                               'ephemeral_gb': 0},
                             'scheduler_hints': {'query': self.json_query}}
        capabilities = {'enabled': True}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 1023,
                                    'free_disk_mb': 200 * 1024,
                                    'capabilities': capabilities})
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_json_filter_fails_on_disk(self):
        filter_properties = {'resource_type': {'memory_mb': 1024,
                                               'root_gb': 200,
                                               'ephemeral_gb': 0},
                             'scheduler_hints': {'query': self.json_query}}
        capabilities = {'enabled': True}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 1024,
                                    'free_disk_mb': (200 * 1024) - 1,
                                    'capabilities': capabilities})
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_json_filter_fails_on_caps_disabled(self):
        json_query = jsonutils.dumps(
            ['and', ['>=', '$free_ram_mb', 1024],
             ['>=', '$free_disk_mb', 200 * 1024],
             '$capabilities.enabled'])
        filter_properties = {'resource_type': {'memory_mb': 1024,
                                               'root_gb': 200,
                                               'ephemeral_gb': 0},
                             'scheduler_hints': {'query': json_query}}
        capabilities = {'enabled': False}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 1024,
                                    'free_disk_mb': 200 * 1024,
                                    'capabilities': capabilities})
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_json_filter_fails_on_service_disabled(self):
        json_query = jsonutils.dumps(
            ['and', ['>=', '$free_ram_mb', 1024],
             ['>=', '$free_disk_mb', 200 * 1024],
             ['not', '$service.disabled']])
        filter_properties = {'resource_type': {'memory_mb': 1024,
                                               'local_gb': 200},
                             'scheduler_hints': {'query': json_query}}
        capabilities = {'enabled': True}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 1024,
                                    'free_disk_mb': 200 * 1024,
                                    'capabilities': capabilities})
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_json_filter_happy_day(self):
        """Test json filter more thoroughly."""
        raw = ['and',
               '$capabilities.enabled',
               ['=', '$capabilities.opt1', 'match'],
               ['or',
                ['and',
                 ['<', '$free_ram_mb', 30],
                 ['<', '$free_disk_mb', 300]],
                ['and',
                 ['>', '$free_ram_mb', 30],
                 ['>', '$free_disk_mb', 300]]]]
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }

        # Passes
        capabilities = {'enabled': True, 'opt1': 'match'}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 10,
                                    'free_disk_mb': 200,
                                    'capabilities': capabilities,
                                    'service': service})
        self.assertTrue(self.filter.host_passes(host, filter_properties))

        # Passes
        capabilities = {'enabled': True, 'opt1': 'match'}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 40,
                                    'free_disk_mb': 400,
                                    'capabilities': capabilities,
                                    'service': service})
        self.assertTrue(self.filter.host_passes(host, filter_properties))

        # Fails due to capabilities being disabled
        capabilities = {'enabled': False, 'opt1': 'match'}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 40,
                                    'free_disk_mb': 400,
                                    'capabilities': capabilities,
                                    'service': service})
        self.assertFalse(self.filter.host_passes(host, filter_properties))

        # Fails due to being exact memory/disk we don't want
        capabilities = {'enabled': True, 'opt1': 'match'}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 30,
                                    'free_disk_mb': 300,
                                    'capabilities': capabilities,
                                    'service': service})
        self.assertFalse(self.filter.host_passes(host, filter_properties))

        # Fails due to memory lower but disk higher
        capabilities = {'enabled': True, 'opt1': 'match'}
        service = {'disabled': False}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 20,
                                    'free_disk_mb': 400,
                                    'capabilities': capabilities,
                                    'service': service})
        self.assertFalse(self.filter.host_passes(host, filter_properties))

        # Fails due to capabilities 'opt1' not equal
        capabilities = {'enabled': True, 'opt1': 'no-match'}
        service = {'enabled': True}
        host = fakes.FakeHostState('host1',
                                   {'free_ram_mb': 20,
                                    'free_disk_mb': 400,
                                    'capabilities': capabilities,
                                    'service': service})
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_json_filter_basic_operators(self):
        host = fakes.FakeHostState('host1',
                                   {'capabilities': {'enabled': True}})
        # (operator, arguments, expected_result)
        ops_to_test = [
            ['=', [1, 1], True],
            ['=', [1, 2], False],
            ['<', [1, 2], True],
            ['<', [1, 1], False],
            ['<', [2, 1], False],
            ['>', [2, 1], True],
            ['>', [2, 2], False],
            ['>', [2, 3], False],
            ['<=', [1, 2], True],
            ['<=', [1, 1], True],
            ['<=', [2, 1], False],
            ['>=', [2, 1], True],
            ['>=', [2, 2], True],
            ['>=', [2, 3], False],
            ['in', [1, 1], True],
            ['in', [1, 1, 2, 3], True],
            ['in', [4, 1, 2, 3], False],
            ['not', [True], False],
            ['not', [False], True],
            ['or', [True, False], True],
            ['or', [False, False], False],
            ['and', [True, True], True],
            ['and', [False, False], False],
            ['and', [True, False], False],
            # Nested ((True or False) and (2 > 1)) == Passes
            ['and', [['or', True, False], ['>', 2, 1]], True]]

        for (op, args, expected) in ops_to_test:
            raw = [op] + args
            filter_properties = {
                'scheduler_hints': {
                    'query': jsonutils.dumps(raw),
                },
            }
            self.assertEqual(expected,
                             self.filter.host_passes(host, filter_properties))

        # This results in [False, True, False, True] and if any are True
        # then it passes...
        raw = ['not', True, False, True, False]
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        self.assertTrue(self.filter.host_passes(host, filter_properties))

        # This results in [False, False, False] and if any are True
        # then it passes...which this doesn't
        raw = ['not', True, True, True]
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_json_filter_unknown_operator_raises(self):
        raw = ['!=', 1, 2]
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        host = fakes.FakeHostState('host1',
                                   {'capabilities': {'enabled': True}})
        self.assertRaises(KeyError,
                          self.filter.host_passes, host, filter_properties)

    def test_json_filter_empty_filters_pass(self):
        host = fakes.FakeHostState('host1',
                                   {'capabilities': {'enabled': True}})

        raw = []
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        self.assertTrue(self.filter.host_passes(host, filter_properties))
        raw = {}
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    def test_json_filter_invalid_num_arguments_fails(self):
        host = fakes.FakeHostState('host1',
                                   {'capabilities': {'enabled': True}})

        raw = ['>', ['and', ['or', ['not', ['<', ['>=', ['<=', ['in', ]]]]]]]]
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        self.assertFalse(self.filter.host_passes(host, filter_properties))

        raw = ['>', 1]
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        self.assertFalse(self.filter.host_passes(host, filter_properties))

    def test_json_filter_unknown_variable_ignored(self):
        host = fakes.FakeHostState('host1',
                                   {'capabilities': {'enabled': True}})

        raw = ['=', '$........', 1, 1]
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        self.assertTrue(self.filter.host_passes(host, filter_properties))

        raw = ['=', '$foo', 2, 2]
        filter_properties = {
            'scheduler_hints': {
                'query': jsonutils.dumps(raw),
            },
        }
        self.assertTrue(self.filter.host_passes(host, filter_properties))
50 manila/tests/scheduler/filters/test_retry.py Normal file
@ -0,0 +1,50 @@
# Copyright 2011 OpenStack LLC. # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For RetryFilter.
"""

from manila import context
from manila.scheduler.filters import retry
from manila import test
from manila.tests.scheduler import fakes


class HostFiltersTestCase(test.TestCase):
    """Test case for RetryFilter."""

    def setUp(self):
        super(HostFiltersTestCase, self).setUp()
        self.context = context.RequestContext('fake', 'fake')
        self.filter = retry.RetryFilter()

    def test_retry_filter_disabled(self):
        # Test case where retry/re-scheduling is disabled.
        host = fakes.FakeHostState('host1', {})
        filter_properties = {}
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    def test_retry_filter_pass(self):
        # Node not previously tried.
        host = fakes.FakeHostState('host1', {})
        retry = dict(num_attempts=2, hosts=['host2'])
        filter_properties = dict(retry=retry)
        self.assertTrue(self.filter.host_passes(host, filter_properties))

    def test_retry_filter_fail(self):
        # Node was already tried.
        host = fakes.FakeHostState('host1', {})
        retry = dict(num_attempts=1, hosts=['host1'])
        filter_properties = dict(retry=retry)
        self.assertFalse(self.filter.host_passes(host, filter_properties))
@ -26,7 +26,7 @@ from six import moves

from manila import db
from manila import exception
from manila.openstack.common.scheduler import filters
from manila.scheduler.filters import base_host
from manila.scheduler import host_manager
from manila import test
from manila.tests.scheduler import fakes
@ -36,12 +36,12 @@ from manila import utils
CONF = cfg.CONF


class FakeFilterClass1(filters.BaseHostFilter):
class FakeFilterClass1(base_host.BaseHostFilter):
    def host_passes(self, host_state, filter_properties):
        pass


class FakeFilterClass2(filters.BaseHostFilter):
class FakeFilterClass2(base_host.BaseHostFilter):
    def host_passes(self, host_state, filter_properties):
        pass

245 manila/tests/scheduler/test_manager.py Normal file
@@ -0,0 +1,245 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler Manager
"""

import ddt
import mock
from oslo_config import cfg

from manila import context
from manila import db
from manila import exception
from manila.scheduler.drivers import base
from manila.scheduler.drivers import filter
from manila.scheduler import manager
from manila.share import rpcapi as share_rpcapi
from manila import test
from manila.tests import db_utils

CONF = cfg.CONF


@ddt.ddt
class SchedulerManagerTestCase(test.TestCase):
    """Test case for scheduler manager."""

    manager_cls = manager.SchedulerManager
    driver_cls = base.Scheduler
    driver_cls_name = 'manila.scheduler.drivers.base.Scheduler'

    def setUp(self):
        super(SchedulerManagerTestCase, self).setUp()
        self.flags(scheduler_driver=self.driver_cls_name)
        self.manager = self.manager_cls()
        self.context = context.RequestContext('fake_user', 'fake_project')
        self.topic = 'fake_topic'
        self.fake_args = (1, 2, 3)
        self.fake_kwargs = {'cat': 'meow', 'dog': 'woof'}

    def test_1_correct_init(self):
        # Correct scheduler driver
        manager = self.manager
        self.assertTrue(isinstance(manager.driver,
                                   self.driver_cls))

    @ddt.data('manila.scheduler.filter_scheduler.FilterScheduler',
              'manila.scheduler.drivers.filter.FilterScheduler')
    def test_scheduler_driver_mapper(self, driver_class):

        test_manager = manager.SchedulerManager(scheduler_driver=driver_class)

        self.assertTrue(isinstance(test_manager.driver,
                                   filter.FilterScheduler))

    def test_init_host(self):

        self.mock_object(context,
                         'get_admin_context',
                         mock.Mock(return_value='fake_admin_context'))
        self.mock_object(self.manager, 'request_service_capabilities')

        self.manager.init_host()

        self.manager.request_service_capabilities.assert_called_once_with(
            'fake_admin_context')

    def test_get_host_list(self):

        self.mock_object(self.manager.driver, 'get_host_list')

        self.manager.get_host_list(context)

        self.manager.driver.get_host_list.assert_called_once_with()

    def test_get_service_capabilities(self):

        self.mock_object(self.manager.driver, 'get_service_capabilities')

        self.manager.get_service_capabilities(context)

        self.manager.driver.get_service_capabilities.assert_called_once_with()

    def test_update_service_capabilities(self):
        service_name = 'fake_service'
        host = 'fake_host'
        with mock.patch.object(self.manager.driver,
                               'update_service_capabilities', mock.Mock()):
            self.manager.update_service_capabilities(
                self.context, service_name=service_name, host=host)
            (self.manager.driver.update_service_capabilities.
                assert_called_once_with(service_name, host, {}))
        with mock.patch.object(self.manager.driver,
                               'update_service_capabilities', mock.Mock()):
            capabilities = {'fake_capability': 'fake_value'}
            self.manager.update_service_capabilities(
                self.context, service_name=service_name, host=host,
                capabilities=capabilities)
            (self.manager.driver.update_service_capabilities.
                assert_called_once_with(service_name, host, capabilities))

    @mock.patch.object(db, 'share_update', mock.Mock())
    def test_create_share_exception_puts_share_in_error_state(self):
        """Test NoValidHost exception for create_share.

        Puts the share in 'error' state and eats the exception.
        """
        def raise_no_valid_host(*args, **kwargs):
            raise exception.NoValidHost(reason="")

        fake_share_id = 1

        request_spec = {'share_id': fake_share_id}
        with mock.patch.object(self.manager.driver,
                               'schedule_create_share',
                               mock.Mock(side_effect=raise_no_valid_host)):
            self.mock_object(manager.LOG, 'error')

            self.manager.create_share_instance(
                self.context, request_spec=request_spec, filter_properties={})

            db.share_update.assert_called_once_with(
                self.context, fake_share_id, {'status': 'error'})
            (self.manager.driver.schedule_create_share.
                assert_called_once_with(self.context, request_spec, {}))
            manager.LOG.error.assert_called_once_with(mock.ANY, mock.ANY)

    @mock.patch.object(db, 'share_update', mock.Mock())
    def test_create_share_other_exception_puts_share_in_error_state(self):
        """Test any exception except NoValidHost for create_share.

        Puts the share in 'error' state and re-raises the exception.
        """
        fake_share_id = 1

        request_spec = {'share_id': fake_share_id}
        with mock.patch.object(self.manager.driver,
                               'schedule_create_share',
                               mock.Mock(side_effect=exception.QuotaError)):
            self.mock_object(manager.LOG, 'error')

            self.assertRaises(exception.QuotaError,
                              self.manager.create_share_instance,
                              self.context,
                              request_spec=request_spec,
                              filter_properties={})

            db.share_update.assert_called_once_with(
                self.context, fake_share_id, {'status': 'error'})
            (self.manager.driver.schedule_create_share.
                assert_called_once_with(self.context, request_spec, {}))
            manager.LOG.error.assert_called_once_with(mock.ANY, mock.ANY)

    def test_get_pools(self):
        """Ensure get_pools exists and calls base_scheduler.get_pools."""
        mock_get_pools = self.mock_object(self.manager.driver,
                                          'get_pools',
                                          mock.Mock(return_value='fake_pools'))

        result = self.manager.get_pools(self.context, filters='fake_filters')

        mock_get_pools.assert_called_once_with(self.context, 'fake_filters')
        self.assertEqual('fake_pools', result)

    @mock.patch.object(db, 'consistency_group_update', mock.Mock())
    def test_create_cg_no_valid_host_puts_cg_in_error_state(self):
        """Test that NoValidHost is raised for create_consistency_group.

        Puts the share in 'error' state and eats the exception.
        """
        def raise_no_valid_host(*args, **kwargs):
            raise exception.NoValidHost(reason="")

        fake_cg_id = 1
        cg_id = fake_cg_id
        request_spec = {"consistency_group_id": cg_id}
        with mock.patch.object(self.manager.driver,
                               'schedule_create_consistency_group',
                               mock.Mock(side_effect=raise_no_valid_host)):
            self.manager.create_consistency_group(self.context,
                                                  fake_cg_id,
                                                  request_spec=request_spec,
                                                  filter_properties={})
            db.consistency_group_update.assert_called_once_with(
                self.context, fake_cg_id, {'status': 'error'})
            (self.manager.driver.schedule_create_consistency_group.
                assert_called_once_with(self.context, cg_id, request_spec, {}))

    @mock.patch.object(db, 'consistency_group_update', mock.Mock())
    def test_create_cg_exception_puts_cg_in_error_state(self):
        """Test that exceptions for create_consistency_group.

        Puts the share in 'error' state and raises the exception.
        """

        fake_cg_id = 1
        cg_id = fake_cg_id
        request_spec = {"consistency_group_id": cg_id}
        with mock.patch.object(self.manager.driver,
                               'schedule_create_consistency_group',
                               mock.Mock(side_effect=exception.NotFound)):
            self.assertRaises(exception.NotFound,
                              self.manager.create_consistency_group,
                              self.context, fake_cg_id,
                              request_spec=request_spec,
                              filter_properties={})

    def test_migrate_share_to_host(self):

        share = db_utils.create_share()
        host = 'fake@backend#pool'

        self.mock_object(db, 'share_get', mock.Mock(return_value=share))
        self.mock_object(share_rpcapi.ShareAPI, 'migrate_share')
        self.mock_object(base.Scheduler,
                         'host_passes_filters',
                         mock.Mock(return_value=host))

        self.manager.migrate_share_to_host(self.context, share['id'], host,
                                           False, {}, None)

    def test_migrate_share_to_host_no_valid_host(self):

        share = db_utils.create_share()
        host = 'fake@backend#pool'

        self.mock_object(
            base.Scheduler, 'host_passes_filters',
            mock.Mock(side_effect=[exception.NoValidHost('fake')]))

        self.manager.migrate_share_to_host(self.context, share['id'], host,
                                           False, {}, None)
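
The test_scheduler_driver_mapper case above drives SchedulerManager with both the pre-reorganization and the new FilterScheduler paths and expects the same driver class either way, which implies a translation table for deprecated scheduler_driver values. A hypothetical sketch of that idea (the actual table lives in the scheduler manager module and is not shown in this hunk):

    # Hypothetical illustration of mapping legacy driver paths to the
    # reorganized locations; the real mapping may contain more entries.
    DEPRECATED_DRIVER_MAPPING = {
        'manila.scheduler.filter_scheduler.FilterScheduler':
            'manila.scheduler.drivers.filter.FilterScheduler',
    }


    def resolve_scheduler_driver(path):
        # Legacy names are rewritten; new-style names pass through untouched.
        return DEPRECATED_DRIVER_MAPPING.get(path, path)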
@@ -1,389 +0,0 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler
"""

import mock
from oslo_config import cfg
from oslo_utils import timeutils

from manila import context
from manila import db
from manila import exception
from manila.scheduler import driver
from manila.scheduler import manager
from manila.scheduler import simple
from manila.share import rpcapi as share_rpcapi
from manila import test
from manila.tests import db_utils
from manila import utils

CONF = cfg.CONF


class SchedulerManagerTestCase(test.TestCase):
    """Test case for scheduler manager."""

    manager_cls = manager.SchedulerManager
    driver_cls = driver.Scheduler
    driver_cls_name = 'manila.scheduler.driver.Scheduler'

    def setUp(self):
        super(SchedulerManagerTestCase, self).setUp()
        self.flags(scheduler_driver=self.driver_cls_name)
        self.manager = self.manager_cls()
        self.context = context.RequestContext('fake_user', 'fake_project')
        self.topic = 'fake_topic'
        self.fake_args = (1, 2, 3)
        self.fake_kwargs = {'cat': 'meow', 'dog': 'woof'}

    def test_1_correct_init(self):
        # Correct scheduler driver
        manager = self.manager
        self.assertTrue(isinstance(manager.driver, self.driver_cls))

    def test_update_service_capabilities(self):
        service_name = 'fake_service'
        host = 'fake_host'
        with mock.patch.object(self.manager.driver,
                               'update_service_capabilities', mock.Mock()):
            self.manager.update_service_capabilities(
                self.context, service_name=service_name, host=host)
            self.manager.driver.update_service_capabilities.\
                assert_called_once_with(service_name, host, {})
        with mock.patch.object(self.manager.driver,
                               'update_service_capabilities', mock.Mock()):
            capabilities = {'fake_capability': 'fake_value'}
            self.manager.update_service_capabilities(
                self.context, service_name=service_name, host=host,
                capabilities=capabilities)
            self.manager.driver.update_service_capabilities.\
                assert_called_once_with(service_name, host, capabilities)

    @mock.patch.object(db, 'share_update', mock.Mock())
    def test_create_share_exception_puts_share_in_error_state(self):
        """Test that a NoValideHost exception for create_share.

        Puts the share in 'error' state and eats the exception.
        """
        def raise_no_valid_host(*args, **kwargs):
            raise exception.NoValidHost(reason="")

        fake_share_id = 1

        request_spec = {'share_id': fake_share_id}
        with mock.patch.object(self.manager.driver,
                               'schedule_create_share',
                               mock.Mock(side_effect=raise_no_valid_host)):
            self.mock_object(manager.LOG, 'error')
            self.manager.create_share_instance(
                self.context, request_spec=request_spec, filter_properties={})
            db.share_update.assert_called_once_with(
                self.context, fake_share_id, {'status': 'error'})
            self.manager.driver.schedule_create_share.assert_called_once_with(
                self.context, request_spec, {})
            manager.LOG.error.assert_called_once_with(mock.ANY, mock.ANY)

    def test_get_pools(self):
        """Ensure get_pools exists and calls driver.get_pools."""
        mock_get_pools = self.mock_object(self.manager.driver, 'get_pools',
                                          mock.Mock(return_value='fake_pools'))

        result = self.manager.get_pools(self.context, filters='fake_filters')

        mock_get_pools.assert_called_once_with(self.context, 'fake_filters')
        self.assertEqual('fake_pools', result)

    @mock.patch.object(db, 'consistency_group_update', mock.Mock())
    def test_create_cg_no_valid_host_puts_cg_in_error_state(self):
        """Test that NoValidHost is raised for create_consistency_group.

        Puts the share in 'error' state and eats the exception.
        """
        def raise_no_valid_host(*args, **kwargs):
            raise exception.NoValidHost(reason="")

        fake_cg_id = 1
        cg_id = fake_cg_id
        request_spec = {"consistency_group_id": cg_id}
        with mock.patch.object(self.manager.driver,
                               'schedule_create_consistency_group',
                               mock.Mock(side_effect=raise_no_valid_host)):
            self.manager.create_consistency_group(self.context,
                                                  fake_cg_id,
                                                  request_spec=request_spec,
                                                  filter_properties={})
            db.consistency_group_update.assert_called_once_with(
                self.context, fake_cg_id, {'status': 'error'})
            self.manager.driver.schedule_create_consistency_group\
                .assert_called_once_with(self.context, cg_id,
                                         request_spec, {})

    @mock.patch.object(db, 'consistency_group_update', mock.Mock())
    def test_create_cg_exception_puts_cg_in_error_state(self):
        """Test that exceptions for create_consistency_group.

        Puts the share in 'error' state and raises the exception.
        """

        fake_cg_id = 1
        cg_id = fake_cg_id
        request_spec = {"consistency_group_id": cg_id}
        with mock.patch.object(self.manager.driver,
                               'schedule_create_consistency_group',
                               mock.Mock(side_effect=exception.NotFound)):
            self.assertRaises(exception.NotFound,
                              self.manager.create_consistency_group,
                              self.context, fake_cg_id,
                              request_spec=request_spec,
                              filter_properties={})

    def test_migrate_share_to_host(self):

        share = db_utils.create_share()
        host = 'fake@backend#pool'

        self.mock_object(db, 'share_get', mock.Mock(return_value=share))
        self.mock_object(share_rpcapi.ShareAPI, 'migrate_share')
        self.mock_object(driver.Scheduler, 'host_passes_filters',
                         mock.Mock(return_value=host))

        self.manager.migrate_share_to_host(self.context, share['id'], host,
                                           False, {}, None)

    def test_migrate_share_to_host_no_valid_host(self):

        share = db_utils.create_share()
        host = 'fake@backend#pool'

        self.mock_object(
            driver.Scheduler, 'host_passes_filters',
            mock.Mock(side_effect=[exception.NoValidHost('fake')]))

        self.manager.migrate_share_to_host(self.context, share['id'], host,
                                           False, {}, None)


class SchedulerTestCase(test.TestCase):
    """Test case for base scheduler driver class."""

    # So we can subclass this test and re-use tests if we need.
    driver_cls = driver.Scheduler

    def setUp(self):
        super(SchedulerTestCase, self).setUp()
        self.driver = self.driver_cls()
        self.context = context.RequestContext('fake_user', 'fake_project')
        self.topic = 'fake_topic'

    def test_update_service_capabilities(self):
        service_name = 'fake_service'
        host = 'fake_host'
        capabilities = {'fake_capability': 'fake_value'}
        with mock.patch.object(self.driver.host_manager,
                               'update_service_capabilities', mock.Mock()):
            self.driver.update_service_capabilities(
                service_name, host, capabilities)
            self.driver.host_manager.update_service_capabilities.\
                assert_called_once_with(service_name, host, capabilities)

    def test_hosts_up(self):
        service1 = {'host': 'host1'}
        service2 = {'host': 'host2'}
        services = [service1, service2]

        def fake_service_is_up(*args, **kwargs):
            if args[0]['host'] == 'host1':
                return False
            return True

        with mock.patch.object(db, 'service_get_all_by_topic',
                               mock.Mock(return_value=services)):
            with mock.patch.object(utils, 'service_is_up',
                                   mock.Mock(side_effect=fake_service_is_up)):
                result = self.driver.hosts_up(self.context, self.topic)
                self.assertEqual(['host2'], result)
            db.service_get_all_by_topic.assert_called_once_with(
                self.context, self.topic)


class SchedulerDriverBaseTestCase(SchedulerTestCase):
    """Test cases for base scheduler driver class methods.

    These can't fail if the driver is changed.
    """

    def test_unimplemented_schedule(self):
        fake_args = (1, 2, 3)
        fake_kwargs = {'cat': 'meow'}

        self.assertRaises(NotImplementedError, self.driver.schedule,
                          self.context, self.topic, 'schedule_something',
                          *fake_args, **fake_kwargs)


class SchedulerDriverModuleTestCase(test.TestCase):
    """Test case for scheduler driver module methods."""

    def setUp(self):
        super(SchedulerDriverModuleTestCase, self).setUp()
        self.context = context.RequestContext('fake_user', 'fake_project')

    @mock.patch.object(db, 'share_update', mock.Mock())
    def test_share_host_update_db(self):
        with mock.patch.object(timeutils, 'utcnow',
                               mock.Mock(return_value='fake-now')):
            driver.share_update_db(self.context, 31337, 'fake_host')
            db.share_update.assert_called_once_with(
                self.context, 31337,
                {'host': 'fake_host', 'scheduled_at': 'fake-now'})


class SimpleSchedulerSharesTestCase(test.TestCase):
    """Test case for simple scheduler create share method."""

    def setUp(self):
        super(SimpleSchedulerSharesTestCase, self).setUp()
        self.mock_object(share_rpcapi, 'ShareAPI')
        self.driver = simple.SimpleScheduler()

        self.context = context.RequestContext('fake_user', 'fake_project')
        self.admin_context = context.RequestContext('fake_admin_user',
                                                    'fake_project')
        self.admin_context.is_admin = True

    @mock.patch.object(utils, 'service_is_up', mock.Mock(return_value=True))
    def test_create_share_if_two_services_up(self):
        share_id = 'fake'
        fake_share = {'id': share_id, 'size': 1}
        fake_service_1 = {'disabled': False, 'host': 'fake_host1'}
        fake_service_2 = {'disabled': False, 'host': 'fake_host2'}
        fake_result = [(fake_service_1, 2), (fake_service_2, 1)]
        fake_request_spec = {
            'share_id': share_id,
            'share_properties': fake_share,
        }
        self.mock_object(db, 'service_get_all_share_sorted',
                         mock.Mock(return_value=fake_result))
        self.mock_object(driver, 'share_update_db',
                         mock.Mock(return_value=db_utils.create_share()))

        self.driver.schedule_create_share(self.context,
                                          fake_request_spec, {})
        utils.service_is_up.assert_called_once_with(utils.IsAMatcher(dict))
        db.service_get_all_share_sorted.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext))
        driver.share_update_db.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext), share_id, 'fake_host1')

    def test_create_share_if_services_not_available(self):
        share_id = 'fake'
        fake_share = {'id': share_id, 'size': 1}
        fake_result = []
        fake_request_spec = {
            'share_id': share_id,
            'share_properties': fake_share,
        }
        with mock.patch.object(db, 'service_get_all_share_sorted',
                               mock.Mock(return_value=fake_result)):
            self.assertRaises(exception.NoValidHost,
                              self.driver.schedule_create_share,
                              self.context, fake_request_spec, {})
            db.service_get_all_share_sorted.assert_called_once_with(
                utils.IsAMatcher(context.RequestContext))

    def test_create_share_if_max_gigabytes_exceeded(self):
        share_id = 'fake'
        fake_share = {'id': share_id, 'size': 10001}
        fake_service_1 = {'disabled': False, 'host': 'fake_host1'}
        fake_service_2 = {'disabled': False, 'host': 'fake_host2'}
        fake_result = [(fake_service_1, 5), (fake_service_2, 7)]
        fake_request_spec = {
            'share_id': share_id,
            'share_properties': fake_share,
        }
        with mock.patch.object(db, 'service_get_all_share_sorted',
                               mock.Mock(return_value=fake_result)):
            self.assertRaises(exception.NoValidHost,
                              self.driver.schedule_create_share,
                              self.context, fake_request_spec, {})
            db.service_get_all_share_sorted.assert_called_once_with(
                utils.IsAMatcher(context.RequestContext))

    @mock.patch.object(utils, 'service_is_up', mock.Mock(return_value=True))
    def test_create_share_availability_zone(self):
        share_id = 'fake'
        fake_share = {
            'id': share_id,
            'size': 1,
        }
        fake_instance = {
            'availability_zone_id': 'fake',
        }
        fake_service_1 = {
            'disabled': False, 'host': 'fake_host1',
            'availability_zone_id': 'fake',
        }
        fake_service_2 = {
            'disabled': False, 'host': 'fake_host2',
            'availability_zone_id': 'super_fake',
        }
        fake_result = [(fake_service_1, 0), (fake_service_2, 1)]
        fake_request_spec = {
            'share_id': share_id,
            'share_properties': fake_share,
            'share_instance_properties': fake_instance,
        }
        self.mock_object(db, 'service_get_all_share_sorted',
                         mock.Mock(return_value=fake_result))
        self.mock_object(driver, 'share_update_db',
                         mock.Mock(return_value=db_utils.create_share()))

        self.driver.schedule_create_share(self.context,
                                          fake_request_spec, {})
        utils.service_is_up.assert_called_once_with(fake_service_1)
        driver.share_update_db.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext), share_id,
            fake_service_1['host'])
        db.service_get_all_share_sorted.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext))

    @mock.patch.object(utils, 'service_is_up', mock.Mock(return_value=True))
    def test_create_share_availability_zone_on_host(self):
        share_id = 'fake'
        fake_share = {
            'id': share_id,
            'availability_zone': 'fake:fake',
            'size': 1,
        }
        fake_service = {'disabled': False, 'host': 'fake'}
        fake_request_spec = {
            'share_id': share_id,
            'share_properties': fake_share,
        }
        self.mock_object(db, 'service_get_all_share_sorted',
                         mock.Mock(return_value=[(fake_service, 1)]))
        self.mock_object(driver, 'share_update_db',
                         mock.Mock(return_value=db_utils.create_share()))

        self.driver.schedule_create_share(self.admin_context,
                                          fake_request_spec, {})
        utils.service_is_up.assert_called_once_with(fake_service)
        db.service_get_all_share_sorted.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext))
        driver.share_update_db.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext), share_id, 'fake')
@@ -13,7 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 """
-Tests For PickledScheduler.
+Tests For scheduler options.
 """

 import datetime

0 manila/tests/scheduler/weighers/__init__.py Normal file

64 manila/tests/scheduler/weighers/test_base.py Normal file
@@ -0,0 +1,64 @@
# Copyright 2011-2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Tests For Scheduler weighers.
"""

from manila.scheduler.weighers import base
from manila import test
from manila.tests.scheduler import fakes


class TestWeightHandler(test.TestCase):
    def test_get_all_classes(self):
        namespace = "manila.tests.scheduler.fakes"
        handler = base.BaseWeightHandler(
            base.BaseWeigher, namespace)
        classes = handler.get_all_classes()
        self.assertTrue(fakes.FakeWeigher1 in classes)
        self.assertTrue(fakes.FakeWeigher2 in classes)
        self.assertFalse(fakes.FakeClass in classes)

    def test_no_multiplier(self):
        class FakeWeigher(base.BaseWeigher):
            def _weigh_object(self, *args, **kwargs):
                pass

        self.assertEqual(1.0,
                         FakeWeigher().weight_multiplier())

    def test_no_weight_object(self):
        class FakeWeigher(base.BaseWeigher):
            def weight_multiplier(self, *args, **kwargs):
                pass
        self.assertRaises(TypeError,
                          FakeWeigher)

    def test_normalization(self):
        # weight_list, expected_result, minval, maxval
        map_ = (
            ((), (), None, None),
            ((0.0, 0.0), (0.0, 0.0), None, None),
            ((1.0, 1.0), (0.0, 0.0), None, None),

            ((20.0, 50.0), (0.0, 1.0), None, None),
            ((20.0, 50.0), (0.0, 0.375), None, 100.0),
            ((20.0, 50.0), (0.4, 1.0), 0.0, None),
            ((20.0, 50.0), (0.2, 0.5), 0.0, 100.0),
        )
        for seq, result, minval, maxval in map_:
            ret = base.normalize(seq, minval=minval, maxval=maxval)
            self.assertEqual(result, tuple(ret))
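
The normalization table above fully pins down the expected behaviour: values are scaled linearly into [0, 1] against minval/maxval, which default to the extremes of the sequence, and a zero range collapses everything to 0.0. A minimal sketch consistent with that table (not necessarily the exact body of manila.scheduler.weighers.base.normalize):

    def normalize(weight_list, minval=None, maxval=None):
        # Linear min-max scaling matching the expectations in map_ above.
        if not weight_list:
            return ()
        if maxval is None:
            maxval = max(weight_list)
        if minval is None:
            minval = min(weight_list)
        weight_range = maxval - minval
        if weight_range == 0:
            # Degenerate case, e.g. (1.0, 1.0) -> (0.0, 0.0).
            return tuple(0.0 for _ in weight_list)
        return tuple((value - minval) / weight_range for value in weight_list)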
@@ -20,8 +20,8 @@ import mock
 from oslo_config import cfg

 from manila import context
-from manila.openstack.common.scheduler import weights
-from manila.scheduler.weights import capacity
+from manila.scheduler.weighers import base_host
+from manila.scheduler.weighers import capacity
 from manila.share import utils
 from manila import test
 from manila.tests.scheduler import fakes
@@ -33,8 +33,8 @@ class CapacityWeigherTestCase(test.TestCase):
     def setUp(self):
         super(CapacityWeigherTestCase, self).setUp()
         self.host_manager = fakes.FakeHostManager()
-        self.weight_handler = weights.HostWeightHandler(
-            'manila.scheduler.weights')
+        self.weight_handler = base_host.HostWeightHandler(
+            'manila.scheduler.weighers')

     def _get_weighed_host(self, hosts, weight_properties=None, index=0):
         if weight_properties is None:
@@ -12,6 +12,9 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
+"""
+Tests For Pool Weigher.
+"""

 import mock
 from oslo_config import cfg
@@ -19,8 +22,8 @@ from oslo_utils import timeutils

 from manila import context
 from manila.db import api as db_api
-from manila.openstack.common.scheduler import weights
-from manila.scheduler.weights import pool
+from manila.scheduler.weighers import base_host
+from manila.scheduler.weighers import pool
 from manila.share import utils
 from manila import test
 from manila.tests.scheduler import fakes
@@ -32,8 +35,8 @@ class PoolWeigherTestCase(test.TestCase):
     def setUp(self):
         super(PoolWeigherTestCase, self).setUp()
         self.host_manager = fakes.FakeHostManager()
-        self.weight_handler = weights.HostWeightHandler(
-            'manila.scheduler.weights')
+        self.weight_handler = base_host.HostWeightHandler(
+            'manila.scheduler.weighers')
         share_servers = [
             {'id': 'fake_server_id0'},
             {'id': 'fake_server_id1'},
@@ -170,7 +173,7 @@ class PoolWeigherTestCase(test.TestCase):
         # host4: weight = 1*(1.0)
         # host5: weight = 1*(1.0)

-        # But after normalization all weights will be 0
+        # But after normalization all weighers will be 0

         weighed_host = self._get_weighed_host(self._get_all_hosts(),
                                               weight_properties)
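
Both weigher test cases now build their handler identically, so the change reduces to the handler class (base_host.HostWeightHandler) and the entry-point namespace it scans. A hedged usage sketch mirroring that setUp() code, with placeholder inputs:

    from manila.scheduler.weighers import base_host

    hosts = []              # normally HostState objects from the HostManager
    weight_properties = {}  # normally the request's weighing properties

    # Load every weigher registered under the renamed namespace and rank
    # the candidate hosts with them.
    handler = base_host.HostWeightHandler('manila.scheduler.weighers')
    weigher_classes = handler.get_all_classes()
    weighed_hosts = handler.get_weighed_objects(weigher_classes, hosts,
                                                weight_properties)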
22 setup.cfg
@@ -34,15 +34,16 @@ console_scripts =
     manila-scheduler = manila.cmd.scheduler:main
     manila-share = manila.cmd.share:main
 manila.scheduler.filters =
-    AvailabilityZoneFilter = manila.openstack.common.scheduler.filters.availability_zone_filter:AvailabilityZoneFilter
-    CapabilitiesFilter = manila.openstack.common.scheduler.filters.capabilities_filter:CapabilitiesFilter
-    CapacityFilter = manila.scheduler.filters.capacity_filter:CapacityFilter
-    ConsistencyGroupFilter = manila.scheduler.filters.consistency_group_filter:ConsistencyGroupFilter
-    JsonFilter = manila.openstack.common.scheduler.filters.json_filter:JsonFilter
-    RetryFilter = manila.scheduler.filters.retry_filter:RetryFilter
-manila.scheduler.weights =
-    CapacityWeigher = manila.scheduler.weights.capacity:CapacityWeigher
-    PoolWeigher = manila.scheduler.weights.pool:PoolWeigher
+    AvailabilityZoneFilter = manila.scheduler.filters.availability_zone:AvailabilityZoneFilter
+    CapabilitiesFilter = manila.scheduler.filters.capabilities:CapabilitiesFilter
+    CapacityFilter = manila.scheduler.filters.capacity:CapacityFilter
+    ConsistencyGroupFilter = manila.scheduler.filters.consistency_group:ConsistencyGroupFilter
+    IgnoreAttemptedHostsFilter = manila.scheduler.filters.ignore_attempted_hosts:IgnoreAttemptedHostsFilter
+    JsonFilter = manila.scheduler.filters.json:JsonFilter
+    RetryFilter = manila.scheduler.filters.retry:RetryFilter
+manila.scheduler.weighers =
+    CapacityWeigher = manila.scheduler.weighers.capacity:CapacityWeigher
+    PoolWeigher = manila.scheduler.weighers.pool:PoolWeigher
 # These are for backwards compat with Havana notification_driver configuration values
 oslo_messaging.notify.drivers =
     manila.openstack.common.notifier.log_notifier = oslo_messaging.notify._impl_log:LogDriver
@@ -55,6 +56,9 @@ oslo.config.opts =
 manila.share.drivers.emc.plugins =
     vnx = manila.share.drivers.emc.plugins.vnx.connection:VNXStorageConnection
     isilon = manila.share.drivers.emc.plugins.isilon.isilon:IsilonStorageConnection
+manila.tests.scheduler.fakes =
+    FakeWeigher1 = manila.tests.scheduler.fakes:FakeWeigher1
+    FakeWeigher2 = manila.tests.scheduler.fakes:FakeWeigher2
 tempest.test_plugins =
     manila_tests = manila_tempest_tests.plugin:ManilaTempestPlugin
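
The setup.cfg hunk is what makes the renamed modules discoverable: filters, weighers, and the test fakes are advertised as entry points, and the handlers enumerate them by namespace at runtime. A small illustration with stevedore (assuming the package is installed so its entry points are registered):

    from stevedore import extension

    # List the weigher plugins advertised by setup.cfg under the new
    # 'manila.scheduler.weighers' namespace.
    mgr = extension.ExtensionManager(namespace='manila.scheduler.weighers')
    print(sorted(mgr.names()))  # expected: ['CapacityWeigher', 'PoolWeigher']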