Move oslo-incubator's scheduler module to cinder
oslo-incubator is reaching its end of life, so the remaining dependencies on it should be moved into the cinder namespace. This commit does that for openstack.common.scheduler and also brings over the corresponding tests from the oslo-incubator repository. Change-Id: I10d88c120c9c847826986483065f5493e91f89d6 Closes-Bug: #1519337
parent cbd26a4143 · commit fab6b4ef58
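For code that consumed these modules the change is purely an import-path move; a minimal before/after sketch (the consuming module itself is hypothetical):

# before this commit (oslo-incubator copy):
#   from cinder.openstack.common.scheduler import filters
#   from cinder.openstack.common.scheduler import weights
# after this commit:
from cinder.scheduler import filters
from cinder.scheduler import weights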
@@ -1,38 +0,0 @@ (removed: the oslo-incubator copy of the "Scheduler host filters" package, containing BaseHostFilter and HostFilterHandler)
The deleted module is the same code that is re-added further down as the new cinder.scheduler filters package, except that it imported base_filter from cinder.openstack.common.scheduler rather than from cinder.scheduler.
@@ -1,45 +0,0 @@ (removed: the oslo-incubator copy of the "Scheduler host weights" package, containing WeighedHost, BaseHostWeigher and HostWeightHandler)
The deleted module matches the new cinder.scheduler weights package added further down, apart from importing base_weight from cinder.openstack.common.scheduler.
@@ -19,7 +19,7 @@ Filter support (base_filter.py)
 import logging

 from cinder.openstack.common._i18n import _LI
-from cinder.openstack.common.scheduler import base_handler
+from cinder.scheduler import base_handler

 LOG = logging.getLogger(__name__)

@@ -28,6 +28,7 @@ class BaseFilter(object):
     """Base class for all filter classes."""
     def _filter_one(self, obj, filter_properties):
         """Return True if it passes the filter, False otherwise.
+
         Override this in a subclass.
         """
         return True
@@ -48,9 +49,10 @@ class BaseFilter(object):
     run_filter_once_per_request = False

     def run_filter_for_index(self, index):
-        """Return True if the filter needs to be run for the "index-th"
-        instance in a request. Only need to override this if a filter
-        needs anything other than "first only" or "all" behaviour.
+        """Return True if the filter needs to be run for n-th instances.
+
+        Only need to override this if a filter needs anything other than
+        "first only" or "all" behaviour.
         """
         return not (self.run_filter_once_per_request and index > 0)

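A minimal sketch of the behaviour that docstring documents; the subclass name is illustrative, not part of the patch:

from cinder.scheduler import base_filter

class OncePerRequestFilter(base_filter.BaseFilter):
    # skip re-running this filter for every instance of one request
    run_filter_once_per_request = True

f = OncePerRequestFilter()
f.run_filter_for_index(0)   # True  - evaluated for the first instance
f.run_filter_for_index(1)   # False - skipped for the rest of the request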
@@ -32,8 +32,9 @@ class BaseHandler(object): (base_handler.py)
         self.extension_manager = extension.ExtensionManager(modifier_namespace)

     def _is_correct_class(self, cls):
-        """Return whether an object is a class of the correct type and
-        is not prefixed with an underscore.
+        """Return whether an object is a class of the correct type.
+
+        (or is not prefixed with an underscore)
         """
         return (inspect.isclass(cls) and
                 not cls.__name__.startswith('_') and
@@ -21,7 +21,7 @@ import abc (base_weight.py)

 import six

-from cinder.openstack.common.scheduler import base_handler
+from cinder.scheduler import base_handler


 def normalize(weight_list, minval=None, maxval=None):
@@ -87,9 +87,7 @@ class BaseWeigher(object):

     @abc.abstractmethod
     def _weigh_object(self, obj, weight_properties):
-        """Override in a subclass to specify a weight for a specific
-        object.
-        """
+        """Override in a subclass to specify a weight for a specific object."""

     def weigh_objects(self, weighed_obj_list, weight_properties):
         """Weigh multiple objects.
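base_weight.normalize() maps a weight list onto [0, 1]; the expected values below mirror the normalization cases in the new test_weights.py, and list() is used because the function may return a generator:

from cinder.scheduler import base_weight

# bounds are taken from the list itself when minval/maxval are omitted
list(base_weight.normalize((20.0, 50.0)))                            # [0.0, 1.0]
# explicit bounds: (20 - 0) / 100 and (50 - 0) / 100
list(base_weight.normalize((20.0, 50.0), minval=0.0, maxval=100.0))  # [0.2, 0.5]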
@@ -0,0 +1,39 @@ (new file: the cinder.scheduler host-filters package)
# Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Scheduler host filters
"""

from cinder.scheduler import base_filter


class BaseHostFilter(base_filter.BaseFilter):
    """Base class for host filters."""
    def _filter_one(self, obj, filter_properties):
        """Return True if the object passes the filter, otherwise False."""
        return self.host_passes(obj, filter_properties)

    def host_passes(self, host_state, filter_properties):
        """Return True if the HostState passes the filter, otherwise False.

        Override this in a subclass.
        """
        raise NotImplementedError()


class HostFilterHandler(base_filter.BaseFilterHandler):
    def __init__(self, namespace):
        super(HostFilterHandler, self).__init__(BaseHostFilter, namespace)
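With the package now under cinder.scheduler, a backend filter is a small subclass; EnabledFilter and all_hosts below are illustrative assumptions, not part of the patch:

from cinder.scheduler import filters

class EnabledFilter(filters.BaseHostFilter):
    """Pass only hosts whose backend reports itself as enabled."""
    def host_passes(self, host_state, filter_properties):
        return host_state.capabilities.get('enabled', False)

# BaseFilter.filter_all() keeps the objects for which _filter_one(),
# i.e. host_passes(), returns True; all_hosts is an assumed iterable
# of HostState objects.
passing = list(EnabledFilter().filter_all(all_hosts, {'resource_type': {}}))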
@@ -17,7 +17,7 @@ (affinity_filter.py)
 from oslo_log import log as logging
 from oslo_utils import uuidutils

-from cinder.openstack.common.scheduler import filters
+from cinder.scheduler import filters
 from cinder.volume import api as volume

 LOG = logging.getLogger(__name__)
@@ -13,7 +13,7 @@ (availability_zone_filter.py)
 # License for the specific language governing permissions and limitations
 # under the License.

-from cinder.openstack.common.scheduler import filters
+from cinder.scheduler import filters


 class AvailabilityZoneFilter(filters.BaseHostFilter):
@@ -17,8 +17,8 @@ import logging (capabilities_filter.py)

 import six

-from cinder.openstack.common.scheduler import filters
-from cinder.openstack.common.scheduler.filters import extra_specs_ops
+from cinder.scheduler import filters
+from cinder.scheduler.filters import extra_specs_ops

 LOG = logging.getLogger(__name__)

@@ -27,7 +27,9 @@ class CapabilitiesFilter(filters.BaseHostFilter):
     """HostFilter to work with resource (instance & volume) type records."""

     def _satisfies_extra_specs(self, capabilities, resource_type):
-        """Check that the capabilities provided by the services satisfy
+        """Check if capabilities satisfy resource type requirements.
+
+        Check that the capabilities provided by the services satisfy
         the extra specs associated with the resource type.
         """
         extra_specs = resource_type.get('extra_specs', [])
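As exercised by the new tests, the requested volume type's extra specs are matched against the capabilities a backend reports, with 'capabilities:'-scoped keys walking nested dictionaries; the values here are illustrative:

filter_properties = {
    'resource_type': {
        'name': 'fake_type',
        'extra_specs': {'capabilities:scope_lv1:opt1': '>= 2'},
    },
}
# passes for a host whose reported capabilities contain
# {'enabled': True, 'scope_lv1': {'opt1': 10}}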
@@ -22,7 +22,7 @@ import math (capacity_filter.py)
 from oslo_log import log as logging

 from cinder.i18n import _LE, _LW
-from cinder.openstack.common.scheduler import filters
+from cinder.scheduler import filters


 LOG = logging.getLogger(__name__)
@@ -17,8 +17,8 @@ from oslo_log import log as logging (driver_filter.py)
 import six

 from cinder.i18n import _LW
-from cinder.openstack.common.scheduler import filters
 from cinder.scheduler.evaluator import evaluator
+from cinder.scheduler import filters

 LOG = logging.getLogger(__name__)
@@ -15,7 +15,7 @@ (ignore_attempted_hosts_filter.py)

 import logging

-from cinder.openstack.common.scheduler import filters
+from cinder.scheduler import filters

 LOG = logging.getLogger(__name__)

@@ -19,7 +19,7 @@ from oslo_utils import uuidutils (instance_locality_filter.py)
 from cinder.compute import nova
 from cinder import exception
 from cinder.i18n import _, _LW
-from cinder.openstack.common.scheduler import filters
+from cinder.scheduler import filters
 from cinder.volume import utils as volume_utils


@@ -18,15 +18,15 @@ import operator (json_filter.py)
 from oslo_serialization import jsonutils
 import six

-from cinder.openstack.common.scheduler import filters
+from cinder.scheduler import filters


 class JsonFilter(filters.BaseHostFilter):
-    """Host Filter to allow simple JSON-based grammar for
-    selecting hosts.
-    """
+    """Host Filter to allow simple JSON-based grammar for selecting hosts."""
     def _op_compare(self, args, op):
-        """Returns True if the specified operator can successfully
+        """Compare first item of args with the rest using specified operator.
+
+        Returns True if the specified operator can successfully
         compare the first item in the args with all the rest. Will
         return False if only one item is in the list.
         """
@@ -88,7 +88,9 @@ class JsonFilter(filters.BaseHostFilter):
         }

     def _parse_string(self, string, host_state):
-        """Strings prefixed with $ are capability lookups in the
+        """Parse capability lookup strings.
+
+        Strings prefixed with $ are capability lookups in the
         form '$variable' where 'variable' is an attribute in the
         HostState class. If $variable is a dictionary, you may
         use: $variable.dictkey
@@ -126,9 +128,7 @@ class JsonFilter(filters.BaseHostFilter):
         return result

     def host_passes(self, host_state, filter_properties):
-        """Return a list of hosts that can fulfill the requirements
-        specified in the query.
-        """
+        """Return a list of hosts that can fulfill query requirements."""
         # TODO(zhiteng) Add description for filter_properties structure
         # and scheduler_hints.
         try:
|
|||||||
|
|
||||||
from cinder import context as cinder_context
|
from cinder import context as cinder_context
|
||||||
from cinder import exception
|
from cinder import exception
|
||||||
from cinder.i18n import _LI, _LW
|
|
||||||
from cinder import objects
|
from cinder import objects
|
||||||
from cinder.openstack.common.scheduler import filters
|
|
||||||
from cinder.openstack.common.scheduler import weights
|
|
||||||
from cinder import utils
|
from cinder import utils
|
||||||
|
from cinder.i18n import _LI, _LW
|
||||||
|
from cinder.scheduler import filters
|
||||||
|
from cinder.scheduler import weights
|
||||||
from cinder.volume import utils as vol_utils
|
from cinder.volume import utils as vol_utils
|
||||||
|
|
||||||
|
|
||||||
|
@ -0,0 +1,44 @@
|
|||||||
|
# Copyright (c) 2011 OpenStack Foundation.
|
||||||
|
# All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
# not use this file except in compliance with the License. You may obtain
|
||||||
|
# a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
|
||||||
|
"""
|
||||||
|
Scheduler host weights
|
||||||
|
"""
|
||||||
|
|
||||||
|
from cinder.scheduler import base_weight
|
||||||
|
|
||||||
|
|
||||||
|
class WeighedHost(base_weight.WeighedObject):
|
||||||
|
def to_dict(self):
|
||||||
|
return {
|
||||||
|
'weight': self.weight,
|
||||||
|
'host': self.obj.host,
|
||||||
|
}
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return ("WeighedHost [host: %s, weight: %s]" %
|
||||||
|
(self.obj.host, self.weight))
|
||||||
|
|
||||||
|
|
||||||
|
class BaseHostWeigher(base_weight.BaseWeigher):
|
||||||
|
"""Base class for host weights."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class HostWeightHandler(base_weight.BaseWeightHandler):
|
||||||
|
object_class = WeighedHost
|
||||||
|
|
||||||
|
def __init__(self, namespace):
|
||||||
|
super(HostWeightHandler, self).__init__(BaseHostWeigher, namespace)
|
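A sketch of how the moved weighing machinery is used; FreeCapacityWeigher and hosts are illustrative assumptions, while the handler and WeighedHost API are as defined above and in the new tests:

from cinder.scheduler import weights

class FreeCapacityWeigher(weights.BaseHostWeigher):
    def _weigh_object(self, host_state, weight_properties):
        return host_state.free_capacity_gb

# hosts: an assumed list of HostState objects
handler = weights.HostWeightHandler('cinder.scheduler.weights')
weighed = handler.get_weighed_objects([FreeCapacityWeigher], hosts, {})
best = weighed[0]                      # a WeighedHost
print(best.obj.host, best.weight)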
@@ -44,7 +44,7 @@ import math (weights/capacity.py)

 from oslo_config import cfg

-from cinder.openstack.common.scheduler import weights
+from cinder.scheduler import weights


 capacity_weight_opts = [
@@ -20,7 +20,7 @@ Used to spread volumes randomly across a list of equally suitable hosts. (weights/chance.py)

 import random

-from cinder.openstack.common.scheduler import weights
+from cinder.scheduler import weights


 class ChanceWeigher(weights.BaseHostWeigher):
@@ -16,8 +16,8 @@ from oslo_log import log as logging (weights/goodness.py)
 import six

 from cinder.i18n import _LW
-from cinder.openstack.common.scheduler import weights
 from cinder.scheduler.evaluator import evaluator
+from cinder.scheduler import weights


 LOG = logging.getLogger(__name__)
@@ -26,7 +26,7 @@ from oslo_config import cfg (weights/volume_number.py)
 from oslo_log import log as logging

 from cinder import db
-from cinder.openstack.common.scheduler import weights
+from cinder.scheduler import weights


 LOG = logging.getLogger(__name__)

cinder/tests/unit/scheduler/fake_hosts.py (new file, 53 lines)
@@ -0,0 +1,53 @@
# Copyright 2012 Intel Inc, OpenStack Foundation.  All Rights Reserved.
# (standard Apache License 2.0 header)

"""
Fakes For filters tests.
"""

import six


class FakeHostManager(object):
    """Defines fake hosts.

    host1: free_ram_mb=1024-512-512=0, free_disk_gb=1024-512-512=0
    host2: free_ram_mb=2048-512=1536 free_disk_gb=2048-512=1536
    host3: free_ram_mb=4096-1024=3072 free_disk_gb=4096-1024=3072
    host4: free_ram_mb=8192 free_disk_gb=8192
    """

    def __init__(self):
        self.service_states = {
            'host1': {
                'compute': {'host_memory_free': 1073741824},
            },
            'host2': {
                'compute': {'host_memory_free': 2147483648},
            },
            'host3': {
                'compute': {'host_memory_free': 3221225472},
            },
            'host4': {
                'compute': {'host_memory_free': 999999999},
            },
        }


class FakeHostState(object):
    def __init__(self, host, attribute_dict):
        self.host = host
        for (key, val) in six.iteritems(attribute_dict):
            setattr(self, key, val)

@@ -21,8 +21,7 @@ import mock (test_allocated_capacity_weigher.py)
 from oslo_config import cfg

 from cinder import context
-from cinder.openstack.common.scheduler import weights
-from cinder.scheduler.weights import capacity
+from cinder.scheduler import weights
 from cinder import test
 from cinder.tests.unit.scheduler import fakes
 from cinder.volume import utils
@@ -41,7 +40,7 @@ class AllocatedCapacityWeigherTestCase(test.TestCase):
         if weight_properties is None:
             weight_properties = {}
         return self.weight_handler.get_weighed_objects(
-            [capacity.AllocatedCapacityWeigher], hosts,
+            [weights.capacity.AllocatedCapacityWeigher], hosts,
             weight_properties)[0]

     @mock.patch('cinder.db.sqlalchemy.api.service_get_all_by_topic')

cinder/tests/unit/scheduler/test_base_filter.py (new file, 168 lines)
@@ -0,0 +1,168 @@
The file carries the base-filter unit tests over from oslo-incubator. TestBaseFilter stubs BaseFilter._filter_one with mox (via oslotest's MoxStubout fixture) and asserts that filter_all([1, 2, 3, 4], ...) yields exactly the objects for which _filter_one() returned True. A set of fixtures (FakeExtension, BaseFakeFilter, and FakeFilter1 through FakeFilter5, which derive or do not derive from BaseFakeFilter, with or without a fake entry point, FakeFilter5 setting run_filter_once_per_request = True) plus a FakeExtensionManager standing in for stevedore then back TestBaseFilterHandler. That test case patches base_filter.base_handler.extension.ExtensionManager with the fake, builds BaseFilterHandler(BaseFakeFilter, 'fake_filters'), and verifies that get_all_classes() returns only [FakeFilter1, FakeFilter4], that get_filtered_objects() propagates None when a filter's filter_all() returns None (without calling the remaining filters), that it returns all objects otherwise, and that a run-once filter's filter_all() is invoked only for index 0.
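A condensed sketch of the handler checks described above; all fixture names are the ones from that file:

handler = base_filter.BaseFilterHandler(BaseFakeFilter, 'fake_filters')
# only classes deriving from BaseFakeFilter that the (faked) extension
# manager exposes are returned
assert handler.get_all_classes() == [FakeFilter1, FakeFilter4]
# objects are run through each filter class in turn
objs = handler.get_filtered_objects([FakeFilter1, FakeFilter2],
                                    [1, 2, 3, 4], {'x': 'y'}, 0)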
@@ -20,8 +20,7 @@ import mock (test_capacity_weigher.py)
 from oslo_config import cfg

 from cinder import context
-from cinder.openstack.common.scheduler import weights
-from cinder.scheduler.weights import capacity
+from cinder.scheduler import weights
 from cinder import test
 from cinder.tests.unit.scheduler import fakes
 from cinder.volume import utils
@@ -40,7 +39,7 @@ class CapacityWeigherTestCase(test.TestCase):
         if weight_properties is None:
             weight_properties = {'size': 1}
         return self.weight_handler.get_weighed_objects(
-            [capacity.CapacityWeigher],
+            [weights.capacity.CapacityWeigher],
            hosts,
            weight_properties)

@@ -23,7 +23,7 @@ from cinder.compute import nova (test_host_filters.py)
 from cinder import context
 from cinder import db
 from cinder import exception
-from cinder.openstack.common.scheduler import filters
+from cinder.scheduler import filters
 from cinder import test
 from cinder.tests.unit.scheduler import fakes
 from cinder.tests.unit import utils
@@ -988,3 +988,674 @@ class InstanceLocalityFilterTestCase(HostFiltersTestCase):
The hunk leaves the tail of InstanceLocalityFilterTestCase (the APITimeout assertion) untouched and appends the filter tests brought over from oslo-incubator:
* TestFilter, a trivial BaseHostFilter subclass, and TestBogusFilter, a class that does not inherit from BaseHostFilter, used as fixtures.
* ExtraSpecsOpsTestCase: a _do_extra_specs_ops_test(value, req, matches) helper around filters.extra_specs_ops.match(), plus some thirty-five short tests driving it through the supported operators (plain equality, '=', 's==', 's!=', 's>=', 's<=', 's<', 's>', '<in>', '<is>', '<or>', '<=', '>=', and a bogus operator) with matching and non-matching value/req pairs.
* BasicFiltersTestCase ("Test case for host filters."): setUp() builds a JSON query requiring free_ram_mb >= 1024 and free_disk_mb >= 200 * 1024, and test_all_filters() checks that JsonFilter, CapabilitiesFilter, AvailabilityZoneFilter and IgnoreAttemptedHostsFilter are registered in class_map. The remaining tests cover CapabilitiesFilter extra-spec matching (simple, complex, scoped, fake-scoped, multi-level-scoped and wrong-scope cases, in passing and failing variants), JsonFilter behaviour (passing with and without a query; failing on memory, disk, disabled capabilities or a disabled service; a "happy day" compound query; every basic operator; unknown operators raising KeyError; empty queries passing; bad argument counts failing; unknown '$variable' lookups being ignored), AvailabilityZoneFilter (same, different and empty zone requests built by _make_zone_request()) and IgnoreAttemptedHostsFilter (retry disabled, host not yet tried, host already tried).
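For example, the AvailabilityZoneFilter cases described above build a request spec like the following (class_map and fakes come from the test fixtures):

filt_cls = self.class_map['AvailabilityZoneFilter']()
request = {'request_spec':
           {'resource_properties': {'availability_zone': 'nova'}}}
host = fakes.FakeHostState('host1',
                           {'service': {'availability_zone': 'nova'}})
self.assertTrue(filt_cls.host_passes(host, request))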
@@ -24,7 +24,7 @@ from oslo_utils import timeutils (test_host_manager.py)

 from cinder import exception
 from cinder import objects
-from cinder.openstack.common.scheduler import filters
+from cinder.scheduler import filters
 from cinder.scheduler import host_manager
 from cinder import test
 from cinder.tests.unit.objects import test_service
@@ -21,8 +21,7 @@ from oslo_config import cfg (test_volume_number_weigher.py)

 from cinder import context
 from cinder.db.sqlalchemy import api
-from cinder.openstack.common.scheduler import weights
-from cinder.scheduler.weights import volume_number
+from cinder.scheduler import weights
 from cinder import test
 from cinder.tests.unit.scheduler import fakes
 from cinder.volume import utils
@@ -58,7 +57,7 @@ class VolumeNumberWeigherTestCase(test.TestCase):
         if weight_properties is None:
             weight_properties = {'context': self.context}
         return self.weight_handler.get_weighed_objects(
-            [volume_number.VolumeNumberWeigher],
+            [weights.volume_number.VolumeNumberWeigher],
            hosts,
            weight_properties)[0]

cinder/tests/unit/scheduler/test_weights.py (new file, 54 lines)
@@ -0,0 +1,54 @@
# Copyright 2011-2012 OpenStack Foundation.  All Rights Reserved.
# (standard Apache License 2.0 header)

"""
Tests For Scheduler weights.
"""

from cinder.scheduler import base_weight
from cinder import test


class TestWeightHandler(test.TestCase):
    def test_no_multiplier(self):
        class FakeWeigher(base_weight.BaseWeigher):
            def _weigh_object(self, *args, **kwargs):
                pass

        self.assertEqual(1.0,
                         FakeWeigher().weight_multiplier())

    def test_no_weight_object(self):
        class FakeWeigher(base_weight.BaseWeigher):
            def weight_multiplier(self, *args, **kwargs):
                pass
        self.assertRaises(TypeError,
                          FakeWeigher)

    def test_normalization(self):
        # weight_list, expected_result, minval, maxval
        map_ = (
            ((), (), None, None),
            ((0.0, 0.0), (0.0, 0.0), None, None),
            ((1.0, 1.0), (0.0, 0.0), None, None),

            ((20.0, 50.0), (0.0, 1.0), None, None),
            ((20.0, 50.0), (0.0, 0.375), None, 100.0),
            ((20.0, 50.0), (0.4, 1.0), 0.0, None),
            ((20.0, 50.0), (0.2, 0.5), 0.0, 100.0),
        )
        for seq, result, minval, maxval in map_:
            ret = base_weight.normalize(seq, minval=minval, maxval=maxval)
            self.assertEqual(result, tuple(ret))
@@ -2,9 +2,6 @@ (openstack-common.conf)

 # The list of modules to copy from oslo-incubator
 module=imageutils
-module=scheduler
-module=scheduler.filters
-module=scheduler.weights

 # The base module to hold the copy of openstack.common
 base=cinder
@@ -27,13 +27,13 @@ packages = (setup.cfg)

 [entry_points]
 cinder.scheduler.filters =
-    AvailabilityZoneFilter = cinder.openstack.common.scheduler.filters.availability_zone_filter:AvailabilityZoneFilter
-    CapabilitiesFilter = cinder.openstack.common.scheduler.filters.capabilities_filter:CapabilitiesFilter
+    AvailabilityZoneFilter = cinder.scheduler.filters.availability_zone_filter:AvailabilityZoneFilter
+    CapabilitiesFilter = cinder.scheduler.filters.capabilities_filter:CapabilitiesFilter
     CapacityFilter = cinder.scheduler.filters.capacity_filter:CapacityFilter
     DifferentBackendFilter = cinder.scheduler.filters.affinity_filter:DifferentBackendFilter
     DriverFilter = cinder.scheduler.filters.driver_filter:DriverFilter
-    JsonFilter = cinder.openstack.common.scheduler.filters.json_filter:JsonFilter
-    RetryFilter = cinder.openstack.common.scheduler.filters.ignore_attempted_hosts_filter:IgnoreAttemptedHostsFilter
+    JsonFilter = cinder.scheduler.filters.json_filter:JsonFilter
+    RetryFilter = cinder.scheduler.filters.ignore_attempted_hosts_filter:IgnoreAttemptedHostsFilter
     SameBackendFilter = cinder.scheduler.filters.affinity_filter:SameBackendFilter
     InstanceLocalityFilter = cinder.scheduler.filters.instance_locality_filter:InstanceLocalityFilter
 cinder.scheduler.weights =
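Because the handlers resolve filters through stevedore on these namespaces, repointing the entry point targets is all the consumers need; a hedged sketch of how the scheduler side sees it:

from cinder.scheduler import filters

handler = filters.HostFilterHandler('cinder.scheduler.filters')
classes = handler.get_all_classes()
# e.g. [..., AvailabilityZoneFilter, CapabilitiesFilter, JsonFilter, ...],
# now loaded from cinder.scheduler.filters.* instead of
# cinder.openstack.common.scheduler.filters.*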