Merge "Fix issues with get_pool scheduler API"
commit 7b00db5f37

134 contrib/tempest/tempest/api/share/admin/test_scheduler_stats.py Normal file
@ -0,0 +1,134 @@
# Copyright (c) 2015 Clinton Knight. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest_lib import exceptions as lib_exc  # noqa

from tempest.api.share import base
from tempest import config_share as config
from tempest import test

CONF = config.CONF


class SchedulerStatsAdminTest(base.BaseSharesAdminTest):

    @test.attr(type=["gate", "smoke", ])
    def test_pool_list(self):

        # List pools
        resp, pool_response = self.shares_client.list_pools()
        pool_list = pool_response.get('pools')
        self.assertIsNotNone(pool_list, 'No pools returned from pools API')
        self.assertNotEmpty(pool_list)
        pool = pool_list[0]
        required_keys = {'name', 'host', 'backend', 'pool'}
        actual_keys = set(pool.keys())
        self.assertTrue(actual_keys.issuperset(required_keys))

    @test.attr(type=["gate", "smoke", ])
    def test_pool_list_with_filters(self):

        # List pools
        resp, pool_response = self.shares_client.list_pools()
        pool_list = pool_response.get('pools')

        # Ensure we got at least one pool
        self.assertIsNotNone(pool_list, 'No pools returned from pools API')
        self.assertNotEmpty(pool_list)
        pool = pool_list[0]

        # Build search opts from data and get pools again with filter
        search_opts = {
            'host': pool.get('host'),
            'backend': pool.get('backend'),
            'pool': pool.get('pool'),
        }
        resp, pool_response = self.shares_client.list_pools(
            search_opts=search_opts)
        filtered_pool_list = pool_response.get('pools')

        # Ensure we got exactly one pool matching the first one from above
        self.assertEqual(1, len(filtered_pool_list))
        self.assertDictEqual(pool, filtered_pool_list[0])

    @test.attr(type=["gate", "smoke", ])
    def test_pool_list_with_filters_negative(self):

        # Build search opts for a non-existent pool
        search_opts = {
            'host': 'foo',
            'backend': 'bar',
            'pool': 'shark',
        }
        resp, pool_response = self.shares_client.list_pools(
            search_opts=search_opts)
        pool_list = pool_response.get('pools')

        # Ensure we got no pools
        self.assertEmpty(pool_list)

    @test.attr(type=["gate", "smoke", ])
    def test_pool_list_detail(self):

        # List pools
        resp, pool_response = self.shares_client.list_pools(detail=True)
        pool_list = pool_response.get('pools')
        self.assertIsNotNone(pool_list, 'No pools returned from pools API')
        self.assertNotEmpty(pool_list)
        pool = pool_list[0]
        required_keys = {'name', 'host', 'backend', 'pool', 'capabilities'}
        actual_keys = set(pool.keys())
        self.assertTrue(actual_keys.issuperset(required_keys))

    @test.attr(type=["gate", "smoke", ])
    def test_pool_list_detail_with_filters(self):

        # List pools
        resp, pool_response = self.shares_client.list_pools(detail=True)
        pool_list = pool_response.get('pools')

        # Ensure we got at least one pool
        self.assertIsNotNone(pool_list, 'No pools returned from pools API')
        self.assertNotEmpty(pool_list)
        pool = pool_list[0]

        # Build search opts from data and get pools again with filter
        search_opts = {
            'host': pool.get('host'),
            'backend': pool.get('backend'),
            'pool': pool.get('pool'),
        }
        resp, pool_response = self.shares_client.list_pools(
            detail=True, search_opts=search_opts)
        filtered_pool_list = pool_response.get('pools')

        # Ensure we got exactly one pool matching the first one from above
        self.assertEqual(1, len(filtered_pool_list))
        self.assertDictEqual(pool, filtered_pool_list[0])

    @test.attr(type=["gate", "smoke", ])
    def test_pool_list_detail_with_filters_negative(self):

        # Build search opts for a non-existent pool
        search_opts = {
            'host': 'foo',
            'backend': 'bar',
            'pool': 'shark',
        }
        resp, pool_response = self.shares_client.list_pools(
            detail=True, search_opts=search_opts)
        pool_list = pool_response.get('pools')

        # Ensure we got no pools
        self.assertEmpty(pool_list)
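
For orientation, these tests assert that the pools API returns a list of entries under a 'pools' key; a hedged sketch of one entry (field values are illustrative, not taken from this change) looks like:

# Illustrative pool entry as asserted above; the detail view adds
# 'capabilities', the summary view omits it.
example_pool = {
    'name': 'hostname@backend_name#pool_name',
    'host': 'hostname',
    'backend': 'backend_name',
    'pool': 'pool_name',
    'capabilities': {'total_capacity': 1024},  # detail view only
}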
@ -600,3 +600,16 @@ class SharesClient(service_client.ServiceClient):
        uri = "share-servers/%s/details" % share_server_id
        resp, body = self.get(uri)
        return resp, self._parse_resp(body)

    ###############

    def list_pools(self, detail=False, search_opts=None):
        """Get list of scheduler pools."""
        uri = 'scheduler-stats/pools'
        if detail:
            uri += '/detail'
        if search_opts:
            uri += "?%s" % urllib.urlencode(search_opts)
        resp, body = self.get(uri)
        self.expected_success(200, resp.status)
        return resp, json.loads(body)
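
As a usage sketch (the host/backend/pool values here are hypothetical), a test can narrow the listing by passing search_opts, which the helper above URL-encodes into the query string:

# Hedged example of calling the new client helper; not part of this change.
search_opts = {'host': 'host1', 'backend': 'backend1', 'pool': 'pool1'}
resp, body = self.shares_client.list_pools(detail=True,
                                           search_opts=search_opts)
pools = body['pools']  # body is already-decoded JSON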
@ -69,5 +69,6 @@
    "share_network:remove_security_service": [["rule:default"]],
    "share_network:get_all_share_networks": [["rule:admin_api"]],

    "scheduler_extension:scheduler_stats:get_pools" : "rule:admin_api"
    "scheduler_stats:pools:index": [["rule:admin_api"]],
    "scheduler_stats:pools:detail": [["rule:admin_api"]]
}
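
The single scheduler_extension rule is superseded by one rule per action; the new v1 controller (later in this diff) resolves them roughly as in this sketch:

# Sketch: how the keys above are consumed; 'scheduler_stats:pools' plus the
# action name selects the matching rule.
policy.check_policy(context, 'scheduler_stats:pools', 'index')
policy.check_policy(context, 'scheduler_stats:pools', 'detail')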
@ -1,63 +0,0 @@
# Copyright (c) 2014 eBay Inc.
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""The Scheduler Stats extension"""

from manila.api import extensions
from manila.api.openstack import wsgi
from manila.api.views import scheduler_stats as scheduler_stats_view
from manila.scheduler import rpcapi


def authorize(context, action_name):
    action = 'scheduler_stats:%s' % action_name
    extensions.extension_authorizer('scheduler', action)(context)


class SchedulerStatsController(wsgi.Controller):
    """The Scheduler Stats controller for the OpenStack API."""

    _view_builder_class = scheduler_stats_view.ViewBuilder

    def __init__(self):
        self.scheduler_api = rpcapi.SchedulerAPI()
        super(SchedulerStatsController, self).__init__()

    def get_pools(self, req):
        """List all active pools in scheduler."""
        context = req.environ['manila.context']
        authorize(context, 'get_pools')

        detail = req.params.get('detail', False)
        pools = self.scheduler_api.get_pools(context, filters=None)

        return self._view_builder.pools(req, pools, detail)


class Scheduler_stats(extensions.ExtensionDescriptor):
    """Scheduler stats support."""

    name = "Scheduler_stats"
    alias = "scheduler-stats"
    updated = "2015-08-01T00:00:00+00:00"

    def get_resources(self):
        res = extensions.ResourceExtension(
            Scheduler_stats.alias,
            SchedulerStatsController(),
            collection_actions={"get_pools": "GET"})

        return [res]
@ -24,6 +24,7 @@ from oslo_log import log
from manila.api import extensions
import manila.api.openstack
from manila.api.v1 import limits
from manila.api.v1 import scheduler_stats
from manila.api.v1 import security_service
from manila.api.v1 import share_metadata
from manila.api.v1 import share_networks
@ -110,3 +111,13 @@ class APIRouter(manila.api.openstack.APIRouter):
                        controller=self.resources['types'],
                        collection={'detail': 'GET', 'default': 'GET'},
                        member={'action': 'POST'})

        self.resources['scheduler_stats'] = scheduler_stats.create_resource()
        mapper.connect('pools', '/{project_id}/scheduler-stats/pools',
                       controller=self.resources['scheduler_stats'],
                       action='pools_index',
                       conditions={'method': ['GET']})
        mapper.connect('pools', '/{project_id}/scheduler-stats/pools/detail',
                       controller=self.resources['scheduler_stats'],
                       action='pools_detail',
                       conditions={'method': ['GET']})
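
For orientation, a sketch of the routing registered above (the URLs and action names follow directly from the mapper.connect calls; nothing here is new behaviour):

# GET routes added by this hunk and the controller actions they dispatch to.
routes = {
    'GET /{project_id}/scheduler-stats/pools': 'pools_index',
    'GET /{project_id}/scheduler-stats/pools/detail': 'pools_detail',
}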
53 manila/api/v1/scheduler_stats.py Normal file
@ -0,0 +1,53 @@
# Copyright (c) 2015 Clinton Knight. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_log import log

from manila.api.openstack import wsgi
from manila.api.views import scheduler_stats as scheduler_stats_views
from manila import policy
from manila.scheduler import rpcapi

POOLS_RESOURCES_NAME = 'scheduler_stats:pools'
LOG = log.getLogger(__name__)


class SchedulerStatsController(wsgi.Controller):
    """The Scheduler Stats API controller for the OpenStack API."""

    def __init__(self):
        self.scheduler_api = rpcapi.SchedulerAPI()
        self._view_builder_class = scheduler_stats_views.ViewBuilder
        super(SchedulerStatsController, self).__init__()

    def pools_index(self, req):
        """Returns a list of storage pools known to the scheduler."""
        return self._pools(req, action='index')

    def pools_detail(self, req):
        """Returns a detailed list of storage pools known to the scheduler."""
        return self._pools(req, action='detail')

    def _pools(self, req, action='index'):
        context = req.environ['manila.context']
        policy.check_policy(context, POOLS_RESOURCES_NAME, action)
        search_opts = {}
        search_opts.update(req.GET)
        pools = self.scheduler_api.get_pools(context, filters=search_opts)
        detail = (action == 'detail')
        return self._view_builder.pools(pools, detail=detail)


def create_resource():
    return wsgi.Resource(SchedulerStatsController())
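
Because _pools() copies req.GET into search_opts, every query-string parameter is forwarded unchanged as a filter to the scheduler. A hedged sketch of that behaviour (the URL and values are illustrative):

# GET /v1/{project_id}/scheduler-stats/pools/detail?host=host1&pool=pool%2A
# inside _pools() produces:
search_opts = {'host': 'host1', 'pool': 'pool*'}
pools = self.scheduler_api.get_pools(context, filters=search_opts)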
@ -1,5 +1,6 @@
# Copyright (c) 2014 eBay Inc.
# Copyright (c) 2015 Rushil Chugh
# Copyright (c) 2015 Clinton Knight
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -22,25 +23,30 @@ class ViewBuilder(common.ViewBuilder):

    _collection_name = "scheduler-stats"

    def summary(self, request, pool):
    def pool_summary(self, pool):
        """Summary view of a single pool."""
        return {
            'pool': {
                'name': pool.get('name'),
                'host': pool.get('host'),
                'backend': pool.get('backend'),
                'pool': pool.get('pool'),
            }
        }

    def detail(self, request, pool):
    def pool_detail(self, pool):
        """Detailed view of a single pool."""
        return {
            'pool': {
                'name': pool.get('name'),
                'host': pool.get('host'),
                'backend': pool.get('backend'),
                'pool': pool.get('pool'),
                'capabilities': pool.get('capabilities'),
            }
        }

    def pools(self, request, pools, detail):
        """Summary view of a list of pools seen by scheduler."""
        pdict = self.detail if detail else self.summary

        return {"pools": [pdict(request, pool)['pool'] for pool in pools]}
    def pools(self, pools, detail=False):
        """View of a list of pools seen by scheduler."""
        view_method = self.pool_detail if detail else self.pool_summary
        return {"pools": [view_method(pool)['pool'] for pool in pools]}
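
A minimal usage sketch of the reworked view builder (the input dict is invented for the example; keys the views do not know about, such as 'other', are simply dropped):

# Assumes: from manila.api.views import scheduler_stats
builder = scheduler_stats.ViewBuilder()
pool = {'name': 'host1@backend1#pool1', 'host': 'host1',
        'backend': 'backend1', 'pool': 'pool1',
        'capabilities': {'total_capacity_gb': 10}, 'other': 'ignored'}
builder.pools([pool])               # summary view: no 'capabilities' key
builder.pools([pool], detail=True)  # detail view: includes 'capabilities'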
@ -91,5 +91,4 @@ class Scheduler(object):

    def get_pools(self, context, filters):
        """Must override schedule method for scheduler to work."""
        raise NotImplementedError(_(
            "Must implement get_pools"))
        raise NotImplementedError(_("Must implement get_pools"))
@ -51,8 +51,7 @@ class FilterScheduler(driver.Scheduler):
        return self.options.get_configuration()

    def get_pools(self, context, filters):
        # TODO(zhiteng) Add filters support
        return self.host_manager.get_pools(context)
        return self.host_manager.get_pools(context, filters)

    def _post_select_populate_filter_properties(self, filter_properties,
                                                host_state):
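
Putting the pieces of this change together, the request path for the pools API is roughly the following (a sketch assembled from the files in this diff, not a verbatim trace):

# API:       SchedulerStatsController._pools()
#            -> policy.check_policy(context, 'scheduler_stats:pools', action)
#            -> rpcapi.SchedulerAPI().get_pools(context, filters=search_opts)
# Scheduler: FilterScheduler.get_pools(context, filters)
#            -> self.host_manager.get_pools(context, filters)
# Hosts:     HostManager refreshes host_state_map, expands per-host pools,
#            applies _passes_filters(), and returns a list of pool dicts
# View:      ViewBuilder.pools(pools, detail=...) shapes the response body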
@ -1,5 +1,6 @@
# Copyright (c) 2011 OpenStack, LLC.
# Copyright (c) 2015 Rushil Chugh
# Copyright (c) 2015 Clinton Knight
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -18,6 +19,7 @@
Manage hosts in the current zone.
"""

import re
import UserDict

from oslo_config import cfg
@ -27,7 +29,7 @@ import six

from manila import db
from manila import exception
from manila.i18n import _LI
from manila.i18n import _LI, _LW
from manila.openstack.common.scheduler import filters
from manila.openstack.common.scheduler import weights
from manila.share import utils as share_utils
@ -407,44 +409,40 @@ class HostManager(object):

    def update_service_capabilities(self, service_name, host, capabilities):
        """Update the per-service capabilities based on this notification."""
        if service_name not in ('share'):
        if service_name not in ('share',):
            LOG.debug('Ignoring %(service_name)s service update '
                      'from %(host)s',
                      {'service_name': service_name, 'host': host})
            return

        # Copy the capabilities, so we don't modify the original dict
        capab_copy = dict(capabilities)
        capab_copy["timestamp"] = timeutils.utcnow()  # Reported time
        self.service_states[host] = capab_copy
        capability_copy = dict(capabilities)
        capability_copy["timestamp"] = timeutils.utcnow()  # Reported time
        self.service_states[host] = capability_copy

        LOG.debug("Received %(service_name)s service update from "
                  "%(host)s: %(cap)s" %
                  {'service_name': service_name, 'host': host,
                   'cap': capabilities})

    def get_all_host_states_share(self, context):
        """Get all hosts and their states.

        Returns a dict of all the hosts the HostManager knows
        about. Also, each of the consumable resources in HostState are
        pre-populated and adjusted based on data in the db.

        For example:
          {'192.168.1.100': HostState(), ...}
        """
    def _update_host_state_map(self, context):

        # Get resource usage across the available share nodes:
        all_pools = {}
        topic = CONF.share_topic
        share_services = db.service_get_all_by_topic(context, topic)

        for service in share_services:
            host = service['host']

            # Warn about down services and remove them from host_state_map
            if not utils.service_is_up(service) or service['disabled']:
                LOG.info(_LI("Removing non-active host: %(host)s from "
                             "scheduler cache.") % {'host': host})
                self.host_state_map.pop(host, None)
                LOG.warn(_LW("Share service is down. (host: %s)") % host)
                if self.host_state_map.pop(host, None):
                    LOG.info(_LI("Removing non-active host: %s from "
                                 "scheduler cache.") % host)
                continue

            # Create and register host_state if not in host_state_map
            capabilities = self.service_states.get(host, None)
            host_state = self.host_state_map.get(host)
            if not host_state:
@ -453,11 +451,26 @@
                    capabilities=capabilities,
                    service=dict(six.iteritems(service)))
                self.host_state_map[host] = host_state
            # Update host_state

            # Update capabilities and attributes in host_state
            host_state.update_from_share_capability(
                capabilities, service=dict(six.iteritems(service)))
        # Build a pool_state map and return that instead of host_state_map
            state = self.host_state_map[host]

    def get_all_host_states_share(self, context):
        """Returns a dict of all the hosts the HostManager knows about.

        Each of the consumable resources in HostState are
        populated with capabilities scheduler received from RPC.

        For example:
          {'192.168.1.100': HostState(), ...}
        """

        self._update_host_state_map(context)

        # Build a pool_state map and return that map instead of host_state_map
        all_pools = {}
        for host, state in self.host_state_map.items():
            for key in state.pools:
                pool = state.pools[key]
                # Use host.pool_name to make sure key is unique
@ -466,17 +479,54 @@

        return six.itervalues(all_pools)

    def get_pools(self, context):
    def get_pools(self, context, filters=None):
        """Returns a dict of all pools on all hosts HostManager knows about."""

        all_pools = []
        for host, state in self.host_state_map.items():
            for key in state.pools:
                pool = state.pools[key]
                # Use host.pool_name to make sure key is unique
                pool_key = share_utils.append_host(host, pool.pool_name)
                new_pool = dict(name=pool_key)
                new_pool.update(dict(capabilities=pool.capabilities))
                all_pools.append(new_pool)
        self._update_host_state_map(context)

        all_pools = []
        for host, host_state in self.host_state_map.items():
            for pool in host_state.pools.values():

                fully_qualified_pool_name = share_utils.append_host(
                    host, pool.pool_name)
                host_name = share_utils.extract_host(
                    fully_qualified_pool_name, level='host')
                backend_name = share_utils.extract_host(
                    fully_qualified_pool_name, level='backend').split('@')[1] \
                    if '@' in fully_qualified_pool_name else None
                pool_name = share_utils.extract_host(
                    fully_qualified_pool_name, level='pool')

                new_pool = {
                    'name': fully_qualified_pool_name,
                    'host': host_name,
                    'backend': backend_name,
                    'pool': pool_name,
                    'capabilities': pool.capabilities,
                }
                if self._passes_filters(new_pool, filters):
                    all_pools.append(new_pool)
        return all_pools

    def _passes_filters(self, dict_to_check, filter_dict):
        """Applies a set of regex filters to a dictionary.

        If no filter keys are supplied, the data passes unfiltered and
        the method returns True. Otherwise, each key in the filter
        (filter_dict) must be present in the data (dict_to_check)
        and the filter values are applied as regex expressions to
        the data values. If any of the filter values fail to match
        their corresponding data values, the method returns False.
        But if all filters match, the method returns True.
        """
        if not filter_dict:
            return True

        for filter_key, filter_value in six.iteritems(filter_dict):
            if filter_key not in dict_to_check:
                return False
            if not re.match(filter_value, dict_to_check.get(filter_key)):
                return False

        return True
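
Since _passes_filters() uses re.match(), each filter value is matched as a regex anchored at the start of the corresponding data value but not at the end. A hedged illustration (hm stands for a HostManager instance; the pool dict is invented for the example):

pool = {'host': 'host1', 'backend': 'backend1', 'pool': 'pool1'}
hm._passes_filters(pool, None)                # True: no filters supplied
hm._passes_filters(pool, {'pool': 'pool.*'})  # True: prefix regex matches
hm._passes_filters(pool, {'pool': 'ool1'})    # False: re.match anchors at start
hm._passes_filters(pool, {'zone': 'nova'})    # False: key missing from data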
@ -1,111 +0,0 @@
|
||||
# Copyright 2014 eBay Inc.
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# Copyright (c) 2015 Rushil Chugh
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from manila.api.contrib import scheduler_stats
|
||||
from manila import context
|
||||
from manila import test
|
||||
from manila.tests.api import fakes
|
||||
|
||||
|
||||
def schedule_rpcapi_get_pools(self, context, filters=None):
|
||||
all_pools = []
|
||||
pool1 = dict(name='pool1',
|
||||
capabilities=dict(
|
||||
total_capacity=1024, free_capacity=100,
|
||||
share_backend_name='pool1', reserved_percentage=0,
|
||||
driver_version='1.0.0', storage_protocol='iSCSI',
|
||||
QoS_support='False', updated=None))
|
||||
all_pools.append(pool1)
|
||||
pool2 = dict(name='pool2',
|
||||
capabilities=dict(
|
||||
total_capacity=512, free_capacity=200,
|
||||
share_backend_name='pool2', reserved_percentage=0,
|
||||
driver_version='1.0.1', storage_protocol='iSER',
|
||||
QoS_support='True', updated=None))
|
||||
all_pools.append(pool2)
|
||||
|
||||
return all_pools
|
||||
|
||||
|
||||
@mock.patch('manila.scheduler.rpcapi.SchedulerAPI.get_pools',
|
||||
schedule_rpcapi_get_pools)
|
||||
class SchedulerStatsAPITest(test.TestCase):
|
||||
def setUp(self):
|
||||
super(SchedulerStatsAPITest, self).setUp()
|
||||
self.flags(host='fake')
|
||||
self.controller = scheduler_stats.SchedulerStatsController()
|
||||
self.ctxt = context.RequestContext('admin', 'fake', True)
|
||||
|
||||
def test_get_pools_summery(self):
|
||||
req = fakes.HTTPRequest.blank('/v2/fake/scheduler_stats')
|
||||
req.environ['manila.context'] = self.ctxt
|
||||
res = self.controller.get_pools(req)
|
||||
|
||||
self.assertEqual(2, len(res['pools']))
|
||||
|
||||
expected = {
|
||||
'pools': [
|
||||
{
|
||||
'name': 'pool1',
|
||||
},
|
||||
{
|
||||
'name': 'pool2',
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
self.assertDictMatch(res, expected)
|
||||
|
||||
def test_get_pools_detail(self):
|
||||
req = fakes.HTTPRequest.blank('/v2/fake/scheduler_stats?detail=True')
|
||||
req.environ['manila.context'] = self.ctxt
|
||||
res = self.controller.get_pools(req)
|
||||
|
||||
self.assertEqual(2, len(res['pools']))
|
||||
|
||||
expected = {
|
||||
'pools': [
|
||||
{
|
||||
'name': 'pool1',
|
||||
'capabilities': {
|
||||
'updated': None,
|
||||
'total_capacity': 1024,
|
||||
'free_capacity': 100,
|
||||
'share_backend_name': 'pool1',
|
||||
'reserved_percentage': 0,
|
||||
'driver_version': '1.0.0',
|
||||
'storage_protocol': 'iSCSI',
|
||||
'QoS_support': 'False', }
|
||||
},
|
||||
{
|
||||
'name': 'pool2',
|
||||
'capabilities': {
|
||||
'updated': None,
|
||||
'total_capacity': 512,
|
||||
'free_capacity': 200,
|
||||
'share_backend_name': 'pool2',
|
||||
'reserved_percentage': 0,
|
||||
'driver_version': '1.0.1',
|
||||
'storage_protocol': 'iSER',
|
||||
'QoS_support': 'True', }
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
self.assertDictMatch(res, expected)
|
187 manila/tests/api/v1/test_scheduler_stats.py Normal file
@ -0,0 +1,187 @@
|
||||
# Copyright (c) 2015 Clinton Knight. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from manila.api.v1 import scheduler_stats
|
||||
from manila import context
|
||||
from manila.scheduler import rpcapi
|
||||
from manila import test
|
||||
from manila.tests.api import fakes
|
||||
|
||||
|
||||
FAKE_POOLS = [
|
||||
{
|
||||
'name': 'host1@backend1#pool1',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool1',
|
||||
'capabilities': {
|
||||
'updated': None,
|
||||
'total_capacity': 1024,
|
||||
'free_capacity': 100,
|
||||
'share_backend_name': 'pool1',
|
||||
'reserved_percentage': 0,
|
||||
'driver_version': '1.0.0',
|
||||
'storage_protocol': 'iSCSI',
|
||||
'QoS_support': 'False',
|
||||
},
|
||||
},
|
||||
{
|
||||
'name': 'host1@backend1#pool2',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool2',
|
||||
'capabilities': {
|
||||
'updated': None,
|
||||
'total_capacity': 512,
|
||||
'free_capacity': 200,
|
||||
'share_backend_name': 'pool2',
|
||||
'reserved_percentage': 0,
|
||||
'driver_version': '1.0.1',
|
||||
'storage_protocol': 'iSER',
|
||||
'QoS_support': 'True',
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
class SchedulerStatsControllerTestCase(test.TestCase):
|
||||
def setUp(self):
|
||||
super(SchedulerStatsControllerTestCase, self).setUp()
|
||||
self.flags(host='fake')
|
||||
self.controller = scheduler_stats.SchedulerStatsController()
|
||||
self.ctxt = context.RequestContext('admin', 'fake', True)
|
||||
|
||||
def test_pools_index(self):
|
||||
mock_get_pools = self.mock_object(rpcapi.SchedulerAPI,
|
||||
'get_pools',
|
||||
mock.Mock(return_value=FAKE_POOLS))
|
||||
req = fakes.HTTPRequest.blank('/v1/fake_project/scheduler_stats/pools')
|
||||
req.environ['manila.context'] = self.ctxt
|
||||
|
||||
result = self.controller.pools_index(req)
|
||||
|
||||
expected = {
|
||||
'pools': [
|
||||
{
|
||||
'name': 'host1@backend1#pool1',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool1',
|
||||
},
|
||||
{
|
||||
'name': 'host1@backend1#pool2',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool2',
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
self.assertDictMatch(result, expected)
|
||||
mock_get_pools.assert_called_once_with(self.ctxt, filters={})
|
||||
|
||||
def test_pools_index_with_filters(self):
|
||||
mock_get_pools = self.mock_object(rpcapi.SchedulerAPI,
|
||||
'get_pools',
|
||||
mock.Mock(return_value=FAKE_POOLS))
|
||||
|
||||
url = '/v1/fake_project/scheduler-stats/pools/detail'
|
||||
url += '?backend=.%2A&host=host1&pool=pool%2A'
|
||||
|
||||
req = fakes.HTTPRequest.blank(url)
|
||||
req.environ['manila.context'] = self.ctxt
|
||||
|
||||
result = self.controller.pools_index(req)
|
||||
|
||||
expected = {
|
||||
'pools': [
|
||||
{
|
||||
'name': 'host1@backend1#pool1',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool1',
|
||||
},
|
||||
{
|
||||
'name': 'host1@backend1#pool2',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool2',
|
||||
}
|
||||
]
|
||||
}
|
||||
expected_filters = {'host': 'host1', 'pool': 'pool*', 'backend': '.*'}
|
||||
|
||||
self.assertDictMatch(result, expected)
|
||||
mock_get_pools.assert_called_once_with(self.ctxt,
|
||||
filters=expected_filters)
|
||||
|
||||
def test_get_pools_detail(self):
|
||||
mock_get_pools = self.mock_object(rpcapi.SchedulerAPI,
|
||||
'get_pools',
|
||||
mock.Mock(return_value=FAKE_POOLS))
|
||||
req = fakes.HTTPRequest.blank(
|
||||
'/v1/fake_project/scheduler_stats/pools/detail')
|
||||
req.environ['manila.context'] = self.ctxt
|
||||
|
||||
result = self.controller.pools_detail(req)
|
||||
|
||||
expected = {
|
||||
'pools': [
|
||||
{
|
||||
'name': 'host1@backend1#pool1',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool1',
|
||||
'capabilities': {
|
||||
'updated': None,
|
||||
'total_capacity': 1024,
|
||||
'free_capacity': 100,
|
||||
'share_backend_name': 'pool1',
|
||||
'reserved_percentage': 0,
|
||||
'driver_version': '1.0.0',
|
||||
'storage_protocol': 'iSCSI',
|
||||
'QoS_support': 'False',
|
||||
},
|
||||
},
|
||||
{
|
||||
'name': 'host1@backend1#pool2',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool2',
|
||||
'capabilities': {
|
||||
'updated': None,
|
||||
'total_capacity': 512,
|
||||
'free_capacity': 200,
|
||||
'share_backend_name': 'pool2',
|
||||
'reserved_percentage': 0,
|
||||
'driver_version': '1.0.1',
|
||||
'storage_protocol': 'iSER',
|
||||
'QoS_support': 'True',
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
self.assertDictMatch(expected, result)
|
||||
mock_get_pools.assert_called_once_with(self.ctxt, filters={})
|
||||
|
||||
|
||||
class SchedulerStatsTestCase(test.TestCase):
|
||||
|
||||
def test_create_resource(self):
|
||||
result = scheduler_stats.create_resource()
|
||||
self.assertTrue(isinstance(result.controller,
|
||||
scheduler_stats.SchedulerStatsController))
|
107 manila/tests/api/views/test_scheduler_stats.py Normal file
@ -0,0 +1,107 @@
|
||||
# Copyright (c) 2015 Clinton Knight. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
|
||||
from manila.api.views import scheduler_stats
|
||||
from manila import test
|
||||
|
||||
|
||||
POOL1 = {
|
||||
'name': 'host1@backend1#pool1',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool1',
|
||||
'other': 'junk',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool1',
|
||||
'driver_handles_share_servers': False,
|
||||
'QoS_support': 'False',
|
||||
'timestamp': '2015-03-15T19:15:42.611690',
|
||||
'allocated_capacity_gb': 5,
|
||||
'total_capacity_gb': 10,
|
||||
},
|
||||
}
|
||||
POOL2 = {
|
||||
'name': 'host1@backend1#pool2',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool2',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool2',
|
||||
'driver_handles_share_servers': False,
|
||||
'QoS_support': 'False',
|
||||
'timestamp': '2015-03-15T19:15:42.611690',
|
||||
'allocated_capacity_gb': 15,
|
||||
'total_capacity_gb': 20,
|
||||
},
|
||||
}
|
||||
POOLS = [POOL1, POOL2]
|
||||
|
||||
POOLS_DETAIL_VIEW = {
|
||||
'pools': [
|
||||
{
|
||||
'name': 'host1@backend1#pool1',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool1',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool1',
|
||||
'driver_handles_share_servers': False,
|
||||
'QoS_support': 'False',
|
||||
'timestamp': '2015-03-15T19:15:42.611690',
|
||||
'allocated_capacity_gb': 5,
|
||||
'total_capacity_gb': 10,
|
||||
},
|
||||
}, {
|
||||
'name': 'host1@backend1#pool2',
|
||||
'host': 'host1',
|
||||
'backend': 'backend1',
|
||||
'pool': 'pool2',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool2',
|
||||
'driver_handles_share_servers': False,
|
||||
'QoS_support': 'False',
|
||||
'timestamp': '2015-03-15T19:15:42.611690',
|
||||
'allocated_capacity_gb': 15,
|
||||
'total_capacity_gb': 20,
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
class ViewBuilderTestCase(test.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(ViewBuilderTestCase, self).setUp()
|
||||
self.builder = scheduler_stats.ViewBuilder()
|
||||
|
||||
def test_pools(self):
|
||||
|
||||
result = self.builder.pools(POOLS)
|
||||
|
||||
# Remove capabilities for summary view
|
||||
expected = copy.deepcopy(POOLS_DETAIL_VIEW)
|
||||
for pool in expected['pools']:
|
||||
del pool['capabilities']
|
||||
|
||||
self.assertDictEqual(expected, result)
|
||||
|
||||
def test_pools_with_details(self):
|
||||
|
||||
result = self.builder.pools(POOLS, detail=True)
|
||||
|
||||
expected = POOLS_DETAIL_VIEW
|
||||
self.assertDictEqual(expected, result)
|
@ -48,5 +48,7 @@
    "security_service:get_all_security_services": [["rule:admin_api"]],

    "limits_extension:used_limits": [],
    "scheduler_extension:scheduler_stats:get_pools" : "rule:admin_api"

    "scheduler_stats:pools:index": [["rule:admin_api"]],
    "scheduler_stats:pools:detail": [["rule:admin_api"]]
}
@ -22,21 +22,90 @@ import six
|
||||
from manila.scheduler import filter_scheduler
|
||||
from manila.scheduler import host_manager
|
||||
|
||||
|
||||
SHARE_SERVICES = [
|
||||
SHARE_SERVICES_NO_POOLS = [
|
||||
dict(id=1, host='host1', topic='share', disabled=False,
|
||||
availability_zone='zone1', updated_at=timeutils.utcnow()),
|
||||
dict(id=2, host='host2', topic='share', disabled=False,
|
||||
dict(id=2, host='host2@back1', topic='share', disabled=False,
|
||||
availability_zone='zone1', updated_at=timeutils.utcnow()),
|
||||
dict(id=3, host='host3', topic='share', disabled=False,
|
||||
dict(id=3, host='host2@back2', topic='share', disabled=False,
|
||||
availability_zone='zone2', updated_at=timeutils.utcnow()),
|
||||
dict(id=4, host='host4', topic='share', disabled=False,
|
||||
]
|
||||
|
||||
SERVICE_STATES_NO_POOLS = {
|
||||
'host1': dict(share_backend_name='AAA',
|
||||
total_capacity_gb=512, free_capacity_gb=200,
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False),
|
||||
'host2@back1': dict(share_backend_name='BBB',
|
||||
total_capacity_gb=256, free_capacity_gb=100,
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False),
|
||||
'host2@back2': dict(share_backend_name='CCC',
|
||||
total_capacity_gb=10000, free_capacity_gb=700,
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False),
|
||||
}
|
||||
|
||||
SHARE_SERVICES_WITH_POOLS = [
|
||||
dict(id=1, host='host1@AAA', topic='share', disabled=False,
|
||||
availability_zone='zone1', updated_at=timeutils.utcnow()),
|
||||
dict(id=2, host='host2@BBB', topic='share', disabled=False,
|
||||
availability_zone='zone1', updated_at=timeutils.utcnow()),
|
||||
dict(id=3, host='host3@CCC', topic='share', disabled=False,
|
||||
availability_zone='zone2', updated_at=timeutils.utcnow()),
|
||||
dict(id=4, host='host4@DDD', topic='share', disabled=False,
|
||||
availability_zone='zone3', updated_at=timeutils.utcnow()),
|
||||
# service on host5 is disabled
|
||||
dict(id=5, host='host5', topic='share', disabled=True,
|
||||
dict(id=5, host='host5@EEE', topic='share', disabled=True,
|
||||
availability_zone='zone4', updated_at=timeutils.utcnow()),
|
||||
]
|
||||
|
||||
SHARE_SERVICE_STATES_WITH_POOLS = {
|
||||
'host1@AAA': dict(share_backend_name='AAA',
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False,
|
||||
pools=[dict(pool_name='pool1',
|
||||
total_capacity_gb=51,
|
||||
free_capacity_gb=41,
|
||||
reserved_percentage=0)]),
|
||||
'host2@BBB': dict(share_backend_name='BBB',
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False,
|
||||
pools=[dict(pool_name='pool2',
|
||||
total_capacity_gb=52,
|
||||
free_capacity_gb=42,
|
||||
reserved_percentage=0)]),
|
||||
'host3@CCC': dict(share_backend_name='CCC',
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False,
|
||||
pools=[dict(pool_name='pool3',
|
||||
total_capacity_gb=53,
|
||||
free_capacity_gb=43,
|
||||
reserved_percentage=0)]),
|
||||
'host4@DDD': dict(share_backend_name='DDD',
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False,
|
||||
pools=[dict(pool_name='pool4a',
|
||||
total_capacity_gb=541,
|
||||
free_capacity_gb=441,
|
||||
reserved_percentage=0),
|
||||
dict(pool_name='pool4b',
|
||||
total_capacity_gb=542,
|
||||
free_capacity_gb=442,
|
||||
reserved_percentage=0)]),
|
||||
'host5@EEE': dict(share_backend_name='EEE',
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False,
|
||||
pools=[dict(pool_name='pool5a',
|
||||
total_capacity_gb=551,
|
||||
free_capacity_gb=451,
|
||||
reserved_percentage=0),
|
||||
dict(pool_name='pool5b',
|
||||
total_capacity_gb=552,
|
||||
free_capacity_gb=452,
|
||||
reserved_percentage=0)]),
|
||||
}
|
||||
|
||||
|
||||
class FakeFilterScheduler(filter_scheduler.FilterScheduler):
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
@ -1,5 +1,6 @@
|
||||
# Copyright (c) 2011 OpenStack, LLC
|
||||
# Copyright (c) 2015 Rushil Chugh
|
||||
# Copyright (c) 2015 Clinton Knight
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
@ -16,6 +17,8 @@
|
||||
"""
|
||||
Tests For HostManager
|
||||
"""
|
||||
|
||||
import ddt
|
||||
import mock
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import timeutils
|
||||
@ -27,6 +30,7 @@ from manila.openstack.common.scheduler import filters
|
||||
from manila.scheduler import host_manager
|
||||
from manila import test
|
||||
from manila.tests.scheduler import fakes
|
||||
from manila import utils
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
@ -42,6 +46,7 @@ class FakeFilterClass2(filters.BaseHostFilter):
|
||||
pass
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class HostManagerTestCase(test.TestCase):
|
||||
"""Test case for HostManager class."""
|
||||
|
||||
@ -139,9 +144,12 @@ class HostManagerTestCase(test.TestCase):
|
||||
def test_get_all_host_states_share(self):
|
||||
context = 'fake_context'
|
||||
topic = CONF.share_topic
|
||||
ret_services = fakes.SHARE_SERVICES
|
||||
with mock.patch.object(db, 'service_get_all_by_topic',
|
||||
mock.Mock(return_value=ret_services)):
|
||||
self.mock_object(
|
||||
db, 'service_get_all_by_topic',
|
||||
mock.Mock(return_value=fakes.SHARE_SERVICES_WITH_POOLS))
|
||||
|
||||
with mock.patch.dict(self.host_manager.service_states,
|
||||
fakes.SHARE_SERVICE_STATES_WITH_POOLS):
|
||||
# Disabled service
|
||||
self.host_manager.get_all_host_states_share(context)
|
||||
host_state_map = self.host_manager.host_state_map
|
||||
@ -149,59 +157,214 @@ class HostManagerTestCase(test.TestCase):
|
||||
self.assertEqual(4, len(host_state_map))
|
||||
# Check that service is up
|
||||
for i in xrange(4):
|
||||
share_node = fakes.SHARE_SERVICES[i]
|
||||
share_node = fakes.SHARE_SERVICES_WITH_POOLS[i]
|
||||
host = share_node['host']
|
||||
self.assertEqual(share_node, host_state_map[host].service)
|
||||
db.service_get_all_by_topic.assert_called_once_with(context, topic)
|
||||
|
||||
@mock.patch('manila.db.service_get_all_by_topic')
|
||||
@mock.patch('manila.utils.service_is_up')
|
||||
def test_get_pools(self, _mock_service_is_up,
|
||||
_mock_service_get_all_by_topic):
|
||||
def test_get_pools_no_pools(self):
|
||||
context = 'fake_context'
|
||||
|
||||
services = [
|
||||
dict(id=1, host='host1', topic='share', disabled=False,
|
||||
availability_zone='zone1', updated_at=timeutils.utcnow()),
|
||||
dict(id=2, host='host2@back1', topic='share', disabled=False,
|
||||
availability_zone='zone1', updated_at=timeutils.utcnow()),
|
||||
dict(id=3, host='host2@back2', topic='share', disabled=False,
|
||||
availability_zone='zone2', updated_at=timeutils.utcnow()),
|
||||
]
|
||||
|
||||
mocked_service_states = {
|
||||
'host1': dict(share_backend_name='AAA',
|
||||
total_capacity_gb=512, free_capacity_gb=200,
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False),
|
||||
'host2@back1': dict(share_backend_name='BBB',
|
||||
total_capacity_gb=256, free_capacity_gb=100,
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False),
|
||||
'host2@back2': dict(share_backend_name='CCC',
|
||||
total_capacity_gb=10000, free_capacity_gb=700,
|
||||
timestamp=None, reserved_percentage=0,
|
||||
driver_handles_share_servers=False),
|
||||
}
|
||||
|
||||
_mock_service_get_all_by_topic.return_value = services
|
||||
_mock_service_is_up.return_value = True
|
||||
_mock_warning = mock.Mock()
|
||||
host_manager.LOG.warn = _mock_warning
|
||||
self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
|
||||
self.mock_object(
|
||||
db, 'service_get_all_by_topic',
|
||||
mock.Mock(return_value=fakes.SHARE_SERVICES_NO_POOLS))
|
||||
host_manager.LOG.warn = mock.Mock()
|
||||
|
||||
with mock.patch.dict(self.host_manager.service_states,
|
||||
mocked_service_states):
|
||||
# Call get_all_host_states to populate host_state_map
|
||||
self.host_manager.get_all_host_states_share(context)
|
||||
fakes.SERVICE_STATES_NO_POOLS):
|
||||
|
||||
res = self.host_manager.get_pools(context)
|
||||
|
||||
# Check if get_pools returns all 3 pools
|
||||
self.assertEqual(3, len(res))
|
||||
|
||||
expected = [
|
||||
{
|
||||
'name': 'host1#AAA',
|
||||
'host': 'host1',
|
||||
'backend': None,
|
||||
'pool': 'AAA',
|
||||
'capabilities': {
|
||||
'timestamp': None,
|
||||
'share_backend_name': 'AAA',
|
||||
'free_capacity_gb': 200,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 512,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None,
|
||||
'driver_handles_share_servers': False,
|
||||
},
|
||||
}, {
|
||||
'name': 'host2@back1#BBB',
|
||||
'host': 'host2',
|
||||
'backend': 'back1',
|
||||
'pool': 'BBB',
|
||||
'capabilities': {
|
||||
'timestamp': None,
|
||||
'share_backend_name': 'BBB',
|
||||
'free_capacity_gb': 100,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 256,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None,
|
||||
'driver_handles_share_servers': False,
|
||||
},
|
||||
}, {
|
||||
'name': 'host2@back2#CCC',
|
||||
'host': 'host2',
|
||||
'backend': 'back2',
|
||||
'pool': 'CCC',
|
||||
'capabilities': {
|
||||
'timestamp': None,
|
||||
'share_backend_name': 'CCC',
|
||||
'free_capacity_gb': 700,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 10000,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None,
|
||||
'driver_handles_share_servers': False,
|
||||
},
|
||||
},
|
||||
]
|
||||
self.assertEqual(len(expected), len(res))
|
||||
self.assertEqual(sorted(expected), sorted(res))
|
||||
|
||||
def test_get_pools(self):
|
||||
context = 'fake_context'
|
||||
self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
|
||||
self.mock_object(
|
||||
db, 'service_get_all_by_topic',
|
||||
mock.Mock(return_value=fakes.SHARE_SERVICES_WITH_POOLS))
|
||||
host_manager.LOG.warn = mock.Mock()
|
||||
|
||||
with mock.patch.dict(self.host_manager.service_states,
|
||||
fakes.SHARE_SERVICE_STATES_WITH_POOLS):
|
||||
|
||||
res = self.host_manager.get_pools(context)
|
||||
|
||||
expected = [
|
||||
{
|
||||
'name': 'host1@AAA#pool1',
|
||||
'host': 'host1',
|
||||
'backend': 'AAA',
|
||||
'pool': 'pool1',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool1',
|
||||
'timestamp': None,
|
||||
'share_backend_name': 'AAA',
|
||||
'free_capacity_gb': 41,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 51,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None,
|
||||
'driver_handles_share_servers': False,
|
||||
},
|
||||
}, {
|
||||
'name': 'host2@BBB#pool2',
|
||||
'host': 'host2',
|
||||
'backend': 'BBB',
|
||||
'pool': 'pool2',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool2',
|
||||
'timestamp': None,
|
||||
'share_backend_name': 'BBB',
|
||||
'free_capacity_gb': 42,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 52,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None,
|
||||
'driver_handles_share_servers': False,
|
||||
},
|
||||
}, {
|
||||
'name': 'host3@CCC#pool3',
|
||||
'host': 'host3',
|
||||
'backend': 'CCC',
|
||||
'pool': 'pool3',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool3',
|
||||
'timestamp': None,
|
||||
'share_backend_name': 'CCC',
|
||||
'free_capacity_gb': 43,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 53,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None,
|
||||
'driver_handles_share_servers': False,
|
||||
},
|
||||
}, {
|
||||
'name': 'host4@DDD#pool4a',
|
||||
'host': 'host4',
|
||||
'backend': 'DDD',
|
||||
'pool': 'pool4a',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool4a',
|
||||
'timestamp': None,
|
||||
'share_backend_name': 'DDD',
|
||||
'free_capacity_gb': 441,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 541,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None,
|
||||
'driver_handles_share_servers': False,
|
||||
},
|
||||
}, {
|
||||
'name': 'host4@DDD#pool4b',
|
||||
'host': 'host4',
|
||||
'backend': 'DDD',
|
||||
'pool': 'pool4b',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool4b',
|
||||
'timestamp': None,
|
||||
'share_backend_name': 'DDD',
|
||||
'free_capacity_gb': 442,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 542,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None,
|
||||
'driver_handles_share_servers': False,
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
self.assertEqual(len(expected), len(res))
|
||||
self.assertEqual(sorted(expected), sorted(res))
|
||||
|
||||
def test_get_pools_host_down(self):
|
||||
context = 'fake_context'
|
||||
mock_service_is_up = self.mock_object(utils, 'service_is_up')
|
||||
self.mock_object(
|
||||
db, 'service_get_all_by_topic',
|
||||
mock.Mock(return_value=fakes.SHARE_SERVICES_NO_POOLS))
|
||||
host_manager.LOG.warn = mock.Mock()
|
||||
|
||||
with mock.patch.dict(self.host_manager.service_states,
|
||||
fakes.SERVICE_STATES_NO_POOLS):
|
||||
|
||||
# Initialize host data with all services present
|
||||
mock_service_is_up.side_effect = [True, True, True]
|
||||
|
||||
# Call once to update the host state map
|
||||
self.host_manager.get_pools(context)
|
||||
|
||||
self.assertEqual(len(fakes.SHARE_SERVICES_NO_POOLS),
|
||||
len(self.host_manager.host_state_map))
|
||||
|
||||
# Then mock one host as down
|
||||
mock_service_is_up.side_effect = [True, True, False]
|
||||
|
||||
res = self.host_manager.get_pools(context)
|
||||
|
||||
expected = [
|
||||
{
|
||||
'name': 'host1#AAA',
|
||||
'host': 'host1',
|
||||
'backend': None,
|
||||
'pool': 'AAA',
|
||||
'capabilities': {
|
||||
'timestamp': None,
|
||||
'driver_handles_share_servers': False,
|
||||
@ -211,10 +374,13 @@ class HostManagerTestCase(test.TestCase):
|
||||
'total_capacity_gb': 512,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None},
|
||||
},
|
||||
{
|
||||
'storage_protocol': None
|
||||
},
|
||||
}, {
|
||||
'name': 'host2@back1#BBB',
|
||||
'host': 'host2',
|
||||
'backend': 'back1',
|
||||
'pool': 'BBB',
|
||||
'capabilities': {
|
||||
'timestamp': None,
|
||||
'driver_handles_share_servers': False,
|
||||
@ -224,25 +390,74 @@ class HostManagerTestCase(test.TestCase):
|
||||
'total_capacity_gb': 256,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None},
|
||||
'storage_protocol': None
|
||||
},
|
||||
},
|
||||
]
|
||||
self.assertEqual(len(expected),
|
||||
len(self.host_manager.host_state_map))
|
||||
self.assertEqual(len(expected), len(res))
|
||||
self.assertEqual(sorted(expected), sorted(res))
|
||||
|
||||
def test_get_pools_with_filters(self):
|
||||
context = 'fake_context'
|
||||
self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
|
||||
self.mock_object(
|
||||
db, 'service_get_all_by_topic',
|
||||
mock.Mock(return_value=fakes.SHARE_SERVICES_WITH_POOLS))
|
||||
host_manager.LOG.warn = mock.Mock()
|
||||
|
||||
with mock.patch.dict(self.host_manager.service_states,
|
||||
fakes.SHARE_SERVICE_STATES_WITH_POOLS):
|
||||
|
||||
res = self.host_manager.get_pools(
|
||||
context, filters={'host': 'host2', 'pool': 'pool*'})
|
||||
|
||||
expected = [
|
||||
{
|
||||
'name': 'host2@back2#CCC',
|
||||
'name': 'host2@BBB#pool2',
|
||||
'host': 'host2',
|
||||
'backend': 'BBB',
|
||||
'pool': 'pool2',
|
||||
'capabilities': {
|
||||
'pool_name': 'pool2',
|
||||
'timestamp': None,
|
||||
'driver_handles_share_servers': False,
|
||||
'share_backend_name': 'CCC',
|
||||
'free_capacity_gb': 700,
|
||||
'share_backend_name': 'BBB',
|
||||
'free_capacity_gb': 42,
|
||||
'driver_version': None,
|
||||
'total_capacity_gb': 10000,
|
||||
'total_capacity_gb': 52,
|
||||
'reserved_percentage': 0,
|
||||
'vendor_name': None,
|
||||
'storage_protocol': None},
|
||||
}
|
||||
'storage_protocol': None
|
||||
},
|
||||
},
|
||||
]
|
||||
self.assertEqual(len(expected), len(res))
|
||||
self.assertEqual(sorted(expected), sorted(res))
|
||||
|
||||
@ddt.data(
|
||||
None,
|
||||
{},
|
||||
{'key1': 'value1'},
|
||||
{'key1': 'value1', 'key2': 'value*'},
|
||||
{'key1': '.*', 'key2': '.*'},
|
||||
)
|
||||
def test_passes_filters_true(self, filter):
|
||||
|
||||
data = {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'}
|
||||
self.assertTrue(self.host_manager._passes_filters(data, filter))
|
||||
|
||||
@ddt.data(
|
||||
{'key1': 'value$'},
|
||||
{'key4': 'value'},
|
||||
{'key1': 'value1.+', 'key2': 'value*'},
|
||||
)
|
||||
def test_passes_filters_false(self, filter):
|
||||
|
||||
data = {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'}
|
||||
self.assertFalse(self.host_manager._passes_filters(data, filter))
|
||||
|
||||
|
||||
class HostStateTestCase(test.TestCase):
|
||||
"""Test case for HostState class."""
|
||||
|