Merge "Improve scheduler performance on thin provisioning"
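This change speeds up the scheduler's provisioned-capacity estimate for thin provisioning. Instead of loading every share instance on a host (with its share data) and summing the sizes in Python, PoolState._estimate_provisioned_capacity now calls a new share_instance_sizes_sum_by_host DB API that computes a single SQL SUM over the shares joined to their instances on that host. A standalone sketch of the aggregation pattern follows the DB-layer hunk below.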
@@ -386,6 +386,11 @@ def share_instance_get_all_by_share_group_id(context, share_group_id):
     return IMPL.share_instance_get_all_by_share_group_id(
         context, share_group_id)
 
 
+def share_instance_sizes_sum_by_host(context, host):
+    """Returns sum of sizes of all share instances on given host."""
+    return IMPL.share_instance_sizes_sum_by_host(context, host)
+
+
 ###################
 
@@ -1997,7 +1997,6 @@ def share_instance_get_all_by_host(context, host, with_share_data=False,
     )
     if status is not None:
         instances = instances.filter(models.ShareInstance.status == status)
-
     if with_share_data:
         instances = _set_instances_share_data(instances)
     else:
@@ -2006,6 +2005,20 @@ def share_instance_get_all_by_host(context, host, with_share_data=False,
     return instances
 
 
+@require_context
+@context_manager.reader
+def share_instance_sizes_sum_by_host(context, host):
+    result = model_query(
+        context, models.Share, func.sum(models.Share.size),
+    ).join(
+        models.ShareInstance.share,
+    ).filter(or_(
+        models.ShareInstance.host == host,
+        models.ShareInstance.host.like("{0}#%".format(host)),
+    )).first()
+    return int(result[0] or 0)
+
+
 @require_context
 @context_manager.reader
 def share_instance_get_all_by_share_network(context, share_network_id):
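To see the aggregation pattern in isolation, here is a minimal, self-contained sketch against an in-memory SQLite database (assuming SQLAlchemy 1.4+). The Share and ShareInstance classes below are simplified stand-ins invented for this example, not manila's real models, and the query only mirrors the SUM + join + host-filter shape of the new share_instance_sizes_sum_by_host helper:

    # Simplified stand-in models; manila's real Share/ShareInstance models
    # carry many more columns and relationships.
    from sqlalchemy import (Column, ForeignKey, Integer, String,
                            create_engine, func, or_)
    from sqlalchemy.orm import Session, declarative_base, relationship

    Base = declarative_base()


    class Share(Base):
        __tablename__ = 'shares'
        id = Column(Integer, primary_key=True)
        size = Column(Integer)  # GiB; may be NULL while the share is creating


    class ShareInstance(Base):
        __tablename__ = 'share_instances'
        id = Column(Integer, primary_key=True)
        share_id = Column(Integer, ForeignKey('shares.id'))
        host = Column(String)
        share = relationship(Share)


    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([
            Share(id=1, size=3),
            Share(id=2, size=None),  # still being created, no size yet
            ShareInstance(share_id=1, host='host1#pool0'),
            ShareInstance(share_id=2, host='host1#pool0'),
        ])
        session.commit()

        # One SUM computed by the database instead of loading every
        # instance row into Python and summing sizes there.
        result = session.query(func.sum(Share.size)).join(
            ShareInstance, ShareInstance.share_id == Share.id,
        ).filter(or_(
            ShareInstance.host == 'host1',
            ShareInstance.host.like('host1#%'),
        )).scalar()
        print(int(result or 0))  # -> 3; NULL sizes count as zero

Pushing the SUM into the database means only a single value crosses the DB boundary, no matter how many share instances the host has.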
@@ -451,15 +451,7 @@ class PoolState(HostState):
 
     def _estimate_provisioned_capacity(self, host_name, context=None):
         """Estimate provisioned capacity from share sizes on backend."""
-        provisioned_capacity = 0
-
-        instances = db.share_instance_get_all_by_host(
-            context, host_name, with_share_data=True)
-
-        for instance in instances:
-            # Size of share instance that's still being created, will be None.
-            provisioned_capacity += instance['size'] or 0
-        return provisioned_capacity
+        return db.share_instance_sizes_sum_by_host(context, host_name)
 
     @utils.synchronized("update_from_share_capability")
     def update_from_share_capability(
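One behavioural detail worth noting: the removed loop skipped instances whose size is still None because the share is being created, and the new helper preserves that by returning int(result[0] or 0), so such instances (and hosts with no instances at all) still contribute zero to the estimate.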
@@ -602,6 +602,18 @@ class ShareDatabaseAPITestCase(test.TestCase):
 
         self.assertEqual(0, len(instances))
 
+    @ddt.data(
+        {'status': constants.STATUS_AVAILABLE},
+        {'status': None})
+    @ddt.unpack
+    def test_share_instance_get_all_by_host_no_instance(self, status):
+        db_utils.create_share_without_instance()
+        instances = db_api.share_instance_get_all_by_host(
+            self.ctxt, "fake_host", with_share_data=True, status=status
+        )
+
+        self.assertEqual(0, len(instances))
+
     def test_share_instance_get_all_by_share_group(self):
         group = db_utils.create_share_group()
         db_utils.create_share(share_group_id=group['id'])
@@ -1251,9 +1251,10 @@ class PoolStateTestCase(test.TestCase):
     @ddt.unpack
     def test_update_from_share_capability(self, share_capability, instances):
         fake_context = context.RequestContext('user', 'project', is_admin=True)
+        sizes = [instance['size'] or 0 for instance in instances]
         self.mock_object(
-            db, 'share_instance_get_all_by_host',
-            mock.Mock(return_value=instances))
+            db, 'share_instance_sizes_sum_by_host',
+            mock.Mock(return_value=sum(sizes)))
         fake_pool = host_manager.PoolState('host1', None, 'pool0')
         self.assertIsNone(fake_pool.free_capacity_gb)
 
@@ -1276,19 +1277,19 @@ class PoolStateTestCase(test.TestCase):
             self.assertEqual(thin_provisioned, fake_pool.thin_provisioning)
             if 'provisioned_capacity_gb' not in share_capability or (
                     share_capability['provisioned_capacity_gb'] is None):
-                db.share_instance_get_all_by_host.assert_called_once_with(
-                    fake_context, fake_pool.host, with_share_data=True)
+                db.share_instance_sizes_sum_by_host.assert_called_once_with(
+                    fake_context, fake_pool.host)
                 if len(instances) > 0:
                     self.assertEqual(4, fake_pool.provisioned_capacity_gb)
                 else:
                     self.assertEqual(0, fake_pool.provisioned_capacity_gb)
             else:
-                self.assertFalse(db.share_instance_get_all_by_host.called)
+                self.assertFalse(db.share_instance_sizes_sum_by_host.called)
                 self.assertEqual(share_capability['provisioned_capacity_gb'],
                                  fake_pool.provisioned_capacity_gb)
         else:
             self.assertFalse(fake_pool.thin_provisioning)
-            self.assertFalse(db.share_instance_get_all_by_host.called)
+            self.assertFalse(db.share_instance_sizes_sum_by_host.called)
             if 'provisioned_capacity_gb' not in share_capability or (
                     share_capability['provisioned_capacity_gb'] is None):
                 self.assertIsNone(fake_pool.provisioned_capacity_gb)
@@ -0,0 +1,5 @@
+---
+fixes:
+  - |
+    Improved scheduler performance when estimating the allocated capacity
+    for thin provisioning hosts.