diff --git a/manila/db/api.py b/manila/db/api.py index ccdb5c88b5..c37b80a856 100644 --- a/manila/db/api.py +++ b/manila/db/api.py @@ -386,6 +386,11 @@ def share_instance_get_all_by_share_group_id(context, share_group_id): return IMPL.share_instance_get_all_by_share_group_id( context, share_group_id) + +def share_instance_sizes_sum_by_host(context, host): + """Returns sum of sizes of all share instances on given host.""" + return IMPL.share_instance_sizes_sum_by_host(context, host) + ################### diff --git a/manila/db/sqlalchemy/api.py b/manila/db/sqlalchemy/api.py index 6f97d9e416..0790b62626 100644 --- a/manila/db/sqlalchemy/api.py +++ b/manila/db/sqlalchemy/api.py @@ -1997,7 +1997,6 @@ def share_instance_get_all_by_host(context, host, with_share_data=False, ) if status is not None: instances = instances.filter(models.ShareInstance.status == status) - if with_share_data: instances = _set_instances_share_data(instances) else: @@ -2006,6 +2005,20 @@ def share_instance_get_all_by_host(context, host, with_share_data=False, return instances +@require_context +@context_manager.reader +def share_instance_sizes_sum_by_host(context, host): + result = model_query( + context, models.Share, func.sum(models.Share.size), + ).join( + models.ShareInstance.share, + ).filter(or_( + models.ShareInstance.host == host, + models.ShareInstance.host.like("{0}#%".format(host)), + )).first() + return int(result[0] or 0) + + @require_context @context_manager.reader def share_instance_get_all_by_share_network(context, share_network_id): diff --git a/manila/scheduler/host_manager.py b/manila/scheduler/host_manager.py index 60570bc06b..1663254c19 100644 --- a/manila/scheduler/host_manager.py +++ b/manila/scheduler/host_manager.py @@ -451,15 +451,7 @@ class PoolState(HostState): def _estimate_provisioned_capacity(self, host_name, context=None): """Estimate provisioned capacity from share sizes on backend.""" - provisioned_capacity = 0 - - instances = db.share_instance_get_all_by_host( 
- context, host_name, with_share_data=True) - - for instance in instances: - # Size of share instance that's still being created, will be None. - provisioned_capacity += instance['size'] or 0 - return provisioned_capacity + return db.share_instance_sizes_sum_by_host(context, host_name) @utils.synchronized("update_from_share_capability") def update_from_share_capability( diff --git a/manila/tests/db/sqlalchemy/test_api.py b/manila/tests/db/sqlalchemy/test_api.py index 2c3b654f52..950ddec4a4 100644 --- a/manila/tests/db/sqlalchemy/test_api.py +++ b/manila/tests/db/sqlalchemy/test_api.py @@ -602,6 +602,18 @@ class ShareDatabaseAPITestCase(test.TestCase): self.assertEqual(0, len(instances)) + @ddt.data( + {'status': constants.STATUS_AVAILABLE}, + {'status': None}) + @ddt.unpack + def test_share_instance_get_all_by_host_no_instance(self, status): + db_utils.create_share_without_instance() + instances = db_api.share_instance_get_all_by_host( + self.ctxt, "fake_host", with_share_data=True, status=status + ) + + self.assertEqual(0, len(instances)) + def test_share_instance_get_all_by_share_group(self): group = db_utils.create_share_group() db_utils.create_share(share_group_id=group['id']) diff --git a/manila/tests/scheduler/test_host_manager.py b/manila/tests/scheduler/test_host_manager.py index ef5904f705..ea04f0704c 100644 --- a/manila/tests/scheduler/test_host_manager.py +++ b/manila/tests/scheduler/test_host_manager.py @@ -1251,9 +1251,10 @@ class PoolStateTestCase(test.TestCase): @ddt.unpack def test_update_from_share_capability(self, share_capability, instances): fake_context = context.RequestContext('user', 'project', is_admin=True) + sizes = [instance['size'] or 0 for instance in instances] self.mock_object( - db, 'share_instance_get_all_by_host', - mock.Mock(return_value=instances)) + db, 'share_instance_sizes_sum_by_host', + mock.Mock(return_value=sum(sizes))) fake_pool = host_manager.PoolState('host1', None, 'pool0') self.assertIsNone(fake_pool.free_capacity_gb) 
@@ -1276,19 +1277,19 @@ class PoolStateTestCase(test.TestCase): self.assertEqual(thin_provisioned, fake_pool.thin_provisioning) if 'provisioned_capacity_gb' not in share_capability or ( share_capability['provisioned_capacity_gb'] is None): - db.share_instance_get_all_by_host.assert_called_once_with( - fake_context, fake_pool.host, with_share_data=True) + db.share_instance_sizes_sum_by_host.assert_called_once_with( + fake_context, fake_pool.host) if len(instances) > 0: self.assertEqual(4, fake_pool.provisioned_capacity_gb) else: self.assertEqual(0, fake_pool.provisioned_capacity_gb) else: - self.assertFalse(db.share_instance_get_all_by_host.called) + self.assertFalse(db.share_instance_sizes_sum_by_host.called) self.assertEqual(share_capability['provisioned_capacity_gb'], fake_pool.provisioned_capacity_gb) else: self.assertFalse(fake_pool.thin_provisioning) - self.assertFalse(db.share_instance_get_all_by_host.called) + self.assertFalse(db.share_instance_sizes_sum_by_host.called) if 'provisioned_capacity_gb' not in share_capability or ( share_capability['provisioned_capacity_gb'] is None): self.assertIsNone(fake_pool.provisioned_capacity_gb) diff --git a/releasenotes/notes/bug-2020187-scheduler-performance-2edc4c706b2fea2f.yaml b/releasenotes/notes/bug-2020187-scheduler-performance-2edc4c706b2fea2f.yaml new file mode 100644 index 0000000000..4dca98b468 --- /dev/null +++ b/releasenotes/notes/bug-2020187-scheduler-performance-2edc4c706b2fea2f.yaml @@ -0,0 +1,5 @@ +--- +fixes: + - | + Improve scheduler performance when estimating the allocated capacity for thin + provisioning hosts.