Fix available share servers to be reused

The change [1] modified the function
`share_server_get_all_by_host_and_share_subnet_valid`, adding a
status option. However, it wrongly removed the actual status filter
from the query. As a result, the method returns all share servers
without taking the status into account.

This patch reverts that change and also adds a new function that
collects all matching share servers regardless of their status.

[1] https://review.opendev.org/c/openstack/manila/+/825110

Closes-Bug: #1978962
Change-Id: I8d9437eafde67407ba5e337dd495fdb74eefec70
Felipe Rodrigues 2022-06-16 12:11:27 -03:00
parent 5df22b3320
commit 8dc6235b5f
6 changed files with 103 additions and 40 deletions
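
For context before the diff: the status restriction added in [1] never took effect because SQLAlchemy query methods are generative, i.e. `filter()` and `filter_by()` return a new Query object rather than modifying the one they are called on, so an unassigned call is a no-op. Below is a minimal, self-contained sketch (not part of this patch) that uses a simplified stand-in model rather than Manila's actual ShareServer schema:

# Minimal sketch of the no-op filter pattern; the model and data are
# illustrative stand-ins, not Manila's schema.
import sqlalchemy as sa
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class ShareServer(Base):
    __tablename__ = 'share_servers'
    id = sa.Column(sa.Integer, primary_key=True)
    status = sa.Column(sa.String(32))


engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([ShareServer(status='active'),
                     ShareServer(status='error')])
    session.commit()

    query = session.query(ShareServer)

    # Pattern introduced by [1]: the filtered query is discarded, so the
    # status restriction is never applied.
    query.filter_by(status='active')
    print(query.count())  # 2 -- the 'error' server still slips through

    # Correct pattern (what this patch restores): keep the returned query.
    query = query.filter_by(status='active')
    print(query.count())  # 1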


@@ -1095,13 +1095,20 @@ def share_server_search_by_identifier(context, identifier, session=None):
def share_server_get_all_by_host_and_share_subnet_valid(context, host,
share_subnet_id,
server_status=None,
session=None):
"""Get share server DB records by host and share net not error."""
return IMPL.share_server_get_all_by_host_and_share_subnet_valid(
context, host, share_subnet_id, session=session)
def share_server_get_all_by_host_and_share_subnet(context, host,
share_subnet_id,
session=None):
"""Get share server DB records by host and share net."""
return IMPL.share_server_get_all_by_host_and_share_subnet(
context, host, share_subnet_id, session=session)
def share_server_get_all(context):
"""Get all share server DB records."""
return IMPL.share_server_get_all(context)


@@ -4445,24 +4445,21 @@ def share_server_search_by_identifier(context, identifier, session=None):
@require_context
def share_server_get_all_by_host_and_share_subnet_valid(context, host,
share_subnet_id,
server_status=None,
session=None):
query = (_server_get_query(context, session)
result = (_server_get_query(context, session)
.filter_by(host=host)
.filter(models.ShareServer.share_network_subnets.any(
id=share_subnet_id)))
if server_status:
query.filter_by(status=server_status)
else:
query.filter(models.ShareServer.status.in_(
(constants.STATUS_CREATING, constants.STATUS_ACTIVE)))
id=share_subnet_id))
.filter(models.ShareServer.status.in_(
(constants.STATUS_CREATING,
constants.STATUS_ACTIVE))).all())
result = query.all()
if not result:
filters_description = ('share_network_subnet_id is "%(share_net_id)s",'
' host is "%(host)s" and status in'
' "%(status_cr)s" or "%(status_act)s"') % {
'share_net_id': share_subnet_id,
filters_description = ('share_network_subnet_id is '
'"%(share_subnet_id)s", host is "%(host)s" and '
'status in "%(status_cr)s" or '
'"%(status_act)s"') % {
'share_subnet_id': share_subnet_id,
'host': host,
'status_cr': constants.STATUS_CREATING,
'status_act': constants.STATUS_ACTIVE,
@@ -4472,6 +4469,27 @@ def share_server_get_all_by_host_and_share_subnet_valid(context, host,
return result
@require_context
def share_server_get_all_by_host_and_share_subnet(context, host,
share_subnet_id,
session=None):
result = (_server_get_query(context, session)
.filter_by(host=host)
.filter(models.ShareServer.share_network_subnets.any(
id=share_subnet_id)).all())
if not result:
filters_description = ('share_network_subnet_id is '
'"%(share_subnet_id)s" and host is '
'"%(host)s".') % {
'share_subnet_id': share_subnet_id,
'host': host,
}
raise exception.ShareServerNotFoundByFilters(
filters_description=filters_description)
return result
@require_context
def share_server_get_all(context):
return _server_get_query(context).all()


@@ -6147,9 +6147,8 @@ class ShareManager(manager.SchedulerDependentManager):
current_subnets = [subnet for subnet in current_subnets
if subnet['id'] != new_share_network_subnet_id]
share_servers = (
self.db.share_server_get_all_by_host_and_share_subnet_valid(
context, self.host, new_share_network_subnet_id,
server_status=constants.STATUS_SERVER_NETWORK_CHANGE))
self.db.share_server_get_all_by_host_and_share_subnet(
context, self.host, new_share_network_subnet_id))
for share_server in share_servers:
share_server_id = share_server['id']


@@ -3314,8 +3314,7 @@ class ShareServerDatabaseAPITestCase(test.TestCase):
db_api.share_server_update,
self.ctxt, fake_id, {})
@ddt.data(None, constants.STATUS_SERVER_NETWORK_CHANGE)
def test_get_all_by_host_and_share_net_valid(self, server_status):
def test_get_all_by_host_and_share_subnet_valid(self):
subnet_1 = {
'id': '1',
'share_network_id': '1',
@@ -3326,16 +3325,11 @@ }
}
share_net_subnets1 = db_utils.create_share_network_subnet(**subnet_1)
share_net_subnets2 = db_utils.create_share_network_subnet(**subnet_2)
valid_no_status = {
valid = {
'share_network_subnets': [share_net_subnets1],
'host': 'host1',
'status': constants.STATUS_ACTIVE,
}
valid_with_status = {
'share_network_subnets': [share_net_subnets1],
'host': 'host1',
'status': constants.STATUS_SERVER_NETWORK_CHANGE,
}
invalid = {
'share_network_subnets': [share_net_subnets2],
'host': 'host1',
@@ -3346,28 +3340,67 @@ class ShareServerDatabaseAPITestCase(test.TestCase):
'host': 'host2',
'status': constants.STATUS_ACTIVE,
}
if server_status:
valid = db_utils.create_share_server(**valid_with_status)
else:
valid = db_utils.create_share_server(**valid_no_status)
valid = db_utils.create_share_server(**valid)
db_utils.create_share_server(**invalid)
db_utils.create_share_server(**other)
servers = db_api.share_server_get_all_by_host_and_share_subnet_valid(
self.ctxt,
host='host1',
share_subnet_id='1',
server_status=server_status)
share_subnet_id='1')
self.assertEqual(valid['id'], servers[0]['id'])
def test_get_all_by_host_and_share_net_not_found(self):
def test_get_all_by_host_and_share_subnet_valid_not_found(self):
self.assertRaises(
exception.ShareServerNotFound,
db_api.share_server_get_all_by_host_and_share_subnet_valid,
self.ctxt, host='fake', share_subnet_id='fake'
)
def test_get_all_by_host_and_share_subnet(self):
subnet_1 = {
'id': '1',
'share_network_id': '1',
}
share_net_subnets1 = db_utils.create_share_network_subnet(**subnet_1)
valid = {
'share_network_subnets': [share_net_subnets1],
'host': 'host1',
'status': constants.STATUS_SERVER_NETWORK_CHANGE,
}
other = {
'share_network_subnets': [share_net_subnets1],
'host': 'host1',
'status': constants.STATUS_ERROR,
}
invalid = {
'share_network_subnets': [share_net_subnets1],
'host': 'host2',
'status': constants.STATUS_ACTIVE,
}
valid = db_utils.create_share_server(**valid)
invalid = db_utils.create_share_server(**invalid)
other = db_utils.create_share_server(**other)
servers = db_api.share_server_get_all_by_host_and_share_subnet(
self.ctxt,
host='host1',
share_subnet_id='1')
self.assertEqual(2, len(servers))
ids = [s['id'] for s in servers]
self.assertTrue(valid['id'] in ids)
self.assertTrue(other['id'] in ids)
self.assertFalse(invalid['id'] in ids)
def test_get_all_by_host_and_share_subnet_not_found(self):
self.assertRaises(
exception.ShareServerNotFound,
db_api.share_server_get_all_by_host_and_share_subnet,
self.ctxt, host='fake', share_subnet_id='fake'
)
def test_get_all(self):
srv1 = {
'host': 'host1',


@@ -10011,7 +10011,7 @@ class ShareManagerTestCase(test.TestCase):
server = {'id': server_id}
mock_servers_get = self.mock_object(
self.share_manager.db,
'share_server_get_all_by_host_and_share_subnet_valid',
'share_server_get_all_by_host_and_share_subnet',
mock.Mock(return_value=[server]))
current_network_allocations = 'fake_current_net_allocations'
mock_form_net_allocations = self.mock_object(
@@ -10047,8 +10047,7 @@ self.context, net_id, new_subnet['availability_zone_id'],
self.context, net_id, new_subnet['availability_zone_id'],
fallback_to_default=False)
mock_servers_get.assert_called_once_with(
self.context, self.share_manager.host, new_share_network_subnet_id,
server_status=constants.STATUS_SERVER_NETWORK_CHANGE)
self.context, self.share_manager.host, new_share_network_subnet_id)
mock_form_net_allocations.assert_called_once_with(
self.context, server['id'], subnets)
mock_instances_get.assert_called_once_with(
@@ -10082,7 +10081,7 @@ server = {'id': server_id}
server = {'id': server_id}
mock_servers_get = self.mock_object(
self.share_manager.db,
'share_server_get_all_by_host_and_share_subnet_valid',
'share_server_get_all_by_host_and_share_subnet',
mock.Mock(return_value=[server]))
current_network_allocations = 'fake_current_net_allocations'
mock_form_net_allocations = self.mock_object(
@@ -10123,8 +10122,7 @@ self.context, net_id, new_subnet['availability_zone_id'],
self.context, net_id, new_subnet['availability_zone_id'],
fallback_to_default=False)
mock_servers_get.assert_called_once_with(
self.context, self.share_manager.host, new_share_network_subnet_id,
server_status=constants.STATUS_SERVER_NETWORK_CHANGE)
self.context, self.share_manager.host, new_share_network_subnet_id)
mock_form_net_allocations.assert_called_once_with(
self.context, server['id'], subnets)
mock_instances_get.assert_called_once_with(


@@ -0,0 +1,8 @@
---
fixes:
  - |
    Drivers running with DHSS=True go through a share server creation phase
    that tries to reuse an available share server. However, Manila was
    treating share servers in any state as reusable, instead of only those
    in the active or creating states. Now, only share servers in the
    correct states are passed to drivers as available for reuse.
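
As a usage illustration of the two DB helpers after this patch, here is a hedged sketch. The wrapper functions below are hypothetical names added for clarity, not Manila code; both helpers raise ShareServerNotFoundByFilters when nothing matches, and real callers apply additional checks before reusing a server.

# Hypothetical wrappers showing which helper fits which task after this fix.
from manila.db import api as db_api


def pick_server_for_reuse(context, host, share_subnet_id):
    # Provisioning-style lookup: only servers whose status is active or
    # creating are returned, so servers in error or other states are skipped.
    servers = db_api.share_server_get_all_by_host_and_share_subnet_valid(
        context, host, share_subnet_id)
    return servers[0]


def servers_for_network_change(context, host, share_subnet_id):
    # Network-allocation updates (see the ShareManager hunk above) need every
    # server on the subnet regardless of status, hence the new helper.
    return db_api.share_server_get_all_by_host_and_share_subnet(
        context, host, share_subnet_id)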