Prevent Share operations during share migration

Disabled the following operations during migration because they are
not supported:
- extend
- shrink
- delete
- create snapshot
- unmanage

This patch also changes task_state field value from MIGRATING to
MIGRATION_IN_PROGRESS, in order to better parse the string to
determine if the share is busy specifically due to migration.

Also made some small adjustments to migration code, such as moving
the action of reverting access rules to status MIGRATION_IN_PROGRESS
instead of MIGRATION_COMPLETING, in order to better fit
the migration statuses for admin diagnosis, future 2-phase
and error cleanup implementations.

Replaced the unused ShareIsBusy exception with a new ShareBusyException
that is raised when the share's task_state indicates an active task.

Closes-Bug: #1513436
Change-Id: I28123fe04b93331d82932051539aba1d9e507f68
This commit is contained in:
Rodrigo Barbieri 2015-09-25 15:10:40 -03:00
parent f38b8d4efd
commit 92e7e5d5e9
7 changed files with 133 additions and 83 deletions

View File

@ -35,15 +35,15 @@ STATUS_SHRINKING_POSSIBLE_DATA_LOSS_ERROR = (
'shrinking_possible_data_loss_error'
)
STATUS_TASK_STATE_MIGRATION_STARTING = 'migration_starting'
STATUS_TASK_STATE_MIGRATION_IN_PROGRESS = 'migration_in_progress'
STATUS_TASK_STATE_MIGRATION_ERROR = 'migration_error'
STATUS_TASK_STATE_MIGRATION_SUCCESS = 'migration_success'
STATUS_TASK_STATE_MIGRATION_COMPLETING = 'migration_completing'
STATUS_TASK_STATE_MIGRATION_MIGRATING = 'migrating'
BUSY_TASK_STATES = (
STATUS_TASK_STATE_MIGRATION_COMPLETING,
STATUS_TASK_STATE_MIGRATION_MIGRATING,
STATUS_TASK_STATE_MIGRATION_STARTING,
STATUS_TASK_STATE_MIGRATION_IN_PROGRESS,
)
TRANSITIONAL_STATUSES = (
@ -100,7 +100,7 @@ TASK_STATE_STATUSES = (
STATUS_TASK_STATE_MIGRATION_ERROR,
STATUS_TASK_STATE_MIGRATION_SUCCESS,
STATUS_TASK_STATE_MIGRATION_COMPLETING,
STATUS_TASK_STATE_MIGRATION_MIGRATING,
STATUS_TASK_STATE_MIGRATION_IN_PROGRESS,
)

View File

@ -201,6 +201,13 @@ class Share(BASE, ManilaBase):
if len(self.instances) > 0:
return self.instance.export_location
@property
def is_busy(self):
    """Return True while the share has an active task (e.g. a migration).

    A share whose ``task_state`` is in ``constants.BUSY_TASK_STATES``
    must not be modified until that task completes.
    """
    return self.task_state in constants.BUSY_TASK_STATES
@property
def export_locations(self):
# TODO(u_glide): Return a map with lists of locations per AZ when

View File

@ -384,6 +384,10 @@ class InvalidShare(Invalid):
message = _("Invalid share: %(reason)s.")
class ShareBusyException(Invalid):
    """The share is locked by an active task and cannot accept the request."""
    message = _("Share is busy with an active task: %(reason)s.")
class InvalidShareInstance(Invalid):
    """A share instance is in a state that does not allow the operation."""
    message = _("Invalid share instance: %(reason)s.")
@ -414,10 +418,6 @@ class InvalidShareAccessLevel(Invalid):
message = _("Invalid or unsupported share access level: %(level)s.")
class ShareIsBusy(ManilaException):
    """A share cannot be deleted while it is still in use."""

    # The original message used "$(share_name)", which is not a valid Python
    # %-style placeholder: interpolating kwargs would leave the literal text
    # "$(share_name)" in the user-visible error. Use "%(share_name)s" and fix
    # the grammar ("share that used" -> "share that is in use").
    message = _("Deleting %(share_name)s share that is in use.")
class ShareBackendException(ManilaException):
    """Generic wrapper for an error reported by the share backend driver."""
    message = _("Share backend error: %(msg)s.")

View File

@ -365,12 +365,7 @@ class API(base.Base):
def unmanage(self, context, share):
policy.check_policy(context, 'share', 'unmanage')
# Make sure share is not part of a migration
if share['task_state'] in constants.BUSY_TASK_STATES:
msg = _("Share %s is busy as part of an active "
"task.") % share['id']
LOG.error(msg)
raise exception.InvalidShare(reason=msg)
self._check_is_share_busy(share)
update_data = {'status': constants.STATUS_UNMANAGING,
'terminated_at': timeutils.utcnow()}
@ -412,14 +407,7 @@ class API(base.Base):
cgsnapshot_members_count)
raise exception.InvalidShare(reason=msg)
# Make sure share is not part of a migration
if share['task_state'] not in (
None, constants.STATUS_TASK_STATE_MIGRATION_ERROR,
constants.STATUS_TASK_STATE_MIGRATION_SUCCESS):
msg = _("Share %s is busy as part of an active "
"task.") % share['id']
LOG.error(msg)
raise exception.InvalidShare(reason=msg)
self._check_is_share_busy(share)
try:
reservations = QUOTAS.reserve(context,
@ -497,6 +485,8 @@ class API(base.Base):
size = share['size']
self._check_is_share_busy(share)
try:
reservations = QUOTAS.reserve(
context, snapshots=1, snapshot_gigabytes=size)
@ -563,15 +553,9 @@ class API(base.Base):
'but current status is: %(instance_status)s.') % {
'instance_id': share_instance['id'],
'instance_status': share_instance['status']}
LOG.error(msg)
raise exception.InvalidShare(reason=msg)
# Make sure share is not part of a migration
if share['task_state'] in constants.BUSY_TASK_STATES:
msg = _("Share %s is busy as part of an active "
"task.") % share['id']
LOG.error(msg)
raise exception.InvalidShare(reason=msg)
self._check_is_share_busy(share)
# Make sure the destination host is different than the current one
if host == share_instance['host']:
@ -579,14 +563,12 @@ class API(base.Base):
'than the current host %(src_host)s.') % {
'dest_host': host,
'src_host': share_instance['host']}
LOG.error(msg)
raise exception.InvalidHost(reason=msg)
# We only handle shares without snapshots for now
snaps = self.db.share_snapshot_get_all_for_share(context, share['id'])
if snaps:
msg = _("Share %s must not have snapshots.") % share['id']
LOG.error(msg)
raise exception.InvalidShare(reason=msg)
# Make sure the host is in the list of available hosts
@ -886,6 +868,16 @@ class API(base.Base):
"""Delete the given metadata item from a share."""
self.db.share_metadata_delete(context, share['id'], key)
def _check_is_share_busy(self, share):
    """Raise ShareBusyException when the share has an active task.

    :param share: share model exposing ``is_busy``, ``id`` and
        ``task_state``.
    :raises exception.ShareBusyException: if ``share.is_busy`` is True.
    """
    if not share.is_busy:
        return
    msg = _("Share %(share_id)s is busy as part of an active "
            "task: %(task)s.") % {
        'share_id': share['id'],
        'task': share['task_state'],
    }
    raise exception.ShareBusyException(reason=msg)
def _check_metadata_properties(self, context, metadata=None):
if not metadata:
metadata = {}
@ -947,6 +939,8 @@ class API(base.Base):
"%(status)s.") % msg_params
raise exception.InvalidShare(reason=msg)
self._check_is_share_busy(share)
size_increase = int(new_size) - share['size']
if size_increase <= 0:
msg = (_("New size for extend must be greater "
@ -1001,6 +995,8 @@ class API(base.Base):
"%(status)s.") % msg_params
raise exception.InvalidShare(reason=msg)
self._check_is_share_busy(share)
size_decrease = int(share['size']) - int(new_size)
if size_decrease <= 0 or new_size <= 0:
msg = (_("New size for shrink must be less "

View File

@ -221,6 +221,16 @@ class ShareManager(manager.SchedulerDependentManager):
self.host)
LOG.debug("Re-exporting %s shares", len(share_instances))
for share_instance in share_instances:
share_ref = self.db.share_get(ctxt, share_instance['share_id'])
if share_ref.is_busy:
LOG.info(
_LI("Share instance %(id)s: skipping export, "
"because it is busy with an active task: %(task)s."),
{'id': share_instance['id'],
'task': share_ref['task_state']},
)
continue
if share_instance['status'] != constants.STATUS_AVAILABLE:
LOG.info(
_LI("Share instance %(id)s: skipping export, "
@ -543,7 +553,7 @@ class ShareManager(manager.SchedulerDependentManager):
self.db.share_update(
ctxt, share_ref['id'],
{'task_state': constants.STATUS_TASK_STATE_MIGRATION_MIGRATING})
{'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS})
if not force_host_copy:
try:
@ -659,12 +669,12 @@ class ShareManager(manager.SchedulerDependentManager):
helper.revert_access_rules(readonly_support, saved_rules)
raise
helper.revert_access_rules(readonly_support, saved_rules)
self.db.share_update(
context, share['id'],
{'task_state': constants.STATUS_TASK_STATE_MIGRATION_COMPLETING})
helper.revert_access_rules(readonly_support, saved_rules)
self.db.share_instance_update(context, new_share_instance['id'],
{'status': constants.STATUS_AVAILABLE})

View File

@ -805,37 +805,38 @@ class ShareAPITestCase(test.TestCase):
self.context, share_data, driver_options)
def test_unmanage(self):
share_data = {
'id': 'fakeid',
'host': 'fake',
'size': '1',
'status': constants.STATUS_AVAILABLE,
'user_id': self.context.user_id,
'project_id': self.context.project_id,
'task_state': None
}
share = db_utils.create_share(
id='fakeid',
host='fake',
size='1',
status=constants.STATUS_AVAILABLE,
user_id=self.context.user_id,
project_id=self.context.project_id,
task_state=None)
self.mock_object(db_api, 'share_update', mock.Mock())
self.api.unmanage(self.context, share_data)
self.api.unmanage(self.context, share)
self.share_rpcapi.unmanage_share.assert_called_once_with(
self.context, mock.ANY)
db_api.share_update.assert_called_once_with(
mock.ANY, share_data['id'], mock.ANY)
mock.ANY, share['id'], mock.ANY)
def test_unmanage_task_state_busy(self):
share_data = {
'id': 'fakeid',
'host': 'fake',
'size': '1',
'status': constants.STATUS_AVAILABLE,
'user_id': self.context.user_id,
'project_id': self.context.project_id,
'task_state': constants.STATUS_TASK_STATE_MIGRATION_MIGRATING
}
self.assertRaises(exception.InvalidShare, self.api.unmanage,
self.context, share_data)
share = db_utils.create_share(
id='fakeid',
host='fake',
size='1',
status=constants.STATUS_AVAILABLE,
user_id=self.context.user_id,
project_id=self.context.project_id,
task_state=constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS)
self.assertRaises(exception.ShareBusyException, self.api.unmanage,
self.context, share)
@mock.patch.object(quota.QUOTAS, 'reserve',
mock.Mock(return_value='reservation'))
@ -940,6 +941,19 @@ class ShareAPITestCase(test.TestCase):
share_api.policy.check_policy.assert_called_once_with(
self.context, 'share', 'create_snapshot', share)
def test_create_snapshot_invalid_task_state(self):
    """create_snapshot must be rejected while a migration is in progress."""
    share = db_utils.create_share(
        status=constants.STATUS_AVAILABLE,
        task_state=constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS)
    self.assertRaises(exception.ShareBusyException,
                      self.api.create_snapshot,
                      self.context,
                      share,
                      'fakename',
                      'fakedesc')
    # The policy check runs before the busy-state check, so it must still
    # have been enforced exactly once even though the call was rejected.
    share_api.policy.check_policy.assert_called_once_with(
        self.context, 'share', 'create_snapshot', share)
@ddt.data({'use_scheduler': False, 'valid_host': 'fake'},
{'use_scheduler': True, 'valid_host': None})
@ddt.unpack
@ -1075,12 +1089,12 @@ class ShareAPITestCase(test.TestCase):
share
)
def test_delete_share_part_of_migration(self):
def test_delete_share_invalid_task_state(self):
share = db_utils.create_share(
status=constants.STATUS_AVAILABLE,
task_state=constants.STATUS_TASK_STATE_MIGRATION_MIGRATING)
task_state=constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS)
self.assertRaises(exception.InvalidShare,
self.assertRaises(exception.ShareBusyException,
self.api.delete,
self.context, share)
@ -1505,6 +1519,15 @@ class ShareAPITestCase(test.TestCase):
self.assertRaises(exception.InvalidShare,
self.api.extend, self.context, share, new_size)
def test_extend_invalid_task_state(self):
    """extend must be rejected while a migration is in progress."""
    share = db_utils.create_share(
        status=constants.STATUS_AVAILABLE,
        task_state=constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS)
    new_size = 123
    self.assertRaises(exception.ShareBusyException,
                      self.api.extend, self.context, share, new_size)
def test_extend_invalid_size(self):
share = db_utils.create_share(status=constants.STATUS_AVAILABLE,
size=200)
@ -1547,6 +1570,14 @@ class ShareAPITestCase(test.TestCase):
self.assertRaises(exception.InvalidShare,
self.api.shrink, self.context, share, 123)
def test_shrink_invalid_task_state(self):
    """shrink must be rejected while a migration is in progress."""
    share = db_utils.create_share(
        status=constants.STATUS_AVAILABLE,
        task_state=constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS)
    self.assertRaises(exception.ShareBusyException,
                      self.api.shrink, self.context, share, 123)
@ddt.data(300, 0, -1)
def test_shrink_invalid_size(self, new_size):
share = db_utils.create_share(status=constants.STATUS_AVAILABLE,
@ -1596,23 +1627,17 @@ class ShareAPITestCase(test.TestCase):
share = db_utils.create_share(
status=constants.STATUS_ERROR)
mock_log = self.mock_object(share_api, 'LOG')
self.assertRaises(exception.InvalidShare, self.api.migrate_share,
self.context, share, host, True)
self.assertTrue(mock_log.error.called)
def test_migrate_share_task_state_invalid(self):
host = 'fake2@backend#pool'
share = db_utils.create_share(
status=constants.STATUS_AVAILABLE,
task_state=constants.STATUS_TASK_STATE_MIGRATION_MIGRATING)
task_state=constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS)
mock_log = self.mock_object(share_api, 'LOG')
self.assertRaises(exception.InvalidShare, self.api.migrate_share,
self.assertRaises(exception.ShareBusyException, self.api.migrate_share,
self.context, share, host, True)
self.assertTrue(mock_log.error.called)
def test_migrate_share_with_snapshots(self):
host = 'fake2@backend#pool'
@ -1621,11 +1646,8 @@ class ShareAPITestCase(test.TestCase):
self.mock_object(db_api, 'share_snapshot_get_all_for_share',
mock.Mock(return_value=True))
mock_log = self.mock_object(share_api, 'LOG')
self.assertRaises(exception.InvalidShare, self.api.migrate_share,
self.context, share, host, True)
self.assertTrue(mock_log.error.called)
def test_migrate_share_invalid_host(self):
host = 'fake@backend#pool'
@ -1644,12 +1666,9 @@ class ShareAPITestCase(test.TestCase):
share = db_utils.create_share(
host='fake@backend#pool', status=constants.STATUS_AVAILABLE)
mock_log = self.mock_object(share_api, 'LOG')
self.assertRaises(exception.InvalidHost,
self.api.migrate_share,
self.context, share, host, True)
self.assertTrue(mock_log.error.called)
def test_migrate_share_exception(self):
host = 'fake2@backend#pool'

View File

@ -168,6 +168,11 @@ class ShareManagerTestCase(test.TestCase):
db_utils.create_share(id='fake_id_3',
status=constants.STATUS_AVAILABLE,
display_name='fake_name_3').instance,
db_utils.create_share(
id='fake_id_4',
status=constants.STATUS_AVAILABLE,
task_state=constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS,
display_name='fake_name_4').instance,
]
if not setup_access_rules:
return instances
@ -195,7 +200,8 @@ class ShareManagerTestCase(test.TestCase):
'share_instances_get_all_by_host',
mock.Mock(return_value=instances))
self.mock_object(self.share_manager.db, 'share_instance_get',
mock.Mock(side_effect=[instances[0], instances[2]]))
mock.Mock(side_effect=[instances[0], instances[2],
instances[3]]))
self.mock_object(self.share_manager.db,
'share_export_locations_update')
self.mock_object(self.share_manager.driver, 'ensure_share',
@ -267,7 +273,8 @@ class ShareManagerTestCase(test.TestCase):
'share_instances_get_all_by_host',
mock.Mock(return_value=instances))
self.mock_object(self.share_manager.db, 'share_instance_get',
mock.Mock(side_effect=[instances[0], instances[2]]))
mock.Mock(side_effect=[instances[0], instances[2],
instances[3]]))
self.mock_object(self.share_manager.driver, 'ensure_share',
mock.Mock(side_effect=raise_exception))
self.mock_object(self.share_manager, '_ensure_share_instance_has_pool')
@ -304,7 +311,12 @@ class ShareManagerTestCase(test.TestCase):
self.share_manager.publish_service_capabilities.\
assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
manager.LOG.info.assert_called_once_with(
manager.LOG.info.assert_any_call(
mock.ANY,
{'task': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS,
'id': instances[3]['id']},
)
manager.LOG.info.assert_any_call(
mock.ANY,
{'id': instances[1]['id'], 'status': instances[1]['status']},
)
@ -319,7 +331,8 @@ class ShareManagerTestCase(test.TestCase):
'share_instances_get_all_by_host',
mock.Mock(return_value=instances))
self.mock_object(self.share_manager.db, 'share_instance_get',
mock.Mock(side_effect=[instances[0], instances[2]]))
mock.Mock(side_effect=[instances[0], instances[2],
instances[3]]))
self.mock_object(self.share_manager.driver, 'ensure_share',
mock.Mock(return_value=None))
self.mock_object(self.share_manager, '_ensure_share_instance_has_pool')
@ -361,7 +374,12 @@ class ShareManagerTestCase(test.TestCase):
self.share_manager.publish_service_capabilities.\
assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
manager.LOG.info.assert_called_once_with(
manager.LOG.info.assert_any_call(
mock.ANY,
{'task': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS,
'id': instances[3]['id']},
)
manager.LOG.info.assert_any_call(
mock.ANY,
{'id': instances[1]['id'], 'status': instances[1]['status']},
)
@ -2417,7 +2435,7 @@ class ShareManagerTestCase(test.TestCase):
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_MIGRATING
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_success = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
@ -2463,7 +2481,7 @@ class ShareManagerTestCase(test.TestCase):
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_MIGRATING
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_success = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
@ -2504,7 +2522,7 @@ class ShareManagerTestCase(test.TestCase):
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_MIGRATING
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_success = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
@ -2549,7 +2567,7 @@ class ShareManagerTestCase(test.TestCase):
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_MIGRATING
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_error = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_ERROR
@ -2597,7 +2615,7 @@ class ShareManagerTestCase(test.TestCase):
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_MIGRATING
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_error = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_ERROR
@ -2637,7 +2655,7 @@ class ShareManagerTestCase(test.TestCase):
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_MIGRATING
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_success = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS