PowerMax Driver - Unisphere storage group/array tagging support
This feature allows the user to tag a storage group and/or an array in
Unisphere with user-defined tags, to facilitate easy access and
classification.

Change-Id: Ifaf66360a604df55be8c0c197de6d9f1bf3cc9f6
Implements: blueprint powermax-storage-group-tagging
parent cd36b0ce8b
commit 7c8cd0ba05
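
Editorial note (not part of the commit): storage-group tags are driven by the
storagetype:storagegrouptags volume-type extra spec and array tags by the new
powermax_array_tag_list backend configuration option; both are validated
against the character and length rules introduced in this change. A minimal
sketch of those inputs, with values taken from the commit's own test data:

    # Illustrative sketch only -- values mirror this commit's test data.
    import re

    # Storage-group tags: a comma-separated string on the volume type.
    volume_type_extra_specs = {
        'storagetype:storagegrouptags': u'production,test'}

    # Array tags: the new 'powermax_array_tag_list' backend option, which
    # cinder parses into a list (for example
    # "powermax_array_tag_list = OpenStack,Production" in cinder.conf).
    powermax_array_tag_list = ['OpenStack', 'Production']

    # At most 8 tags, each limited to letters, digits, dashes and underscores.
    assert all(re.match(r'^[a-zA-Z0-9_\-]+$', tag)
               for tag in powermax_array_tag_list)

The reconstructed diff follows.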
@@ -81,6 +81,7 @@ class PowerMaxData(object):
     temp_snapvx = 'temp-00001-snapshot_for_clone'
     next_gen_ucode = 5978
     gvg_group_id = 'test-gvg'
+    sg_tags = 'production,test'

     # connector info
     wwpn1 = '123456789012345'
@@ -272,6 +273,11 @@ class PowerMaxData(object):
         'interval': 3,
         'retries': 120}

+    vol_type_extra_specs_tags = {
+        'storagetype:storagegrouptags': u'good, comma, separated,list'}
+    vol_type_extra_specs_tags_bad = {
+        'storagetype:storagegrouptags': u'B&d, [list]'}
+
     extra_specs_migrate = deepcopy(extra_specs)
     extra_specs_migrate[utils.PORTGROUPNAME] = port_group_name_f

@@ -308,6 +314,9 @@ class PowerMaxData(object):
     rep_extra_specs_legacy = deepcopy(rep_extra_specs_ode)
     rep_extra_specs_legacy['mode'] = 'Synchronous'

+    extra_specs_tags = deepcopy(extra_specs)
+    extra_specs_tags.update({utils.STORAGE_GROUP_TAGS: sg_tags})
+
     test_volume_type_1 = volume_type.VolumeType(
         id='2b06255d-f5f0-4520-a953-b029196add6a', name='abc',
         extra_specs=extra_specs)
@@ -415,6 +424,10 @@ class PowerMaxData(object):
         utils.OTHER_PARENT_SG: parent_sg_i, utils.FAST_SG:
             storagegroup_name_i, utils.NO_SLO_SG: no_slo_sg_name})

+    masking_view_dict_tags = deepcopy(masking_view_dict)
+    masking_view_dict_tags.update(
+        {'tag_list': sg_tags})
+
     # vmax data
     # sloprovisioning
     compression_info = {'symmetrixId': ['000197800128']}
@@ -1035,6 +1048,9 @@ class PowerMaxData(object):
         "vp_saved_percent": 99.9
    }

+    storage_group_with_tags = deepcopy(add_volume_sg_info_dict)
+    storage_group_with_tags.update({"tags": sg_tags})
+
     data_dict = {volume_id: volume_info_dict}
     platform = 'Linux-4.4.0-104-generic-x86_64-with-Ubuntu-16.04-xenial'
     unisphere_version = u'V9.1.0.5'
@@ -232,6 +232,8 @@ class FakeRequestsSession(object):
                     break
             elif 'info' in url:
                 return_object = self.data.version_details
+            elif 'tag' in url:
+                return_object = []
             else:
                 for symm in self.data.symmetrix:
                     if symm['symmetrixId'] in url:
@@ -1039,6 +1039,26 @@ class PowerMaxCommonTest(test.TestCase):
         ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
         self.assertEqual('NONE', extra_specs[utils.WORKLOAD])

+    def test_set_vmax_extra_specs_tags_not_set(self):
+        srp_record = self.common.get_attributes_from_cinder_config()
+        extra_specs = self.common._set_vmax_extra_specs(
+            self.data.vol_type_extra_specs, srp_record)
+        self.assertTrue('storagetype:storagegrouptags' not in extra_specs)
+
+    def test_set_vmax_extra_specs_tags_set_correctly(self):
+        srp_record = self.common.get_attributes_from_cinder_config()
+        extra_specs = self.common._set_vmax_extra_specs(
+            self.data.vol_type_extra_specs_tags, srp_record)
+        self.assertEqual(
+            self.data.vol_type_extra_specs_tags[utils.STORAGE_GROUP_TAGS],
+            extra_specs[utils.STORAGE_GROUP_TAGS])
+
+    def test_set_vmax_extra_specs_tags_set_incorrectly(self):
+        srp_record = self.common.get_attributes_from_cinder_config()
+        self.assertRaises(exception.VolumeBackendAPIException,
+                          self.common._set_vmax_extra_specs,
+                          self.data.vol_type_extra_specs_tags_bad, srp_record)
+
     def test_delete_volume_from_srp_success(self):
         array = self.data.array
         device_id = self.data.device_id
@@ -3127,3 +3147,87 @@ class PowerMaxCommonTest(test.TestCase):
             exception.VolumeBackendAPIException,
             self.common.update_metadata, model_update, existing_metadata,
             object_metadata)
+
+    @mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
+                       return_value=tpd.PowerMaxData.add_volume_sg_info_dict)
+    def test_get_tags_of_storage_group_none(self, mock_sg):
+        self.assertIsNone(self.common.get_tags_of_storage_group(
+            self.data.array, self.data.defaultstoragegroup_name))
+
+    @mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
+                       return_value=tpd.PowerMaxData.storage_group_with_tags)
+    def test_get_tags_of_storage_group_exists(self, mock_sg):
+        tag_list = self.common.get_tags_of_storage_group(
+            self.data.array, self.data.defaultstoragegroup_name)
+        self.assertEqual(tpd.PowerMaxData.sg_tags, tag_list)
+
+    @mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
+                       side_effect=exception.APIException)
+    def test_get_tags_of_storage_group_exception(self, mock_sg):
+        self.assertIsNone(self.common.get_tags_of_storage_group(
+            self.data.array, self.data.storagegroup_name_f))
+
+    @mock.patch.object(rest.PowerMaxRest, 'add_storage_array_tags')
+    @mock.patch.object(rest.PowerMaxRest, 'get_array_tags',
+                       return_value=[])
+    def test_check_and_add_tags_to_storage_array(
+            self, mock_get_tags, mock_add_tags):
+        array_tag_list = ['OpenStack']
+        self.common._check_and_add_tags_to_storage_array(
+            self.data.array, array_tag_list, self.data.extra_specs)
+        mock_add_tags.assert_called_with(
+            self.data.array, array_tag_list, self.data.extra_specs)
+
+    @mock.patch.object(rest.PowerMaxRest, 'add_storage_array_tags')
+    @mock.patch.object(rest.PowerMaxRest, 'get_array_tags',
+                       return_value=[])
+    def test_check_and_add_tags_to_storage_array_add_2_tags(
+            self, mock_get_tags, mock_add_tags):
+        array_tag_list = ['OpenStack', 'Production']
+        self.common._check_and_add_tags_to_storage_array(
+            self.data.array, array_tag_list, self.data.extra_specs)
+        mock_add_tags.assert_called_with(
+            self.data.array, array_tag_list, self.data.extra_specs)
+
+    @mock.patch.object(rest.PowerMaxRest, 'add_storage_array_tags')
+    @mock.patch.object(rest.PowerMaxRest, 'get_array_tags',
+                       return_value=['Production'])
+    def test_check_and_add_tags_to_storage_array_add_1_tags(
+            self, mock_get_tags, mock_add_tags):
+        array_tag_list = ['OpenStack', 'Production']
+        add_tag_list = ['OpenStack']
+        self.common._check_and_add_tags_to_storage_array(
+            self.data.array, array_tag_list, self.data.extra_specs)
+        mock_add_tags.assert_called_with(
+            self.data.array, add_tag_list, self.data.extra_specs)
+
+    @mock.patch.object(rest.PowerMaxRest, 'add_storage_array_tags')
+    @mock.patch.object(rest.PowerMaxRest, 'get_array_tags',
+                       return_value=['openstack'])
+    def test_check_and_add_tags_to_storage_array_already_tagged(
+            self, mock_get_tags, mock_add_tags):
+        array_tag_list = ['OpenStack']
+        self.common._check_and_add_tags_to_storage_array(
+            self.data.array, array_tag_list, self.data.extra_specs)
+        mock_add_tags.assert_not_called()
+
+    @mock.patch.object(rest.PowerMaxRest, 'get_array_tags',
+                       return_value=[])
+    def test_check_and_add_tags_to_storage_array_invalid_tag(
+            self, mock_get_tags):
+        array_tag_list = ['Open$tack']
+        self.assertRaises(
+            exception.VolumeBackendAPIException,
+            self.common._check_and_add_tags_to_storage_array,
+            self.data.array, array_tag_list, self.data.extra_specs)
+
+    def test_validate_storage_group_tag_list_good_tag_list(self):
+        self.common._validate_storage_group_tag_list(
+            self.data.vol_type_extra_specs_tags)
+
+    @mock.patch.object(utils.PowerMaxUtils, 'verify_tag_list')
+    def test_validate_storage_group_tag_list_no_tag_list(
+            self, mock_verify):
+        self.common._validate_storage_group_tag_list(
+            self.data.extra_specs)
+        mock_verify.assert_not_called()
@@ -234,7 +234,8 @@ class PowerMaxMaskingTest(test.TestCase):

     @mock.patch.object(
         rest.PowerMaxRest, 'get_storage_group',
-        side_effect=[tpd.PowerMaxData.storagegroup_name_i, None, None])
+        side_effect=[tpd.PowerMaxData.storagegroup_name_i, None,
+                     tpd.PowerMaxData.storagegroup_name_i])
     @mock.patch.object(
         provision.PowerMaxProvision, 'create_storage_group',
         side_effect=[tpd.PowerMaxData.storagegroup_name_i, None])
@@ -243,11 +244,21 @@ class PowerMaxMaskingTest(test.TestCase):
         self.driver.masking._get_or_create_storage_group(
             self.data.array, self.maskingviewdict,
             self.data.storagegroup_name_i, self.extra_specs)
+        self.assertEqual(3, mock_get_sg.call_count)
+        self.assertEqual(1, mock_sg.call_count)
+
+    @mock.patch.object(
+        rest.PowerMaxRest, 'get_storage_group',
+        side_effect=[None, tpd.PowerMaxData.storagegroup_name_i])
+    @mock.patch.object(
+        provision.PowerMaxProvision, 'create_storage_group',
+        side_effect=[tpd.PowerMaxData.storagegroup_name_i])
+    def test_get_or_create_storage_group_is_parent(self, mock_sg, mock_get_sg):
         self.driver.masking._get_or_create_storage_group(
             self.data.array, self.maskingviewdict,
             self.data.storagegroup_name_i, self.extra_specs, True)
-        self.assertEqual(3, mock_get_sg.call_count)
-        self.assertEqual(2, mock_sg.call_count)
+        self.assertEqual(2, mock_get_sg.call_count)
+        self.assertEqual(1, mock_sg.call_count)

     @mock.patch.object(masking.PowerMaxMasking, '_move_vol_from_default_sg',
                        return_value=None)
@@ -271,7 +282,8 @@ class PowerMaxMaskingTest(test.TestCase):
                 tpd.PowerMaxData.storagegroup_name_i]):
             _, msg = (self.driver.masking._check_existing_storage_group(
                 self.data.array, self.maskingviewdict['maskingview_name'],
-                self.data.defaultstoragegroup_name, masking_view_dict))
+                self.data.defaultstoragegroup_name, masking_view_dict,
+                self.data.extra_specs))
             self.assertIsNone(msg)
             mock_create_sg.assert_not_called()

@@ -280,7 +292,8 @@ class PowerMaxMaskingTest(test.TestCase):
                 tpd.PowerMaxData.parent_sg_i, None]):
             _, msg = (self.driver.masking._check_existing_storage_group(
                 self.data.array, self.maskingviewdict['maskingview_name'],
-                self.data.defaultstoragegroup_name, masking_view_dict))
+                self.data.defaultstoragegroup_name, masking_view_dict,
+                self.data.extra_specs))
             self.assertIsNone(msg)
             mock_create_sg.assert_called_once_with(
                 self.data.array, masking_view_dict,
@@ -305,7 +318,8 @@ class PowerMaxMaskingTest(test.TestCase):
         for x in range(0, 4):
             _, msg = (self.driver.masking._check_existing_storage_group(
                 self.data.array, self.maskingviewdict['maskingview_name'],
-                self.data.defaultstoragegroup_name, masking_view_dict))
+                self.data.defaultstoragegroup_name, masking_view_dict,
+                self.data.extra_specs))
             self.assertIsNotNone(msg)
         self.assertEqual(7, mock_get_sg.call_count)
         self.assertEqual(1, mock_move.call_count)
@@ -1050,3 +1064,32 @@ class PowerMaxMaskingTest(test.TestCase):
             self.data.parent_sg_f, self.data.extra_specs)
         mock_rm.assert_called_once()
         self.assertEqual(2, mock_del.call_count)
+
+    @mock.patch.object(utils.PowerMaxUtils, 'verify_tag_list')
+    def test_add_tags_to_storage_group_disabled(self, mock_verify):
+        self.mask._add_tags_to_storage_group(
+            self.data.array, self.data.add_volume_sg_info_dict,
+            self.data.extra_specs)
+        mock_verify.assert_not_called()
+
+    @mock.patch.object(utils.PowerMaxUtils, 'verify_tag_list')
+    def test_add_tags_to_storage_group_enabled(self, mock_verify):
+        self.mask._add_tags_to_storage_group(
+            self.data.array, self.data.add_volume_sg_info_dict,
+            self.data.extra_specs_tags)
+        mock_verify.assert_called()
+
+    @mock.patch.object(utils.PowerMaxUtils, 'get_new_tags')
+    def test_add_tags_to_storage_group_existing_tags(self, mock_inter):
+        self.mask._add_tags_to_storage_group(
+            self.data.array, self.data.storage_group_with_tags,
+            self.data.extra_specs_tags)
+        mock_inter.assert_called()
+
+    @mock.patch.object(rest.PowerMaxRest, 'add_storage_group_tag',
+                       side_effect=[exception.VolumeBackendAPIException])
+    def test_add_tags_to_storage_group_exception(self, mock_except):
+        self.mask._add_tags_to_storage_group(
+            self.data.array, self.data.add_volume_sg_info_dict,
+            self.data.extra_specs_tags)
+        mock_except.assert_called()
@@ -71,6 +71,16 @@ class PowerMaxVolumeMetadataDebugTest(test.TestCase):
             False, False)
         mock_uvim.assert_called_once()

+    @mock.patch.object(
+        metadata.PowerMaxVolumeMetadata, 'update_volume_info_metadata',
+        return_value={})
+    def test_capture_attach_info_tags(self, mock_uvim):
+        self.volume_metadata.capture_attach_info(
+            self.data.test_volume, self.data.extra_specs,
+            self.data.masking_view_dict_tags, self.data.fake_host,
+            False, False)
+        mock_uvim.assert_called_once()
+
     @mock.patch.object(
         metadata.PowerMaxVolumeMetadata, 'update_volume_info_metadata',
         return_value={})
@@ -193,6 +203,20 @@ class PowerMaxVolumeMetadataDebugTest(test.TestCase):
             sg_list=sg_list)
         self.assertEqual(result_dict, volume_metadata)

+    def test_fill_volume_trace_dict_array_tags(self):
+        datadict = {}
+        volume_trace_dict = {}
+        volume_key_value = {}
+        result_dict = {'successful_operation': 'create',
+                       'volume_id': self.data.test_volume.id,
+                       'array_tag_list': ['one', 'two']}
+        volume_metadata = self.volume_metadata._fill_volume_trace_dict(
+            self.data.test_volume.id, 'create', False, target_name=None,
+            datadict=datadict, volume_key_value=volume_key_value,
+            volume_trace_dict=volume_trace_dict,
+            array_tag_list=['one', 'two'])
+        self.assertEqual(result_dict, volume_metadata)
+
     @mock.patch.object(utils.PowerMaxUtils, 'merge_dicts',
                        return_value={})
     def test_consolidate_volume_trace_list(self, mock_m2d):
@@ -620,3 +620,115 @@ class PowerMaxUtilsTest(test.TestCase):
         sl_3, wl_3 = self.utils.get_service_level_workload(extra_specs)
         self.assertEqual('Diamond', sl_3)
         self.assertEqual('DSS', wl_3)
+
+    def test_get_new_tags_none(self):
+        list_str1 = 'finance, production, test'
+        list_str2 = 'production,test,finance'
+
+        self.assertEqual(
+            [], self.utils.get_new_tags(list_str1, list_str2))
+
+    def test_get_new_tags_one(self):
+        list_str1 = 'finance, production, test'
+        list_str2 = 'production,test'
+
+        self.assertEqual(
+            ['finance'], self.utils.get_new_tags(list_str1, list_str2))
+
+    def test_get_new_tags_two(self):
+        list_str1 = 'finance, production, test, test2'
+        list_str2 = 'production,test'
+
+        self.assertEqual(
+            ['finance', 'test2'], self.utils.get_new_tags(
+                list_str1, list_str2))
+
+    def test_get_new_tags_case(self):
+        list_str1 = 'Finance, Production, test, tEst2'
+        list_str2 = 'production,test'
+
+        self.assertEqual(
+            ['Finance', 'tEst2'], self.utils.get_new_tags(
+                list_str1, list_str2))
+
+    def test_get_new_tags_empty_string_first(self):
+        list_str1 = ''
+        list_str2 = 'production,test'
+
+        self.assertEqual(
+            [], self.utils.get_new_tags(
+                list_str1, list_str2))
+
+    def test_get_new_tags_empty_string_second(self):
+        list_str1 = 'production,test'
+        list_str2 = ' '
+
+        self.assertEqual(
+            ['production', 'test'], self.utils.get_new_tags(
+                list_str1, list_str2))
+
+    def test_get_intersection(self):
+        list_str1 = 'finance,production'
+        list_str2 = 'production'
+
+        common_list = self.utils._get_intersection(
+            list_str1, list_str2)
+
+        self.assertEqual(['production'], common_list)
+
+    def test_get_intersection_unordered_list(self):
+        list_str1 = 'finance,production'
+        list_str2 = 'production, finance'
+
+        common_list = (
+            self.utils._get_intersection(list_str1, list_str2))
+
+        self.assertEqual(['finance', 'production'], common_list)
+
+    def test_verify_tag_list_good(self):
+        tag_list = ['no', 'InValid', 'characters', 'dash-allowed',
+                    '123', 'underscore_allowed',
+                    ' leading_space', 'trailing-space ']
+        self.assertTrue(self.utils.verify_tag_list(tag_list))
+
+    def test_verify_tag_list_space(self):
+        tag_list = ['bad space']
+        self.assertFalse(self.utils.verify_tag_list(tag_list))
+
+    def test_verify_tag_list_forward_slash(self):
+        tag_list = ['\\forward\\slash']
+        self.assertFalse(self.utils.verify_tag_list(tag_list))
+
+    def test_verify_tag_list_square_bracket(self):
+        tag_list = ['[squareBrackets]']
+        self.assertFalse(self.utils.verify_tag_list(tag_list))
+
+    def test_verify_tag_list_backward_slash(self):
+        tag_list = ['/backward/slash']
+        self.assertFalse(self.utils.verify_tag_list(tag_list))
+
+    def test_verify_tag_list_curly_bracket(self):
+        tag_list = ['{curlyBrackets}']
+        self.assertFalse(self.utils.verify_tag_list(tag_list))
+
+    def test_verify_tag_list_empty_list(self):
+        tag_list = []
+        self.assertFalse(self.utils.verify_tag_list(tag_list))
+
+    def test_verify_tag_list_not_a_list(self):
+        tag_list = '1,2,3,4'
+        self.assertFalse(self.utils.verify_tag_list(tag_list))
+
+    def test_verify_tag_list_exceeds_8(self):
+        tag_list = ['1', '2', '3', '4', '5', '6', '7', '8', '9']
+        self.assertFalse(self.utils.verify_tag_list(tag_list))
+
+    def test_convert_list_to_string(self):
+        input_list = ['one', 'two', 'three']
+        output_string = self.utils.convert_list_to_string(input_list)
+        self.assertEqual('one,two,three', output_string)
+
+    def test_convert_list_to_string_input_string(self):
+        input_list = 'one,two,three'
+        output_string = self.utils.convert_list_to_string(input_list)
+        self.assertEqual('one,two,three', output_string)
@@ -137,7 +137,10 @@ powermax_opts = [
     cfg.ListOpt(utils.POWERMAX_PORT_GROUPS,
                 bounds=True,
                 help='List of port groups containing frontend ports '
-                     'configured prior for server connection.')]
+                     'configured prior for server connection.'),
+    cfg.ListOpt(utils.POWERMAX_ARRAY_TAG_LIST,
+                bounds=True,
+                help='List of user assigned name for storage array.')]


 CONF.register_opts(powermax_opts, group=configuration.SHARED_CONF_GROUP)
@@ -181,6 +184,7 @@ class PowerMaxCommon(object):
         self.extend_replicated_vol = False
         self.rep_devices = []
         self.failover = False
+        self.powermax_array_tag_list = None

         # Gather environment info
         self._get_replication_info()
@@ -214,6 +218,8 @@ class PowerMaxCommon(object):
         self.snapvx_unlink_limit = self._get_unlink_configuration_value(
             utils.VMAX_SNAPVX_UNLINK_LIMIT,
             utils.POWERMAX_SNAPVX_UNLINK_LIMIT)
+        self.powermax_array_tag_list = self.configuration.safe_get(
+            utils.POWERMAX_ARRAY_TAG_LIST)
         self.pool_info['backend_name'] = (
             self.configuration.safe_get('volume_backend_name'))
         mosr = volume_utils.get_max_over_subscription_ratio(
@@ -457,9 +463,13 @@ class PowerMaxCommon(object):
             model_update, volume.metadata, self.get_volume_metadata(
                 volume_dict['array'], volume_dict['device_id']))

+        array_tag_list = self.get_tags_of_storage_array(
+            extra_specs[utils.ARRAY])
+
         self.volume_metadata.capture_create_volume(
             volume_dict['device_id'], volume, group_name, group_id,
-            extra_specs, rep_info_dict, 'create')
+            extra_specs, rep_info_dict, 'create',
+            array_tag_list=array_tag_list)

         LOG.info("Leaving create_volume: %(name)s. Volume dict: %(dict)s.",
                  {'name': volume_name, 'dict': volume_dict})
@@ -523,10 +533,12 @@ class PowerMaxCommon(object):
         model_update = self.update_metadata(
             model_update, volume.metadata, self.get_volume_metadata(
                 clone_dict['array'], clone_dict['device_id']))
+        array_tag_list = self.get_tags_of_storage_array(
+            extra_specs[utils.ARRAY])
         self.volume_metadata.capture_create_volume(
             clone_dict['device_id'], volume, None, None,
             extra_specs, rep_info_dict, 'createFromSnapshot',
-            source_snapshot_id=snapshot.id)
+            source_snapshot_id=snapshot.id, array_tag_list=array_tag_list)

         return model_update

@@ -553,11 +565,14 @@ class PowerMaxCommon(object):
         model_update = self.update_metadata(
             model_update, clone_volume.metadata, self.get_volume_metadata(
                 clone_dict['array'], clone_dict['device_id']))
+        array_tag_list = self.get_tags_of_storage_array(
+            extra_specs[utils.ARRAY])
         self.volume_metadata.capture_create_volume(
             clone_dict['device_id'], clone_volume, None, None,
             extra_specs, rep_info_dict, 'createFromVolume',
             temporary_snapvx=clone_dict.get('snap_name'),
-            source_device_id=clone_dict.get('source_device_id'))
+            source_device_id=clone_dict.get('source_device_id'),
+            array_tag_list=array_tag_list)
         return model_update

     def _replicate_volume(self, volume, volume_name, volume_dict, extra_specs,
@@ -870,11 +885,19 @@ class PowerMaxCommon(object):
                 rep_extra_specs[utils.ARRAY], remote_port_group))
         device_info_dict['is_multipath'] = is_multipath

+        array_tag_list = self.get_tags_of_storage_array(
+            extra_specs[utils.ARRAY])
+        if array_tag_list:
+            masking_view_dict['array_tag_list'] = array_tag_list
+
         if is_multiattach and LOG.isEnabledFor(logging.DEBUG):
             masking_view_dict['mv_list'], masking_view_dict['sg_list'] = (
                 self._get_mvs_and_sgs_from_volume(
                     extra_specs[utils.ARRAY],
                     masking_view_dict[utils.DEVICE_ID]))
+        elif not is_multiattach and LOG.isEnabledFor(logging.DEBUG):
+            masking_view_dict['tag_list'] = self.get_tags_of_storage_group(
+                extra_specs[utils.ARRAY], masking_view_dict[utils.SG_NAME])

         self.volume_metadata.capture_attach_info(
             volume, extra_specs, masking_view_dict, connector['host'],
@@ -882,6 +905,35 @@ class PowerMaxCommon(object):

         return device_info_dict

+    def get_tags_of_storage_group(self, array, storage_group_name):
+        """Get the tag information from a storage group
+
+        :param array: serial number of array
+        :param storage_group_name: storage group name
+
+        :returns: tag list
+        """
+        try:
+            storage_group = self.rest.get_storage_group(
+                array, storage_group_name)
+        except Exception:
+            return None
+        return storage_group.get('tags')
+
+    def get_tags_of_storage_array(self, array):
+        """Get the tag information from an array
+
+        :param array: serial number of array
+
+        :returns: tag list
+        """
+        tag_name_list = None
+        try:
+            tag_name_list = self.rest.get_array_tags(array)
+        except Exception:
+            pass
+        return tag_name_list
+
     def _attach_metro_volume(self, volume, connector, is_multiattach,
                              extra_specs, rep_extra_specs):
         """Helper method to attach a metro volume.
@@ -1929,6 +1981,8 @@ class PowerMaxCommon(object):
             LOG.error(error_message)
             raise exception.VolumeBackendAPIException(message=error_message)

+        self._validate_storage_group_tag_list(extra_specs)
+
         extra_specs[utils.INTERVAL] = self.interval
         LOG.debug("The interval is set at: %(intervalInSecs)s.",
                   {'intervalInSecs': self.interval})
@@ -1990,6 +2044,10 @@ class PowerMaxCommon(object):
         else:
             extra_specs.pop(utils.DISABLECOMPRESSION, None)

+        self._check_and_add_tags_to_storage_array(
+            extra_specs[utils.ARRAY], self.powermax_array_tag_list,
+            extra_specs)
+
         LOG.debug("SRP is: %(srp)s, Array is: %(array)s "
                   "SLO is: %(slo)s, Workload is: %(workload)s.",
                   {'srp': extra_specs[utils.SRP],
@@ -2005,6 +2063,47 @@ class PowerMaxCommon(object):

         return extra_specs

+    def _validate_storage_group_tag_list(self, extra_specs):
+        """Validate the storagetype:storagegrouptags list
+
+        :param extra_specs: the extra specifications
+        :raises: VolumeBackendAPIException:
+        """
+        tag_list = extra_specs.get(utils.STORAGE_GROUP_TAGS)
+        if tag_list:
+            if not self.utils.verify_tag_list(tag_list.split(',')):
+                exception_message = (_(
+                    "Unable to get verify "
+                    "storagetype:storagegrouptags in the Volume Type. "
+                    "Only alpha-numeric, dashes and underscores "
+                    "allowed. List values must be separated by commas. "
+                    "The number of values must not exceed 8"))
+                raise exception.VolumeBackendAPIException(
+                    message=exception_message)
+            else:
+                LOG.info("The tag list %(tag_list)s has been verified.",
+                         {'tag_list': tag_list})
+
+    def _validate_array_tag_list(self, array_tag_list):
+        """Validate the array tag list
+
+        :param array_tag_list: the array tag list
+        :raises: VolumeBackendAPIException:
+        """
+        if array_tag_list:
+            if not self.utils.verify_tag_list(array_tag_list):
+                exception_message = (_(
+                    "Unable to get verify "
+                    "config option powermax_array_tag_list. "
+                    "Only alpha-numeric, dashes and underscores "
+                    "allowed. List values must be separated by commas. "
+                    "The number of values must not exceed 8"))
+                raise exception.VolumeBackendAPIException(
+                    message=exception_message)
+            else:
+                LOG.info("The tag list %(tag_list)s has been verified.",
+                         {'tag_list': array_tag_list})
+
     def _delete_from_srp(self, array, device_id, volume_name,
                          extra_specs):
         """Delete from srp.
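
Editorial note (not part of the commit): the _check_and_add_tags_to_storage_array()
helper added in the next hunk only sends tags that the array does not already
have, and the comparison is case-insensitive. A standalone sketch of that
reconciliation, with the same inputs and expected results as the unit tests
earlier in this change:

    # Standalone sketch of the "only add what is missing" comparison that
    # _check_and_add_tags_to_storage_array() performs via utils.get_new_tags();
    # inputs and expected results are taken from this commit's unit tests.
    def tags_to_add(requested, existing):
        existing_lower = {tag.lower() for tag in existing}
        return [tag for tag in requested if tag.lower() not in existing_lower]

    print(tags_to_add(['OpenStack', 'Production'], []))              # ['OpenStack', 'Production']
    print(tags_to_add(['OpenStack', 'Production'], ['Production']))  # ['OpenStack']
    print(tags_to_add(['OpenStack'], ['openstack']))                 # [] -> nothing is sent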
@@ -5468,3 +5567,37 @@ class PowerMaxCommon(object):
                     'SourceDeviceLabel': device_label}

         return metadata
+
+    def _check_and_add_tags_to_storage_array(
+            self, serial_number, array_tag_list, extra_specs):
+        """Add tags to a storage group.
+
+        :param serial_number: the array serial number
+        :param array_tag_list: the array tag list
+        :param extra_specs: the extra specifications
+        """
+        if array_tag_list:
+            existing_array_tags = self.rest.get_array_tags(serial_number)
+
+            new_tag_list = self.utils.get_new_tags(
+                self.utils.convert_list_to_string(array_tag_list),
+                self.utils.convert_list_to_string(existing_array_tags))
+            if not new_tag_list:
+                LOG.warning("No new tags to add. Existing tags "
+                            "associated with %(array)s are "
+                            "%(tags)s.",
+                            {'array': serial_number,
+                             'tags': existing_array_tags})
+            else:
+                self._validate_array_tag_list(new_tag_list)
+                LOG.info("Adding the tags %(tag_list)s to %(array)s",
+                         {'tag_list': new_tag_list,
+                          'array': serial_number})
+                try:
+                    self.rest.add_storage_array_tags(
+                        serial_number, new_tag_list, extra_specs)
+                except Exception as ex:
+                    LOG.warning("Unexpected error: %(ex)s. If you still "
+                                "want to add tags to this storage array, "
+                                "please do so on the Unisphere UI.",
+                                {'ex': ex})
@@ -116,9 +116,10 @@ class PowerMaxFCDriver(san.SanDriver, driver.FibreChannelDriver):
               - SnapVX noCopy mode enabled for all links
               - Volume/Snapshot backed metadata inclusion
               - Debug metadata compression and service level info fix
+        4.2.0 - Support of Unisphere storage group and array tags
     """

-    VERSION = "4.1.0"
+    VERSION = "4.2.0"

     # ThirdPartySystems wiki
     CI_WIKI_NAME = "EMC_VMAX_CI"
@@ -121,9 +121,10 @@ class PowerMaxISCSIDriver(san.SanISCSIDriver):
              - SnapVX noCopy mode enabled for all links
              - Volume/Snapshot backed metadata inclusion
              - Debug metadata compression and service level info fix
+        4.2.0 - Support of Unisphere storage group and array tags
     """

-    VERSION = "4.1.0"
+    VERSION = "4.2.0"

     # ThirdPartySystems wiki
     CI_WIKI_NAME = "EMC_VMAX_CI"
@@ -298,7 +298,7 @@ class PowerMaxMasking(object):
         """
         storage_group_name, msg = self._check_existing_storage_group(
             serial_number, maskingview_name, default_sg_name,
-            masking_view_dict)
+            masking_view_dict, extra_specs)
         if not msg:
             portgroup_name = self.rest.get_element_from_masking_view(
                 serial_number, maskingview_name, portgroup=True)
@@ -397,9 +397,11 @@ class PowerMaxMasking(object):
         storagegroup = self.rest.get_storage_group(
             serial_number, storagegroup_name)
         if storagegroup is None:
-            storagegroup = self.provision.create_storage_group(
+            storagegroup_name = self.provision.create_storage_group(
                 serial_number, storagegroup_name, srp, slo, workload,
                 extra_specs, do_disable_compression)
+            storagegroup = self.rest.get_storage_group(
+                serial_number, storagegroup_name)

         if storagegroup is None:
             msg = ("Cannot get or create a storage group: "
@@ -413,11 +415,55 @@ class PowerMaxMasking(object):
             self.rest.update_storagegroup_qos(
                 serial_number, storagegroup_name, extra_specs)

+        # If storagetype:storagegrouptags exist update storage group
+        # to add tags
+        if not parent:
+            self._add_tags_to_storage_group(
+                serial_number, storagegroup, extra_specs)
+
         return msg

+    def _add_tags_to_storage_group(
+            self, serial_number, storage_group, extra_specs):
+        """Add tags to a storage group.
+
+        :param serial_number: the array serial number
+        :param storage_group: the storage group object
+        :param extra_specs: the extra specifications
+        """
+        if utils.STORAGE_GROUP_TAGS in extra_specs:
+            # Check if the tags exist
+            if 'tags' in storage_group:
+                new_tag_list = self.utils.get_new_tags(
+                    extra_specs[utils.STORAGE_GROUP_TAGS],
+                    storage_group['tags'])
+                if not new_tag_list:
+                    LOG.info("No new tags to add. Existing tags "
+                             "associated with %(sg_name)s are "
+                             "%(tags)s.",
+                             {'sg_name': storage_group['storageGroupId'],
+                              'tags': storage_group['tags']})
+            else:
+                new_tag_list = (
+                    extra_specs[utils.STORAGE_GROUP_TAGS].split(","))
+
+            if self.utils.verify_tag_list(new_tag_list):
+                LOG.info("Adding the tags %(tag_list)s to %(sg_name)s",
+                         {'tag_list': new_tag_list,
+                          'sg_name': storage_group['storageGroupId']})
+                try:
+                    self.rest.add_storage_group_tag(
+                        serial_number, storage_group['storageGroupId'],
+                        new_tag_list, extra_specs)
+                except Exception as ex:
+                    LOG.warning("Unexpected error: %(ex)s. If you still "
+                                "want to add tags to this storage group, "
+                                "please do so on the Unisphere UI.",
+                                {'ex': ex})
+
     def _check_existing_storage_group(
             self, serial_number, maskingview_name,
-            default_sg_name, masking_view_dict):
+            default_sg_name, masking_view_dict, extra_specs):
         """Check if the masking view has the child storage group.

         Get the parent storage group associated with a masking view and check
@@ -427,6 +473,7 @@ class PowerMaxMasking(object):
         :param maskingview_name: the masking view name
         :param default_sg_name: the default sg name
         :param masking_view_dict: the masking view dict
+        :param extra_specs: the extra specifications
         :returns: storage group name, msg
         """
         msg = None
@@ -458,6 +505,8 @@ class PowerMaxMasking(object):
             LOG.info("Retrieved child sg %(sg_name)s from %(mv_name)s",
                      {'sg_name': child_sg_name,
                       'mv_name': maskingview_name})
+            self._add_tags_to_storage_group(
+                serial_number, child_sg, extra_specs)
         else:
             msg = self._get_or_create_storage_group(
                 serial_number, masking_view_dict, child_sg_name,
@@ -294,6 +294,7 @@ class PowerMaxVolumeMetadata(object):
         mv_list, sg_list = [], []
         child_storage_group, parent_storage_group = None, None
         initiator_group, port_group = None, None
+        child_storage_group_tag_list = None

         if is_multiattach:
             successful_operation = 'multi_attach'
@@ -302,6 +303,8 @@ class PowerMaxVolumeMetadata(object):
         else:
             successful_operation = 'attach'
             child_storage_group = masking_view_dict[utils.SG_NAME]
+            child_storage_group_tag_list = (
+                masking_view_dict.get(utils.TAG_LIST, None))
             parent_storage_group = masking_view_dict[utils.PARENT_SG_NAME]
             initiator_group = masking_view_dict[utils.IG_NAME]
             port_group = masking_view_dict[utils.PORTGROUPNAME]
@@ -319,7 +322,9 @@ class PowerMaxVolumeMetadata(object):
             host=host, is_multipath=is_multipath,
             identifier_name=self.utils.get_volume_element_name(volume.id),
             openstack_name=volume.display_name,
-            mv_list=mv_list, sg_list=sg_list)
+            mv_list=mv_list, sg_list=sg_list,
+            child_storage_group_tag_list=child_storage_group_tag_list,
+            array_tag_list=masking_view_dict.get('array_tag_list', None))

         volume_metadata = self.update_volume_info_metadata(
             datadict, self.version_dict)
@@ -447,7 +452,8 @@ class PowerMaxVolumeMetadata(object):
     def capture_create_volume(
             self, device_id, volume, group_name, group_id, extra_specs,
             rep_info_dict, successful_operation, source_snapshot_id=None,
-            source_device_id=None, temporary_snapvx=None):
+            source_device_id=None, temporary_snapvx=None,
+            array_tag_list=None):
         """Captures create volume info in volume metadata

         :param device_id: device id
@@ -458,6 +464,8 @@ class PowerMaxVolumeMetadata(object):
         :param rep_info_dict: information gathered from replication
         :param successful_operation: the type of create operation
         :param source_snapshot_id: the source snapshot id
+        :param temporary_snapvx: temporary snapVX
+        :param array_tag_list: array tag list

         :returns: volume_metadata dict
         """
@@ -502,7 +510,8 @@ class PowerMaxVolumeMetadata(object):
                 extra_specs),
             source_device_id=source_device_id,
             temporary_snapvx=temporary_snapvx,
-            target_array_model=target_array_model)
+            target_array_model=target_array_model,
+            array_tag_list=array_tag_list)
         volume_metadata = self.update_volume_info_metadata(
             datadict, self.version_dict)
         self.print_pretty_table(volume_metadata)
@@ -552,6 +552,16 @@ class PowerMaxRest(object):
                       {'array': array})
         return array_details

+    def get_array_tags(self, array):
+        """Get the tags from its serial number.
+
+        :param array: the array serial number
+        :returns: tag list -- list or empty list
+        """
+        target_uri = '/%s/system/tag?array_id=%s' % (U4V_VERSION, array)
+        array_tags = self._get_request(target_uri, 'system')
+        return array_tags.get('tag_name')
+
     def is_next_gen_array(self, array):
         """Check to see if array is a next gen array(ucode 5978 or greater).

@@ -857,6 +867,20 @@ class PowerMaxRest(object):
             array, SLOPROVISIONING, 'storagegroup', payload, version,
             resource_name=storagegroup)

+    def modify_storage_array(self, array, payload):
+        """Modify a storage array (PUT operation).
+
+        :param array: the array serial number
+        :param payload: the request payload
+        :returns: status_code -- int, message -- string, server response
+        """
+        target_uri = '/%s/sloprovisioning/symmetrix/%s' % (U4V_VERSION, array)
+        status_code, message = self.request(target_uri, PUT,
+                                            request_object=payload)
+        operation = 'modify %(res)s resource' % {'res': 'symmetrix'}
+        self.check_status_code_success(operation, status_code, message)
+        return status_code, message
+
     def create_volume_from_sg(self, array, volume_name, storagegroup_name,
                               volume_size, extra_specs):
         """Create a new volume in the given storage group.
@@ -921,6 +945,58 @@ class PowerMaxRest(object):
         volume_dict = {'array': array, 'device_id': device_id}
         return volume_dict

+    def add_storage_group_tag(self, array, storagegroup_name,
+                              tag_list, extra_specs):
+        """Create a new tag(s) on a storage group
+
+        :param array: the array serial number
+        :param storagegroup_name: the storage group name
+        :param tag_list: comma delimited list
+        :param extra_specs: the extra specifications
+        """
+        payload = (
+            {"executionOption": "ASYNCHRONOUS",
+             "editStorageGroupActionParam": {
+                 "tagManagementParam": {
+                     "addTagsParam": {
+                         "tag_name": tag_list
+                     }}}})
+        status_code, job = self.modify_storage_group(
+            array, storagegroup_name, payload)

+        LOG.debug("Add tag to storage group: %(sg_name)s. "
+                  "Status code: %(sc)lu.",
+                  {'sg_name': storagegroup_name,
+                   'sc': status_code})
+
+        self.wait_for_job(
+            'Add tag to storage group', status_code, job, extra_specs)
+
+    def add_storage_array_tags(self, array, tag_list, extra_specs):
+        """Create a new tag(s) on a storage group
+
+        :param array: the array serial number
+        :param tag_list: comma delimited list
+        :param extra_specs: the extra specifications
+        """
+        payload = (
+            {"executionOption": "ASYNCHRONOUS",
+             "editSymmetrixActionParam": {
+                 "tagManagementParam": {
+                     "addTagsParam": {
+                         "tag_name": tag_list
+                     }}}})
+        status_code, job = self.modify_storage_array(
+            array, payload)
+
+        LOG.debug("Add tag to storage array: %(array)s. "
+                  "Status code: %(sc)lu.",
+                  {'array': array,
+                   'sc': status_code})
+
+        self.wait_for_job(
+            'Add tag to storage array', status_code, job, extra_specs)
+
     def check_volume_device_id(self, array, device_id, volume_id,
                                name_id=None):
         """Check if the identifiers match for a given volume.
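
Editorial note (not part of the commit): at the wire level, add_storage_array_tags()
above amounts to a PUT of a tagManagementParam payload against the array's
sloprovisioning resource (add_storage_group_tag() does the same against the
storage group resource). A rough standalone illustration with the requests
library; the Unisphere URL, REST version segment ('91') and credentials are
assumptions for the example only:

    # Rough illustration only -- endpoint base, version segment and credentials
    # are assumed; the payload shape matches add_storage_array_tags() above.
    import requests

    UNISPHERE = 'https://unisphere.example.com:8443/univmax/restapi'
    ARRAY = '000197800123'
    payload = {
        "executionOption": "ASYNCHRONOUS",
        "editSymmetrixActionParam": {
            "tagManagementParam": {
                "addTagsParam": {"tag_name": ["OpenStack", "Production"]}}}}

    response = requests.put(
        '%s/91/sloprovisioning/symmetrix/%s' % (UNISPHERE, ARRAY),
        json=payload, auth=('smc', 'smc'), verify=False)
    print(response.status_code)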
@@ -78,6 +78,8 @@ RDF_CONS_EXEMPT = 'consExempt'
 METROBIAS = 'metro_bias'
 DEFAULT_PORT = 8443
 CLONE_SNAPSHOT_NAME = "snapshot_for_clone"
+STORAGE_GROUP_TAGS = 'storagetype:storagegrouptags'
+TAG_LIST = 'tag_list'

 # Multiattach constants
 IS_MULTIATTACH = 'multiattach'
@@ -109,6 +111,7 @@ POWERMAX_SRP = 'powermax_srp'
 POWERMAX_SERVICE_LEVEL = 'powermax_service_level'
 POWERMAX_PORT_GROUPS = 'powermax_port_groups'
 POWERMAX_SNAPVX_UNLINK_LIMIT = 'powermax_snapvx_unlink_limit'
+POWERMAX_ARRAY_TAG_LIST = 'powermax_array_tag_list'


 class PowerMaxUtils(object):
@@ -977,7 +980,7 @@ class PowerMaxUtils(object):
         """Get the service level and workload combination from extra specs.

         :param extra_specs: extra specifications
-        :return: string, string
+        :returns: string, string
         """
         service_level, workload = 'None', 'None'
         if extra_specs.get(SLO):
@@ -986,3 +989,68 @@ class PowerMaxUtils(object):
                 and 'NONE' not in extra_specs.get(WORKLOAD)):
             workload = extra_specs.get(WORKLOAD)
         return service_level, workload
+
+    def get_new_tags(self, list_str1, list_str2):
+        """Get elements in list_str1 not in list_str2
+
+        :param list_str1: list one in string format
+        :param list_str2: list two in string format
+        :returns: list
+        """
+        list_str1 = re.sub(r"\s+", "", list_str1)
+        if not list_str1:
+            return []
+        common_list = self._get_intersection(
+            list_str1, list_str2)
+
+        my_list1 = sorted(list_str1.split(","))
+        return [x for x in my_list1 if x.lower() not in common_list]
+
+    def _get_intersection(self, list_str1, list_str2):
+        """Get the common values between 2 comma separated list
+
+        :param list_str1: list one
+        :param list_str2: list two
+        :returns: sorted list
+        """
+        list_str1 = re.sub(r"\s+", "", list_str1).lower()
+        list_str2 = re.sub(r"\s+", "", list_str2).lower()
+        my_list1 = sorted(list_str1.split(","))
+        my_list2 = sorted(list_str2.split(","))
+        sorted_common_list = (
+            sorted(list(set(my_list1).intersection(set(my_list2)))))
+        return sorted_common_list
+
+    def verify_tag_list(self, tag_list):
+        """Verify that the tag list has allowable character
+
+        :param tag_list: list of tags
+        :returns: boolean
+        """
+        if not tag_list:
+            return False
+        if not isinstance(tag_list, list):
+            LOG.warning("The list of tags %(tag_list)s is not "
+                        "in list format. Tagging will not proceed.",
+                        {'tag_list': tag_list})
+            return False
+        if len(tag_list) > 8:
+            LOG.warning("The list of tags %(tag_list)s is more "
+                        "than the upper limit of 8. Tagging will not "
+                        "proceed.",
+                        {'tag_list': tag_list})
+            return False
+        for tag in tag_list:
+            tag = tag.strip()
+            if not re.match('^[a-zA-Z0-9_\\-]+$', tag):
+                return False
+        return True
+
+    def convert_list_to_string(self, list_input):
+        """Convert a list to a comma separated list
+
+        :param list_input: list
+        :returns: string or None
+        """
+        return ','.join(map(str, list_input)) if isinstance(
+            list_input, list) else list_input
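
Editorial note (not part of the commit): the verify_tag_list() rules above are
pinned down by the unit tests earlier in this change; the same acceptance rules
summarised as a small runnable check:

    # Editorial summary (not driver code) of the rules verify_tag_list() above
    # enforces: a list of at most 8 tags, letters/digits/dashes/underscores only.
    import re

    def acceptable(tag_list):
        if not isinstance(tag_list, list) or not tag_list or len(tag_list) > 8:
            return False
        return all(re.match('^[a-zA-Z0-9_\\-]+$', tag.strip())
                   for tag in tag_list)

    print(acceptable(['dash-allowed', 'underscore_allowed', '123']))  # True
    print(acceptable(['bad space']))                                  # False
    print(acceptable(['[squareBrackets]']))                           # False
    print(acceptable('1,2,3,4'))                                      # False (not a list)
    print(acceptable(['1', '2', '3', '4', '5', '6', '7', '8', '9']))  # False (more than 8)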
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    Dell EMC PowerMax driver now supports Unisphere storage group
+    and array tagging to allow the user to specify a user defined
+    tag to facilitate easy access and classification.