Merge "Deprecate returning patches from plugins"
commit 88277664f1
@@ -154,11 +154,14 @@ Writing a Plugin
     called before any data processing, providing the raw data. Each plugin in
     the chain can modify the data, so order in which plugins are loaded
     matters here. Returns nothing.
-``before_update(introspection_data,node_info,node_patches,ports_patches,**)``
+``before_update(introspection_data,node_info,**)``
     called after node is found and ports are created, but before data is
-    updated on a node. ``node_patches`` and ``ports_patches`` are JSON
-    patches for node and ports to apply.
-    Please refer to the docstring for details and examples.
+    updated on a node. Please refer to the docstring for details
+    and examples.
+
+.. note::
+    Keyword arguments node_patches and port_patches are also provided, but
+    should not be used in new plugins.

 Make your plugin a setuptools entry point under
 ``ironic_inspector.hooks.processing`` namespace and enable it in the
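For illustration only (this sketch is not part of the change), a plugin written
against the new interface could look roughly like the following; the class name
and the ``/extra/example_marker`` path are invented, while
``base.ProcessingHook`` and ``node_info.patch`` come from the code modified
below::

    from ironic_inspector.plugins import base


    class ExampleExtraHook(base.ProcessingHook):
        """Hypothetical hook following the new interface."""

        def before_processing(self, introspection_data, **kwargs):
            # The raw ramdisk data may still be modified in place here.
            introspection_data.setdefault('example_marker', True)

        def before_update(self, introspection_data, node_info, **kwargs):
            # Apply node changes through NodeInfo instead of filling the
            # deprecated node_patches/ports_patches keyword arguments.
            node_info.patch([{'op': 'add', 'path': '/extra/example_marker',
                              'value': True}])

Such a hook would still be registered as a setuptools entry point in the
``ironic_inspector.hooks.processing`` namespace, as described above.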
@@ -40,32 +40,17 @@ class ProcessingHook(object): # pragma: no cover
         :returns: nothing.
         """

-    def before_update(self, introspection_data, node_info, node_patches,
-                      ports_patches, **kwargs):
+    def before_update(self, introspection_data, node_info, **kwargs):
         """Hook to run before Ironic node update.

         This hook is run after node is found and ports are created,
         just before the node is updated with the data.

-        To update node and/or ports use *node_patches* and *ports_patches*
-        arguments.
-
         :param introspection_data: processed data from the ramdisk.
         :param node_info: NodeInfo instance.
-        :param node_patches: list of JSON patches [RFC 6902] to apply
-                             to the node, e.g.
-                             ::
-                                 [{'op': 'add',
-                                   'path': '/extra/foo',
-                                   'value': 'bar'}]
-        :param ports_patches: dict where keys are port MAC's,
-                              values are lists of JSON patches, e.g.
-                              ::
-                                  {'11:22:33:44:55:55': [
-                                    {'op': 'add', 'path': '/extra/foo',
-                                     'value': 'bar'}
-                                  ]}
-        :param kwargs: used for extensibility without breaking existing hooks
+        :param kwargs: used for extensibility without breaking existing hooks,
+                       currently deprecated arguments node_patches and
+                       ports_patches are provided here.
         :returns: nothing.

         [RFC 6902] - http://tools.ietf.org/html/rfc6902
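A hedged sketch of what the new docstring means for hook authors (this code is
not part of the diff; the ``/extra/foo`` values and the example MAC address are
reused from the removed docstring text)::

    def before_update(self, introspection_data, node_info, **kwargs):
        # Old-style hooks appended to the node_patches / ports_patches
        # arguments; those are now only reachable through kwargs and are
        # deprecated (see _run_post_hooks in this change).
        # New-style hooks talk to NodeInfo directly:
        node_info.patch([{'op': 'add', 'path': '/extra/foo',
                          'value': 'bar'}])
        node_info.patch_port('11:22:33:44:55:55',
                             [{'op': 'add', 'path': '/extra/foo',
                               'value': 'bar'}])

``NodeInfo`` also offers ``update_properties(**props)``, which the scheduler
plugin below is switched to.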
@@ -25,8 +25,7 @@ class ExampleProcessingHook(base.ProcessingHook): # pragma: no cover
     def before_processing(self, introspection_data, **kwargs):
         LOG.debug('before_processing: %s', introspection_data)

-    def before_update(self, introspection_data, node_info, node_patches,
-                      ports_patches, **kwargs):
+    def before_update(self, introspection_data, node_info, **kwargs):
         LOG.debug('before_update: %s (node %s)', introspection_data,
                   node_info.uuid)

@@ -41,8 +41,7 @@ class ExtraHardwareHook(base.ProcessingHook):
         swift_api = swift.SwiftAPI()
         swift_api.create_object(name, data)

-    def before_update(self, introspection_data, node_info, node_patches,
-                      ports_patches, **kwargs):
+    def before_update(self, introspection_data, node_info, **kwargs):
         """Stores the 'data' key from introspection_data in Swift.

         If the 'data' key exists, updates Ironic extra column
@@ -60,6 +59,5 @@ class ExtraHardwareHook(base.ProcessingHook):
         self._store_extra_hardware(name,
                                    json.dumps(introspection_data['data']))

-        node_patches.append({'op': 'add',
-                             'path': '/extra/hardware_swift_object',
-                             'value': name})
+        node_info.patch([{'op': 'add', 'path': '/extra/hardware_swift_object',
+                          'value': name}])
@@ -51,8 +51,7 @@ class RaidDeviceDetection(base.ProcessingHook):
                             'value for "local_gb"'))
             introspection_data['local_gb'] = 1

-    def before_update(self, introspection_data, node_info, node_patches,
-                      ports_patches, **kwargs):
+    def before_update(self, introspection_data, node_info, **kwargs):
         if 'block_devices' not in introspection_data:
             LOG.warning(_LW('No block device was received from ramdisk'))
             return
@@ -78,7 +77,7 @@ class RaidDeviceDetection(base.ProcessingHook):
             LOG.warning(_LW('No new devices were found'))
             return

-        node_patches.extend([
+        node_info.patch([
             {'op': 'remove',
              'path': '/extra/block_devices'},
             {'op': 'add',
@@ -89,6 +88,6 @@ class RaidDeviceDetection(base.ProcessingHook):
         else:
             # No previously discovered devices - save the inspector block
             # devices in node.extra
-            node_patches.append({'op': 'add',
-                                 'path': '/extra/block_devices',
-                                 'value': introspection_data['block_devices']})
+            node_info.patch([{'op': 'add',
+                              'path': '/extra/block_devices',
+                              'value': introspection_data['block_devices']}])
@@ -49,15 +49,13 @@ class SchedulerHook(base.ProcessingHook):
                      'memory %(memory_mb)s MiB, disk %(local_gb)s GiB'),
                  {key: introspection_data.get(key) for key in self.KEYS})

-    def before_update(self, introspection_data, node_info, node_patches,
-                      ports_patches, **kwargs):
+    def before_update(self, introspection_data, node_info, **kwargs):
         """Update node with scheduler properties."""
         overwrite = CONF.processing.overwrite_existing
-        patches = [{'op': 'add', 'path': '/properties/%s' % key,
-                    'value': str(introspection_data[key])}
-                   for key in self.KEYS
-                   if overwrite or not node_info.node().properties.get(key)]
-        node_patches.extend(patches)
+        properties = {key: str(introspection_data[key])
+                      for key in self.KEYS if overwrite or
+                      not node_info.node().properties.get(key)}
+        node_info.update_properties(**properties)


 class ValidateInterfacesHook(base.ProcessingHook):
@@ -129,8 +127,7 @@ class ValidateInterfacesHook(base.ProcessingHook):
         valid_macs = [iface['mac'] for iface in valid_interfaces.values()]
         introspection_data['macs'] = valid_macs

-    def before_update(self, introspection_data, node_info, node_patches,
-                      ports_patches, **kwargs):
+    def before_update(self, introspection_data, node_info, **kwargs):
         """Drop ports that are not present in the data."""
         if CONF.processing.keep_ports == 'present':
             expected_macs = {
@@ -18,7 +18,7 @@ from ironicclient import exceptions
 from oslo_config import cfg
 from oslo_log import log

-from ironic_inspector.common.i18n import _, _LE, _LI
+from ironic_inspector.common.i18n import _, _LE, _LI, _LW
 from ironic_inspector.common import swift
 from ironic_inspector import firewall
 from ironic_inspector import node_cache
@@ -124,16 +124,20 @@ def process(introspection_data):
 def _run_post_hooks(node_info, introspection_data):
     hooks = plugins_base.processing_hooks_manager()

-    node_patches = []
-    port_patches = {}
     for hook_ext in hooks:
+        node_patches = []
+        ports_patches = {}
         hook_ext.obj.before_update(introspection_data, node_info,
-                                   node_patches, port_patches)
+                                   node_patches=node_patches,
+                                   ports_patches=ports_patches)
+        if node_patches:
+            LOG.warn(_LW('Using node_patches is deprecated'))
+            node_info.patch(node_patches)

-    node_patches = [p for p in node_patches if p]
-    port_patches = {mac: patch for (mac, patch) in port_patches.items()
-                    if patch and mac in node_info.ports()}
-    return node_patches, port_patches
+        if ports_patches:
+            LOG.warn(_LW('Using ports_patches is deprecated'))
+            for mac, patches in ports_patches.items():
+                node_info.patch_port(mac, patches)


 def _process_node(node, introspection_data, node_info):
@@ -142,8 +146,7 @@ def _process_node(node, introspection_data, node_info):

     node_info.create_ports(introspection_data.get('macs') or ())

-    node_patches, port_patches = _run_post_hooks(node_info,
-                                                 introspection_data)
+    _run_post_hooks(node_info, introspection_data)

     if CONF.processing.store_data == 'swift':
         swift_object_name = swift.store_introspection_data(introspection_data,
@@ -152,18 +155,13 @@ def _process_node(node, introspection_data, node_info):
                      'Swift in object %(obj)s'),
                  {'node': node_info.uuid, 'obj': swift_object_name})
         if CONF.processing.store_data_location:
-            node_patches.append({'op': 'add',
-                                 'path': '/extra/%s' %
-                                 CONF.processing.store_data_location,
-                                 'value': swift_object_name})
+            node_info.patch([{'op': 'add', 'path': '/extra/%s' %
+                              CONF.processing.store_data_location,
+                              'value': swift_object_name}])
     else:
         LOG.debug('Swift support is disabled, introspection data for node %s '
                   'won\'t be stored', node_info.uuid)

-    node_info.patch(node_patches)
-    for mac, patches in port_patches.items():
-        node_info.patch_port(mac, patches)
-
     ironic = utils.get_client()
     firewall.update_filters(ironic)

@@ -197,7 +195,7 @@ def _finish_set_ipmi_credentials(ironic, node, node_info, introspection_data,
             introspection_data.get('ipmi_address')):
         patch.append({'op': 'add', 'path': '/driver_info/ipmi_address',
                       'value': introspection_data['ipmi_address']})
-    ironic.node.update(node_info.uuid, patch)
+    node_info.patch(patch)

     for attempt in range(_CREDENTIALS_WAIT_RETRIES):
         try:
@@ -73,6 +73,16 @@ class BaseTest(unittest.TestCase):
         actual = sorted(actual, key=lambda p: p['path'])
         self.assertEqual(expected, actual)

+    def assertCalledWithPatch(self, expected, mock_call):
+        def _get_patch_param(call):
+            try:
+                return call[0][1]
+            except IndexError:
+                return call[0][0]
+
+        actual = sum(map(_get_patch_param, mock_call.call_args_list), [])
+        self.assertPatchEqual(actual, expected)
+

 class NodeTest(BaseTest):
     def setUp(self):
@@ -147,7 +147,8 @@ class Test(Base):
         self.assertEqual({'uuid': self.uuid}, res)
         eventlet.greenthread.sleep(DEFAULT_SLEEP)

-        self.cli.node.update.assert_any_call(self.uuid, self.patch)
+        self.cli.node.update.assert_called_once_with(self.uuid, mock.ANY)
+        self.assertCalledWithPatch(self.patch, self.cli.node.update)
         self.cli.port.create.assert_called_once_with(
             node_uuid=self.uuid, address='11:22:33:44:55:66')

@@ -176,8 +177,8 @@ class Test(Base):
         self.assertTrue(res['ipmi_setup_credentials'])
         eventlet.greenthread.sleep(DEFAULT_SLEEP)

-        self.cli.node.update.assert_any_call(self.uuid, self.patch)
-        self.cli.node.update.assert_any_call(self.uuid, patch_credentials)
+        self.assertCalledWithPatch(self.patch + patch_credentials,
+                                   self.cli.node.update)
         self.cli.port.create.assert_called_once_with(
             node_uuid=self.uuid, address='11:22:33:44:55:66')

@@ -398,8 +398,8 @@ class TestPlugins(unittest.TestCase):
         mgr = plugins_base.processing_hooks_manager()
         mgr.map_method('before_processing', 'introspection_data')
         mock_pre.assert_called_once_with(mock.ANY, 'introspection_data')
-        mgr.map_method('before_update', 'node_info', {}, [], {})
-        mock_post.assert_called_once_with(mock.ANY, 'node_info', {}, [], {})
+        mgr.map_method('before_update', 'node_info', {})
+        mock_post.assert_called_once_with(mock.ANY, 'node_info', {})

     def test_manager_is_cached(self):
         self.assertIs(plugins_base.processing_hooks_manager(),
@@ -12,52 +12,39 @@
 # limitations under the License.

 import json
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+
+import mock

+from ironic_inspector import node_cache
 from ironic_inspector.plugins import extra_hardware
 from ironic_inspector.test import base as test_base


 @mock.patch.object(extra_hardware.swift, 'SwiftAPI', autospec=True)
+@mock.patch.object(node_cache.NodeInfo, 'patch')
 class TestExtraHardware(test_base.NodeTest):
-    def setUp(self):
-        super(TestExtraHardware, self).setUp()
-        self.hook = extra_hardware.ExtraHardwareHook()
+    hook = extra_hardware.ExtraHardwareHook()

-    def _before_update(self, introspection_data):
-        node_patches = []
-        ports_patches = {}
-        self.hook.before_update(introspection_data, self.node_info,
-                                node_patches, ports_patches)
-        self.assertFalse(ports_patches)
-        return node_patches
-
-    def test_data_recieved(self, swift_mock):
+    def test_data_recieved(self, patch_mock, swift_mock):
         introspection_data = {
             'data': [['memory', 'total', 'size', '4294967296'],
                      ['cpu', 'physical', 'number', '1'],
                      ['cpu', 'logical', 'number', '1']]}
         self.hook.before_processing(introspection_data)
-        node_patches = self._before_update(introspection_data)
+        self.hook.before_update(introspection_data, self.node_info)

         swift_conn = swift_mock.return_value
         name = 'extra_hardware-%s' % self.uuid
         data = json.dumps(introspection_data['data'])
         swift_conn.create_object.assert_called_once_with(name, data)
-        self.assertEqual('add',
-                         node_patches[0]['op'])
-        self.assertEqual('/extra/hardware_swift_object',
-                         node_patches[0]['path'])
-        self.assertEqual(name,
-                         node_patches[0]['value'])
+        patch_mock.assert_called_once_with(
+            [{'op': 'add', 'path': '/extra/hardware_swift_object',
+              'value': name}])

-    def test_no_data_recieved(self, swift_mock):
+    def test_no_data_recieved(self, patch_mock, swift_mock):
         introspection_data = {'cats': 'meow'}
         swift_conn = swift_mock.return_value
         self.hook.before_processing(introspection_data)
-        node_patches = self._before_update(introspection_data)
-        self.assertFalse(node_patches)
+        self.hook.before_update(introspection_data, self.node_info)
+        self.assertFalse(patch_mock.called)
         self.assertFalse(swift_conn.create_object.called)
@@ -11,24 +11,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import mock
+
+from ironic_inspector import node_cache
 from ironic_inspector.plugins import base
 from ironic_inspector.plugins import raid_device
 from ironic_inspector.test import base as test_base


 class TestRaidDeviceDetection(test_base.NodeTest):
-
-    def setUp(self):
-        super(TestRaidDeviceDetection, self).setUp()
-        self.hook = raid_device.RaidDeviceDetection()
-
-    def _before_update(self, introspection_data):
-        node_patches = []
-        ports_patches = {}
-        self.hook.before_update(introspection_data, self.node_info,
-                                node_patches, ports_patches)
-        self.assertFalse(ports_patches)
-        return node_patches
+    hook = raid_device.RaidDeviceDetection()

     def test_loadable_by_name(self):
         names = ('raid_device', 'root_device_hint')
@@ -50,63 +42,52 @@ class TestRaidDeviceDetection(test_base.NodeTest):

         self.assertEqual(42, introspection_data['local_gb'])

+
+class TestRaidDeviceDetectionUpdate(test_base.NodeTest):
+    hook = raid_device.RaidDeviceDetection()
+
+    @mock.patch.object(node_cache.NodeInfo, 'patch')
+    def _check(self, data, patch, mock_patch):
+        self.hook.before_processing(data)
+        self.hook.before_update(data, self.node_info)
+        self.assertCalledWithPatch(patch, mock_patch)
+
     def test_no_previous_block_devices(self):
         introspection_data = {'block_devices': {'serials': ['foo', 'bar']}}
-        node_patches = self._before_update(introspection_data)
-
-        self.assertEqual('add',
-                         node_patches[0]['op'])
-        self.assertEqual('/extra/block_devices',
-                         node_patches[0]['path'])
-        self.assertEqual(introspection_data['block_devices'],
-                         node_patches[0]['value'])
+        expected = [{'op': 'add', 'path': '/extra/block_devices',
+                     'value': introspection_data['block_devices']}]
+        self._check(introspection_data, expected)

     def test_root_device_found(self):
         self.node.extra['block_devices'] = {'serials': ['foo', 'bar']}
         introspection_data = {'block_devices': {'serials': ['foo', 'baz']}}
         self.hook.before_processing(introspection_data)
-        node_patches = self._before_update(introspection_data)
+        expected = [{'op': 'remove', 'path': '/extra/block_devices'},
+                    {'op': 'add', 'path': '/properties/root_device',
+                     'value': {'serial': 'baz'}}]

-        self.assertEqual('remove',
-                         node_patches[0]['op'])
-        self.assertEqual('/extra/block_devices',
-                         node_patches[0]['path'])
-        self.assertEqual('add',
-                         node_patches[1]['op'])
-        self.assertEqual('/properties/root_device',
-                         node_patches[1]['path'])
-        self.assertEqual({'serial': 'baz'},
-                         node_patches[1]['value'])
+        self._check(introspection_data, expected)

     def test_root_device_already_exposed(self):
         self.node.properties['root_device'] = {'serial': 'foo'}
         introspection_data = {'block_devices': {'serials': ['foo', 'baz']}}
         self.hook.before_processing(introspection_data)
-        node_patches = self._before_update(introspection_data)
-
-        self.assertFalse(node_patches)
+        self._check(introspection_data, [])

     def test_multiple_new_devices(self):
         self.node.extra['block_devices'] = {'serials': ['foo', 'bar']}
         introspection_data = {
            'block_devices': {'serials': ['foo', 'baz', 'qux']}
         }
         self.hook.before_processing(introspection_data)
-        node_patches = self._before_update(introspection_data)
-
-        self.assertFalse(node_patches)
+        self._check(introspection_data, [])

     def test_no_new_devices(self):
         self.node.extra['block_devices'] = {'serials': ['foo', 'bar']}
         introspection_data = {'block_devices': {'serials': ['foo', 'bar']}}
         self.hook.before_processing(introspection_data)
-        node_patches = self._before_update(introspection_data)
-
-        self.assertFalse(node_patches)
+        self._check(introspection_data, [])

     def test_no_block_devices_from_ramdisk(self):
         introspection_data = {}
         self.hook.before_processing(introspection_data)
-        node_patches = self._before_update(introspection_data)
-
-        self.assertFalse(node_patches)
+        self._check(introspection_data, [])
@@ -59,7 +59,8 @@ class TestSchedulerHook(test_base.NodeTest):
         self.assertRaisesRegexp(utils.Error, key,
                                 self.hook.before_processing, new_data)

-    def test_before_update(self):
+    @mock.patch.object(node_cache.NodeInfo, 'patch')
+    def test_before_update(self, mock_patch):
         patch = [
             {'path': '/properties/cpus', 'value': '2', 'op': 'add'},
             {'path': '/properties/cpu_arch', 'value': 'x86_64', 'op': 'add'},
@@ -67,12 +68,11 @@ class TestSchedulerHook(test_base.NodeTest):
             {'path': '/properties/local_gb', 'value': '20', 'op': 'add'}
         ]

-        self.hook.before_update(self.data, self.node_info,
-                                self.node_patches, self.ports_patches)
-        self.assertPatchEqual(patch, self.node_patches)
-        self.assertFalse(self.ports_patches)
+        self.hook.before_update(self.data, self.node_info)
+        self.assertCalledWithPatch(patch, mock_patch)

-    def test_before_update_no_overwrite(self):
+    @mock.patch.object(node_cache.NodeInfo, 'patch')
+    def test_before_update_no_overwrite(self, mock_patch):
         CONF.set_override('overwrite_existing', False, 'processing')
         self.node.properties = {
             'memory_mb': '4096',
@@ -83,10 +83,8 @@ class TestSchedulerHook(test_base.NodeTest):
             {'path': '/properties/local_gb', 'value': '20', 'op': 'add'}
         ]

-        self.hook.before_update(self.data, self.node_info,
-                                self.node_patches, self.ports_patches)
-        self.assertPatchEqual(patch, self.node_patches)
-        self.assertFalse(self.ports_patches)
+        self.hook.before_update(self.data, self.node_info)
+        self.assertCalledWithPatch(patch, mock_patch)


 class TestValidateInterfacesHook(test_base.NodeTest):
@@ -183,14 +181,14 @@ class TestValidateInterfacesHook(test_base.NodeTest):

     @mock.patch.object(node_cache.NodeInfo, 'delete_port', autospec=True)
     def test_keep_all(self, mock_delete_port):
-        self.hook.before_update(self.data, self.node_info, None, None)
+        self.hook.before_update(self.data, self.node_info)
         self.assertFalse(mock_delete_port.called)

     @mock.patch.object(node_cache.NodeInfo, 'delete_port')
     def test_keep_present(self, mock_delete_port):
         CONF.set_override('keep_ports', 'present', 'processing')
         self.data['all_interfaces'] = self.orig_interfaces
-        self.hook.before_update(self.data, self.node_info, None, None)
+        self.hook.before_update(self.data, self.node_info)

         mock_delete_port.assert_called_once_with(self.existing_ports[1])

@@ -198,7 +196,7 @@ class TestValidateInterfacesHook(test_base.NodeTest):
     def test_keep_added(self, mock_delete_port):
         CONF.set_override('keep_ports', 'added', 'processing')
         self.data['macs'] = [self.pxe_interface['mac']]
-        self.hook.before_update(self.data, self.node_info, None, None)
+        self.hook.before_update(self.data, self.node_info)

         mock_delete_port.assert_any_call(self.existing_ports[0])
         mock_delete_port.assert_any_call(self.existing_ports[1])
@@ -294,13 +294,13 @@ class TestProcessNode(BaseTest):
                                              address=self.macs[0])
         self.cli.port.create.assert_any_call(node_uuid=self.uuid,
                                              address=self.macs[1])
-        self.cli.node.update.assert_called_once_with(self.uuid,
-                                                     self.patch_props)
+        self.assertCalledWithPatch(self.patch_props, self.cli.node.update)
         self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off')
         self.assertFalse(self.cli.node.validate.called)

         post_hook_mock.assert_called_once_with(self.data, self.node_info,
-                                               self.patch_props, {})
+                                               node_patches=mock.ANY,
+                                               ports_patches=mock.ANY)
         finished_mock.assert_called_once_with(mock.ANY)

     def test_overwrite_disabled(self, filters_mock, post_hook_mock):
@@ -312,7 +312,7 @@ class TestProcessNode(BaseTest):

         self.call()

-        self.cli.node.update.assert_called_once_with(self.uuid, patch)
+        self.assertCalledWithPatch(patch, self.cli.node.update)

     def test_port_failed(self, filters_mock, post_hook_mock):
         self.cli.port.create.side_effect = (
@@ -324,26 +324,25 @@ class TestProcessNode(BaseTest):
                                              address=self.macs[0])
         self.cli.port.create.assert_any_call(node_uuid=self.uuid,
                                              address=self.macs[1])
-        self.cli.node.update.assert_called_once_with(self.uuid,
-                                                     self.patch_props)
+        self.assertCalledWithPatch(self.patch_props, self.cli.node.update)

     def test_hook_patches(self, filters_mock, post_hook_mock):
-        node_patches = ['node patch1', 'node patch2']
-        port_patch = ['port patch']
+        expected_node_patches = [{'path': 'foo', 'op': 'bar'}]
+        expected_port_patch = [{'path': 'foo', 'op': 'baz'}]

-        def fake_hook(data, node_info, node_p, ports_p):
-            node_p.extend(node_patches)
-            ports_p.setdefault(self.macs[1], []).extend(port_patch)
+        def fake_hook(data, node_info, node_patches, ports_patches):
+            node_patches.extend(expected_node_patches)
+            ports_patches.setdefault(self.macs[1],
+                                     []).extend(expected_port_patch)

         post_hook_mock.side_effect = fake_hook

         self.call()

-        self.cli.node.update.assert_called_once_with(self.uuid,
-                                                     self.patch_props
-                                                     + node_patches)
-        self.cli.port.update.assert_called_once_with(self.ports[1].uuid,
-                                                     port_patch)
+        self.assertCalledWithPatch(self.patch_props + expected_node_patches,
+                                   self.cli.node.update)
+        self.assertCalledWithPatch(expected_port_patch,
+                                   self.cli.port.update)

     def test_set_ipmi_credentials(self, filters_mock, post_hook_mock):
         self.node_info.set_option('new_ipmi_credentials', self.new_creds)
@@ -381,7 +380,6 @@ class TestProcessNode(BaseTest):
         self.assertRaisesRegexp(utils.Error, 'Failed to validate',
                                 self.call)

-        self.cli.node.update.assert_any_call(self.uuid, self.patch_props)
         self.cli.node.update.assert_any_call(self.uuid, self.patch_credentials)
         self.assertEqual(2, self.cli.node.update.call_count)
         self.assertEqual(process._CREDENTIALS_WAIT_RETRIES,
@@ -401,8 +399,7 @@ class TestProcessNode(BaseTest):
                                 self.call)

         self.cli.node.set_power_state.assert_called_once_with(self.uuid, 'off')
-        self.cli.node.update.assert_called_once_with(self.uuid,
-                                                     self.patch_props)
+        self.assertCalledWithPatch(self.patch_props, self.cli.node.update)
         finished_mock.assert_called_once_with(
             mock.ANY,
             error='Failed to power off node %s, check it\'s power management'
@@ -418,8 +415,7 @@ class TestProcessNode(BaseTest):
         self.call()

         swift_conn.create_object.assert_called_once_with(name, expected)
-        self.cli.node.update.assert_called_once_with(self.uuid,
-                                                     self.patch_props)
+        self.assertCalledWithPatch(self.patch_props, self.cli.node.update)

     @mock.patch.object(process.swift, 'SwiftAPI', autospec=True)
     def test_store_data_location(self, swift_mock, filters_mock,
@@ -439,5 +435,4 @@ class TestProcessNode(BaseTest):
         self.call()

         swift_conn.create_object.assert_called_once_with(name, expected)
-        self.cli.node.update.assert_called_once_with(self.uuid,
-                                                     self.patch_props)
+        self.assertCalledWithPatch(self.patch_props, self.cli.node.update)