Add inspection (processing) hooks
Adds inspection hooks in the agent inspect interface for processing data received from the ramdisk at the /v1/continue_inspection endpoint. The four default configuration hooks 'ramdisk-error', 'validate-interfaces', 'ports' and 'architecture' are added. (The remaining inspection hooks will be added in further patches.)

Change-Id: I2cf1be465ba7a93fd66881b14972e960acd4dd4e
Story: #2010275
parent 2528bf6621 · commit e6360bc84b
@@ -69,6 +69,20 @@ opts = [
                 default=True,
                 help=_('Whether to update the ports\' pxe_enabled field '
                        'according to the inspection data.')),
+    cfg.StrOpt('default_hooks',
+               default='ramdisk-error,validate-interfaces,ports,architecture',
+               help=_('A comma-separated list of inspection hooks that are '
+                      'run by default. In most cases, the operators will not '
+                      'modify this. The default (somewhat conservative) hooks '
+                      'will raise an exception in case the ramdisk reports an '
+                      'error, validate interfaces in the inventory, and create'
+                      ' ports.')),
+    cfg.StrOpt('hooks',
+               default='$default_hooks',
+               help=_('Comma-separated list of enabled hooks for processing '
+                      'pipeline. The default for this is $default_hooks. '
+                      'Hooks can be added before or after the defaults '
+                      'like this: "prehook,$default_hooks,posthook".')),
 ]
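A minimal ironic.conf sketch of how these options combine (illustrative only; the [inspector] section is implied by the CONF.inspector.hooks lookup in hooks/base.py below, and 'my-extra-hook' is an invented entry point name):

[inspector]
# Keep the conservative defaults and append one hypothetical extra hook.
hooks = $default_hooks,my-extra-hook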
@@ -14,23 +14,34 @@
 In-band inspection implementation.
 """
 
+from oslo_config import cfg
 from oslo_log import log as logging
+from oslo_utils import excutils
 
 from ironic.common import boot_devices
+from ironic.common import exception
 from ironic.common.i18n import _
 from ironic.common import states
 from ironic.conductor import utils as cond_utils
 from ironic.drivers.modules import deploy_utils
 from ironic.drivers.modules import inspect_utils
+from ironic.drivers.modules.inspector.hooks import base as hooks_base
 from ironic.drivers.modules.inspector import interface as common
-from ironic.drivers.modules.inspector import nics
+from ironic.drivers import utils as driver_utils
 
 
 LOG = logging.getLogger(__name__)
 
+CONF = cfg.CONF
+
 
 class AgentInspect(common.Common):
     """In-band inspection."""
 
+    def __init__(self):
+        super().__init__()
+        self.hooks = hooks_base.validate_inspection_hooks()
+
     def _start_managed_inspection(self, task):
         """Start inspection managed by ironic."""
         ep = deploy_utils.get_ironic_api_url().rstrip('/')
@@ -71,13 +82,116 @@ class AgentInspect(common.Common):
     def continue_inspection(self, task, inventory, plugin_data):
         """Continue in-band hardware inspection.
 
-        This implementation simply defers to ironic-inspector. It only exists
-        to simplify the transition to Ironic-native in-band inspection.
-
         :param task: a task from TaskManager.
         :param inventory: hardware inventory from the node.
         :param plugin_data: optional plugin-specific data.
         """
-        # TODO(dtantsur): migrate the whole pipeline from ironic-inspector
-        nics.process_interfaces(task, inventory, plugin_data)
+        # Run the inspection hooks
+        run_inspection_hooks(task, inventory, plugin_data, self.hooks)
 
         common.clean_up(task, finish=False)
+
+
+def _store_logs(plugin_data, node):
+    logs = plugin_data.get('logs')
+    if not logs:
+        LOG.warning('No logs were passed by the ramdisk for node %(node)s.',
+                    {'node': node.uuid})
+        return
+
+    try:
+        driver_utils.store_ramdisk_logs(node, logs, label='inspect')
+    except Exception:
+        LOG.exception('Could not store the ramdisk logs for node %(node)s.',
+                      {'node': node.uuid})
+
+
+def run_inspection_hooks(task, inventory, plugin_data, hooks):
+    """Process data from the ramdisk using inspection hooks."""
+
+    _run_preprocess_hooks(task, inventory, plugin_data, hooks)
+
+    try:
+        _run_post_hooks(task, inventory, plugin_data, hooks)
+        _power_off_node(task)
+    except exception.HardwareInspectionFailure:
+        with excutils.save_and_reraise_exception():
+            _store_logs(plugin_data, task.node)
+    except Exception as exc:
+        LOG.exception('Unexpected exception while running inspection hooks for'
+                      ' node %(node)s', {'node': task.node.uuid})
+        msg = _('Unexpected exception %(exc_class)s during processing for '
+                'node: %(node)s. Error: %(error)s' %
+                {'exc_class': exc.__class__.__name__,
+                 'node': task.node.uuid,
+                 'error': exc})
+        _store_logs(plugin_data, task.node)
+        raise exception.HardwareInspectionFailure(error=msg)
+
+    if CONF.agent.deploy_logs_collect == 'always':
+        _store_logs(plugin_data, task.node)
+
+
+def _run_preprocess_hooks(task, inventory, plugin_data, hooks):
+    failures = []
+
+    for hook in hooks:
+        LOG.debug('Running preprocess inspection hook: %(hook)s for node: '
+                  '%(node)s', {'hook': hook.name, 'node': task.node.uuid})
+
+        # NOTE(dtantsur): catch exceptions, so that we have a chance to update
+        # the node inspection status after look up
+        try:
+            hook.obj.preprocess(task, inventory, plugin_data)
+        except exception.HardwareInspectionFailure as exc:
+            LOG.error('Preprocess hook: %(hook)s failed for node %(node)s '
+                      'with error: %(error)s', {'hook': hook.name,
+                                                'node': task.node.uuid,
+                                                'error': exc})
+            failures.append(_('Error in preprocess hook %(hook)s: %(error)s')
+                            % {'hook': hook.name, 'error': exc})
+        except Exception as exc:
+            LOG.exception('Preprocess hook: %(hook)s failed for node %(node)s '
+                          'with error: %(error)s', {'hook': hook.name,
+                                                    'node': task.node.uuid,
+                                                    'error': exc})
+            failures.append(_('Unexpected exception %(exc_class)s during '
+                              'preprocess hook %(hook)s: %(error)s' %
+                              {'exc_class': exc.__class__.__name__,
+                               'hook': hook.name,
+                               'error': exc}))
+
+    if failures:
+        msg = _('The following failures happened while running preprocess '
+                'hooks for node %(node)s:\n%(failures)s') % {
+                    'node': task.node.uuid,
+                    'failures': '\n'.join(failures)}
+        _store_logs(plugin_data, task.node)
+        raise exception.HardwareInspectionFailure(error=msg)
+
+    # TODO(masghar): Store unprocessed inspection data
+
+
+def _run_post_hooks(task, inventory, plugin_data, hooks):
+    for hook in hooks:
+        LOG.debug('Running inspection hook %(hook)s for node %(node)s',
+                  {'hook': hook.name, 'node': task.node.uuid})
+        hook.obj.__call__(task, inventory, plugin_data)
+
+
+def _power_off_node(task):
+    power_off = CONF.inspector.power_off
+    if not power_off:
+        return
+
+    node = task.node
+    LOG.debug('Forcing power off of node %(node)s', {'node': node.uuid})
+    try:
+        cond_utils.node_power_action(task, states.POWER_OFF)
+    except Exception as exc:
+        msg = _("Failed to power off node %(node)s after inspection. Check "
+                "its power management configuration. Error: %(error)s" %
+                {'node': node.uuid, "error": exc})
+        raise exception.HardwareInspectionFailure(msg)
+    LOG.info('Node %(node)s powered off', {'node': node.uuid})
ironic/drivers/modules/inspector/hooks/__init__.py (new, empty file)
ironic/drivers/modules/inspector/hooks/architecture.py (new file, 42 lines)

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from oslo_log import log as logging

from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers.modules.inspector.hooks import base

LOG = logging.getLogger(__name__)


class ArchitectureHook(base.InspectionHook):
    """Hook to set the node's cpu_arch property based on the inventory."""

    def __call__(self, task, inventory, plugin_data):
        """Update node properties with CPU architecture."""

        try:
            cpu_arch = inventory['cpu']['architecture']
            LOG.info('Discovered CPU architecture: %(cpu_arch)s for node: '
                     '%(node)s', {'cpu_arch': cpu_arch,
                                  'node': task.node.uuid})
            task.node.set_property('cpu_arch', cpu_arch)
            task.node.save()
        except (KeyError, ValueError, TypeError):
            msg = _('Inventory has malformed or missing CPU architecture '
                    'information: %(cpu)s for node %(node)s.') % {
                        'cpu': inventory.get('cpu'), 'node': task.node.uuid}
            LOG.error(msg)
            raise exception.InvalidNodeInventory(node=task.node.uuid,
                                                 reason=msg)
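For illustration only, the slice of the ramdisk inventory this hook reads (values invented; only the 'architecture' key is used, anything malformed falls into the except branch above):

# Illustrative inventory fragment; 'count' is an assumed extra key.
inventory = {'cpu': {'architecture': 'x86_64', 'count': 4}}
cpu_arch = inventory['cpu']['architecture']  # -> 'x86_64', stored as the
                                             # node's cpu_arch property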
ironic/drivers/modules/inspector/hooks/base.py (new file, 129 lines)

# (Apache 2.0 license header as in architecture.py above)

"""Base code for inspection hooks support."""

import abc

from oslo_config import cfg
from oslo_log import log
import stevedore

from ironic.common import exception
from ironic.common.i18n import _

CONF = cfg.CONF
LOG = log.getLogger(__name__)
_HOOKS_MGR = None


class InspectionHook(metaclass=abc.ABCMeta):  # pragma: no cover
    """Abstract base class for inspection hooks."""

    dependencies = []
    """An ordered list of hooks that must be enabled before this one.

    The items here should be entry point names, not classes.
    """

    def preprocess(self, task, inventory, plugin_data):
        """Hook to run before the main inspection data processing.

        This hook is run even before sanity checks.

        :param task: A TaskManager instance.
        :param inventory: Hardware inventory information sent by the ramdisk.
                          Must not be modified by the hook.
        :param plugin_data: Plugin data sent by the ramdisk. May be modified
                            by the hook.
        :returns: nothing.
        """

    def __call__(self, task, inventory, plugin_data):
        """Hook to run to process inspection data (before Ironic node update).

        This hook is run after node is found and ports are created,
        just before the node is updated with the data.

        :param task: A TaskManager instance.
        :param inventory: Hardware inventory information sent by the ramdisk.
                          Must not be modified by the hook.
        :param plugin_data: Plugin data sent by the ramdisk. May be modified
                            by the hook.
        :returns: nothing.
        """


def reset():
    """Reset cached managers."""
    global _HOOKS_MGR

    _HOOKS_MGR = None


def missing_entrypoints_callback(names):
    """Raise RuntimeError with comma-separated list of missing hooks"""
    error = _('The following hook(s) are missing or failed to load: %s')
    raise RuntimeError(error % ', '.join(names))


def inspection_hooks_manager(*args):
    """Create a Stevedore extension manager for inspection hooks.

    :param args: arguments to pass to the hooks constructor
    :returns: a Stevedore NamedExtensionManager
    """
    global _HOOKS_MGR
    if _HOOKS_MGR is None:
        enabled_hooks = [x.strip()
                         for x in CONF.inspector.hooks.split(',')
                         if x.strip()]
        _HOOKS_MGR = stevedore.NamedExtensionManager(
            'ironic.inspection.hooks',
            names=enabled_hooks,
            invoke_on_load=True,
            invoke_args=args,
            on_missing_entrypoints_callback=missing_entrypoints_callback,
            name_order=True)
    return _HOOKS_MGR


def validate_inspection_hooks():
    """Validate the enabled inspection hooks.

    :raises: RuntimeError on missing or failed to load hooks
    :returns: the list of hooks that passed validation
    """
    conf_hooks = [ext for ext in inspection_hooks_manager()]
    valid_hooks = []
    valid_hook_names = set()
    errors = []

    for hook in conf_hooks:
        deps = getattr(hook.obj, 'dependencies', ())
        missing = [d for d in deps if d not in valid_hook_names]
        if missing:
            errors.append('Hook %(hook)s requires these missing hooks to be '
                          'enabled before it: %(missing)s' %
                          {'hook': hook.name, 'missing': ', '.join(missing)})
        else:
            valid_hooks.append(hook)
            valid_hook_names.add(hook.name)

    if errors:
        msg = _('Some hooks failed to load due to dependency problems: '
                '%(errors)s') % {'errors': ', '.join(errors)}
        LOG.error(msg)
        raise exception.HardwareInspectionFailure(error=msg)

    return valid_hooks
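To show how the base class is meant to be extended, a hedged sketch of an out-of-tree hook (the class name, module and behaviour are invented; only the InspectionHook interface and the dependencies attribute come from the code above). Such a hook would additionally need an ironic.inspection.hooks entry point and an entry in [inspector]hooks (see the setup.cfg hunk near the end):

from ironic.drivers.modules.inspector.hooks import base


class NicCountHook(base.InspectionHook):
    """Hypothetical hook recording the number of discovered NICs."""

    # Only run after validate-interfaces has populated plugin_data.
    dependencies = ['validate-interfaces']

    def preprocess(self, task, inventory, plugin_data):
        # Nothing to do before the sanity checks in this example.
        pass

    def __call__(self, task, inventory, plugin_data):
        plugin_data['nic_count'] = len(plugin_data.get('all_interfaces', {}))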
ironic/drivers/modules/inspector/hooks/ports.py (new file, 93 lines)

# (Apache 2.0 license header as in architecture.py above)

from oslo_log import log as logging

from ironic.common import exception
from ironic.conf import CONF
from ironic.drivers.modules.inspector.hooks import base
from ironic import objects

LOG = logging.getLogger(__name__)


class PortsHook(base.InspectionHook):
    """Hook to create ironic ports."""

    dependencies = ['validate-interfaces']

    def __call__(self, task, inventory, plugin_data):
        if CONF.inspector.add_ports != 'disabled':
            add_ports(task, plugin_data['valid_interfaces'])

        update_ports(task, plugin_data['all_interfaces'], plugin_data['macs'])


def add_ports(task, interfaces):
    """Add ports for all previously validated interfaces."""
    for iface in interfaces.values():
        mac = iface['mac_address']
        extra = {}
        if iface.get('client_id'):
            extra['client-id'] = iface['client_id']
        port_dict = {'address': mac, 'node_id': task.node.id,
                     'pxe_enabled': iface['pxe_enabled'], 'extra': extra}
        port = objects.Port(task.context, **port_dict)
        try:
            port.create()
            LOG.info("Port created for MAC address %(address)s for "
                     "node %(node)s%(pxe)s",
                     {'address': mac, 'node': task.node.uuid,
                      'pxe': ' (PXE booting)' if iface['pxe_enabled'] else ''})
        except exception.MACAlreadyExists:
            LOG.info("Port already exists for MAC address %(address)s "
                     "for node %(node)s",
                     {'address': mac, 'node': task.node.uuid})


def update_ports(task, all_interfaces, valid_macs):
    """Update ports to match the valid MACs.

    Depending on the value of ``[inspector]keep_ports``, some ports may be
    removed.
    """
    # TODO(dtantsur): no port update for active nodes (when supported)

    if CONF.inspector.keep_ports == 'present':
        expected_macs = {iface['mac_address']
                         for iface in all_interfaces.values()}
    elif CONF.inspector.keep_ports == 'added':
        expected_macs = set(valid_macs)
    else:
        expected_macs = None  # unused

    pxe_macs = {iface['mac_address'] for iface in all_interfaces.values()
                if iface['pxe_enabled']}

    for port in objects.Port.list_by_node_id(task.context, task.node.id):
        if expected_macs and port.address not in expected_macs:
            expected_str = ', '.join(sorted(expected_macs))
            LOG.info("Deleting port %(port)s of node %(node)s as its MAC "
                     "%(mac)s is not in the expected MAC list [%(expected)s]",
                     {'port': port.uuid, 'mac': port.address,
                      'node': task.node.uuid, 'expected': expected_str})
            port.destroy()
        elif CONF.inspector.update_pxe_enabled:
            pxe_enabled = port.address in pxe_macs
            if pxe_enabled != port.pxe_enabled:
                LOG.debug("Changing pxe_enabled=%(val)s on port %(port)s "
                          "of node %(node)s to match the inventory",
                          {'port': port.address, 'val': pxe_enabled,
                           'node': task.node.uuid})
                port.pxe_enabled = pxe_enabled
                port.save()
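For reference, a sketch of the plugin_data keys this hook consumes, as populated by ValidateInterfacesHook.preprocess() (MAC values are invented; the key names match the code above):

# Illustrative plugin_data shape handed to PortsHook.__call__().
plugin_data = {
    'all_interfaces': {
        'em0': {'mac_address': '11:22:33:44:55:66', 'pxe_enabled': True},
        'em1': {'mac_address': '66:55:44:33:22:11', 'pxe_enabled': False},
    },
    'valid_interfaces': {
        'em0': {'mac_address': '11:22:33:44:55:66', 'pxe_enabled': True},
    },
    'macs': ['11:22:33:44:55:66'],
}
# PortsHook creates ports from 'valid_interfaces' (unless [inspector]add_ports
# is 'disabled') and then reconciles the existing ports against
# 'all_interfaces'/'macs' according to [inspector]keep_ports.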
ironic/drivers/modules/inspector/hooks/ramdisk_error.py (new file, 29 lines)

# (Apache 2.0 license header as in architecture.py above)

from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers.modules.inspector.hooks import base


class RamdiskErrorHook(base.InspectionHook):
    """Hook to process error sent from the ramdisk."""

    def preprocess(self, task, inventory, plugin_data):
        if plugin_data.get('error'):
            msg = _("Ramdisk reported error: %(error)s") % {
                'error': plugin_data['error']}
            raise exception.HardwareInspectionFailure(error=msg)

    def __call__(self, task, inventory, plugin_data):
        pass
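A small illustration, mirroring the unit test added later in this change: a non-empty 'error' key aborts inspection before any other hook runs (task is unused by preprocess(), so None is passed purely for the sketch):

from ironic.common import exception
from ironic.drivers.modules.inspector.hooks import ramdisk_error

hook = ramdisk_error.RamdiskErrorHook()
try:
    hook.preprocess(task=None, inventory={},
                    plugin_data={'error': 'There was an error!'})
except exception.HardwareInspectionFailure as exc:
    print(exc)  # message includes "Ramdisk reported error: There was an error!"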
@@ -1,14 +1,15 @@
(The Apache 2.0 license header of this module is re-wrapped to the shorter
line style used by the new hook modules; no functional change.)

@@ -17,19 +18,27 @@ from ironic.common import exception
 from ironic.common.i18n import _
 from ironic.common import utils
 from ironic.conf import CONF
-from ironic import objects
+from ironic.drivers.modules.inspector.hooks import base
 
 LOG = logging.getLogger(__name__)
 
 
-def get_pxe_mac(inventory):
-    """Get MAC address of the PXE interface."""
-    pxe_mac = inventory.get('boot', {}).get('pxe_interface')
-    if pxe_mac and '-' in pxe_mac:
-        # pxelinux format: 01-aa-bb-cc-dd-ee-ff
-        pxe_mac = pxe_mac.split('-', 1)[1]
-        pxe_mac = pxe_mac.replace('-', ':').lower()
-    return pxe_mac
+class ValidateInterfacesHook(base.InspectionHook):
+    """Hook to validate network interfaces."""
+
+    def preprocess(self, task, inventory, plugin_data):
+        all_interfaces = get_interfaces(task.node, inventory)
+        valid_interfaces = validate_interfaces(task.node, inventory,
+                                               all_interfaces)
+        valid_macs = [iface['mac_address'] for iface in
+                      valid_interfaces.values()]
+
+        plugin_data['all_interfaces'] = all_interfaces
+        plugin_data['valid_interfaces'] = valid_interfaces
+        plugin_data['macs'] = valid_macs
+
+    def __call__(self, task, inventory, plugin_data):
+        pass
 
 
 def get_interfaces(node, inventory):
@@ -132,78 +141,11 @@ def validate_interfaces(node, inventory, interfaces):
     return result
 
 
+def get_pxe_mac(inventory):
+    """Get MAC address of the PXE interface."""
+    pxe_mac = inventory.get('boot', {}).get('pxe_interface')
+    if pxe_mac and '-' in pxe_mac:
+        # pxelinux format: 01-aa-bb-cc-dd-ee-ff
+        pxe_mac = pxe_mac.split('-', 1)[1]
+        pxe_mac = pxe_mac.replace('-', ':').lower()
+    return pxe_mac

(The remainder of this hunk deletes the old add_ports(), update_ports() and
process_interfaces() helpers: the first two move verbatim to hooks/ports.py
shown above, and process_interfaces() is superseded by ValidateInterfacesHook
together with PortsHook.)
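A quick usage illustration of the relocated get_pxe_mac() helper (the MAC value is invented): it strips the pxelinux '01-' prefix and normalizes the separators.

from ironic.drivers.modules.inspector.hooks import validate_interfaces

inventory = {'boot': {'pxe_interface': '01-AA-BB-CC-DD-EE-FF'}}
print(validate_interfaces.get_pxe_mac(inventory))  # aa:bb:cc:dd:ee:ff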
@@ -298,6 +298,11 @@ def store_ramdisk_logs(node, logs, label=None):
     :raises: SwiftOperationError, if any operation with Swift fails.
     """
+    if CONF.agent.deploy_logs_collect == 'never':
+        LOG.info('Ramdisk logs will not be stored as the configuration '
+                 'option `agent.deploy_logs_collect` is set to `never`.')
+        return
+
     logs_file_name = get_ramdisk_logs_file_name(node, label=label)
     data = base64.decode_as_bytes(logs)
 
@@ -309,6 +314,8 @@ def store_ramdisk_logs(node, logs, label=None):
                                 logs_file_name)
         with open(log_path, 'wb') as f:
             f.write(data)
+        LOG.info('Ramdisk logs were stored in local storage for node %(node)s',
+                 {'node': node.uuid})
     elif CONF.agent.deploy_logs_storage_backend == 'swift':
         with tempfile.NamedTemporaryFile(dir=CONF.tempdir) as f:
@@ -322,6 +329,8 @@ def store_ramdisk_logs(node, logs, label=None):
             swift_api.create_object(
                 CONF.agent.deploy_logs_swift_container, logs_file_name,
                 f.name, object_headers=object_headers)
+        LOG.info('Ramdisk logs were stored in swift for node %(node)s',
+                 {'node': node.uuid})
 
 
 def collect_ramdisk_logs(node, label=None):
@@ -753,6 +753,18 @@ class Node(base.IronicObject, object_base.VersionedObjectDictCompat):
         self.instance_info[key] = value
         self._changed_fields.add('instance_info')
 
+    def set_property(self, key, value):
+        """Set a `properties` value.
+
+        Setting a `properties` dict value via this method ensures that this
+        field will be flagged for saving.
+
+        :param key: Key of item to set
+        :param value: Value of item to set
+        """
+        self.properties[key] = value
+        self._changed_fields.add('properties')
+
 
 @base.IronicObjectRegistry.register
 class NodePayload(notification.NotificationPayloadBase):
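A brief usage sketch modeled on the ArchitectureHook and its test above (context and node_uuid are assumed to exist in the caller's scope): unlike assigning into node.properties directly, the helper also flags the field so a later save() persists the change.

from ironic.conductor import task_manager

with task_manager.acquire(context, node_uuid) as task:
    task.node.set_property('cpu_arch', 'x86_64')
    task.node.save()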
(new test module for the architecture hook, 39 lines)

# (standard Apache 2.0 license header)

from ironic.conductor import task_manager
from ironic.conf import CONF
from ironic.drivers.modules.inspector.hooks import architecture as \
    architecture_hook
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.objects import utils as obj_utils


class ArchitectureTestCase(db_base.DbTestCase):
    def setUp(self):
        super().setUp()
        CONF.set_override('enabled_inspect_interfaces',
                          ['agent', 'no-inspect'])
        self.node = obj_utils.create_test_node(self.context,
                                               inspect_interface='agent')
        self.inventory = {'cpu': {'architecture': 'test-architecture'}}
        self.plugin_data = {'fake': 'fake-plugin-data'}

    def test_architecture(self):
        with task_manager.acquire(self.context, self.node.id) as task:
            architecture_hook.ArchitectureHook().__call__(task, self.inventory,
                                                          self.plugin_data)
            self.node.refresh()
            result = self.node.properties
            self.assertEqual(result['cpu_arch'],
                             self.inventory['cpu']['architecture'])
ironic/tests/unit/drivers/modules/inspector/hooks/test_ports.py (new file, 125 lines)

# (standard Apache 2.0 license header)

from oslo_utils import uuidutils

from ironic.conductor import task_manager
from ironic.conf import CONF
from ironic.drivers.modules.inspector.hooks import ports as ports_hook
from ironic import objects
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.drivers.modules.inspector.hooks. \
    test_validate_interfaces import _PXE_INTERFACE
from ironic.tests.unit.drivers.modules.inspector.hooks. \
    test_validate_interfaces import _VALID
from ironic.tests.unit.objects import utils as obj_utils


class AddPortsTestCase(db_base.DbTestCase):
    def setUp(self):
        super().setUp()
        CONF.set_override('enabled_inspect_interfaces',
                          ['agent', 'no-inspect'])
        self.node = obj_utils.create_test_node(self.context,
                                               inspect_interface='agent')
        self.interfaces = {key: value.copy() for key, value in _VALID.items()
                           if key not in ('lo', 'em4')}
        self.macs = {
            _PXE_INTERFACE,
            '11:11:11:11:11:11',
            '22:22:22:22:22:22',
            '33:33:33:33:33:33',
        }

    def test_add_ports(self):
        with task_manager.acquire(self.context, self.node.id) as task:
            ports_hook.add_ports(task, self.interfaces)
        ports = objects.Port.list_by_node_id(self.context, self.node.id)
        self.assertEqual(
            {port.address: port.pxe_enabled for port in ports},
            {mac: (mac == _PXE_INTERFACE) for mac in self.macs})

    def test_duplicates(self):
        obj_utils.create_test_port(self.context,
                                   node_id=self.node.id,
                                   address=_PXE_INTERFACE,
                                   pxe_enabled=False)
        with task_manager.acquire(self.context, self.node.id) as task:
            ports_hook.add_ports(task, self.interfaces)
        ports = objects.Port.list_by_node_id(self.context, self.node.id)
        self.assertEqual(
            {port.address: port.pxe_enabled for port in ports},
            # Always False because the PXE port already existed
            {mac: False for mac in self.macs})


class UpdatePortsTestCase(db_base.DbTestCase):
    def setUp(self):
        super().setUp()
        CONF.set_override('enabled_inspect_interfaces',
                          ['agent', 'no-inspect'])
        self.node = obj_utils.create_test_node(self.context,
                                               inspect_interface='agent')
        self.macs = {
            _PXE_INTERFACE,
            '11:11:11:11:11:11',
            '22:22:22:22:22:22',
            '33:33:33:33:33:33',
        }
        self.present_macs = {i['mac_address'] for i in _VALID.values()}
        self.extra_mac = '00:11:00:11:00:11'
        self.all_macs = self.present_macs | {self.extra_mac}
        for mac in self.all_macs:
            obj_utils.create_test_port(self.context,
                                       uuid=uuidutils.generate_uuid(),
                                       node_id=self.node.id,
                                       address=mac,
                                       pxe_enabled=(mac == self.extra_mac))

    def test_keep_all(self):
        with task_manager.acquire(self.context, self.node.id) as task:
            ports_hook.update_ports(task, _VALID, self.macs)
        ports = objects.Port.list_by_node_id(self.context, self.node.id)
        self.assertEqual(
            {port.address: port.pxe_enabled for port in ports},
            # Nothing is removed by default, pxe_enabled updated
            {mac: (mac == _PXE_INTERFACE) for mac in self.all_macs})

    def test_keep_pxe_enabled(self):
        CONF.set_override('update_pxe_enabled', False, group='inspector')
        with task_manager.acquire(self.context, self.node.id) as task:
            ports_hook.update_ports(task, _VALID, self.macs)
        ports = objects.Port.list_by_node_id(self.context, self.node.id)
        self.assertEqual(
            {port.address: port.pxe_enabled for port in ports},
            # Nothing is removed by default, pxe_enabled kept intact
            {mac: (mac == self.extra_mac) for mac in self.all_macs})

    def test_keep_added(self):
        CONF.set_override('keep_ports', 'added', group='inspector')
        with task_manager.acquire(self.context, self.node.id) as task:
            ports_hook.update_ports(task, _VALID, self.macs)
        ports = objects.Port.list_by_node_id(self.context, self.node.id)
        self.assertEqual(
            {port.address: port.pxe_enabled for port in ports},
            # Extra ports removed, pxe_enabled updated
            {mac: (mac == _PXE_INTERFACE) for mac in self.macs})

    def test_keep_present(self):
        CONF.set_override('keep_ports', 'present', group='inspector')
        with task_manager.acquire(self.context, self.node.id) as task:
            ports_hook.update_ports(task, _VALID, self.macs)
        ports = objects.Port.list_by_node_id(self.context, self.node.id)
        self.assertEqual(
            {port.address: port.pxe_enabled for port in ports},
            # Extra port removed, pxe_enabled updated
            {mac: (mac == _PXE_INTERFACE) for mac in self.present_macs})
(new test module for the ramdisk-error hook, 39 lines)

# (standard Apache 2.0 license header)

from ironic.common import exception
from ironic.conductor import task_manager
from ironic.conf import CONF
from ironic.drivers.modules.inspector.hooks import ramdisk_error \
    as ramdisk_error_hook
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.objects import utils as obj_utils


class RamdiskErrorTestCase(db_base.DbTestCase):
    def setUp(self):
        super().setUp()
        CONF.set_override('enabled_inspect_interfaces',
                          ['agent', 'no-inspect'])
        self.node = obj_utils.create_test_node(self.context,
                                               inspect_interface='agent')
        self.inventory = {'fake': 'fake-inventory'}
        self.plugin_data = {'error': 'There was an error!'}

    def test_ramdisk_error(self):
        with task_manager.acquire(self.context, self.node.id) as task:
            hook = ramdisk_error_hook.RamdiskErrorHook()
            self.assertRaises(exception.HardwareInspectionFailure,
                              hook.preprocess,
                              task=task, inventory=self.inventory,
                              plugin_data=self.plugin_data)
@@ -13,13 +13,10 @@
 import copy
 from unittest import mock
 
-from oslo_utils import uuidutils
-
 from ironic.common import exception
-from ironic.conductor import task_manager
 from ironic.conf import CONF
-from ironic.drivers.modules.inspector import nics
-from ironic import objects
+from ironic.drivers.modules.inspector.hooks import validate_interfaces as \
+    validate_interfaces_hook
 from ironic.tests.unit.db import base as db_base
 from ironic.tests.unit.objects import utils as obj_utils
 
@@ -154,7 +151,8 @@ class GetInterfacesTestCase(db_base.DbTestCase):
         ])
 
     def test_get_interfaces(self):
-        result = nics.get_interfaces(self.node, self.inventory)
+        result = validate_interfaces_hook.get_interfaces(self.node,
+                                                         self.inventory)
         self.assertEqual(_VALID, result)
 
@@ -168,7 +166,7 @@ class ValidateInterfacesTestCase(db_base.DbTestCase):
 
     def test_pxe_only(self):
         expected = {'em0': _VALID['em0']}
-        result = nics.validate_interfaces(
+        result = validate_interfaces_hook.validate_interfaces(
             self.node, self.inventory, self.interfaces)
         self.assertEqual(expected, result)
         # Make sure we don't modify the initial structures
@@ -179,16 +177,16 @@ class ValidateInterfacesTestCase(db_base.DbTestCase):
         CONF.set_override('add_ports', 'all', group='inspector')
         expected = {key: value.copy() for key, value in _VALID.items()
                     if key != 'lo'}
-        result = nics.validate_interfaces(
+        result = validate_interfaces_hook.validate_interfaces(
             self.node, self.inventory, self.interfaces)
         self.assertEqual(expected, result)
 
-    @mock.patch.object(nics.LOG, 'warning', autospec=True)
+    @mock.patch.object(validate_interfaces_hook.LOG, 'warning', autospec=True)
     def test_no_pxe_fallback_to_all(self, mock_warn):
         del self.inventory['boot']
         expected = {key: value.copy() for key, value in _VALID.items()
                     if key != 'lo'}
-        result = nics.validate_interfaces(
+        result = validate_interfaces_hook.validate_interfaces(
            self.node, self.inventory, self.interfaces)
         self.assertEqual(expected, result)
         self.assertTrue(mock_warn.called)
@@ -197,7 +195,7 @@ class ValidateInterfacesTestCase(db_base.DbTestCase):
         CONF.set_override('add_ports', 'active', group='inspector')
         expected = {key: value.copy() for key, value in _VALID.items()
                     if key not in ('lo', 'em4')}
-        result = nics.validate_interfaces(
+        result = validate_interfaces_hook.validate_interfaces(
             self.node, self.inventory, self.interfaces)
         self.assertEqual(expected, result)
 
@@ -206,106 +204,5 @@ class ValidateInterfacesTestCase(db_base.DbTestCase):
         self.interfaces = {key: value.copy() for key, value in _VALID.items()
                            if key in ('lo', 'em4')}
         self.assertRaises(exception.InvalidNodeInventory,
-                          nics.validate_interfaces,
+                          validate_interfaces_hook.validate_interfaces,
                           self.node, self.inventory, self.interfaces)

(The remainder of this hunk removes the old AddPortsTestCase and
UpdatePortsTestCase classes; they move to hooks/test_ports.py shown above,
with the nics.* calls replaced by ports_hook.*.)
@@ -21,7 +21,6 @@ from ironic.drivers.modules import deploy_utils
 from ironic.drivers.modules import inspect_utils
 from ironic.drivers.modules.inspector import agent as inspector
 from ironic.drivers.modules.inspector import interface as common
-from ironic.drivers.modules.inspector import nics
 from ironic.tests.unit.db import base as db_base
 from ironic.tests.unit.objects import utils as obj_utils
 
@@ -103,25 +102,29 @@ class InspectHardwareTestCase(db_base.DbTestCase):
 
 
 @mock.patch.object(common, 'tear_down_managed_boot', autospec=True)
-@mock.patch.object(nics, 'process_interfaces', autospec=True)
+@mock.patch.object(inspector, 'run_inspection_hooks', autospec=True)
 class ContinueInspectionTestCase(db_base.DbTestCase):
     def setUp(self):
         super().setUp()
         CONF.set_override('enabled_inspect_interfaces',
                           ['agent', 'no-inspect'])
+        self.config(hooks='ramdisk-error,architecture,validate-interfaces,'
+                          'ports',
+                    group='inspector')
         self.node = obj_utils.create_test_node(
             self.context,
             inspect_interface='agent',
             provision_state=states.INSPECTING)
         self.iface = inspector.AgentInspect()
 
-    def test(self, mock_process, mock_tear_down):
+    def test(self, mock_inspection_hooks, mock_tear_down):
         mock_tear_down.return_value = None
         with task_manager.acquire(self.context, self.node.id) as task:
             result = self.iface.continue_inspection(
                 task, mock.sentinel.inventory, mock.sentinel.plugin_data)
-        mock_process.assert_called_once_with(
-            task, mock.sentinel.inventory, mock.sentinel.plugin_data)
+        mock_inspection_hooks.assert_called_once_with(
+            task, mock.sentinel.inventory, mock.sentinel.plugin_data,
+            self.iface.hooks)
         mock_tear_down.assert_called_once_with(task)
         self.assertEqual(states.INSPECTING, task.node.provision_state)
(new release note, 10 lines)

---
features:
  - |
    Adds inspection hooks in the agent inspect interface for processing data
    received from the ramdisk at the /v1/continue_inspection endpoint. The
    four default configuration hooks `ramdisk-error`, `validate-interfaces`,
    `ports` and `architecture` are added. Two new configuration options
    `default_hooks` and `hooks` are added in the `inspector` configuration
    section to allow configuring the default enabled hooks and optional
    additional hooks, respectively.
@@ -196,6 +196,12 @@ ironic.hardware.types =
 ironic.database.migration_backend =
     sqlalchemy = ironic.db.sqlalchemy.migration
 
+ironic.inspection.hooks =
+    ramdisk-error = ironic.drivers.modules.inspector.hooks.ramdisk_error:RamdiskErrorHook
+    validate-interfaces = ironic.drivers.modules.inspector.hooks.validate_interfaces:ValidateInterfacesHook
+    ports = ironic.drivers.modules.inspector.hooks.ports:PortsHook
+    architecture = ironic.drivers.modules.inspector.hooks.architecture:ArchitectureHook
+
 [egg_info]
 tag_build =
 tag_date = 0
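For completeness, a hypothetical out-of-tree package could expose its own hook on the same entry point group (the package, module and hook names below are invented, matching the NicCountHook sketch earlier) and enable it via [inspector]hooks:

[entry_points]
ironic.inspection.hooks =
    my-extra-hook = my_ironic_plugin.hooks:NicCountHook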