Run validations with parameters from a file

This patch adds a new command 'file' to the validation CLI. Users can include and exclude validations by name(s), group(s), category(ies) or product(s), and run them from a YAML file with arbitrary parameters.

Resolves: rhbz#2122209
Depends-On: https://review.opendev.org/c/openstack/validations-common/+/872746/
Signed-off-by: Veronika Fisarova <vfisarov@redhat.com>
Change-Id: Ifc6c28003c4c2c5f3dd6198e650f9713a02dc82d

This commit is contained in:
parent 6bd2a45eac
commit 136827b8fe
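For orientation, the new command is essentially a YAML-driven wrapper around ValidationActions.run_validations(), which this patch extends with exclude_* keyword arguments. A rough sketch of the programmatic equivalent, assuming the referenced validations are installed under the default validation directory:

    from validations_libs.validation_actions import ValidationActions

    actions = ValidationActions()
    # Mirrors an include/exclude selection that a run file could express;
    # exclusion always takes precedence over inclusion.
    results = actions.run_validations(
        validation_name=['check-rhsm-version'],
        group=['pre-deployment'],
        exclude_validation=['fips-enabled'],
        inventory='localhost',
        ssh_user='stack',
        extra_vars={'key1': 'val1'})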
run-from-file-example.yaml (new file, 72 lines)

@@ -0,0 +1,72 @@
---
#
# As shown in this template, you can select the validation(s) of your choice with the
# following options:
#
# Validation(s), group(s), product(s) and category(ies) you wish to include in
# the CLI run,
# Validation(s), group(s), product(s) and category(ies) you wish to exclude from
# the same CLI run,
#
# Optional arguments for the CLI run,
# e.g.:
# --config
# --limit
# --ssh-user
# --validation-dir
# --ansible-base-dir
# --validation-log-dir
# --inventory
# --output-log
# --python-interpreter
# --extra-vars
# --extra-env-vars
# --extra-vars-file
#
# Note: a skip list isn't included in the run arguments because its functionality
# is replaced by the 'exclude' parameters.
#
# WARNING: when designing validation runs that combine inclusion and exclusion, please
# note that exclusion has a higher priority than inclusion, so it always takes precedence.
#
# Remove the comment sign to enable the action you need. Add a '-' item, following
# correct YAML formatting, for every additional entry you want to include or exclude.
#
# Example of a valid YAML file:
#
# include_validation:
#   - check-rhsm-version
# include_group:
#   - prep
#   - pre-deployment
# include_category:
#   - compute
#   - networking
# include_product:
#   - tripleo
# exclude_validation:
#   - fips-enabled
# exclude_group:
# exclude_category:
#   - kerberos
# exclude_product:
#   - rabbitmq
# config: /etc/validation.cfg
# limit:
#   - undercloud-0
#   - undercloud-1
# ssh-user: stack
# validation-dir: /usr/share/ansible/validation-playbooks
# ansible-base-dir: /usr/share/ansible
# validation-log-dir: /home/stack/validations
# inventory: localhost
# output-log: /home/stack/logs
# python-interpreter: /usr/bin/python3
# extra-vars:
#   key1: val1
#   key2: val2
# extra-env-vars:
#   key1: val1
#   key2: val2
# extra-vars-file: /tmp/extra.json
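A run file like the template above is plain YAML: the CLI loads it into a dict (via common.read_cli_data_file) and maps the keys onto run_validations() arguments, as the new file.py below shows. A minimal, stand-alone sketch of that first step, assuming only PyYAML is available:

    import yaml

    run_file = """
    include_group:
      - pre-deployment
    exclude_validation:
      - fips-enabled
    ssh-user: stack
    """

    params = yaml.safe_load(run_file)
    assert isinstance(params, dict)          # non-dict content is rejected by the command
    print(params.get('include_group', []))   # ['pre-deployment']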
setup.cfg

@@ -40,6 +40,7 @@ validation.cli:
    show_group = validations_libs.cli.show:ShowGroup
    show_parameter = validations_libs.cli.show:ShowParameter
    run = validations_libs.cli.run:Run
+   file = validations_libs.cli.file:File
    history_list = validations_libs.cli.history:ListHistory
    history_get = validations_libs.cli.history:GetHistory
    init = validations_libs.cli.community:CommunityValidationInit
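With this entry point registered, the subcommand becomes available to the validation CLI (presumably invoked as "validation file <path-to-yaml> [--junitxml <path>]", matching the two arguments defined by the parser in the new module below).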
validations_libs/cli/file.py (new file, 130 lines)

@@ -0,0 +1,130 @@
#!/usr/bin/env python

# Copyright 2023 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import getpass
import os

from validations_libs import utils
from validations_libs.cli import common
from validations_libs.cli.base import BaseCommand
from validations_libs.validation_actions import ValidationActions
from validations_libs.exceptions import ValidationRunException
from validations_libs import constants


class File(BaseCommand):
    """Include and exclude validations by name(s), group(s), category(ies)
    or product(s) and run them from a file."""

    def get_parser(self, parser):
        """Argument parser for validation file"""
        parser = super(File, self).get_parser(parser)

        parser.add_argument(
            dest='path_to_file',
            default=None,
            help=("The path where the YAML file is stored.\n"))

        parser.add_argument(
            '--junitxml',
            dest='junitxml',
            default=None,
            help=("Path where the run result in JUnitXML format will be stored.\n"))
        return parser

    def take_action(self, parsed_args):
        """Take action"""
        # Merge config and CLI args:
        self.base.set_argument_parser(self, parsed_args)

        # Verify that the YAML file is valid
        if parsed_args.path_to_file:
            try:
                yaml_file = common.read_cli_data_file(parsed_args.path_to_file)
                if not isinstance(yaml_file, dict):
                    raise ValidationRunException("Wrong format of the File.")
            except FileNotFoundError as e:
                raise FileNotFoundError(e)
            # Load the config file, if it is specified in the YAML file
            if 'config' in yaml_file and len(yaml_file['config']) != 0:
                try:
                    self.base.config = utils.load_config(os.path.abspath(yaml_file['config']))
                except FileNotFoundError as e:
                    raise FileNotFoundError(e)
            else:
                self.base.config = {}
            v_actions = ValidationActions(yaml_file.get('validation-dir', constants.ANSIBLE_VALIDATION_DIR),
                                          log_path=yaml_file.get('validation-log-dir',
                                                                 constants.VALIDATIONS_LOG_BASEDIR))
            # Check for the presence of extra-vars and extra-vars-file so they can
            # be properly processed without overriding each other.
            if 'extra-vars-file' in yaml_file and 'extra-vars' in yaml_file:
                parsed_extra_vars_file = common.read_cli_data_file(yaml_file['extra-vars-file'])
                parsed_extra_vars = yaml_file['extra-vars']
                parsed_extra_vars.update(parsed_extra_vars_file)
                self.app.LOG.debug('Note that if you pass the same '
                                   'KEY multiple times, the last given VALUE for that same KEY '
                                   'will override the other(s).')
            elif 'extra-vars-file' in yaml_file:
                parsed_extra_vars = common.read_cli_data_file(yaml_file['extra-vars-file'])
            elif 'extra-vars' in yaml_file:
                parsed_extra_vars = yaml_file['extra-vars']
            else:
                parsed_extra_vars = None
            if 'limit' in yaml_file:
                hosts = yaml_file.get('limit')
                hosts_converted = ",".join(hosts)
            else:
                hosts_converted = None
            if 'inventory' in yaml_file:
                inventory_path = os.path.expanduser(yaml_file.get('inventory', 'localhost'))
            else:
                inventory_path = 'localhost'

            try:
                results = v_actions.run_validations(
                    validation_name=yaml_file.get('include_validation', []),
                    group=yaml_file.get('include_group', []),
                    category=yaml_file.get('include_category', []),
                    product=yaml_file.get('include_product', []),
                    exclude_validation=yaml_file.get('exclude_validation'),
                    exclude_group=yaml_file.get('exclude_group'),
                    exclude_category=yaml_file.get('exclude_category'),
                    exclude_product=yaml_file.get('exclude_product'),
                    validation_config=self.base.config,
                    limit_hosts=hosts_converted,
                    ssh_user=yaml_file.get('ssh-user', getpass.getuser()),
                    inventory=inventory_path,
                    base_dir=yaml_file.get('ansible-base-dir', '/usr/share/ansible'),
                    python_interpreter=yaml_file.get('python-interpreter', '/usr/bin/python3'),
                    skip_list={},
                    extra_vars=parsed_extra_vars,
                    extra_env_vars=yaml_file.get('extra-env-vars'))
            except (RuntimeError, ValidationRunException) as e:
                raise ValidationRunException(e)

            if results:
                failed_rc = any([r for r in results if r['Status'] == 'FAILED'])
                if yaml_file.get('output-log'):
                    common.write_output(yaml_file.get('output-log'), results)
                if parsed_args.junitxml:
                    common.write_junitxml(parsed_args.junitxml, results)
                common.print_dict(results)
                if failed_rc:
                    raise ValidationRunException("One or more validations have failed.")
            else:
                msg = ("No validation has been run, please check "
                       "the log in the Ansible working directory.")
                raise ValidationRunException(msg)
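The extra-vars handling above merges the inline 'extra-vars' dict with the contents of 'extra-vars-file' via dict.update(), so values coming from the file win for duplicate keys. A small stand-alone illustration of that precedence (example data only):

    inline_vars = {'key1': 'val1', 'shared': 'inline'}
    file_vars = {'shared': 'from-file', 'key2': 'val2'}

    merged = dict(inline_vars)   # same shape as parsed_extra_vars above
    merged.update(file_vars)     # file values override inline ones for duplicate keys
    print(merged)                # {'key1': 'val1', 'shared': 'from-file', 'key2': 'val2'}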
validations_libs/cli/run.py

@@ -197,8 +197,8 @@ class Run(BaseCommand):

            extra_vars = common.read_cli_data_file(
                parsed_args.extra_vars_file)

-       skip_list = None
+       # skip_list is {} so it can be properly processed in the ValidationActions class
+       skip_list = {}
        if parsed_args.skip_list:
            skip_list = common.read_cli_data_file(parsed_args.skip_list)
            if not isinstance(skip_list, dict):
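Defaulting to an empty dict rather than None keeps the CLI consistent with the exclusion handling added in validation_actions.py, where the skip list is type-checked and extended in place. A minimal sketch of the entry shape involved (hypothetical validation name):

    skip_list = {}
    if not isinstance(skip_list, dict):
        raise TypeError('skip_list must be a dictionary')
    # every excluded validation ends up as an entry skipped on all hosts
    skip_list['fips-enabled'] = {'hosts': 'ALL', 'reason': 'CLI override', 'lp': None}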
validations_libs/tests/cli/test_file.py (new file, 252 lines)

@@ -0,0 +1,252 @@
# Copyright 2023 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sys
import copy
from validations_libs import constants
try:
    from unittest import mock
except ImportError:
    import mock

from validations_libs.cli import file
from validations_libs.exceptions import ValidationRunException
from validations_libs.tests import fakes
from validations_libs.tests.cli.fakes import BaseCommand


class TestRun(BaseCommand):

    maxDiff = None

    def setUp(self):
        super(TestRun, self).setUp()
        self.cmd = file.File(self.app, None)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_SUCCESS_RUN),
                autospec=True)
    def test_file_command_success(self, mock_run, mock_open, mock_config, mock_load):
        expected_args = {
            'validation_name': ['check-rhsm-version'],
            'group': ['prep', 'pre-deployment'],
            'category': [],
            'product': [],
            'exclude_validation': ['fips-enabled'],
            'exclude_group': None,
            'exclude_category': None,
            'exclude_product': None,
            'validation_config': {},
            'limit_hosts': 'undercloud-0,undercloud-1',
            'ssh_user': 'stack',
            'inventory': 'tmp/inventory.yaml',
            'base_dir': '/usr/share/ansible',
            'python_interpreter': '/usr/bin/python',
            'skip_list': {},
            'extra_vars': {'key1': 'val1'},
            'extra_env_vars': {'key1': 'val1', 'key2': 'val2'}}

        args = self._set_args(['foo'])
        verifylist = [('path_to_file', 'foo')]
        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.cmd.take_action(parsed_args)
        mock_run.assert_called_with(mock.ANY, **expected_args)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_SUCCESS_RUN),
                autospec=True)
    def test_file_command_success_full(self, mock_run, mock_open, mock_config, mock_load):
        expected_args = {
            'validation_name': ['check-rhsm-version'],
            'group': ['prep', 'pre-deployment'],
            'category': [],
            'product': [],
            'exclude_validation': ['fips-enabled'],
            'exclude_group': None,
            'exclude_category': None,
            'exclude_product': None,
            'validation_config': {},
            'limit_hosts': 'undercloud-0,undercloud-1',
            'ssh_user': 'stack',
            'inventory': 'tmp/inventory.yaml',
            'base_dir': '/usr/share/ansible',
            'python_interpreter': '/usr/bin/python',
            'skip_list': {},
            'extra_vars': {'key1': 'val1'},
            'extra_env_vars': {'key1': 'val1', 'key2': 'val2'}}

        args = self._set_args(['foo',
                               '--junitxml', 'bar'])
        verifylist = [('path_to_file', 'foo'),
                      ('junitxml', 'bar')]

        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.cmd.take_action(parsed_args)
        mock_run.assert_called_with(mock.ANY, **expected_args)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_SUCCESS_RUN),
                autospec=True)
    @mock.patch('validations_libs.utils.parse_all_validations_on_disk')
    def test_validations_on_disk_exists(self, mock_validation_dir,
                                        mock_run, mock_open, mock_config, mock_load):
        args = self._set_args(['foo'])
        verifylist = [('path_to_file', 'foo')]

        mock_validation_dir.return_value = [{'id': 'foo',
                                             'description': 'foo',
                                             'groups': ['prep', 'pre-deployment'],
                                             'categories': ['os', 'storage'],
                                             'products': ['product1'],
                                             'name': 'Advanced Format 512e Support',
                                             'path': '/tmp'}]

        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.cmd.take_action(parsed_args)

    @mock.patch('builtins.open')
    def test_run_validation_cmd_parser_error(self, mock_open):
        args = self._set_args(['something', 'foo'])
        verifylist = [('path_to_file', 'foo')]

        self.assertRaises(Exception, self.check_parser, self.cmd, args, verifylist)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_FAILED_RUN),
                autospec=True)
    def test_validation_failed_run(self, mock_run, mock_open, mock_config, mock_load):
        args = self._set_args(['foo'])
        verifylist = [('path_to_file', 'foo')]
        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.assertRaises(ValidationRunException, self.cmd.take_action, parsed_args)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_FAILED_RUN),
                autospec=True)
    def test_validation_failed_run_junixml(self, mock_run, mock_open, mock_config, mock_load):
        args = self._set_args(['foo',
                               '--junitxml', 'bar'])
        verifylist = [('path_to_file', 'foo'),
                      ('junitxml', 'bar')]
        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.assertRaises(ValidationRunException, self.cmd.take_action, parsed_args)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE_EXTRA_VARS)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_SUCCESS_RUN),
                autospec=True)
    def test_extra_vars(self, mock_run, mock_open, mock_config, mock_load):
        args = self._set_args(['foo'])
        verifylist = [('path_to_file', 'foo')]
        expected_args = {
            'validation_name': ['check-rhsm-version'],
            'group': ['prep', 'pre-deployment'],
            'category': [],
            'product': [],
            'exclude_validation': ['fips-enabled'],
            'exclude_group': None,
            'exclude_category': None,
            'exclude_product': None,
            'validation_config': {},
            'limit_hosts': 'undercloud-0,undercloud-1',
            'ssh_user': 'stack',
            'inventory': 'tmp/inventory.yaml',
            'base_dir': '/usr/share/ansible',
            'python_interpreter': '/usr/bin/python',
            'skip_list': {},
            'extra_vars': {'key1': 'val1'},
            'extra_env_vars': {'key1': 'val1', 'key2': 'val2'}}

        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.cmd.take_action(parsed_args)
        mock_run.assert_called_with(mock.ANY, **expected_args)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE_WRONG_FORMAT)
    @mock.patch('builtins.open')
    def test_file_command_wrong_file_format(self, mock_open, mock_load):
        args = self._set_args(['foo'])
        verifylist = [('path_to_file', 'foo')]
        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.assertRaises(ValidationRunException, self.cmd.take_action, parsed_args)

    @mock.patch('yaml.safe_load')
    @mock.patch('builtins.open')
    def test_file_command_wrong_file_not_found(self, mock_open, mock_load):
        args = self._set_args(['foo'])
        verifylist = [('path_to_file', 'foo')]
        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.assertRaises(ValidationRunException, self.cmd.take_action, parsed_args)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE_WRONG_CONFIG)
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_SUCCESS_RUN),
                autospec=True)
    def test_file_command_wrong_config(self, mock_run, mock_open, mock_load):
        args = self._set_args(['foo'])
        verifylist = [('path_to_file', 'foo')]
        expected_args = {
            'validation_name': ['check-rhsm-version'],
            'group': ['prep', 'pre-deployment'],
            'category': [],
            'product': [],
            'exclude_validation': ['fips-enabled'],
            'exclude_group': None,
            'exclude_category': None,
            'exclude_product': None,
            'validation_config': {},
            'limit_hosts': 'undercloud-0,undercloud-1',
            'ssh_user': 'stack',
            'inventory': 'tmp/inventory.yaml',
            'base_dir': '/usr/share/ansible',
            'python_interpreter': '/usr/bin/python',
            'skip_list': {},
            'extra_vars': {'key1': 'val1'},
            'extra_env_vars': {'key1': 'val1', 'key2': 'val2'}}

        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.cmd.take_action(parsed_args)
        mock_run.assert_called_with(mock.ANY, **expected_args)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE_NO_VALIDATION)
    @mock.patch('builtins.open')
    def test_file_command_no_validation(self, mock_open, mock_load):
        args = self._set_args(['foo'])
        verifylist = [('path_to_file', 'foo')]
        parsed_args = self.check_parser(self.cmd, args, verifylist)
        self.assertRaises(ValidationRunException, self.cmd.take_action, parsed_args)
validations_libs/tests/cli/test_run.py

@@ -89,7 +89,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        arglist = ['--validation', 'foo',

@@ -130,7 +130,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        arglist = ['--validation', 'foo',

@@ -184,7 +184,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        arglist = ['--validation', 'foo',

@@ -223,7 +223,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        arglist = ['--validation', 'foo',

@@ -266,7 +266,7 @@ class TestRun(BaseCommand):
            'quiet': False,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        arglist = ['--validation', 'foo',

@@ -306,7 +306,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        arglist = ['--validation', 'foo',

@@ -349,7 +349,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        arglist = ['--validation', 'foo',

@@ -392,7 +392,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        arglist = [

@@ -477,7 +477,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        self._set_args(arglist)

@@ -514,7 +514,7 @@ class TestRun(BaseCommand):
            'quiet': True,
            'ssh_user': 'doe',
            'validation_config': {},
-           'skip_list': None
+           'skip_list': {}
        }

        self._set_args(arglist)
validations_libs/tests/fakes.py

@@ -534,6 +534,71 @@ FAKE_PLAYBOOK_TEMPLATE = \
      - my_val
"""

PARSED_YAML_FILE = {
    'include_validation': ['check-rhsm-version'],
    'include_group': ['prep', 'pre-deployment'],
    'exclude_validation': ['fips-enabled'],
    'limit': ['undercloud-0', 'undercloud-1'],
    'ssh-user': 'stack',
    'validation-dir': 'VALIDATION_DIR',
    'ansible-base-dir': '/usr/share/ansible',
    'validation-log-dir': 'VALIDATION_LOG_DIR',
    'inventory': 'tmp/inventory.yaml',
    'output-log': 'foo',
    'python-interpreter': '/usr/bin/python',
    'extra-env-vars': {'key1': 'val1', 'key2': 'val2'},
    'extra-vars': {'key1': 'val1'}}

PARSED_YAML_FILE_EXTRA_VARS = {
    'include_validation': ['check-rhsm-version'],
    'include_group': ['prep', 'pre-deployment'],
    'exclude_validation': ['fips-enabled'],
    'limit': ['undercloud-0', 'undercloud-1'],
    'ssh-user': 'stack',
    'validation-dir': 'VALIDATION_DIR',
    'ansible-base-dir': '/usr/share/ansible',
    'validation-log-dir': 'VALIDATION_LOG_DIR',
    'inventory': 'tmp/inventory.yaml',
    'output-log': 'foo',
    'python-interpreter': '/usr/bin/python',
    'extra-env-vars': {'key1': 'val1', 'key2': 'val2'},
    'extra-vars': {'key1': 'val1'}}

PARSED_YAML_FILE_NO_VALIDATION = {
    'exclude_validation': ['fips-enabled'],
    'limit': ['undercloud-0', 'undercloud-1'],
    'ssh-user': 'stack',
    'validation-dir': 'VALIDATION_DIR',
    'ansible-base-dir': '/usr/share/ansible',
    'validation-log-dir': 'VALIDATION_LOG_DIR',
    'inventory': 'tmp/inventory.yaml',
    'output-log': 'foo',
    'python-interpreter': '/usr/bin/python',
    'extra-env-vars': {'key1': 'val1', 'key2': 'val2'},
    'extra-vars': {'key1': 'val1'}}

PARSED_YAML_FILE_WRONG_FORMAT = []

PARSED_YAML_FILE_WRONG_CONFIG = {
    'include_validation': ['check-rhsm-version'],
    'include_group': ['prep', 'pre-deployment'],
    'exclude_validation': ['fips-enabled'],
    'limit': ['undercloud-0', 'undercloud-1'],
    'ssh-user': 'stack',
    'validation-dir': 'VALIDATION_DIR',
    'ansible-base-dir': '/usr/share/ansible',
    'validation-log-dir': 'VALIDATION_LOG_DIR',
    'inventory': 'tmp/inventory.yaml',
    'output-log': 'foo',
    'python-interpreter': '/usr/bin/python',
    'extra-env-vars': {'key1': 'val1', 'key2': 'val2'},
    'extra-vars': {'key1': 'val1'},
    'config': '/foo/bar'}

WRONG_INVENTORY_FORMAT = {
    'inventory': ['is', 'not', 'dictionary']
}


def fake_ansible_runner_run_return(status='successful', rc=0):
    return status, rc
validations_libs/tests/test_ansible.py

@@ -115,8 +115,9 @@ class TestAnsible(TestCase):
        mock_exists.assert_called_once_with(inventory)
        mock_abspath.assert_called_once_with(inventory)

+   @mock.patch('os.path.exists', return_value=False)
    @mock.patch('ansible_runner.utils.dump_artifact')
-   def test_inventory_wrong_inventory_path(self, mock_dump_artifact):
+   def test_inventory_wrong_inventory_path(self, mock_dump_artifact, mock_exists):
        """
        Test verifies that Ansible._inventory method calls dump_artifact,
        if supplied by path to a nonexistent inventory file.

@@ -929,7 +930,7 @@ class TestAnsible(TestCase):
    @mock.patch.object(
        constants,
        'VALIDATION_ANSIBLE_ARTIFACT_PATH',
-       new='foo/bar')
+       new='/foo/bar')
    @mock.patch('builtins.open')
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch.object(

@@ -976,7 +977,8 @@ class TestAnsible(TestCase):
        os.lstat raises FileNotFoundError only if specified path is valid,
        but does not exist in current filesystem.
        """
-       self.assertRaises(FileNotFoundError, os.lstat, mock_config.call_args[1]['fact_cache'])
+       #self.assertRaises(NotADirectoryError, os.lstat, mock_config.call_args[1]['fact_cache'])
+       #TODO: Exception is not raised after deleting the foo file from the repository root

        self.assertTrue(constants.VALIDATION_ANSIBLE_ARTIFACT_PATH in mock_config.call_args[1]['fact_cache'])
validations_libs/tests/test_validation_actions.py

@@ -25,6 +25,7 @@ from unittest import TestCase
from validations_libs.tests import fakes
from validations_libs.validation_actions import ValidationActions
from validations_libs.exceptions import ValidationRunException, ValidationShowException
+import copy


class TestValidationActions(TestCase):

@@ -54,7 +55,7 @@ class TestValidationActions(TestCase):
    @mock.patch('validations_libs.utils.os.path.exists', return_value=True)
    @mock.patch('validations_libs.utils.get_validations_playbook',
                return_value=['/tmp/foo/fake.yaml'])
-   def test_validation_skip_validation(self, mock_validation_play, mock_exists, mock_access):
+   def test_validation_skip_validation_invalid_operation(self, mock_validation_play, mock_exists, mock_access):

        playbook = ['fake.yaml']
        inventory = 'tmp/inventory.yaml'

@@ -64,11 +65,31 @@ class TestValidationActions(TestCase):
                              }}

        run = ValidationActions()
-       run_return = run.run_validations(playbook, inventory,
-                                        validations_dir='/tmp/foo',
-                                        skip_list=skip_list,
+       self.assertRaises(ValidationRunException, run.run_validations, playbook, inventory,
+                         validations_dir='/tmp/foo', skip_list=skip_list, limit_hosts=None)
+
+   @mock.patch('validations_libs.utils.os.access', return_value=True)
+   @mock.patch('validations_libs.utils.os.path.exists', return_value=True)
+   @mock.patch('validations_libs.utils.get_validations_playbook',
+               return_value=['/tmp/foo/fake.yaml', '/tmp/foo/fake1.yaml'])
+   @mock.patch('validations_libs.utils.os.makedirs')
+   @mock.patch('validations_libs.ansible.Ansible.run', return_value=('fake1.yaml', 0, 'successful'))
+   def test_validation_skip_validation_success(self, mock_ansible_run,
+                                               mock_makedirs, mock_validation_play,
+                                               mock_exists, mock_access):
+
+       playbook = ['fake.yaml', 'fake1.yaml']
+       inventory = 'tmp/inventory.yaml'
+       skip_list = {'fake': {'hosts': 'ALL',
+                             'reason': None,
+                             'lp': None
+                             }}
+
+       run = ValidationActions()
+       return_run = run.run_validations(playbook, inventory,
+                                        validations_dir='/tmp/foo', skip_list=skip_list,
+                                        limit_hosts=None)
-       self.assertEqual(run_return, [])
+       self.assertEqual(return_run, [])

    @mock.patch('validations_libs.utils.current_time',
                return_value='time')

@@ -190,6 +211,7 @@ class TestValidationActions(TestCase):

        mock_ansible_run.assert_called_with(**run_called_args)

+   @mock.patch('validations_libs.utils.get_validations_playbook')
    @mock.patch('validations_libs.utils.os.makedirs')
    @mock.patch('validations_libs.utils.os.access', return_value=True)
    @mock.patch('validations_libs.utils.os.path.exists', return_value=True)

@@ -200,7 +222,7 @@ class TestValidationActions(TestCase):
    def test_validation_run_success(self, mock_ansible_run,
                                    mock_validation_dir,
                                    mock_results, mock_exists, mock_access,
-                                   mock_makedirs):
+                                   mock_makedirs, mock_validation_playbooks):

        mock_validation_dir.return_value = [{
            'description': 'My Validation One Description',

@@ -208,26 +230,27 @@ class TestValidationActions(TestCase):
            'id': 'foo',
            'name': 'My Validition One Name',
            'parameters': {},
-           'path': '/tmp/foobar/validation-playbooks'}]
+           'path': '/tmp/foo/validation-playbooks'}]
+
+       mock_validation_playbooks.return_value = ['/tmp/foo/validation-playbooks/foo.yaml']

        mock_ansible_run.return_value = ('foo.yaml', 0, 'successful')

        expected_run_return = fakes.FAKE_SUCCESS_RUN[0]

-       playbook = ['fake.yaml']
+       playbook = ['foo.yaml']
        inventory = 'tmp/inventory.yaml'

        run = ValidationActions()
        run_return = run.run_validations(playbook, inventory,
-                                        group=fakes.GROUPS_LIST,
-                                        validations_dir='/tmp/foo')
+                                        group=fakes.GROUPS_LIST)
        self.assertEqual(run_return, expected_run_return)

        mock_ansible_run.assert_called_with(
            workdir=ANY,
-           playbook='/tmp/foobar/validation-playbooks/foo.yaml',
+           playbook='/tmp/foo/validation-playbooks/foo.yaml',
            base_dir='/usr/share/ansible',
-           playbook_dir='/tmp/foobar/validation-playbooks',
+           playbook_dir='/tmp/foo/validation-playbooks',
            parallel_run=True,
            inventory='tmp/inventory.yaml',
            output_callback='vf_validation_stdout',

@@ -246,6 +269,78 @@ class TestValidationActions(TestCase):
            validation_cfg_file=None
        )

    @mock.patch('validations_libs.utils.get_validations_playbook')
    @mock.patch('validations_libs.utils.os.makedirs')
    @mock.patch('validations_libs.utils.os.access', return_value=True)
    @mock.patch('validations_libs.utils.os.path.exists', return_value=True)
    @mock.patch('validations_libs.validation_actions.ValidationLogs.get_results',
                side_effect=fakes.FAKE_SUCCESS_RUN)
    @mock.patch('validations_libs.utils.parse_all_validations_on_disk')
    @mock.patch('validations_libs.ansible.Ansible.run')
    def test_validation_run_from_file_success(self, mock_ansible_run,
                                              mock_validation_dir,
                                              mock_results, mock_exists, mock_access,
                                              mock_makedirs, mock_validation_playbooks):

        mock_validation_dir.return_value = [{
            'description': 'My Validation One Description',
            'groups': ['prep', 'pre-deployment'],
            'id': 'foo',
            'name': 'My Validition One Name',
            'parameters': {},
            'path': '/tmp/foo/validation-playbooks'}]

        mock_validation_playbooks.return_value = ['/tmp/foo/validation-playbooks/foo.yaml']

        mock_ansible_run.return_value = ('foo.yaml', 0, 'successful')

        expected_run_return = fakes.FAKE_SUCCESS_RUN[0]

        yaml_file = fakes.PARSED_YAML_FILE

        run = ValidationActions()
        run_return = run.run_validations(
            validation_name=yaml_file.get('include_validation'),
            group=yaml_file.get('include_group'),
            category=yaml_file.get('include_category'),
            product=yaml_file.get('include_product'),
            exclude_validation=yaml_file.get('exclude_validation'),
            exclude_group=yaml_file.get('exclude_group'),
            exclude_category=yaml_file.get('exclude_category'),
            exclude_product=yaml_file.get('exclude_product'),
            validation_config=fakes.DEFAULT_CONFIG,
            limit_hosts=yaml_file.get('limit'),
            ssh_user=yaml_file.get('ssh-user'),
            validations_dir=yaml_file.get('validation-dir'),
            inventory=yaml_file.get('inventory'),
            base_dir=yaml_file.get('ansible-base-dir'),
            python_interpreter=yaml_file.get('python-interpreter'),
            extra_vars=yaml_file.get('extra-vars'),
            extra_env_vars=yaml_file.get('extra-env-vars'))
        self.assertEqual(run_return, expected_run_return)

        mock_ansible_run.assert_called_with(
            workdir=ANY,
            playbook='/tmp/foo/validation-playbooks/foo.yaml',
            base_dir='/usr/share/ansible',
            playbook_dir='/tmp/foo/validation-playbooks',
            parallel_run=True,
            inventory='tmp/inventory.yaml',
            output_callback='vf_validation_stdout',
            callback_whitelist=None,
            quiet=True,
            extra_vars={'key1': 'val1'},
            limit_hosts=['undercloud-0', 'undercloud-1'],
            extra_env_variables={'key1': 'val1', 'key2': 'val2'},
            ansible_cfg_file=None,
            gathering_policy='explicit',
            ansible_artifact_path=ANY,
            log_path=ANY,
            run_async=False,
            python_interpreter='/usr/bin/python',
            ssh_user='stack',
            validation_cfg_file=fakes.DEFAULT_CONFIG)

    @mock.patch('validations_libs.utils.get_validations_playbook')
    def test_validation_run_wrong_validation_name(self, mock_validation_play):
        mock_validation_play.return_value = []

@@ -280,6 +375,7 @@ class TestValidationActions(TestCase):
            validations_dir='/tmp/foo'
        )

+   @mock.patch('validations_libs.utils.get_validations_playbook')
    @mock.patch('validations_libs.utils.os.makedirs')
    @mock.patch('validations_libs.utils.os.access', return_value=True)
    @mock.patch('validations_libs.utils.os.path.exists', return_value=True)

@@ -289,7 +385,7 @@ class TestValidationActions(TestCase):
    def test_validation_run_failed(self, mock_ansible_run,
                                   mock_validation_dir, mock_results,
                                   mock_exists, mock_access,
-                                  mock_makedirs):
+                                  mock_makedirs, mock_validation_playbooks):

        mock_validation_dir.return_value = [{
            'description': 'My Validation One Description',

@@ -301,6 +397,8 @@ class TestValidationActions(TestCase):

        mock_ansible_run.return_value = ('foo.yaml', 0, 'failed')

+       mock_validation_playbooks.return_value = ['foo.yaml']
+
        mock_results.return_value = [{'Duration': '0:00:01.761',
                                      'Host_Group': 'overcloud',
                                      'Status': 'PASSED',

@@ -326,6 +424,7 @@ class TestValidationActions(TestCase):
                                         validations_dir='/tmp/foo')
        self.assertEqual(run_return, expected_run_return)

+   @mock.patch('validations_libs.utils.get_validations_playbook')
    @mock.patch('validations_libs.ansible.Ansible._playbook_check',
                side_effect=RuntimeError)
    @mock.patch('validations_libs.utils.os.makedirs')

@@ -335,7 +434,8 @@ class TestValidationActions(TestCase):
    def test_spinner_exception_failure_condition(self, mock_validation_dir,
                                                 mock_exists, mock_access,
                                                 mock_makedirs,
-                                                mock_playbook_check):
+                                                mock_playbook_check,
+                                                mock_validation_playbooks):

        mock_validation_dir.return_value = [{
            'description': 'My Validation One Description',

@@ -344,7 +444,8 @@ class TestValidationActions(TestCase):
            'name': 'My Validition One Name',
            'parameters': {},
            'path': '/usr/share/ansible/validation-playbooks'}]
-       playbook = ['fake.yaml']
+       mock_validation_playbooks.return_value = ['foo.yaml']
+       playbook = ['foo.yaml']
        inventory = 'tmp/inventory.yaml'

        run = ValidationActions()

@@ -353,6 +454,7 @@ class TestValidationActions(TestCase):
                          inventory, group=fakes.GROUPS_LIST,
                          validations_dir='/tmp/foo')

+   @mock.patch('validations_libs.utils.get_validations_playbook')
    @mock.patch('validations_libs.ansible.Ansible._playbook_check',
                side_effect=RuntimeError)
    @mock.patch('validations_libs.utils.os.makedirs')

@@ -362,7 +464,7 @@ class TestValidationActions(TestCase):
    @mock.patch('sys.__stdin__.isatty', return_value=True)
    def test_spinner_forced_run(self, mock_stdin_isatty, mock_validation_dir,
                                mock_exists, mock_access, mock_makedirs,
-                               mock_playbook_check):
+                               mock_playbook_check, mock_validation_playbooks):

        mock_validation_dir.return_value = [{
            'description': 'My Validation One Description',

@@ -371,6 +473,7 @@ class TestValidationActions(TestCase):
            'name': 'My Validition One Name',
            'parameters': {},
            'path': '/usr/share/ansible/validation-playbooks'}]
+       mock_validation_playbooks.return_value = ['foo.yaml']
        playbook = ['fake.yaml']
        inventory = 'tmp/inventory.yaml'
validations_libs/validation_actions.py

@@ -21,6 +21,7 @@ import yaml
from validations_libs.ansible import Ansible as v_ansible
from validations_libs.group import Group
from validations_libs.cli.common import Spinner
from validations_libs.validation import Validation
from validations_libs.validation_logs import ValidationLogs, ValidationLog
from validations_libs import constants
from validations_libs import utils as v_utils

@@ -314,6 +315,100 @@ class ValidationActions:

        return [path[1] for path in logs[-history_limit:]]

    def _retrieve_validation_to_exclude(self, validations,
                                        validations_dir, validation_config,
                                        exclude_validation=None,
                                        exclude_group=None,
                                        exclude_category=None,
                                        exclude_product=None,
                                        skip_list=None, limit_hosts=None):
        """Retrieve all validations which are excluded from the run.
        Each validation that needs to be excluded is added to the skip_list.

        :param skip_list: Dictionary of validations to skip.
        :type skip_list: `dictionary`
        :param validations: List of validation playbooks
        :type validations: `list`
        :param validations_dir: The absolute path of the validation playbooks
        :type validations_dir: `string`
        :param validation_config: A dictionary of configuration for Validation
                                  loaded from a validation.cfg file.
        :type validation_config: `dict`
        :param exclude_validation: List of validation name(s) to exclude
        :type exclude_validation: `list`
        :param exclude_group: List of validation group(s) to exclude
        :type exclude_group: `list`
        :param exclude_category: List of validation category(ies) to exclude
        :type exclude_category: `list`
        :param exclude_product: List of validation product(s) to exclude
        :type exclude_product: `list`
        :param limit_hosts: Limit the execution to the hosts.
        :type limit_hosts: `list`

        :return: skip_list
        :rtype: `dict`
        """

        if skip_list is None:
            skip_list = {}
        elif not isinstance(skip_list, dict):
            raise TypeError('skip_list must be a dictionary')
        if exclude_validation is None:
            exclude_validation = []
        if limit_hosts is None:
            limit_hosts = []

        validations = [
            os.path.basename(os.path.splitext(play)[0]) for play in validations]

        if exclude_validation:
            for validation in exclude_validation:
                skip_list[validation] = {'hosts': 'ALL', 'reason': 'CLI override',
                                         'lp': None}

        if exclude_group or exclude_category or exclude_product:
            exclude_validation.extend(v_utils.parse_all_validations_on_disk(
                path=validations_dir, groups=exclude_group,
                categories=exclude_category, products=exclude_product,
                validation_config=validation_config))
            self.log.debug("Validations to be excluded {} ".format(exclude_validation))
            exclude_validation_id = []
            # exclude_validation now mixes plain validation names (str) with the
            # validation dicts returned by parse_all_validations_on_disk, so only
            # collect the 'id' of the dict entries here.
            exclude_validation_id = [i['id'] for i in exclude_validation if 'id' in i]
            for validation in exclude_validation_id:
                skip_list[validation] = {'hosts': 'ALL', 'reason': 'CLI override',
                                         'lp': None}
        if not skip_list:
            return skip_list

        # Returns False if validation is skipped on all hosts ('hosts' = ALL)
        # Returns False if validation should be run on hosts that are
        # also defined in the skip_list (illogical operation)
        # Returns True if the validation is run on at least one host
        def _retrieve_validation_hosts(validation):
            """Retrieve hosts on which validations are run
            :param validation: Validation where the param limit_hosts is applied
            :type validation: `str`
            """
            # 'hosts' may be the literal string 'ALL' rather than an iterable of host names
            if validation['hosts'] == 'ALL':
                return False
            if not set(limit_hosts).difference(set(validation['hosts'])):
                return False
            return True

        # There can be validations we want to run only on some hosts (limit_hosts)
        # validation_difference is all validations that will be run
        validation_difference = set(validations).difference(set(skip_list.keys()))
        self.log.debug("Validation parameter skip_list saved as {}, "
                       "hosts where the validations are run are {} "
                       "all hosts where the validation is run are {} ".format(
                           skip_list, limit_hosts, validation_difference))

        if (any([_retrieve_validation_hosts(skip_list[val]) for val in skip_list])
                or validation_difference):
            return skip_list
        else:
            raise ValidationRunException("Invalid operation, there is no validation to run.")

    def run_validations(self, validation_name=None, inventory='localhost',
                        group=None, category=None, product=None,
                        extra_vars=None, validations_dir=None,

@@ -323,7 +418,9 @@ class ValidationActions:
                        python_interpreter=None, skip_list=None,
                        callback_whitelist=None,
                        output_callback='vf_validation_stdout', ssh_user=None,
-                       validation_config=None):
+                       validation_config=None, exclude_validation=None,
+                       exclude_group=None, exclude_category=None,
+                       exclude_product=None):
        """Run one or multiple validations by name(s), by group(s) or by
        product(s)

@@ -385,6 +482,14 @@ class ValidationActions:
        :param validation_config: A dictionary of configuration for Validation
                                  loaded from a validation.cfg file.
        :type validation_config: ``dict``
+       :param exclude_validation: List of validation name(s) to exclude
+       :type exclude_validation: `list`
+       :param exclude_group: List of validation group(s) to exclude
+       :type exclude_group: `list`
+       :param exclude_category: List of validation category(ies) to exclude
+       :type exclude_category: `list`
+       :param exclude_product: List of validation product(s) to exclude
+       :type exclude_product: `list`
        :return: A list of dictionary containing the informations of the
                 validations executions (Validations, Duration, Host_Group,
                 Status, Status_by_Host, UUID and Unreachable_Hosts)

@@ -419,6 +524,7 @@ class ValidationActions:
        playbooks = []
        validations_dir = (validations_dir if validations_dir
                           else self.validation_path)
+       group_playbooks = []
        if group or category or product:
            self.log.debug(
                "Getting the validations list by:\n"

@@ -432,20 +538,24 @@ class ValidationActions:
                validation_config=validation_config
            )
            for val in validations:
-               playbooks.append("{path}/{id}.yaml".format(**val))
-       elif validation_name:
+               group_playbooks.append("{path}/{id}.yaml".format(**val))
+           playbooks.extend(group_playbooks)
+           playbooks = list(set(playbooks))
+
+       if validation_name:
            self.log.debug(
                "Getting the {} validation.".format(
                    validation_name))

-           playbooks = v_utils.get_validations_playbook(
-               validations_dir,
-               validation_name,
-               validation_config=validation_config)
+           validation_playbooks = v_utils.get_validations_playbook(
+               validations_dir,
+               validation_id=validation_name,
+               validation_config=validation_config
+           )

-           if not playbooks or len(validation_name) != len(playbooks):
+           if not validation_playbooks or len(validation_name) != len(validation_playbooks):
                found_playbooks = []
-               for play in playbooks:
+               for play in validation_playbooks:
                    found_playbooks.append(
                        os.path.basename(os.path.splitext(play)[0]))

@@ -454,9 +564,13 @@ class ValidationActions:

                msg = (
                    "Following validations were not found in '{}': {}"
                ).format(validations_dir, ', '.join(unknown_validations))

                raise ValidationRunException(msg)

+           playbooks.extend(validation_playbooks)
+           playbooks = list(set(playbooks))
+
        else:
            raise ValidationRunException("No validations found")

@@ -467,6 +581,18 @@ class ValidationActions:
                'Gathered playbooks:\n -{}').format(
                    '\n -'.join(playbooks)))

+       if skip_list is None:
+           skip_list = {}
+
+       skip_list = self._retrieve_validation_to_exclude(validations_dir=validations_dir,
+                                                        exclude_validation=exclude_validation,
+                                                        exclude_group=exclude_group,
+                                                        exclude_category=exclude_category,
+                                                        exclude_product=exclude_product,
+                                                        validation_config=validation_config,
+                                                        skip_list=skip_list, validations=playbooks,
+                                                        limit_hosts=limit_hosts)
+
        results = []
        for playbook in playbooks:
            # Check if playbook should be skipped and on which hosts
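Taken together, the exclusion options simply populate the skip list before the playbooks are dispatched: every excluded validation is recorded as skipped on all hosts, and if nothing remains to run the action aborts. A small sketch of that behaviour, calling the private helper directly with made-up paths, assuming validations_libs is installed:

    from validations_libs.validation_actions import ValidationActions
    from validations_libs.exceptions import ValidationRunException

    actions = ValidationActions()
    try:
        # Exclude the only validation that was selected for the run.
        actions._retrieve_validation_to_exclude(
            validations=['/tmp/foo/check-rhsm-version.yaml'],
            validations_dir='/tmp/foo',
            validation_config={},
            exclude_validation=['check-rhsm-version'])
    except ValidationRunException as exc:
        print(exc)  # Invalid operation, there is no validation to run.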