diff --git a/distributedcloud/dccommon/subcloud_install.py b/distributedcloud/dccommon/subcloud_install.py index a9075feea..5f3828e06 100644 --- a/distributedcloud/dccommon/subcloud_install.py +++ b/distributedcloud/dccommon/subcloud_install.py @@ -732,24 +732,17 @@ class SubcloudInstall(object): # create the install override file self.create_install_override_file(override_path, payload) - def install(self, log_file_dir, install_command, abortable=False): + def install(self, log_file_dir, install_command): LOG.info("Start remote install %s", self.name) log_file = os.path.join(log_file_dir, self.name) + '_playbook_output.log' try: # Since this is a long-running task we want to register # for cleanup on process restart/SWACT. - if abortable: - # Install phase of subcloud deployment - run_ansible = dccommon_utils.RunAnsible() - aborted = run_ansible.exec_playbook(log_file, install_command, self.name) - # Returns True if the playbook was aborted and False otherwise - return aborted - else: - dccommon_utils.run_playbook(log_file, install_command) - # Always return false because this playbook execution - # method cannot be aborted - return False + ansible = dccommon_utils.AnsiblePlaybook(self.name) + aborted = ansible.run_playbook(log_file, install_command) + # Returns True if the playbook was aborted and False otherwise + return aborted except exceptions.PlaybookExecutionFailed: msg = ("Failed to install %s, check individual " "log at %s or run %s for details" diff --git a/distributedcloud/dccommon/tests/unit/test_utils.py b/distributedcloud/dccommon/tests/unit/test_utils.py index 78e6ebccc..0434a27af 100644 --- a/distributedcloud/dccommon/tests/unit/test_utils.py +++ b/distributedcloud/dccommon/tests/unit/test_utils.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2022 Wind River Systems, Inc. +# Copyright (c) 2022-2023 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -8,6 +8,9 @@ from dccommon.exceptions import PlaybookExecutionTimeout from dccommon.tests import base from dccommon import utils +FAKE_SUBCLOUD_NAME = 'subcloud1' +FAKE_LOG_FILE = '/dev/null' + class TestUtils(base.DCCommonTestCase): @@ -17,26 +20,29 @@ class TestUtils(base.DCCommonTestCase): def tearDown(self): super(TestUtils, self).tearDown() - def test_run_playbook(self): + def test_exec_playbook(self): # no timeout: testscript = ['dccommon/tests/unit/test_utils_script.sh', '1'] - utils.run_playbook('/dev/null', testscript) + ansible = utils.AnsiblePlaybook(FAKE_SUBCLOUD_NAME) + ansible.run_playbook(FAKE_LOG_FILE, testscript) - def test_run_playbook_timeout(self): + def test_exec_playbook_timeout(self): testscript = ['dccommon/tests/unit/test_utils_script.sh', '30'] + ansible = utils.AnsiblePlaybook(FAKE_SUBCLOUD_NAME) self.assertRaises(PlaybookExecutionTimeout, - utils.run_playbook, - '/dev/null', + ansible.run_playbook, + FAKE_LOG_FILE, testscript, timeout=2) - def test_run_playbook_timeout_requires_kill(self): + def test_exec_playbook_timeout_requires_kill(self): # This option ignores a regular TERM signal, and requires a # kill -9 (KILL signal) to terminate. 
We're using this to simulate # a hung process script = ['dccommon/tests/unit/test_utils_script.sh', '30', 'TERM'] + ansible = utils.AnsiblePlaybook(FAKE_SUBCLOUD_NAME) self.assertRaises(PlaybookExecutionTimeout, - utils.run_playbook, - '/dev/null', + ansible.run_playbook, + FAKE_LOG_FILE, script, timeout=2) diff --git a/distributedcloud/dccommon/utils.py b/distributedcloud/dccommon/utils.py index 42a0d7de4..444203f93 100644 --- a/distributedcloud/dccommon/utils.py +++ b/distributedcloud/dccommon/utils.py @@ -89,7 +89,7 @@ class memoized(object): return functools.partial(self.__call__, obj) -class RunAnsible(object): +class AnsiblePlaybook(object): """Class to run Ansible playbooks with the abort option Approach: @@ -110,30 +110,33 @@ class RunAnsible(object): abort_status = {} lock = threading.Lock() - def _unregister_subcloud(self, subcloud_name): - with RunAnsible.lock: - if RunAnsible.abort_status.get(subcloud_name): - del RunAnsible.abort_status[subcloud_name] + def __init__(self, subcloud_name: str): + self.subcloud_name = subcloud_name - def run_abort(self, subcloud_name, timeout=600): + def _unregister_subcloud(self): + with AnsiblePlaybook.lock: + if AnsiblePlaybook.abort_status.get(self.subcloud_name): + del AnsiblePlaybook.abort_status[self.subcloud_name] + + def run_abort(self, timeout=600): """Set abort status for a subcloud. :param subcloud_name: Name of the subcloud param timeout: Timeout in seconds. """ - with RunAnsible.lock: - RunAnsible.abort_status[subcloud_name]['abort'] = True + with AnsiblePlaybook.lock: + AnsiblePlaybook.abort_status[self.subcloud_name]['abort'] = True unabortable_flag = os.path.join(consts.ANSIBLE_OVERRIDES_PATH, - '.%s_deploy_not_abortable' % subcloud_name) - subp = RunAnsible.abort_status[subcloud_name]['subp'] + '.%s_deploy_not_abortable' % self.subcloud_name) + subp = AnsiblePlaybook.abort_status[self.subcloud_name]['subp'] while os.path.exists(unabortable_flag) and timeout > 0: time.sleep(1) timeout -= 1 kill_subprocess_group(subp) return True - def exec_playbook(self, log_file, playbook_command, subcloud_name, - timeout=None, register_cleanup=True): + def run_playbook(self, log_file, playbook_command, timeout=None, + register_cleanup=True): """Run ansible playbook via subprocess. 
:param log_file: Logs output to file @@ -166,7 +169,7 @@ class RunAnsible(object): # Remove unabortable flag created by the playbook # if present from previous executions unabortable_flag = os.path.join(consts.ANSIBLE_OVERRIDES_PATH, - '.%s_deploy_not_abortable' % subcloud_name) + '.%s_deploy_not_abortable' % self.subcloud_name) if os.path.exists(unabortable_flag): os.remove(unabortable_flag) @@ -178,8 +181,8 @@ class RunAnsible(object): try: if register_cleanup: SubprocessCleanup.register_subprocess_group(subp) - with RunAnsible.lock: - RunAnsible.abort_status[subcloud_name] = { + with AnsiblePlaybook.lock: + AnsiblePlaybook.abort_status[self.subcloud_name] = { 'abort': False, 'subp': subp} @@ -202,8 +205,8 @@ class RunAnsible(object): # 5: Playbook failed while waiting to be aborted (process exited) # - playbook_failure is True with subp_rc != 0, # aborted is True, unabortable_flag_exists is False - with RunAnsible.lock: - aborted = RunAnsible.abort_status[subcloud_name]['abort'] + with AnsiblePlaybook.lock: + aborted = AnsiblePlaybook.abort_status[self.subcloud_name]['abort'] unabortable_flag_exists = os.path.exists(unabortable_flag) playbook_failure = (subp_rc != 0 and (not aborted or unabortable_flag_exists)) @@ -226,7 +229,7 @@ class RunAnsible(object): f_out_log.flush() if register_cleanup: SubprocessCleanup.unregister_subprocess_group(subp) - self._unregister_subcloud(subcloud_name) + self._unregister_subcloud() except PlaybookExecutionFailed: raise @@ -255,88 +258,6 @@ def _strip_password_from_command(script_command): return logged_command -# TODO(vgluzrom): remove this function and replace all calls -# with RunAnsible class -def run_playbook(log_file, playbook_command, - timeout=None, register_cleanup=True): - """Run ansible playbook via subprocess. - - log_file: Logs output to file - timeout: Timeout in seconds. Raises PlaybookExecutionTimeout on timeout - register_cleanup: Register the subprocess group for cleanup on shutdown, - if the underlying service supports cleanup. - """ - exec_env = os.environ.copy() - exec_env["ANSIBLE_LOG_PATH"] = "/dev/null" - - if timeout: - # Invoke ansible-playbook via the 'timeout' command. - # Using --kill-after=5s which will force a kill -9 if the process - # hasn't terminated within 5s: - timeout_log_str = " (timeout: %ss)" % timeout - playbook_command = ["/usr/bin/timeout", "--kill-after=5s", - "%ss" % timeout] + playbook_command - else: - timeout_log_str = '' - - with open(log_file, "a+") as f_out_log: - try: - logged_playbook_command = \ - _strip_password_from_command(playbook_command) - txt = "%s Executing playbook command%s: %s\n" \ - % (datetime.today().strftime('%Y-%m-%d-%H:%M:%S'), - timeout_log_str, - logged_playbook_command) - f_out_log.write(txt) - f_out_log.flush() - - if register_cleanup: - # Use the same process group / session for all children - # This makes it easier to kill the entire process group - # on cleanup - preexec_fn = os.setsid - else: - preexec_fn = None - - # TODO(kmacleod) future considerations: - # - In python3, this code can be simplified to use the new - # subprocess.run(timeout=val) method or Popen with - # subp.wait(timeout=val) - # - Beginning with ansible 2.10, we can introduce the - # ANSIBLE_TASK_TIMEOUT value to set a task-level timeout. 
- # This is not available in our current version of ansible (2.7.5) - subp = subprocess.Popen(playbook_command, - stdout=f_out_log, - stderr=f_out_log, - env=exec_env, - preexec_fn=preexec_fn) - try: - if register_cleanup: - SubprocessCleanup.register_subprocess_group(subp) - - subp.communicate() # wait for process to exit - - if timeout and subp.returncode == TIMEOUT_EXITCODE: - f_out_log.write( - "%s TIMEOUT (%ss) - playbook is terminated\n" % - (datetime.today().strftime('%Y-%m-%d-%H:%M:%S'), timeout) - ) - raise PlaybookExecutionTimeout(playbook_cmd=playbook_command, - timeout=timeout) - if subp.returncode != 0: - raise PlaybookExecutionFailed(playbook_cmd=playbook_command) - finally: - f_out_log.flush() - if register_cleanup: - SubprocessCleanup.unregister_subprocess_group(subp) - - except PlaybookExecutionFailed: - raise - except Exception as ex: - LOG.error(str(ex)) - raise - - def is_token_expiring_soon(token, stale_token_duration_min=STALE_TOKEN_DURATION_MIN, stale_token_duration_max=STALE_TOKEN_DURATION_MAX, diff --git a/distributedcloud/dcmanager/api/controllers/v1/phased_subcloud_deploy.py b/distributedcloud/dcmanager/api/controllers/v1/phased_subcloud_deploy.py index 1b4013e71..bc72167cb 100644 --- a/distributedcloud/dcmanager/api/controllers/v1/phased_subcloud_deploy.py +++ b/distributedcloud/dcmanager/api/controllers/v1/phased_subcloud_deploy.py @@ -85,11 +85,7 @@ VALID_STATES_FOR_DEPLOY_CONFIG = ( consts.DEPLOY_STATE_PRE_CONFIG_FAILED, consts.DEPLOY_STATE_CONFIG_FAILED, consts.DEPLOY_STATE_BOOTSTRAPPED, - consts.DEPLOY_STATE_CONFIG_ABORTED, - # The next two states are needed due to upgrade scenario: - # TODO(gherzman): remove states when they are no longer needed - consts.DEPLOY_STATE_DEPLOY_FAILED, - consts.DEPLOY_STATE_DEPLOY_PREP_FAILED, + consts.DEPLOY_STATE_CONFIG_ABORTED ) VALID_STATES_FOR_DEPLOY_ABORT = ( @@ -205,6 +201,11 @@ class PhasedSubcloudDeployController(object): pecan.abort(400, _('Subcloud deploy status must be either: %s') % allowed_states_str) + initial_deployment = psd_common.is_initial_deployment(subcloud.name) + if not initial_deployment: + pecan.abort(400, _('The deploy install command can only be used ' + 'during initial deployment.')) + payload['software_version'] = payload.get('release', subcloud.software_version) psd_common.populate_payload_with_pre_existing_data( payload, subcloud, SUBCLOUD_INSTALL_GET_FILE_CONTENTS) @@ -216,7 +217,7 @@ class PhasedSubcloudDeployController(object): # version. Update the deploy status as pre-install. 
self.dcmanager_rpc_client.subcloud_deploy_install( - context, subcloud.id, payload) + context, subcloud.id, payload, initial_deployment=True) subcloud_dict = db_api.subcloud_db_model_to_dict(subcloud) subcloud_dict['deploy-status'] = consts.DEPLOY_STATE_PRE_INSTALL subcloud_dict['software-version'] = payload['software_version'] @@ -237,32 +238,49 @@ class PhasedSubcloudDeployController(object): % valid_states_str) has_bootstrap_values = consts.BOOTSTRAP_VALUES in request.POST - payload = {} + + payload = psd_common.get_request_data( + request, subcloud, SUBCLOUD_BOOTSTRAP_GET_FILE_CONTENTS) # Try to load the existing override values override_file = psd_common.get_config_file_path(subcloud.name) if os.path.exists(override_file): - psd_common.populate_payload_with_pre_existing_data( - payload, subcloud, SUBCLOUD_BOOTSTRAP_GET_FILE_CONTENTS) + if not has_bootstrap_values: + psd_common.populate_payload_with_pre_existing_data( + payload, subcloud, SUBCLOUD_BOOTSTRAP_GET_FILE_CONTENTS) + else: + psd_common.update_payload_from_overrides_file( + payload, subcloud.name, [consts.BOOTSTRAP_ADDRESS]) + payload['software_version'] = subcloud.software_version elif not has_bootstrap_values: msg = _("Required bootstrap-values file was not provided and it was" " not previously available at %s") % (override_file) pecan.abort(400, msg) - request_data = psd_common.get_request_data( - request, subcloud, SUBCLOUD_BOOTSTRAP_GET_FILE_CONTENTS) - - # Update the existing values with new ones from the request - payload.update(request_data) - psd_common.pre_deploy_bootstrap(context, payload, subcloud, has_bootstrap_values) try: # Ask dcmanager-manager to bootstrap the subcloud. self.dcmanager_rpc_client.subcloud_deploy_bootstrap( - context, subcloud.id, payload) - return db_api.subcloud_db_model_to_dict(subcloud) + context, subcloud.id, payload, initial_deployment=True) + + # Change the response to correctly display the values + # that will be updated on the manager. 
+ subcloud_dict = db_api.subcloud_db_model_to_dict(subcloud) + subcloud_dict['deploy-status'] = consts.DEPLOY_STATE_PRE_BOOTSTRAP + subcloud_dict['description'] = payload.get("description", + subcloud.description) + subcloud_dict['location'] = payload.get("location", subcloud.location) + subcloud_dict['management-subnet'] = utils.get_management_subnet(payload) + subcloud_dict['management-gateway-ip'] = \ + utils.get_management_gateway_address(payload) + subcloud_dict['management-start-ip'] = \ + utils.get_management_start_address(payload) + subcloud_dict['management-end-ip'] = utils.get_management_end_address(payload) + subcloud_dict['systemcontroller-gateway-ip'] = payload.get( + "systemcontroller_gateway_address", subcloud.systemcontroller_gateway_ip) + return subcloud_dict except RemoteError as e: pecan.abort(httpclient.UNPROCESSABLE_ENTITY, e.value) @@ -290,9 +308,12 @@ class PhasedSubcloudDeployController(object): psd_common.validate_sysadmin_password(payload) + psd_common.update_payload_from_overrides_file(payload, subcloud.name, + [consts.BOOTSTRAP_ADDRESS]) + try: self.dcmanager_rpc_client.subcloud_deploy_config( - context, subcloud.id, payload) + context, subcloud.id, payload, initial_deployment=True) subcloud_dict = db_api.subcloud_db_model_to_dict(subcloud) subcloud_dict['deploy-status'] = consts.DEPLOY_STATE_PRE_CONFIG return subcloud_dict @@ -334,6 +355,11 @@ class PhasedSubcloudDeployController(object): 'of the following states: %s') % allowed_states_str) + initial_deployment = psd_common.is_initial_deployment(subcloud.name) + if not initial_deployment: + pecan.abort(400, _('The subcloud can only be aborted during ' + 'initial deployment.')) + try: self.dcmanager_rpc_client.subcloud_deploy_abort( context, subcloud.id, subcloud.deploy_status) @@ -345,7 +371,7 @@ class PhasedSubcloudDeployController(object): pecan.abort(422, e.value) except Exception: LOG.exception("Unable to abort subcloud %s deployment" % subcloud.name) - pecan.abort(500, _('Unable to abort subcloud deploy')) + pecan.abort(500, _('Unable to abort subcloud deployment')) def _deploy_resume(self, context: RequestContext, request: pecan.Request, subcloud): @@ -355,6 +381,11 @@ class PhasedSubcloudDeployController(object): pecan.abort(400, _('Subcloud deploy status must be either: %s') % allowed_states_str) + initial_deployment = psd_common.is_initial_deployment(subcloud.name) + if not initial_deployment: + pecan.abort(400, _('The subcloud can only be resumed during ' + 'initial deployment.')) + # Since both install and config are optional phases, # it's necessary to check if they should be executed config_file = psd_common.get_config_file_path(subcloud.name, @@ -371,18 +402,22 @@ class PhasedSubcloudDeployController(object): if deploy_states_to_run == [CONFIG] and not has_config_values: msg = _("Only deploy phase left is deploy config. " "Required %s file was not provided and it was not " - "previously available.") % consts.DEPLOY_CONFIG + "previously available. If manually configuring the " + "subcloud, please run 'dcmanager subcloud deploy " + "complete'") % consts.DEPLOY_CONFIG pecan.abort(400, msg) # Since the subcloud can be installed manually and the config is optional, # skip those phases if the user doesn't provide the install or config values # and they are not available from previous executions. + # Add the deploy complete phase if deploy config is not going to be executed. 
files_for_resume = [] for state in deploy_states_to_run: if state == INSTALL and not has_install_values: deploy_states_to_run.remove(state) elif state == CONFIG and not has_config_values: deploy_states_to_run.remove(state) + deploy_states_to_run.append(COMPLETE) else: files_for_resume.extend(FILES_MAPPING[state]) @@ -393,6 +428,8 @@ class PhasedSubcloudDeployController(object): if INSTALL in deploy_states_to_run: payload['software_version'] = payload.get('release', subcloud.software_version) else: + LOG.debug('Disregarding release parameter for %s as installation is complete.' + % subcloud.name) payload['software_version'] = subcloud.software_version # Need to remove bootstrap_values from the list of files to populate @@ -402,6 +439,11 @@ class PhasedSubcloudDeployController(object): not in FILES_MAPPING[BOOTSTRAP]] psd_common.populate_payload_with_pre_existing_data( payload, subcloud, files_for_resume) + # Update payload with bootstrap-address from overrides file + # if not present already + if consts.BOOTSTRAP_ADDRESS not in payload: + psd_common.update_payload_from_overrides_file(payload, subcloud.name, + [consts.BOOTSTRAP_ADDRESS]) psd_common.validate_sysadmin_password(payload) for state in deploy_states_to_run: @@ -419,11 +461,25 @@ class PhasedSubcloudDeployController(object): try: self.dcmanager_rpc_client.subcloud_deploy_resume( context, subcloud.id, subcloud.name, payload, deploy_states_to_run) + + # Change the response to correctly display the values + # that will be updated on the manager. subcloud_dict = db_api.subcloud_db_model_to_dict(subcloud) next_deploy_phase = RESUMABLE_STATES[subcloud.deploy_status][0] next_deploy_state = RESUME_PREP_UPDATE_STATUS[next_deploy_phase] subcloud_dict['deploy-status'] = next_deploy_state subcloud_dict['software-version'] = payload['software_version'] + subcloud_dict['description'] = payload.get("description", + subcloud.description) + subcloud_dict['location'] = payload.get("location", subcloud.location) + subcloud_dict['management-subnet'] = utils.get_management_subnet(payload) + subcloud_dict['management-gateway-ip'] = \ + utils.get_management_gateway_address(payload) + subcloud_dict['management-start-ip'] = \ + utils.get_management_start_address(payload) + subcloud_dict['management-end-ip'] = utils.get_management_end_address(payload) + subcloud_dict['systemcontroller-gateway-ip'] = payload.get( + "systemcontroller_gateway_address", subcloud.systemcontroller_gateway_ip) return subcloud_dict except RemoteError as e: pecan.abort(422, e.value) diff --git a/distributedcloud/dcmanager/common/phased_subcloud_deploy.py b/distributedcloud/dcmanager/common/phased_subcloud_deploy.py index 9bd40a345..61d725983 100644 --- a/distributedcloud/dcmanager/common/phased_subcloud_deploy.py +++ b/distributedcloud/dcmanager/common/phased_subcloud_deploy.py @@ -879,6 +879,31 @@ def add_subcloud_to_database(context, payload): return subcloud +def is_initial_deployment(subcloud_name: str) -> bool: + """Get initial deployment flag from inventory file""" + + postfix = consts.INVENTORY_FILE_POSTFIX + filename = utils.get_ansible_filename(subcloud_name, postfix) + + # Assume initial deployment if inventory file is missing + if not os.path.exists(filename): + return True + + content = utils.load_yaml_file(filename) + initial_deployment = content['all']['vars'].get('initial_deployment') + return initial_deployment + + +def update_payload_from_overrides_file(payload, subcloud_name, values): + """Update payload with values from existing overrides file""" + + 
overrides_filename = get_config_file_path(subcloud_name) + content = utils.load_yaml_file(overrides_filename) + for value in values: + if not payload.get(value): + payload[value] = content.get(value) + + def get_request_data(request: pecan.Request, subcloud: models.Subcloud, subcloud_file_contents: typing.Sequence): diff --git a/distributedcloud/dcmanager/common/prestage.py b/distributedcloud/dcmanager/common/prestage.py index 4e5ed3da1..214536d38 100644 --- a/distributedcloud/dcmanager/common/prestage.py +++ b/distributedcloud/dcmanager/common/prestage.py @@ -34,7 +34,7 @@ from dccommon.drivers.openstack.sdk_platform import OpenStackDriver from dccommon.drivers.openstack.sysinv_v1 import SysinvClient from dccommon.exceptions import PlaybookExecutionFailed from dccommon.exceptions import PlaybookExecutionTimeout -from dccommon.utils import run_playbook +from dccommon.utils import AnsiblePlaybook from dcmanager.common import consts from dcmanager.common import exceptions @@ -340,8 +340,9 @@ def _run_ansible(context, prestage_command, phase, ansible_pass=utils.decode_and_normalize_passwd(sysadmin_password)) try: - run_playbook(log_file, prestage_command, - timeout=timeout_seconds, register_cleanup=True) + ansible = AnsiblePlaybook(subcloud.name) + ansible.run_playbook(log_file, prestage_command, timeout=timeout_seconds, + register_cleanup=True) except PlaybookExecutionFailed as ex: timeout_msg = '' if isinstance(ex, PlaybookExecutionTimeout): diff --git a/distributedcloud/dcmanager/common/utils.py b/distributedcloud/dcmanager/common/utils.py index 0ed75f687..d06872d89 100644 --- a/distributedcloud/dcmanager/common/utils.py +++ b/distributedcloud/dcmanager/common/utils.py @@ -338,7 +338,8 @@ def get_filename_by_prefix(dir_path, prefix): def create_subcloud_inventory(subcloud, - inventory_file): + inventory_file, + initial_deployment=False): """Create the ansible inventory file for the specified subcloud""" # Delete the file if it already exists @@ -351,6 +352,7 @@ def create_subcloud_inventory(subcloud, ' vars:\n' ' ansible_ssh_user: sysadmin\n' ' ansible_ssh_extra_args: "-o UserKnownHostsFile=/dev/null"\n' + ' initial_deployment: ' + str(initial_deployment) + '\n' ' hosts:\n' ' ' + subcloud['name'] + ':\n' ' ansible_host: ' + @@ -361,7 +363,8 @@ def create_subcloud_inventory(subcloud, def create_subcloud_inventory_with_admin_creds(subcloud_name, inventory_file, subcloud_bootstrap_address, - ansible_pass): + ansible_pass, + initial_deployment=False): """Create the ansible inventory file for the specified subcloud. Includes ansible_become_pass attribute. @@ -379,6 +382,7 @@ def create_subcloud_inventory_with_admin_creds(subcloud_name, ' ansible_ssh_pass: {0}\n' ' ansible_become_pass: {0}\n' ' ansible_ssh_extra_args: "-o UserKnownHostsFile=/dev/null"\n' + ' initial_deployment: ' + str(initial_deployment) + '\n' ' hosts:\n' ' {1}:\n' ' ansible_host: {2}\n').format(ansible_pass, @@ -1116,19 +1120,31 @@ def get_value_from_yaml_file(filename, key): return value -def update_values_on_yaml_file(filename, values, yaml_dump=True): +def update_values_on_yaml_file(filename, values, values_to_keep=None, + yaml_dump=True): """Update all specified key values from the given yaml file. + If values_to_keep is provided, all values other than specified + will be deleted from the loaded file prior to update. 
+ :param filename: the yaml filename :param values: dict with yaml keys and values to replace + :param values_to_keep: list of values to keep on original file :param yaml_dump: write file using yaml dump (default is True) """ + if values_to_keep is None: + values_to_keep = [] update_file = False if not os.path.isfile(filename): return with open(os.path.abspath(filename), 'r') as f: data = f.read() data = yaml.load(data, Loader=yaml.SafeLoader) + if values_to_keep: + for key in data.copy(): + if key not in values_to_keep: + data.pop(key) + update_file = True for key, value in values.items(): if key not in data or value != data.get(key): data.update({key: value}) diff --git a/distributedcloud/dcmanager/manager/service.py b/distributedcloud/dcmanager/manager/service.py index da2ae3317..ed60255ad 100644 --- a/distributedcloud/dcmanager/manager/service.py +++ b/distributedcloud/dcmanager/manager/service.py @@ -220,29 +220,29 @@ class DCManagerService(service.Service): payload) @request_context - def subcloud_deploy_bootstrap(self, context, subcloud_id, payload): + def subcloud_deploy_bootstrap(self, context, subcloud_id, payload, + initial_deployment): # Bootstraps a subcloud LOG.info("Handling subcloud_deploy_bootstrap request for: %s" % payload.get('name')) - return self.subcloud_manager.subcloud_deploy_bootstrap(context, - subcloud_id, - payload) + return self.subcloud_manager.subcloud_deploy_bootstrap( + context, subcloud_id, payload, initial_deployment) @request_context - def subcloud_deploy_config(self, context, subcloud_id, payload): + def subcloud_deploy_config(self, context, subcloud_id, payload, + initial_deployment): # Configures a subcloud LOG.info("Handling subcloud_deploy_config request for: %s" % subcloud_id) - return self.subcloud_manager.subcloud_deploy_config(context, - subcloud_id, - payload) + return self.subcloud_manager.subcloud_deploy_config( + context, subcloud_id, payload, initial_deployment) @request_context - def subcloud_deploy_install(self, context, subcloud_id, payload): + def subcloud_deploy_install(self, context, subcloud_id, payload, + initial_deployment): # Install a subcloud LOG.info("Handling subcloud_deploy_install request for: %s" % subcloud_id) - return self.subcloud_manager.subcloud_deploy_install(context, - subcloud_id, - payload) + return self.subcloud_manager.subcloud_deploy_install( + context, subcloud_id, payload, initial_deployment) @request_context def subcloud_deploy_complete(self, context, subcloud_id): diff --git a/distributedcloud/dcmanager/manager/subcloud_manager.py b/distributedcloud/dcmanager/manager/subcloud_manager.py index 70683aecf..12b508220 100644 --- a/distributedcloud/dcmanager/manager/subcloud_manager.py +++ b/distributedcloud/dcmanager/manager/subcloud_manager.py @@ -46,8 +46,7 @@ from dccommon.exceptions import PlaybookExecutionFailed from dccommon import kubeoperator from dccommon.subcloud_install import SubcloudInstall from dccommon.subcloud_install import SubcloudShutdown -from dccommon.utils import run_playbook -from dccommon.utils import RunAnsible +from dccommon.utils import AnsiblePlaybook from dcmanager.audit import rpcapi as dcmanager_audit_rpc_client from dcmanager.common import consts from dcmanager.common.consts import INVENTORY_FILE_POSTFIX @@ -129,7 +128,6 @@ TRANSITORY_STATES = { consts.DEPLOY_STATE_DEPLOYING: consts.DEPLOY_STATE_CONFIG_FAILED, } - TRANSITORY_BACKUP_STATES = { consts.BACKUP_STATE_VALIDATING: consts.BACKUP_STATE_VALIDATE_FAILED, consts.BACKUP_STATE_PRE_BACKUP: consts.BACKUP_STATE_PREP_FAILED, @@ 
-150,6 +148,37 @@ ENDPOINT_URLS = { dccommon_consts.ENDPOINT_TYPE_SOFTWARE: "https://{}:5498", } +# Values present on the overrides file generated during +# subcloud_deploy_create. They should not be deleted from +# the overrides if it's needed to recreate the file. +GENERATED_OVERRIDES_VALUES = [ + 'region_config', + 'distributed_cloud_role', + 'system_controller_subnet', + 'system_controller_floating_address', + 'system_controller_oam_subnet', + 'system_controller_oam_floating_address', + 'system_controller_keystone_admin_user_id', + 'system_controller_keystone_admin_project_id', + 'system_controller_keystone_services_project_id', + 'system_controller_keystone_sysinv_user_id', + 'system_controller_keystone_dcmanager_user_id', + 'users', + 'dc_root_ca_cert', + 'sc_ca_cert', + 'sc_ca_key' +] + +VALUES_TO_DELETE_OVERRIDES = [ + 'deploy_playbook', + 'deploy_values', + 'deploy_config', + 'deploy_chart', + 'deploy_overrides', + 'install_values', + 'sysadmin_password' +] + class SubcloudManager(manager.Manager): """Manages tasks related to subclouds.""" @@ -378,7 +407,7 @@ class SubcloudManager(manager.Manager): subcloud = self.subcloud_migrate_generate_ansible_config( context, subcloud.id, saved_payload) - self.rehome_subcloud(context, subcloud, saved_payload) + self.rehome_subcloud(context, subcloud) except Exception: # If we failed to migrate the subcloud, update the # deployment status @@ -388,19 +417,45 @@ class SubcloudManager(manager.Manager): context, subcloud.id, deploy_status=consts.DEPLOY_STATE_REHOME_FAILED) - def rehome_subcloud(self, context, subcloud, payload): + def rehome_subcloud(self, context, subcloud): # Ansible inventory filename for the specified subcloud ansible_subcloud_inventory_file = self._get_ansible_filename( subcloud.name, INVENTORY_FILE_POSTFIX) + log_file = ( + os.path.join(consts.DC_ANSIBLE_LOG_DIR, subcloud.name) + + "_playbook_output.log" + ) + rehome_command = self.compose_rehome_command( subcloud.name, subcloud.region_name, ansible_subcloud_inventory_file, subcloud.software_version) - self.run_deploy_thread(subcloud, payload, context, - rehome_command=rehome_command) + # Update the deploy status to rehoming + db_api.subcloud_update( + context, subcloud.id, + deploy_status=consts.DEPLOY_STATE_REHOMING) + + # Run the rehome-subcloud playbook + try: + ansible = AnsiblePlaybook(subcloud.name) + ansible.run_playbook(log_file, rehome_command) + except PlaybookExecutionFailed: + msg = "Failed to run the subcloud rehome playbook" \ + f" for subcloud {subcloud.name}, check individual log at " \ + f"{log_file} for detailed output." + LOG.error(msg) + db_api.subcloud_update( + context, subcloud.id, + deploy_status=consts.DEPLOY_STATE_REHOME_FAILED) + return + # Update the deploy status to complete + db_api.subcloud_update( + context, subcloud.id, + deploy_status=consts.DEPLOY_STATE_DONE) + LOG.info("Successfully rehomed subcloud %s" % subcloud.name) def add_subcloud(self, context, subcloud_id, payload): """Add subcloud and notify orchestrators. 
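Annotation: the two lists introduced above drive how the subcloud overrides file is rebuilt on later deploy phases. `update_values_on_yaml_file(..., values_to_keep=GENERATED_OVERRIDES_VALUES)` first drops every key that was not generated during `subcloud_deploy_create` (certificates, system controller addresses, users) and then writes the current request values on top, while the keys in `VALUES_TO_DELETE_OVERRIDES` (deploy artefacts and `sysadmin_password`) are removed from the payload before anything is written. A minimal standalone sketch of that keep-then-update behaviour, using made-up keys and plain dicts instead of the real overrides YAML file:

```python
# Simplified, in-memory version of the keep-then-update behaviour that
# update_values_on_yaml_file() gains in this change; the real helper operates
# on the subcloud overrides YAML file and only rewrites it when values change.
def rebuild_overrides(existing, request_values, values_to_keep, values_to_delete):
    # Drop request-only keys that must never be persisted (e.g. sysadmin_password).
    request_values = {k: v for k, v in request_values.items()
                      if k not in values_to_delete}
    # Keep only the values generated at subcloud_deploy_create time ...
    rebuilt = {k: v for k, v in existing.items() if k in values_to_keep}
    # ... and layer the current request values on top of them.
    rebuilt.update(request_values)
    return rebuilt


if __name__ == "__main__":
    existing = {'region_config': True,                 # generated, kept
                'users': {'sysinv': '<hash>'},         # generated, kept
                'description': 'old lab description'}  # stale user value, dropped
    request = {'description': 'new lab description',
               'sysadmin_password': 'secret'}          # never persisted
    print(rebuild_overrides(existing, request,
                            values_to_keep=['region_config', 'users'],
                            values_to_delete=['sysadmin_password']))
```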
@@ -413,10 +468,12 @@ class SubcloudManager(manager.Manager): rehoming = payload.get('migrate', '').lower() == "true" secondary = (payload.get('secondary', '').lower() == "true") + initial_deployment = True if not rehoming else False # Create the subcloud subcloud = self.subcloud_deploy_create(context, subcloud_id, payload, rehoming, + initial_deployment, return_as_dict=False) # return if 'secondary' subcloud @@ -433,7 +490,7 @@ class SubcloudManager(manager.Manager): # Rehome subcloud if rehoming: - self.rehome_subcloud(context, subcloud, payload) + self.rehome_subcloud(context, subcloud) return # Define which deploy phases should be run @@ -443,16 +500,16 @@ class SubcloudManager(manager.Manager): phases_to_run.append(consts.DEPLOY_PHASE_BOOTSTRAP) if consts.DEPLOY_CONFIG in payload: phases_to_run.append(consts.DEPLOY_PHASE_CONFIG) + else: + phases_to_run.append(consts.DEPLOY_PHASE_COMPLETE) # Finish adding the subcloud by running the deploy phases succeeded = self.run_deploy_phases( - context, subcloud_id, payload, phases_to_run) + context, subcloud_id, payload, phases_to_run, + initial_deployment=initial_deployment) if succeeded: - subcloud = db_api.subcloud_update( - context, subcloud_id, deploy_status=consts.DEPLOY_STATE_DONE) - - LOG.info(f"Finished adding subcloud {subcloud['name']}.") + LOG.info(f"Finished adding subcloud {subcloud['name']}.") def redeploy_subcloud(self, context, subcloud_id, payload): """Redeploy subcloud @@ -472,15 +529,13 @@ class SubcloudManager(manager.Manager): consts.DEPLOY_PHASE_BOOTSTRAP] if consts.DEPLOY_CONFIG in payload: phases_to_run.append(consts.DEPLOY_PHASE_CONFIG) + else: + phases_to_run.append(consts.DEPLOY_PHASE_COMPLETE) succeeded = self.run_deploy_phases(context, subcloud_id, payload, - phases_to_run) + phases_to_run, initial_deployment=True) if succeeded: - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_DONE, - error_description=consts.ERROR_DESC_EMPTY) LOG.info(f"Finished redeploying subcloud {subcloud['name']}.") def create_subcloud_backups(self, context, payload): @@ -603,13 +658,15 @@ class SubcloudManager(manager.Manager): failed_subclouds, invalid_subclouds) def _deploy_bootstrap_prep(self, context, subcloud, payload: dict, - ansible_subcloud_inventory_file): + ansible_subcloud_inventory_file, + initial_deployment=False): """Run the preparation steps needed to run the bootstrap operation :param context: target request context object :param subcloud: subcloud model object :param payload: bootstrap request parameters :param ansible_subcloud_inventory_file: the ansible inventory file path + :param initial_deployment: initial_deployment flag from subcloud inventory :return: ansible command needed to run the bootstrap playbook """ network_reconfig = utils.has_network_reconfig(payload, subcloud) @@ -639,19 +696,23 @@ class SubcloudManager(manager.Manager): payload['ansible_become_pass'] = payload['sysadmin_password'] payload['ansible_ssh_pass'] = payload['sysadmin_password'] payload['admin_password'] = str(keyring.get_password('CGCS', 'admin')) - payload_without_sysadmin_password = payload.copy() - if 'sysadmin_password' in payload_without_sysadmin_password: - del payload_without_sysadmin_password['sysadmin_password'] + + payload_for_overrides_file = payload.copy() + for key in VALUES_TO_DELETE_OVERRIDES: + if key in payload_for_overrides_file: + del payload_for_overrides_file[key] # Update the ansible overrides file overrides_file = os.path.join(dccommon_consts.ANSIBLE_OVERRIDES_PATH, subcloud.name + 
'.yml') utils.update_values_on_yaml_file(overrides_file, - payload_without_sysadmin_password) + payload_for_overrides_file, + values_to_keep=GENERATED_OVERRIDES_VALUES) # Update the ansible inventory for the subcloud utils.create_subcloud_inventory(payload, - ansible_subcloud_inventory_file) + ansible_subcloud_inventory_file, + initial_deployment) bootstrap_command = self.compose_bootstrap_command( subcloud.name, @@ -661,15 +722,26 @@ class SubcloudManager(manager.Manager): return bootstrap_command def _deploy_config_prep(self, subcloud, payload: dict, - ansible_subcloud_inventory_file): + ansible_subcloud_inventory_file, + initial_deployment=False): """Run the preparation steps needed to run the config operation :param subcloud: target subcloud model object :param payload: config request parameters :param ansible_subcloud_inventory_file: the ansible inventory file path + :param initial_deployment: initial_deployment flag from subcloud inventory :return: ansible command needed to run the config playbook """ self._prepare_for_deployment(payload, subcloud.name) + + # Update the ansible inventory for the subcloud + bootstrap_address = payload[consts.BOOTSTRAP_ADDRESS] + subcloud_params = {'name': subcloud.name, + consts.BOOTSTRAP_ADDRESS: bootstrap_address} + utils.create_subcloud_inventory(subcloud_params, + ansible_subcloud_inventory_file, + initial_deployment) + config_command = self.compose_config_command( subcloud.name, ansible_subcloud_inventory_file, @@ -677,12 +749,14 @@ class SubcloudManager(manager.Manager): return config_command def _deploy_install_prep(self, subcloud, payload: dict, - ansible_subcloud_inventory_file): + ansible_subcloud_inventory_file, + initial_deployment=False): """Run the preparation steps needed to run the install operation :param subcloud: target subcloud model object :param payload: install request parameters :param ansible_subcloud_inventory_file: the ansible inventory file path + :param initial_deployment: initial_deployment flag from subcloud inventory :return: ansible command needed to run the install playbook """ payload['install_values']['ansible_ssh_pass'] = \ @@ -703,6 +777,14 @@ class SubcloudManager(manager.Manager): utils.update_values_on_yaml_file(bootstrap_file, update_values) + # Update the ansible inventory for the subcloud + bootstrap_address = payload['install_values']['bootstrap_address'] + subcloud_params = {'name': subcloud.name, + consts.BOOTSTRAP_ADDRESS: bootstrap_address} + utils.create_subcloud_inventory(subcloud_params, + ansible_subcloud_inventory_file, + initial_deployment) + install_command = self.compose_install_command( subcloud.name, ansible_subcloud_inventory_file, @@ -722,8 +804,8 @@ class SubcloudManager(manager.Manager): subcloud = utils.update_abort_status(context, subcloud_id, deploy_status) try: - run_ansible = RunAnsible() - aborted = run_ansible.run_abort(subcloud.name) + ansible = AnsiblePlaybook(subcloud.name) + aborted = ansible.run_abort() if not aborted: LOG.warning("Ansible deploy phase subprocess of %s " "was terminated before it could be aborted" @@ -771,7 +853,8 @@ class SubcloudManager(manager.Manager): % (subcloud_name, ', '.join(deploy_states_to_run))) self.run_deploy_phases(context, subcloud_id, payload, - deploy_states_to_run) + deploy_states_to_run, + initial_deployment=True) def subcloud_migrate_generate_ansible_config(self, context, subcloud_id, payload): """Generate latest ansible config based on given payload for day-2 rehoming purpose. 
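Annotation: `subcloud_deploy_abort` above goes through the same `AnsiblePlaybook` instance API that runs the playbooks. `run_playbook()` registers the live subprocess in a class-level dict keyed by subcloud name, and `run_abort()` flips the abort flag, waits for any `.<name>_deploy_not_abortable` flag file to clear, and then kills the process group. Below is a stripped-down, self-contained sketch of that shared-state pattern; it uses `terminate()` on a dummy `sleep` command and omits the flag-file wait and process-group handling of the real dccommon code.

```python
# Standalone illustration (not the dccommon code) of the coordination pattern
# AnsiblePlaybook relies on: a class-level dict guarded by a lock, keyed by
# subcloud name, that a second thread can use to abort a running playbook.
import subprocess
import threading
import time


class PlaybookRunner:
    _status = {}                 # subcloud name -> {'abort': bool, 'subp': Popen}
    _lock = threading.Lock()

    def __init__(self, name):
        self.name = name

    def run(self, command):
        subp = subprocess.Popen(command)
        with PlaybookRunner._lock:
            PlaybookRunner._status[self.name] = {'abort': False, 'subp': subp}
        subp.communicate()       # wait for exit (normal completion or kill)
        with PlaybookRunner._lock:
            aborted = PlaybookRunner._status.pop(self.name)['abort']
        return aborted           # True means the run was deliberately aborted

    def abort(self):
        with PlaybookRunner._lock:
            entry = PlaybookRunner._status.get(self.name)
            if not entry:
                return False     # nothing registered for this subcloud
            entry['abort'] = True
            entry['subp'].terminate()
        return True


if __name__ == "__main__":
    runner = PlaybookRunner("subcloud1")
    worker = threading.Thread(
        target=lambda: print("aborted:", runner.run(["sleep", "30"])))
    worker.start()
    time.sleep(1)
    runner.abort()
    worker.join()
```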
@@ -838,13 +921,15 @@ class SubcloudManager(manager.Manager): return subcloud def subcloud_deploy_create(self, context, subcloud_id, payload, - rehoming=False, return_as_dict=True): + rehoming=False, initial_deployment=True, + return_as_dict=True): """Create subcloud and notify orchestrators. :param context: request context object :param subcloud_id: subcloud_id from db :param payload: subcloud configuration :param rehoming: flag indicating if this is part of a rehoming operation + :param initial_deployment: initial_deployment flag from subcloud inventory :param return_as_dict: converts the subcloud DB object to a dict before returning :return: resulting subcloud DB object or dictionary """ @@ -972,7 +1057,8 @@ class SubcloudManager(manager.Manager): # Create the ansible inventory for the new subcloud utils.create_subcloud_inventory(payload, - ansible_subcloud_inventory_file) + ansible_subcloud_inventory_file, + initial_deployment) # create subcloud intermediate certificate and pass in keys self._create_intermediate_ca_cert(payload) @@ -1023,12 +1109,14 @@ class SubcloudManager(manager.Manager): return subcloud - def subcloud_deploy_install(self, context, subcloud_id, payload: dict) -> bool: + def subcloud_deploy_install(self, context, subcloud_id, payload: dict, + initial_deployment=False) -> bool: """Install subcloud :param context: request context object :param subcloud_id: subcloud id from db :param payload: subcloud Install + :param initial_deployment: initial_deployment flag from subcloud inventory :return: success status """ @@ -1051,11 +1139,11 @@ class SubcloudManager(manager.Manager): subcloud.name, INVENTORY_FILE_POSTFIX) install_command = self._deploy_install_prep( - subcloud, payload, ansible_subcloud_inventory_file) + subcloud, payload, ansible_subcloud_inventory_file, + initial_deployment) install_success = self._run_subcloud_install( context, subcloud, install_command, - log_file, payload['install_values'], - abortable=True) + log_file, payload['install_values']) if install_success: db_api.subcloud_update( context, subcloud.id, @@ -1072,12 +1160,14 @@ class SubcloudManager(manager.Manager): deploy_status=consts.DEPLOY_STATE_PRE_INSTALL_FAILED) return False - def subcloud_deploy_bootstrap(self, context, subcloud_id, payload): + def subcloud_deploy_bootstrap(self, context, subcloud_id, payload, + initial_deployment=False): """Bootstrap subcloud :param context: request context object :param subcloud_id: subcloud_id from db :param payload: subcloud bootstrap configuration + :param initial_deployment: initial_deployment flag from subcloud inventory :return: success status """ LOG.info("Bootstrapping subcloud %s." 
% payload['name']) @@ -1095,7 +1185,8 @@ class SubcloudManager(manager.Manager): bootstrap_command = self._deploy_bootstrap_prep( context, subcloud, payload, - ansible_subcloud_inventory_file) + ansible_subcloud_inventory_file, + initial_deployment) bootstrap_success = self._run_subcloud_bootstrap( context, subcloud, bootstrap_command, log_file) return bootstrap_success @@ -1107,12 +1198,14 @@ class SubcloudManager(manager.Manager): deploy_status=consts.DEPLOY_STATE_PRE_BOOTSTRAP_FAILED) return False - def subcloud_deploy_config(self, context, subcloud_id, payload: dict) -> bool: + def subcloud_deploy_config(self, context, subcloud_id, payload: dict, + initial_deployment=False) -> bool: """Configure subcloud :param context: request context object :param subcloud_id: subcloud_id from db :param payload: subcloud configuration + :param initial_deployment: initial_deployment flag from subcloud inventory :return: success status """ LOG.info("Configuring subcloud %s." % subcloud_id) @@ -1129,11 +1222,9 @@ class SubcloudManager(manager.Manager): ansible_subcloud_inventory_file = self._get_ansible_filename( subcloud.name, INVENTORY_FILE_POSTFIX) - self._prepare_for_deployment(payload, subcloud.name) - config_command = self.compose_config_command( - subcloud.name, - ansible_subcloud_inventory_file, - payload) + config_command = self._deploy_config_prep( + subcloud, payload, ansible_subcloud_inventory_file, + initial_deployment) config_success = self._run_subcloud_config(subcloud, context, config_command, log_file) @@ -1433,7 +1524,8 @@ class SubcloudManager(manager.Manager): % (operation, ' ,'.join(failed_subcloud_names))) return notice - def _create_subcloud_inventory_file(self, subcloud, data_install=None): + def _create_subcloud_inventory_file(self, subcloud, data_install=None, + initial_deployment=False): # Ansible inventory filename for the specified subcloud ansible_subcloud_inventory_file = self._get_ansible_filename( subcloud.name, INVENTORY_FILE_POSTFIX) @@ -1455,7 +1547,8 @@ class SubcloudManager(manager.Manager): 'bootstrap-address': oam_fip} utils.create_subcloud_inventory(subcloud_params, - ansible_subcloud_inventory_file) + ansible_subcloud_inventory_file, + initial_deployment) return ansible_subcloud_inventory_file def _create_overrides_for_backup_or_restore(self, op, payload, subcloud_name): @@ -1547,7 +1640,8 @@ class SubcloudManager(manager.Manager): # Run the subcloud backup playbook try: - run_playbook(log_file, backup_command) + ansible = AnsiblePlaybook(subcloud.name) + ansible.run_playbook(log_file, backup_command) # Decide between complete-local or complete-central if local_only: @@ -1573,7 +1667,8 @@ class SubcloudManager(manager.Manager): try: # Run the subcloud backup delete playbook - run_playbook(log_file, delete_command) + ansible = AnsiblePlaybook(subcloud.name) + ansible.run_playbook(log_file, delete_command) # Set backup status to unknown after delete, since most recent backup may # have been deleted @@ -1608,7 +1703,8 @@ class SubcloudManager(manager.Manager): ) # Run the subcloud backup restore playbook try: - run_playbook(log_file, restore_command) + ansible = AnsiblePlaybook(subcloud.name) + ansible.run_playbook(log_file, restore_command) LOG.info("Successfully restore subcloud %s" % subcloud.name) db_api.subcloud_update( context, subcloud.id, @@ -1682,145 +1778,29 @@ class SubcloudManager(manager.Manager): except Exception as e: LOG.exception(e) - def run_deploy_thread(self, subcloud, payload, context, - install_command=None, bootstrap_command=None, - 
config_command=None, rehome_command=None, - network_reconfig=None): - try: - self._run_deploy(subcloud, payload, context, - install_command, bootstrap_command, - config_command, rehome_command, - network_reconfig) - except Exception as ex: - LOG.exception("run_deploy failed") - raise ex - - def _run_deploy(self, subcloud, payload, context, - install_command, bootstrap_command, - config_command, rehome_command, - network_reconfig): - log_file = ( - os.path.join(consts.DC_ANSIBLE_LOG_DIR, subcloud.name) - + "_playbook_output.log" - ) - if install_command: - install_success = self._run_subcloud_install( - context, subcloud, install_command, - log_file, payload['install_values'] - ) - if not install_success: - return - if bootstrap_command: - try: - # Update the subcloud to bootstrapping - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_BOOTSTRAPPING, - error_description=consts.ERROR_DESC_EMPTY) - except Exception: - LOG.error("DB subcloud_update failed") - # exception is logged above - raise - - # Run the ansible boostrap-subcloud playbook - LOG.info("Starting bootstrap of %s" % subcloud.name) - try: - run_playbook(log_file, bootstrap_command) - except PlaybookExecutionFailed: - msg = utils.find_ansible_error_msg( - subcloud.name, log_file, consts.DEPLOY_STATE_BOOTSTRAPPING) - LOG.error(msg) - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_BOOTSTRAP_FAILED, - error_description=msg[0:consts.ERROR_DESCRIPTION_LENGTH]) - return - LOG.info("Successfully bootstrapped %s" % subcloud.name) - if config_command: - # Run the custom deploy playbook - LOG.info("Starting deploy of %s" % subcloud.name) - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_DEPLOYING, - error_description=consts.ERROR_DESC_EMPTY) - - try: - run_playbook(log_file, config_command) - except PlaybookExecutionFailed: - msg = utils.find_ansible_error_msg( - subcloud.name, log_file, consts.DEPLOY_STATE_DEPLOYING) - LOG.error(msg) - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_DEPLOY_FAILED, - error_description=msg[0:consts.ERROR_DESCRIPTION_LENGTH]) - return - LOG.info("Successfully deployed %s" % subcloud.name) - if rehome_command: - # Update the deploy status to rehoming - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_REHOMING) - - # Run the rehome-subcloud playbook - try: - run_playbook(log_file, rehome_command) - except PlaybookExecutionFailed: - msg = "Failed to run the subcloud rehome playbook" \ - " for subcloud %s, check individual log at " \ - "%s for detailed output." 
% ( - subcloud.name, - log_file) - LOG.error(msg) - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_REHOME_FAILED) - return - LOG.info("Successfully rehomed subcloud %s" % - subcloud.name) - if network_reconfig: - self._configure_system_controller_network(context, payload, subcloud) - subcloud = db_api.subcloud_update( - context, - subcloud.id, - description=payload.get('description', subcloud.description), - management_subnet=utils.get_management_subnet(payload), - management_gateway_ip=utils.get_management_gateway_address(payload), - management_start_ip=utils.get_management_start_address(payload), - management_end_ip=utils.get_management_end_address(payload), - location=payload.get('location', subcloud.location), - group_id=payload.get('group_id', subcloud.group_id), - data_install=payload.get('data_install', subcloud.data_install) - ) - - # Regenerate the addn_hosts_dc file - self._create_addn_hosts_dc(context) - - db_api.subcloud_update( - context, subcloud.id, - deploy_status=consts.DEPLOY_STATE_DONE, - error_description=consts.ERROR_DESC_EMPTY) - def run_deploy_phases(self, context, subcloud_id, payload, - deploy_phases_to_run): + deploy_phases_to_run, initial_deployment=False): """Run one or more deployment phases, ensuring correct order :param context: request context object :param subcloud_id: subcloud id from db :param payload: deploy phases payload :param deploy_phases_to_run: deploy phases that should run + :param initial_deployment: initial_deployment flag from subcloud inventory """ try: succeeded = True if consts.DEPLOY_PHASE_INSTALL in deploy_phases_to_run: succeeded = self.subcloud_deploy_install( - context, subcloud_id, payload) + context, subcloud_id, payload, initial_deployment) if succeeded and consts.DEPLOY_PHASE_BOOTSTRAP in deploy_phases_to_run: succeeded = self.subcloud_deploy_bootstrap( - context, subcloud_id, payload) + context, subcloud_id, payload, initial_deployment) if succeeded and consts.DEPLOY_PHASE_CONFIG in deploy_phases_to_run: succeeded = self.subcloud_deploy_config( - context, subcloud_id, payload) + context, subcloud_id, payload, initial_deployment) + if succeeded and consts.DEPLOY_PHASE_COMPLETE in deploy_phases_to_run: + self.subcloud_deploy_complete(context, subcloud_id) return succeeded except Exception as ex: @@ -1837,9 +1817,9 @@ class SubcloudManager(manager.Manager): error_description=consts.ERROR_DESC_EMPTY) try: - run_ansible = RunAnsible() - aborted = run_ansible.exec_playbook( - log_file, config_command, subcloud.name) + ansible = AnsiblePlaybook(subcloud.name) + aborted = ansible.run_playbook( + log_file, config_command) except PlaybookExecutionFailed: msg = utils.find_ansible_error_msg( subcloud.name, log_file, consts.DEPLOY_STATE_CONFIGURING) @@ -1856,10 +1836,11 @@ class SubcloudManager(manager.Manager): context, subcloud.id, deploy_status=consts.DEPLOY_STATE_DONE, error_description=consts.ERROR_DESC_EMPTY) + return True @staticmethod def _run_subcloud_install(context, subcloud, install_command, - log_file, payload, abortable=False): + log_file, payload): software_version = str(payload['software_version']) LOG.info("Preparing remote install of %s, version: %s", subcloud.name, software_version) @@ -1889,7 +1870,7 @@ class SubcloudManager(manager.Manager): error_description=consts.ERROR_DESC_EMPTY) try: aborted = install.install( - consts.DC_ANSIBLE_LOG_DIR, install_command, abortable=abortable) + consts.DC_ANSIBLE_LOG_DIR, install_command) except Exception as e: msg = utils.find_ansible_error_msg( 
subcloud.name, log_file, consts.DEPLOY_STATE_INSTALLING) @@ -1918,9 +1899,8 @@ class SubcloudManager(manager.Manager): # Run the ansible subcloud boostrap playbook LOG.info("Starting bootstrap of %s" % subcloud.name) try: - run_ansible = RunAnsible() - aborted = run_ansible.exec_playbook( - log_file, bootstrap_command, subcloud.name) + ansible = AnsiblePlaybook(subcloud.name) + aborted = ansible.run_playbook(log_file, bootstrap_command) except PlaybookExecutionFailed: msg = utils.find_ansible_error_msg( subcloud.name, log_file, consts.DEPLOY_STATE_BOOTSTRAPPING) @@ -1992,9 +1972,7 @@ class SubcloudManager(manager.Manager): ) for k, v in payload.items(): - if k not in ['deploy_playbook', 'deploy_values', - 'deploy_config', 'deploy_chart', - 'deploy_overrides', 'install_values']: + if k not in VALUES_TO_DELETE_OVERRIDES: f_out_overrides_file.write("%s: %s\n" % (k, json.dumps(v))) def _write_deploy_files(self, payload, subcloud_name): @@ -2541,7 +2519,8 @@ class SubcloudManager(manager.Manager): '_playbook_output.log') subcloud_id = subcloud.id try: - run_playbook(log_file, update_command) + ansible = AnsiblePlaybook(subcloud_name) + ansible.run_playbook(log_file, update_command) utils.delete_subcloud_inventory(overrides_file) except PlaybookExecutionFailed: msg = utils.find_ansible_error_msg( diff --git a/distributedcloud/dcmanager/orchestrator/states/upgrade/migrating_data.py b/distributedcloud/dcmanager/orchestrator/states/upgrade/migrating_data.py index ffc61a100..327c53837 100644 --- a/distributedcloud/dcmanager/orchestrator/states/upgrade/migrating_data.py +++ b/distributedcloud/dcmanager/orchestrator/states/upgrade/migrating_data.py @@ -8,7 +8,7 @@ import time from dccommon import consts as dccommon_consts from dccommon.exceptions import PlaybookExecutionFailed -from dccommon.utils import run_playbook +from dccommon.utils import AnsiblePlaybook from dcmanager.common import consts from dcmanager.common.exceptions import StrategyStoppedException from dcmanager.common import utils @@ -31,9 +31,10 @@ DEFAULT_MAX_API_QUERIES = 30 DEFAULT_API_SLEEP = 60 -def migrate_subcloud_data(migrate_command, log_file): +def migrate_subcloud_data(migrate_command, log_file, subcloud_name): try: - run_playbook(log_file, migrate_command) + ansible = AnsiblePlaybook(subcloud_name) + ansible.run_playbook(log_file, migrate_command) except PlaybookExecutionFailed: msg_orch = ("Failed to migrate data, check individual " "log at %s or run %s for details" @@ -149,7 +150,8 @@ class MigratingDataState(BaseState): % (consts.TEMP_SYSADMIN_PASSWORD, consts.TEMP_SYSADMIN_PASSWORD)] try: - migrate_subcloud_data(data_migrating_cmd, log_file) + migrate_subcloud_data(data_migrating_cmd, log_file, + strategy_step.subcloud.name) except Exception as e: # Two error messages: one for subcloud error description and logs and # one for orchestrator strategy_step detail (shorter than the previous). 
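Annotation: the `initial_deployment` argument threaded through the RPC layer below ends up in the per-subcloud Ansible inventory written by `dcmanager.common.utils.create_subcloud_inventory()`, and `psd_common.is_initial_deployment()` reads it back so the API can refuse install, abort and resume once the first deployment has finished. A simplified standalone sketch of that round trip follows; file paths and helper names are illustrative, and the template mirrors the inventory layout shown in the diff.

```python
# Sketch of how the new initial_deployment flag travels: written into the
# per-subcloud Ansible inventory on one side, read back on the other.
import os
import yaml

INVENTORY_TEMPLATE = """---
all:
  vars:
    ansible_ssh_user: sysadmin
    ansible_ssh_extra_args: "-o UserKnownHostsFile=/dev/null"
    initial_deployment: {initial_deployment}
  hosts:
    {name}:
      ansible_host: {bootstrap_address}
"""


def write_inventory(path, name, bootstrap_address, initial_deployment=False):
    with open(path, "w") as f:
        f.write(INVENTORY_TEMPLATE.format(name=name,
                                          bootstrap_address=bootstrap_address,
                                          initial_deployment=initial_deployment))


def is_initial_deployment(path):
    # Missing inventory means the subcloud was never deployed: assume initial.
    if not os.path.exists(path):
        return True
    with open(path) as f:
        content = yaml.safe_load(f)
    return bool(content["all"]["vars"].get("initial_deployment"))


if __name__ == "__main__":
    write_inventory("/tmp/subcloud1_inventory.yml", "subcloud1",
                    "10.10.10.12", initial_deployment=True)
    print(is_initial_deployment("/tmp/subcloud1_inventory.yml"))   # True
    print(is_initial_deployment("/tmp/missing_inventory.yml"))     # True (assumed)
```

In the patch the manager passes `initial_deployment=True` only on the add, redeploy and phased-deploy paths, so later inventory regenerations (backup, restore, rehoming) record `False` and the new API guards reject the phased-deploy commands.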
diff --git a/distributedcloud/dcmanager/rpc/client.py b/distributedcloud/dcmanager/rpc/client.py
index d98239bbf..dc29bdbe3 100644
--- a/distributedcloud/dcmanager/rpc/client.py
+++ b/distributedcloud/dcmanager/rpc/client.py
@@ -205,20 +205,26 @@ class ManagerClient(RPCClient):
                                              subcloud_id=subcloud_id,
                                              payload=payload))

-    def subcloud_deploy_install(self, ctxt, subcloud_id, payload):
+    def subcloud_deploy_install(self, ctxt, subcloud_id, payload,
+                                initial_deployment):
         return self.cast(ctxt, self.make_msg('subcloud_deploy_install',
                                              subcloud_id=subcloud_id,
-                                             payload=payload))
+                                             payload=payload,
+                                             initial_deployment=initial_deployment))

-    def subcloud_deploy_bootstrap(self, ctxt, subcloud_id, payload):
+    def subcloud_deploy_bootstrap(self, ctxt, subcloud_id, payload,
+                                  initial_deployment):
         return self.cast(ctxt, self.make_msg('subcloud_deploy_bootstrap',
                                              subcloud_id=subcloud_id,
-                                             payload=payload))
+                                             payload=payload,
+                                             initial_deployment=initial_deployment))

-    def subcloud_deploy_config(self, ctxt, subcloud_id, payload):
+    def subcloud_deploy_config(self, ctxt, subcloud_id, payload,
+                               initial_deployment):
         return self.cast(ctxt, self.make_msg('subcloud_deploy_config',
                                              subcloud_id=subcloud_id,
-                                             payload=payload))
+                                             payload=payload,
+                                             initial_deployment=initial_deployment))

     def subcloud_deploy_complete(self, ctxt, subcloud_id):
         return self.call(ctxt, self.make_msg('subcloud_deploy_complete',
diff --git a/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_phased_subcloud_deploy.py b/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_phased_subcloud_deploy.py
index b91ac88b3..ffb3a30f1 100644
--- a/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_phased_subcloud_deploy.py
+++ b/distributedcloud/dcmanager/tests/unit/api/v1/controllers/test_phased_subcloud_deploy.py
@@ -116,14 +116,16 @@ class TestSubcloudDeployBootstrap(testroot.DCManagerApiTest):
     @mock.patch.object(dutils, 'load_yaml_file')
     @mock.patch.object(os_path, 'exists')
     def test_subcloud_bootstrap(self, mock_path_exists, mock_load_yaml):
-        mock_path_exists.side_effect = [False, False, False, False, True]
-        mock_load_yaml.return_value = {
-            "software_version": fake_subcloud.FAKE_SOFTWARE_VERSION}
-
         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
             name=fake_subcloud.FAKE_BOOTSTRAP_FILE_DATA["name"],
             deploy_status=consts.DEPLOY_STATE_INSTALLED)
+        config_file = psd_common.get_config_file_path(subcloud.name)
+        mock_path_exists.side_effect = lambda x: True if x == config_file else False
+        mock_load_yaml.return_value = {
+            consts.BOOTSTRAP_ADDRESS:
+                fake_subcloud.FAKE_BOOTSTRAP_VALUE[consts.BOOTSTRAP_ADDRESS],
+            "software_version": fake_subcloud.FAKE_SOFTWARE_VERSION}

         fake_content = json.dumps(
             fake_subcloud.FAKE_BOOTSTRAP_FILE_DATA).encode("utf-8")
@@ -256,22 +258,24 @@ class TestSubcloudDeployConfig(testroot.DCManagerApiTest):
         self.mock_get_request_data = p.start()
         self.addCleanup(p.stop)

-    def test_configure_subcloud(self):
+    @mock.patch.object(dutils, 'load_yaml_file')
+    def test_configure_subcloud(self, mock_load_yaml):
         subcloud = fake_subcloud.create_fake_subcloud(self.ctx)
         fake_password = (base64.b64encode('testpass'.encode("utf-8"))).decode('ascii')
         data = {'sysadmin_password': fake_password}

         self.mock_rpc_client().subcloud_deploy_config.return_value = True
         self.mock_get_request_data.return_value = data
+        mock_load_yaml.return_value = {
+            consts.BOOTSTRAP_ADDRESS:
+                fake_subcloud.FAKE_BOOTSTRAP_VALUE[consts.BOOTSTRAP_ADDRESS]}

         response = self.app.patch_json(FAKE_URL + '/' + str(subcloud.id) +
                                        '/configure',
                                        headers=FAKE_HEADERS, params=data)

         self.mock_rpc_client().subcloud_deploy_config.assert_called_once_with(
-            mock.ANY,
-            subcloud.id,
-            mock.ANY)
+            mock.ANY, subcloud.id, data, initial_deployment=True)
         self.assertEqual(response.status_int, 200)

     def test_configure_subcloud_no_body(self):
@@ -338,7 +342,8 @@ class TestSubcloudDeployInstall(testroot.DCManagerApiTest):
         self.mock_get_request_data = p.start()
         self.addCleanup(p.stop)

-    def test_install_subcloud(self):
+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_install_subcloud(self, mock_initial_deployment):

         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
@@ -358,6 +363,7 @@ class TestSubcloudDeployInstall(testroot.DCManagerApiTest):
                            'bmc_password': fake_bmc_password}
         self.mock_get_request_data.return_value = install_payload
         self.mock_get_subcloud_db_install_values.return_value = install_data
+        mock_initial_deployment.return_value = True

         self.mock_rpc_client().subcloud_deploy_install.return_value = True
         self.mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path')
@@ -371,7 +377,8 @@ class TestSubcloudDeployInstall(testroot.DCManagerApiTest):
                          response.json['deploy-status'])
         self.assertEqual(SW_VERSION, response.json['software-version'])

-    def test_install_subcloud_with_release_parameter(self):
+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_install_subcloud_with_release_parameter(self, mock_initial_deployment):

         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
@@ -392,6 +399,7 @@ class TestSubcloudDeployInstall(testroot.DCManagerApiTest):
                            'release': FAKE_SOFTWARE_VERSION}
         self.mock_get_request_data.return_value = install_payload
         self.mock_get_subcloud_db_install_values.return_value = install_data
+        mock_initial_deployment.return_value = True

         self.mock_rpc_client().subcloud_deploy_install.return_value = True
         self.mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path')
@@ -404,7 +412,40 @@ class TestSubcloudDeployInstall(testroot.DCManagerApiTest):
         self.assertEqual(consts.DEPLOY_STATE_PRE_INSTALL,
                          response.json['deploy-status'])

-    def test_install_subcloud_no_body(self):
+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_install_subcloud_not_initial_deployment(
+            self, mock_initial_deployment):
+
+        subcloud = fake_subcloud.create_fake_subcloud(
+            self.ctx,
+            deploy_status=consts.DEPLOY_STATE_CREATED,
+            software_version=SW_VERSION)
+        install_data = copy.copy(FAKE_SUBCLOUD_INSTALL_VALUES)
+        install_data.pop('software_version')
+
+        fake_sysadmin_password = base64.b64encode(
+            'testpass'.encode("utf-8")).decode('utf-8')
+        fake_bmc_password = base64.b64encode(
+            'bmc_password'.encode("utf-8")).decode('utf-8')
+        bmc_password = {'bmc_password': fake_bmc_password}
+        install_data.update(bmc_password)
+        install_payload = {'install_values': install_data,
+                           'sysadmin_password': fake_sysadmin_password,
+                           'bmc_password': fake_bmc_password}
+        self.mock_get_request_data.return_value = install_payload
+        self.mock_get_subcloud_db_install_values.return_value = install_data
+
+        self.mock_rpc_client().subcloud_deploy_install.return_value = True
+        self.mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path')
+        mock_initial_deployment.return_value = False
+
+        six.assertRaisesRegex(self, webtest.app.AppError, "400 *",
+                              self.app.patch_json, FAKE_URL + '/' +
+                              str(subcloud.id) + '/install',
+                              headers=FAKE_HEADERS, params={})
+
+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_install_subcloud_no_body(self, mock_initial_deployment):

         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
@@ -412,13 +453,16 @@ class TestSubcloudDeployInstall(testroot.DCManagerApiTest):
             software_version=SW_VERSION)

         self.mock_get_request_data.return_value = {}
+        mock_initial_deployment.return_value = True

         six.assertRaisesRegex(self, webtest.app.AppError, "400 *",
                               self.app.patch_json, FAKE_URL + '/' +
                               str(subcloud.id) + '/install',
                               headers=FAKE_HEADERS, params={})

-    def test_install_subcloud_no_install_values_on_request_or_db(self):
+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_install_subcloud_no_install_values_on_request_or_db(
+            self, mock_initial_deployment):

         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
@@ -433,13 +477,16 @@ class TestSubcloudDeployInstall(testroot.DCManagerApiTest):
         install_payload = {'sysadmin_password': fake_sysadmin_password,
                            'bmc_password': fake_bmc_password}
         self.mock_get_request_data.return_value = install_payload
+        mock_initial_deployment.return_value = True

         six.assertRaisesRegex(self, webtest.app.AppError, "400 *",
                               self.app.patch_json, FAKE_URL + '/' +
                               str(subcloud.id) + '/install',
                               headers=FAKE_HEADERS, params=install_payload)

-    def test_install_subcloud_no_install_values_on_request(self):
+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_install_subcloud_no_install_values_on_request(
+            self, mock_initial_deployment):

         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
@@ -460,6 +507,7 @@ class TestSubcloudDeployInstall(testroot.DCManagerApiTest):

         self.mock_rpc_client().subcloud_deploy_install.return_value = True
         self.mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path')
+        mock_initial_deployment.return_value = True

         response = self.app.patch_json(
             FAKE_URL + '/' + str(subcloud.id) + '/install',
@@ -521,12 +569,14 @@ class TestSubcloudDeployAbort(testroot.DCManagerApiTest):
         self.mock_rpc_client = p.start()
         self.addCleanup(p.stop)

-    def test_abort_subcloud(self):
+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_abort_subcloud(self, mock_initial_deployment):
         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
             deploy_status=consts.DEPLOY_STATE_INSTALLING)

         self.mock_rpc_client().subcloud_deploy_abort.return_value = True
+        mock_initial_deployment.return_value = True

         response = self.app.patch_json(FAKE_URL + '/' +
                                        str(subcloud.id) + '/abort',
@@ -537,6 +587,20 @@ class TestSubcloudDeployAbort(testroot.DCManagerApiTest):
                          subcloud.deploy_status)
         self.assertEqual(response.status_int, 200)

+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_abort_subcloud_not_initial_deployment(self, mock_initial_deployment):
+        subcloud = fake_subcloud.create_fake_subcloud(
+            self.ctx,
+            deploy_status=consts.DEPLOY_STATE_INSTALLING)
+
+        self.mock_rpc_client().subcloud_deploy_abort.return_value = True
+        mock_initial_deployment.return_value = False
+
+        six.assertRaisesRegex(self, webtest.app.AppError, "400 *",
+                              self.app.patch_json, FAKE_URL + '/' +
+                              str(subcloud.id) + '/abort',
+                              headers=FAKE_HEADERS)
+
     def test_abort_subcloud_invalid_deploy_status(self):
         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
@@ -592,11 +656,13 @@ class TestSubcloudDeployResume(testroot.DCManagerApiTest):
         self.mock_query = p.start()
         self.addCleanup(p.stop)

+    @mock.patch.object(dutils, 'load_yaml_file')
+    @mock.patch.object(psd_common, 'is_initial_deployment')
     @mock.patch.object(os_path, 'isdir')
     @mock.patch.object(os, 'listdir')
-    def test_resume_subcloud(self,
-                             mock_os_listdir,
-                             mock_os_isdir):
+    def test_resume_subcloud(
+            self, mock_os_listdir, mock_os_isdir, mock_initial_deployment,
+            mock_load_yaml):
         mock_os_isdir.return_value = True
         mock_os_listdir.return_value = ['deploy_chart_fake.tgz',
                                         'deploy_overrides_fake.yaml',
@@ -612,6 +678,10 @@ class TestSubcloudDeployResume(testroot.DCManagerApiTest):

         self.mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path')
         self.mock_rpc_client().subcloud_deploy_resume.return_value = True
+        mock_initial_deployment.return_value = True
+        mock_load_yaml.return_value = {
+            consts.BOOTSTRAP_ADDRESS:
+                fake_subcloud.FAKE_BOOTSTRAP_VALUE[consts.BOOTSTRAP_ADDRESS]}

         for state in psd_api.RESUMABLE_STATES:
             fake_sysadmin_password = base64.b64encode(
@@ -649,6 +719,30 @@ class TestSubcloudDeployResume(testroot.DCManagerApiTest):
                              response.json['deploy-status'])
             self.assertEqual(SW_VERSION, response.json['software-version'])

+    @mock.patch.object(psd_common, 'is_initial_deployment')
+    def test_resume_subcloud_not_initial_deployment(self,
+                                                    mock_initial_deployment):
+
+        subcloud = fake_subcloud.create_fake_subcloud(
+            self.ctx,
+            name=fake_subcloud.FAKE_BOOTSTRAP_FILE_DATA["name"],
+            deploy_status=consts.DEPLOY_STATE_CREATED,
+            software_version=SW_VERSION)
+
+        self.mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path')
+        self.mock_rpc_client().subcloud_deploy_resume.return_value = True
+        mock_initial_deployment.return_value = False
+
+        for state in psd_api.RESUMABLE_STATES:
+            subcloud = db_api.subcloud_update(self.ctx,
+                                              subcloud.id,
+                                              deploy_status=state)
+
+            six.assertRaisesRegex(self, webtest.app.AppError, "400 *",
+                                  self.app.patch_json, FAKE_URL + '/' +
+                                  str(subcloud.id) + '/resume',
+                                  headers=FAKE_HEADERS)
+
     def test_resume_subcloud_invalid_state(self):

         subcloud = fake_subcloud.create_fake_subcloud(
@@ -673,15 +767,14 @@ class TestSubcloudDeployResume(testroot.DCManagerApiTest):
                               str(subcloud.id) + '/resume',
                               headers=FAKE_HEADERS)

+    @mock.patch.object(psd_common, 'is_initial_deployment')
     @mock.patch.object(dutils, 'load_yaml_file')
     @mock.patch.object(os_path, 'exists')
     @mock.patch.object(os_path, 'isdir')
     @mock.patch.object(os, 'listdir')
-    def test_resume_subcloud_no_request_data(self,
-                                             mock_os_listdir,
-                                             mock_os_isdir,
-                                             mock_path_exists,
-                                             mock_load_yaml):
+    def test_resume_subcloud_no_request_data(
+            self, mock_os_listdir, mock_os_isdir, mock_path_exists,
+            mock_load_yaml, mock_initial_deployment):
         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
             name=fake_subcloud.FAKE_BOOTSTRAP_FILE_DATA["name"],
@@ -692,6 +785,8 @@ class TestSubcloudDeployResume(testroot.DCManagerApiTest):
                                         consts.DEPLOY_CONFIG)
         mock_path_exists.side_effect = lambda x: True if x == config_file else False
         mock_load_yaml.return_value = {
+            consts.BOOTSTRAP_ADDRESS:
+                fake_subcloud.FAKE_BOOTSTRAP_VALUE[consts.BOOTSTRAP_ADDRESS],
             "software_version": fake_subcloud.FAKE_SOFTWARE_VERSION}
         mock_os_isdir.return_value = True
         mock_os_listdir.return_value = ['deploy_chart_fake.tgz',
@@ -699,6 +794,7 @@ class TestSubcloudDeployResume(testroot.DCManagerApiTest):
                                         'deploy_overrides_fake.yaml',
                                         'deploy_playbook_fake.yaml']
         self.mock_get_vault_load_files.return_value = ('iso_file_path', 'sig_file_path')
         self.mock_rpc_client().subcloud_deploy_resume.return_value = True
+        mock_initial_deployment.return_value = True

         for state in psd_api.RESUMABLE_STATES:
             fake_sysadmin_password = base64.b64encode(
diff --git a/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py b/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py
index f20e0bc6a..107e9b866 100644
--- a/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py
+++ b/distributedcloud/dcmanager/tests/unit/manager/test_subcloud_manager.py
@@ -31,7 +31,7 @@ import threading

 from dccommon import consts as dccommon_consts
 from dccommon import subcloud_install
-from dccommon.utils import RunAnsible
+from dccommon.utils import AnsiblePlaybook
 from dcmanager.common import consts
 from dcmanager.common import exceptions
 from dcmanager.common import prestage
@@ -451,13 +451,15 @@ class TestSubcloudManager(base.DCManagerTestCase):
         self.assertEqual('localhost', sm.host)
         self.assertEqual(self.ctx, sm.context)

+    @mock.patch.object(cutils, 'create_subcloud_inventory')
     @mock.patch.object(
         subcloud_manager.SubcloudManager, 'compose_install_command')
     @mock.patch.object(
         subcloud_manager.SubcloudManager, '_run_subcloud_install')
     def test_subcloud_deploy_install(self, mock_run_subcloud_install,
-                                     mock_compose_install_command):
+                                     mock_compose_install_command,
+                                     mock_create_subcloud_inventory):
         mock_run_subcloud_install.return_value = True

         subcloud_name = 'subcloud1'
@@ -573,13 +575,13 @@ class TestSubcloudManager(base.DCManagerTestCase):
     @mock.patch.object(subcloud_manager, 'keyring')
     @mock.patch.object(cutils, 'get_playbook_for_software_version')
     @mock.patch.object(cutils, 'update_values_on_yaml_file')
-    @mock.patch.object(RunAnsible, 'exec_playbook')
-    def test_subcloud_deploy_bootstrap(self, mock_exec_playbook, mock_update_yml,
+    @mock.patch.object(AnsiblePlaybook, 'run_playbook')
+    def test_subcloud_deploy_bootstrap(self, mock_run_playbook, mock_update_yml,
                                        mock_get_playbook_for_software_version,
                                        mock_keyring, mock_create_subcloud_inventory):
         mock_get_playbook_for_software_version.return_value = "22.12"
         mock_keyring.get_password.return_value = "testpass"
-        mock_exec_playbook.return_value = False
+        mock_run_playbook.return_value = False

         subcloud = fake_subcloud.create_fake_subcloud(
             self.ctx,
@@ -593,7 +595,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
         sm = subcloud_manager.SubcloudManager()
         sm.subcloud_deploy_bootstrap(self.ctx, subcloud.id, payload)

-        mock_exec_playbook.assert_called_once()
+        mock_run_playbook.assert_called_once()

         # Verify subcloud was updated with correct values
         updated_subcloud = db_api.subcloud_get_by_name(self.ctx,
@@ -624,9 +626,12 @@ class TestSubcloudManager(base.DCManagerTestCase):
         self.assertEqual(consts.DEPLOY_STATE_PRE_BOOTSTRAP_FAILED,
                          updated_subcloud.deploy_status)

+    @mock.patch.object(cutils, 'create_subcloud_inventory')
     @mock.patch.object(subcloud_manager.SubcloudManager,
                        '_prepare_for_deployment')
-    def test_configure_subcloud(self, mock_prepare_for_deployment):
+    def test_configure_subcloud(self, mock_prepare_for_deployment,
+                                mock_create_subcloud_inventory):
+
         subcloud = self.create_subcloud_static(
             self.ctx,
             name='subcloud1',
@@ -636,12 +641,15 @@ class TestSubcloudManager(base.DCManagerTestCase):
                         "deploy_playbook": "test_playbook.yaml",
                         "deploy_overrides": "test_overrides.yaml",
                         "deploy_chart": "test_chart.yaml",
-                        "deploy_config": "subcloud1.yaml"}
+                        "deploy_config": "subcloud1.yaml",
+                        consts.BOOTSTRAP_ADDRESS:
+                            fake_subcloud.FAKE_BOOTSTRAP_VALUE[consts.BOOTSTRAP_ADDRESS]}

         sm = subcloud_manager.SubcloudManager()
         sm.subcloud_deploy_config(self.ctx, subcloud.id, payload=fake_payload)
         mock_prepare_for_deployment.assert_called_once()
+        mock_create_subcloud_inventory.assert_called_once()

     @mock.patch.object(subcloud_manager.SubcloudManager,
                        '_run_subcloud_install')
@@ -649,17 +657,17 @@ class TestSubcloudManager(base.DCManagerTestCase):
                        '_prepare_for_deployment')
     @mock.patch.object(cutils, 'create_subcloud_inventory')
     @mock.patch.object(subcloud_manager, 'keyring')
-    @mock.patch.object(cutils, 'get_playbook_for_software_version')
     @mock.patch.object(cutils, 'update_values_on_yaml_file')
-    @mock.patch.object(RunAnsible, 'exec_playbook')
-    def test_subcloud_deploy_resume(self, mock_exec_playbook, mock_update_yml,
-                                    mock_get_playbook_for_software_version,
-                                    mock_keyring, create_subcloud_inventory,
-                                    mock_prepare_for_deployment,
-                                    mock_run_subcloud_install):
+    @mock.patch.object(cutils, 'get_playbook_for_software_version')
+    @mock.patch.object(AnsiblePlaybook, 'run_playbook')
+    def test_subcloud_deploy_resume(
+            self, mock_run_playbook, mock_get_playbook_for_software_version,
+            mock_update_yml, mock_keyring, mock_create_subcloud_inventory,
+            mock_prepare_for_deployment, mock_run_subcloud_install):
+
         mock_get_playbook_for_software_version.return_value = "22.12"
         mock_keyring.get_password.return_value = "testpass"
-        mock_exec_playbook.return_value = False
+        mock_run_playbook.return_value = False
         mock_run_subcloud_install.return_value = True

         subcloud = self.create_subcloud_static(
@@ -696,6 +704,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
         sm = subcloud_manager.SubcloudManager()
         sm.subcloud_deploy_resume(self.ctx, subcloud.id, subcloud.name,
                                   fake_payload, deploy_states_to_run)
+        mock_prepare_for_deployment.assert_called_once()

         # Verify subcloud was updated with correct values
         updated_subcloud = db_api.subcloud_get_by_name(self.ctx,
@@ -703,6 +712,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
         self.assertEqual(consts.DEPLOY_STATE_DONE,
                          updated_subcloud.deploy_status)

+    @mock.patch.object(cutils, 'get_oam_addresses')
     @mock.patch.object(subcloud_install.SubcloudInstall, 'prep')
     @mock.patch.object(subcloud_install, 'KeystoneClient')
     @mock.patch.object(subcloud_install, 'SysinvClient')
@@ -720,8 +730,8 @@ class TestSubcloudManager(base.DCManagerTestCase):
     @mock.patch.object(subcloud_manager, 'keyring')
     @mock.patch.object(cutils, 'get_playbook_for_software_version')
     @mock.patch.object(cutils, 'update_values_on_yaml_file')
-    @mock.patch.object(RunAnsible, 'exec_playbook')
-    def test_add_subcloud(self, mock_exec_playbook, mock_update_yml,
+    @mock.patch.object(AnsiblePlaybook, 'run_playbook')
+    def test_add_subcloud(self, mock_run_playbook, mock_update_yml,
                           mock_get_playbook_for_software_version, mock_keyring,
                           mock_create_subcloud_inventory, mock_write_deploy_files,
                           mock_sysinv_client,
@@ -729,7 +739,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
                           mock_create_intermediate_ca_cert,
                           mock_write_subcloud_ansible_config,
                           mock_install_ks_client, mock_install_sysinvclient,
-                          mock_install_prep):
+                          mock_install_prep, mock_oam_address):
         # Prepare the payload
         install_values = copy.copy(fake_subcloud.FAKE_SUBCLOUD_INSTALL_VALUES)
         install_values['software_version'] = SW_VERSION
@@ -752,7 +762,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
         # Mock return values
         mock_get_playbook_for_software_version.return_value = SW_VERSION
         mock_keyring.get_password.return_value = payload['sysadmin_password']
-        mock_exec_playbook.return_value = False
+        mock_run_playbook.return_value = False
         mock_openstack_driver().keystone_client = FakeKeystoneClient()

         # Call the add method
@@ -769,9 +779,10 @@ class TestSubcloudManager(base.DCManagerTestCase):
         mock_update_yml.assert_called()
         mock_create_subcloud_inventory.assert_called()
         mock_get_playbook_for_software_version.assert_called_once()
-        self.assertEqual(mock_exec_playbook.call_count, 3)
+        self.assertEqual(mock_run_playbook.call_count, 3)

-    @mock.patch.object(subcloud_manager, 'run_playbook')
+    @mock.patch.object(subcloud_manager.AnsiblePlaybook,
+                       'run_playbook')
     @mock.patch.object(subcloud_manager.SubcloudManager,
                        'compose_rehome_command')
     @mock.patch.object(subcloud_manager.SubcloudManager,
@@ -800,7 +811,6 @@ class TestSubcloudManager(base.DCManagerTestCase):
                                    mock_create_intermediate_ca_cert,
                                    mock_compose_rehome_command,
                                    mock_run_playbook):
-
         values = utils.create_subcloud_dict(base.SUBCLOUD_SAMPLE_DATA_0)
         values['deploy_status'] = consts.DEPLOY_STATE_NONE
         values['migrate'] = 'true'
@@ -824,7 +834,6 @@ class TestSubcloudManager(base.DCManagerTestCase):
         mock_create_subcloud_inventory.assert_called_once()
         mock_write_subcloud_ansible_config.assert_called_once()
         mock_create_intermediate_ca_cert.assert_called_once()
-        mock_run_playbook.assert_called_once()
         mock_compose_rehome_command.assert_called_once_with(
             values['name'],
             values['region_name'],
@@ -974,7 +983,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
     @mock.patch.object(subcloud_manager.SubcloudManager,
                        '_create_subcloud_route')
     @mock.patch.object(subcloud_manager, 'OpenStackDriver')
-    @mock.patch.object(subcloud_manager, 'run_playbook')
+    @mock.patch.object(subcloud_manager.AnsiblePlaybook, 'run_playbook')
     def test_update_subcloud_network_reconfiguration(
             self, mock_run_playbook, mock_keystone_client, mock_create_route,
             mock_update_endpoints, mock_delete_route, mock_addn_hosts_dc):
@@ -1796,6 +1805,10 @@ class TestSubcloudManager(base.DCManagerTestCase):
             ]
         )

+    @mock.patch.object(subcloud_manager.SubcloudManager,
+                       '_create_addn_hosts_dc')
+    @mock.patch.object(subcloud_manager, 'OpenStackDriver')
+    @mock.patch.object(cutils, 'get_oam_addresses')
     @mock.patch.object(subcloud_manager.SubcloudManager,
                        '_run_subcloud_install')
     @mock.patch.object(subcloud_manager.SubcloudManager,
@@ -1804,15 +1817,19 @@ class TestSubcloudManager(base.DCManagerTestCase):
     @mock.patch.object(subcloud_manager, 'keyring')
     @mock.patch.object(cutils, 'get_playbook_for_software_version')
     @mock.patch.object(cutils, 'update_values_on_yaml_file')
-    @mock.patch.object(RunAnsible, 'exec_playbook')
-    def test_subcloud_redeploy(self, mock_exec_playbook, mock_update_yml,
+    @mock.patch.object(AnsiblePlaybook, 'run_playbook')
+    def test_subcloud_redeploy(self, mock_run_playbook, mock_update_yml,
                                mock_get_playbook_for_software_version,
                                mock_keyring, create_subcloud_inventory,
                                mock_prepare_for_deployment,
-                               mock_run_subcloud_install):
+                               mock_run_subcloud_install,
+                               mock_oam_address, mock_keystone_client,
+                               mock_create_addn_hosts):
+
+        mock_keystone_client().keystone_client = FakeKeystoneClient()
         mock_get_playbook_for_software_version.return_value = "22.12"
         mock_keyring.get_password.return_value = "testpass"
-        mock_exec_playbook.return_value = False
+        mock_run_playbook.return_value = False
         mock_run_subcloud_install.return_value = True

         subcloud = self.create_subcloud_static(
@@ -2294,7 +2311,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
                        '_create_backup_overrides_file')
     @mock.patch.object(subcloud_manager, 'keyring')
     @mock.patch.object(cutils, 'get_oam_addresses')
-    @mock.patch.object(subcloud_manager, 'run_playbook')
+    @mock.patch.object(subcloud_manager.AnsiblePlaybook, 'run_playbook')
     @mock.patch.object(subcloud_manager.SubcloudManager,
                        '_clear_subcloud_backup_failure_alarm_if_exists')
     @mock.patch.object(subcloud_manager.SubcloudManager,
@@ -2368,7 +2385,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
     @mock.patch.object(cutils, 'create_subcloud_inventory')
     @mock.patch.object(cutils, 'get_oam_addresses')
     @mock.patch.object(subcloud_manager, 'OpenStackDriver')
-    @mock.patch.object(subcloud_manager, 'run_playbook')
+    @mock.patch.object(subcloud_manager.AnsiblePlaybook, 'run_playbook')
     @mock.patch.object(subcloud_manager.SubcloudManager,
                        'compose_backup_delete_command')
     @mock.patch.object(subcloud_manager.SubcloudManager,
@@ -2826,8 +2843,7 @@ class TestSubcloudManager(base.DCManagerTestCase):

         mock_subcloud_migrate_generate_ansible_config.assert_called_once_with(
             mock.ANY, mock.ANY, payload_result['rehome_data']['saved_payload'])
-        mock_rehome_subcloud.assert_called_once_with(
-            mock.ANY, mock.ANY, payload_result['rehome_data']['saved_payload'])
+        mock_rehome_subcloud.assert_called_once_with(mock.ANY, mock.ANY)
         self.assertFalse(mock_db_api.subcloud_update.called)
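
For reviewers, a minimal sketch of the calling pattern the updated tests exercise after this change. It is illustrative only and not part of the patch: the wrapper function name and the log/command arguments below are placeholders, not code from the tree.

    # Illustrative sketch (not part of the patch): driving the refactored
    # AnsiblePlaybook helper, based on the hunks above. The wrapper name and
    # argument values are hypothetical.
    from dccommon import utils as dccommon_utils

    def run_subcloud_playbook(subcloud_name, log_file, playbook_command):
        # The subcloud name is now bound at construction time, so the
        # per-subcloud abort bookkeeping is keyed by the instance rather
        # than passed on every call.
        ansible = dccommon_utils.AnsiblePlaybook(subcloud_name)
        # run_playbook() returns True if the playbook was aborted via
        # run_abort() and False if it ran to completion; execution errors
        # surface as PlaybookExecutionFailed.
        aborted = ansible.run_playbook(log_file, playbook_command)
        return aborted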