diff --git a/setup.cfg b/setup.cfg index cb9b5f688..67cb860b2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -64,6 +64,7 @@ watcher_scoring_engine_containers = watcher_strategies = dummy = watcher.decision_engine.strategy.strategies.dummy_strategy:DummyStrategy dummy_with_scorer = watcher.decision_engine.strategy.strategies.dummy_with_scorer:DummyWithScorer + dummy_with_resize = watcher.decision_engine.strategy.strategies.dummy_with_resize:DummyWithResize basic = watcher.decision_engine.strategy.strategies.basic_consolidation:BasicConsolidation outlet_temperature = watcher.decision_engine.strategy.strategies.outlet_temp_control:OutletTempControl vm_workload_consolidation = watcher.decision_engine.strategy.strategies.vm_workload_consolidation:VMWorkloadConsolidation @@ -76,12 +77,14 @@ watcher_actions = nop = watcher.applier.actions.nop:Nop sleep = watcher.applier.actions.sleep:Sleep change_nova_service_state = watcher.applier.actions.change_nova_service_state:ChangeNovaServiceState + resize = watcher.applier.actions.resize:Resize watcher_workflow_engines = taskflow = watcher.applier.workflow_engine.default:DefaultWorkFlowEngine watcher_planners = - default = watcher.decision_engine.planner.default:DefaultPlanner + weight = watcher.decision_engine.planner.weight:WeightPlanner + workload_stabilization = watcher.decision_engine.planner.workload_stabilization:WorkloadStabilizationPlanner watcher_cluster_data_model_collectors = compute = watcher.decision_engine.model.collector.nova:NovaClusterDataModelCollector diff --git a/watcher/api/controllers/v1/action.py b/watcher/api/controllers/v1/action.py index e2b9e528a..3e96fdb95 100644 --- a/watcher/api/controllers/v1/action.py +++ b/watcher/api/controllers/v1/action.py @@ -88,7 +88,6 @@ class Action(base.APIBase): between the internal object model and the API representation of a action. """ _action_plan_uuid = None - _next_uuid = None def _get_action_plan_uuid(self): return self._action_plan_uuid @@ -105,22 +104,6 @@ class Action(base.APIBase): except exception.ActionPlanNotFound: self._action_plan_uuid = None - def _get_next_uuid(self): - return self._next_uuid - - def _set_next_uuid(self, value): - if value == wtypes.Unset: - self._next_uuid = wtypes.Unset - elif value and self._next_uuid != value: - try: - action_next = objects.Action.get( - pecan.request.context, value) - self._next_uuid = action_next.uuid - self.next = action_next.id - except exception.ActionNotFound: - self.action_next_uuid = None - # raise e - uuid = wtypes.wsattr(types.uuid, readonly=True) """Unique UUID for this action""" @@ -138,10 +121,8 @@ class Action(base.APIBase): input_parameters = types.jsontype """One or more key/value pairs """ - next_uuid = wsme.wsproperty(types.uuid, _get_next_uuid, - _set_next_uuid, - mandatory=True) - """This next action UUID""" + parents = wtypes.wsattr(types.jsontype, readonly=True) + """UUIDs of parent actions""" links = wsme.wsattr([link.Link], readonly=True) """A list containing a self link and associated action links""" @@ -152,7 +133,6 @@ class Action(base.APIBase): self.fields = [] fields = list(objects.Action.fields) fields.append('action_plan_uuid') - fields.append('next_uuid') for field in fields: # Skip fields we do not expose. 
if not hasattr(self, field): @@ -163,15 +143,13 @@ class Action(base.APIBase): self.fields.append('action_plan_id') setattr(self, 'action_plan_uuid', kwargs.get('action_plan_id', wtypes.Unset)) - setattr(self, 'next_uuid', kwargs.get('next', - wtypes.Unset)) @staticmethod def _convert_with_links(action, url, expand=True): if not expand: - action.unset_fields_except(['uuid', 'state', 'next', 'next_uuid', - 'action_plan_uuid', 'action_plan_id', - 'action_type']) + action.unset_fields_except(['uuid', 'state', 'action_plan_uuid', + 'action_plan_id', 'action_type', + 'parents']) action.links = [link.Link.make_link('self', url, 'actions', action.uuid), @@ -193,9 +171,9 @@ class Action(base.APIBase): state='PENDING', created_at=datetime.datetime.utcnow(), deleted_at=None, - updated_at=datetime.datetime.utcnow()) + updated_at=datetime.datetime.utcnow(), + parents=[]) sample._action_plan_uuid = '7ae81bb3-dec3-4289-8d6c-da80bd8001ae' - sample._next_uuid = '7ae81bb3-dec3-4289-8d6c-da80bd8001ae' return cls._convert_with_links(sample, 'http://localhost:9322', expand) @@ -216,17 +194,6 @@ class ActionCollection(collection.Collection): collection.actions = [Action.convert_with_links(p, expand) for p in actions] - if 'sort_key' in kwargs: - reverse = False - if kwargs['sort_key'] == 'next_uuid': - if 'sort_dir' in kwargs: - reverse = True if kwargs['sort_dir'] == 'desc' else False - collection.actions = sorted( - collection.actions, - key=lambda action: action.next_uuid or '', - reverse=reverse) - - collection.next = collection.get_next(limit, url=url, **kwargs) return collection @classmethod @@ -268,10 +235,7 @@ class ActionsController(rest.RestController): if audit_uuid: filters['audit_uuid'] = audit_uuid - if sort_key == 'next_uuid': - sort_db_key = None - else: - sort_db_key = sort_key + sort_db_key = sort_key actions = objects.Action.list(pecan.request.context, limit, diff --git a/watcher/api/controllers/v1/action_plan.py b/watcher/api/controllers/v1/action_plan.py index de6a0f83e..47a121654 100644 --- a/watcher/api/controllers/v1/action_plan.py +++ b/watcher/api/controllers/v1/action_plan.py @@ -106,7 +106,7 @@ class ActionPlanPatchType(types.JsonPatchType): @staticmethod def mandatory_attrs(): - return ["audit_id", "state", "first_action_id"] + return ["audit_id", "state"] class ActionPlan(base.APIBase): @@ -120,7 +120,6 @@ class ActionPlan(base.APIBase): _audit_uuid = None _strategy_uuid = None _strategy_name = None - _first_action_uuid = None _efficacy_indicators = None def _get_audit_uuid(self): @@ -137,21 +136,6 @@ class ActionPlan(base.APIBase): except exception.AuditNotFound: self._audit_uuid = None - def _get_first_action_uuid(self): - return self._first_action_uuid - - def _set_first_action_uuid(self, value): - if value == wtypes.Unset: - self._first_action_uuid = wtypes.Unset - elif value and self._first_action_uuid != value: - try: - first_action = objects.Action.get(pecan.request.context, - value) - self._first_action_uuid = first_action.uuid - self.first_action_id = first_action.id - except exception.ActionNotFound: - self._first_action_uuid = None - def _get_efficacy_indicators(self): if self._efficacy_indicators is None: self._set_efficacy_indicators(wtypes.Unset) @@ -220,11 +204,6 @@ class ActionPlan(base.APIBase): uuid = wtypes.wsattr(types.uuid, readonly=True) """Unique UUID for this action plan""" - first_action_uuid = wsme.wsproperty( - types.uuid, _get_first_action_uuid, _set_first_action_uuid, - mandatory=True) - """The UUID of the first action this action plans links to""" - 
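The property removals above reflect the core data-model change in this patch: an action plan is no longer a singly linked chain navigated through `first_action_id`/`next`, but a graph in which every action carries the UUIDs of its parents. A rough sketch of the two representations, using illustrative dictionaries with shortened identifiers rather than actual Watcher objects:

    # Before: each action pointed at the one that follows it.
    chained_plan = [
        {'uuid': 'a1', 'action_type': 'migrate', 'next': 'a2'},
        {'uuid': 'a2', 'action_type': 'migrate', 'next': 'a3'},
        {'uuid': 'a3', 'action_type': 'resize', 'next': None},
    ]

    # After: each action lists the actions that must complete before it,
    # so independent actions (a1 and a2 here) can run in parallel.
    graph_plan = [
        {'uuid': 'a1', 'action_type': 'migrate', 'parents': []},
        {'uuid': 'a2', 'action_type': 'migrate', 'parents': []},
        {'uuid': 'a3', 'action_type': 'resize', 'parents': ['a1', 'a2']},
    ]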
audit_uuid = wsme.wsproperty(types.uuid, _get_audit_uuid, _set_audit_uuid, mandatory=True) """The UUID of the audit this port belongs to""" @@ -263,7 +242,6 @@ class ActionPlan(base.APIBase): setattr(self, field, kwargs.get(field, wtypes.Unset)) self.fields.append('audit_uuid') - self.fields.append('first_action_uuid') self.fields.append('efficacy_indicators') setattr(self, 'audit_uuid', kwargs.get('audit_id', wtypes.Unset)) @@ -271,16 +249,13 @@ class ActionPlan(base.APIBase): setattr(self, 'strategy_uuid', kwargs.get('strategy_id', wtypes.Unset)) fields.append('strategy_name') setattr(self, 'strategy_name', kwargs.get('strategy_id', wtypes.Unset)) - setattr(self, 'first_action_uuid', - kwargs.get('first_action_id', wtypes.Unset)) @staticmethod def _convert_with_links(action_plan, url, expand=True): if not expand: action_plan.unset_fields_except( ['uuid', 'state', 'efficacy_indicators', 'global_efficacy', - 'updated_at', 'audit_uuid', 'strategy_uuid', 'strategy_name', - 'first_action_uuid']) + 'updated_at', 'audit_uuid', 'strategy_uuid', 'strategy_name']) action_plan.links = [ link.Link.make_link( @@ -305,7 +280,6 @@ class ActionPlan(base.APIBase): created_at=datetime.datetime.utcnow(), deleted_at=None, updated_at=datetime.datetime.utcnow()) - sample._first_action_uuid = '57eaf9ab-5aaa-4f7e-bdf7-9a140ac7a720' sample._audit_uuid = 'abcee106-14d3-4515-b744-5a26885cf6f6' sample._efficacy_indicators = [{'description': 'Test indicator', 'name': 'test_indicator', diff --git a/watcher/applier/actions/resize.py b/watcher/applier/actions/resize.py new file mode 100644 index 000000000..0db45f9ce --- /dev/null +++ b/watcher/applier/actions/resize.py @@ -0,0 +1,106 @@ +# -*- encoding: utf-8 -*- +# Copyright (c) 2017 Servionica +# +# Authors: Alexander Chadin +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from oslo_log import log +import six +import voluptuous + +from watcher._i18n import _, _LC +from watcher.applier.actions import base +from watcher.common import nova_helper +from watcher.common import utils + +LOG = log.getLogger(__name__) + + +class Resize(base.BaseAction): + """Resizes a server with specified flavor. + + This action will allow you to resize a server to another flavor. + + The action schema is:: + + schema = Schema({ + 'resource_id': str, # should be a UUID + 'flavor': str, # should be either ID or Name of Flavor + }) + + The `resource_id` is the UUID of the server to resize. + The `flavor` is the ID or Name of Flavor (Nova accepts either ID or Name + of Flavor to resize() function). 
+ """ + + # input parameters constants + FLAVOR = 'flavor' + + def check_resource_id(self, value): + if (value is not None and + len(value) > 0 and not + utils.is_uuid_like(value)): + raise voluptuous.Invalid(_("The parameter " + "resource_id is invalid.")) + + @property + def schema(self): + return voluptuous.Schema({ + voluptuous.Required(self.RESOURCE_ID): self.check_resource_id, + voluptuous.Required(self.FLAVOR): + voluptuous.All(voluptuous.Any(*six.string_types), + voluptuous.Length(min=1)), + }) + + @property + def instance_uuid(self): + return self.resource_id + + @property + def flavor(self): + return self.input_parameters.get(self.FLAVOR) + + def resize(self): + nova = nova_helper.NovaHelper(osc=self.osc) + LOG.debug("Resize instance %s to %s flavor", self.instance_uuid, + self.flavor) + instance = nova.find_instance(self.instance_uuid) + result = None + if instance: + try: + result = nova.resize_instance( + instance_id=self.instance_uuid, flavor=self.flavor) + except Exception as exc: + LOG.exception(exc) + LOG.critical( + _LC("Unexpected error occurred. Resizing failed for " + "instance %s."), self.instance_uuid) + return result + + def execute(self): + return self.resize() + + def revert(self): + return self.migrate(destination=self.source_node) + + def pre_condition(self): + # TODO(jed): check if the instance exists / check if the instance is on + # the source_node + pass + + def post_condition(self): + # TODO(jed): check extra parameters (network response, etc.) + pass diff --git a/watcher/common/exception.py b/watcher/common/exception.py index c49bee601..bf2679eee 100644 --- a/watcher/common/exception.py +++ b/watcher/common/exception.py @@ -282,6 +282,10 @@ class ActionFilterCombinationProhibited(Invalid): "prohibited") +class UnsupportedActionType(UnsupportedError): + msg_fmt = _("Provided %(action_type) is not supported yet") + + class EfficacyIndicatorNotFound(ResourceNotFound): msg_fmt = _("Efficacy indicator %(efficacy_indicator)s could not be found") diff --git a/watcher/common/nova_helper.py b/watcher/common/nova_helper.py index 860a9ff9a..b6432f78f 100644 --- a/watcher/common/nova_helper.py +++ b/watcher/common/nova_helper.py @@ -305,6 +305,70 @@ class NovaHelper(object): return True + def resize_instance(self, instance_id, flavor, retry=120): + """This method resizes given instance with specified flavor. + + This method uses the Nova built-in resize() + action to do a resize of a given instance. + + It returns True if the resize was successful, + False otherwise. + + :param instance_id: the unique id of the instance to resize. + :param flavor: the name or ID of the flavor to resize to. + """ + LOG.debug("Trying a resize of instance %s to flavor '%s'" % ( + instance_id, flavor)) + + # Looking for the instance to resize + instance = self.find_instance(instance_id) + + flavor_id = None + + try: + flavor_id = self.nova.flavors.get(flavor) + except nvexceptions.NotFound: + flavor_id = [f.id for f in self.nova.flavors.list() if + f.name == flavor][0] + except nvexceptions.ClientException as e: + LOG.debug("Nova client exception occurred while resizing " + "instance %s. Exception: %s", instance_id, e) + + if not flavor_id: + LOG.debug("Flavor not found: %s" % flavor) + return False + + if not instance: + LOG.debug("Instance not found: %s" % instance_id) + return False + + instance_status = getattr(instance, 'OS-EXT-STS:vm_state') + LOG.debug( + "Instance %s is in '%s' status." 
% (instance_id, + instance_status)) + + instance.resize(flavor=flavor_id) + while getattr(instance, + 'OS-EXT-STS:vm_state') != 'resized' \ + and retry: + instance = self.nova.servers.get(instance.id) + LOG.debug( + 'Waiting the resize of {0} to {1}'.format( + instance, flavor_id)) + time.sleep(1) + retry -= 1 + + instance_status = getattr(instance, 'status') + if instance_status != 'VERIFY_RESIZE': + return False + + instance.confirm_resize() + + LOG.debug("Resizing succeeded : instance %s is now on flavor " + "'%s'.", instance_id, flavor_id) + + return True + def live_migrate_instance(self, instance_id, dest_hostname, block_migration=False, retry=120): """This method does a live migration of a given instance @@ -645,6 +709,16 @@ class NovaHelper(object): return network_id + def get_instance_by_uuid(self, instance_uuid): + return [instance for instance in + self.nova.servers.list(search_opts={"all_tenants": True, + "uuid": instance_uuid})] + + def get_instance_by_name(self, instance_name): + return [instance for instance in + self.nova.servers.list(search_opts={"all_tenants": True, + "name": instance_name})] + def get_instances_by_node(self, host): return [instance for instance in self.nova.servers.list(search_opts={"all_tenants": True}) diff --git a/watcher/conf/planner.py b/watcher/conf/planner.py index c06bcccae..1386c2f89 100644 --- a/watcher/conf/planner.py +++ b/watcher/conf/planner.py @@ -22,7 +22,7 @@ watcher_planner = cfg.OptGroup(name='watcher_planner', title='Defines the parameters of ' 'the planner') -default_planner = 'default' +default_planner = 'weight' WATCHER_PLANNER_OPTS = { cfg.StrOpt('planner', diff --git a/watcher/db/sqlalchemy/api.py b/watcher/db/sqlalchemy/api.py index 9601ec266..75ae0370b 100644 --- a/watcher/db/sqlalchemy/api.py +++ b/watcher/db/sqlalchemy/api.py @@ -746,6 +746,9 @@ class Connection(api.BaseConnection): if not values.get('uuid'): values['uuid'] = utils.generate_uuid() + if values.get('state') is None: + values['state'] = objects.action.State.PENDING + try: action = self._create(models.Action, values) except db_exc.DBDuplicateEntry: diff --git a/watcher/db/sqlalchemy/models.py b/watcher/db/sqlalchemy/models.py index 8e65a5913..4f90f7b2c 100644 --- a/watcher/db/sqlalchemy/models.py +++ b/watcher/db/sqlalchemy/models.py @@ -193,7 +193,6 @@ class ActionPlan(Base): ) id = Column(Integer, primary_key=True, autoincrement=True) uuid = Column(String(36)) - first_action_id = Column(Integer) audit_id = Column(Integer, ForeignKey('audits.id'), nullable=False) strategy_id = Column(Integer, ForeignKey('strategies.id'), nullable=False) state = Column(String(20), nullable=True) @@ -219,7 +218,7 @@ class Action(Base): action_type = Column(String(255), nullable=False) input_parameters = Column(JSONEncodedDict, nullable=True) state = Column(String(20), nullable=True) - next = Column(String(36), nullable=True) + parents = Column(JSONEncodedList, nullable=True) action_plan = orm.relationship( ActionPlan, foreign_keys=action_plan_id, lazy=None) diff --git a/watcher/decision_engine/planner/default.py b/watcher/decision_engine/planner/default.py deleted file mode 100644 index 35a8aeee1..000000000 --- a/watcher/decision_engine/planner/default.py +++ /dev/null @@ -1,169 +0,0 @@ -# -*- encoding: utf-8 -*- -# Copyright (c) 2015 b<>com -# -# Authors: Jean-Emile DARTOIS -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
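Since the `next` pointer in the Action model above is replaced by a JSON-encoded `parents` list, a runnable order can be recovered by topologically sorting the implied graph. A standalone sketch of that idea, with hypothetical UUIDs; networkx is already a dependency of the new weight planner:

    import networkx as nx

    # Hypothetical rows as they would now be stored: each action lists the
    # UUIDs of the actions that must complete before it.
    actions = {
        'a1': [],            # migrate
        'a2': [],            # migrate
        'a3': ['a1', 'a2'],  # resize, runs only after both migrations
    }

    graph = nx.DiGraph()
    for action_uuid, parents in actions.items():
        graph.add_node(action_uuid)
        for parent in parents:
            graph.add_edge(parent, action_uuid)

    print(list(nx.topological_sort(graph)))  # e.g. ['a1', 'a2', 'a3']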
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from oslo_config import cfg -from oslo_log import log - -from watcher._i18n import _LW -from watcher.common import utils -from watcher.decision_engine.planner import base -from watcher import objects - -LOG = log.getLogger(__name__) - - -class DefaultPlanner(base.BasePlanner): - """Default planner implementation - - This implementation comes with basic rules with a set of action types that - are weighted. An action having a lower weight will be scheduled before the - other ones. The set of action types can be specified by 'weights' in the - ``watcher.conf``. You need to associate a different weight to all available - actions into the configuration file, otherwise you will get an error when - the new action will be referenced in the solution produced by a strategy. - """ - - weights_dict = { - 'nop': 0, - 'sleep': 1, - 'change_nova_service_state': 2, - 'migrate': 3, - } - - @classmethod - def get_config_opts(cls): - return [ - cfg.DictOpt( - 'weights', - help="These weights are used to schedule the actions", - default=cls.weights_dict), - ] - - def create_action(self, - action_plan_id, - action_type, - input_parameters=None): - uuid = utils.generate_uuid() - action = { - 'uuid': uuid, - 'action_plan_id': int(action_plan_id), - 'action_type': action_type, - 'input_parameters': input_parameters, - 'state': objects.action.State.PENDING, - 'next': None, - } - - return action - - def schedule(self, context, audit_id, solution): - LOG.debug('Creating an action plan for the audit uuid: %s', audit_id) - priorities = self.config.weights - action_plan = self._create_action_plan(context, audit_id, solution) - - actions = list(solution.actions) - to_schedule = [] - for action in actions: - json_action = self.create_action( - action_plan_id=action_plan.id, - action_type=action.get('action_type'), - input_parameters=action.get('input_parameters')) - to_schedule.append((priorities[action.get('action_type')], - json_action)) - - self._create_efficacy_indicators( - context, action_plan.id, solution.efficacy_indicators) - - # scheduling - scheduled = sorted(to_schedule, key=lambda x: (x[0])) - if len(scheduled) == 0: - LOG.warning(_LW("The action plan is empty")) - action_plan.first_action_id = None - action_plan.state = objects.action_plan.State.SUCCEEDED - action_plan.save() - else: - # create the first action - parent_action = self._create_action(context, - scheduled[0][1], - None) - # remove first - scheduled.pop(0) - - action_plan.first_action_id = parent_action.id - action_plan.save() - - for s_action in scheduled: - current_action = self._create_action(context, s_action[1], - parent_action) - parent_action = current_action - - return action_plan - - def _create_action_plan(self, context, audit_id, solution): - strategy = objects.Strategy.get_by_name( - context, solution.strategy.name) - - action_plan_dict = { - 'uuid': utils.generate_uuid(), - 'audit_id': audit_id, - 'strategy_id': strategy.id, - 'first_action_id': None, - 'state': objects.action_plan.State.RECOMMENDED, - 'global_efficacy': solution.global_efficacy, - } - - new_action_plan = objects.ActionPlan(context, 
**action_plan_dict) - new_action_plan.create() - - return new_action_plan - - def _create_efficacy_indicators(self, context, action_plan_id, indicators): - efficacy_indicators = [] - for indicator in indicators: - efficacy_indicator_dict = { - 'uuid': utils.generate_uuid(), - 'name': indicator.name, - 'description': indicator.description, - 'unit': indicator.unit, - 'value': indicator.value, - 'action_plan_id': action_plan_id, - } - new_efficacy_indicator = objects.EfficacyIndicator( - context, **efficacy_indicator_dict) - new_efficacy_indicator.create() - - efficacy_indicators.append(new_efficacy_indicator) - return efficacy_indicators - - def _create_action(self, context, _action, parent_action): - try: - LOG.debug("Creating the %s in the Watcher database", - _action.get("action_type")) - - new_action = objects.Action(context, **_action) - new_action.create() - new_action.save() - - if parent_action: - parent_action.next = new_action.id - parent_action.save() - - return new_action - except Exception as exc: - LOG.exception(exc) - raise diff --git a/watcher/decision_engine/planner/weight.py b/watcher/decision_engine/planner/weight.py new file mode 100644 index 000000000..f4b579bd8 --- /dev/null +++ b/watcher/decision_engine/planner/weight.py @@ -0,0 +1,233 @@ +# -*- encoding: utf-8 -*- +# +# Authors: Vincent Francoise +# Alexander Chadin +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections + +import networkx as nx +from oslo_config import cfg +from oslo_config import types +from oslo_log import log + +from watcher._i18n import _LW +from watcher.common import utils +from watcher.decision_engine.planner import base +from watcher import objects + +LOG = log.getLogger(__name__) + + +class WeightPlanner(base.BasePlanner): + """Weight planner implementation + + This implementation builds actions with parents in accordance with weights. + Set of actions having a lower weight will be scheduled before + the other ones. There are two config options to configure: + action_weights and parallelization. + + *Limitations* + + - This planner requires to have action_weights and parallelization configs + tuned well. + """ + + def __init__(self, config): + super(WeightPlanner, self).__init__(config) + + action_weights = { + 'turn_host_to_acpi_s3_state': 10, + 'resize': 20, + 'migrate': 30, + 'sleep': 40, + 'change_nova_service_state': 50, + 'nop': 60, + } + + parallelization = { + 'turn_host_to_acpi_s3_state': 2, + 'resize': 2, + 'migrate': 2, + 'sleep': 1, + 'change_nova_service_state': 1, + 'nop': 1, + } + + @classmethod + def get_config_opts(cls): + return [ + cfg.Opt( + 'weights', + type=types.Dict(value_type=types.Integer()), + help="These weights are used to schedule the actions. " + "Action Plan will be build in accordance with sets of " + "actions ordered by descending weights." + "Two action types cannot have the same weight. 
", + default=cls.action_weights), + cfg.Opt( + 'parallelization', + type=types.Dict(value_type=types.Integer()), + help="Number of actions to be run in parallel on a per " + "action type basis.", + default=cls.parallelization), + ] + + @staticmethod + def format_action(action_plan_id, action_type, + input_parameters=None, parents=()): + return { + 'uuid': utils.generate_uuid(), + 'action_plan_id': int(action_plan_id), + 'action_type': action_type, + 'input_parameters': input_parameters, + 'state': objects.action.State.PENDING, + 'parents': parents or None, + } + + @staticmethod + def chunkify(lst, n): + """Yield successive n-sized chunks from lst.""" + if n < 1: + # Just to make sure the number is valid + n = 1 + + # Split a flat list in a list of chunks of size n. + # e.g. chunkify([0, 1, 2, 3, 4], 2) -> [[0, 1], [2, 3], [4]] + for i in range(0, len(lst), n): + yield lst[i:i + n] + + def compute_action_graph(self, sorted_weighted_actions): + reverse_weights = {v: k for k, v in self.config.weights.items()} + # leaf_groups contains a list of list of nodes called groups + # each group is a set of nodes from which a future node will + # branch off (parent nodes). + + # START --> migrate-1 --> migrate-3 + # \ \--> resize-1 --> FINISH + # \--> migrate-2 -------------/ + # In the above case migrate-1 will the only memeber of the leaf + # group that migrate-3 will use as parent group, whereas + # resize-1 will have both migrate-2 and migrate-3 in its + # parent/leaf group + leaf_groups = [] + action_graph = nx.DiGraph() + # We iterate through each action type category (sorted by weight) to + # insert them in a Directed Acyclic Graph + for idx, (weight, actions) in enumerate(sorted_weighted_actions): + action_chunks = self.chunkify( + actions, self.config.parallelization[reverse_weights[weight]]) + + # We split the actions into chunks/layers that will have to be + # spread across all the available branches of the graph + for chunk_idx, actions_chunk in enumerate(action_chunks): + for action in actions_chunk: + action_graph.add_node(action) + + # all other actions + parent_nodes = [] + if not idx and not chunk_idx: + parent_nodes = [] + elif leaf_groups: + parent_nodes = leaf_groups + + for parent_node in parent_nodes: + action_graph.add_edge(parent_node, action) + action.parents.append(parent_node.uuid) + + if leaf_groups: + leaf_groups = [] + leaf_groups.extend([a for a in actions_chunk]) + + return action_graph + + def schedule(self, context, audit_id, solution): + LOG.debug('Creating an action plan for the audit uuid: %s', audit_id) + action_plan = self.create_action_plan(context, audit_id, solution) + + sorted_weighted_actions = self.get_sorted_actions_by_weight( + context, action_plan, solution) + action_graph = self.compute_action_graph(sorted_weighted_actions) + + self._create_efficacy_indicators( + context, action_plan.id, solution.efficacy_indicators) + + if len(action_graph.nodes()) == 0: + LOG.warning(_LW("The action plan is empty")) + action_plan.state = objects.action_plan.State.SUCCEEDED + action_plan.save() + + self.create_scheduled_actions(action_plan, action_graph) + return action_plan + + def get_sorted_actions_by_weight(self, context, action_plan, solution): + # We need to make them immutable to add them to the graph + action_objects = list([ + objects.Action( + context, uuid=utils.generate_uuid(), parents=[], + action_plan_id=action_plan.id, **a) + for a in solution.actions]) + # This is a dict of list with each being a weight and the list being + # all the actions associated 
to this weight + weighted_actions = collections.defaultdict(list) + for action in action_objects: + action_weight = self.config.weights[action.action_type] + weighted_actions[action_weight].append(action) + + return reversed(sorted(weighted_actions.items(), key=lambda x: x[0])) + + def create_scheduled_actions(self, action_plan, graph): + for action in graph.nodes(): + LOG.debug("Creating the %s in the Watcher database", + action.action_type) + try: + action.create() + except Exception as exc: + LOG.exception(exc) + raise + + def create_action_plan(self, context, audit_id, solution): + strategy = objects.Strategy.get_by_name( + context, solution.strategy.name) + + action_plan_dict = { + 'uuid': utils.generate_uuid(), + 'audit_id': audit_id, + 'strategy_id': strategy.id, + 'state': objects.action_plan.State.RECOMMENDED, + 'global_efficacy': solution.global_efficacy, + } + + new_action_plan = objects.ActionPlan(context, **action_plan_dict) + new_action_plan.create() + + return new_action_plan + + def _create_efficacy_indicators(self, context, action_plan_id, indicators): + efficacy_indicators = [] + for indicator in indicators: + efficacy_indicator_dict = { + 'uuid': utils.generate_uuid(), + 'name': indicator.name, + 'description': indicator.description, + 'unit': indicator.unit, + 'value': indicator.value, + 'action_plan_id': action_plan_id, + } + new_efficacy_indicator = objects.EfficacyIndicator( + context, **efficacy_indicator_dict) + new_efficacy_indicator.create() + + efficacy_indicators.append(new_efficacy_indicator) + return efficacy_indicators diff --git a/watcher/decision_engine/planner/workload_stabilization.py b/watcher/decision_engine/planner/workload_stabilization.py new file mode 100644 index 000000000..125e31021 --- /dev/null +++ b/watcher/decision_engine/planner/workload_stabilization.py @@ -0,0 +1,301 @@ +# -*- encoding: utf-8 -*- +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import abc + +from oslo_config import cfg +from oslo_config import types +from oslo_log import log + +from watcher._i18n import _LW +from watcher.common import clients +from watcher.common import exception +from watcher.common import nova_helper +from watcher.common import utils +from watcher.decision_engine.planner import base +from watcher import objects + +LOG = log.getLogger(__name__) + + +class WorkloadStabilizationPlanner(base.BasePlanner): + """Workload Stabilization planner implementation + + This implementation comes with basic rules with a set of action types that + are weighted. An action having a lower weight will be scheduled before the + other ones. The set of action types can be specified by 'weights' in the + ``watcher.conf``. You need to associate a different weight to all available + actions into the configuration file, otherwise you will get an error when + the new action will be referenced in the solution produced by a strategy. 
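Both the new weight planner and the workload stabilization planner start from the same idea: group the solution's actions by their configured weight and schedule the heaviest groups first. A rough standalone sketch of that grouping step; the weights and actions below are illustrative, not taken from this patch:

    import collections

    weights = {'migrate': 30, 'resize': 20, 'nop': 60}
    actions = [
        {'action_type': 'migrate', 'resource_id': 'vm-1'},
        {'action_type': 'resize', 'resource_id': 'vm-1'},
        {'action_type': 'migrate', 'resource_id': 'vm-2'},
    ]

    weighted = collections.defaultdict(list)
    for action in actions:
        weighted[weights[action['action_type']]].append(action)

    # Highest weight first: both migrations are scheduled before the resize.
    for weight, group in sorted(weighted.items(), reverse=True):
        print(weight, [a['resource_id'] for a in group])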
+ + *Limitations* + + - This is a proof of concept that is not meant to be used in production + """ + + def __init__(self, config): + super(WorkloadStabilizationPlanner, self).__init__(config) + self._osc = clients.OpenStackClients() + + @property + def osc(self): + return self._osc + + weights_dict = { + 'turn_host_to_acpi_s3_state': 0, + 'resize': 1, + 'migrate': 2, + 'sleep': 3, + 'change_nova_service_state': 4, + 'nop': 5, + } + + @classmethod + def get_config_opts(cls): + return [ + cfg.Opt( + 'weights', + type=types.Dict(value_type=types.Integer()), + help="These weights are used to schedule the actions", + default=cls.weights_dict), + ] + + def create_action(self, + action_plan_id, + action_type, + input_parameters=None): + uuid = utils.generate_uuid() + action = { + 'uuid': uuid, + 'action_plan_id': int(action_plan_id), + 'action_type': action_type, + 'input_parameters': input_parameters, + 'state': objects.action.State.PENDING, + 'parents': None + } + + return action + + def load_child_class(self, child_name): + for c in BaseActionValidator.__subclasses__(): + if child_name == c.action_name: + return c() + return None + + def schedule(self, context, audit_id, solution): + LOG.debug('Creating an action plan for the audit uuid: %s', audit_id) + weights = self.config.weights + action_plan = self._create_action_plan(context, audit_id, solution) + + actions = list(solution.actions) + to_schedule = [] + for action in actions: + json_action = self.create_action( + action_plan_id=action_plan.id, + action_type=action.get('action_type'), + input_parameters=action.get('input_parameters')) + to_schedule.append((weights[action.get('action_type')], + json_action)) + + self._create_efficacy_indicators( + context, action_plan.id, solution.efficacy_indicators) + + # scheduling + scheduled = sorted(to_schedule, key=lambda weight: (weight[0]), + reverse=True) + if len(scheduled) == 0: + LOG.warning(_LW("The action plan is empty")) + action_plan.state = objects.action_plan.State.SUCCEEDED + action_plan.save() + else: + resource_action_map = {} + scheduled_actions = [x[1] for x in scheduled] + for action in scheduled_actions: + a_type = action['action_type'] + if a_type != 'turn_host_to_acpi_s3_state': + plugin_action = self.load_child_class( + action.get("action_type")) + if not plugin_action: + raise exception.UnsupportedActionType( + action_type=action.get("action_type")) + db_action = self._create_action(context, action) + parents = plugin_action.validate_parents( + resource_action_map, action) + if parents: + db_action.parents = parents + db_action.save() + # if we have an action that will make host unreachable, we need + # to complete all actions (resize and migration type) + # related to the host. + # Note(alexchadin): turn_host_to_acpi_s3_state doesn't + # actually exist. Placed code shows relations between + # action types. + # TODO(alexchadin): add turn_host_to_acpi_s3_state action type. 
+ else: + host_to_acpi_s3 = action['input_parameters']['resource_id'] + host_actions = resource_action_map.get(host_to_acpi_s3) + action_parents = [] + if host_actions: + resize_actions = [x[0] for x in host_actions + if x[1] == 'resize'] + migrate_actions = [x[0] for x in host_actions + if x[1] == 'migrate'] + resize_migration_parents = [ + x.parents for x in + [objects.Action.get_by_uuid(context, resize_action) + for resize_action in resize_actions]] + # resize_migration_parents should be one level list + resize_migration_parents = [ + parent for sublist in resize_migration_parents + for parent in sublist] + action_parents.extend([uuid for uuid in + resize_actions]) + action_parents.extend([uuid for uuid in + migrate_actions if uuid not in + resize_migration_parents]) + db_action = self._create_action(context, action) + db_action.parents = action_parents + db_action.save() + + return action_plan + + def _create_action_plan(self, context, audit_id, solution): + strategy = objects.Strategy.get_by_name( + context, solution.strategy.name) + + action_plan_dict = { + 'uuid': utils.generate_uuid(), + 'audit_id': audit_id, + 'strategy_id': strategy.id, + 'state': objects.action_plan.State.RECOMMENDED, + 'global_efficacy': solution.global_efficacy, + } + + new_action_plan = objects.ActionPlan(context, **action_plan_dict) + new_action_plan.create() + + return new_action_plan + + def _create_efficacy_indicators(self, context, action_plan_id, indicators): + efficacy_indicators = [] + for indicator in indicators: + efficacy_indicator_dict = { + 'uuid': utils.generate_uuid(), + 'name': indicator.name, + 'description': indicator.description, + 'unit': indicator.unit, + 'value': indicator.value, + 'action_plan_id': action_plan_id, + } + new_efficacy_indicator = objects.EfficacyIndicator( + context, **efficacy_indicator_dict) + new_efficacy_indicator.create() + + efficacy_indicators.append(new_efficacy_indicator) + return efficacy_indicators + + def _create_action(self, context, _action): + try: + LOG.debug("Creating the %s in the Watcher database", + _action.get("action_type")) + + new_action = objects.Action(context, **_action) + new_action.create() + + return new_action + except Exception as exc: + LOG.exception(exc) + raise + + +class BaseActionValidator(object): + action_name = None + + def __init__(self): + super(BaseActionValidator, self).__init__() + self._osc = None + + @property + def osc(self): + if not self._osc: + self._osc = clients.OpenStackClients() + return self._osc + + @abc.abstractmethod + def validate_parents(self, resource_action_map, action): + raise NotImplementedError() + + def _mapping(self, resource_action_map, resource_id, action_uuid, + action_type): + if resource_id not in resource_action_map: + resource_action_map[resource_id] = [(action_uuid, + action_type,)] + else: + resource_action_map[resource_id].append((action_uuid, + action_type,)) + + +class MigrationActionValidator(BaseActionValidator): + action_name = "migrate" + + def validate_parents(self, resource_action_map, action): + instance_uuid = action['input_parameters']['resource_id'] + host_name = action['input_parameters']['source_node'] + self._mapping(resource_action_map, instance_uuid, action['uuid'], + 'migrate') + self._mapping(resource_action_map, host_name, action['uuid'], + 'migrate') + + +class ResizeActionValidator(BaseActionValidator): + action_name = "resize" + + def validate_parents(self, resource_action_map, action): + nova = nova_helper.NovaHelper(osc=self.osc) + instance_uuid = 
action['input_parameters']['resource_id'] + parent_actions = resource_action_map.get(instance_uuid) + host_of_instance = nova.get_hostname( + nova.get_instance_by_uuid(instance_uuid)[0]) + self._mapping(resource_action_map, host_of_instance, action['uuid'], + 'resize') + if parent_actions: + return [x[0] for x in parent_actions] + else: + return [] + + +class ChangeNovaServiceStateActionValidator(BaseActionValidator): + action_name = "change_nova_service_state" + + def validate_parents(self, resource_action_map, action): + host_name = action['input_parameters']['resource_id'] + self._mapping(resource_action_map, host_name, action.uuid, + 'change_nova_service_state') + return [] + + +class SleepActionValidator(BaseActionValidator): + action_name = "sleep" + + def validate_parents(self, resource_action_map, action): + return [] + + +class NOPActionValidator(BaseActionValidator): + action_name = "nop" + + def validate_parents(self, resource_action_map, action): + return [] diff --git a/watcher/decision_engine/strategy/strategies/dummy_with_resize.py b/watcher/decision_engine/strategy/strategies/dummy_with_resize.py new file mode 100644 index 000000000..c35b2a0da --- /dev/null +++ b/watcher/decision_engine/strategy/strategies/dummy_with_resize.py @@ -0,0 +1,119 @@ +# -*- encoding: utf-8 -*- +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from oslo_log import log + +from watcher._i18n import _ +from watcher.decision_engine.strategy.strategies import base + +LOG = log.getLogger(__name__) + + +class DummyWithResize(base.DummyBaseStrategy): + """Dummy strategy used for integration testing via Tempest + + *Description* + + This strategy does not provide any useful optimization. Its only purpose + is to be used by Tempest tests. + + *Requirements* + + + + *Limitations* + + Do not use in production. 
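To make the validator mechanics above more concrete, here is a small self-contained walk-through of how the resource/action map yields parents for a resize that follows a migration of the same instance; the identifiers are hypothetical and no database or Nova calls are involved:

    resource_action_map = {}

    def remember(resource_id, action_uuid, action_type):
        # Same bookkeeping as BaseActionValidator._mapping().
        resource_action_map.setdefault(resource_id, []).append(
            (action_uuid, action_type))

    # 1) A migration of instance vm-1 is validated first and registered under
    #    both the instance and its source node.
    remember('vm-1', 'uuid-migrate-1', 'migrate')
    remember('server-1', 'uuid-migrate-1', 'migrate')

    # 2) A later resize of the same instance finds the migration already
    #    registered, so it gets parents=['uuid-migrate-1'] and runs after it.
    parents = [u for u, _ in resource_action_map.get('vm-1', [])]
    print(parents)  # ['uuid-migrate-1']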
+ + *Spec URL* + + + """ + + NOP = "nop" + SLEEP = "sleep" + + def pre_execute(self): + pass + + def do_execute(self): + para1 = self.input_parameters.para1 + para2 = self.input_parameters.para2 + LOG.debug("Executing Dummy strategy with para1=%(p1)f, para2=%(p2)s", + {'p1': para1, 'p2': para2}) + parameters = {'message': 'hello World'} + self.solution.add_action(action_type=self.NOP, + input_parameters=parameters) + + parameters = {'message': 'Welcome'} + self.solution.add_action(action_type=self.NOP, + input_parameters=parameters) + + self.solution.add_action(action_type=self.SLEEP, + input_parameters={'duration': 5.0}) + self.solution.add_action( + action_type='migrate', + resource_id='b199db0c-1408-4d52-b5a5-5ca14de0ff36', + input_parameters={ + 'source_node': 'server1', + 'destination_node': 'server2'}) + + self.solution.add_action( + action_type='migrate', + resource_id='8db1b3c1-7938-4c34-8c03-6de14b874f8f', + input_parameters={ + 'source_node': 'server1', + 'destination_node': 'server2'} + ) + self.solution.add_action( + action_type='resize', + resource_id='8db1b3c1-7938-4c34-8c03-6de14b874f8f', + input_parameters={'flavor': 'x1'} + ) + + def post_execute(self): + pass + + @classmethod + def get_name(cls): + return "dummy_with_resize" + + @classmethod + def get_display_name(cls): + return _("Dummy strategy with resize") + + @classmethod + def get_translatable_display_name(cls): + return "Dummy strategy with resize" + + @classmethod + def get_schema(cls): + # Mandatory default setting for each element + return { + "properties": { + "para1": { + "description": "number parameter example", + "type": "number", + "default": 3.2, + "minimum": 1.0, + "maximum": 10.2, + }, + "para2": { + "description": "string parameter example", + "type": "string", + "default": "hello" + }, + }, + } diff --git a/watcher/objects/action.py b/watcher/objects/action.py index 44c008755..08ddd4972 100644 --- a/watcher/objects/action.py +++ b/watcher/objects/action.py @@ -37,7 +37,8 @@ class Action(base.WatcherPersistentObject, base.WatcherObject, # Version 1.0: Initial version # Version 1.1: Added 'action_plan' object field - VERSION = '1.1' + # Version 2.0: Removed 'next' object field, Added 'parents' object field + VERSION = '2.0' dbapi = db_api.get_instance() @@ -48,7 +49,7 @@ class Action(base.WatcherPersistentObject, base.WatcherObject, 'action_type': wfields.StringField(nullable=True), 'input_parameters': wfields.DictField(nullable=True), 'state': wfields.StringField(nullable=True), - 'next': wfields.IntegerField(nullable=True), + 'parents': wfields.ListOfStringsField(nullable=True), 'action_plan': wfields.ObjectField('ActionPlan', nullable=True), } diff --git a/watcher/objects/action_plan.py b/watcher/objects/action_plan.py index a97154cfb..9e1f1414f 100644 --- a/watcher/objects/action_plan.py +++ b/watcher/objects/action_plan.py @@ -95,7 +95,8 @@ class ActionPlan(base.WatcherPersistentObject, base.WatcherObject, # Version 1.0: Initial version # Version 1.1: Added 'audit' and 'strategy' object field # Version 1.2: audit_id is not nullable anymore - VERSION = '1.2' + # Version 2.0: Removed 'first_action_id' object field + VERSION = '2.0' dbapi = db_api.get_instance() @@ -104,7 +105,6 @@ class ActionPlan(base.WatcherPersistentObject, base.WatcherObject, 'uuid': wfields.UUIDField(), 'audit_id': wfields.IntegerField(), 'strategy_id': wfields.IntegerField(), - 'first_action_id': wfields.IntegerField(nullable=True), 'state': wfields.StringField(nullable=True), 'global_efficacy': 
wfields.FlexibleDictField(nullable=True), diff --git a/watcher/tests/api/v1/test_actions.py b/watcher/tests/api/v1/test_actions.py index bd2c07ffb..b0f71045d 100644 --- a/watcher/tests/api/v1/test_actions.py +++ b/watcher/tests/api/v1/test_actions.py @@ -34,7 +34,7 @@ def post_get_test_action(**kw): del action['action_plan_id'] action['action_plan_uuid'] = kw.get('action_plan_uuid', action_plan['uuid']) - action['next'] = None + action['parents'] = None return action @@ -42,7 +42,7 @@ class TestActionObject(base.TestCase): def test_action_init(self): action_dict = api_utils.action_post_data(action_plan_id=None, - next=None) + parents=None) del action_dict['state'] action = api_action.Action(**action_dict) self.assertEqual(wtypes.Unset, action.state) @@ -67,13 +67,13 @@ class TestListAction(api_base.FunctionalTest): self.assertIn(field, action) def test_one(self): - action = obj_utils.create_test_action(self.context, next=None) + action = obj_utils.create_test_action(self.context, parents=None) response = self.get_json('/actions') self.assertEqual(action.uuid, response['actions'][0]["uuid"]) self._assert_action_fields(response['actions'][0]) def test_one_soft_deleted(self): - action = obj_utils.create_test_action(self.context, next=None) + action = obj_utils.create_test_action(self.context, parents=None) action.soft_delete() response = self.get_json('/actions', headers={'X-Show-Deleted': 'True'}) @@ -84,7 +84,7 @@ class TestListAction(api_base.FunctionalTest): self.assertEqual([], response['actions']) def test_get_one(self): - action = obj_utils.create_test_action(self.context, next=None) + action = obj_utils.create_test_action(self.context, parents=None) response = self.get_json('/actions/%s' % action['uuid']) self.assertEqual(action.uuid, response['uuid']) self.assertEqual(action.action_type, response['action_type']) @@ -92,7 +92,7 @@ class TestListAction(api_base.FunctionalTest): self._assert_action_fields(response) def test_get_one_soft_deleted(self): - action = obj_utils.create_test_action(self.context, next=None) + action = obj_utils.create_test_action(self.context, parents=None) action.soft_delete() response = self.get_json('/actions/%s' % action['uuid'], headers={'X-Show-Deleted': 'True'}) @@ -104,13 +104,13 @@ class TestListAction(api_base.FunctionalTest): self.assertEqual(404, response.status_int) def test_detail(self): - action = obj_utils.create_test_action(self.context, next=None) + action = obj_utils.create_test_action(self.context, parents=None) response = self.get_json('/actions/detail') self.assertEqual(action.uuid, response['actions'][0]["uuid"]) self._assert_action_fields(response['actions'][0]) def test_detail_soft_deleted(self): - action = obj_utils.create_test_action(self.context, next=None) + action = obj_utils.create_test_action(self.context, parents=None) action.soft_delete() response = self.get_json('/actions/detail', headers={'X-Show-Deleted': 'True'}) @@ -121,7 +121,7 @@ class TestListAction(api_base.FunctionalTest): self.assertEqual([], response['actions']) def test_detail_against_single(self): - action = obj_utils.create_test_action(self.context, next=None) + action = obj_utils.create_test_action(self.context, parents=None) response = self.get_json('/actions/%s/detail' % action['uuid'], expect_errors=True) self.assertEqual(404, response.status_int) @@ -312,18 +312,23 @@ class TestListAction(api_base.FunctionalTest): set([act['uuid'] for act in response['actions'] if act['action_plan_uuid'] == action_plan2.uuid])) - def test_many_with_next_uuid(self): + def 
test_many_with_parents(self): action_list = [] for id_ in range(5): - action = obj_utils.create_test_action(self.context, id=id_, - uuid=utils.generate_uuid(), - next=id_ + 1) + if id_ > 0: + action = obj_utils.create_test_action( + self.context, id=id_, uuid=utils.generate_uuid(), + parents=[action_list[id_ - 1]]) + else: + action = obj_utils.create_test_action( + self.context, id=id_, uuid=utils.generate_uuid(), + parents=[]) action_list.append(action.uuid) response = self.get_json('/actions') response_actions = response['actions'] for id_ in range(4): - self.assertEqual(response_actions[id_]['next_uuid'], - response_actions[id_ + 1]['uuid']) + self.assertEqual(response_actions[id_]['uuid'], + response_actions[id_ + 1]['parents'][0]) def test_many_without_soft_deleted(self): action_list = [] @@ -357,30 +362,6 @@ class TestListAction(api_base.FunctionalTest): uuids = [s['uuid'] for s in response['actions']] self.assertEqual(sorted(action_list), sorted(uuids)) - def test_many_with_sort_key_next_uuid(self): - for id_ in range(5): - obj_utils.create_test_action(self.context, id=id_, - uuid=utils.generate_uuid(), - next=id_ + 1) - response = self.get_json('/actions/') - reference_uuids = [ - s.get('next_uuid', '') for s in response['actions'] - ] - - response = self.get_json('/actions/?sort_key=next_uuid') - - self.assertEqual(5, len(response['actions'])) - uuids = [(s['next_uuid'] if 'next_uuid' in s else '') - for s in response['actions']] - self.assertEqual(sorted(reference_uuids), uuids) - - response = self.get_json('/actions/?sort_key=next_uuid&sort_dir=desc') - - self.assertEqual(5, len(response['actions'])) - uuids = [(s['next_uuid'] if 'next_uuid' in s else '') - for s in response['actions']] - self.assertEqual(sorted(reference_uuids, reverse=True), uuids) - def test_links(self): uuid = utils.generate_uuid() obj_utils.create_test_action(self.context, id=1, uuid=uuid) @@ -393,18 +374,15 @@ class TestListAction(api_base.FunctionalTest): self.assertTrue(self.validate_link(l['href'], bookmark=bookmark)) def test_collection_links(self): - next = -1 + parents = None for id_ in range(5): action = obj_utils.create_test_action(self.context, id=id_, uuid=utils.generate_uuid(), - next=next) - next = action.id + parents=parents) + parents = [action.id] response = self.get_json('/actions/?limit=3') self.assertEqual(3, len(response['actions'])) - next_marker = response['actions'][-1]['uuid'] - self.assertIn(next_marker, response['next']) - def test_collection_links_default_limit(self): cfg.CONF.set_override('max_limit', 3, 'api', enforce_type=True) @@ -414,9 +392,6 @@ class TestListAction(api_base.FunctionalTest): response = self.get_json('/actions') self.assertEqual(3, len(response['actions'])) - next_marker = response['actions'][-1]['uuid'] - self.assertIn(next_marker, response['next']) - class TestPatch(api_base.FunctionalTest): @@ -426,7 +401,7 @@ class TestPatch(api_base.FunctionalTest): obj_utils.create_test_strategy(self.context) obj_utils.create_test_audit(self.context) obj_utils.create_test_action_plan(self.context) - self.action = obj_utils.create_test_action(self.context, next=None) + self.action = obj_utils.create_test_action(self.context, parents=None) p = mock.patch.object(db_api.BaseConnection, 'update_action') self.mock_action_update = p.start() self.mock_action_update.side_effect = self._simulate_rpc_action_update @@ -461,7 +436,7 @@ class TestDelete(api_base.FunctionalTest): self.strategy = obj_utils.create_test_strategy(self.context) self.audit = 
obj_utils.create_test_audit(self.context) self.action_plan = obj_utils.create_test_action_plan(self.context) - self.action = obj_utils.create_test_action(self.context, next=None) + self.action = obj_utils.create_test_action(self.context, parents=None) p = mock.patch.object(db_api.BaseConnection, 'update_action') self.mock_action_update = p.start() self.mock_action_update.side_effect = self._simulate_rpc_action_update diff --git a/watcher/tests/api/v1/test_actions_plans.py b/watcher/tests/api/v1/test_actions_plans.py index 7f1602eaa..aaae6b09f 100644 --- a/watcher/tests/api/v1/test_actions_plans.py +++ b/watcher/tests/api/v1/test_actions_plans.py @@ -77,14 +77,6 @@ class TestListActionPlan(api_base.FunctionalTest): 'unit': '%'}], response['efficacy_indicators']) - def test_get_one_with_first_action(self): - action_plan = obj_utils.create_test_action_plan(self.context) - action = obj_utils.create_test_action(self.context, id=1) - response = self.get_json('/action_plans/%s' % action_plan['uuid']) - self.assertEqual(action_plan.uuid, response['uuid']) - self.assertEqual(action.uuid, response['first_action_uuid']) - self._assert_action_plans_fields(response) - def test_get_one_soft_deleted(self): action_plan = obj_utils.create_test_action_plan(self.context) action_plan.soft_delete() @@ -322,7 +314,7 @@ class TestDelete(api_base.FunctionalTest): def test_delete_action_plan_with_action(self): action = obj_utils.create_test_action( - self.context, id=self.action_plan.first_action_id) + self.context, id=1) self.delete('/action_plans/%s' % self.action_plan.uuid) ap_response = self.get_json('/action_plans/%s' % self.action_plan.uuid, diff --git a/watcher/tests/applier/actions/test_resize.py b/watcher/tests/applier/actions/test_resize.py new file mode 100644 index 000000000..52e4b9af2 --- /dev/null +++ b/watcher/tests/applier/actions/test_resize.py @@ -0,0 +1,102 @@ +# -*- encoding: utf-8 -*- +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
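For reference, after this change a non-expanded action returned by the API carries a `parents` list instead of `next_uuid`; an illustrative payload (all field values made up) might look like:

    action = {
        'uuid': '10a47dd1-4874-4298-91cf-eff046dbdb8d',
        'state': 'PENDING',
        'action_type': 'resize',
        'action_plan_uuid': '7ae81bb3-dec3-4289-8d6c-da80bd8001ae',
        'parents': ['94ae2f92-b7fd-4da7-9e97-f13504ae98c4'],
    }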
+ +from __future__ import unicode_literals + +import mock +import voluptuous + +from watcher.applier.actions import base as baction +from watcher.applier.actions import resize +from watcher.common import clients +from watcher.common import nova_helper +from watcher.tests import base + + +class TestResize(base.TestCase): + + INSTANCE_UUID = "94ae2f92-b7fd-4da7-9e97-f13504ae98c4" + + def setUp(self): + super(TestResize, self).setUp() + + self.r_osc_cls = mock.Mock() + self.r_helper_cls = mock.Mock() + self.r_helper = mock.Mock(spec=nova_helper.NovaHelper) + self.r_helper_cls.return_value = self.r_helper + self.r_osc = mock.Mock(spec=clients.OpenStackClients) + self.r_osc_cls.return_value = self.r_osc + + r_openstack_clients = mock.patch.object( + clients, "OpenStackClients", self.r_osc_cls) + r_nova_helper = mock.patch.object( + nova_helper, "NovaHelper", self.r_helper_cls) + + r_openstack_clients.start() + r_nova_helper.start() + + self.addCleanup(r_openstack_clients.stop) + self.addCleanup(r_nova_helper.stop) + + self.input_parameters = { + "flavor": "x1", + baction.BaseAction.RESOURCE_ID: self.INSTANCE_UUID, + } + self.action = resize.Resize(mock.Mock()) + self.action.input_parameters = self.input_parameters + + def test_parameters(self): + params = {baction.BaseAction.RESOURCE_ID: + self.INSTANCE_UUID, + self.action.FLAVOR: 'x1'} + self.action.input_parameters = params + self.assertTrue(self.action.validate_parameters()) + + def test_parameters_exception_empty_fields(self): + parameters = {baction.BaseAction.RESOURCE_ID: + self.INSTANCE_UUID, + self.action.FLAVOR: None} + self.action.input_parameters = parameters + exc = self.assertRaises( + voluptuous.MultipleInvalid, self.action.validate_parameters) + self.assertEqual([(['flavor'], voluptuous.TypeInvalid)], + [(e.path, type(e)) for e in exc.errors]) + + def test_parameters_exception_flavor(self): + parameters = {baction.BaseAction.RESOURCE_ID: + self.INSTANCE_UUID, + self.action.FLAVOR: None} + self.action.input_parameters = parameters + exc = self.assertRaises( + voluptuous.MultipleInvalid, self.action.validate_parameters) + self.assertEqual( + [(['flavor'], voluptuous.TypeInvalid)], + [(e.path, type(e)) for e in exc.errors]) + + def test_parameters_exception_resource_id(self): + parameters = {baction.BaseAction.RESOURCE_ID: "EFEF", + self.action.FLAVOR: 'x1'} + self.action.input_parameters = parameters + exc = self.assertRaises( + voluptuous.MultipleInvalid, self.action.validate_parameters) + self.assertEqual( + [(['resource_id'], voluptuous.Invalid)], + [(e.path, type(e)) for e in exc.errors]) + + def test_execute_resize(self): + self.r_helper.find_instance.return_value = self.INSTANCE_UUID + self.action.execute() + self.r_helper.resize_instance.assert_called_once_with( + instance_id=self.INSTANCE_UUID, flavor='x1') diff --git a/watcher/tests/applier/workflow_engine/test_default_workflow_engine.py b/watcher/tests/applier/workflow_engine/test_default_workflow_engine.py index 1983639d9..331c0a31c 100644 --- a/watcher/tests/applier/workflow_engine/test_default_workflow_engine.py +++ b/watcher/tests/applier/workflow_engine/test_default_workflow_engine.py @@ -70,14 +70,15 @@ class TestDefaultWorkFlowEngine(base.DbTestCase): except Exception as exc: self.fail(exc) - def create_action(self, action_type, parameters, next): + def create_action(self, action_type, parameters, parents): action = { 'uuid': utils.generate_uuid(), 'action_plan_id': 0, 'action_type': action_type, 'input_parameters': parameters, 'state': 
objects.action.State.PENDING, - 'next': next, + 'parents': parents, + } new_action = objects.Action(self.context, **action) new_action.create() @@ -116,7 +117,7 @@ class TestDefaultWorkFlowEngine(base.DbTestCase): def test_execute_with_two_actions(self): actions = [] second = self.create_action("sleep", {'duration': 0.0}, None) - first = self.create_action("nop", {'message': 'test'}, second.id) + first = self.create_action("nop", {'message': 'test'}, None) actions.append(first) actions.append(second) @@ -132,8 +133,8 @@ class TestDefaultWorkFlowEngine(base.DbTestCase): actions = [] third = self.create_action("nop", {'message': 'next'}, None) - second = self.create_action("sleep", {'duration': 0.0}, third.id) - first = self.create_action("nop", {'message': 'hello'}, second.id) + second = self.create_action("sleep", {'duration': 0.0}, None) + first = self.create_action("nop", {'message': 'hello'}, None) self.check_action_state(first, objects.action.State.PENDING) self.check_action_state(second, objects.action.State.PENDING) @@ -154,8 +155,8 @@ class TestDefaultWorkFlowEngine(base.DbTestCase): actions = [] third = self.create_action("no_exist", {'message': 'next'}, None) - second = self.create_action("sleep", {'duration': 0.0}, third.id) - first = self.create_action("nop", {'message': 'hello'}, second.id) + second = self.create_action("sleep", {'duration': 0.0}, None) + first = self.create_action("nop", {'message': 'hello'}, None) self.check_action_state(first, objects.action.State.PENDING) self.check_action_state(second, objects.action.State.PENDING) diff --git a/watcher/tests/common/test_nova_helper.py b/watcher/tests/common/test_nova_helper.py index b7ad8d8ea..1c4b8562f 100644 --- a/watcher/tests/common/test_nova_helper.py +++ b/watcher/tests/common/test_nova_helper.py @@ -38,6 +38,7 @@ class TestNovaHelper(base.TestCase): self.instance_uuid = "fb5311b7-37f3-457e-9cde-6494a3c59bfe" self.source_node = "ldev-indeedsrv005" self.destination_node = "ldev-indeedsrv006" + self.flavor_name = "x1" @staticmethod def fake_server(*args, **kwargs): @@ -89,6 +90,22 @@ class TestNovaHelper(base.TestCase): result = nova_util.set_host_offline("rennes") self.assertFalse(result) + @mock.patch.object(time, 'sleep', mock.Mock()) + def test_resize_instance(self, mock_glance, mock_cinder, + mock_neutron, mock_nova): + nova_util = nova_helper.NovaHelper() + server = self.fake_server(self.instance_uuid) + setattr(server, 'status', 'VERIFY_RESIZE') + self.fake_nova_find_list(nova_util, find=server, list=server) + is_success = nova_util.resize_instance(self.instance_uuid, + self.flavor_name) + self.assertTrue(is_success) + + setattr(server, 'status', 'SOMETHING_ELSE') + is_success = nova_util.resize_instance(self.instance_uuid, + self.flavor_name) + self.assertFalse(is_success) + @mock.patch.object(time, 'sleep', mock.Mock()) def test_live_migrate_instance(self, mock_glance, mock_cinder, mock_neutron, mock_nova): diff --git a/watcher/tests/db/test_action.py b/watcher/tests/db/test_action.py index 406ed639a..d98d85472 100644 --- a/watcher/tests/db/test_action.py +++ b/watcher/tests/db/test_action.py @@ -278,28 +278,28 @@ class DbActionTestCase(base.DbTestCase): id=1, uuid=w_utils.generate_uuid(), audit_id=audit.id, - first_action_id=None, + parents=None, state=objects.action_plan.State.RECOMMENDED) action1 = self._create_test_action( id=1, action_plan_id=1, description='description action 1', uuid=w_utils.generate_uuid(), - next=None, + parents=None, state=objects.action_plan.State.PENDING) action2 = 
self._create_test_action( id=2, action_plan_id=2, description='description action 2', uuid=w_utils.generate_uuid(), - next=action1['uuid'], + parents=[action1['uuid']], state=objects.action_plan.State.PENDING) action3 = self._create_test_action( id=3, action_plan_id=1, description='description action 3', uuid=w_utils.generate_uuid(), - next=action2['uuid'], + parents=[action2['uuid']], state=objects.action_plan.State.ONGOING) res = self.dbapi.get_action_list( self.context, diff --git a/watcher/tests/db/test_action_plan.py b/watcher/tests/db/test_action_plan.py index ca97a0e4f..7569990bc 100644 --- a/watcher/tests/db/test_action_plan.py +++ b/watcher/tests/db/test_action_plan.py @@ -283,13 +283,11 @@ class DbActionPlanTestCase(base.DbTestCase): id=1, uuid=w_utils.generate_uuid(), audit_id=audit['id'], - first_action_id=None, state=ap_objects.State.RECOMMENDED) action_plan2 = self._create_test_action_plan( id=2, uuid=w_utils.generate_uuid(), audit_id=audit['id'], - first_action_id=action_plan1['id'], state=ap_objects.State.ONGOING) res = self.dbapi.get_action_plan_list( diff --git a/watcher/tests/db/utils.py b/watcher/tests/db/utils.py index e44ca8056..f94bbc82c 100644 --- a/watcher/tests/db/utils.py +++ b/watcher/tests/db/utils.py @@ -130,7 +130,7 @@ def get_test_action(**kwargs): 'resource_id': '10a47dd1-4874-4298-91cf-eff046dbdb8d'}), 'state': kwargs.get('state', objects.action_plan.State.PENDING), - 'next': kwargs.get('next', 2), + 'parents': kwargs.get('parents', []), 'created_at': kwargs.get('created_at'), 'updated_at': kwargs.get('updated_at'), 'deleted_at': kwargs.get('deleted_at'), @@ -166,7 +166,6 @@ def get_test_action_plan(**kwargs): 'audit_id': kwargs.get('audit_id', 1), 'strategy_id': kwargs.get('strategy_id', 1), 'global_efficacy': kwargs.get('global_efficacy', {}), - 'first_action_id': kwargs.get('first_action_id', 1), 'created_at': kwargs.get('created_at'), 'updated_at': kwargs.get('updated_at'), 'deleted_at': kwargs.get('deleted_at'), diff --git a/watcher/tests/decision_engine/planner/test_default_planner.py b/watcher/tests/decision_engine/planner/test_default_planner.py deleted file mode 100644 index d75ad836a..000000000 --- a/watcher/tests/decision_engine/planner/test_default_planner.py +++ /dev/null @@ -1,208 +0,0 @@ -# -*- encoding: utf-8 -*- -# Copyright (c) 2015 b<>com -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import mock - -from watcher.common import utils -from watcher.db import api as db_api -from watcher.decision_engine.planner import default as pbase -from watcher.decision_engine.solution import default as dsol -from watcher.decision_engine.strategy import strategies -from watcher import objects -from watcher.tests.db import base -from watcher.tests.db import utils as db_utils -from watcher.tests.decision_engine.model import ceilometer_metrics as fake -from watcher.tests.decision_engine.model import faker_cluster_state -from watcher.tests.objects import utils as obj_utils - - -class SolutionFaker(object): - @staticmethod - def build(): - metrics = fake.FakeCeilometerMetrics() - current_state_cluster = faker_cluster_state.FakerModelCollector() - strategy = strategies.BasicConsolidation( - config=mock.Mock(datasource="ceilometer")) - strategy._compute_model = current_state_cluster.generate_scenario_1() - strategy.ceilometer = mock.MagicMock( - get_statistics=metrics.mock_get_statistics) - return strategy.execute() - - -class SolutionFakerSingleHyp(object): - @staticmethod - def build(): - metrics = fake.FakeCeilometerMetrics() - current_state_cluster = faker_cluster_state.FakerModelCollector() - strategy = strategies.BasicConsolidation( - config=mock.Mock(datasource="ceilometer")) - strategy._compute_model = ( - current_state_cluster.generate_scenario_3_with_2_nodes()) - strategy.ceilometer = mock.MagicMock( - get_statistics=metrics.mock_get_statistics) - - return strategy.execute() - - -class TestActionScheduling(base.DbTestCase): - - def setUp(self): - super(TestActionScheduling, self).setUp() - self.strategy = db_utils.create_test_strategy(name="dummy") - self.audit = db_utils.create_test_audit( - uuid=utils.generate_uuid(), strategy_id=self.strategy.id) - self.default_planner = pbase.DefaultPlanner(mock.Mock()) - - def test_schedule_actions(self): - solution = dsol.DefaultSolution( - goal=mock.Mock(), strategy=self.strategy) - - parameters = { - "source_node": "server1", - "destination_node": "server2", - } - solution.add_action(action_type="migrate", - resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", - input_parameters=parameters) - - with mock.patch.object( - pbase.DefaultPlanner, "create_action", - wraps=self.default_planner.create_action - ) as m_create_action: - self.default_planner.config.weights = {'migrate': 3} - action_plan = self.default_planner.schedule( - self.context, self.audit.id, solution) - - self.assertIsNotNone(action_plan.uuid) - self.assertEqual(1, m_create_action.call_count) - filters = {'action_plan_id': action_plan.id} - actions = objects.Action.dbapi.get_action_list(self.context, filters) - self.assertEqual("migrate", actions[0].action_type) - - def test_schedule_two_actions(self): - solution = dsol.DefaultSolution( - goal=mock.Mock(), strategy=self.strategy) - - parameters = { - "source_node": "server1", - "destination_node": "server2", - } - solution.add_action(action_type="migrate", - resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", - input_parameters=parameters) - - solution.add_action(action_type="nop", - resource_id="", - input_parameters={}) - - with mock.patch.object( - pbase.DefaultPlanner, "create_action", - wraps=self.default_planner.create_action - ) as m_create_action: - self.default_planner.config.weights = {'migrate': 3, 'nop': 0} - action_plan = self.default_planner.schedule( - self.context, self.audit.id, solution) - self.assertIsNotNone(action_plan.uuid) - self.assertEqual(2, m_create_action.call_count) - # check order - filters = 
{'action_plan_id': action_plan.id} - actions = objects.Action.dbapi.get_action_list(self.context, filters) - self.assertEqual("nop", actions[0].action_type) - self.assertEqual("migrate", actions[1].action_type) - - def test_schedule_actions_with_unknown_action(self): - solution = dsol.DefaultSolution( - goal=mock.Mock(), strategy=self.strategy) - - parameters = { - "src_uuid_node": "server1", - "dst_uuid_node": "server2", - } - solution.add_action(action_type="migrate", - resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", - input_parameters=parameters) - - solution.add_action(action_type="new_action_type", - resource_id="", - input_parameters={}) - - with mock.patch.object( - pbase.DefaultPlanner, "create_action", - wraps=self.default_planner.create_action - ) as m_create_action: - self.default_planner.config.weights = {'migrate': 0} - self.assertRaises(KeyError, self.default_planner.schedule, - self.context, self.audit.id, solution) - self.assertEqual(2, m_create_action.call_count) - - -class TestDefaultPlanner(base.DbTestCase): - - def setUp(self): - super(TestDefaultPlanner, self).setUp() - self.default_planner = pbase.DefaultPlanner(mock.Mock()) - self.default_planner.config.weights = { - 'nop': 0, - 'sleep': 1, - 'change_nova_service_state': 2, - 'migrate': 3 - } - - self.goal = obj_utils.create_test_goal(self.context) - self.strategy = obj_utils.create_test_strategy( - self.context, goal_id=self.goal.id) - obj_utils.create_test_audit_template( - self.context, goal_id=self.goal.id, strategy_id=self.strategy.id) - - p = mock.patch.object(db_api.BaseConnection, 'create_action_plan') - self.mock_create_action_plan = p.start() - self.mock_create_action_plan.side_effect = ( - self._simulate_action_plan_create) - self.addCleanup(p.stop) - - q = mock.patch.object(db_api.BaseConnection, 'create_action') - self.mock_create_action = q.start() - self.mock_create_action.side_effect = ( - self._simulate_action_create) - self.addCleanup(q.stop) - - def _simulate_action_plan_create(self, action_plan): - action_plan.create() - return action_plan - - def _simulate_action_create(self, action): - action.create() - return action - - @mock.patch.object(objects.Strategy, 'get_by_name') - def test_schedule_scheduled_empty(self, m_get_by_name): - m_get_by_name.return_value = self.strategy - audit = db_utils.create_test_audit( - goal_id=self.goal.id, strategy_id=self.strategy.id) - fake_solution = SolutionFakerSingleHyp.build() - action_plan = self.default_planner.schedule(self.context, - audit.id, fake_solution) - self.assertIsNotNone(action_plan.uuid) - - @mock.patch.object(objects.Strategy, 'get_by_name') - def test_scheduler_warning_empty_action_plan(self, m_get_by_name): - m_get_by_name.return_value = self.strategy - audit = db_utils.create_test_audit( - goal_id=self.goal.id, strategy_id=self.strategy.id) - fake_solution = SolutionFaker.build() - action_plan = self.default_planner.schedule( - self.context, audit.id, fake_solution) - self.assertIsNotNone(action_plan.uuid) diff --git a/watcher/tests/decision_engine/planner/test_planner_manager.py b/watcher/tests/decision_engine/planner/test_planner_manager.py index 845627237..7d030f11f 100644 --- a/watcher/tests/decision_engine/planner/test_planner_manager.py +++ b/watcher/tests/decision_engine/planner/test_planner_manager.py @@ -16,13 +16,13 @@ from oslo_config import cfg -from watcher.decision_engine.planner import default from watcher.decision_engine.planner import manager as planner +from watcher.decision_engine.planner import weight from 
watcher.tests import base class TestPlannerManager(base.TestCase): def test_load(self): - cfg.CONF.set_override('planner', "default", group='watcher_planner') + cfg.CONF.set_override('planner', "weight", group='watcher_planner') manager = planner.PlannerManager() - self.assertIsInstance(manager.load(), default.DefaultPlanner) + self.assertIsInstance(manager.load(), weight.WeightPlanner) diff --git a/watcher/tests/decision_engine/planner/test_weight_planner.py b/watcher/tests/decision_engine/planner/test_weight_planner.py new file mode 100644 index 000000000..fdfd2447a --- /dev/null +++ b/watcher/tests/decision_engine/planner/test_weight_planner.py @@ -0,0 +1,943 @@ +# -*- encoding: utf-8 -*- +# Copyright (c) 2015 b<>com +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from watcher.common import nova_helper +from watcher.common import utils +from watcher.db import api as db_api +from watcher.decision_engine.planner import weight as pbase +from watcher.decision_engine.solution import default as dsol +from watcher.decision_engine.strategy import strategies +from watcher import objects +from watcher.tests.db import base +from watcher.tests.db import utils as db_utils +from watcher.tests.decision_engine.model import ceilometer_metrics as fake +from watcher.tests.decision_engine.model import faker_cluster_state +from watcher.tests.objects import utils as obj_utils + + +class SolutionFaker(object): + @staticmethod + def build(): + metrics = fake.FakerMetricsCollector() + current_state_cluster = faker_cluster_state.FakerModelCollector() + sercon = strategies.BasicConsolidation(config=mock.Mock()) + sercon.compute_model = current_state_cluster.generate_scenario_1() + sercon.ceilometer = mock.MagicMock( + get_statistics=metrics.mock_get_statistics) + return sercon.execute() + + +class SolutionFakerSingleHyp(object): + @staticmethod + def build(): + metrics = fake.FakerMetricsCollector() + current_state_cluster = faker_cluster_state.FakerModelCollector() + sercon = strategies.BasicConsolidation(config=mock.Mock()) + sercon.compute_model = ( + current_state_cluster.generate_scenario_3_with_2_nodes()) + sercon.ceilometer = mock.MagicMock( + get_statistics=metrics.mock_get_statistics) + + return sercon.execute() + + +class TestActionScheduling(base.DbTestCase): + + def setUp(self): + super(TestActionScheduling, self).setUp() + self.strategy = db_utils.create_test_strategy(name="dummy") + self.audit = db_utils.create_test_audit( + uuid=utils.generate_uuid(), strategy_id=self.strategy.id) + self.planner = pbase.WeightPlanner( + mock.Mock( + weights={ + 'turn_host_to_acpi_s3_state': 10, + 'resize': 20, + 'migrate': 30, + 'sleep': 40, + 'change_nova_service_state': 50, + 'nop': 60, + 'new_action_type': 70, + }, + parallelization={ + 'turn_host_to_acpi_s3_state': 2, + 'resize': 2, + 'migrate': 2, + 'sleep': 1, + 'change_nova_service_state': 1, + 'nop': 1, + 'new_action_type': 70, + })) + + @mock.patch.object(utils, "generate_uuid") + def test_schedule_actions(self, m_generate_uuid): + 
m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", # Migrate 1 + "22222222-2222-2222-2222-222222222222", + "33333333-3333-3333-3333-333333333333", + # "44444444-4444-4444-4444-444444444444", + # "55555555-5555-5555-5555-555555555555", + # "66666666-6666-6666-6666-666666666666", + # "77777777-7777-7777-7777-777777777777", + # "88888888-8888-8888-8888-888888888888", + # "99999999-9999-9999-9999-999999999999", + ] + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server1", + "destination_node": "server2"}) + + self.planner.config.weights = {'migrate': 3} + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + + self.assertIsNotNone(action_plan.uuid) + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = [] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + @mock.patch.object(utils, "generate_uuid") + def test_schedule_two_actions(self, m_generate_uuid): + m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", + "22222222-2222-2222-2222-222222222222", + "33333333-3333-3333-3333-333333333333", + "44444444-4444-4444-4444-444444444444", # Migrate 1 + "55555555-5555-5555-5555-555555555555", # Nop 1 + ] + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + # We create the migrate action before but we then schedule + # after the nop action + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server1", + "destination_node": "server2"}) + + solution.add_action(action_type="nop", + input_parameters={"message": "Hello world"}) + + self.planner.config.weights = {'migrate': 3, 'nop': 5} + + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + + self.assertIsNotNone(action_plan.uuid) + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = \ + [({'action_type': 'nop', + 'parents': [], + 'uuid': '55555555-5555-5555-5555-555555555555'}, + {'action_type': 'migrate', + 'parents': ['55555555-5555-5555-5555-555555555555'], + 'uuid': '44444444-4444-4444-4444-444444444444'})] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: 
pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + @mock.patch.object(utils, "generate_uuid") + def test_schedule_actions_with_unknown_action(self, m_generate_uuid): + m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", # Migrate 1 + "22222222-2222-2222-2222-222222222222", # new_action_type + "33333333-3333-3333-3333-333333333333", + + ] + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "src_uuid_node": "server1", + "dst_uuid_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters=parameters) + + solution.add_action(action_type="new_action_type", + resource_id="", + input_parameters={}) + + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = \ + [({'action_type': 'new_action_type', + 'parents': [], + 'uuid': '22222222-2222-2222-2222-222222222222'}, + {'action_type': 'migrate', + 'parents': ['22222222-2222-2222-2222-222222222222'], + 'uuid': '11111111-1111-1111-1111-111111111111'})] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + @mock.patch.object(utils, "generate_uuid") + @mock.patch.object(nova_helper.NovaHelper, 'get_instance_by_uuid') + def test_schedule_migrate_resize_actions(self, m_nova, m_generate_uuid): + m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", # Migrate 1 + "22222222-2222-2222-2222-222222222222", # Migrate 2 + "33333333-3333-3333-3333-333333333333", # Migrate 3 + "44444444-4444-4444-4444-444444444444", # Migrate 4 + "55555555-5555-5555-5555-555555555555", # Migrate 5 + "66666666-6666-6666-6666-666666666666", # Resize 1 + "77777777-7777-7777-7777-777777777777", # Resize 2 + "88888888-8888-8888-8888-888888888888", # Nop + "99999999-9999-9999-9999-999999999999", + ] + m_nova.return_value = 'server1' + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters=parameters) + + solution.add_action(action_type="resize", + resource_id="DOESNOTMATTER", + input_parameters={"flavor": "x1"}) + + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + 
self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = \ + [({'action_type': 'migrate', + 'parents': [], + 'uuid': '11111111-1111-1111-1111-111111111111'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111'], + 'uuid': '22222222-2222-2222-2222-222222222222'})] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + @mock.patch.object(utils, "generate_uuid") + def test_schedule_3_migrate_1_resize_1_acpi_actions_1_swimlane( + self, m_generate_uuid): + self.planner.config.parallelization["migrate"] = 1 + m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", # Migrate 1 + "22222222-2222-2222-2222-222222222222", # Migrate 2 + "33333333-3333-3333-3333-333333333333", # Migrate 3 + "44444444-4444-4444-4444-444444444444", # Resize + "55555555-5555-5555-5555-555555555555", # ACPI + "66666666-6666-6666-6666-666666666666", + "77777777-7777-7777-7777-777777777777", + "88888888-8888-8888-8888-888888888888", + "99999999-9999-9999-9999-999999999999", + ] + + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters=parameters) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server1", + "destination_node": "server2"}) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server2", + "destination_node": "server3"}) + + solution.add_action(action_type="resize", + resource_id="DOESNOTMATTER", + input_parameters={'flavor': 'x1'}) + + solution.add_action(action_type="turn_host_to_acpi_s3_state", + resource_id="server1", + input_parameters={}) + + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = \ + [({'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111'], + 'uuid': '22222222-2222-2222-2222-222222222222'}, + {'action_type': 'migrate', + 'parents': ['22222222-2222-2222-2222-222222222222'], + 'uuid': '33333333-3333-3333-3333-333333333333'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '11111111-1111-1111-1111-111111111111'}, + {'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111'], + 'uuid': '22222222-2222-2222-2222-222222222222'}), + ({'action_type': 'resize', + 'parents': ['33333333-3333-3333-3333-333333333333'], + 'uuid': '44444444-4444-4444-4444-444444444444'}, + {'action_type': 'turn_host_to_acpi_s3_state', + 'parents': ['44444444-4444-4444-4444-444444444444'], + 'uuid': 
'55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': ['22222222-2222-2222-2222-222222222222'], + 'uuid': '33333333-3333-3333-3333-333333333333'}, + {'action_type': 'resize', + 'parents': ['33333333-3333-3333-3333-333333333333'], + 'uuid': '44444444-4444-4444-4444-444444444444'})] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + @mock.patch.object(utils, "generate_uuid") + def test_schedule_migrate_resize_acpi_actions_2_swimlanes( + self, m_generate_uuid): + self.planner.config.parallelization["migrate"] = 2 + m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", # Migrate 1 + "22222222-2222-2222-2222-222222222222", # Migrate 2 + "33333333-3333-3333-3333-333333333333", # Migrate 3 + "44444444-4444-4444-4444-444444444444", # Resize + "55555555-5555-5555-5555-555555555555", # ACPI + "66666666-6666-6666-6666-666666666666", + "77777777-7777-7777-7777-777777777777", + "88888888-8888-8888-8888-888888888888", + "99999999-9999-9999-9999-999999999999", + ] + + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters=parameters) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server1", + "destination_node": "server2"}) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server2", + "destination_node": "server3"}) + + solution.add_action(action_type="resize", + resource_id="DOESNOTMATTER", + input_parameters={'flavor': 'x1'}) + + solution.add_action(action_type="turn_host_to_acpi_s3_state", + resource_id="server1", + input_parameters={}) + + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = \ + [({'action_type': 'migrate', + 'parents': [], + 'uuid': '11111111-1111-1111-1111-111111111111'}, + {'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '33333333-3333-3333-3333-333333333333'}), + ({'action_type': 'resize', + 'parents': ['33333333-3333-3333-3333-333333333333'], + 'uuid': '44444444-4444-4444-4444-444444444444'}, + {'action_type': 'turn_host_to_acpi_s3_state', + 'parents': ['44444444-4444-4444-4444-444444444444'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '22222222-2222-2222-2222-222222222222'}, + {'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '33333333-3333-3333-3333-333333333333'}), + ({'action_type': 
'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '33333333-3333-3333-3333-333333333333'}, + {'action_type': 'resize', + 'parents': ['33333333-3333-3333-3333-333333333333'], + 'uuid': '44444444-4444-4444-4444-444444444444'})] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + @mock.patch.object(utils, "generate_uuid") + def test_schedule_migrate_resize_acpi_actions_3_swimlanes( + self, m_generate_uuid): + self.planner.config.parallelization["migrate"] = 3 + m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", # Migrate 1 + "22222222-2222-2222-2222-222222222222", # Migrate 2 + "33333333-3333-3333-3333-333333333333", # Migrate 3 + "44444444-4444-4444-4444-444444444444", # Resize + "55555555-5555-5555-5555-555555555555", # ACPI + "66666666-6666-6666-6666-666666666666", + "77777777-7777-7777-7777-777777777777", + "88888888-8888-8888-8888-888888888888", + "99999999-9999-9999-9999-999999999999", + ] + + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters=parameters) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server1", + "destination_node": "server2"}) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server2", + "destination_node": "server3"}) + + solution.add_action(action_type="resize", + resource_id="DOESNOTMATTER", + input_parameters={'flavor': 'x1'}) + + solution.add_action(action_type="turn_host_to_acpi_s3_state", + resource_id="server1", + input_parameters={}) + + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = \ + [({'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '44444444-4444-4444-4444-444444444444'}, + {'action_type': 'turn_host_to_acpi_s3_state', + 'parents': ['44444444-4444-4444-4444-444444444444'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '11111111-1111-1111-1111-111111111111'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '44444444-4444-4444-4444-444444444444'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '22222222-2222-2222-2222-222222222222'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + 
'22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '44444444-4444-4444-4444-444444444444'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '33333333-3333-3333-3333-333333333333'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '44444444-4444-4444-4444-444444444444'})] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + @mock.patch.object(utils, "generate_uuid") + def test_schedule_three_migrate_two_resize_actions( + self, m_generate_uuid): + self.planner.config.parallelization["migrate"] = 3 + self.planner.config.parallelization["resize"] = 2 + m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", # Migrate 1 + "22222222-2222-2222-2222-222222222222", # Migrate 2 + "33333333-3333-3333-3333-333333333333", # Migrate 3 + "44444444-4444-4444-4444-444444444444", # Resize + "55555555-5555-5555-5555-555555555555", # ACPI + "66666666-6666-6666-6666-666666666666", + "77777777-7777-7777-7777-777777777777", + "88888888-8888-8888-8888-888888888888", + "99999999-9999-9999-9999-999999999999", + ] + + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters=parameters) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server1", + "destination_node": "server2"}) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server2", + "destination_node": "server3"}) + + solution.add_action(action_type="resize", + resource_id="DOESNOTMATTER", + input_parameters={'flavor': 'x1'}) + + solution.add_action(action_type="resize", + resource_id="b189db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters={'flavor': 'x1'}) + + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = \ + [({'action_type': 'migrate', + 'parents': [], + 'uuid': '11111111-1111-1111-1111-111111111111'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '11111111-1111-1111-1111-111111111111'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 
'migrate', + 'parents': [], + 'uuid': '22222222-2222-2222-2222-222222222222'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '22222222-2222-2222-2222-222222222222'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '33333333-3333-3333-3333-333333333333'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '33333333-3333-3333-3333-333333333333'}, + {'action_type': 'resize', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222', + '33333333-3333-3333-3333-333333333333'], + 'uuid': '55555555-5555-5555-5555-555555555555'})] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + @mock.patch.object(utils, "generate_uuid") + def test_schedule_5_migrate_2_resize_actions_for_2_swimlanes( + self, m_generate_uuid): + self.planner.config.parallelization["migrate"] = 2 + self.planner.config.parallelization["resize"] = 2 + m_generate_uuid.side_effect = [ + "00000000-0000-0000-0000-000000000000", # Action plan + "11111111-1111-1111-1111-111111111111", # Migrate 1 + "22222222-2222-2222-2222-222222222222", # Migrate 2 + "33333333-3333-3333-3333-333333333333", # Migrate 3 + "44444444-4444-4444-4444-444444444444", # Migrate 4 + "55555555-5555-5555-5555-555555555555", # Migrate 5 + "66666666-6666-6666-6666-666666666666", # Resize 1 + "77777777-7777-7777-7777-777777777777", # Resize 2 + "88888888-8888-8888-8888-888888888888", # Nop + "99999999-9999-9999-9999-999999999999", + ] + + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server1", + "destination_node": "server6"}) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server2", + "destination_node": "server6"}) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server3", + "destination_node": "server6"}) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server4", + "destination_node": "server6"}) + + solution.add_action(action_type="migrate", + resource_id="DOESNOTMATTER", + input_parameters={"source_node": "server5", + "destination_node": "server6"}) + + solution.add_action(action_type="resize", + resource_id="DOESNOTMATTER", + input_parameters={'flavor': 'x1'}) + + solution.add_action(action_type="resize", + resource_id="DOESNOTMATTER", + 
input_parameters={'flavor': 'x2'}) + + solution.add_action(action_type="turn_host_to_acpi_s3_state", + resource_id="DOESNOTMATTER") + + with mock.patch.object( + pbase.WeightPlanner, "create_scheduled_actions", + wraps=self.planner.create_scheduled_actions + ) as m_create_scheduled_actions: + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_scheduled_actions.call_count) + action_graph = m_create_scheduled_actions.call_args[0][1] + + expected_edges = \ + [({'action_type': 'migrate', + 'parents': [], + 'uuid': '11111111-1111-1111-1111-111111111111'}, + {'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '33333333-3333-3333-3333-333333333333'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '11111111-1111-1111-1111-111111111111'}, + {'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '44444444-4444-4444-4444-444444444444'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '22222222-2222-2222-2222-222222222222'}, + {'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '33333333-3333-3333-3333-333333333333'}), + ({'action_type': 'migrate', + 'parents': [], + 'uuid': '22222222-2222-2222-2222-222222222222'}, + {'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '44444444-4444-4444-4444-444444444444'}), + ({'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '33333333-3333-3333-3333-333333333333'}, + {'action_type': 'migrate', + 'parents': ['33333333-3333-3333-3333-333333333333', + '44444444-4444-4444-4444-444444444444'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': ['11111111-1111-1111-1111-111111111111', + '22222222-2222-2222-2222-222222222222'], + 'uuid': '44444444-4444-4444-4444-444444444444'}, + {'action_type': 'migrate', + 'parents': ['33333333-3333-3333-3333-333333333333', + '44444444-4444-4444-4444-444444444444'], + 'uuid': '55555555-5555-5555-5555-555555555555'}), + ({'action_type': 'migrate', + 'parents': ['33333333-3333-3333-3333-333333333333', + '44444444-4444-4444-4444-444444444444'], + 'uuid': '55555555-5555-5555-5555-555555555555'}, + {'action_type': 'resize', + 'parents': ['55555555-5555-5555-5555-555555555555'], + 'uuid': '66666666-6666-6666-6666-666666666666'}), + ({'action_type': 'migrate', + 'parents': ['33333333-3333-3333-3333-333333333333', + '44444444-4444-4444-4444-444444444444'], + 'uuid': '55555555-5555-5555-5555-555555555555'}, + {'action_type': 'resize', + 'parents': ['55555555-5555-5555-5555-555555555555'], + 'uuid': '77777777-7777-7777-7777-777777777777'}), + ({'action_type': 'resize', + 'parents': ['55555555-5555-5555-5555-555555555555'], + 'uuid': '66666666-6666-6666-6666-666666666666'}, + {'action_type': 'turn_host_to_acpi_s3_state', + 'parents': ['66666666-6666-6666-6666-666666666666', + '77777777-7777-7777-7777-777777777777'], + 'uuid': '88888888-8888-8888-8888-888888888888'}), + ({'action_type': 'resize', + 'parents': ['55555555-5555-5555-5555-555555555555'], + 'uuid': '77777777-7777-7777-7777-777777777777'}, + {'action_type': 'turn_host_to_acpi_s3_state', + 'parents': 
['66666666-6666-6666-6666-666666666666', + '77777777-7777-7777-7777-777777777777'], + 'uuid': '88888888-8888-8888-8888-888888888888'})] + + edges = sorted([(src.as_dict(), dst.as_dict()) + for src, dst in action_graph.edges()], + key=lambda pair: pair[0]['uuid']) + for src, dst in edges: + for key in ('id', 'action_plan', 'action_plan_id', 'created_at', + 'input_parameters', 'deleted_at', 'updated_at', + 'state'): + del src[key] + del dst[key] + + self.assertEqual(len(expected_edges), len(edges)) + for pair in expected_edges: + self.assertIn(pair, edges) + + +class TestWeightPlanner(base.DbTestCase): + + def setUp(self): + super(TestWeightPlanner, self).setUp() + self.planner = pbase.WeightPlanner(mock.Mock()) + self.planner.config.weights = { + 'nop': 0, + 'sleep': 1, + 'change_nova_service_state': 2, + 'migrate': 3 + } + + self.goal = obj_utils.create_test_goal(self.context) + self.strategy = obj_utils.create_test_strategy( + self.context, goal_id=self.goal.id) + obj_utils.create_test_audit_template( + self.context, goal_id=self.goal.id, strategy_id=self.strategy.id) + + p = mock.patch.object(db_api.BaseConnection, 'create_action_plan') + self.mock_create_action_plan = p.start() + self.mock_create_action_plan.side_effect = ( + self._simulate_action_plan_create) + self.addCleanup(p.stop) + + q = mock.patch.object(db_api.BaseConnection, 'create_action') + self.mock_create_action = q.start() + self.mock_create_action.side_effect = ( + self._simulate_action_create) + self.addCleanup(q.stop) + + def _simulate_action_plan_create(self, action_plan): + action_plan.create() + return action_plan + + def _simulate_action_create(self, action): + action.create() + return action + + @mock.patch.object(objects.Strategy, 'get_by_name') + def test_scheduler_warning_empty_action_plan(self, m_get_by_name): + m_get_by_name.return_value = self.strategy + audit = db_utils.create_test_audit( + goal_id=self.goal.id, strategy_id=self.strategy.id) + fake_solution = mock.MagicMock(efficacy_indicators=[], + actions=[]) + action_plan = self.planner.schedule( + self.context, audit.id, fake_solution) + self.assertIsNotNone(action_plan.uuid) diff --git a/watcher/tests/decision_engine/planner/test_workload_stabilization_planner.py b/watcher/tests/decision_engine/planner/test_workload_stabilization_planner.py new file mode 100644 index 000000000..f53b98d01 --- /dev/null +++ b/watcher/tests/decision_engine/planner/test_workload_stabilization_planner.py @@ -0,0 +1,378 @@ +# -*- encoding: utf-8 -*- +# Copyright (c) 2015 b<>com +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import mock + +from watcher.common import exception +from watcher.common import nova_helper +from watcher.common import utils +from watcher.db import api as db_api +from watcher.decision_engine.planner import workload_stabilization as pbase +from watcher.decision_engine.solution import default as dsol +from watcher.decision_engine.strategy import strategies +from watcher import objects +from watcher.tests.db import base +from watcher.tests.db import utils as db_utils +from watcher.tests.decision_engine.model import ceilometer_metrics as fake +from watcher.tests.decision_engine.model import faker_cluster_state +from watcher.tests.objects import utils as obj_utils + + +class SolutionFaker(object): + @staticmethod + def build(): + metrics = fake.FakerMetricsCollector() + current_state_cluster = faker_cluster_state.FakerModelCollector() + sercon = strategies.BasicConsolidation(config=mock.Mock()) + sercon._compute_model = current_state_cluster.generate_scenario_1() + sercon.ceilometer = mock.MagicMock( + get_statistics=metrics.mock_get_statistics) + return sercon.execute() + + +class SolutionFakerSingleHyp(object): + @staticmethod + def build(): + metrics = fake.FakerMetricsCollector() + current_state_cluster = faker_cluster_state.FakerModelCollector() + sercon = strategies.BasicConsolidation(config=mock.Mock()) + sercon._compute_model = ( + current_state_cluster.generate_scenario_3_with_2_nodes()) + sercon.ceilometer = mock.MagicMock( + get_statistics=metrics.mock_get_statistics) + + return sercon.execute() + + +class TestActionScheduling(base.DbTestCase): + + def setUp(self): + super(TestActionScheduling, self).setUp() + self.strategy = db_utils.create_test_strategy(name="dummy") + self.audit = db_utils.create_test_audit( + uuid=utils.generate_uuid(), strategy_id=self.strategy.id) + self.planner = pbase.WorkloadStabilizationPlanner(mock.Mock()) + self.nova_helper = nova_helper.NovaHelper(mock.Mock()) + + def test_schedule_actions(self): + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters=parameters) + + with mock.patch.object( + pbase.WorkloadStabilizationPlanner, "create_action", + wraps=self.planner.create_action + ) as m_create_action: + self.planner.config.weights = {'migrate': 3} + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(1, m_create_action.call_count) + filters = {'action_plan_id': action_plan.id} + actions = objects.Action.dbapi.get_action_list(self.context, filters) + self.assertEqual("migrate", actions[0].action_type) + + def test_schedule_two_actions(self): + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters=parameters) + + solution.add_action(action_type="nop", + input_parameters={"message": "Hello world"}) + + with mock.patch.object( + pbase.WorkloadStabilizationPlanner, "create_action", + wraps=self.planner.create_action + ) as m_create_action: + self.planner.config.weights = {'migrate': 3, 'nop': 5} + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertIsNotNone(action_plan.uuid) + 
self.assertEqual(2, m_create_action.call_count) + # check order + filters = {'action_plan_id': action_plan.id} + actions = objects.Action.dbapi.get_action_list(self.context, filters) + self.assertEqual("nop", actions[0].action_type) + self.assertEqual("migrate", actions[1].action_type) + + def test_schedule_actions_with_unknown_action(self): + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "src_uuid_node": "server1", + "dst_uuid_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters=parameters) + + solution.add_action(action_type="new_action_type", + resource_id="", + input_parameters={}) + + with mock.patch.object( + pbase.WorkloadStabilizationPlanner, "create_action", + wraps=self.planner.create_action + ) as m_create_action: + with mock.patch.object(nova_helper, 'NovaHelper') as m_nova: + self.planner.config.weights = {'migrate': 0} + self.assertRaises(KeyError, self.planner.schedule, + self.context, self.audit.id, solution) + assert not m_nova.called + self.assertEqual(2, m_create_action.call_count) + + def test_schedule_actions_with_unsupported_action(self): + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "src_uuid_node": "server1", + "dst_uuid_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters=parameters) + + solution.add_action(action_type="new_action_type", + resource_id="", + input_parameters={}) + with mock.patch.object( + pbase.WorkloadStabilizationPlanner, "create_action", + wraps=self.planner.create_action + ) as m_create_action: + with mock.patch.object(nova_helper, 'NovaHelper') as m_nova: + self.planner.config.weights = { + 'turn_host_to_acpi_s3_state': 0, + 'resize': 1, + 'migrate': 2, + 'sleep': 3, + 'change_nova_service_state': 4, + 'nop': 5, + 'new_action_type': 6} + self.assertRaises(exception.UnsupportedActionType, + self.planner.schedule, + self.context, self.audit.id, solution) + assert not m_nova.called + self.assertEqual(2, m_create_action.call_count) + + @mock.patch.object(nova_helper.NovaHelper, 'get_instance_by_uuid') + def test_schedule_migrate_resize_actions(self, mock_nova): + mock_nova.return_value = 'server1' + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + solution.add_action(action_type="migrate", + resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters=parameters) + + solution.add_action(action_type="resize", + resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters={"flavor": "x1"}) + + with mock.patch.object( + pbase.WorkloadStabilizationPlanner, "create_action", + wraps=self.planner.create_action + ) as m_create_action: + with mock.patch.object(nova_helper, 'NovaHelper') as m_nova: + self.planner.config.weights = {'migrate': 3, 'resize': 2} + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertEqual(1, m_nova.call_count) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(2, m_create_action.call_count) + # check order + filters = {'action_plan_id': action_plan.id} + actions = objects.Action.dbapi.get_action_list(self.context, filters) + self.assertEqual("migrate", actions[0].action_type) + self.assertEqual("resize", actions[1].action_type) + self.assertEqual(actions[0].uuid, 
actions[1].parents[0]) + + def test_schedule_migrate_resize_acpi_s3_actions(self): + solution = dsol.DefaultSolution( + goal=mock.Mock(), strategy=self.strategy) + + parameters = { + "source_node": "server1", + "destination_node": "server2", + } + parent_migration = "b199db0c-1408-4d52-b5a5-5ca14de0ff36" + solution.add_action(action_type="migrate", + resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters=parameters) + + solution.add_action(action_type="resize", + resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36", + input_parameters={'flavor': 'x1'}) + + solution.add_action(action_type="migrate", + resource_id="f6416850-da28-4047-a547-8c49f53e95fe", + input_parameters={"source_node": "server1", + "destination_node": "server2"}) + + solution.add_action(action_type="migrate", + resource_id="bb404e74-2caf-447b-bd1e-9234db386ca5", + input_parameters={"source_node": "server2", + "destination_node": "server3"}) + + solution.add_action(action_type="turn_host_to_acpi_s3_state", + resource_id="server1", + input_parameters={}) + + with mock.patch.object( + pbase.WorkloadStabilizationPlanner, "create_action", + wraps=self.planner.create_action + ) as m_create_action: + with mock.patch.object( + nova_helper, 'NovaHelper') as m_nova: + m_nova().get_hostname.return_value = 'server1' + m_nova().get_instance_by_uuid.return_value = ['uuid1'] + self.planner.config.weights = { + 'turn_host_to_acpi_s3_state': 0, + 'resize': 1, + 'migrate': 2, + 'sleep': 3, + 'change_nova_service_state': 4, + 'nop': 5} + action_plan = self.planner.schedule( + self.context, self.audit.id, solution) + self.assertEqual(3, m_nova.call_count) + self.assertIsNotNone(action_plan.uuid) + self.assertEqual(5, m_create_action.call_count) + # check order + filters = {'action_plan_id': action_plan.id} + actions = objects.Action.dbapi.get_action_list(self.context, filters) + self.assertEqual("migrate", actions[0].action_type) + self.assertEqual("migrate", actions[1].action_type) + self.assertEqual("migrate", actions[2].action_type) + self.assertEqual("resize", actions[3].action_type) + self.assertEqual("turn_host_to_acpi_s3_state", actions[4].action_type) + for action in actions: + if action.input_parameters['resource_id'] == parent_migration: + parent_migration = action + break + self.assertEqual(parent_migration.uuid, actions[3].parents[0]) + + +class TestDefaultPlanner(base.DbTestCase): + + def setUp(self): + super(TestDefaultPlanner, self).setUp() + self.planner = pbase.WorkloadStabilizationPlanner(mock.Mock()) + self.planner.config.weights = { + 'nop': 0, + 'sleep': 1, + 'change_nova_service_state': 2, + 'migrate': 3 + } + + self.goal = obj_utils.create_test_goal(self.context) + self.strategy = obj_utils.create_test_strategy( + self.context, goal_id=self.goal.id) + obj_utils.create_test_audit_template( + self.context, goal_id=self.goal.id, strategy_id=self.strategy.id) + + p = mock.patch.object(db_api.BaseConnection, 'create_action_plan') + self.mock_create_action_plan = p.start() + self.mock_create_action_plan.side_effect = ( + self._simulate_action_plan_create) + self.addCleanup(p.stop) + + q = mock.patch.object(db_api.BaseConnection, 'create_action') + self.mock_create_action = q.start() + self.mock_create_action.side_effect = ( + self._simulate_action_create) + self.addCleanup(q.stop) + + def _simulate_action_plan_create(self, action_plan): + action_plan.create() + return action_plan + + def _simulate_action_create(self, action): + action.create() + return action + + @mock.patch.object(objects.Strategy, 
'get_by_name') + def test_scheduler_warning_empty_action_plan(self, m_get_by_name): + m_get_by_name.return_value = self.strategy + audit = db_utils.create_test_audit( + goal_id=self.goal.id, strategy_id=self.strategy.id) + fake_solution = mock.MagicMock(efficacy_indicators=[], + actions=[]) + action_plan = self.planner.schedule( + self.context, audit.id, fake_solution) + self.assertIsNotNone(action_plan.uuid) + + +class TestActionValidator(base.DbTestCase): + INSTANCE_UUID = "94ae2f92-b7fd-4da7-9e97-f13504ae98c4" + + def setUp(self): + super(TestActionValidator, self).setUp() + self.r_osc_cls = mock.Mock() + self.r_helper_cls = mock.Mock() + self.r_helper = mock.Mock(spec=nova_helper.NovaHelper) + self.r_helper_cls.return_value = self.r_helper + r_nova_helper = mock.patch.object( + nova_helper, "NovaHelper", self.r_helper_cls) + + r_nova_helper.start() + + self.addCleanup(r_nova_helper.stop) + + def test_resize_validate_parents(self): + resize_object = pbase.ResizeActionValidator() + action = {'uuid': 'fcec56cd-74c1-406b-a7c1-81ef9f0c1393', + 'input_parameters': {'resource_id': self.INSTANCE_UUID}} + resource_action_map = {self.INSTANCE_UUID: [ + ('action_uuid', 'migrate')]} + self.r_helper.get_hostname.return_value = 'server1' + self.r_helper.get_instance_by_uuid.return_value = ['instance'] + result = resize_object.validate_parents(resource_action_map, action) + self.assertEqual('action_uuid', result[0]) + + def test_migrate_validate_parents(self): + migrate_object = pbase.MigrationActionValidator() + action = {'uuid': '712f1701-4c1b-4076-bfcf-3f23cfec6c3b', + 'input_parameters': {'source_node': 'server1', + 'resource_id': self.INSTANCE_UUID}} + resource_action_map = {} + expected_map = { + '94ae2f92-b7fd-4da7-9e97-f13504ae98c4': [ + ('712f1701-4c1b-4076-bfcf-3f23cfec6c3b', 'migrate')], + 'server1': [ + ('712f1701-4c1b-4076-bfcf-3f23cfec6c3b', 'migrate')]} + migrate_object.validate_parents(resource_action_map, action) + self.assertEqual(resource_action_map, expected_map) diff --git a/watcher/tests/decision_engine/test_sync.py b/watcher/tests/decision_engine/test_sync.py index 7e1b8d16a..3a2f23e09 100644 --- a/watcher/tests/decision_engine/test_sync.py +++ b/watcher/tests/decision_engine/test_sync.py @@ -363,29 +363,25 @@ class TestSyncer(base.DbTestCase): action_plan1 = objects.ActionPlan( self.ctx, id=1, uuid=utils.generate_uuid(), audit_id=audit1.id, strategy_id=strategy1.id, - first_action_id=None, state='DOESNOTMATTER', - global_efficacy={}) + state='DOESNOTMATTER', global_efficacy={}) # Stale after syncing because the goal of the audit has been modified # (compared to the defined fake goals) action_plan2 = objects.ActionPlan( self.ctx, id=2, uuid=utils.generate_uuid(), audit_id=audit2.id, strategy_id=strategy2.id, - first_action_id=None, state='DOESNOTMATTER', - global_efficacy={}) + state='DOESNOTMATTER', global_efficacy={}) # Stale after syncing because the strategy has been modified # (compared to the defined fake strategies) action_plan3 = objects.ActionPlan( self.ctx, id=3, uuid=utils.generate_uuid(), audit_id=audit3.id, strategy_id=strategy3.id, - first_action_id=None, state='DOESNOTMATTER', - global_efficacy={}) + state='DOESNOTMATTER', global_efficacy={}) # Stale after syncing because both the strategy and the related audit # have been modified (compared to the defined fake goals/strategies) action_plan4 = objects.ActionPlan( self.ctx, id=4, uuid=utils.generate_uuid(), audit_id=audit4.id, strategy_id=strategy4.id, - first_action_id=None, state='DOESNOTMATTER', - 
global_efficacy={})
+            state='DOESNOTMATTER', global_efficacy={})

         action_plan1.create()
         action_plan2.create()
@@ -575,14 +571,12 @@ class TestSyncer(base.DbTestCase):
         action_plan1 = objects.ActionPlan(
             self.ctx, id=1, uuid=utils.generate_uuid(),
             audit_id=audit1.id, strategy_id=strategy1.id,
-            first_action_id=None, state='DOESNOTMATTER',
-            global_efficacy={})
+            state='DOESNOTMATTER', global_efficacy={})
         # Stale after syncing because its related goal has been soft deleted
         action_plan2 = objects.ActionPlan(
             self.ctx, id=2, uuid=utils.generate_uuid(),
             audit_id=audit2.id, strategy_id=strategy2.id,
-            first_action_id=None, state='DOESNOTMATTER',
-            global_efficacy={})
+            state='DOESNOTMATTER', global_efficacy={})

         action_plan1.create()
         action_plan2.create()
diff --git a/watcher/tests/objects/test_objects.py b/watcher/tests/objects/test_objects.py
index 3872573b3..04c13e188 100644
--- a/watcher/tests/objects/test_objects.py
+++ b/watcher/tests/objects/test_objects.py
@@ -413,8 +413,8 @@ expected_object_fingerprints = {
     'Strategy': '1.1-73f164491bdd4c034f48083a51bdeb7b',
     'AuditTemplate': '1.1-b291973ffc5efa2c61b24fe34fdccc0b',
     'Audit': '1.2-910522db78b7b1cb59df614754656db4',
-    'ActionPlan': '1.2-42709eadf6b2bd228ea87817e8c3e31e',
-    'Action': '1.1-52c77e4db4ce0aa9480c9760faec61a1',
+    'ActionPlan': '2.0-394f1abbf5d73d7b6675a118fe1a0284',
+    'Action': '2.0-1dd4959a7e7ac30c62ef170fe08dd935',
     'EfficacyIndicator': '1.0-655b71234a82bc7478aff964639c4bb0',
     'ScoringEngine': '1.0-4abbe833544000728e17bd9e83f97576',
     'Service': '1.0-4b35b99ada9677a882c9de2b30212f35',
diff --git a/watcher_tempest_plugin/tests/api/admin/test_action.py b/watcher_tempest_plugin/tests/api/admin/test_action.py
index bc1ba409a..c57e10d0a 100644
--- a/watcher_tempest_plugin/tests/api/admin/test_action.py
+++ b/watcher_tempest_plugin/tests/api/admin/test_action.py
@@ -45,18 +45,19 @@ class TestShowListAction(base.BaseInfraOptimTest):

     @test.attr(type='smoke')
     def test_show_one_action(self):
-        _, action = self.client.show_action(
-            self.action_plan["first_action_uuid"])
+        _, body = self.client.list_actions(
+            action_plan_uuid=self.action_plan['uuid'])
+        _, action = self.client.show_action(body['actions'][0]['uuid'])

-        self.assertEqual(self.action_plan["first_action_uuid"],
-                         action['uuid'])
+        self.assertEqual(body['actions'][0]['uuid'], action['uuid'])
         self.assertEqual("nop", action['action_type'])
         self.assertEqual("PENDING", action['state'])

     @test.attr(type='smoke')
     def test_show_action_with_links(self):
-        _, action = self.client.show_action(
-            self.action_plan["first_action_uuid"])
+        _, body = self.client.list_actions(
+            action_plan_uuid=self.action_plan['uuid'])
+        _, action = self.client.show_action(body['actions'][0]['uuid'])
         self.assertIn('links', action.keys())
         self.assertEqual(2, len(action['links']))
         self.assertIn(action['uuid'], action['links'][0]['href'])