Add Voluptuous to validate the action parameters

We want a simple way to validate the input parameters of an Action
through a schema.

APIImpact
DocImpact
Partially implements: blueprint watcher-add-actions-via-conf
Change-Id: I139775f467fe7778c7354b0cfacf796fc27ffcb2

This commit is contained in:
parent 33ee575936
commit e3198d25a5
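For orientation, the pattern this change introduces is sketched below in a minimal, self-contained form: every action exposes a voluptuous schema describing its input parameters, and validate_parameters() simply applies that schema, so bad parameters raise voluptuous.Invalid before the action runs. Names mirror the diff; the real BaseAction also carries an OpenStackClients handle and wraps input_parameters in a property, which this sketch omits.

```python
import abc

import six
import voluptuous


@six.add_metaclass(abc.ABCMeta)
class BaseAction(object):
    # By convention, 'resource_id' identifies the resource the action
    # applies to and is nested inside input_parameters.
    RESOURCE_ID = 'resource_id'

    def __init__(self):
        self.input_parameters = {}

    @abc.abstractproperty
    def schema(self):
        """Voluptuous schema describing the expected input parameters."""
        raise NotImplementedError()

    def validate_parameters(self):
        # voluptuous raises Invalid/MultipleInvalid when the parameters
        # do not match the schema.
        self.schema(self.input_parameters)
        return True


class Sleep(BaseAction):
    DURATION = 'duration'

    @property
    def schema(self):
        return voluptuous.Schema({
            voluptuous.Required(self.DURATION, default=1):
                voluptuous.All(float, voluptuous.Range(min=0)),
        })


action = Sleep()
action.input_parameters = {Sleep.DURATION: 5.0}
assert action.validate_parameters()

action.input_parameters = {Sleep.DURATION: "five"}
# action.validate_parameters() now raises voluptuous.MultipleInvalid
```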
@ -17,6 +17,7 @@ oslo.utils>=3.4.0 # Apache-2.0
PasteDeploy>=1.5.0
pbr>=1.6
pecan>=1.0.0
voluptuous>=0.8.6 # BSD License
python-ceilometerclient>=2.2.1
python-cinderclient>=1.3.1
python-glanceclient>=1.2.0
@ -129,13 +129,10 @@ class Action(base.APIBase):
alarm = types.uuid
"""An alarm UUID related to this action"""

applies_to = wtypes.text
"""Applies to"""

action_type = wtypes.text
"""Action type"""

input_parameters = wtypes.DictType(wtypes.text, wtypes.text)
input_parameters = types.jsontype
"""One or more key/value pairs """

next_uuid = wsme.wsproperty(types.uuid, _get_next_uuid,
@ -27,10 +27,15 @@ from watcher.common import clients
@six.add_metaclass(abc.ABCMeta)
class BaseAction(object):
# NOTE(jed) by convention we decided
# that the attribute "resource_id" is the unique id of
# the resource to which the Action applies to allow us to use it in the
# watcher dashboard and will be nested in input_parameters
RESOURCE_ID = 'resource_id'

def __init__(self, osc=None):
""":param osc: an OpenStackClients instance"""
self._input_parameters = {}
self._applies_to = ""
self._osc = osc

@property
@ -48,12 +53,8 @@ class BaseAction(object):
self._input_parameters = p

@property
def applies_to(self):
return self._applies_to

@applies_to.setter
def applies_to(self, a):
self._applies_to = a
def resource_id(self):
return self.input_parameters[self.RESOURCE_ID]

@abc.abstractmethod
def execute(self):
@ -70,3 +71,11 @@ class BaseAction(object):
@abc.abstractmethod
def postcondition(self):
raise NotImplementedError()

@abc.abstractproperty
def schema(self):
raise NotImplementedError()

def validate_parameters(self):
self.schema(self.input_parameters)
return True
@ -16,7 +16,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#

import six
import voluptuous

from watcher._i18n import _
from watcher.applier.actions import base
@ -27,13 +28,27 @@ from watcher.decision_engine.model import hypervisor_state as hstate
class ChangeNovaServiceState(base.BaseAction):

STATE = 'state'

@property
def schema(self):
return voluptuous.Schema({
voluptuous.Required(self.RESOURCE_ID):
voluptuous.All(
voluptuous.Any(*six.string_types),
voluptuous.Length(min=1)),
voluptuous.Required(self.STATE):
voluptuous.Any(*[state.value
for state in list(hstate.HypervisorState)]),
})

@property
def host(self):
return self.applies_to
return self.resource_id

@property
def state(self):
return self.input_parameters.get('state')
return self.input_parameters.get(self.STATE)

def execute(self):
target_state = None
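The `voluptuous.Any(*[state.value for state in ...])` idiom above whitelists the literal values of an enum: voluptuous accepts a value only if it equals one of the listed literals. A small sketch with a hypothetical two-member enum standing in for watcher's HypervisorState:

```python
import enum

import voluptuous


class HypervisorState(enum.Enum):
    # Hypothetical stand-in for watcher's HypervisorState enum.
    ONLINE = 'up'
    OFFLINE = 'down'


schema = voluptuous.Schema({
    voluptuous.Required('state'):
        voluptuous.Any(*[state.value for state in list(HypervisorState)]),
})

print(schema({'state': 'up'}))   # {'state': 'up'}
# schema({'state': 'error'})     # raises voluptuous.MultipleInvalid
```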
@ -33,5 +33,10 @@ class ActionFactory(object):
loaded_action = self.action_loader.load(name=object_action.action_type,
osc=osc)
loaded_action.input_parameters = object_action.input_parameters
loaded_action.applies_to = object_action.applies_to
LOG.debug("Checking the input parameters")
# NOTE(jed) if we change the schema of an action and we try to reload
# an older version of the Action, the validation can fail.
# We need to add the versioning of an Action or a migration tool.
# We can also create an new Action which extends the previous one.
loaded_action.validate_parameters()
return loaded_action
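The factory now validates every action immediately after loading it, so parameters saved under an older schema fail fast instead of breaking mid-workflow (as the NOTE above warns). A rough sketch of that flow, with a plain dict standing in for the real plugin-based action_loader:

```python
import voluptuous


class Nop(object):
    """Minimal stand-in for a loaded action plugin."""
    MESSAGE = 'message'

    def __init__(self):
        self.input_parameters = {}

    @property
    def schema(self):
        return voluptuous.Schema({
            voluptuous.Required(self.MESSAGE): voluptuous.Any(str, None),
        })

    def validate_parameters(self):
        self.schema(self.input_parameters)
        return True


# Hypothetical registry standing in for the plugin-based action_loader.
ACTION_REGISTRY = {'nop': Nop}


def make_action(action_type, input_parameters):
    loaded_action = ACTION_REGISTRY[action_type]()
    loaded_action.input_parameters = input_parameters
    # Parameters saved under an older schema fail here, at load time.
    loaded_action.validate_parameters()
    return loaded_action


make_action('nop', {'message': 'hello'})   # ok
# make_action('nop', {'msg': 'hello'})     # raises voluptuous.MultipleInvalid
```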
@ -18,30 +18,63 @@
#

from oslo_log import log
import six
import voluptuous

from watcher._i18n import _
from watcher.applier.actions import base
from watcher.common import exception
from watcher.common import nova_helper
from watcher.common import utils

LOG = log.getLogger(__name__)


class Migrate(base.BaseAction):

# input parameters constants
MIGRATION_TYPE = 'migration_type'
LIVE_MIGRATION = 'live'
DST_HYPERVISOR = 'dst_hypervisor'
SRC_HYPERVISOR = 'src_hypervisor'

def check_resource_id(self, value):
if (value is not None and
len(value) > 0 and not
utils.is_uuid_like(value)):
raise voluptuous.Invalid(_("The parameter"
" resource_id is invalid."))

@property
def schema(self):
return voluptuous.Schema({
voluptuous.Required(self.RESOURCE_ID): self.check_resource_id,
voluptuous.Required(self.MIGRATION_TYPE,
default=self.LIVE_MIGRATION):
voluptuous.Any(*[self.LIVE_MIGRATION]),
voluptuous.Required(self.DST_HYPERVISOR):
voluptuous.All(voluptuous.Any(*six.string_types),
voluptuous.Length(min=1)),
voluptuous.Required(self.SRC_HYPERVISOR):
voluptuous.All(voluptuous.Any(*six.string_types),
voluptuous.Length(min=1)),
})

@property
def instance_uuid(self):
return self.applies_to
return self.resource_id

@property
def migration_type(self):
return self.input_parameters.get('migration_type')
return self.input_parameters.get(self.MIGRATION_TYPE)

@property
def dst_hypervisor(self):
return self.input_parameters.get('dst_hypervisor')
return self.input_parameters.get(self.DST_HYPERVISOR)

@property
def src_hypervisor(self):
return self.input_parameters.get('src_hypervisor')
return self.input_parameters.get(self.SRC_HYPERVISOR)

def migrate(self, destination):
nova = nova_helper.NovaHelper(osc=self.osc)
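Besides the declarative validators, `Required(self.RESOURCE_ID): self.check_resource_id` above plugs an ordinary callable into the schema: voluptuous calls it with the value and treats a raised voluptuous.Invalid as a validation failure. A standalone sketch of the same idea follows; it uses the stdlib uuid module where Watcher uses its own utils.is_uuid_like helper, and it is stricter than the in-tree check, which only raises for non-empty values that are not UUID-like.

```python
import uuid

import voluptuous


def check_resource_id(value):
    """Accept only values that look like a UUID."""
    try:
        uuid.UUID(str(value))
    except (TypeError, ValueError):
        raise voluptuous.Invalid("The parameter resource_id is invalid.")
    return value


schema = voluptuous.Schema({
    voluptuous.Required('resource_id'): check_resource_id,
    # When the key is missing, the default value 'live' is filled in.
    voluptuous.Required('migration_type', default='live'):
        voluptuous.Any('live'),
})

schema({'resource_id': '45a37aeb-95ab-4ddb-a305-7d9f62c2f5ba'})
# -> {'resource_id': '45a37aeb-...', 'migration_type': 'live'}
# schema({'resource_id': 'EFEF'})  # raises voluptuous.MultipleInvalid
```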
@ -18,6 +18,8 @@
#

from oslo_log import log
import six
import voluptuous

from watcher.applier.actions import base
@ -27,9 +29,18 @@ LOG = log.getLogger(__name__)
class Nop(base.BaseAction):

MESSAGE = 'message'

@property
def schema(self):
return voluptuous.Schema({
voluptuous.Required(self.MESSAGE): voluptuous.Any(
voluptuous.Any(*six.string_types), None)
})

@property
def message(self):
return self.input_parameters.get('message')
return self.input_parameters.get(self.MESSAGE)

def execute(self):
LOG.debug("executing action NOP message:%s ", self.message)
@ -18,19 +18,29 @@
#
import time


from oslo_log import log
import voluptuous

from watcher.applier.actions import base


LOG = log.getLogger(__name__)


class Sleep(base.BaseAction):

DURATION = 'duration'

@property
def schema(self):
return voluptuous.Schema({
voluptuous.Required(self.DURATION, default=1):
voluptuous.All(float, voluptuous.Range(min=0))
})

@property
def duration(self):
return int(self.input_parameters.get('duration'))
return int(self.input_parameters.get(self.DURATION))

def execute(self):
LOG.debug("Starting action Sleep duration:%s ", self.duration)
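Two voluptuous behaviours are worth noting for this schema, and they are exactly what the new Sleep unit tests further down exercise: a bare Python type such as float validates with isinstance (no coercion, so a string duration is rejected), and unknown keys are rejected because Schema defaults to PREVENT_EXTRA. An illustrative check:

```python
import voluptuous

schema = voluptuous.Schema({
    voluptuous.Required('duration', default=1):
        voluptuous.All(float, voluptuous.Range(min=0)),
})

schema({'duration': 1.0})           # ok
# schema({'duration': "ef"})        # Invalid: "ef" is not a float instance
# schema({'duration': 1.0,
#         'not_required': 'nop'})   # Invalid: extra keys are not allowed
```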
@ -279,3 +279,7 @@ class HypervisorNotFound(WatcherException):
class LoadingError(WatcherException):
msg_fmt = _("Error loading plugin '%(name)s'")


class ReservedWord(WatcherException):
msg_fmt = _("The identifier '%(name)s' is a reserved word")
@ -160,7 +160,6 @@ class Action(Base):
nullable=False)
# only for the first version
action_type = Column(String(255), nullable=False)
applies_to = Column(String(255), nullable=True)
input_parameters = Column(JSONEncodedDict, nullable=True)
state = Column(String(20), nullable=True)
# todo(jed) remove parameter alarm
@ -38,14 +38,12 @@ class DefaultPlanner(base.BasePlanner):
def create_action(self,
action_plan_id,
action_type,
applies_to,
input_parameters=None):
uuid = utils.generate_uuid()
action = {
'uuid': uuid,
'action_plan_id': int(action_plan_id),
'action_type': action_type,
'applies_to': applies_to,
'input_parameters': input_parameters,
'state': objects.action.State.PENDING,
'alarm': None,
@ -63,8 +61,6 @@ class DefaultPlanner(base.BasePlanner):
json_action = self.create_action(action_plan_id=action_plan.id,
action_type=action.get(
'action_type'),
applies_to=action.get(
'applies_to'),
input_parameters=action.get(
'input_parameters'))
to_schedule.append((self.priorities[action.get('action_type')],
@ -87,13 +87,13 @@ class BaseSolution(object):
@abc.abstractmethod
def add_action(self,
action_type,
applies_to,
resource_id,
input_parameters=None):
"""Add a new Action in the Action Plan

:param action_type: the unique id of an action type defined in
entry point 'watcher_actions'
:param applies_to: the unique id of the resource to which the
:param resource_id: the unique id of the resource to which the
`Action` applies.
:param input_parameters: An array of input parameters provided as
key-value pairs of strings. Each key-pair contains names and
@ -18,12 +18,14 @@
#
from oslo_log import log

from watcher.decision_engine.solution.base import BaseSolution
from watcher.applier.actions import base as baction
from watcher.common import exception
from watcher.decision_engine.solution import base

LOG = log.getLogger(__name__)


class DefaultSolution(BaseSolution):
class DefaultSolution(base.BaseSolution):
def __init__(self):
"""Stores a set of actions generated by a strategy
@ -34,12 +36,17 @@ class DefaultSolution(BaseSolution):
self._actions = []

def add_action(self, action_type,
applies_to,
input_parameters=None):
# todo(jed) add https://pypi.python.org/pypi/schema
input_parameters=None,
resource_id=None):

if input_parameters is not None:
if baction.BaseAction.RESOURCE_ID in input_parameters.keys():
raise exception.ReservedWord(name=baction.BaseAction.
RESOURCE_ID)
if resource_id is not None:
input_parameters[baction.BaseAction.RESOURCE_ID] = resource_id
action = {
'action_type': action_type,
'applies_to': applies_to,
'input_parameters': input_parameters
}
self._actions.append(action)
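This guard is why the new ReservedWord exception exists: strategies hand the target resource to add_action through the dedicated resource_id argument, and add_action nests it into input_parameters itself, so a strategy that sets its own 'resource_id' key is rejected. A condensed standalone version of that logic, with a plain ValueError in place of exception.ReservedWord:

```python
RESOURCE_ID = 'resource_id'


def build_parameters(input_parameters=None, resource_id=None):
    # Strategies must not set 'resource_id' themselves...
    if input_parameters is not None and RESOURCE_ID in input_parameters:
        raise ValueError("The identifier '%s' is a reserved word"
                         % RESOURCE_ID)
    parameters = dict(input_parameters or {})
    # ...add_action nests it into input_parameters on their behalf.
    if resource_id is not None:
        parameters[RESOURCE_ID] = resource_id
    return parameters


build_parameters({'state': 'down'}, resource_id='compute-1')
# -> {'state': 'down', 'resource_id': 'compute-1'}
# build_parameters({'resource_id': 'compute-1'})  # raises ValueError
```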
@ -329,14 +329,14 @@ class BasicConsolidation(BaseStrategy):
0,
0)

def add_change_service_state(self, applies_to, state):
def add_change_service_state(self, resource_id, state):
parameters = {'state': state}
self.solution.add_action(action_type=self.CHANGE_NOVA_SERVICE_STATE,
applies_to=applies_to,
resource_id=resource_id,
input_parameters=parameters)

def add_migration(self,
applies_to,
resource_id,
migration_type,
src_hypervisor,
dst_hypervisor):
@ -344,7 +344,7 @@ class BasicConsolidation(BaseStrategy):
'src_hypervisor': src_hypervisor,
'dst_hypervisor': dst_hypervisor}
self.solution.add_action(action_type=self.MIGRATION,
applies_to=applies_to,
resource_id=resource_id,
input_parameters=parameters)

def score_of_nodes(self, current_model, score):
@ -18,12 +18,12 @@
#
from oslo_log import log

from watcher.decision_engine.strategy.strategies.base import BaseStrategy
from watcher.decision_engine.strategy.strategies import base

LOG = log.getLogger(__name__)


class DummyStrategy(BaseStrategy):
class DummyStrategy(base.BaseStrategy):
DEFAULT_NAME = "dummy"
DEFAULT_DESCRIPTION = "Dummy Strategy"
@ -35,17 +35,15 @@ class DummyStrategy(BaseStrategy):
super(DummyStrategy, self).__init__(name, description, osc)

def execute(self, model):
LOG.debug("Executing Dummy strategy")
parameters = {'message': 'hello World'}
self.solution.add_action(action_type=self.NOP,
applies_to="",
input_parameters=parameters)

parameters = {'message': 'Welcome'}
self.solution.add_action(action_type=self.NOP,
applies_to="",
input_parameters=parameters)

self.solution.add_action(action_type=self.SLEEP,
applies_to="",
input_parameters={'duration': '5'})
input_parameters={'duration': 5.0})
return self.solution
@ -20,16 +20,16 @@ from oslo_log import log
from watcher._i18n import _LE
from watcher.common import exception as wexc
from watcher.decision_engine.model.resource import ResourceType
from watcher.decision_engine.model.vm_state import VMState
from watcher.decision_engine.strategy.strategies.base import BaseStrategy
from watcher.metrics_engine.cluster_history.ceilometer import \
CeilometerClusterHistory
from watcher.decision_engine.model import resource
from watcher.decision_engine.model import vm_state
from watcher.decision_engine.strategy.strategies import base
from watcher.metrics_engine.cluster_history import ceilometer as ceil


LOG = log.getLogger(__name__)


class OutletTempControl(BaseStrategy):
class OutletTempControl(base.BaseStrategy):

DEFAULT_NAME = "outlet_temp_control"
DEFAULT_DESCRIPTION = "outlet temperature based migration strategy"
@ -81,7 +81,7 @@ class OutletTempControl(BaseStrategy):
@property
def ceilometer(self):
if self._ceilometer is None:
self._ceilometer = CeilometerClusterHistory(osc=self.osc)
self._ceilometer = ceil.CeilometerClusterHistory(osc=self.osc)
return self._ceilometer

@ceilometer.setter
@ -104,7 +104,7 @@ class OutletTempControl(BaseStrategy):
return vcpus_used, memory_mb_used, disk_gb_used

def group_hosts_by_outlet_temp(self, model):
'''Group hosts based on outlet temp meters'''
"""Group hosts based on outlet temp meters"""

hypervisors = model.get_all_hypervisors()
size_cluster = len(hypervisors)
@ -137,7 +137,7 @@ class OutletTempControl(BaseStrategy):
return hosts_need_release, hosts_target

def choose_vm_to_migrate(self, model, hosts):
'''pick up an active vm instance to migrate from provided hosts'''
"""pick up an active vm instance to migrate from provided hosts"""

for hvmap in hosts:
mig_src_hypervisor = hvmap['hv']
@ -147,7 +147,7 @@ class OutletTempControl(BaseStrategy):
try:
# select the first active VM to migrate
vm = model.get_vm_from_id(vm_id)
if vm.state != VMState.ACTIVE.value:
if vm.state != vm_state.VMState.ACTIVE.value:
LOG.info(_LE("VM not active, skipped: %s"),
vm.uuid)
continue
@ -159,11 +159,11 @@ class OutletTempControl(BaseStrategy):
return None

def filter_dest_servers(self, model, hosts, vm_to_migrate):
'''Only return hosts with sufficient available resources'''
"""Only return hosts with sufficient available resources"""

cap_cores = model.get_resource_from_id(ResourceType.cpu_cores)
cap_disk = model.get_resource_from_id(ResourceType.disk)
cap_mem = model.get_resource_from_id(ResourceType.memory)
cap_cores = model.get_resource_from_id(resource.ResourceType.cpu_cores)
cap_disk = model.get_resource_from_id(resource.ResourceType.disk)
cap_mem = model.get_resource_from_id(resource.ResourceType.memory)

required_cores = cap_cores.get_capacity(vm_to_migrate)
required_disk = cap_disk.get_capacity(vm_to_migrate)
@ -239,10 +239,10 @@ class OutletTempControl(BaseStrategy):
mig_src_hypervisor,
mig_dst_hypervisor):
parameters = {'migration_type': 'live',
'src_hypervisor': mig_src_hypervisor,
'dst_hypervisor': mig_dst_hypervisor}
'src_hypervisor': mig_src_hypervisor.uuid,
'dst_hypervisor': mig_dst_hypervisor.uuid}
self.solution.add_action(action_type=self.MIGRATION,
applies_to=vm_src,
resource_id=vm_src.uuid,
input_parameters=parameters)

self.solution.model = current_model
@ -8,7 +8,7 @@ msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: python-watcher 0.21.1.dev32\n"
|
||||
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
|
||||
"POT-Creation-Date: 2016-01-26 11:26+0100\n"
|
||||
"POT-Creation-Date: 2016-02-09 09:07+0100\n"
|
||||
"PO-Revision-Date: 2015-12-11 15:42+0100\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language: fr\n"
|
||||
@ -24,7 +24,7 @@ msgstr ""
|
||||
msgid "Invalid state: %(state)s"
|
||||
msgstr "État invalide : %(state)s"
|
||||
|
||||
#: watcher/api/controllers/v1/action_plan.py:420
|
||||
#: watcher/api/controllers/v1/action_plan.py:422
|
||||
#, python-format
|
||||
msgid "State transition not allowed: (%(initial_state)s -> %(new_state)s)"
|
||||
msgstr "Transition d'état non autorisée : (%(initial_state)s -> %(new_state)s)"
|
||||
@ -85,21 +85,30 @@ msgstr ""
|
||||
msgid "Error parsing HTTP response: %s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/actions/change_nova_service_state.py:58
|
||||
#: watcher/applier/actions/change_nova_service_state.py:69
|
||||
msgid "The target state is not defined"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/workflow_engine/default.py:126
|
||||
#: watcher/applier/actions/migration.py:43
|
||||
msgid "The parameter resource_id is invalid."
|
||||
msgstr "Le paramètre resource_id est invalide"
|
||||
|
||||
#: watcher/applier/actions/migration.py:86
|
||||
#, python-format
|
||||
msgid "Migration of type %(migration_type)s is not supported."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/workflow_engine/default.py:128
|
||||
#, python-format
|
||||
msgid "The WorkFlow Engine has failed to execute the action %s"
|
||||
msgstr "Le moteur de workflow a echoué lors de l'éxécution de l'action %s"
|
||||
|
||||
#: watcher/applier/workflow_engine/default.py:144
|
||||
#: watcher/applier/workflow_engine/default.py:146
|
||||
#, python-format
|
||||
msgid "Revert action %s"
|
||||
msgstr "Annulation de l'action %s"
|
||||
|
||||
#: watcher/applier/workflow_engine/default.py:150
|
||||
#: watcher/applier/workflow_engine/default.py:152
|
||||
msgid "Oops! We need disaster recover plan"
|
||||
msgstr "Oops! Nous avons besoin d'un plan de reprise d'activité"
|
||||
|
||||
@ -119,184 +128,210 @@ msgstr "Sert sur 0.0.0.0:%(port)s, accessible à http://127.0.0.1:%(port)s"
|
||||
msgid "serving on http://%(host)s:%(port)s"
|
||||
msgstr "Sert sur http://%(host)s:%(port)s"
|
||||
|
||||
#: watcher/common/exception.py:51
|
||||
#: watcher/common/clients.py:29
|
||||
msgid "Version of Nova API to use in novaclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/clients.py:34
|
||||
msgid "Version of Glance API to use in glanceclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/clients.py:39
|
||||
msgid "Version of Cinder API to use in cinderclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/clients.py:44
|
||||
msgid "Version of Ceilometer API to use in ceilometerclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/clients.py:50
|
||||
msgid "Version of Neutron API to use in neutronclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:59
|
||||
#, python-format
|
||||
msgid "Unexpected keystone client error occurred: %s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:72
|
||||
msgid "An unknown exception occurred"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:71
|
||||
#: watcher/common/exception.py:92
|
||||
msgid "Exception in string format operation"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:101
|
||||
#: watcher/common/exception.py:122
|
||||
msgid "Not authorized"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:106
|
||||
#: watcher/common/exception.py:127
|
||||
msgid "Operation not permitted"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:110
|
||||
#: watcher/common/exception.py:131
|
||||
msgid "Unacceptable parameters"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:115
|
||||
#: watcher/common/exception.py:136
|
||||
#, python-format
|
||||
msgid "The %(name)s %(id)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:119
|
||||
#: watcher/common/exception.py:140
|
||||
#, fuzzy
|
||||
msgid "Conflict"
|
||||
msgstr "Conflit"
|
||||
|
||||
#: watcher/common/exception.py:124
|
||||
#: watcher/common/exception.py:145
|
||||
#, python-format
|
||||
msgid "The %(name)s resource %(id)s could not be found"
|
||||
msgstr "La ressource %(name)s / %(id)s est introuvable"
|
||||
|
||||
#: watcher/common/exception.py:129
|
||||
#: watcher/common/exception.py:150
|
||||
#, python-format
|
||||
msgid "Expected an uuid or int but received %(identity)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:133
|
||||
#: watcher/common/exception.py:154
|
||||
#, python-format
|
||||
msgid "Goal %(goal)s is not defined in Watcher configuration file"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:139
|
||||
#, python-format
|
||||
msgid "%(err)s"
|
||||
msgstr "%(err)s"
|
||||
|
||||
#: watcher/common/exception.py:143
|
||||
#: watcher/common/exception.py:158
|
||||
#, python-format
|
||||
msgid "Expected a uuid but received %(uuid)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:147
|
||||
#: watcher/common/exception.py:162
|
||||
#, python-format
|
||||
msgid "Expected a logical name but received %(name)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:151
|
||||
#: watcher/common/exception.py:166
|
||||
#, python-format
|
||||
msgid "Expected a logical name or uuid but received %(name)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:155
|
||||
#: watcher/common/exception.py:170
|
||||
#, python-format
|
||||
msgid "AuditTemplate %(audit_template)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:159
|
||||
#: watcher/common/exception.py:174
|
||||
#, python-format
|
||||
msgid "An audit_template with UUID %(uuid)s or name %(name)s already exists"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:164
|
||||
#: watcher/common/exception.py:179
|
||||
#, python-format
|
||||
msgid "AuditTemplate %(audit_template)s is referenced by one or multiple audit"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:169
|
||||
#: watcher/common/exception.py:184
|
||||
#, python-format
|
||||
msgid "Audit %(audit)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:173
|
||||
#: watcher/common/exception.py:188
|
||||
#, python-format
|
||||
msgid "An audit with UUID %(uuid)s already exists"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:177
|
||||
#: watcher/common/exception.py:192
|
||||
#, python-format
|
||||
msgid "Audit %(audit)s is referenced by one or multiple action plans"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:182
|
||||
#: watcher/common/exception.py:197
|
||||
#, python-format
|
||||
msgid "ActionPlan %(action_plan)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:186
|
||||
#: watcher/common/exception.py:201
|
||||
#, python-format
|
||||
msgid "An action plan with UUID %(uuid)s already exists"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:190
|
||||
#: watcher/common/exception.py:205
|
||||
#, python-format
|
||||
msgid "Action Plan %(action_plan)s is referenced by one or multiple actions"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:195
|
||||
#: watcher/common/exception.py:210
|
||||
#, python-format
|
||||
msgid "Action %(action)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:199
|
||||
#: watcher/common/exception.py:214
|
||||
#, python-format
|
||||
msgid "An action with UUID %(uuid)s already exists"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:203
|
||||
#: watcher/common/exception.py:218
|
||||
#, python-format
|
||||
msgid "Action plan %(action_plan)s is referenced by one or multiple goals"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:208
|
||||
#: watcher/common/exception.py:223
|
||||
msgid "Filtering actions on both audit and action-plan is prohibited"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:217
|
||||
#: watcher/common/exception.py:232
|
||||
#, python-format
|
||||
msgid "Couldn't apply patch '%(patch)s'. Reason: %(reason)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:224
|
||||
#: watcher/common/exception.py:239
|
||||
msgid "Illegal argument"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:228
|
||||
#: watcher/common/exception.py:243
|
||||
msgid "No such metric"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:232
|
||||
#: watcher/common/exception.py:247
|
||||
msgid "No rows were returned"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:236
|
||||
#: watcher/common/exception.py:251
|
||||
#, python-format
|
||||
msgid "%(client)s connection failed. Reason: %(reason)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:255
|
||||
msgid "'Keystone API endpoint is missing''"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:240
|
||||
#: watcher/common/exception.py:259
|
||||
msgid "The list of hypervisor(s) in the cluster is empty"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:244
|
||||
#: watcher/common/exception.py:263
|
||||
msgid "The metrics resource collector is not defined"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:248
|
||||
#: watcher/common/exception.py:267
|
||||
msgid "the cluster state is not defined"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:254
|
||||
#: watcher/common/exception.py:273
|
||||
#, python-format
|
||||
msgid "The instance '%(name)s' is not found"
|
||||
msgstr "L'instance '%(name)s' n'a pas été trouvée"
|
||||
|
||||
#: watcher/common/exception.py:258
|
||||
#: watcher/common/exception.py:277
|
||||
msgid "The hypervisor is not found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:262
|
||||
#: watcher/common/exception.py:281
|
||||
#, fuzzy, python-format
|
||||
msgid "Error loading plugin '%(name)s'"
|
||||
msgstr "Erreur lors du chargement du module '%(name)s'"
|
||||
|
||||
#: watcher/common/keystone.py:59
|
||||
msgid "No Keystone service catalog loaded"
|
||||
#: watcher/common/exception.py:285
|
||||
#, fuzzy, python-format
|
||||
msgid "The identifier '%(name)s' is a reserved word"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/service.py:83
|
||||
@ -347,18 +382,22 @@ msgid ""
|
||||
"template uuid instead"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/api.py:277
|
||||
msgid "Cannot overwrite UUID for an existing AuditTemplate."
|
||||
#: watcher/db/sqlalchemy/api.py:278
|
||||
msgid "Cannot overwrite UUID for an existing Audit Template."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/api.py:386 watcher/db/sqlalchemy/api.py:586
|
||||
#: watcher/db/sqlalchemy/api.py:388
|
||||
msgid "Cannot overwrite UUID for an existing Audit."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/api.py:477
|
||||
#: watcher/db/sqlalchemy/api.py:480
|
||||
msgid "Cannot overwrite UUID for an existing Action."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/api.py:590
|
||||
msgid "Cannot overwrite UUID for an existing Action Plan."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/migration.py:73
|
||||
msgid ""
|
||||
"Watcher database schema is already under version control; use upgrade() "
|
||||
@ -370,44 +409,44 @@ msgstr ""
|
||||
msgid "'obj' argument type is not valid"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/planner/default.py:76
|
||||
#: watcher/decision_engine/planner/default.py:72
|
||||
msgid "The action plan is empty"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/selection/default.py:59
|
||||
#: watcher/decision_engine/strategy/selection/default.py:60
|
||||
#, python-format
|
||||
msgid "Incorrect mapping: could not find associated strategy for '%s'"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:267
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:314
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:269
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:316
|
||||
#, python-format
|
||||
msgid "No values returned by %(resource_id)s for %(metric_name)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:424
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:426
|
||||
msgid "Initializing Sercon Consolidation"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:468
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:470
|
||||
msgid "The workloads of the compute nodes of the cluster is zero"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:125
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:127
|
||||
#, python-format
|
||||
msgid "%s: no outlet temp data"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:149
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:151
|
||||
#, python-format
|
||||
msgid "VM not active, skipped: %s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:206
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:208
|
||||
msgid "No hosts under outlet temp threshold found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:229
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:231
|
||||
msgid "No proper target host could be found"
|
||||
msgstr ""
|
||||
|
||||
@ -582,9 +621,20 @@ msgstr ""
|
||||
#~ msgid "Description cannot be empty"
|
||||
#~ msgstr ""
|
||||
|
||||
#~ msgid ""
|
||||
#~ "Failed to remove trailing character. "
|
||||
#~ "Returning original object. Supplied object "
|
||||
#~ "is not a string: %s,"
|
||||
#~ msgid "The hypervisor state is invalid."
|
||||
#~ msgstr "L'état de l'hyperviseur est invalide"
|
||||
|
||||
#~ msgid "%(err)s"
|
||||
#~ msgstr "%(err)s"
|
||||
|
||||
#~ msgid "No Keystone service catalog loaded"
|
||||
#~ msgstr ""
|
||||
|
||||
#~ msgid "Cannot overwrite UUID for an existing AuditTemplate."
|
||||
#~ msgstr ""
|
||||
|
||||
#~ msgid ""
|
||||
#~ "This identifier is reserved word and "
|
||||
#~ "cannot be used as variables '%(name)s'"
|
||||
#~ msgstr ""
|
||||
|
||||
|
@ -7,9 +7,9 @@
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: python-watcher 0.23.2.dev1\n"
|
||||
"Project-Id-Version: python-watcher 0.23.3.dev2\n"
|
||||
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
|
||||
"POT-Creation-Date: 2016-01-26 11:26+0100\n"
|
||||
"POT-Creation-Date: 2016-02-09 09:07+0100\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
@ -23,7 +23,7 @@ msgstr ""
|
||||
msgid "Invalid state: %(state)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/api/controllers/v1/action_plan.py:420
|
||||
#: watcher/api/controllers/v1/action_plan.py:422
|
||||
#, python-format
|
||||
msgid "State transition not allowed: (%(initial_state)s -> %(new_state)s)"
|
||||
msgstr ""
|
||||
@ -84,25 +84,30 @@ msgstr ""
|
||||
msgid "Error parsing HTTP response: %s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/actions/change_nova_service_state.py:58
|
||||
#: watcher/applier/actions/change_nova_service_state.py:69
|
||||
msgid "The target state is not defined"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/actions/migration.py:60
|
||||
#: watcher/applier/actions/migration.py:43
|
||||
msgid "The parameter resource_id is invalid."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/actions/migration.py:86
|
||||
#, python-format
|
||||
msgid "Migration of type %(migration_type)s is not supported."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/workflow_engine/default.py:126
|
||||
#: watcher/applier/workflow_engine/default.py:128
|
||||
#, python-format
|
||||
msgid "The WorkFlow Engine has failed to execute the action %s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/workflow_engine/default.py:144
|
||||
#: watcher/applier/workflow_engine/default.py:146
|
||||
#, python-format
|
||||
msgid "Revert action %s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/applier/workflow_engine/default.py:150
|
||||
#: watcher/applier/workflow_engine/default.py:152
|
||||
msgid "Oops! We need disaster recover plan"
|
||||
msgstr ""
|
||||
|
||||
@ -122,178 +127,209 @@ msgstr ""
|
||||
msgid "serving on http://%(host)s:%(port)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:51
|
||||
#: watcher/common/clients.py:29
|
||||
msgid "Version of Nova API to use in novaclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/clients.py:34
|
||||
msgid "Version of Glance API to use in glanceclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/clients.py:39
|
||||
msgid "Version of Cinder API to use in cinderclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/clients.py:44
|
||||
msgid "Version of Ceilometer API to use in ceilometerclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/clients.py:50
|
||||
msgid "Version of Neutron API to use in neutronclient."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:59
|
||||
#, python-format
|
||||
msgid "Unexpected keystone client error occurred: %s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:72
|
||||
msgid "An unknown exception occurred"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:71
|
||||
#: watcher/common/exception.py:92
|
||||
msgid "Exception in string format operation"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:101
|
||||
#: watcher/common/exception.py:122
|
||||
msgid "Not authorized"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:106
|
||||
#: watcher/common/exception.py:127
|
||||
msgid "Operation not permitted"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:110
|
||||
#: watcher/common/exception.py:131
|
||||
msgid "Unacceptable parameters"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:115
|
||||
#: watcher/common/exception.py:136
|
||||
#, python-format
|
||||
msgid "The %(name)s %(id)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:119
|
||||
#: watcher/common/exception.py:140
|
||||
msgid "Conflict"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:124
|
||||
#: watcher/common/exception.py:145
|
||||
#, python-format
|
||||
msgid "The %(name)s resource %(id)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:129
|
||||
#: watcher/common/exception.py:150
|
||||
#, python-format
|
||||
msgid "Expected an uuid or int but received %(identity)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:133
|
||||
#: watcher/common/exception.py:154
|
||||
#, python-format
|
||||
msgid "Goal %(goal)s is not defined in Watcher configuration file"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:143
|
||||
#: watcher/common/exception.py:158
|
||||
#, python-format
|
||||
msgid "Expected a uuid but received %(uuid)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:147
|
||||
#: watcher/common/exception.py:162
|
||||
#, python-format
|
||||
msgid "Expected a logical name but received %(name)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:151
|
||||
#: watcher/common/exception.py:166
|
||||
#, python-format
|
||||
msgid "Expected a logical name or uuid but received %(name)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:155
|
||||
#: watcher/common/exception.py:170
|
||||
#, python-format
|
||||
msgid "AuditTemplate %(audit_template)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:159
|
||||
#: watcher/common/exception.py:174
|
||||
#, python-format
|
||||
msgid "An audit_template with UUID %(uuid)s or name %(name)s already exists"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:164
|
||||
#: watcher/common/exception.py:179
|
||||
#, python-format
|
||||
msgid "AuditTemplate %(audit_template)s is referenced by one or multiple audit"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:169
|
||||
#: watcher/common/exception.py:184
|
||||
#, python-format
|
||||
msgid "Audit %(audit)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:173
|
||||
#: watcher/common/exception.py:188
|
||||
#, python-format
|
||||
msgid "An audit with UUID %(uuid)s already exists"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:177
|
||||
#: watcher/common/exception.py:192
|
||||
#, python-format
|
||||
msgid "Audit %(audit)s is referenced by one or multiple action plans"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:182
|
||||
#: watcher/common/exception.py:197
|
||||
#, python-format
|
||||
msgid "ActionPlan %(action_plan)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:186
|
||||
#: watcher/common/exception.py:201
|
||||
#, python-format
|
||||
msgid "An action plan with UUID %(uuid)s already exists"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:190
|
||||
#: watcher/common/exception.py:205
|
||||
#, python-format
|
||||
msgid "Action Plan %(action_plan)s is referenced by one or multiple actions"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:195
|
||||
#: watcher/common/exception.py:210
|
||||
#, python-format
|
||||
msgid "Action %(action)s could not be found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:199
|
||||
#: watcher/common/exception.py:214
|
||||
#, python-format
|
||||
msgid "An action with UUID %(uuid)s already exists"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:203
|
||||
#: watcher/common/exception.py:218
|
||||
#, python-format
|
||||
msgid "Action plan %(action_plan)s is referenced by one or multiple goals"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:208
|
||||
#: watcher/common/exception.py:223
|
||||
msgid "Filtering actions on both audit and action-plan is prohibited"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:217
|
||||
#: watcher/common/exception.py:232
|
||||
#, python-format
|
||||
msgid "Couldn't apply patch '%(patch)s'. Reason: %(reason)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:224
|
||||
#: watcher/common/exception.py:239
|
||||
msgid "Illegal argument"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:228
|
||||
#: watcher/common/exception.py:243
|
||||
msgid "No such metric"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:232
|
||||
#: watcher/common/exception.py:247
|
||||
msgid "No rows were returned"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:236
|
||||
#: watcher/common/exception.py:251
|
||||
#, python-format
|
||||
msgid "%(client)s connection failed. Reason: %(reason)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:255
|
||||
msgid "'Keystone API endpoint is missing''"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:240
|
||||
#: watcher/common/exception.py:259
|
||||
msgid "The list of hypervisor(s) in the cluster is empty"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:244
|
||||
#: watcher/common/exception.py:263
|
||||
msgid "The metrics resource collector is not defined"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:248
|
||||
#: watcher/common/exception.py:267
|
||||
msgid "the cluster state is not defined"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:254
|
||||
#: watcher/common/exception.py:273
|
||||
#, python-format
|
||||
msgid "The instance '%(name)s' is not found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:258
|
||||
#: watcher/common/exception.py:277
|
||||
msgid "The hypervisor is not found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/exception.py:262
|
||||
#: watcher/common/exception.py:281
|
||||
#, python-format
|
||||
msgid "Error loading plugin '%(name)s'"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/keystone.py:59
|
||||
msgid "No Keystone service catalog loaded"
|
||||
#: watcher/common/exception.py:285
|
||||
#, python-format
|
||||
msgid "The identifier '%(name)s' is a reserved word"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/common/service.py:83
|
||||
@ -344,18 +380,22 @@ msgid ""
|
||||
"template uuid instead"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/api.py:277
|
||||
msgid "Cannot overwrite UUID for an existing AuditTemplate."
|
||||
#: watcher/db/sqlalchemy/api.py:278
|
||||
msgid "Cannot overwrite UUID for an existing Audit Template."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/api.py:386 watcher/db/sqlalchemy/api.py:586
|
||||
#: watcher/db/sqlalchemy/api.py:388
|
||||
msgid "Cannot overwrite UUID for an existing Audit."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/api.py:477
|
||||
#: watcher/db/sqlalchemy/api.py:480
|
||||
msgid "Cannot overwrite UUID for an existing Action."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/api.py:590
|
||||
msgid "Cannot overwrite UUID for an existing Action Plan."
|
||||
msgstr ""
|
||||
|
||||
#: watcher/db/sqlalchemy/migration.py:73
|
||||
msgid ""
|
||||
"Watcher database schema is already under version control; use upgrade() "
|
||||
@ -367,44 +407,44 @@ msgstr ""
|
||||
msgid "'obj' argument type is not valid"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/planner/default.py:76
|
||||
#: watcher/decision_engine/planner/default.py:72
|
||||
msgid "The action plan is empty"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/selection/default.py:59
|
||||
#: watcher/decision_engine/strategy/selection/default.py:60
|
||||
#, python-format
|
||||
msgid "Incorrect mapping: could not find associated strategy for '%s'"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:267
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:314
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:269
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:316
|
||||
#, python-format
|
||||
msgid "No values returned by %(resource_id)s for %(metric_name)s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:424
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:426
|
||||
msgid "Initializing Sercon Consolidation"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:468
|
||||
#: watcher/decision_engine/strategy/strategies/basic_consolidation.py:470
|
||||
msgid "The workloads of the compute nodes of the cluster is zero"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:125
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:127
|
||||
#, python-format
|
||||
msgid "%s: no outlet temp data"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:149
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:151
|
||||
#, python-format
|
||||
msgid "VM not active, skipped: %s"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:206
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:208
|
||||
msgid "No hosts under outlet temp threshold found"
|
||||
msgstr ""
|
||||
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:229
|
||||
#: watcher/decision_engine/strategy/strategies/outlet_temp_control.py:231
|
||||
msgid "No proper target host could be found"
|
||||
msgstr ""
|
||||
|
||||
|
@ -17,32 +17,30 @@
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
|
||||
from watcher.decision_engine.model.hypervisor import Hypervisor
|
||||
from watcher.decision_engine.model.model_root import ModelRoot
|
||||
from watcher.decision_engine.model.resource import Resource
|
||||
from watcher.decision_engine.model.resource import ResourceType
|
||||
from watcher.decision_engine.model.vm import VM
|
||||
from watcher.metrics_engine.cluster_model_collector.api import \
|
||||
BaseClusterModelCollector
|
||||
from watcher.decision_engine.model import hypervisor as obj_hypervisor
|
||||
from watcher.decision_engine.model import model_root
|
||||
from watcher.decision_engine.model import resource
|
||||
from watcher.decision_engine.model import vm as obj_vm
|
||||
from watcher.metrics_engine.cluster_model_collector import api
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class NovaClusterModelCollector(BaseClusterModelCollector):
|
||||
class NovaClusterModelCollector(api.BaseClusterModelCollector):
|
||||
def __init__(self, wrapper):
|
||||
super(NovaClusterModelCollector, self).__init__()
|
||||
self.wrapper = wrapper
|
||||
|
||||
def get_latest_cluster_data_model(self):
|
||||
LOG.debug("Getting latest cluster data model")
|
||||
|
||||
cluster = ModelRoot()
|
||||
mem = Resource(ResourceType.memory)
|
||||
num_cores = Resource(ResourceType.cpu_cores)
|
||||
disk = Resource(ResourceType.disk)
|
||||
cluster = model_root.ModelRoot()
|
||||
mem = resource.Resource(resource.ResourceType.memory)
|
||||
num_cores = resource.Resource(resource.ResourceType.cpu_cores)
|
||||
disk = resource.Resource(resource.ResourceType.disk)
|
||||
cluster.create_resource(mem)
|
||||
cluster.create_resource(num_cores)
|
||||
cluster.create_resource(disk)
|
||||
@ -52,7 +50,7 @@ class NovaClusterModelCollector(BaseClusterModelCollector):
|
||||
for h in hypervisors:
|
||||
service = self.wrapper.nova.services.find(id=h.service['id'])
|
||||
# create hypervisor in cluster_model_collector
|
||||
hypervisor = Hypervisor()
|
||||
hypervisor = obj_hypervisor.Hypervisor()
|
||||
hypervisor.uuid = service.host
|
||||
hypervisor.hostname = h.hypervisor_hostname
|
||||
# set capacity
|
||||
@ -65,7 +63,7 @@ class NovaClusterModelCollector(BaseClusterModelCollector):
|
||||
vms = self.wrapper.get_vms_by_hypervisor(str(service.host))
|
||||
for v in vms:
|
||||
# create VM in cluster_model_collector
|
||||
vm = VM()
|
||||
vm = obj_vm.VM()
|
||||
vm.uuid = v.id
|
||||
# nova/nova/compute/vm_states.py
|
||||
vm.state = getattr(v, 'OS-EXT-STS:vm_state')
|
||||
|
@ -42,7 +42,6 @@ class Action(base.WatcherObject):
|
||||
'uuid': obj_utils.str_or_none,
|
||||
'action_plan_id': obj_utils.int_or_none,
|
||||
'action_type': obj_utils.str_or_none,
|
||||
'applies_to': obj_utils.str_or_none,
|
||||
'input_parameters': obj_utils.dict_or_none,
|
||||
'state': obj_utils.str_or_none,
|
||||
# todo(jed) remove parameter alarm
|
||||
|
@ -0,0 +1,54 @@
|
||||
# -*- encoding: utf-8 -*-
|
||||
# Copyright (c) 2016 b<>com
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import voluptuous
|
||||
|
||||
from watcher.applier.actions import base as baction
|
||||
from watcher.applier.actions import change_nova_service_state
|
||||
from watcher.decision_engine.model import hypervisor_state as hstate
|
||||
from watcher.tests import base
|
||||
|
||||
|
||||
class TestChangeNovaServiceState(base.TestCase):
|
||||
def setUp(self):
|
||||
super(TestChangeNovaServiceState, self).setUp()
|
||||
self.a = change_nova_service_state.ChangeNovaServiceState()
|
||||
|
||||
def test_parameters_down(self):
|
||||
self.a.input_parameters = {
|
||||
baction.BaseAction.RESOURCE_ID: "compute-1",
|
||||
self.a.STATE: hstate.HypervisorState.OFFLINE.value}
|
||||
self.assertEqual(True, self.a.validate_parameters())
|
||||
|
||||
def test_parameters_up(self):
|
||||
self.a.input_parameters = {
|
||||
baction.BaseAction.RESOURCE_ID: "compute-1",
|
||||
self.a.STATE: hstate.HypervisorState.ONLINE.value}
|
||||
self.assertEqual(True, self.a.validate_parameters())
|
||||
|
||||
def test_parameters_exception_wrong_state(self):
|
||||
self.a.input_parameters = {
|
||||
baction.BaseAction.RESOURCE_ID: "compute-1",
|
||||
self.a.STATE: 'error'}
|
||||
self.assertRaises(voluptuous.Invalid, self.a.validate_parameters)
|
||||
|
||||
def test_parameters_resource_id_empty(self):
|
||||
self.a.input_parameters = {
|
||||
self.a.STATE: None}
|
||||
self.assertRaises(voluptuous.Invalid, self.a.validate_parameters)
|
||||
|
||||
def test_parameters_applies_add_extra(self):
|
||||
self.a.input_parameters = {"extra": "failed"}
|
||||
self.assertRaises(voluptuous.Invalid, self.a.validate_parameters)
|
watcher/tests/applier/actions/test_migration.py (new file, 78 lines)
@ -0,0 +1,78 @@
|
||||
# -*- encoding: utf-8 -*-
|
||||
# Copyright (c) 2016 b<>com
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import voluptuous
|
||||
|
||||
from watcher.applier.actions import base as baction
|
||||
from watcher.applier.actions import migration
|
||||
from watcher.tests import base
|
||||
|
||||
|
||||
class TestMigration(base.TestCase):
|
||||
def setUp(self):
|
||||
super(TestMigration, self).setUp()
|
||||
self.mig = migration.Migrate()
|
||||
|
||||
def test_parameters(self):
|
||||
params = {baction.BaseAction.RESOURCE_ID:
|
||||
"45a37aeb-95ab-4ddb-a305-7d9f62c2f5ba",
|
||||
self.mig.MIGRATION_TYPE: 'live',
|
||||
self.mig.DST_HYPERVISOR: 'compute-2',
|
||||
self.mig.SRC_HYPERVISOR: 'compute3'}
|
||||
self.mig.input_parameters = params
|
||||
self.assertEqual(True, self.mig.validate_parameters())
|
||||
|
||||
def test_parameters_exception_resource_id(self):
|
||||
parameters = {baction.BaseAction.RESOURCE_ID: "EFEF",
|
||||
'migration_type': 'live',
|
||||
'src_hypervisor': 'compute-2',
|
||||
'dst_hypervisor': 'compute3'}
|
||||
self.mig.input_parameters = parameters
|
||||
self.assertRaises(voluptuous.Invalid, self.mig.validate_parameters)
|
||||
|
||||
def test_parameters_exception_migration_type(self):
|
||||
parameters = {baction.BaseAction.RESOURCE_ID:
|
||||
"45a37aeb-95ab-4ddb-a305-7d9f62c2f5ba",
|
||||
'migration_type': 'cold',
|
||||
'src_hypervisor': 'compute-2',
|
||||
'dst_hypervisor': 'compute3'}
|
||||
self.mig.input_parameters = parameters
|
||||
self.assertRaises(voluptuous.Invalid, self.mig.validate_parameters)
|
||||
|
||||
def test_parameters_exception_src_hypervisor(self):
|
||||
parameters = {baction.BaseAction.RESOURCE_ID:
|
||||
"45a37aeb-95ab-4ddb-a305-7d9f62c2f5ba",
|
||||
'migration_type': 'cold',
|
||||
'src_hypervisor': None,
|
||||
'dst_hypervisor': 'compute3'}
|
||||
self.mig.input_parameters = parameters
|
||||
self.assertRaises(voluptuous.Invalid, self.mig.validate_parameters)
|
||||
|
||||
def test_parameters_exception_dst_hypervisor(self):
|
||||
parameters = {baction.BaseAction.RESOURCE_ID:
|
||||
"45a37aeb-95ab-4ddb-a305-7d9f62c2f5ba",
|
||||
'migration_type': 'cold',
|
||||
'src_hypervisor': 'compute-1',
|
||||
'dst_hypervisor': None}
|
||||
self.mig.input_parameters = parameters
|
||||
self.assertRaises(voluptuous.Invalid, self.mig.validate_parameters)
|
||||
|
||||
def test_parameters_exception_empty_fields(self):
|
||||
parameters = {baction.BaseAction.RESOURCE_ID: None,
|
||||
'migration_type': None,
|
||||
'src_hypervisor': None,
|
||||
'dst_hypervisor': None}
|
||||
self.mig.input_parameters = parameters
|
||||
self.assertRaises(voluptuous.Invalid, self.mig.validate_parameters)
|
watcher/tests/applier/actions/test_sleep.py (new file, 42 lines)
@ -0,0 +1,42 @@
|
||||
|
||||
# -*- encoding: utf-8 -*-
|
||||
# Copyright (c) 2016 b<>com
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
import voluptuous
|
||||
|
||||
from watcher.applier.actions import sleep
|
||||
from watcher.tests import base
|
||||
|
||||
|
||||
class TestSleep(base.TestCase):
|
||||
def setUp(self):
|
||||
super(TestSleep, self).setUp()
|
||||
self.s = sleep.Sleep()
|
||||
|
||||
def test_parameters_duration(self):
|
||||
self.s.input_parameters = {self.s.DURATION: 1.0}
|
||||
self.assertEqual(True, self.s.validate_parameters())
|
||||
|
||||
def test_parameters_duration_empty(self):
|
||||
self.s.input_parameters = {self.s.DURATION: None}
|
||||
self.assertRaises(voluptuous.Invalid, self.s.validate_parameters)
|
||||
|
||||
def test_parameters_wrong_parameter(self):
|
||||
self.s.input_parameters = {self.s.DURATION: "ef"}
|
||||
self.assertRaises(voluptuous.Invalid, self.s.validate_parameters)
|
||||
|
||||
def test_parameters_add_field(self):
|
||||
self.s.input_parameters = {self.s.DURATION: 1.0, "not_required": "nop"}
|
||||
self.assertRaises(voluptuous.Invalid, self.s.validate_parameters)
|
@ -32,6 +32,12 @@ from watcher.tests.db import base

@six.add_metaclass(abc.ABCMeta)
class FakeAction(abase.BaseAction):
def schema(self):
pass

def postcondition(self):
pass

def precondition(self):
pass

@ -62,12 +68,11 @@ class TestDefaultWorkFlowEngine(base.DbTestCase):
result = self.engine.execute(actions)
self.assertEqual(result, True)

def create_action(self, action_type, applies_to, parameters, next):
def create_action(self, action_type, parameters, next):
action = {
'uuid': utils.generate_uuid(),
'action_plan_id': 0,
'action_type': action_type,
'applies_to': applies_to,
'input_parameters': parameters,
'state': objects.action.State.PENDING,
'alarm': None,
@ -92,15 +97,15 @@ class TestDefaultWorkFlowEngine(base.DbTestCase):
self.assertEqual(result, True)

def test_execute_with_one_action(self):
actions = [self.create_action("nop", "", {'message': 'test'}, None)]
actions = [self.create_action("nop", {'message': 'test'}, None)]
result = self.engine.execute(actions)
self.assertEqual(result, True)
self.check_actions_state(actions, objects.action.State.SUCCEEDED)

def test_execute_with_two_actions(self):
actions = []
next = self.create_action("sleep", "", {'duration': '0'}, None)
first = self.create_action("nop", "", {'message': 'test'}, next.id)
next = self.create_action("sleep", {'duration': 0.0}, None)
first = self.create_action("nop", {'message': 'test'}, next.id)

actions.append(first)
actions.append(next)
@ -111,9 +116,9 @@ class TestDefaultWorkFlowEngine(base.DbTestCase):

def test_execute_with_three_actions(self):
actions = []
next2 = self.create_action("nop", "vm1", {'message': 'next'}, None)
next = self.create_action("sleep", "vm1", {'duration': '0'}, next2.id)
first = self.create_action("nop", "vm1", {'message': 'hello'}, next.id)
next2 = self.create_action("nop", {'message': 'next'}, None)
next = self.create_action("sleep", {'duration': 0.0}, next2.id)
first = self.create_action("nop", {'message': 'hello'}, next.id)
self.check_action_state(first, objects.action.State.PENDING)
self.check_action_state(next, objects.action.State.PENDING)
self.check_action_state(next2, objects.action.State.PENDING)
@ -128,12 +133,9 @@ class TestDefaultWorkFlowEngine(base.DbTestCase):

def test_execute_with_exception(self):
actions = []
next2 = self.create_action("no_exist",
"vm1", {'message': 'next'}, None)
next = self.create_action("sleep", "vm1",
{'duration': '0'}, next2.id)
first = self.create_action("nop", "vm1",
{'message': 'hello'}, next.id)
next2 = self.create_action("no_exist", {'message': 'next'}, None)
next = self.create_action("sleep", {'duration': 0.0}, next2.id)
first = self.create_action("nop", {'message': 'hello'}, next.id)

self.check_action_state(first, objects.action.State.PENDING)
self.check_action_state(next, objects.action.State.PENDING)
@ -158,7 +160,7 @@ class TestDefaultWorkFlowEngine(base.DbTestCase):
plugin=FakeAction,
obj=None),
namespace=FakeAction.namespace())
actions = [self.create_action("dontcare", "vm1", {}, None)]
actions = [self.create_action("dontcare", {}, None)]
result = self.engine.execute(actions)
self.assertEqual(result, False)
self.check_action_state(actions[0], objects.action.State.FAILED)
@ -84,10 +84,12 @@ def get_test_action(**kwargs):
'uuid': kwargs.get('uuid', '10a47dd1-4874-4298-91cf-eff046dbdb8d'),
'action_plan_id': kwargs.get('action_plan_id', 1),
'action_type': kwargs.get('action_type', 'nop'),
'applies_to': kwargs.get('applies_to',
'10a47dd1-4874-4298-91cf-eff046dbdb8d'),
'input_parameters': kwargs.get('input_parameters', {'key1': 'val1',
'key2': 'val2'}),
'input_parameters':
kwargs.get('input_parameters',
{'key1': 'val1',
'key2': 'val2',
'resource_id':
'10a47dd1-4874-4298-91cf-eff046dbdb8d'}),
'state': kwargs.get('state', 'PENDING'),
'alarm': kwargs.get('alarm', None),
'next': kwargs.get('next', 2),
@ -18,55 +18,60 @@
#
import uuid

from watcher.decision_engine.model.hypervisor import Hypervisor
from watcher.decision_engine.model.vm_state import VMState
from watcher.decision_engine.model import hypervisor as modelhyp
from watcher.decision_engine.model import vm_state
from watcher.tests import base
from watcher.tests.decision_engine.strategy.strategies.faker_cluster_state import \
FakerModelCollector
from watcher.tests.decision_engine.strategy.strategies import \
faker_cluster_state


class TestMapping(base.BaseTestCase):

VM1_UUID = "73b09e16-35b7-4922-804e-e8f5d9b740fc"
VM2_UUID = "a4cab39b-9828-413a-bf88-f76921bf1517"

def test_get_node_from_vm(self):
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()

vms = model.get_all_vms()
keys = list(vms.keys())
vm = vms[keys[0]]
if vm.uuid != 'VM_0':
if vm.uuid != self.VM1_UUID:
vm = vms[keys[1]]
node = model.mapping.get_node_from_vm(vm)
self.assertEqual(node.uuid, 'Node_0')

def test_get_node_from_vm_id(self):
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()

hyps = model.mapping.get_node_vms_from_id("BLABLABLA")
self.assertEqual(hyps.__len__(), 0)

def test_get_all_vms(self):
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()

vms = model.get_all_vms()
self.assertEqual(vms.__len__(), 2)
self.assertEqual(vms['VM_0'].state, VMState.ACTIVE.value)
self.assertEqual(vms['VM_0'].uuid, 'VM_0')
self.assertEqual(vms['VM_1'].state, VMState.ACTIVE.value)
self.assertEqual(vms['VM_1'].uuid, 'VM_1')
self.assertEqual(vms[self.VM1_UUID].state,
vm_state.VMState.ACTIVE.value)
self.assertEqual(vms[self.VM1_UUID].uuid, self.VM1_UUID)
self.assertEqual(vms[self.VM2_UUID].state,
vm_state.VMState.ACTIVE.value)
self.assertEqual(vms[self.VM2_UUID].uuid, self.VM2_UUID)

def test_get_mapping(self):
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()

mapping_vm = model.mapping.get_mapping_vm()
self.assertEqual(mapping_vm.__len__(), 2)
self.assertEqual(mapping_vm['VM_0'], 'Node_0')
self.assertEqual(mapping_vm['VM_1'], 'Node_1')
self.assertEqual(mapping_vm[self.VM1_UUID], 'Node_0')
self.assertEqual(mapping_vm[self.VM2_UUID], 'Node_1')

def test_migrate_vm(self):
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()
vms = model.get_all_vms()
keys = list(vms.keys())
@ -81,13 +86,13 @@ class TestMapping(base.BaseTestCase):
self.assertEqual(model.mapping.migrate_vm(vm1, hyp0, hyp1), True)

def test_unmap_from_id_log_warning(self):
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()
vms = model.get_all_vms()
keys = list(vms.keys())
vm0 = vms[keys[0]]
id = "{0}".format(uuid.uuid4())
hypervisor = Hypervisor()
hypervisor = modelhyp.Hypervisor()
hypervisor.uuid = id

model.mapping.unmap_from_id(hypervisor.uuid, vm0.uuid)
@ -95,7 +100,7 @@ class TestMapping(base.BaseTestCase):
# hypervisor.uuid)), 1)

def test_unmap_from_id(self):
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()
vms = model.get_all_vms()
keys = list(vms.keys())
@ -68,7 +68,7 @@ class TestActionScheduling(base.DbTestCase):
"dst_uuid_hypervisor": "server2",
}
solution.add_action(action_type="migrate",
applies_to="b199db0c-1408-4d52-b5a5-5ca14de0ff36",
resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36",
input_parameters=parameters)

with mock.patch.object(
@ -94,11 +94,11 @@ class TestActionScheduling(base.DbTestCase):
"dst_uuid_hypervisor": "server2",
}
solution.add_action(action_type="migrate",
applies_to="b199db0c-1408-4d52-b5a5-5ca14de0ff36",
resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36",
input_parameters=parameters)

solution.add_action(action_type="nop",
applies_to="",
resource_id="",
input_parameters={})

with mock.patch.object(
@ -13,27 +13,29 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from watcher.decision_engine.solution.default import DefaultSolution

from watcher.decision_engine.solution import default
from watcher.tests import base


class TestDefaultSolution(base.BaseTestCase):
def test_default_solution(self):
solution = DefaultSolution()
solution = default.DefaultSolution()
parameters = {
"src_uuid_hypervisor": "server1",
"dst_uuid_hypervisor": "server2",
}
solution.add_action(action_type="nop",
applies_to="b199db0c-1408-4d52-b5a5-5ca14de0ff36",
resource_id="b199db0c-1408-4d52-b5a5-5ca14de0ff36",
input_parameters=parameters)
self.assertEqual(len(solution.actions), 1)
expected_action_type = "nop"
expected_applies_to = "b199db0c-1408-4d52-b5a5-5ca14de0ff36"
expected_parameters = parameters
expected_parameters = {
"src_uuid_hypervisor": "server1",
"dst_uuid_hypervisor": "server2",
"resource_id": "b199db0c-1408-4d52-b5a5-5ca14de0ff36"
}
self.assertEqual(solution.actions[0].get('action_type'),
expected_action_type)
self.assertEqual(solution.actions[0].get('applies_to'),
expected_applies_to)
self.assertEqual(solution.actions[0].get('input_parameters'),
expected_parameters)
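The updated expectation above implies that add_action() now folds the resource_id into input_parameters instead of carrying a separate applies_to field, so an action can validate everything through a single dict. A hedged sketch of that merge as a standalone helper (names and behaviour are assumptions for illustration, not the committed DefaultSolution code):

def _merge_resource_id(resource_id, input_parameters=None):
    # Assumed behaviour: nest the resource id inside the parameters dict so
    # BaseAction.resource_id can later read it back from input_parameters.
    params = dict(input_parameters or {})
    if resource_id:
        params['resource_id'] = resource_id
    return params

# _merge_resource_id("b199db0c-1408-4d52-b5a5-5ca14de0ff36",
#                    {"src_uuid_hypervisor": "server1"})
# -> {'src_uuid_hypervisor': 'server1',
#     'resource_id': 'b199db0c-1408-4d52-b5a5-5ca14de0ff36'}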
@ -16,81 +16,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#

import random

from watcher.decision_engine.model.hypervisor import Hypervisor
from watcher.decision_engine.model.model_root import ModelRoot
from watcher.decision_engine.model.resource import Resource
from watcher.decision_engine.model.resource import ResourceType
from watcher.decision_engine.model.vm import VM
from watcher.metrics_engine.cluster_model_collector.api import \
BaseClusterModelCollector
from watcher.decision_engine.model import hypervisor
from watcher.decision_engine.model import model_root as modelroot
from watcher.decision_engine.model import resource
from watcher.decision_engine.model import vm as modelvm
from watcher.metrics_engine.cluster_model_collector import api


class FakerModelCollector(BaseClusterModelCollector):
class FakerModelCollector(api.BaseClusterModelCollector):
def __init__(self):
pass

def get_latest_cluster_data_model(self):
return self.generate_scenario_1()

def generate_random(self, count_nodes, number_of_vm_per_node):
vms = []

current_state_cluster = ModelRoot()
# number of nodes
count_node = count_nodes
# number max of vm per hypervisor
node_count_vm = number_of_vm_per_node
# total number of virtual machine
count_vm = (count_node * node_count_vm)

# define ressouce ( CPU, MEM disk, ... )
mem = Resource(ResourceType.memory)
# 2199.954 Mhz
num_cores = Resource(ResourceType.cpu_cores)
disk = Resource(ResourceType.disk)

current_state_cluster.create_resource(mem)
current_state_cluster.create_resource(num_cores)
current_state_cluster.create_resource(disk)

for i in range(0, count_node):
node_uuid = "Node_{0}".format(i)

hypervisor = Hypervisor()
hypervisor.uuid = node_uuid
hypervisor.hostname = "host_{0}".format(i)

mem.set_capacity(hypervisor, 132)
disk.set_capacity(hypervisor, 250)
num_cores.set_capacity(hypervisor, 40)
current_state_cluster.add_hypervisor(hypervisor)

for i in range(0, count_vm):
vm_uuid = "VM_{0}".format(i)
vm = VM()
vm.uuid = vm_uuid
mem.set_capacity(vm, 8)
disk.set_capacity(vm, 10)
num_cores.set_capacity(vm, 10)
vms.append(vm)
current_state_cluster.add_vm(vm)
j = 0
for node_id in current_state_cluster.get_all_hypervisors():
for i in range(0, random.randint(0, node_count_vm)):
# todo(jed) check if enough capacity
current_state_cluster.get_mapping().map(
current_state_cluster.get_hypervisor_from_id(node_id),
vms[j])
j += 1
return current_state_cluster

def generate_scenario_1(self):
vms = []

current_state_cluster = ModelRoot()
current_state_cluster = modelroot.ModelRoot()
# number of nodes
count_node = 5
# number max of vm per node
@ -99,10 +42,10 @@ class FakerModelCollector(BaseClusterModelCollector):
count_vm = (count_node * node_count_vm)

# define ressouce ( CPU, MEM disk, ... )
mem = Resource(ResourceType.memory)
mem = resource.Resource(resource.ResourceType.memory)
# 2199.954 Mhz
num_cores = Resource(ResourceType.cpu_cores)
disk = Resource(ResourceType.disk)
num_cores = resource.Resource(resource.ResourceType.cpu_cores)
disk = resource.Resource(resource.ResourceType.disk)

current_state_cluster.create_resource(mem)
current_state_cluster.create_resource(num_cores)
@ -110,7 +53,7 @@ class FakerModelCollector(BaseClusterModelCollector):

for i in range(0, count_node):
node_uuid = "Node_{0}".format(i)
node = Hypervisor()
node = hypervisor.Hypervisor()
node.uuid = node_uuid
node.hostname = "hostname_{0}".format(i)

@ -121,7 +64,7 @@ class FakerModelCollector(BaseClusterModelCollector):

for i in range(0, count_vm):
vm_uuid = "VM_{0}".format(i)
vm = VM()
vm = modelvm.VM()
vm.uuid = vm_uuid
mem.set_capacity(vm, 2)
disk.set_capacity(vm, 20)
@ -168,130 +111,68 @@ class FakerModelCollector(BaseClusterModelCollector):
model.get_hypervisor_from_id(h_id),
model.get_vm_from_id(vm_id))

def generate_scenario_2(self):
vms = []

current_state_cluster = ModelRoot()
# number of nodes
count_node = 10
# number max of vm per node
node_count_vm = 7
# total number of virtual machine
count_vm = (count_node * node_count_vm)

# define ressouce ( CPU, MEM disk, ... )
mem = Resource(ResourceType.memory)
# 2199.954 Mhz
num_cores = Resource(ResourceType.cpu_cores)
disk = Resource(ResourceType.disk)

current_state_cluster.create_resource(mem)
current_state_cluster.create_resource(num_cores)
current_state_cluster.create_resource(disk)

for i in range(0, count_node):
node_uuid = "Node_{0}".format(i)
node = Hypervisor()
node.uuid = node_uuid
node.hostname = "hostname_{0}".format(i)

mem.set_capacity(node, 132)
disk.set_capacity(node, 250)
num_cores.set_capacity(node, 40)
current_state_cluster.add_hypervisor(node)

for i in range(0, count_vm):
vm_uuid = "VM_{0}".format(i)
vm = VM()
vm.uuid = vm_uuid
mem.set_capacity(vm, 10)
disk.set_capacity(vm, 25)
num_cores.set_capacity(vm, 16)
vms.append(vm)
current_state_cluster.add_vm(vm)
indice = 0
for j in range(0, 2):
node_uuid = "Node_{0}".format(j)
for i in range(indice, 3):
vm_uuid = "VM_{0}".format(i)
self.map(current_state_cluster, node_uuid, vm_uuid)

for j in range(2, 5):
node_uuid = "Node_{0}".format(j)
for i in range(indice, 4):
vm_uuid = "VM_{0}".format(i)
self.map(current_state_cluster, node_uuid, vm_uuid)

for j in range(5, 10):
node_uuid = "Node_{0}".format(j)
for i in range(indice, 4):
vm_uuid = "VM_{0}".format(i)
self.map(current_state_cluster, node_uuid, vm_uuid)

return current_state_cluster

def generate_scenario_3_with_2_hypervisors(self):
vms = []

current_state_cluster = ModelRoot()
root = modelroot.ModelRoot()
# number of nodes
count_node = 2
# number max of vm per node
node_count_vm = 1
# total number of virtual machine
count_vm = (count_node * node_count_vm)

# define ressouce ( CPU, MEM disk, ... )
mem = Resource(ResourceType.memory)
mem = resource.Resource(resource.ResourceType.memory)
# 2199.954 Mhz
num_cores = Resource(ResourceType.cpu_cores)
disk = Resource(ResourceType.disk)
num_cores = resource.Resource(resource.ResourceType.cpu_cores)
disk = resource.Resource(resource.ResourceType.disk)

current_state_cluster.create_resource(mem)
current_state_cluster.create_resource(num_cores)
current_state_cluster.create_resource(disk)
root.create_resource(mem)
root.create_resource(num_cores)
root.create_resource(disk)

for i in range(0, count_node):
node_uuid = "Node_{0}".format(i)
node = Hypervisor()
node = hypervisor.Hypervisor()
node.uuid = node_uuid
node.hostname = "hostname_{0}".format(i)

mem.set_capacity(node, 132)
disk.set_capacity(node, 250)
num_cores.set_capacity(node, 40)
current_state_cluster.add_hypervisor(node)
root.add_hypervisor(node)

for i in range(0, count_vm):
vm_uuid = "VM_{0}".format(i)
vm = VM()
vm.uuid = vm_uuid
mem.set_capacity(vm, 2)
disk.set_capacity(vm, 20)
num_cores.set_capacity(vm, 10)
vms.append(vm)
current_state_cluster.add_vm(vm)
vm1 = modelvm.VM()
vm1.uuid = "73b09e16-35b7-4922-804e-e8f5d9b740fc"
mem.set_capacity(vm1, 2)
disk.set_capacity(vm1, 20)
num_cores.set_capacity(vm1, 10)
vms.append(vm1)
root.add_vm(vm1)

current_state_cluster.get_mapping().map(
current_state_cluster.get_hypervisor_from_id("Node_0"),
current_state_cluster.get_vm_from_id("VM_0"))
vm2 = modelvm.VM()
vm2.uuid = "a4cab39b-9828-413a-bf88-f76921bf1517"
mem.set_capacity(vm2, 2)
disk.set_capacity(vm2, 20)
num_cores.set_capacity(vm2, 10)
vms.append(vm2)
root.add_vm(vm2)

current_state_cluster.get_mapping().map(
current_state_cluster.get_hypervisor_from_id("Node_1"),
current_state_cluster.get_vm_from_id("VM_1"))
root.get_mapping().map(root.get_hypervisor_from_id("Node_0"),
root.get_vm_from_id(str(vm1.uuid)))

return current_state_cluster
root.get_mapping().map(root.get_hypervisor_from_id("Node_1"),
root.get_vm_from_id(str(vm2.uuid)))

return root

def generate_scenario_4_with_1_hypervisor_no_vm(self):
current_state_cluster = ModelRoot()
current_state_cluster = modelroot.ModelRoot()
# number of nodes
count_node = 1

# define ressouce ( CPU, MEM disk, ... )
mem = Resource(ResourceType.memory)
mem = resource.Resource(resource.ResourceType.memory)
# 2199.954 Mhz
num_cores = Resource(ResourceType.cpu_cores)
disk = Resource(ResourceType.disk)
num_cores = resource.Resource(resource.ResourceType.cpu_cores)
disk = resource.Resource(resource.ResourceType.disk)

current_state_cluster.create_resource(mem)
current_state_cluster.create_resource(num_cores)
@ -299,7 +180,7 @@ class FakerModelCollector(BaseClusterModelCollector):

for i in range(0, count_node):
node_uuid = "Node_{0}".format(i)
node = Hypervisor()
node = hypervisor.Hypervisor()
node.uuid = node_uuid
node.hostname = "hostname_{0}".format(i)

@ -312,17 +193,17 @@ class FakerModelCollector(BaseClusterModelCollector):

def generate_scenario_5_with_vm_disk_0(self):
vms = []
current_state_cluster = ModelRoot()
current_state_cluster = modelroot.ModelRoot()
# number of nodes
count_node = 1
# number of vms
count_vm = 1

# define ressouce ( CPU, MEM disk, ... )
mem = Resource(ResourceType.memory)
mem = resource.Resource(resource.ResourceType.memory)
# 2199.954 Mhz
num_cores = Resource(ResourceType.cpu_cores)
disk = Resource(ResourceType.disk)
num_cores = resource.Resource(resource.ResourceType.cpu_cores)
disk = resource.Resource(resource.ResourceType.disk)

current_state_cluster.create_resource(mem)
current_state_cluster.create_resource(num_cores)
@ -330,7 +211,7 @@ class FakerModelCollector(BaseClusterModelCollector):

for i in range(0, count_node):
node_uuid = "Node_{0}".format(i)
node = Hypervisor()
node = hypervisor.Hypervisor()
node.uuid = node_uuid
node.hostname = "hostname_{0}".format(i)

@ -341,7 +222,7 @@ class FakerModelCollector(BaseClusterModelCollector):

for i in range(0, count_vm):
vm_uuid = "VM_{0}".format(i)
vm = VM()
vm = modelvm.VM()
vm.uuid = vm_uuid
mem.set_capacity(vm, 2)
disk.set_capacity(vm, 0)
@ -16,26 +16,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import Counter
import collections
import mock

from watcher.applier.actions.loading import default
from watcher.common import exception
from watcher.decision_engine.model.model_root import ModelRoot
from watcher.decision_engine.strategy.strategies.basic_consolidation import \
BasicConsolidation
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.strategy.strategies.faker_cluster_state \
import FakerModelCollector
from watcher.tests.decision_engine.strategy.strategies.faker_metrics_collector\
import FakerMetricsCollector
from watcher.tests.decision_engine.strategy.strategies \
import faker_cluster_state
from watcher.tests.decision_engine.strategy.strategies \
import faker_metrics_collector


class TestBasicConsolidation(base.BaseTestCase):
# fake metrics
fake_metrics = FakerMetricsCollector()
fake_metrics = faker_metrics_collector.FakerMetricsCollector()

# fake cluster
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()

def test_cluster_size(self):
size_cluster = len(
@ -45,7 +45,7 @@ class TestBasicConsolidation(base.BaseTestCase):

def test_basic_consolidation_score_hypervisor(self):
cluster = self.fake_cluster.generate_scenario_1()
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)

@ -67,7 +67,7 @@ class TestBasicConsolidation(base.BaseTestCase):

def test_basic_consolidation_score_vm(self):
cluster = self.fake_cluster.generate_scenario_1()
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
vm_0 = cluster.get_vm_from_id("VM_0")
@ -90,7 +90,7 @@ class TestBasicConsolidation(base.BaseTestCase):

def test_basic_consolidation_score_vm_disk(self):
cluster = self.fake_cluster.generate_scenario_5_with_vm_disk_0()
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
vm_0 = cluster.get_vm_from_id("VM_0")
@ -99,7 +99,7 @@ class TestBasicConsolidation(base.BaseTestCase):

def test_basic_consolidation_weight(self):
cluster = self.fake_cluster.generate_scenario_1()
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
vm_0 = cluster.get_vm_from_id("VM_0")
@ -114,24 +114,24 @@ class TestBasicConsolidation(base.BaseTestCase):
vm_0_weight_assert)

def test_calculate_migration_efficacy(self):
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.calculate_migration_efficacy()

def test_exception_model(self):
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
self.assertRaises(exception.ClusterStateNotDefined, sercon.execute,
None)

def test_exception_cluster_empty(self):
sercon = BasicConsolidation()
model = ModelRoot()
sercon = strategies.BasicConsolidation()
model = model_root.ModelRoot()
self.assertRaises(exception.ClusterEmpty, sercon.execute,
model)

def test_calculate_score_vm_raise_cluster_state_not_found(self):
metrics = FakerMetricsCollector()
metrics = faker_metrics_collector.FakerMetricsCollector()
metrics.empty_one_metric("CPU_COMPUTE")
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)

@ -139,8 +139,8 @@ class TestBasicConsolidation(base.BaseTestCase):
sercon.calculate_score_vm, "VM_1", None)

def test_check_migration(self):
sercon = BasicConsolidation()
fake_cluster = FakerModelCollector()
sercon = strategies.BasicConsolidation()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()

all_vms = model.get_all_vms()
@ -151,8 +151,8 @@ class TestBasicConsolidation(base.BaseTestCase):
sercon.check_migration(model, hyp0, hyp0, vm0)

def test_threshold(self):
sercon = BasicConsolidation()
fake_cluster = FakerModelCollector()
sercon = strategies.BasicConsolidation()
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()

all_hyps = model.get_all_hypervisors()
@ -165,19 +165,19 @@ class TestBasicConsolidation(base.BaseTestCase):
self.assertEqual(sercon.get_threshold_cores(), threshold_cores + 1)

def test_number_of(self):
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.get_number_of_released_nodes()
sercon.get_number_of_migrations()

def test_basic_consolidation_migration(self):
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)

solution = sercon.execute(
self.fake_cluster.generate_scenario_3_with_2_hypervisors())

actions_counter = Counter(
actions_counter = collections.Counter(
[action.get('action_type') for action in solution.actions])

expected_num_migrations = 1
@ -189,26 +189,31 @@ class TestBasicConsolidation(base.BaseTestCase):
self.assertEqual(num_migrations, expected_num_migrations)
self.assertEqual(num_hypervisor_state_change, expected_power_state)

def test_execute_cluster_empty(self):
current_state_cluster = FakerModelCollector()
sercon = BasicConsolidation("sercon", "Basic offline consolidation")
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
model = current_state_cluster.generate_random(0, 0)
self.assertRaises(exception.ClusterEmpty, sercon.execute, model)

# calculate_weight
def test_execute_no_workload(self):
sercon = BasicConsolidation()
sercon = strategies.BasicConsolidation()
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)

current_state_cluster = FakerModelCollector()
current_state_cluster = faker_cluster_state.FakerModelCollector()
model = current_state_cluster. \
generate_scenario_4_with_1_hypervisor_no_vm()

with mock.patch.object(BasicConsolidation, 'calculate_weight') \
with mock.patch.object(strategies.BasicConsolidation,
'calculate_weight') \
as mock_score_call:
mock_score_call.return_value = 0
solution = sercon.execute(model)
self.assertEqual(solution.efficacy, 100)

def test_check_parameters(self):
sercon = strategies.BasicConsolidation()
sercon.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
solution = sercon.execute(
self.fake_cluster.generate_scenario_3_with_2_hypervisors())
loader = default.DefaultActionLoader()
for action in solution.actions:
loaded_action = loader.load(action['action_type'])
loaded_action.input_parameters = action['input_parameters']
loaded_action.validate_parameters()
@ -13,16 +13,29 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from watcher.decision_engine.strategy.strategies.dummy_strategy import \
DummyStrategy

from watcher.applier.actions.loading import default
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.strategy.strategies.faker_cluster_state\
import FakerModelCollector
from watcher.tests.decision_engine.strategy.strategies import \
faker_cluster_state


class TestDummyStrategy(base.TestCase):
def test_dummy_strategy(self):
tactique = DummyStrategy("basic", "Basic offline consolidation")
fake_cluster = FakerModelCollector()
dummy = strategies.DummyStrategy("dummy", "Dummy strategy")
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()
tactique.execute(model)
solution = dummy.execute(model)
self.assertEqual(3, len(solution.actions))

def test_check_parameters(self):
dummy = strategies.DummyStrategy("dummy", "Dummy strategy")
fake_cluster = faker_cluster_state.FakerModelCollector()
model = fake_cluster.generate_scenario_3_with_2_hypervisors()
solution = dummy.execute(model)
loader = default.DefaultActionLoader()
for action in solution.actions:
loaded_action = loader.load(action['action_type'])
loaded_action.input_parameters = action['input_parameters']
loaded_action.validate_parameters()
@ -16,35 +16,35 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import Counter
import collections
import mock

from watcher.applier.actions.loading import default
from watcher.common import exception
from watcher.decision_engine.model.model_root import ModelRoot
from watcher.decision_engine.model.resource import ResourceType
from watcher.decision_engine.strategy.strategies.outlet_temp_control import \
OutletTempControl
from watcher.decision_engine.model import model_root
from watcher.decision_engine.model import resource
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.strategy.strategies.faker_cluster_state \
import FakerModelCollector
from watcher.tests.decision_engine.strategy.strategies.faker_metrics_collector\
import FakerMetricsCollector
from watcher.tests.decision_engine.strategy.strategies \
import faker_cluster_state
from watcher.tests.decision_engine.strategy.strategies \
import faker_metrics_collector


class TestOutletTempControl(base.BaseTestCase):
# fake metrics
fake_metrics = FakerMetricsCollector()
fake_metrics = faker_metrics_collector.FakerMetricsCollector()

# fake cluster
fake_cluster = FakerModelCollector()
fake_cluster = faker_cluster_state.FakerModelCollector()

def test_calc_used_res(self):
model = self.fake_cluster.generate_scenario_3_with_2_hypervisors()
strategy = OutletTempControl()
strategy = strategies.OutletTempControl()
hypervisor = model.get_hypervisor_from_id('Node_0')
cap_cores = model.get_resource_from_id(ResourceType.cpu_cores)
cap_mem = model.get_resource_from_id(ResourceType.memory)
cap_disk = model.get_resource_from_id(ResourceType.disk)
cap_cores = model.get_resource_from_id(resource.ResourceType.cpu_cores)
cap_mem = model.get_resource_from_id(resource.ResourceType.memory)
cap_disk = model.get_resource_from_id(resource.ResourceType.disk)
cores_used, mem_used, disk_used = strategy.calc_used_res(model,
hypervisor,
cap_cores,
@ -55,7 +55,7 @@ class TestOutletTempControl(base.BaseTestCase):

def test_group_hosts_by_outlet_temp(self):
model = self.fake_cluster.generate_scenario_3_with_2_hypervisors()
strategy = OutletTempControl()
strategy = strategies.OutletTempControl()
strategy.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
h1, h2 = strategy.group_hosts_by_outlet_temp(model)
@ -64,17 +64,18 @@ class TestOutletTempControl(base.BaseTestCase):

def test_choose_vm_to_migrate(self):
model = self.fake_cluster.generate_scenario_3_with_2_hypervisors()
strategy = OutletTempControl()
strategy = strategies.OutletTempControl()
strategy.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
h1, h2 = strategy.group_hosts_by_outlet_temp(model)
vm_to_mig = strategy.choose_vm_to_migrate(model, h1)
self.assertEqual(vm_to_mig[0].uuid, 'Node_1')
self.assertEqual(vm_to_mig[1].uuid, 'VM_1')
self.assertEqual(vm_to_mig[1].uuid,
"a4cab39b-9828-413a-bf88-f76921bf1517")

def test_filter_dest_servers(self):
model = self.fake_cluster.generate_scenario_3_with_2_hypervisors()
strategy = OutletTempControl()
strategy = strategies.OutletTempControl()
strategy.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
h1, h2 = strategy.group_hosts_by_outlet_temp(model)
@ -84,29 +85,28 @@ class TestOutletTempControl(base.BaseTestCase):
self.assertEqual(dest_hosts[0]['hv'].uuid, 'Node_0')

def test_exception_model(self):
strategy = OutletTempControl()
strategy = strategies.OutletTempControl()
self.assertRaises(exception.ClusterStateNotDefined, strategy.execute,
None)

def test_exception_cluster_empty(self):
strategy = OutletTempControl()
model = ModelRoot()
strategy = strategies.OutletTempControl()
model = model_root.ModelRoot()
self.assertRaises(exception.ClusterEmpty, strategy.execute, model)

def test_execute_cluster_empty(self):
current_state_cluster = FakerModelCollector()
strategy = OutletTempControl()
strategy = strategies.OutletTempControl()
strategy.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
model = current_state_cluster.generate_random(0, 0)
model = model_root.ModelRoot()
self.assertRaises(exception.ClusterEmpty, strategy.execute, model)

def test_execute_no_workload(self):
strategy = OutletTempControl()
strategy = strategies.OutletTempControl()
strategy.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)

current_state_cluster = FakerModelCollector()
current_state_cluster = faker_cluster_state.FakerModelCollector()
model = current_state_cluster. \
generate_scenario_4_with_1_hypervisor_no_vm()

@ -114,13 +114,25 @@ class TestOutletTempControl(base.BaseTestCase):
self.assertEqual(solution.actions, [])

def test_execute(self):
strategy = OutletTempControl()
strategy = strategies.OutletTempControl()
strategy.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
model = self.fake_cluster.generate_scenario_3_with_2_hypervisors()
solution = strategy.execute(model)
actions_counter = Counter(
actions_counter = collections.Counter(
[action.get('action_type') for action in solution.actions])

num_migrations = actions_counter.get("migrate", 0)
self.assertEqual(num_migrations, 1)

def test_check_parameters(self):
outlet = strategies.OutletTempControl()
outlet.ceilometer = mock.MagicMock(
statistic_aggregation=self.fake_metrics.mock_get_statistics)
model = self.fake_cluster.generate_scenario_3_with_2_hypervisors()
solution = outlet.execute(model)
loader = default.DefaultActionLoader()
for action in solution.actions:
loaded_action = loader.load(action['action_type'])
loaded_action.input_parameters = action['input_parameters']
loaded_action.validate_parameters()
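The test_check_parameters tests added across these strategy test files all follow the same pattern: load each proposed action through the action loader, hand it the solution's input_parameters and let the schema raise on anything malformed. A small standalone illustration of the failure mode being guarded against (the schema and values here are hypothetical, not taken from the change):

import voluptuous

schema = voluptuous.Schema({voluptuous.Required('duration'): float})
try:
    schema({'duration': 'ef'})
except voluptuous.Invalid as exc:
    # exc.path names the offending key, e.g. ['duration']
    print(exc.path, exc)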