Removed mistral/tests/functional
All the changes have been merged into mistral_tempest_tests. The scripts have also been updated accordingly.

Change-Id: I6c514a3c75f1b6e3b94b0e9b0e542697b68d9a02
Partially-Implements: blueprint mistral-tempest-plugin
parent 39a025fce4
commit 568bbf5384
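With this change, the relocated suite is run directly from mistral_tempest_tests, as the updated scripts below do. A minimal sketch of the invocation, assuming a devstack layout where TEMPEST_DIR points at the tempest checkout:

    export PYTHONPATH=$PYTHONPATH:$TEMPEST_DIR
    nosetests -sv mistral_tempest_tests/tests/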
@@ -38,4 +38,4 @@ MISTRALCLIENT_DIR=/opt/stack/new/python-mistralclient
 export PYTHONPATH=$PYTHONPATH:$TEMPEST_DIR
 
 pwd
-nosetests -sv mistral/tests/functional/
+nosetests -sv mistral_tempest_tests/tests/
@@ -1,309 +0,0 @@
# Copyright 2013 Mirantis, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import os
import time

import mock
import six

from tempest import clients
from tempest import config
from tempest import test as test
from tempest_lib import auth
from tempest_lib.common import rest_client
from tempest_lib import exceptions


CONF = config.CONF


def get_resource(path):
    main_package = 'mistral/tests'
    dir_path = __file__[0:__file__.find(main_package) + len(main_package) + 1]

    return open(dir_path + 'resources/' + path).read()


def find_items(items, **props):
    def _matches(item, **props):
        for prop_name, prop_val in six.iteritems(props):
            if item[prop_name] != prop_val:
                return False

        return True

    filtered = list(filter(lambda item: _matches(item, **props), items))

    if len(filtered) == 1:
        return filtered[0]

    return filtered


class MistralClientBase(rest_client.RestClient):
    def __init__(self, auth_provider, service_type):
        super(MistralClientBase, self).__init__(
            auth_provider=auth_provider,
            service=service_type,
            region=CONF.identity.region
        )

        if service_type not in ('workflow', 'workflowv2'):
            msg = "Invalid parameter 'service_type'. "
            raise exceptions.UnprocessableEntity(msg)

        self.endpoint_url = 'publicURL'

        self.workbooks = []
        self.executions = []
        self.workflows = []
        self.triggers = []
        self.actions = []
        self.action_executions = []

    def get_list_obj(self, name):
        resp, body = self.get(name)

        return resp, json.loads(body)

    def delete_obj(self, obj, name):
        return self.delete('{obj}/{name}'.format(obj=obj, name=name))

    def get_object(self, obj, id):
        resp, body = self.get('{obj}/{id}'.format(obj=obj, id=id))

        return resp, json.loads(body)

    def wait_execution_success(self, ex_body, timeout=180, url='executions'):
        return self.wait_execution(ex_body, timeout=timeout, url=url)

    def wait_execution(self, ex_body, timeout=180, url='executions',
                       target_state='SUCCESS'):
        start_time = time.time()

        expected_states = [target_state, 'RUNNING']

        while ex_body['state'] != target_state:
            if time.time() - start_time > timeout:
                msg = ("Execution exceeds timeout {0} "
                       "to change state to {1}. "
                       "Execution: {2}".format(timeout, target_state, ex_body))
                raise exceptions.TimeoutException(msg)

            _, ex_body = self.get_object(url, ex_body['id'])

            if ex_body['state'] not in expected_states:
                msg = ("Execution state %s is not in expected "
                       "states: %s" % (ex_body['state'], expected_states))
                raise exceptions.TempestException(msg)

            time.sleep(1)

        return ex_body


class MistralClientV2(MistralClientBase):

    def post_request(self, url, file_name):
        headers = {"headers": "Content-Type:text/plain"}

        return self.post(url, get_resource(file_name), headers=headers)

    def post_json(self, url, obj):
        headers = {"Content-Type": "application/json"}

        return self.post(url, json.dumps(obj), headers=headers)

    def update_request(self, url, file_name):
        headers = {"headers": "Content-Type:text/plain"}

        resp, body = self.put(url, get_resource(file_name), headers=headers)

        return resp, json.loads(body)

    def get_definition(self, item, name):
        resp, body = self.get("%s/%s" % (item, name))

        return resp, json.loads(body)['definition']

    def create_workbook(self, yaml_file):
        resp, body = self.post_request('workbooks', yaml_file)

        wb_name = json.loads(body)['name']
        self.workbooks.append(wb_name)

        _, wfs = self.get_list_obj('workflows')

        for wf in wfs['workflows']:
            if wf['name'].startswith(wb_name):
                self.workflows.append(wf['name'])

        return resp, json.loads(body)

    def create_workflow(self, yaml_file, scope=None):
        if scope:
            resp, body = self.post_request('workflows?scope=public', yaml_file)
        else:
            resp, body = self.post_request('workflows', yaml_file)

        for wf in json.loads(body)['workflows']:
            self.workflows.append(wf['name'])

        return resp, json.loads(body)

    def create_execution(self, wf_name, wf_input=None, params=None):
        body = {"workflow_name": "%s" % wf_name}

        if wf_input:
            body.update({'input': json.dumps(wf_input)})
        if params:
            body.update({'params': json.dumps(params)})

        resp, body = self.post('executions', json.dumps(body))

        self.executions.append(json.loads(body)['id'])

        return resp, json.loads(body)

    def update_execution(self, execution_id, put_body):
        resp, body = self.put('executions/%s' % execution_id, put_body)

        return resp, json.loads(body)

    def create_cron_trigger(self, name, wf_name, wf_input=None, pattern=None,
                            first_time=None, count=None):
        post_body = {
            'name': name,
            'workflow_name': wf_name,
            'pattern': pattern,
            'remaining_executions': count,
            'first_execution_time': first_time
        }

        if wf_input:
            post_body.update({'workflow_input': json.dumps(wf_input)})

        rest, body = self.post('cron_triggers', json.dumps(post_body))

        self.triggers.append(name)

        return rest, json.loads(body)

    def create_action(self, yaml_file):
        resp, body = self.post_request('actions', yaml_file)

        self.actions.extend(
            [action['name'] for action in json.loads(body)['actions']])

        return resp, json.loads(body)

    def get_wf_tasks(self, wf_name):
        all_tasks = self.get_list_obj('tasks')[1]['tasks']

        return [t for t in all_tasks if t['workflow_name'] == wf_name]

    def create_action_execution(self, request_body):
        resp, body = self.post_json('action_executions', request_body)

        params = json.loads(request_body.get('params', '{}'))
        if params.get('save_result', False):
            self.action_executions.append(json.loads(body)['id'])

        return resp, json.loads(body)


class AuthProv(auth.KeystoneV2AuthProvider):
    def __init__(self):
        self.alt_part = None

    def auth_request(self, method, url, *args, **kwargs):
        req_url, headers, body = super(AuthProv, self).auth_request(
            method, url, *args, **kwargs)
        return 'http://localhost:8989/{0}/{1}'.format(
            os.environ['VERSION'], url), headers, body

    def get_auth(self):
        return 'mock_str', 'mock_str'

    def base_url(self, *args, **kwargs):
        return ''


class TestCase(test.BaseTestCase):

    credentials = ['primary', 'alt']

    @classmethod
    def resource_setup(cls):
        """Client authentication.

        This method allows to initialize authentication before
        each test case and define parameters of Mistral API Service.
        """
        super(TestCase, cls).resource_setup()

        if 'WITHOUT_AUTH' in os.environ:
            cls.mgr = mock.MagicMock()
            cls.mgr.auth_provider = AuthProv()
            cls.alt_mgr = cls.mgr
        else:
            cls.mgr = cls.manager
            cls.alt_mgr = cls.alt_manager

        if cls._service == 'workflowv2':
            cls.client = MistralClientV2(
                cls.mgr.auth_provider, cls._service)
            cls.alt_client = MistralClientV2(
                cls.alt_mgr.auth_provider, cls._service)

    def setUp(self):
        super(TestCase, self).setUp()

    def tearDown(self):
        super(TestCase, self).tearDown()

        for wb in self.client.workbooks:
            self.client.delete_obj('workbooks', wb)

        self.client.workbooks = []


class TestCaseAdvanced(TestCase):
    @classmethod
    def resource_setup(cls):
        super(TestCaseAdvanced, cls).resource_setup()

        cls.server_client = clients.ServersClient(
            cls.mgr.auth_provider,
            "compute",
            region=CONF.identity.region
        )

        cls.image_ref = CONF.compute.image_ref
        cls.flavor_ref = CONF.compute.flavor_ref

    def tearDown(self):
        for wb in self.client.workbooks:
            self.client.delete_obj('workbooks', wb)

        self.client.workbooks = []

        for ex in self.client.executions:
            self.client.delete_obj('executions', ex)

        self.client.executions = []

        super(TestCaseAdvanced, self).tearDown()
@@ -1,86 +0,0 @@
# Copyright 2015 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from tempest import test

from mistral.tests.functional import base


class OpenStackActionsTestsV2(base.TestCase):

    _service = 'workflowv2'

    # TODO(akuznetsova): add checks for task result after task_output
    # TODO(akuznetsova): refactoring will be finished

    @classmethod
    def resource_setup(cls):
        super(OpenStackActionsTestsV2, cls).resource_setup()

        _, cls.wb = cls.client.create_workbook(
            'openstack/action_collection_wb.yaml')

    @test.attr(type='openstack')
    def test_nova_actions(self):
        wf_name = self.wb['name'] + '.nova'
        _, execution = self.client.create_execution(wf_name)
        self.client.wait_execution_success(execution)
        executed_task = self.client.get_wf_tasks(wf_name)[-1]

        self.assertEqual('SUCCESS', executed_task['state'])

    @test.attr(type='openstack')
    def test_keystone_actions(self):
        wf_name = self.wb['name'] + '.keystone'
        _, execution = self.client.create_execution(wf_name)
        self.client.wait_execution_success(execution)
        executed_task = self.client.get_wf_tasks(wf_name)[-1]

        self.assertEqual('SUCCESS', executed_task['state'])

    @test.attr(type='openstack')
    def test_heat_actions(self):
        wf_name = self.wb['name'] + '.heat'
        _, execution = self.client.create_execution(wf_name)
        self.client.wait_execution_success(execution)
        executed_task = self.client.get_wf_tasks(wf_name)[-1]

        self.assertEqual('SUCCESS', executed_task['state'])

    @test.attr(type='openstack')
    def test_glance_actions(self):
        wf_name = self.wb['name'] + '.glance'
        _, execution = self.client.create_execution(wf_name)
        self.client.wait_execution_success(execution)
        executed_task = self.client.get_wf_tasks(wf_name)[-1]

        self.assertEqual('SUCCESS', executed_task['state'])

    @test.attr(type='openstack')
    def test_cinder_actions(self):
        wf_name = self.wb['name'] + '.cinder'
        _, execution = self.client.create_execution(wf_name)
        self.client.wait_execution_success(execution)
        executed_task = self.client.get_wf_tasks(wf_name)[-1]

        self.assertEqual('SUCCESS', executed_task['state'])

    @test.attr(type='openstack')
    def test_neutron_actions(self):
        wf_name = self.wb['name'] + '.neutron'
        _, execution = self.client.create_execution(wf_name)
        self.client.wait_execution_success(execution)
        executed_task = self.client.get_wf_tasks(wf_name)[-1]

        self.assertEqual('SUCCESS', executed_task['state'])
@@ -1,272 +0,0 @@
# Copyright 2015 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os
from os import path
import time

from oslo_log import log as logging
from paramiko import ssh_exception
from tempest import config
from tempest import test

from mistral.tests.functional import base
from mistral import utils
from mistral.utils import ssh_utils


LOG = logging.getLogger(__name__)
CONF = config.CONF
SSH_KEYS_DIRECTORY = path.expanduser("~/.ssh/")


class SSHActionsTestsV2(base.TestCaseAdvanced):

    _service = 'workflowv2'

    @classmethod
    def _create_security_group_rule_ssh(cls):
        sec_groups = (
            cls.mgr.compute_security_groups_client.
            list_security_groups()
        )
        sec_groups = sec_groups['security_groups']

        default_group = next(
            g for g in sec_groups if g['name'] == 'default'
        )

        rule = (
            cls.mgr.compute_security_group_rules_client
            .create_security_group_rule(
                parent_group_id=default_group['id'],
                ip_protocol="tcp",
                from_port=22,
                to_port=22,
                cidr="0.0.0.0/0"
            )
        )

        cls.ssh_rule_id = rule['security_group_rule']['id']

    @classmethod
    def _create_server(cls, server_name, **kwargs):
        return cls.server_client.create_server(
            name=server_name,
            imageRef=CONF.compute.image_ref,
            flavorRef=CONF.compute.flavor_ref,
            **kwargs
        ).get('server')

    @classmethod
    def _associate_floating_ip_to_server(cls, server_id):
        fl_ip_client = cls.mgr.compute_floating_ips_client

        all_ips = fl_ip_client.list_floating_ips().get(
            'floating_ips'
        )
        free_ips = list(
            filter(lambda fl_ip: fl_ip['instance_id'] is None, all_ips)
        )

        if free_ips:
            ip = free_ips[0]['ip']
        else:
            # Allocate new floating ip.
            ip = fl_ip_client.create_floating_ip()['floating_ip']['ip']

        # Associate IP.
        fl_ip_client.associate_floating_ip_to_server(
            floating_ip=ip,
            server_id=server_id
        )

        return ip

    @classmethod
    def _wait_until_server_up(cls, server_ip, timeout=120, delay=2):
        seconds_remain = timeout

        LOG.info("Waiting server SSH [IP=%s]..." % server_ip)

        while seconds_remain > 0:
            try:
                ssh_utils.execute_command('cd', server_ip, None)
            except ssh_exception.SSHException:
                LOG.info("Server %s: SSH service is ready.")
                return
            except Exception as e:
                LOG.info(str(e))
                seconds_remain -= delay
                time.sleep(delay)
            else:
                return

        raise Exception(
            "Failed waiting until server's '%s' SSH is up." % server_ip
        )

    @classmethod
    def _wait_until_server_active(cls, server_id, timeout=60, delay=2):
        seconds_remain = timeout

        LOG.info("Waiting server [id=%s]..." % server_id)

        while seconds_remain > 0:
            server_info = cls.server_client.show_server(server_id)
            if server_info['server']['status'] == 'ACTIVE':
                return

            seconds_remain -= delay
            time.sleep(delay)

        raise Exception(
            "Failed waiting until server %s is active." % server_id
        )

    @classmethod
    def resource_setup(cls):
        super(SSHActionsTestsV2, cls).resource_setup()

        # Modify security group for accessing VM via SSH.
        cls._create_security_group_rule_ssh()

        # Create keypair (public and private keys).
        cls.private_key, cls.public_key = utils.generate_key_pair()
        cls.key_name = 'mistral-functional-tests-key'

        # If ZUUL_PROJECT is specified, it means
        # tests are running on Jenkins gate.

        if os.environ.get('ZUUL_PROJECT'):
            cls.key_dir = "/opt/stack/new/.ssh/"

            if not path.exists(cls.key_dir):
                os.mkdir(cls.key_dir)
        else:
            cls.key_dir = SSH_KEYS_DIRECTORY

        utils.save_text_to(
            cls.private_key,
            cls.key_dir + cls.key_name,
            overwrite=True
        )

        LOG.info(
            "Private key saved to %s" % cls.key_dir + cls.key_name
        )

        # Create keypair in nova.
        cls.mgr.keypairs_client.create_keypair(
            name=cls.key_name,
            public_key=cls.public_key
        )

        # Start servers and provide key_name.
        # Note: start public vm only after starting the guest one,
        # so we can track public vm launching using ssh, but can't
        # do the same with guest VM.
        cls.guest_vm = cls._create_server(
            'mistral-guest-vm',
            key_name=cls.key_name
        )
        cls.public_vm = cls._create_server(
            'mistral-public-vm',
            key_name=cls.key_name
        )

        cls._wait_until_server_active(cls.public_vm['id'])

        cls.public_vm_ip = cls._associate_floating_ip_to_server(
            cls.public_vm['id']
        )

        # Wait until server is up.
        cls._wait_until_server_up(cls.public_vm_ip)

        # Update servers info.
        cls.public_vm = cls.server_client.show_server(
            cls.public_vm['id']
        ).get('server')

        cls.guest_vm = cls.server_client.show_server(
            cls.guest_vm['id']
        ).get('server')

    @classmethod
    def resource_cleanup(cls):
        fl_ip_client = cls.mgr.compute_floating_ips_client
        fl_ip_client.disassociate_floating_ip_from_server(
            cls.public_vm_ip,
            cls.public_vm['id']
        )
        cls.server_client.delete_server(cls.public_vm['id'])
        cls.server_client.delete_server(cls.guest_vm['id'])
        cls.mgr.keypairs_client.delete_keypair(cls.key_name)

        cls.mgr.compute_security_group_rules_client.delete_security_group_rule(
            cls.ssh_rule_id
        )
        os.remove(cls.key_dir + cls.key_name)

        super(SSHActionsTestsV2, cls).resource_cleanup()

    @test.attr(type='sanity')
    def test_run_ssh_action(self):
        input_data = {
            'cmd': 'hostname',
            'host': self.public_vm_ip,
            'username': CONF.validation.image_ssh_user,
            'private_key_filename': self.key_name
        }

        resp, body = self.client.create_action_execution(
            {
                'name': 'std.ssh',
                'input': json.dumps(input_data)
            }
        )

        self.assertEqual(201, resp.status)

        output = json.loads(body['output'])

        self.assertIn(self.public_vm['name'], output['result'])

    @test.attr(type='sanity')
    def test_run_ssh_proxied_action(self):
        guest_vm_ip = self.guest_vm['addresses'].popitem()[1][0]['addr']

        input_data = {
            'cmd': 'hostname',
            'host': guest_vm_ip,
            'username': CONF.validation.image_ssh_user,
            'private_key_filename': self.key_name,
            'gateway_host': self.public_vm_ip,
            'gateway_username': CONF.validation.image_ssh_user
        }

        resp, body = self.client.create_action_execution(
            {
                'name': 'std.ssh_proxied',
                'input': json.dumps(input_data)
            }
        )

        self.assertEqual(201, resp.status)

        output = json.loads(body['output'])

        self.assertIn(self.guest_vm['name'], output['result'])
@@ -20,7 +20,6 @@ import mock
 import six
 
 from tempest import clients
-from tempest.common import credentials_factory as creds
 from tempest import config
 from tempest import test as test
 from tempest_lib import auth
@@ -244,6 +243,9 @@ class AuthProv(auth.KeystoneV2AuthProvider):
 
 
 class TestCase(test.BaseTestCase):
+
+    credentials = ['primary', 'alt']
+
     @classmethod
     def resource_setup(cls):
         """Client authentication.
@@ -256,16 +258,10 @@ class TestCase(test.BaseTestCase):
         if 'WITHOUT_AUTH' in os.environ:
             cls.mgr = mock.MagicMock()
             cls.mgr.auth_provider = AuthProv()
             cls.alt_mgr = cls.mgr
         else:
-            cls.creds = creds.get_configured_credentials(
-                credential_type='user'
-            )
-            cls.mgr = clients.Manager(cls.creds)
-            cls.alt_creds = creds.get_configured_credentials(
-                credential_type='alt_user'
-            )
-            cls.alt_mgr = clients.Manager(cls.alt_creds)
+            cls.mgr = cls.manager
+            cls.alt_mgr = cls.alt_manager
 
         if cls._service == 'workflowv2':
             cls.client = MistralClientV2(
@@ -382,9 +382,8 @@ class WorkflowTestsV2(base.TestCase):
                 name
             )
 
-            self.assertEqual(
-                "Can't delete workflow that has triggers " +
-                "[workflow_name=wf2],[cron_trigger_name(s)=trigger]",
+            self.assertIn(
+                "Can't delete workflow that has triggers associated",
                 exception.resp_body['faultstring']
             )
         finally:
@@ -38,19 +38,25 @@ class SSHActionsTestsV2(base.TestCaseAdvanced):
 
     @classmethod
     def _create_security_group_rule_ssh(cls):
-        sec_groups = cls.mgr.security_groups_client.list_security_groups()
+        sec_groups = (
+            cls.mgr.compute_security_groups_client.
+            list_security_groups()
+        )
         sec_groups = sec_groups['security_groups']
 
         default_group = next(
             g for g in sec_groups if g['name'] == 'default'
         )
 
-        rule = cls.mgr.security_group_rules_client.create_security_group_rule(
-            parent_group_id=default_group['id'],
-            ip_protocol="tcp",
-            from_port=22,
-            to_port=22,
-            cidr="0.0.0.0/0"
+        rule = (
+            cls.mgr.compute_security_group_rules_client
+            .create_security_group_rule(
+                parent_group_id=default_group['id'],
+                ip_protocol="tcp",
+                from_port=22,
+                to_port=22,
+                cidr="0.0.0.0/0"
+            )
         )
 
         cls.ssh_rule_id = rule['security_group_rule']['id']
@@ -201,11 +207,16 @@ class SSHActionsTestsV2(base.TestCaseAdvanced):
 
     @classmethod
     def resource_cleanup(cls):
+        fl_ip_client = cls.mgr.compute_floating_ips_client
+        fl_ip_client.disassociate_floating_ip_from_server(
+            cls.public_vm_ip,
+            cls.public_vm['id']
+        )
         cls.server_client.delete_server(cls.public_vm['id'])
         cls.server_client.delete_server(cls.guest_vm['id'])
         cls.mgr.keypairs_client.delete_keypair(cls.key_name)
 
-        cls.mgr.security_group_rules_client.delete_security_group_rule(
+        cls.mgr.compute_security_group_rules_client.delete_security_group_rule(
             cls.ssh_rule_id
         )
         os.remove(cls.key_dir + cls.key_name)
@@ -217,7 +228,7 @@ class SSHActionsTestsV2(base.TestCaseAdvanced):
         input_data = {
             'cmd': 'hostname',
             'host': self.public_vm_ip,
-            'username': CONF.scenario.ssh_user,
+            'username': CONF.validation.image_ssh_user,
             'private_key_filename': self.key_name
         }
 
@@ -241,10 +252,10 @@ class SSHActionsTestsV2(base.TestCaseAdvanced):
         input_data = {
             'cmd': 'hostname',
             'host': guest_vm_ip,
-            'username': CONF.scenario.ssh_user,
+            'username': CONF.validation.image_ssh_user,
             'private_key_filename': self.key_name,
             'gateway_host': self.public_vm_ip,
-            'gateway_username': CONF.scenario.ssh_user
+            'gateway_username': CONF.validation.image_ssh_user
         }
 
         resp, body = self.client.create_action_execution(
@@ -14,7 +14,7 @@ function pre_hook() {
 function run_tests_by_version() {
     echo "$(tput setaf 4)Running integration API and workflow execution tests for v$1$(tput sgr 0)"
     export VERSION="v$1"
-    nosetests -v mistral/tests/functional/api/v$1/
+    nosetests -v mistral_tempest_tests/tests/api/v$1/
     unset VERSION
 }
 