Merge "Make scenario-tests work with all datastores"
commit ee89e7d32d
@@ -101,6 +101,11 @@ function configure_trove {
     setup_trove_logging $TROVE_CONF
     iniset $TROVE_CONF DEFAULT trove_api_workers "$API_WORKERS"

+    # Increase default quota.
+    iniset $TROVE_CONF DEFAULT max_accepted_volume_size 10
+    iniset $TROVE_CONF DEFAULT max_instances_per_user 10
+    iniset $TROVE_CONF DEFAULT max_volumes_per_user 10
+
     configure_auth_token_middleware $TROVE_CONF trove $TROVE_AUTH_CACHE_DIR

     # (Re)create trove taskmanager conf file if needed
@@ -121,6 +126,10 @@ function configure_trove {
         iniset $TROVE_TASKMANAGER_CONF DEFAULT nova_compute_service_type compute_legacy

         setup_trove_logging $TROVE_TASKMANAGER_CONF
+
+        # Increase default timeouts (required by the tests).
+        iniset $TROVE_TASKMANAGER_CONF DEFAULT agent_call_high_timeout 300
+        iniset $TROVE_TASKMANAGER_CONF DEFAULT usage_timeout 1200
     fi

     # (Re)create trove conductor conf file if needed
@@ -20,3 +20,14 @@ class MariadbHelper(MysqlHelper):

     def __init__(self, expected_override_name):
         super(MariadbHelper, self).__init__(expected_override_name)
+
+    # Mariadb currently does not support configuration groups.
+    # see: bug/1532256
+    def get_dynamic_group(self):
+        return dict()
+
+    def get_non_dynamic_group(self):
+        return dict()
+
+    def get_invalid_groups(self):
+        return []
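Note: the empty group definitions above effectively disable the configuration-group scenario tests for MariaDB. As a minimal sketch of how a runner-side guard could consume such empty definitions (the function and its name are illustrative, not part of this change):

```python
from proboscis import SkipTest


def run_dynamic_configuration_test(helper):
    # If the helper (e.g. MariadbHelper above) returns an empty group,
    # skip the configuration-group test instead of failing it.
    group = helper.get_dynamic_group()
    if not group:
        raise SkipTest("Datastore does not support dynamic configuration "
                       "groups (see bug/1532256).")
    # ... otherwise exercise the configuration group here ...
```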
@@ -29,6 +29,24 @@ class RedisHelper(TestHelper):
         self.value_pattern = 'id:%s'
         self.label_value = 'value_set'

+        self._ds_client_cache = dict()
+
+    def get_client(self, host, *args, **kwargs):
+        # We need to cache the Redis client in order to prevent Error 99
+        # (Cannot assign requested address) when working with large data sets.
+        # A new client may be created frequently due to how the redirection
+        # works (see '_execute_with_redirection').
+        # The old (now closed) connections however have to wait for about 60s
+        # (TIME_WAIT) before the port can be released.
+        # This is a feature of the operating system that helps it dealing with
+        # packets that arrive after the connection is closed.
+        if host in self._ds_client_cache:
+            return self._ds_client_cache[host]
+
+        client = self.create_client(host, *args, **kwargs)
+        self._ds_client_cache[host] = client
+        return client
+
     def create_client(self, host, *args, **kwargs):
         user = self.get_helper_credentials()
         client = redis.StrictRedis(password=user['password'], host=host)
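Note: the caching logic above can be summarized in a standalone sketch. The class below is illustrative only and assumes the standard redis-py client; it is not part of this change:

```python
import redis


class CachedRedisClients(object):
    """Reuse one client per host to avoid exhausting local ports.

    Rapidly creating and closing connections leaves sockets in TIME_WAIT
    for roughly 60s, which eventually triggers Error 99 (Cannot assign
    requested address) under heavy redirection traffic.
    """

    def __init__(self, password):
        self._password = password
        self._clients = {}

    def get_client(self, host):
        # Return the cached client for this host, creating it on first use.
        if host not in self._clients:
            self._clients[host] = redis.StrictRedis(
                password=self._password, host=host)
        return self._clients[host]
```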
@@ -257,7 +257,9 @@ class BackupRunner(TestRunner):
             self.instance_info.dbaas_flavor_href,
             self.instance_info.volume,
             nics=self.instance_info.nics,
-            restorePoint=restore_point)
+            restorePoint=restore_point,
+            datastore=self.instance_info.dbaas_datastore,
+            datastore_version=self.instance_info.dbaas_datastore_version)
         return result

     def run_restore_from_backup_completed(
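Note: the restore call now pins the datastore of the restored instance, so restores work for non-default datastores as well. A hedged wrapper sketch using the same keyword arguments (the client object and parameter values are assumptions; only the kwargs mirror the change above):

```python
def restore_instance(client, name, flavor_id, volume, nics,
                     restore_point, datastore, datastore_version):
    # Create a new instance from a backup, explicitly selecting the
    # datastore and version rather than relying on the service default.
    return client.instances.create(
        name, flavor_id, volume,
        nics=nics,
        restorePoint=restore_point,
        datastore=datastore,
        datastore_version=datastore_version)
```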
@@ -13,6 +13,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+from proboscis import SkipTest
+
 from trove.tests.scenario.runners.test_runners import TestRunner
 from troveclient.compat import exceptions

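Note: SkipTest is what lets the runners below bow out gracefully on datastores that provide no database or user definitions. A minimal, self-contained illustration of the mechanism (the test body and group name are made up for the example):

```python
from proboscis import SkipTest, test


@test(groups=["example"])
def optional_feature_check():
    feature_supported = False  # placeholder condition for illustration
    if not feature_supported:
        # Raising SkipTest marks the test as skipped instead of failed.
        raise SkipTest("Feature not supported by this datastore.")
```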
@@ -25,10 +27,23 @@ class DatabaseActionsRunner(TestRunner):
     # likely require replacing GA casts with calls which I believe are
     # more appropriate anyways.

+    def __init__(self):
+        super(DatabaseActionsRunner, self).__init__()
+        self.db_defs = []
+
+    @property
+    def first_db_def(self):
+        if self.db_defs:
+            return self.db_defs[0]
+        raise SkipTest("No valid database definitions provided.")
+
     def run_databases_create(self, expected_http_code=202):
         databases = self.test_helper.get_valid_database_definitions()
-        self.db_defs = self.assert_databases_create(
-            self.instance_info.id, databases, expected_http_code)
+        if databases:
+            self.db_defs = self.assert_databases_create(
+                self.instance_info.id, databases, expected_http_code)
+        else:
+            raise SkipTest("No valid database definitions provided.")

     def assert_databases_create(self, instance_id, serial_databases_def,
                                 expected_http_code):
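Note: the __init__/property combination above is a small guard pattern: definitions default to an empty list, and any test that needs the first definition gets a skip instead of an IndexError when the datastore provides none. A distilled, generic sketch of the same idea (the class name is illustrative):

```python
from proboscis import SkipTest


class DefinitionGuard(object):

    def __init__(self):
        self.defs = []

    @property
    def first_def(self):
        if self.defs:
            return self.defs[0]
        # No definitions for this datastore: skip dependent tests.
        raise SkipTest("No valid definitions provided.")
```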
@@ -104,7 +119,7 @@ class DatabaseActionsRunner(TestRunner):
             self, expected_exception=exceptions.BadRequest,
             expected_http_code=400):
         self.assert_databases_create_failure(
-            self.instance_info.id, self.db_defs[0],
+            self.instance_info.id, self.first_db_def,
             expected_exception, expected_http_code)

     def assert_databases_create_failure(
@@ -31,6 +31,7 @@ class ReplicationRunner(TestRunner):
         self.master_host = self.get_instance_host(self.master_id)
         self.replica_1_host = None
         self.master_backup_count = None
+        self.used_data_sets = set()

     def run_add_data_for_replication(self, data_type=DataType.small):
         self.assert_add_replication_data(data_type, self.master_host)
@@ -40,6 +41,7 @@ class ReplicationRunner(TestRunner):
         'helper' class should implement the 'add_<data_type>_data' method.
         """
         self.test_helper.add_data(data_type, host)
+        self.used_data_sets.add(data_type)

     def run_verify_data_for_replication(self, data_type=DataType.small):
         self.assert_verify_replication_data(data_type, self.master_host)
@@ -203,9 +205,9 @@ class ReplicationRunner(TestRunner):
         """In order for this to work, the corresponding datastore
         'helper' class should implement the 'remove_<type>_data' method.
         """
-        self.test_helper.remove_data(DataType.small, host)
-        self.test_helper.remove_data(DataType.tiny, host)
-        self.test_helper.remove_data(DataType.tiny2, host)
+        for data_set in self.used_data_sets:
+            self.report.log("Removing replicated data set: %s" % data_set)
+            self.test_helper.remove_data(data_set, host)

     def run_detach_replica_from_source(self,
                                        expected_states=['ACTIVE'],
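Note: tracking used_data_sets means the cleanup step removes exactly the data sets that were added, instead of hardcoding the small/tiny/tiny2 sets that not every datastore helper supports. A generic sketch of the same track-then-clean-up idea (the class is illustrative; the helper interface is assumed to mirror the test helper's add_data/remove_data):

```python
class ReplicationDataTracker(object):

    def __init__(self, helper, report=None):
        self.helper = helper
        self.report = report
        self.used_data_sets = set()

    def add_data(self, data_type, host):
        # Record every data set that was actually written to the master.
        self.helper.add_data(data_type, host)
        self.used_data_sets.add(data_type)

    def remove_all(self, host):
        # Remove only what was recorded, then reset the tracker.
        for data_set in self.used_data_sets:
            if self.report:
                self.report.log("Removing replicated data set: %s" % data_set)
            self.helper.remove_data(data_set, host)
        self.used_data_sets.clear()
```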
@@ -15,6 +15,8 @@

 from six.moves.urllib import parse as urllib_parse

+from proboscis import SkipTest
+
 from trove.tests.scenario.runners.test_runners import TestRunner
 from troveclient.compat import exceptions

@@ -27,10 +29,23 @@ class UserActionsRunner(TestRunner):
     # likely require replacing GA casts with calls which I believe are
     # more appropriate anyways.

+    def __init__(self):
+        super(UserActionsRunner, self).__init__()
+        self.user_defs = []
+
+    @property
+    def first_user_def(self):
+        if self.user_defs:
+            return self.user_defs[0]
+        raise SkipTest("No valid user definitions provided.")
+
     def run_users_create(self, expected_http_code=202):
         users = self.test_helper.get_valid_user_definitions()
-        self.user_defs = self.assert_users_create(
-            self.instance_info.id, users, expected_http_code)
+        if users:
+            self.user_defs = self.assert_users_create(
+                self.instance_info.id, users, expected_http_code)
+        else:
+            raise SkipTest("No valid user definitions provided.")

     def assert_users_create(self, instance_id, serial_users_def,
                             expected_http_code):
@@ -57,8 +72,9 @@ class UserActionsRunner(TestRunner):
         user_name = expected_user_def['name']
         self.assert_equal(user.name, expected_user_def['name'],
                           "Mismatch of names for user: %s" % user_name)
-        self.assert_equal(user.databases, expected_user_def['databases'],
-                          "Mismatch of databases for user: %s" % user_name)
+        self.assert_list_elements_equal(
+            user.databases, expected_user_def['databases'],
+            "Mismatch of databases for user: %s" % user_name)

     def run_users_list(self, expected_http_code=200):
         self.assert_users_list(
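Note: switching from assert_equal to assert_list_elements_equal makes the database check order-insensitive, since different datastores may return a user's databases in a different order. A rough sketch of what such a comparison could look like (the real helper lives in TestRunner and may differ in details):

```python
def assert_list_elements_equal(actual, expected, message):
    # Compare as unordered collections: every expected element must appear
    # exactly once in the actual list, regardless of position.
    unmatched = list(expected)
    for item in actual:
        if item in unmatched:
            unmatched.remove(item)
        else:
            raise AssertionError("%s: unexpected element %r" % (message, item))
    if unmatched:
        raise AssertionError("%s: missing elements %r" % (message, unmatched))
```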
@@ -153,7 +169,7 @@ class UserActionsRunner(TestRunner):
             self, expected_exception=exceptions.BadRequest,
             expected_http_code=400):
         self.assert_users_create_failure(
-            self.instance_info.id, self.user_defs[0],
+            self.instance_info.id, self.first_user_def,
             expected_exception, expected_http_code)

     def run_system_user_create(
@@ -181,15 +197,15 @@ class UserActionsRunner(TestRunner):
             self, expected_exception=exceptions.BadRequest,
             expected_http_code=400):
         self.assert_user_attribute_update_failure(
-            self.instance_info.id, self.user_defs[0], {'name': ''},
+            self.instance_info.id, self.first_user_def, {'name': ''},
             expected_exception, expected_http_code)

     def run_user_update_with_existing_name(
             self, expected_exception=exceptions.BadRequest,
             expected_http_code=400):
         self.assert_user_attribute_update_failure(
-            self.instance_info.id, self.user_defs[0],
-            {'name': self.user_defs[0]['name']},
+            self.instance_info.id, self.first_user_def,
+            {'name': self.first_user_def['name']},
             expected_exception, expected_http_code)

     def assert_user_attribute_update_failure(
@@ -220,7 +236,7 @@ class UserActionsRunner(TestRunner):
             expected_exception, expected_http_code)

     def run_user_attribute_update(self, expected_http_code=202):
-        updated_def = self.user_defs[0]
+        updated_def = self.first_user_def
         # Update the name by appending a random string to it.
         updated_name = ''.join([updated_def['name'], 'upd'])
         update_attribites = {'name': updated_name,