Merge "Break apart replication scenario tests"
commit 2a7937c4b5
@@ -21,6 +21,12 @@ from trove.tests.scenario.runners import test_runners


 GROUP = "scenario.replication_group"
+GROUP_REPL_CREATE = "scenario.repl_create_group"
+GROUP_REPL_CREATE_WAIT = "scenario.repl_create_wait_group"
+GROUP_REPL_MULTI_CREATE = "scenario.repl_multi_create_group"
+GROUP_REPL_MULTI_CREATE_WAIT = "scenario.repl_multi_create_wait_group"
+GROUP_REPL_DELETE = "scenario.repl_delete_group"
+GROUP_REPL_DELETE_WAIT = "scenario.repl_delete_wait_group"


 class ReplicationRunnerFactory(test_runners.RunnerFactory):
@@ -29,12 +35,13 @@ class ReplicationRunnerFactory(test_runners.RunnerFactory):
     _runner_cls = 'ReplicationRunner'


-@test(depends_on_groups=[instance_create_group.GROUP], groups=[GROUP])
-class ReplicationGroup(TestGroup):
-    """Test Replication functionality."""
+@test(depends_on_groups=[instance_create_group.GROUP],
+      groups=[GROUP, GROUP_REPL_CREATE])
+class ReplicationCreateGroup(TestGroup):
+    """Test Replication Create functionality."""

     def __init__(self):
-        super(ReplicationGroup, self).__init__(
+        super(ReplicationCreateGroup, self).__init__(
             ReplicationRunnerFactory.instance())

     @test
@@ -57,32 +64,27 @@ class ReplicationGroup(TestGroup):
         """Test creating a single replica."""
         self.test_runner.run_create_single_replica()

-    @test(runs_after=[create_single_replica])
-    def add_data_after_replica(self):
-        """Add data to master after initial replica is setup"""
-        self.test_runner.run_add_data_after_replica()
-
-    @test(runs_after=[add_data_after_replica])
-    def verify_replica_data_after_single(self):
-        """Verify data exists on single replica"""
-        self.test_runner.run_verify_replica_data_after_single()
-
-    @test(runs_after=[verify_replica_data_after_single])
+
+@test(depends_on_groups=[GROUP_REPL_CREATE],
+      groups=[GROUP, GROUP_REPL_CREATE_WAIT])
+class ReplicationCreateWaitGroup(TestGroup):
+    """Wait for Replication Create to complete."""
+
+    def __init__(self):
+        super(ReplicationCreateWaitGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
     def wait_for_non_affinity_master(self):
         """Wait for non-affinity master to complete."""
         self.test_runner.run_wait_for_non_affinity_master()

-    @test(runs_after=[wait_for_non_affinity_master])
+    @test(depends_on=[wait_for_non_affinity_master])
     def create_non_affinity_replica(self):
         """Test creating a non-affinity replica."""
         self.test_runner.run_create_non_affinity_replica()

-    @test(runs_after=[create_non_affinity_replica])
-    def create_multiple_replicas(self):
-        """Test creating multiple replicas."""
-        self.test_runner.run_create_multiple_replicas()
-
-    @test(runs_after=[create_multiple_replicas])
+    @test(depends_on=[create_non_affinity_replica])
     def wait_for_non_affinity_replica_fail(self):
         """Wait for non-affinity replica to fail."""
         self.test_runner.run_wait_for_non_affinity_replica_fail()
@@ -93,17 +95,71 @@ class ReplicationGroup(TestGroup):
         self.test_runner.run_delete_non_affinity_repl()

     @test(runs_after=[delete_non_affinity_repl])
+    def wait_for_single_replica(self):
+        """Wait for single replica to complete."""
+        self.test_runner.run_wait_for_single_replica()
+
+    @test(depends_on=[wait_for_single_replica])
+    def add_data_after_replica(self):
+        """Add data to master after initial replica is setup"""
+        self.test_runner.run_add_data_after_replica()
+
+    @test(depends_on=[add_data_after_replica])
+    def verify_replica_data_after_single(self):
+        """Verify data exists on single replica"""
+        self.test_runner.run_verify_replica_data_after_single()
+
+
+@test(depends_on_groups=[GROUP_REPL_CREATE_WAIT],
+      groups=[GROUP, GROUP_REPL_MULTI_CREATE])
+class ReplicationMultiCreateGroup(TestGroup):
+    """Test Replication Multi-Create functionality."""
+
+    def __init__(self):
+        super(ReplicationMultiCreateGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
+    def create_multiple_replicas(self):
+        """Test creating multiple replicas."""
+        self.test_runner.run_create_multiple_replicas()
+
+    @test(runs_after=[create_multiple_replicas])
+    def wait_for_delete_non_affinity_repl(self):
+        """Wait for the non-affinity replica to delete."""
+        self.test_runner.run_wait_for_delete_non_affinity_repl()
+
+    @test(depends_on=[wait_for_delete_non_affinity_repl])
     def delete_non_affinity_master(self):
         """Test deleting non-affinity master."""
         self.test_runner.run_delete_non_affinity_master()

-    @test(depends_on=[create_single_replica, create_multiple_replicas],
-          runs_after=[delete_non_affinity_master])
+
+@test(depends_on_groups=[GROUP_REPL_MULTI_CREATE],
+      groups=[GROUP, GROUP_REPL_MULTI_CREATE_WAIT])
+class ReplicationMultiCreateWaitGroup(TestGroup):
+    """Wait for Replication Multi-Create to complete."""
+
+    def __init__(self):
+        super(ReplicationMultiCreateWaitGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
+    def wait_for_delete_non_affinity_master(self):
+        """Wait for the non-affinity master to delete."""
+        self.test_runner.run_wait_for_delete_non_affinity_master()
+
+    @test(runs_after=[wait_for_delete_non_affinity_master])
+    def wait_for_multiple_replicas(self):
+        """Wait for multiple replicas to complete."""
+        self.test_runner.run_wait_for_multiple_replicas()
+
+    @test(depends_on=[wait_for_multiple_replicas])
     def verify_replica_data_orig(self):
         """Verify original data was transferred to replicas."""
         self.test_runner.run_verify_replica_data_orig()

-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[verify_replica_data_orig])
     def add_data_to_replicate(self):
         """Add new data to master to verify replication."""
@@ -114,45 +170,43 @@ class ReplicationGroup(TestGroup):
         """Verify new data exists on master."""
         self.test_runner.run_verify_data_to_replicate()

-    @test(depends_on=[create_single_replica, create_multiple_replicas,
-                      add_data_to_replicate],
+    @test(depends_on=[add_data_to_replicate],
           runs_after=[verify_data_to_replicate])
-    def wait_for_data_to_replicate(self):
-        """Wait to ensure that the data is replicated."""
-        self.test_runner.run_wait_for_data_to_replicate()
+    def verify_replica_data_orig(self):
+        """Verify original data was transferred to replicas."""
+        self.test_runner.run_verify_replica_data_orig()

-    @test(depends_on=[create_single_replica, create_multiple_replicas,
-                      add_data_to_replicate],
-          runs_after=[wait_for_data_to_replicate])
+    @test(depends_on=[add_data_to_replicate],
+          runs_after=[verify_replica_data_orig])
     def verify_replica_data_new(self):
         """Verify new data was transferred to replicas."""
         self.test_runner.run_verify_replica_data_new()

-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[verify_replica_data_new])
     def promote_master(self):
         """Ensure promoting master fails."""
         self.test_runner.run_promote_master()

-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[promote_master])
     def eject_replica(self):
         """Ensure ejecting non master fails."""
         self.test_runner.run_eject_replica()

-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[eject_replica])
     def eject_valid_master(self):
         """Ensure ejecting valid master fails."""
         self.test_runner.run_eject_valid_master()

-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[eject_valid_master])
     def delete_valid_master(self):
         """Ensure deleting valid master fails."""
         self.test_runner.run_delete_valid_master()

-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[delete_valid_master])
     def promote_to_replica_source(self):
         """Test promoting a replica to replica source (master)."""
@@ -163,7 +217,7 @@ class ReplicationGroup(TestGroup):
         """Verify data is still on new master."""
         self.test_runner.run_verify_replica_data_new_master()

-    @test(depends_on=[create_single_replica, create_multiple_replicas,
+    @test(depends_on=[wait_for_multiple_replicas,
                       promote_to_replica_source],
           runs_after=[verify_replica_data_new_master])
     def add_data_to_replicate2(self):
@@ -175,15 +229,9 @@ class ReplicationGroup(TestGroup):
         """Verify data exists on new master."""
         self.test_runner.run_verify_data_to_replicate2()

-    @test(depends_on=[add_data_to_replicate2],
-          runs_after=[verify_data_to_replicate2])
-    def wait_for_data_to_replicate2(self):
-        """Wait to ensure that the new data was replicated."""
-        self.test_runner.run_wait_for_data_to_replicate()
-
-    @test(depends_on=[create_single_replica, create_multiple_replicas,
+    @test(depends_on=[wait_for_multiple_replicas,
                       add_data_to_replicate2],
-          runs_after=[wait_for_data_to_replicate2])
+          runs_after=[verify_data_to_replicate2])
     def verify_replica_data_new2(self):
         """Verify data was transferred to new replicas."""
         self.test_runner.run_verify_replica_data_new2()
@@ -195,6 +243,22 @@ class ReplicationGroup(TestGroup):
         self.test_runner.run_promote_original_source()

+    @test(depends_on=[promote_original_source])
+    def add_final_data_to_replicate(self):
+        """Add final data to original master to verify switch."""
+        self.test_runner.run_add_final_data_to_replicate()
+
+    @test(depends_on=[add_final_data_to_replicate])
+    def verify_data_to_replicate_final(self):
+        """Verify final data exists on master."""
+        self.test_runner.run_verify_data_to_replicate_final()
+
+    @test(depends_on=[verify_data_to_replicate_final])
+    def verify_final_data_replicated(self):
+        """Verify final data was transferred to all replicas."""
+        self.test_runner.run_verify_final_data_replicated()
+
     @test(depends_on=[promote_original_source],
+          runs_after=[verify_final_data_replicated])
     def remove_replicated_data(self):
         """Remove replication data."""
         self.test_runner.run_remove_replicated_data()
@@ -205,8 +269,17 @@ class ReplicationGroup(TestGroup):
         """Test detaching a replica from the master."""
         self.test_runner.run_detach_replica_from_source()

-    @test(depends_on=[promote_original_source],
-          runs_after=[detach_replica_from_source])
+
+@test(depends_on_groups=[GROUP_REPL_MULTI_CREATE_WAIT],
+      groups=[GROUP, GROUP_REPL_DELETE])
+class ReplicationDeleteGroup(TestGroup):
+    """Test Replication Delete functionality."""
+
+    def __init__(self):
+        super(ReplicationDeleteGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
     def delete_detached_replica(self):
         """Test deleting the detached replica."""
         self.test_runner.run_delete_detached_replica()
@@ -216,7 +289,22 @@ class ReplicationGroup(TestGroup):
         """Test deleting all the remaining replicas."""
         self.test_runner.run_delete_all_replicas()

-    @test(runs_after=[delete_all_replicas])
+
+@test(depends_on_groups=[GROUP_REPL_DELETE],
+      groups=[GROUP, GROUP_REPL_DELETE_WAIT])
+class ReplicationDeleteWaitGroup(TestGroup):
+    """Wait for Replication Delete to complete."""
+
+    def __init__(self):
+        super(ReplicationDeleteWaitGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
+    def wait_for_delete_replicas(self):
+        """Wait for all the replicas to delete."""
+        self.test_runner.run_wait_for_delete_replicas()
+
+    @test(runs_after=[wait_for_delete_replicas])
     def test_backup_deleted(self):
         """Test that the created backup is now gone."""
         self.test_runner.run_test_backup_deleted()
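The point of the split above is the group chaining: each new group declares depends_on_groups on the previous one, so proboscis can interleave other scenario groups between a create step and its wait step instead of blocking inside one monolithic class. A minimal sketch of the same pattern follows; the group names and print statements are illustrative only, not from this patch.

    from proboscis import test, TestProgram


    @test(groups=["demo.create"])
    class CreateGroup(object):
        @test
        def create(self):
            print("kick off a long-running create")


    # Runs only after every test in "demo.create" has finished, so unrelated
    # groups can be scheduled between the create and the wait phases.
    @test(depends_on_groups=["demo.create"], groups=["demo.create_wait"])
    class CreateWaitGroup(object):
        @test
        def wait_for_create(self):
            print("poll until the create completes")


    if __name__ == '__main__':
        TestProgram().run_and_exit()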
@@ -58,8 +58,8 @@ class CassandraHelper(TestHelper):

     DATA_COLUMN_NAME = 'value'

-    def __init__(self, expected_override_name):
-        super(CassandraHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(CassandraHelper, self).__init__(expected_override_name, report)

         self._data_cache = dict()
@@ -21,8 +21,8 @@ from trove.tests.scenario.runners.test_runners import TestRunner

 class CouchdbHelper(TestHelper):

-    def __init__(self, expected_override_name):
-        super(CouchdbHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(CouchdbHelper, self).__init__(expected_override_name, report)
         self._data_cache = dict()
         self.field_name = 'ff-%s'
         self.database = 'firstdb'
@@ -18,5 +18,5 @@ from trove.tests.scenario.helpers.mysql_helper import MysqlHelper

 class MariadbHelper(MysqlHelper):

-    def __init__(self, expected_override_name):
-        super(MariadbHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(MariadbHelper, self).__init__(expected_override_name, report)
@@ -18,8 +18,8 @@ from trove.tests.scenario.helpers.test_helper import TestHelper

 class MongodbHelper(TestHelper):

-    def __init__(self, expected_override_name):
-        super(MongodbHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(MongodbHelper, self).__init__(expected_override_name, report)

     def get_valid_database_definitions(self):
         return [{"name": 'db1'}, {"name": 'db2'}, {'name': 'db3'}]
@@ -18,8 +18,9 @@ from trove.tests.scenario.helpers.sql_helper import SqlHelper

 class MysqlHelper(SqlHelper):

-    def __init__(self, expected_override_name):
-        super(MysqlHelper, self).__init__(expected_override_name, 'mysql')
+    def __init__(self, expected_override_name, report):
+        super(MysqlHelper, self).__init__(expected_override_name, report,
+                                          'mysql')

     def get_helper_credentials(self):
         return {'name': 'lite', 'password': 'litepass', 'database': 'firstdb'}
@@ -18,5 +18,5 @@ from trove.tests.scenario.helpers.mysql_helper import MysqlHelper

 class PerconaHelper(MysqlHelper):

-    def __init__(self, expected_override_name):
-        super(PerconaHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(PerconaHelper, self).__init__(expected_override_name, report)
@@ -18,8 +18,8 @@ from trove.tests.scenario.helpers.sql_helper import SqlHelper

 class PostgresqlHelper(SqlHelper):

-    def __init__(self, expected_override_name):
-        super(PostgresqlHelper, self).__init__(expected_override_name,
+    def __init__(self, expected_override_name, report):
+        super(PostgresqlHelper, self).__init__(expected_override_name, report,
                                                'postgresql')

     @property
@@ -18,5 +18,5 @@ from trove.tests.scenario.helpers.mysql_helper import MysqlHelper

 class PxcHelper(MysqlHelper):

-    def __init__(self, expected_override_name):
-        super(PxcHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(PxcHelper, self).__init__(expected_override_name, report)
@@ -22,8 +22,8 @@ from trove.tests.scenario.runners.test_runners import TestRunner

 class RedisHelper(TestHelper):

-    def __init__(self, expected_override_name):
-        super(RedisHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(RedisHelper, self).__init__(expected_override_name, report)

         self.key_patterns = ['user_a:%s', 'user_b:%s']
         self.value_pattern = 'id:%s'
@@ -27,8 +27,8 @@ class SqlHelper(TestHelper):

     DATA_COLUMN_NAME = 'value'

-    def __init__(self, expected_override_name, protocol, port=None):
-        super(SqlHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report, protocol, port=None):
+        super(SqlHelper, self).__init__(expected_override_name, report)

         self.protocol = protocol
         self.port = port
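All of the helper constructors above gain the same second parameter: the runner's reporter is now handed to every datastore helper. A hypothetical out-of-tree helper would follow the same shape; the class name and log message below are illustrative, not part of the patch.

    from trove.tests.scenario.helpers.test_helper import TestHelper


    class CustomdbHelper(TestHelper):

        def __init__(self, expected_override_name, report):
            super(CustomdbHelper, self).__init__(expected_override_name, report)
            # The reporter passed in by the runner factory is now available
            # for datastore-specific diagnostics.
            self.report.log("CustomdbHelper initialized")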
@@ -38,11 +38,13 @@ class DataType(Enum):
     tiny = 3
     # another tiny dataset (also for replication propagation)
     tiny2 = 4
+    # a third tiny dataset (also for replication propagation)
+    tiny3 = 5
     # small amount of data (this can be added to each instance
     # after creation, for example).
-    small = 5
+    small = 6
     # large data, enough to make creating a backup take 20s or more.
-    large = 6
+    large = 7


 class TestHelper(object):
@@ -67,7 +69,7 @@ class TestHelper(object):
     # actual data manipulation work.
     DT_ACTUAL = 'actual'

-    def __init__(self, expected_override_name):
+    def __init__(self, expected_override_name, report):
         """Initialize the helper class by creating a number of stub
         functions that each datastore specific class can chose to
         override. Basically, the functions are of the form:
@@ -86,6 +88,7 @@ class TestHelper(object):
         super(TestHelper, self).__init__()

         self._expected_override_name = expected_override_name
+        self.report = report

         # For building data access functions
         # name/fn pairs for each action
@@ -114,6 +117,9 @@ class TestHelper(object):
             DataType.tiny2.name: {
                 self.DATA_START: 2000,
                 self.DATA_SIZE: 100},
+            DataType.tiny3.name: {
+                self.DATA_START: 3000,
+                self.DATA_SIZE: 100},
             DataType.small.name: {
                 self.DATA_START: 10000,
                 self.DATA_SIZE: 1000},
@@ -180,13 +186,25 @@ class TestHelper(object):
     ##############
     def add_data(self, data_type, host, *args, **kwargs):
         """Adds data of type 'data_type' to the database. Descendant
-        classes should implement a function for each DataType value
-        of the form 'add_{DataType.name}_data' - for example:
-        'add_tiny_data'
-        'add_small_data'
-        ...
-        Since this method may be called multiple times, the implemented
-        'add_*_data' functions should be idempotent.
+        classes should implement a function 'add_actual_data' that has the
+        following signature:
+        def add_actual_data(
+            self,        # standard self reference
+            data_label,  # label used to identify the 'type' to add
+            data_start,  # a start count
+            data_size,   # a size to use
+            host,        # the host to add the data to
+            *args,       # for possible future expansion
+            **kwargs     # for possible future expansion
+        ):
+        The data_label could be used to create a database or a table if the
+        datastore supports that. The data_start and data_size values are
+        designed not to overlap, such that all the data could be stored
+        in a single namespace (for example, creating ids from data_start
+        to data_start + data_size).
+
+        Since this method may be called multiple times, the
+        'add_actual_data' function should be idempotent.
         """
         self._perform_data_action(self.FN_ADD, data_type.name, host,
                                   *args, **kwargs)
@@ -203,9 +221,27 @@ class TestHelper(object):
         datastore. This can be done by testing edge cases, and possibly
         some random elements within the set. See
         instructions for 'add_data' for implementation guidance.
+        By default, the verification is attempted 10 times, sleeping for 3
+        seconds between each attempt. This can be controlled by the
+        retry_count and retry_sleep kwarg values.
         """
-        self._perform_data_action(self.FN_VERIFY, data_type.name, host,
-                                  *args, **kwargs)
+        retry_count = kwargs.pop('retry_count', 10) or 0
+        retry_sleep = kwargs.pop('retry_sleep', 3) or 0
+        attempts = -1
+        while True:
+            attempts += 1
+            try:
+                self._perform_data_action(self.FN_VERIFY, data_type.name, host,
+                                          *args, **kwargs)
+                break
+            except Exception as ex:
+                self.report.log("Attempt %d to verify data type %s failed\n%s"
                                % (attempts, data_type.name, ex))
+                if attempts > retry_count:
+                    raise
+                self.report.log("Trying again (after %d second sleep)" %
+                                retry_sleep)
+                sleep(retry_sleep)

     def _perform_data_action(self, fn_type, fn_name, host, *args, **kwargs):
         fns = self._data_fns[fn_type]
@@ -285,15 +321,6 @@ class TestHelper(object):
             if name in fns:
                 fns[name] = fn

-    #####################
-    # Replication related
-    #####################
-    def wait_for_replicas(self):
-        """Wait for data to propagate to all the replicas. Datastore
-        specific overrides could increase (or decrease) this delay.
-        """
-        sleep(30)
-
     #######################
     # Database/User related
     #######################
@@ -20,8 +20,9 @@ from trove.tests.scenario.helpers.sql_helper import SqlHelper

 class VerticaHelper(SqlHelper):

-    def __init__(self, expected_override_name):
-        super(VerticaHelper, self).__init__(expected_override_name, 'vertica')
+    def __init__(self, expected_override_name, report):
+        super(VerticaHelper, self).__init__(expected_override_name, report,
+                                            'vertica')

     def get_helper_credentials(self):
         return {'name': 'lite', 'password': 'litepass', 'database': 'lite'}
@@ -190,6 +190,7 @@ class InstanceCreateRunner(TestRunner):
             self.report.log("Using an existing instance: %s" % instance.id)
             self.assert_equal(expected_states[-1], instance.status,
                               "Given instance is in a bad state.")
+            instance_info.name = instance.name
         else:
             self.report.log("Creating a new instance.")
             instance = self.auth_client.instances.create(
@@ -42,7 +42,7 @@ class ReplicationRunner(TestRunner):

     def assert_add_replication_data(self, data_type, host):
         """In order for this to work, the corresponding datastore
-        'helper' class should implement the 'add_<data_type>_data' method.
+        'helper' class should implement the 'add_actual_data' method.
         """
         self.test_helper.add_data(data_type, host)
         self.used_data_sets.add(data_type)
@@ -55,7 +55,7 @@ class ReplicationRunner(TestRunner):

     def assert_verify_replication_data(self, data_type, host):
         """In order for this to work, the corresponding datastore
-        'helper' class should implement the 'verify_<data_type>_data' method.
+        'helper' class should implement the 'verify_actual_data' method.
         """
         self.test_helper.verify_data(data_type, host)
@@ -69,18 +69,14 @@ class ReplicationRunner(TestRunner):
             locality='anti-affinity').id
         self.assert_client_code(expected_http_code)

-    def run_create_single_replica(self, expected_states=['BUILD', 'ACTIVE'],
-                                  expected_http_code=200):
-        master_id = self.instance_info.id
+    def run_create_single_replica(self, expected_http_code=200):
         self.master_backup_count = len(
-            self.auth_client.instances.backups(master_id))
+            self.auth_client.instances.backups(self.master_id))
         self.replica_1_id = self.assert_replica_create(
-            master_id, 'replica1', 1, expected_states, expected_http_code)
-        self.replica_1_host = self.get_instance_host(self.replica_1_id)
+            self.master_id, 'replica1', 1, expected_http_code)

     def assert_replica_create(
-            self, master_id, replica_name, replica_count,
-            expected_states, expected_http_code):
+            self, master_id, replica_name, replica_count, expected_http_code):
         replica = self.auth_client.instances.create(
             self.instance_info.name + replica_name,
             self.instance_info.dbaas_flavor_href,
@@ -89,14 +85,15 @@ class ReplicationRunner(TestRunner):
             datastore_version=self.instance_info.dbaas_datastore_version,
             nics=self.instance_info.nics,
             replica_count=replica_count)
-        replica_id = replica.id
         self.assert_client_code(expected_http_code)
+        return replica.id

-        self.assert_instance_action(replica_id, expected_states,
-                                    expected_http_code)
-        self._assert_is_master(master_id, [replica_id])
-        self._assert_is_replica(replica_id, master_id)
-        self._assert_locality(master_id)
-        return replica_id
+    def run_wait_for_single_replica(self, expected_states=['BUILD', 'ACTIVE']):
+        self.assert_instance_action(self.replica_1_id, expected_states)
+        self._assert_is_master(self.master_id, [self.replica_1_id])
+        self._assert_is_replica(self.replica_1_id, self.master_id)
+        self._assert_locality(self.master_id)
+        self.replica_1_host = self.get_instance_host(self.replica_1_id)

     def _assert_is_master(self, instance_id, replica_ids):
         instance = self.get_instance(instance_id)
@@ -148,43 +145,49 @@ class ReplicationRunner(TestRunner):
             replica_count=1).id
         self.assert_client_code(expected_http_code)

-    def run_create_multiple_replicas(self, expected_states=['BUILD', 'ACTIVE'],
-                                     expected_http_code=200):
-        master_id = self.instance_info.id
+    def run_create_multiple_replicas(self, expected_http_code=200):
         self.replica_2_id = self.assert_replica_create(
-            master_id, 'replica2', 2, expected_states, expected_http_code)
+            self.master_id, 'replica2', 2, expected_http_code)
+
+    def run_wait_for_multiple_replicas(
+            self, expected_states=['BUILD', 'ACTIVE']):
+        replica_ids = self._get_replica_set(self.master_id)
+        self.assert_instance_action(replica_ids, expected_states)
+        self._assert_is_master(self.master_id, replica_ids)
+        for replica_id in replica_ids:
+            self._assert_is_replica(replica_id, self.master_id)
+        self._assert_locality(self.master_id)

     def run_wait_for_non_affinity_replica_fail(
-            self, expected_states=['BUILD', 'FAILED']):
+            self, expected_states=['BUILD', 'ERROR']):
         self._assert_instance_states(self.non_affinity_repl_id,
                                      expected_states,
                                      fast_fail_status=['ACTIVE'])

-    def run_delete_non_affinity_repl(self,
-                                     expected_last_state=['SHUTDOWN'],
-                                     expected_http_code=202):
+    def run_delete_non_affinity_repl(self, expected_http_code=202):
         self.assert_delete_instances(
-            self.non_affinity_repl_id,
-            expected_last_state=expected_last_state,
-            expected_http_code=expected_http_code)
+            self.non_affinity_repl_id, expected_http_code=expected_http_code)

-    def assert_delete_instances(
-            self, instance_ids, expected_last_state, expected_http_code):
+    def assert_delete_instances(self, instance_ids, expected_http_code):
         instance_ids = (instance_ids if utils.is_collection(instance_ids)
                         else [instance_ids])
         for instance_id in instance_ids:
             self.auth_client.instances.delete(instance_id)
             self.assert_client_code(expected_http_code)

-        self.assert_all_gone(instance_ids, expected_last_state)
+    def run_wait_for_delete_non_affinity_repl(
+            self, expected_last_status=['SHUTDOWN']):
+        self.assert_all_gone([self.non_affinity_repl_id],
+                             expected_last_status=expected_last_status)

-    def run_delete_non_affinity_master(self,
-                                       expected_last_state=['SHUTDOWN'],
-                                       expected_http_code=202):
+    def run_delete_non_affinity_master(self, expected_http_code=202):
         self.assert_delete_instances(
-            self.non_affinity_master_id,
-            expected_last_state=expected_last_state,
-            expected_http_code=expected_http_code)
+            self.non_affinity_master_id, expected_http_code=expected_http_code)
+
+    def run_wait_for_delete_non_affinity_master(
+            self, expected_last_status=['SHUTDOWN']):
+        self.assert_all_gone([self.non_affinity_master_id],
+                             expected_last_status=expected_last_status)
         self.assert_server_group_gone(self.non_affinity_srv_grp_id)

     def run_add_data_to_replicate(self):
@@ -193,9 +196,6 @@ class ReplicationRunner(TestRunner):
     def run_verify_data_to_replicate(self):
         self.assert_verify_replication_data(DataType.tiny, self.master_host)

-    def run_wait_for_data_to_replicate(self):
-        self.test_helper.wait_for_replicas()
-
     def run_verify_replica_data_orig(self):
         self.assert_verify_replica_data(self.instance_info.id, DataType.small)

@@ -292,6 +292,15 @@ class ReplicationRunner(TestRunner):
             self.instance_info.id, self.replica_1_id, expected_states,
             expected_http_code)

+    def run_add_final_data_to_replicate(self):
+        self.assert_add_replication_data(DataType.tiny3, self.master_host)
+
+    def run_verify_data_to_replicate_final(self):
+        self.assert_verify_replication_data(DataType.tiny3, self.master_host)
+
+    def run_verify_final_data_replicated(self):
+        self.assert_verify_replica_data(self.master_id, DataType.tiny3)
+
     def run_remove_replicated_data(self):
         self.assert_remove_replicated_data(self.master_host)

@@ -343,25 +352,26 @@ class ReplicationRunner(TestRunner):
         else:
             self.fail("Unexpected replica_of ID.")

-    def run_delete_detached_replica(self,
-                                    expected_last_state=['SHUTDOWN'],
-                                    expected_http_code=202):
+    def run_delete_detached_replica(self, expected_http_code=202):
         self.assert_delete_instances(
-            self.replica_1_id, expected_last_state=expected_last_state,
-            expected_http_code=expected_http_code)
+            self.replica_1_id, expected_http_code=expected_http_code)

-    def run_delete_all_replicas(self, expected_last_state=['SHUTDOWN'],
-                                expected_http_code=202):
+    def run_delete_all_replicas(self, expected_http_code=202):
         self.assert_delete_all_replicas(
-            self.instance_info.id, expected_last_state,
-            expected_http_code)
+            self.instance_info.id, expected_http_code)

     def assert_delete_all_replicas(
-            self, master_id, expected_last_state, expected_http_code):
+            self, master_id, expected_http_code):
         self.report.log("Deleting a replication set: %s" % master_id)
         replica_ids = self._get_replica_set(master_id)
-        self.assert_delete_instances(replica_ids, expected_last_state,
-                                     expected_http_code)
+        self.assert_delete_instances(replica_ids, expected_http_code)
+
+    def run_wait_for_delete_replicas(
+            self, expected_last_status=['SHUTDOWN']):
+        replica_ids = self._get_replica_set(self.master_id)
+        replica_ids.update(self.replica_1_id)
+        self.assert_all_gone(replica_ids,
+                             expected_last_status=expected_last_status)

     def run_test_backup_deleted(self):
         backup = self.auth_client.instances.backups(self.master_id)
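The same split runs through the runner changes above: a run_delete_* method now only issues the DELETE and checks the HTTP code, while a separate run_wait_for_delete_* method polls until the instances are actually gone. A minimal sketch of that issue-then-wait pattern follows; the client object and its methods are stand-ins, not trove APIs.

    import time


    def delete_instances(client, instance_ids, expected_http_code=202):
        """Fire off the deletes without blocking on completion."""
        for instance_id in instance_ids:
            client.delete(instance_id)                       # stand-in call
            assert client.last_http_code == expected_http_code  # stand-in attr


    def wait_for_delete(client, instance_ids, timeout=300, poll_interval=5):
        """Separate step: poll until every instance is really gone."""
        deadline = time.time() + timeout
        remaining = set(instance_ids)
        while remaining and time.time() < deadline:
            remaining = {i for i in remaining if client.exists(i)}  # stand-in
            if remaining:
                time.sleep(poll_interval)
        assert not remaining, "instances still present: %s" % remaining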
@@ -72,17 +72,19 @@ class RunnerFactory(object):
             runner_module_name, class_prefix, runner_base_name,
             TEST_RUNNERS_NS)
         runner = runner_cls(*args, **kwargs)
-        runner._test_helper = cls._get_helper()
+        runner._test_helper = cls._get_helper(runner.report)
         return runner

     @classmethod
-    def _get_helper(cls):
+    def _get_helper(cls, report):
         class_prefix = cls._get_test_datastore()
         helper_cls = cls._load_dynamic_class(
             TEST_HELPER_MODULE_NAME, class_prefix,
             TEST_HELPER_BASE_NAME, TEST_HELPERS_NS)
-        return helper_cls(cls._build_class_name(
-            class_prefix, TEST_HELPER_BASE_NAME, strip_test=True))
+        return helper_cls(
+            cls._build_class_name(class_prefix,
                                   TEST_HELPER_BASE_NAME, strip_test=True),
+            report)

     @classmethod
     def _get_test_datastore(cls):
@@ -224,6 +226,13 @@ class TestRunner(object):

     @classmethod
+    def assert_is_sublist(cls, sub_list, full_list, message=None):
+        if not message:
+            message = 'Unexpected sublist'
+        try:
+            message += ": sub_list '%s' (full_list '%s')." % (
+                sub_list, full_list)
+        except TypeError:
+            pass
+        return cls.assert_true(set(sub_list).issubset(full_list), message)
+
+    @classmethod
@@ -396,7 +405,7 @@ class TestRunner(object):
         return self.has_env_flag(self.DO_NOT_DELETE_INSTANCE_FLAG)

     def assert_instance_action(
-            self, instance_ids, expected_states, expected_http_code):
+            self, instance_ids, expected_states, expected_http_code=None):
         self.assert_client_code(expected_http_code)
         if expected_states:
             self.assert_all_instance_states(
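Two small API notes fall out of this last file: assert_is_sublist gives runners a readable set-containment check, and assert_instance_action can now be called without an HTTP code when only state transitions matter. A hedged usage sketch follows; the runner method and the list_all_instance_ids call are made-up stand-ins, not trove code.

    class ExampleRunner(TestRunner):

        def run_check_replicas(self, instance_ids,
                               expected_states=['BUILD', 'ACTIVE']):
            # New helper: fail with a descriptive message unless every given
            # id appears in the full listing (stand-in data source).
            self.assert_is_sublist(instance_ids, self.list_all_instance_ids())

            # expected_http_code now defaults to None, so a pure "wait" step
            # can reuse this assertion without a preceding API call.
            self.assert_instance_action(instance_ids, expected_states)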