Merge "Break apart replication scenario tests"
commit 2a7937c4b5
@@ -21,6 +21,12 @@ from trove.tests.scenario.runners import test_runners
 
 
 GROUP = "scenario.replication_group"
+GROUP_REPL_CREATE = "scenario.repl_create_group"
+GROUP_REPL_CREATE_WAIT = "scenario.repl_create_wait_group"
+GROUP_REPL_MULTI_CREATE = "scenario.repl_multi_create_group"
+GROUP_REPL_MULTI_CREATE_WAIT = "scenario.repl_multi_create_wait_group"
+GROUP_REPL_DELETE = "scenario.repl_delete_group"
+GROUP_REPL_DELETE_WAIT = "scenario.repl_delete_wait_group"
 
 
 class ReplicationRunnerFactory(test_runners.RunnerFactory):
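
The six new GROUP_REPL_* constants split the old monolithic replication group into a create / create-wait / multi-create / multi-create-wait / delete / delete-wait pipeline; each *_WAIT group declares depends_on_groups on its predecessor. A minimal standalone proboscis sketch of that chaining (illustrative only, not Trove's test harness):

    # Standalone sketch of proboscis group chaining (assumed usage, not part
    # of this patch): a *_wait group only starts after its create group has
    # finished, which is how the new GROUP_REPL_* constants are wired.
    from proboscis import test, TestProgram


    @test(groups=["scenario.repl_create_group"])
    class CreateStep(object):
        @test
        def kick_off_create(self):
            pass  # issue the (asynchronous) create calls here


    @test(groups=["scenario.repl_create_wait_group"],
          depends_on_groups=["scenario.repl_create_group"])
    class CreateWaitStep(object):
        @test
        def wait_for_create(self):
            pass  # poll until the created resources go ACTIVE


    if __name__ == "__main__":
        TestProgram().run_and_exit()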
@@ -29,12 +35,13 @@ class ReplicationRunnerFactory(test_runners.RunnerFactory):
     _runner_cls = 'ReplicationRunner'
 
 
-@test(depends_on_groups=[instance_create_group.GROUP], groups=[GROUP])
-class ReplicationGroup(TestGroup):
-    """Test Replication functionality."""
+@test(depends_on_groups=[instance_create_group.GROUP],
+      groups=[GROUP, GROUP_REPL_CREATE])
+class ReplicationCreateGroup(TestGroup):
+    """Test Replication Create functionality."""
 
     def __init__(self):
-        super(ReplicationGroup, self).__init__(
+        super(ReplicationCreateGroup, self).__init__(
             ReplicationRunnerFactory.instance())
 
     @test
@@ -57,32 +64,27 @@ class ReplicationGroup(TestGroup):
         """Test creating a single replica."""
         self.test_runner.run_create_single_replica()
 
-    @test(runs_after=[create_single_replica])
-    def add_data_after_replica(self):
-        """Add data to master after initial replica is setup"""
-        self.test_runner.run_add_data_after_replica()
 
-    @test(runs_after=[add_data_after_replica])
-    def verify_replica_data_after_single(self):
-        """Verify data exists on single replica"""
-        self.test_runner.run_verify_replica_data_after_single()
+@test(depends_on_groups=[GROUP_REPL_CREATE],
+      groups=[GROUP, GROUP_REPL_CREATE_WAIT])
+class ReplicationCreateWaitGroup(TestGroup):
+    """Wait for Replication Create to complete."""
 
-    @test(runs_after=[verify_replica_data_after_single])
+    def __init__(self):
+        super(ReplicationCreateWaitGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
     def wait_for_non_affinity_master(self):
         """Wait for non-affinity master to complete."""
         self.test_runner.run_wait_for_non_affinity_master()
 
-    @test(runs_after=[wait_for_non_affinity_master])
+    @test(depends_on=[wait_for_non_affinity_master])
     def create_non_affinity_replica(self):
         """Test creating a non-affinity replica."""
         self.test_runner.run_create_non_affinity_replica()
 
-    @test(runs_after=[create_non_affinity_replica])
-    def create_multiple_replicas(self):
-        """Test creating multiple replicas."""
-        self.test_runner.run_create_multiple_replicas()
-
-    @test(runs_after=[create_multiple_replicas])
+    @test(depends_on=[create_non_affinity_replica])
     def wait_for_non_affinity_replica_fail(self):
         """Wait for non-affinity replica to fail."""
         self.test_runner.run_wait_for_non_affinity_replica_fail()
@@ -93,17 +95,71 @@ class ReplicationGroup(TestGroup):
         self.test_runner.run_delete_non_affinity_repl()
 
     @test(runs_after=[delete_non_affinity_repl])
+    def wait_for_single_replica(self):
+        """Wait for single replica to complete."""
+        self.test_runner.run_wait_for_single_replica()
+
+    @test(depends_on=[wait_for_single_replica])
+    def add_data_after_replica(self):
+        """Add data to master after initial replica is setup"""
+        self.test_runner.run_add_data_after_replica()
+
+    @test(depends_on=[add_data_after_replica])
+    def verify_replica_data_after_single(self):
+        """Verify data exists on single replica"""
+        self.test_runner.run_verify_replica_data_after_single()
+
+
+@test(depends_on_groups=[GROUP_REPL_CREATE_WAIT],
+      groups=[GROUP, GROUP_REPL_MULTI_CREATE])
+class ReplicationMultiCreateGroup(TestGroup):
+    """Test Replication Multi-Create functionality."""
+
+    def __init__(self):
+        super(ReplicationMultiCreateGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
+    def create_multiple_replicas(self):
+        """Test creating multiple replicas."""
+        self.test_runner.run_create_multiple_replicas()
+
+    @test(runs_after=[create_multiple_replicas])
+    def wait_for_delete_non_affinity_repl(self):
+        """Wait for the non-affinity replica to delete."""
+        self.test_runner.run_wait_for_delete_non_affinity_repl()
+
+    @test(depends_on=[wait_for_delete_non_affinity_repl])
     def delete_non_affinity_master(self):
         """Test deleting non-affinity master."""
         self.test_runner.run_delete_non_affinity_master()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas],
-          runs_after=[delete_non_affinity_master])
+
+@test(depends_on_groups=[GROUP_REPL_MULTI_CREATE],
+      groups=[GROUP, GROUP_REPL_MULTI_CREATE_WAIT])
+class ReplicationMultiCreateWaitGroup(TestGroup):
+    """Wait for Replication Multi-Create to complete."""
+
+    def __init__(self):
+        super(ReplicationMultiCreateWaitGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
+    def wait_for_delete_non_affinity_master(self):
+        """Wait for the non-affinity master to delete."""
+        self.test_runner.run_wait_for_delete_non_affinity_master()
+
+    @test(runs_after=[wait_for_delete_non_affinity_master])
+    def wait_for_multiple_replicas(self):
+        """Wait for multiple replicas to complete."""
+        self.test_runner.run_wait_for_multiple_replicas()
+
+    @test(depends_on=[wait_for_multiple_replicas])
     def verify_replica_data_orig(self):
         """Verify original data was transferred to replicas."""
         self.test_runner.run_verify_replica_data_orig()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[verify_replica_data_orig])
     def add_data_to_replicate(self):
         """Add new data to master to verify replication."""
@@ -114,45 +170,43 @@ class ReplicationGroup(TestGroup):
         """Verify new data exists on master."""
         self.test_runner.run_verify_data_to_replicate()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas,
-                      add_data_to_replicate],
+    @test(depends_on=[add_data_to_replicate],
           runs_after=[verify_data_to_replicate])
-    def wait_for_data_to_replicate(self):
-        """Wait to ensure that the data is replicated."""
-        self.test_runner.run_wait_for_data_to_replicate()
+    def verify_replica_data_orig(self):
+        """Verify original data was transferred to replicas."""
+        self.test_runner.run_verify_replica_data_orig()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas,
-                      add_data_to_replicate],
-          runs_after=[wait_for_data_to_replicate])
+    @test(depends_on=[add_data_to_replicate],
+          runs_after=[verify_replica_data_orig])
     def verify_replica_data_new(self):
         """Verify new data was transferred to replicas."""
         self.test_runner.run_verify_replica_data_new()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[verify_replica_data_new])
     def promote_master(self):
         """Ensure promoting master fails."""
         self.test_runner.run_promote_master()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
           runs_after=[promote_master])
     def eject_replica(self):
         """Ensure ejecting non master fails."""
         self.test_runner.run_eject_replica()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
          runs_after=[eject_replica])
     def eject_valid_master(self):
         """Ensure ejecting valid master fails."""
         self.test_runner.run_eject_valid_master()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
          runs_after=[eject_valid_master])
     def delete_valid_master(self):
         """Ensure deleting valid master fails."""
         self.test_runner.run_delete_valid_master()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas],
+    @test(depends_on=[wait_for_multiple_replicas],
          runs_after=[delete_valid_master])
     def promote_to_replica_source(self):
         """Test promoting a replica to replica source (master)."""
@@ -163,7 +217,7 @@ class ReplicationGroup(TestGroup):
         """Verify data is still on new master."""
         self.test_runner.run_verify_replica_data_new_master()
 
-    @test(depends_on=[create_single_replica, create_multiple_replicas,
+    @test(depends_on=[wait_for_multiple_replicas,
                       promote_to_replica_source],
           runs_after=[verify_replica_data_new_master])
     def add_data_to_replicate2(self):
@@ -175,15 +229,9 @@ class ReplicationGroup(TestGroup):
         """Verify data exists on new master."""
         self.test_runner.run_verify_data_to_replicate2()
 
-    @test(depends_on=[add_data_to_replicate2],
-          runs_after=[verify_data_to_replicate2])
-    def wait_for_data_to_replicate2(self):
-        """Wait to ensure that the new data was replicated."""
-        self.test_runner.run_wait_for_data_to_replicate()
-
-    @test(depends_on=[create_single_replica, create_multiple_replicas,
+    @test(depends_on=[wait_for_multiple_replicas,
                       add_data_to_replicate2],
-          runs_after=[wait_for_data_to_replicate2])
+          runs_after=[verify_data_to_replicate2])
     def verify_replica_data_new2(self):
         """Verify data was transferred to new replicas."""
         self.test_runner.run_verify_replica_data_new2()
@@ -195,6 +243,22 @@ class ReplicationGroup(TestGroup):
         self.test_runner.run_promote_original_source()
 
     @test(depends_on=[promote_original_source])
+    def add_final_data_to_replicate(self):
+        """Add final data to original master to verify switch."""
+        self.test_runner.run_add_final_data_to_replicate()
+
+    @test(depends_on=[add_final_data_to_replicate])
+    def verify_data_to_replicate_final(self):
+        """Verify final data exists on master."""
+        self.test_runner.run_verify_data_to_replicate_final()
+
+    @test(depends_on=[verify_data_to_replicate_final])
+    def verify_final_data_replicated(self):
+        """Verify final data was transferred to all replicas."""
+        self.test_runner.run_verify_final_data_replicated()
+
+    @test(depends_on=[promote_original_source],
+          runs_after=[verify_final_data_replicated])
     def remove_replicated_data(self):
         """Remove replication data."""
         self.test_runner.run_remove_replicated_data()
@@ -205,8 +269,17 @@ class ReplicationGroup(TestGroup):
         """Test detaching a replica from the master."""
         self.test_runner.run_detach_replica_from_source()
 
-    @test(depends_on=[promote_original_source],
-          runs_after=[detach_replica_from_source])
+
+@test(depends_on_groups=[GROUP_REPL_MULTI_CREATE_WAIT],
+      groups=[GROUP, GROUP_REPL_DELETE])
+class ReplicationDeleteGroup(TestGroup):
+    """Test Replication Delete functionality."""
+
+    def __init__(self):
+        super(ReplicationDeleteGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
     def delete_detached_replica(self):
         """Test deleting the detached replica."""
         self.test_runner.run_delete_detached_replica()
@@ -216,7 +289,22 @@ class ReplicationGroup(TestGroup):
         """Test deleting all the remaining replicas."""
         self.test_runner.run_delete_all_replicas()
 
-    @test(runs_after=[delete_all_replicas])
+
+@test(depends_on_groups=[GROUP_REPL_DELETE],
+      groups=[GROUP, GROUP_REPL_DELETE_WAIT])
+class ReplicationDeleteWaitGroup(TestGroup):
+    """Wait for Replication Delete to complete."""
+
+    def __init__(self):
+        super(ReplicationDeleteWaitGroup, self).__init__(
+            ReplicationRunnerFactory.instance())
+
+    @test
+    def wait_for_delete_replicas(self):
+        """Wait for all the replicas to delete."""
+        self.test_runner.run_wait_for_delete_replicas()
+
+    @test(runs_after=[wait_for_delete_replicas])
     def test_backup_deleted(self):
         """Test that the created backup is now gone."""
         self.test_runner.run_test_backup_deleted()
@@ -58,8 +58,8 @@ class CassandraHelper(TestHelper):
 
     DATA_COLUMN_NAME = 'value'
 
-    def __init__(self, expected_override_name):
-        super(CassandraHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(CassandraHelper, self).__init__(expected_override_name, report)
 
         self._data_cache = dict()
 
@@ -21,8 +21,8 @@ from trove.tests.scenario.runners.test_runners import TestRunner
 
 class CouchdbHelper(TestHelper):
 
-    def __init__(self, expected_override_name):
-        super(CouchdbHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(CouchdbHelper, self).__init__(expected_override_name, report)
         self._data_cache = dict()
         self.field_name = 'ff-%s'
         self.database = 'firstdb'
@@ -18,5 +18,5 @@ from trove.tests.scenario.helpers.mysql_helper import MysqlHelper
 
 class MariadbHelper(MysqlHelper):
 
-    def __init__(self, expected_override_name):
-        super(MariadbHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(MariadbHelper, self).__init__(expected_override_name, report)
@@ -18,8 +18,8 @@ from trove.tests.scenario.helpers.test_helper import TestHelper
 
 class MongodbHelper(TestHelper):
 
-    def __init__(self, expected_override_name):
-        super(MongodbHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(MongodbHelper, self).__init__(expected_override_name, report)
 
     def get_valid_database_definitions(self):
         return [{"name": 'db1'}, {"name": 'db2'}, {'name': 'db3'}]
@@ -18,8 +18,9 @@ from trove.tests.scenario.helpers.sql_helper import SqlHelper
 
 class MysqlHelper(SqlHelper):
 
-    def __init__(self, expected_override_name):
-        super(MysqlHelper, self).__init__(expected_override_name, 'mysql')
+    def __init__(self, expected_override_name, report):
+        super(MysqlHelper, self).__init__(expected_override_name, report,
+                                          'mysql')
 
     def get_helper_credentials(self):
         return {'name': 'lite', 'password': 'litepass', 'database': 'firstdb'}
@@ -18,5 +18,5 @@ from trove.tests.scenario.helpers.mysql_helper import MysqlHelper
 
 class PerconaHelper(MysqlHelper):
 
-    def __init__(self, expected_override_name):
-        super(PerconaHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(PerconaHelper, self).__init__(expected_override_name, report)
@@ -18,8 +18,8 @@ from trove.tests.scenario.helpers.sql_helper import SqlHelper
 
 class PostgresqlHelper(SqlHelper):
 
-    def __init__(self, expected_override_name):
-        super(PostgresqlHelper, self).__init__(expected_override_name,
+    def __init__(self, expected_override_name, report):
+        super(PostgresqlHelper, self).__init__(expected_override_name, report,
                                                'postgresql')
 
     @property
@@ -18,5 +18,5 @@ from trove.tests.scenario.helpers.mysql_helper import MysqlHelper
 
 class PxcHelper(MysqlHelper):
 
-    def __init__(self, expected_override_name):
-        super(PxcHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(PxcHelper, self).__init__(expected_override_name, report)
@@ -22,8 +22,8 @@ from trove.tests.scenario.runners.test_runners import TestRunner
 
 class RedisHelper(TestHelper):
 
-    def __init__(self, expected_override_name):
-        super(RedisHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report):
+        super(RedisHelper, self).__init__(expected_override_name, report)
 
         self.key_patterns = ['user_a:%s', 'user_b:%s']
         self.value_pattern = 'id:%s'
@@ -27,8 +27,8 @@ class SqlHelper(TestHelper):
 
     DATA_COLUMN_NAME = 'value'
 
-    def __init__(self, expected_override_name, protocol, port=None):
-        super(SqlHelper, self).__init__(expected_override_name)
+    def __init__(self, expected_override_name, report, protocol, port=None):
+        super(SqlHelper, self).__init__(expected_override_name, report)
 
         self.protocol = protocol
         self.port = port
@@ -38,11 +38,13 @@ class DataType(Enum):
     tiny = 3
     # another tiny dataset (also for replication propagation)
     tiny2 = 4
+    # a third tiny dataset (also for replication propagation)
+    tiny3 = 5
     # small amount of data (this can be added to each instance
     # after creation, for example).
-    small = 5
+    small = 6
     # large data, enough to make creating a backup take 20s or more.
-    large = 6
+    large = 7
 
 
 class TestHelper(object):
@@ -67,7 +69,7 @@ class TestHelper(object):
     # actual data manipulation work.
     DT_ACTUAL = 'actual'
 
-    def __init__(self, expected_override_name):
+    def __init__(self, expected_override_name, report):
         """Initialize the helper class by creating a number of stub
         functions that each datastore specific class can chose to
         override. Basically, the functions are of the form:
@@ -86,6 +88,7 @@ class TestHelper(object):
         super(TestHelper, self).__init__()
 
         self._expected_override_name = expected_override_name
+        self.report = report
 
         # For building data access functions
         # name/fn pairs for each action
@@ -114,6 +117,9 @@ class TestHelper(object):
             DataType.tiny2.name: {
                 self.DATA_START: 2000,
                 self.DATA_SIZE: 100},
+            DataType.tiny3.name: {
+                self.DATA_START: 3000,
+                self.DATA_SIZE: 100},
             DataType.small.name: {
                 self.DATA_START: 10000,
                 self.DATA_SIZE: 1000},
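
The tiny3 entry added above gets its own DATA_START/DATA_SIZE window; as with the existing sets, the windows are chosen so the id ranges never overlap. An illustrative check (not part of the patch) of that non-overlap property:

    # Illustrative only: the DATA_START/DATA_SIZE pairs above keep every data
    # set in a disjoint id range, so tiny2, the new tiny3 and small can share
    # one namespace (one table, one key space, etc.).
    ranges = {
        'tiny2': (2000, 100),
        'tiny3': (3000, 100),
        'small': (10000, 1000),
    }
    spans = [set(range(start, start + size)) for start, size in ranges.values()]
    assert not (spans[0] & spans[1]) and not (spans[1] & spans[2])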
@@ -180,13 +186,25 @@ class TestHelper(object):
     ##############
     def add_data(self, data_type, host, *args, **kwargs):
         """Adds data of type 'data_type' to the database. Descendant
-        classes should implement a function for each DataType value
-        of the form 'add_{DataType.name}_data' - for example:
-            'add_tiny_data'
-            'add_small_data'
-            ...
-        Since this method may be called multiple times, the implemented
-        'add_*_data' functions should be idempotent.
+        classes should implement a function 'add_actual_data' that has the
+        following signature:
+            def add_actual_data(
+                self,        # standard self reference
+                data_label,  # label used to identify the 'type' to add
+                data_start,  # a start count
+                data_size,   # a size to use
+                host,        # the host to add the data to
+                *args,       # for possible future expansion
+                **kwargs     # for possible future expansion
+            ):
+        The data_label could be used to create a database or a table if the
+        datastore supports that. The data_start and data_size values are
+        designed not to overlap, such that all the data could be stored
+        in a single namespace (for example, creating ids from data_start
+        to data_start + data_size).
+
+        Since this method may be called multiple times, the
+        'add_actual_data' function should be idempotent.
         """
         self._perform_data_action(self.FN_ADD, data_type.name, host,
                                   *args, **kwargs)
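
A hypothetical descendant helper satisfying the documented add_actual_data contract; the class name, the _connect helper and the client calls are illustrative assumptions, not Trove code:

    class ExampleKeyValueHelper(TestHelper):

        def add_actual_data(self, data_label, data_start, data_size, host,
                            *args, **kwargs):
            client = self._connect(host)  # assumed connection helper
            for i in range(data_start, data_start + data_size):
                # Keyed writes are naturally idempotent: re-running the loop
                # simply overwrites the same keys with the same values.
                client.set('%s:%d' % (data_label, i), 'value-%d' % i)

        def verify_actual_data(self, data_label, data_start, data_size, host,
                               *args, **kwargs):
            client = self._connect(host)  # assumed connection helper
            for i in (data_start, data_start + data_size - 1):  # edge cases
                assert client.get('%s:%d' % (data_label, i)) == 'value-%d' % i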
@@ -203,9 +221,27 @@ class TestHelper(object):
         datastore. This can be done by testing edge cases, and possibly
         some random elements within the set. See
         instructions for 'add_data' for implementation guidance.
+
+        By default, the verification is attempted 10 times, sleeping for 3
+        seconds between each attempt. This can be controlled by the
+        retry_count and retry_sleep kwarg values.
         """
-        self._perform_data_action(self.FN_VERIFY, data_type.name, host,
-                                  *args, **kwargs)
+        retry_count = kwargs.pop('retry_count', 10) or 0
+        retry_sleep = kwargs.pop('retry_sleep', 3) or 0
+        attempts = -1
+        while True:
+            attempts += 1
+            try:
+                self._perform_data_action(self.FN_VERIFY, data_type.name,
+                                          host, *args, **kwargs)
+                break
+            except Exception as ex:
+                self.report.log("Attempt %d to verify data type %s failed\n%s"
+                                % (attempts, data_type.name, ex))
+                if attempts > retry_count:
+                    raise
+                self.report.log("Trying again (after %d second sleep)" %
+                                retry_sleep)
+                sleep(retry_sleep)
 
     def _perform_data_action(self, fn_type, fn_name, host, *args, **kwargs):
         fns = self._data_fns[fn_type]
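
A usage sketch for the retry knobs introduced above; 'helper' and 'replica_host' stand in for an actual TestHelper instance and host, and the values are illustrative:

    helper.verify_data(DataType.tiny3, replica_host,
                       retry_count=20,  # allow more verification attempts
                       retry_sleep=5)   # wait longer between attempts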
@@ -285,15 +321,6 @@ class TestHelper(object):
             if name in fns:
                 fns[name] = fn
 
-    #####################
-    # Replication related
-    #####################
-    def wait_for_replicas(self):
-        """Wait for data to propagate to all the replicas. Datastore
-        specific overrides could increase (or decrease) this delay.
-        """
-        sleep(30)
-
     #######################
     # Database/User related
     #######################
@@ -20,8 +20,9 @@ from trove.tests.scenario.helpers.sql_helper import SqlHelper
 
 class VerticaHelper(SqlHelper):
 
-    def __init__(self, expected_override_name):
-        super(VerticaHelper, self).__init__(expected_override_name, 'vertica')
+    def __init__(self, expected_override_name, report):
+        super(VerticaHelper, self).__init__(expected_override_name, report,
+                                            'vertica')
 
     def get_helper_credentials(self):
         return {'name': 'lite', 'password': 'litepass', 'database': 'lite'}
@@ -190,6 +190,7 @@ class InstanceCreateRunner(TestRunner):
             self.report.log("Using an existing instance: %s" % instance.id)
             self.assert_equal(expected_states[-1], instance.status,
                               "Given instance is in a bad state.")
+            instance_info.name = instance.name
         else:
             self.report.log("Creating a new instance.")
             instance = self.auth_client.instances.create(
@@ -42,7 +42,7 @@ class ReplicationRunner(TestRunner):
 
     def assert_add_replication_data(self, data_type, host):
         """In order for this to work, the corresponding datastore
-        'helper' class should implement the 'add_<data_type>_data' method.
+        'helper' class should implement the 'add_actual_data' method.
         """
         self.test_helper.add_data(data_type, host)
         self.used_data_sets.add(data_type)
@@ -55,7 +55,7 @@ class ReplicationRunner(TestRunner):
 
     def assert_verify_replication_data(self, data_type, host):
         """In order for this to work, the corresponding datastore
-        'helper' class should implement the 'verify_<data_type>_data' method.
+        'helper' class should implement the 'verify_actual_data' method.
         """
         self.test_helper.verify_data(data_type, host)
 
@@ -69,18 +69,14 @@ class ReplicationRunner(TestRunner):
             locality='anti-affinity').id
         self.assert_client_code(expected_http_code)
 
-    def run_create_single_replica(self, expected_states=['BUILD', 'ACTIVE'],
-                                  expected_http_code=200):
-        master_id = self.instance_info.id
+    def run_create_single_replica(self, expected_http_code=200):
         self.master_backup_count = len(
-            self.auth_client.instances.backups(master_id))
+            self.auth_client.instances.backups(self.master_id))
         self.replica_1_id = self.assert_replica_create(
-            master_id, 'replica1', 1, expected_states, expected_http_code)
-        self.replica_1_host = self.get_instance_host(self.replica_1_id)
+            self.master_id, 'replica1', 1, expected_http_code)
 
     def assert_replica_create(
-            self, master_id, replica_name, replica_count,
-            expected_states, expected_http_code):
+            self, master_id, replica_name, replica_count, expected_http_code):
         replica = self.auth_client.instances.create(
             self.instance_info.name + replica_name,
             self.instance_info.dbaas_flavor_href,
@@ -89,14 +85,15 @@ class ReplicationRunner(TestRunner):
             datastore_version=self.instance_info.dbaas_datastore_version,
             nics=self.instance_info.nics,
             replica_count=replica_count)
-        replica_id = replica.id
-
-        self.assert_instance_action(replica_id, expected_states,
-                                    expected_http_code)
-        self._assert_is_master(master_id, [replica_id])
-        self._assert_is_replica(replica_id, master_id)
-        self._assert_locality(master_id)
-        return replica_id
+        self.assert_client_code(expected_http_code)
+        return replica.id
+
+    def run_wait_for_single_replica(self, expected_states=['BUILD', 'ACTIVE']):
+        self.assert_instance_action(self.replica_1_id, expected_states)
+        self._assert_is_master(self.master_id, [self.replica_1_id])
+        self._assert_is_replica(self.replica_1_id, self.master_id)
+        self._assert_locality(self.master_id)
+        self.replica_1_host = self.get_instance_host(self.replica_1_id)
 
     def _assert_is_master(self, instance_id, replica_ids):
         instance = self.get_instance(instance_id)
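
assert_replica_create now only checks the HTTP response code and returns the replica id, while run_wait_for_single_replica does the polling afterwards. A generic sketch of that create-then-wait pattern (illustrative, not Trove's implementation):

    import time


    def wait_for_status(get_status, expected='ACTIVE', fail_on=('ERROR',),
                        timeout=600, poll_interval=10):
        # Generic polling loop in the spirit of the new run_wait_for_* runners:
        # return once the expected state is reached, fail fast on error states,
        # and give up after the timeout.
        deadline = time.time() + timeout
        while time.time() < deadline:
            status = get_status()
            if status == expected:
                return status
            if status in fail_on:
                raise RuntimeError('resource went to %s' % status)
            time.sleep(poll_interval)
        raise RuntimeError('timed out waiting for %s' % expected)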
@@ -148,43 +145,49 @@ class ReplicationRunner(TestRunner):
             replica_count=1).id
         self.assert_client_code(expected_http_code)
 
-    def run_create_multiple_replicas(self, expected_states=['BUILD', 'ACTIVE'],
-                                     expected_http_code=200):
-        master_id = self.instance_info.id
+    def run_create_multiple_replicas(self, expected_http_code=200):
         self.replica_2_id = self.assert_replica_create(
-            master_id, 'replica2', 2, expected_states, expected_http_code)
+            self.master_id, 'replica2', 2, expected_http_code)
+
+    def run_wait_for_multiple_replicas(
+            self, expected_states=['BUILD', 'ACTIVE']):
+        replica_ids = self._get_replica_set(self.master_id)
+        self.assert_instance_action(replica_ids, expected_states)
+        self._assert_is_master(self.master_id, replica_ids)
+        for replica_id in replica_ids:
+            self._assert_is_replica(replica_id, self.master_id)
+        self._assert_locality(self.master_id)
 
     def run_wait_for_non_affinity_replica_fail(
-            self, expected_states=['BUILD', 'FAILED']):
+            self, expected_states=['BUILD', 'ERROR']):
         self._assert_instance_states(self.non_affinity_repl_id,
                                      expected_states,
                                      fast_fail_status=['ACTIVE'])
 
-    def run_delete_non_affinity_repl(self,
-                                     expected_last_state=['SHUTDOWN'],
-                                     expected_http_code=202):
+    def run_delete_non_affinity_repl(self, expected_http_code=202):
         self.assert_delete_instances(
-            self.non_affinity_repl_id,
-            expected_last_state=expected_last_state,
-            expected_http_code=expected_http_code)
+            self.non_affinity_repl_id, expected_http_code=expected_http_code)
 
-    def assert_delete_instances(
-            self, instance_ids, expected_last_state, expected_http_code):
+    def assert_delete_instances(self, instance_ids, expected_http_code):
         instance_ids = (instance_ids if utils.is_collection(instance_ids)
                         else [instance_ids])
         for instance_id in instance_ids:
             self.auth_client.instances.delete(instance_id)
             self.assert_client_code(expected_http_code)
 
-        self.assert_all_gone(instance_ids, expected_last_state)
+    def run_wait_for_delete_non_affinity_repl(
+            self, expected_last_status=['SHUTDOWN']):
+        self.assert_all_gone([self.non_affinity_repl_id],
+                             expected_last_status=expected_last_status)
 
-    def run_delete_non_affinity_master(self,
-                                       expected_last_state=['SHUTDOWN'],
-                                       expected_http_code=202):
+    def run_delete_non_affinity_master(self, expected_http_code=202):
         self.assert_delete_instances(
-            self.non_affinity_master_id,
-            expected_last_state=expected_last_state,
-            expected_http_code=expected_http_code)
+            self.non_affinity_master_id, expected_http_code=expected_http_code)
+
+    def run_wait_for_delete_non_affinity_master(
+            self, expected_last_status=['SHUTDOWN']):
+        self.assert_all_gone([self.non_affinity_master_id],
+                             expected_last_status=expected_last_status)
         self.assert_server_group_gone(self.non_affinity_srv_grp_id)
 
     def run_add_data_to_replicate(self):
@@ -193,9 +196,6 @@ class ReplicationRunner(TestRunner):
     def run_verify_data_to_replicate(self):
         self.assert_verify_replication_data(DataType.tiny, self.master_host)
 
-    def run_wait_for_data_to_replicate(self):
-        self.test_helper.wait_for_replicas()
-
     def run_verify_replica_data_orig(self):
         self.assert_verify_replica_data(self.instance_info.id, DataType.small)
 
@@ -292,6 +292,15 @@ class ReplicationRunner(TestRunner):
             self.instance_info.id, self.replica_1_id, expected_states,
             expected_http_code)
 
+    def run_add_final_data_to_replicate(self):
+        self.assert_add_replication_data(DataType.tiny3, self.master_host)
+
+    def run_verify_data_to_replicate_final(self):
+        self.assert_verify_replication_data(DataType.tiny3, self.master_host)
+
+    def run_verify_final_data_replicated(self):
+        self.assert_verify_replica_data(self.master_id, DataType.tiny3)
+
     def run_remove_replicated_data(self):
         self.assert_remove_replicated_data(self.master_host)
 
@@ -343,25 +352,26 @@ class ReplicationRunner(TestRunner):
         else:
             self.fail("Unexpected replica_of ID.")
 
-    def run_delete_detached_replica(self,
-                                    expected_last_state=['SHUTDOWN'],
-                                    expected_http_code=202):
+    def run_delete_detached_replica(self, expected_http_code=202):
         self.assert_delete_instances(
-            self.replica_1_id, expected_last_state=expected_last_state,
-            expected_http_code=expected_http_code)
+            self.replica_1_id, expected_http_code=expected_http_code)
 
-    def run_delete_all_replicas(self, expected_last_state=['SHUTDOWN'],
-                                expected_http_code=202):
+    def run_delete_all_replicas(self, expected_http_code=202):
         self.assert_delete_all_replicas(
-            self.instance_info.id, expected_last_state,
-            expected_http_code)
+            self.instance_info.id, expected_http_code)
 
     def assert_delete_all_replicas(
-            self, master_id, expected_last_state, expected_http_code):
+            self, master_id, expected_http_code):
         self.report.log("Deleting a replication set: %s" % master_id)
         replica_ids = self._get_replica_set(master_id)
-        self.assert_delete_instances(replica_ids, expected_last_state,
-                                     expected_http_code)
+        self.assert_delete_instances(replica_ids, expected_http_code)
+
+    def run_wait_for_delete_replicas(
+            self, expected_last_status=['SHUTDOWN']):
+        replica_ids = self._get_replica_set(self.master_id)
+        replica_ids.update(self.replica_1_id)
+        self.assert_all_gone(replica_ids,
+                             expected_last_status=expected_last_status)
 
     def run_test_backup_deleted(self):
         backup = self.auth_client.instances.backups(self.master_id)
@@ -72,17 +72,19 @@ class RunnerFactory(object):
             runner_module_name, class_prefix, runner_base_name,
             TEST_RUNNERS_NS)
         runner = runner_cls(*args, **kwargs)
-        runner._test_helper = cls._get_helper()
+        runner._test_helper = cls._get_helper(runner.report)
        return runner
 
     @classmethod
-    def _get_helper(cls):
+    def _get_helper(cls, report):
         class_prefix = cls._get_test_datastore()
         helper_cls = cls._load_dynamic_class(
             TEST_HELPER_MODULE_NAME, class_prefix,
             TEST_HELPER_BASE_NAME, TEST_HELPERS_NS)
-        return helper_cls(cls._build_class_name(
-            class_prefix, TEST_HELPER_BASE_NAME, strip_test=True))
+        return helper_cls(
+            cls._build_class_name(class_prefix,
+                                  TEST_HELPER_BASE_NAME, strip_test=True),
+            report)
 
     @classmethod
     def _get_test_datastore(cls):
@@ -224,6 +226,13 @@ class TestRunner(object):
 
     @classmethod
     def assert_is_sublist(cls, sub_list, full_list, message=None):
+        if not message:
+            message = 'Unexpected sublist'
+        try:
+            message += ": sub_list '%s' (full_list '%s')." % (
+                sub_list, full_list)
+        except TypeError:
+            pass
         return cls.assert_true(set(sub_list).issubset(full_list), message)
 
     @classmethod
@@ -396,7 +405,7 @@ class TestRunner(object):
         return self.has_env_flag(self.DO_NOT_DELETE_INSTANCE_FLAG)
 
     def assert_instance_action(
-            self, instance_ids, expected_states, expected_http_code):
+            self, instance_ids, expected_states, expected_http_code=None):
         self.assert_client_code(expected_http_code)
         if expected_states:
             self.assert_all_instance_states(