Organize provisioning to use testresources

This change introduces the use of the testresources package,
so that the provisioning system uses TestResourceManager
objects to create and drop databases and schemas and to
manage transactional testing.  A new series of objects that
support transparent transaction containers within tests is
added as well.

partially implement bp: long-lived-transactionalized-db-fixtures
Partial-Bug: #1339206

Change-Id: I16bfa3af0e1ad6a9231ea38dea7cd76092347f55
Mike Bayer, 2014-12-30 18:24:37 -05:00 (committed by Roman Podoliaka)
parent aa6fc4e2bd
commit 6ccea346d9
12 changed files with 490 additions and 154 deletions
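For readers unfamiliar with the testresources package: resources are driven through a small TestResourceManager protocol, which the new provisioning objects in this change (BackendResource, DatabaseResource, SchemaResource, TransactionResource) all implement. A minimal sketch of that protocol follows; it is not part of this change, and TempDirResource is purely illustrative.

    import shutil
    import tempfile

    import testresources


    class TempDirResource(testresources.TestResourceManager):
        # make() builds the resource; clean() tears it down once the last
        # user has called finishedWith(); isDirty() controls whether a
        # live resource must be rebuilt before it is reused.
        def make(self, dependency_resources):
            return tempfile.mkdtemp()

        def clean(self, resource):
            shutil.rmtree(resource)


    manager = TempDirResource()
    path = manager.getResource()    # built on first use, then shared
    manager.finishedWith(path)      # cleaned once no users remain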

View File

@ -16,7 +16,6 @@
"""Provision test environment for specific DB backends"""
import abc
-import argparse
import logging
import os
import random
@ -28,6 +27,7 @@ from six import moves
import sqlalchemy
from sqlalchemy.engine import url as sa_url
from sqlalchemy import schema
+import testresources
from oslo_db._i18n import _LI
from oslo_db import exception
@ -39,31 +39,103 @@ LOG = logging.getLogger(__name__)
class ProvisionedDatabase(object):
-    """Represent a single database node that can be used for testing in
+    pass
-    a serialized fashion.
-    ``ProvisionedDatabase`` includes features for full lifecycle management
-    of a node, in a way that is context-specific.  Depending on how the
-    test environment runs, ``ProvisionedDatabase`` should know if it needs
-    to create and drop databases or if it is making use of a database that
-    is maintained by an external process.
+class BackendResource(testresources.TestResourceManager):
+    def __init__(self, database_type):
+        super(BackendResource, self).__init__()
+        self.database_type = database_type
+        self.backend = Backend.backend_for_database_type(self.database_type)
-    """
+    def make(self, dependency_resources):
+        return self.backend
+
+    def isDirty(self):
+        return False
+
+
+class DatabaseResource(testresources.TestResourceManager):
    def __init__(self, database_type):
-        self.backend = Backend.backend_for_database_type(database_type)
-        self.db_token = _random_ident()
+        super(DatabaseResource, self).__init__()
+        self.database_type = database_type
+        self.resources = [
+            ('backend', BackendResource(database_type))
+        ]
-        self.backend.create_named_database(self.db_token)
-        self.engine = self.backend.provisioned_engine(self.db_token)
+    def make(self, dependency_resources):
+        dependency_resources['db_token'] = db_token = _random_ident()
+        backend = dependency_resources['backend']
+        LOG.info(
+            "CREATE BACKEND %s TOKEN %s", backend.engine.url, db_token)
+        backend.create_named_database(db_token, conditional=True)
+        dependency_resources['engine'] = \
+            backend.provisioned_engine(db_token)
+        return ProvisionedDatabase()
-    def drop_all_objects(self):
-        self.backend.drop_all_objects(self.engine)
+    def clean(self, resource):
+        resource.engine.dispose()
+        LOG.info(
+            "DROP BACKEND %s TOKEN %s",
+            resource.backend.engine, resource.db_token)
+        resource.backend.drop_named_database(resource.db_token)
-    def dispose(self):
-        self.engine.dispose()
-        self.backend.drop_named_database(self.db_token)
+    def isDirty(self):
+        return False
+
+
+class TransactionResource(testresources.TestResourceManager):
+    def __init__(self, database_resource, schema_resource):
+        super(TransactionResource, self).__init__()
+        self.resources = [
+            ('database', database_resource),
+            ('schema', schema_resource)
+        ]
+
+    def clean(self, resource):
+        resource._dispose()
+
+    def make(self, dependency_resources):
+        conn = dependency_resources['database'].engine.connect()
+        return utils.NonCommittingEngine(conn)
+
+    def isDirty(self):
+        return True
+
+
+class Schema(object):
+    pass
+
+
+class SchemaResource(testresources.TestResourceManager):
+    def __init__(self, database_resource, generate_schema, teardown=False):
+        super(SchemaResource, self).__init__()
+        self.generate_schema = generate_schema
+        self.teardown = teardown
+        self.resources = [
+            ('database', database_resource)
+        ]
+
+    def clean(self, resource):
+        LOG.info(
+            "DROP ALL OBJECTS, BACKEND %s",
+            resource.database.engine.url)
+        resource.database.backend.drop_all_objects(
+            resource.database.engine)
+
+    def make(self, dependency_resources):
+        if self.generate_schema:
+            self.generate_schema(dependency_resources['database'].engine)
+        return Schema()
+
+    def isDirty(self):
+        if self.teardown:
+            return True
+        else:
+            return False
class Backend(object):
@ -85,19 +157,12 @@ class Backend(object):
        self.verified = False
        self.engine = None
        self.impl = BackendImpl.impl(database_type)
+        self.current_dbs = set()
        Backend.backends_by_database_type[database_type] = self

    @classmethod
    def backend_for_database_type(cls, database_type):
-        """Return and verify the ``Backend`` for the given database type.
-        Creates the engine if it does not already exist and raises
-        ``BackendNotAvailable`` if it cannot be produced.
-        :return: a base ``Engine`` that allows provisioning of databases.
-        :raises: ``BackendNotAvailable``, if an engine for this backend
-            cannot be produced.
+        """Return the ``Backend`` for the given database type.
        """
try:
@ -172,10 +237,13 @@ class Backend(object):
            conn.close()
            return eng

-    def create_named_database(self, ident):
+    def create_named_database(self, ident, conditional=False):
        """Create a database with the given name."""
-        self.impl.create_named_database(self.engine, ident)
+        if not conditional or ident not in self.current_dbs:
+            self.current_dbs.add(ident)
+            self.impl.create_named_database(
+                self.engine, ident, conditional=conditional)

    def drop_named_database(self, ident, conditional=False):
        """Drop a database with the given name."""
@ -183,6 +251,7 @@ class Backend(object):
        self.impl.drop_named_database(
            self.engine, ident,
            conditional=conditional)
+        self.current_dbs.discard(ident)
def drop_all_objects(self, engine):
"""Drop all database objects.
@ -303,7 +372,7 @@ class BackendImpl(object):
"""
    @abc.abstractmethod
-    def create_named_database(self, engine, ident):
+    def create_named_database(self, engine, ident, conditional=False):
        """Create a database with the given name."""
@abc.abstractmethod
@ -388,9 +457,10 @@ class MySQLBackendImpl(BackendImpl):
    def create_opportunistic_driver_url(self):
        return "mysql://openstack_citest:openstack_citest@localhost/"

-    def create_named_database(self, engine, ident):
+    def create_named_database(self, engine, ident, conditional=False):
        with engine.connect() as conn:
-            conn.execute("CREATE DATABASE %s" % ident)
+            if not conditional or not self.database_exists(conn, ident):
+                conn.execute("CREATE DATABASE %s" % ident)
def drop_named_database(self, engine, ident, conditional=False):
with engine.connect() as conn:
@ -409,10 +479,12 @@ class SQLiteBackendImpl(BackendImpl):
    def create_opportunistic_driver_url(self):
        return "sqlite://"

-    def create_named_database(self, engine, ident):
+    def create_named_database(self, engine, ident, conditional=False):
        url = self._provisioned_database_url(engine.url, ident)
-        eng = sqlalchemy.create_engine(url)
-        eng.connect().close()
+        filename = url.database
+        if filename and (not conditional or not os.access(filename, os.F_OK)):
+            eng = sqlalchemy.create_engine(url)
+            eng.connect().close()
def provisioned_engine(self, base_url, ident):
return session.create_engine(
@ -442,10 +514,11 @@ class PostgresqlBackendImpl(BackendImpl):
return "postgresql://openstack_citest:openstack_citest"\
"@localhost/postgres"
def create_named_database(self, engine, ident):
def create_named_database(self, engine, ident, conditional=False):
with engine.connect().execution_options(
isolation_level="AUTOCOMMIT") as conn:
conn.execute("CREATE DATABASE %s" % ident)
if not conditional or not self.database_exists(conn, ident):
conn.execute("CREATE DATABASE %s" % ident)
def drop_named_database(self, engine, ident, conditional=False):
with engine.connect().execution_options(
@ -501,82 +574,4 @@ def _random_ident():
            for i in moves.range(10))

-def _echo_cmd(args):
-    idents = [_random_ident() for i in moves.range(args.instances_count)]
-    print("\n".join(idents))
-
-
-def _create_cmd(args):
-    idents = [_random_ident() for i in moves.range(args.instances_count)]
-    for backend in Backend.all_viable_backends():
-        for ident in idents:
-            backend.create_named_database(ident)
-    print("\n".join(idents))
-
-
-def _drop_cmd(args):
-    for backend in Backend.all_viable_backends():
-        for ident in args.instances:
-            backend.drop_named_database(ident, args.conditional)


Backend._setup()


-def main(argv=None):
-    """Command line interface to create/drop databases.
-
-    ::create: Create test database with random names.
-    ::drop: Drop database created by previous command.
-    ::echo: create random names and display them; don't create.
-    """
-    parser = argparse.ArgumentParser(
-        description='Controller to handle database creation and dropping'
-        ' commands.',
-        epilog='Typically called by the test runner, e.g. shell script, '
-        'testr runner via .testr.conf, or other system.')
-    subparsers = parser.add_subparsers(
-        help='Subcommands to manipulate temporary test databases.')
-
-    create = subparsers.add_parser(
-        'create',
-        help='Create temporary test databases.')
-    create.set_defaults(which=_create_cmd)
-    create.add_argument(
-        'instances_count',
-        type=int,
-        help='Number of databases to create.')
-
-    drop = subparsers.add_parser(
-        'drop',
-        help='Drop temporary test databases.')
-    drop.set_defaults(which=_drop_cmd)
-    drop.add_argument(
-        'instances',
-        nargs='+',
-        help='List of databases uri to be dropped.')
-    drop.add_argument(
-        '--conditional',
-        action="store_true",
-        help="Check if database exists first before dropping"
-    )
-
-    echo = subparsers.add_parser(
-        'echo',
-        help="Create random database names and display only."
-    )
-    echo.set_defaults(which=_echo_cmd)
-    echo.add_argument(
-        'instances_count',
-        type=int,
-        help='Number of identifiers to create.')
-
-    args = parser.parse_args(argv)
-    cmd = args.which
-    cmd(args)
-
-
-if __name__ == "__main__":
-    main()
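Taken together, the resource managers added above chain through their ``resources`` lists: a TransactionResource depends on a SchemaResource and a DatabaseResource, and the DatabaseResource depends on a BackendResource. A hedged sketch of wiring them up by hand, essentially what the RetainSchemaTest added later in this commit does; the make_tables callback is illustrative:

    from oslo_db.sqlalchemy import provision


    def make_tables(engine):
        pass  # build the schema under test here


    database = provision.DatabaseResource('sqlite')   # may raise BackendNotAvailable
    schema = provision.SchemaResource(database, make_tables)
    transaction = provision.TransactionResource(database, schema)

    engine = transaction.getResource()   # NonCommittingEngine over a provisioned database
    # ... run statements against `engine`; nothing is ever committed ...
    transaction.finishedWith(engine)     # releases the schema, database and backend resources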

View File

@ -14,6 +14,8 @@
# under the License.
import fixtures
+import testresources
+import testscenarios
try:
from oslotest import base as test_base
@ -22,6 +24,7 @@ except ImportError:
' test-requirements')
import os
import six
from oslo_db import exception
@ -48,40 +51,118 @@ class DbFixture(fixtures.Fixture):
    def __init__(self, test):
        super(DbFixture, self).__init__()
        self.test = test

    def setUp(self):
        super(DbFixture, self).setUp()

-        try:
-            self.provision = provision.ProvisionedDatabase(self.DRIVER)
-            self.addCleanup(self.provision.dispose)
-        except exception.BackendNotAvailable:
-            msg = '%s backend is not available.' % self.DRIVER
-            return self.test.skip(msg)
+        testresources.setUpResources(
+            self.test, self.test.resources, testresources._get_result())
+        self.addCleanup(
+            testresources.tearDownResources,
+            self.test, self.test.resources, testresources._get_result()
+        )
+
+        if not hasattr(self.test, 'db'):
+            self.test.skip("database '%s' unavailable" % self.DRIVER)
+
+        if self.test.SCHEMA_SCOPE:
+            self.test.engine = self.test.transaction_engine
+            self.test.sessionmaker = session.get_maker(
+                self.test.transaction_engine)
        else:
-            self.test.provision = self.provision
-            self.test.engine = self.provision.engine
-            self.addCleanup(setattr, self.test, 'engine', None)
+            self.test.engine = self.test.db.engine
            self.test.sessionmaker = session.get_maker(self.test.engine)
-            self.addCleanup(setattr, self.test, 'sessionmaker', None)
+
+        self.addCleanup(setattr, self.test, 'sessionmaker', None)
+        self.addCleanup(setattr, self.test, 'engine', None)
class DbTestCase(test_base.BaseTestCase):
"""Base class for testing of DB code.
Using `DbFixture`. Intended to be the main database test case to use all
the tests on a given backend with user defined uri. Backend specific
tests should be decorated with `backend_specific` decorator.
"""
FIXTURE = DbFixture
+    SCHEMA_SCOPE = None
+    _schema_resources = {}
+    _database_resources = {}
def _resources_for_driver(self, driver, schema_scope, generate_schema):
# testresources relies on the identity and state of the
# TestResourceManager objects in play to correctly manage
# resources, and it also hardcodes to looking at the
# ".resources" attribute on the test object, even though the
# setUpResources() function passes the list of resources in,
# so we have to code the TestResourceManager logic into the
# .resources attribute and ensure that the same set of test
# variables always produces the same TestResourceManager objects.
if driver not in self._database_resources:
try:
self._database_resources[driver] = \
provision.DatabaseResource(driver)
except exception.BackendNotAvailable:
self._database_resources[driver] = None
database_resource = self._database_resources[driver]
if database_resource is None:
return []
if schema_scope:
key = (driver, schema_scope)
if key not in self._schema_resources:
schema_resource = provision.SchemaResource(
database_resource, generate_schema)
transaction_resource = provision.TransactionResource(
database_resource, schema_resource)
self._schema_resources[key] = \
transaction_resource
transaction_resource = self._schema_resources[key]
return [
('transaction_engine', transaction_resource),
('db', database_resource),
]
else:
key = (driver, None)
if key not in self._schema_resources:
self._schema_resources[key] = provision.SchemaResource(
database_resource, generate_schema, teardown=True)
schema_resource = self._schema_resources[key]
return [
('schema', schema_resource),
('db', database_resource)
]
@property
def resources(self):
return self._resources_for_driver(
self.FIXTURE.DRIVER, self.SCHEMA_SCOPE, self.generate_schema)
def setUp(self):
super(DbTestCase, self).setUp()
self.useFixture(self.FIXTURE(self))
def generate_schema(self, engine):
"""Generate schema objects to be used within a test.
The function is separate from the setUp() case as the scope
of this method is controlled by the provisioning system. A
test that specifies SCHEMA_SCOPE may not call this method
for each test, as the schema may be maintained from a previous run.
"""
if self.SCHEMA_SCOPE:
# if SCHEMA_SCOPE is set, then this method definitely
# has to be implemented. This is a guard against a test
# that inadvertently does schema setup within setUp().
raise NotImplementedError(
"This test requires schema-level setup to be "
"implemented within generate_schema().")
class OpportunisticTestCase(DbTestCase):
"""Placeholder for backwards compatibility."""
@ -126,3 +207,41 @@ class MySQLOpportunisticTestCase(OpportunisticTestCase):
class PostgreSQLOpportunisticTestCase(OpportunisticTestCase):
FIXTURE = PostgreSQLOpportunisticFixture
def optimize_db_test_loader(file_):
"""Package level load_tests() function.
Will apply an optimizing test suite to all sub-tests, which groups DB
tests and other resources appropriately.
Place this in an __init__.py package file within the root of the test
suite, at the level where testresources loads it as a package::
from oslo.db.sqlalchemy import test_base
load_tests = test_base.optimize_db_test_loader(__file__)
Alternatively, the directive can be placed into a test module directly.
"""
this_dir = os.path.dirname(file_)
def load_tests(loader, found_tests, pattern):
# pattern is None if the directive is placed within
# a test module directly, as well as within certain test
# discovery patterns
if pattern is not None:
pkg_tests = loader.discover(start_dir=this_dir, pattern=pattern)
result = testresources.OptimisingTestSuite()
found_tests = testscenarios.load_tests_apply_scenarios(
loader, found_tests, pattern)
result.addTest(found_tests)
if pattern is not None:
result.addTest(pkg_tests)
return result
return load_tests
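A hedged usage sketch, not part of the diff, of the SCHEMA_SCOPE / generate_schema contract above: testresources publishes each named entry of the ``resources`` list ('db', 'transaction_engine', 'schema') as an attribute on the test, and with a scope set, the schema built by generate_schema() is provisioned once per scope while each test runs on a transaction that is rolled back afterwards. The class and table names here are illustrative.

    import sqlalchemy as sa

    from oslo_db.sqlalchemy import test_base

    metadata = sa.MetaData()
    widget = sa.Table(
        'widget', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(50)),
        mysql_engine='InnoDB',
    )


    class WidgetTest(test_base.DbTestCase):
        # every test sharing this scope string reuses one provisioned schema
        SCHEMA_SCOPE = 'widget_schema'

        def generate_schema(self, engine):
            # called by SchemaResource.make(), not necessarily once per test
            metadata.create_all(engine)

        def test_insert_rolled_back(self):
            # with SCHEMA_SCOPE set, self.engine is the transactional
            # NonCommittingEngine, so this insert never persists
            with self.engine.connect() as conn:
                conn.execute(widget.insert().values(name='w1'))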

View File

@ -16,6 +16,7 @@
import abc
import collections
+import functools
import logging
import pprint
@ -483,9 +484,6 @@ class ModelsMigrationsSync(object):
return meta_def != insp_def
return insp_def != "'%s'::character varying" % meta_def.arg
-    def _cleanup(self):
-        self.provision.drop_all_objects()
FKInfo = collections.namedtuple('fk_info', ['constrained_columns',
'referred_table',
'referred_columns'])
@ -567,7 +565,8 @@ class ModelsMigrationsSync(object):
' for running of this test: %s' % e)
# drop all tables after a test run
-        self.addCleanup(self._cleanup)
+        self.addCleanup(functools.partial(self.db.backend.drop_all_objects,
+                                          self.get_engine()))
# run migration scripts
self.db_sync(self.get_engine())

View File

@ -17,6 +17,7 @@
# under the License.
import collections
+import contextlib
import logging
import re
@ -1012,3 +1013,126 @@ def get_non_innodb_tables(connectable, skip_tables=('migrate_version',
query = text(query_str)
noninnodb = connectable.execute(query, **params)
return [i[0] for i in noninnodb]
class NonCommittingConnectable(object):
"""A ``Connectable`` substitute which rolls all operations back.
``NonCommittingConnectable`` forms the basis of mock
``Engine`` and ``Connection`` objects within a test. It provides
only that part of the API that should reasonably be used within
a single-connection test environment (e.g. no engine.dispose(),
connection.invalidate(), etc. ). The connection runs both within
a transaction as well as a savepoint. The transaction is there
so that any operations upon the connection can be rolled back.
If the test calls begin(), a "pseudo" transaction is returned that
won't actually commit anything. The subtransaction is there to allow
a test to successfully call rollback(): all operations up
to that point will be rolled back and the operations can continue,
simulating a real rollback while still remaining within a transaction
external to the test.
"""
def __init__(self, connection):
self.connection = connection
self._trans = connection.begin()
self._restart_nested()
def _restart_nested(self):
self._nested_trans = self.connection.begin_nested()
def _dispose(self):
if not self.connection.closed:
self._nested_trans.rollback()
self._trans.rollback()
self.connection.close()
def execute(self, obj, *multiparams, **params):
"""Executes the given construct and returns a :class:`.ResultProxy`."""
return self.connection.execute(obj, *multiparams, **params)
def scalar(self, obj, *multiparams, **params):
"""Executes and returns the first column of the first row."""
return self.connection.scalar(obj, *multiparams, **params)
class NonCommittingEngine(NonCommittingConnectable):
"""``Engine`` -specific non committing connectbale."""
@property
def url(self):
return self.connection.engine.url
@property
def engine(self):
return self
def connect(self):
return NonCommittingConnection(self.connection)
@contextlib.contextmanager
def begin(self):
conn = self.connect()
trans = conn.begin()
try:
yield conn
except Exception:
trans.rollback()
else:
trans.commit()
class NonCommittingConnection(NonCommittingConnectable):
"""``Connection`` -specific non committing connectbale."""
def close(self):
"""Close the 'Connection'.
In this context, close() is a no-op.
"""
pass
def begin(self):
return NonCommittingTransaction(self, self.connection.begin())
def __enter__(self):
return self
def __exit__(self, *arg):
pass
class NonCommittingTransaction(object):
"""A wrapper for ``Transaction``.
This is to accommodate being able to guarantee the start of a new
SAVEPOINT when a transaction is rolled back.
"""
def __init__(self, provisioned, transaction):
self.provisioned = provisioned
self.transaction = transaction
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if type is None:
try:
self.commit()
except Exception:
self.rollback()
raise
else:
self.rollback()
def commit(self):
self.transaction.commit()
def rollback(self):
self.transaction.rollback()
self.provisioned._restart_nested()
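A hedged sketch, not part of the diff, of the NonCommitting* wrappers above used on their own, outside the provisioning machinery; the in-memory SQLite URL and table are illustrative, and exact SAVEPOINT behaviour depends on the backend driver:

    import sqlalchemy

    from oslo_db.sqlalchemy import utils

    real_engine = sqlalchemy.create_engine('sqlite://')
    real_engine.execute('CREATE TABLE t (x INTEGER)')

    engine = utils.NonCommittingEngine(real_engine.connect())

    with engine.connect() as conn:
        trans = conn.begin()
        conn.execute('INSERT INTO t VALUES (1)')
        trans.rollback()    # intended to roll back to the savepoint and restart it
        conn.execute('INSERT INTO t VALUES (2)')

    engine._dispose()       # what TransactionResource.clean() calls; all work is rolled back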

View File

@ -0,0 +1,18 @@
# Copyright (c) 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.db.sqlalchemy import test_base
load_tests = test_base.optimize_db_test_loader(__file__)

View File

@ -326,12 +326,17 @@ class TestReferenceErrorMySQL(TestReferenceErrorSQLite,
self.assertEqual("resource_foo", matched.key_table)
def test_raise_ansi_quotes(self):
self.engine.execute("SET SESSION sql_mode = 'ANSI';")
matched = self.assertRaises(
exception.DBReferenceError,
self.engine.execute,
self.table_2.insert({'id': 1, 'foo_id': 2})
)
with self.engine.connect() as conn:
conn.detach() # will not be returned to the pool when closed
# this is incompatible with some internals of the engine
conn.execute("SET SESSION sql_mode = 'ANSI';")
matched = self.assertRaises(
exception.DBReferenceError,
conn.execute,
self.table_2.insert({'id': 1, 'foo_id': 2})
)
self.assertInnerException(
matched,

View File

@ -171,7 +171,7 @@ class TestWalkVersions(test.BaseTestCase, migrate.WalkVersionsMixin):
self.assertEqual(upgraded, self.migrate_up.call_args_list)
-class ModelsMigrationSyncMixin(test.BaseTestCase):
+class ModelsMigrationSyncMixin(test_base.DbTestCase):
def setUp(self):
super(ModelsMigrationSyncMixin, self).setUp()

View File

@ -10,11 +10,13 @@
# License for the specific language governing permissions and limitations
# under the License.
from oslotest import base as oslo_test_base
from sqlalchemy import inspect
from sqlalchemy import schema
from sqlalchemy import types
from oslo.db import exception
from oslo.db.sqlalchemy import provision
from oslo_db.sqlalchemy import test_base
@ -62,7 +64,7 @@ class DropAllObjectsTest(test_base.DbTestCase):
set(insp.get_table_names())
)
-        self.provision.drop_all_objects()
+        self.db.backend.drop_all_objects(self.engine)
insp = inspect(self.engine)
self.assertEqual(
@ -71,11 +73,83 @@ class DropAllObjectsTest(test_base.DbTestCase):
)
-class MySQLRetainSchemaTest(
+class MySQLDropAllObjectsTest(
        DropAllObjectsTest, test_base.MySQLOpportunisticTestCase):
    pass


-class PostgresqlRetainSchemaTest(
+class PostgreSQLDropAllObjectsTest(
        DropAllObjectsTest, test_base.PostgreSQLOpportunisticTestCase):
    pass
class RetainSchemaTest(oslo_test_base.BaseTestCase):
DRIVER = "sqlite"
def setUp(self):
super(RetainSchemaTest, self).setUp()
metadata = schema.MetaData()
self.test_table = schema.Table(
'test_table', metadata,
schema.Column('x', types.Integer),
schema.Column('y', types.Integer),
mysql_engine='InnoDB'
)
def gen_schema(engine):
metadata.create_all(engine, checkfirst=False)
self._gen_schema = gen_schema
def test_once(self):
self._run_test()
def test_twice(self):
self._run_test()
def _run_test(self):
try:
database_resource = provision.DatabaseResource(self.DRIVER)
except exception.BackendNotAvailable:
self.skip("database not available")
schema_resource = provision.SchemaResource(
database_resource, self._gen_schema)
transaction_resource = provision.TransactionResource(
database_resource, schema_resource)
engine = transaction_resource.getResource()
with engine.connect() as conn:
rows = conn.execute(self.test_table.select())
self.assertEqual(rows.fetchall(), [])
trans = conn.begin()
conn.execute(
self.test_table.insert(),
{"x": 1, "y": 2}
)
trans.rollback()
rows = conn.execute(self.test_table.select())
self.assertEqual(rows.fetchall(), [])
trans = conn.begin()
conn.execute(
self.test_table.insert(),
{"x": 2, "y": 3}
)
trans.commit()
rows = conn.execute(self.test_table.select())
self.assertEqual(rows.fetchall(), [(2, 3)])
transaction_resource.finishedWith(engine)
class MySQLRetainSchemaTest(RetainSchemaTest):
DRIVER = "mysql"
class PostgresqlRetainSchemaTest(RetainSchemaTest):
DRIVER = "postgresql"

View File

@ -229,20 +229,22 @@ class MySQLModeTestCase(test_base.MySQLOpportunisticTestCase):
def setUp(self):
super(MySQLModeTestCase, self).setUp()
-        self.engine = session.create_engine(self.engine.url,
-                                            mysql_sql_mode=self.mysql_mode)
-        self.connection = self.engine.connect()
+        mode_engine = session.create_engine(
+            self.engine.url,
+            mysql_sql_mode=self.mysql_mode)
+        self.connection = mode_engine.connect()
meta = MetaData()
meta.bind = self.engine
self.test_table = Table(_TABLE_NAME + "mode", meta,
Column('id', Integer, primary_key=True),
Column('bar', String(255)))
-        self.test_table.create()
+        self.test_table.create(self.connection)
-        self.addCleanup(self.test_table.drop)
-        self.addCleanup(self.connection.close)
+
+        def cleanup():
+            self.test_table.drop(self.connection)
+            self.connection.close()
+            mode_engine.dispose()
+        self.addCleanup(cleanup)
def _test_string_too_long(self, value):
with self.connection.begin():

View File

@ -13,3 +13,5 @@ SQLAlchemy>=0.9.7,<=0.9.99
sqlalchemy-migrate>=0.9.1,!=0.9.2
stevedore>=1.1.0 # Apache-2.0
six>=1.7.0
+testresources>=0.2.4
+testscenarios>=0.4

View File

@ -15,6 +15,5 @@ sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
oslosphinx>=2.2.0 # Apache-2.0
oslotest>=1.2.0 # Apache-2.0
testrepository>=0.0.18
-testscenarios>=0.4
testtools>=0.9.36,!=1.2.0
tempest-lib>=0.1.0

View File

@ -14,7 +14,6 @@ sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
oslosphinx>=2.2.0 # Apache-2.0
oslotest>=1.2.0 # Apache-2.0
testrepository>=0.0.18
-testscenarios>=0.4
testtools>=0.9.36,!=1.2.0
tempest-lib>=0.1.0