Update amulet test definitions
This change requires the following charm-helpers change to land first:

- https://github.com/juju/charm-helpers/pull/32

Change-Id: Iae88b2c11fe9ddcc176075f54a8c075d2dc3ba4c
parent 0d8bdc3fc2
commit 4e70770d67
@@ -858,9 +858,12 @@ class OpenStackAmuletUtils(AmuletUtils):
         :returns: List of pool name, object count, kb disk space used
         """
         df = self.get_ceph_df(sentry_unit)
-        pool_name = df['pools'][pool_id]['name']
-        obj_count = df['pools'][pool_id]['stats']['objects']
-        kb_used = df['pools'][pool_id]['stats']['kb_used']
+        for pool in df['pools']:
+            if pool['id'] == pool_id:
+                pool_name = pool['name']
+                obj_count = pool['stats']['objects']
+                kb_used = pool['stats']['kb_used']
+
         self.log.debug('Ceph {} pool (ID {}): {} objects, '
                        '{} kb used'.format(pool_name, pool_id,
                                            obj_count, kb_used))
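For context on the hunk above: `ceph df --format=json` reports each pool with its own `id` field, and a pool's position in the `pools` list is not guaranteed to match that id (for example after pools have been deleted and recreated), so the helper now looks the pool up by id instead of indexing the list. A minimal sketch with made-up sample data:

# Illustrative sketch only; the sample data below is invented, not from the
# commit. 'ceph df --format=json' lists pools with their own 'id', so the
# pool is matched on that field rather than by list position.
sample_df = {
    'pools': [
        {'id': 0, 'name': 'rbd', 'stats': {'objects': 0, 'kb_used': 0}},
        {'id': 3, 'name': 'cinder', 'stats': {'objects': 412, 'kb_used': 2048}},
    ]
}

def pool_stats(df, pool_id):
    """Return (name, objects, kb_used) for the pool whose 'id' matches."""
    for pool in df['pools']:
        if pool['id'] == pool_id:
            return (pool['name'],
                    pool['stats']['objects'],
                    pool['stats']['kb_used'])
    raise KeyError('pool id {} not found'.format(pool_id))

print(pool_stats(sample_df, 3))   # ('cinder', 412, 2048)
# sample_df['pools'][3] would raise IndexError here, which is the bug avoided.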
@@ -392,6 +392,8 @@ def get_swift_codename(version):
             releases = UBUNTU_OPENSTACK_RELEASE
             release = [k for k, v in six.iteritems(releases) if codename in v]
             ret = subprocess.check_output(['apt-cache', 'policy', 'swift'])
+            if six.PY3:
+                ret = ret.decode('UTF-8')
             if codename in ret or release[0] in ret:
                 return codename
     elif len(codenames) == 1:
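The two added lines above deal with Python 3, where subprocess.check_output() returns bytes; testing a str codename with `in` against undecoded bytes raises TypeError. A rough, self-contained illustration (using `echo` instead of `apt-cache` so it runs anywhere, and assuming six is importable as it is in charm-helpers):

import subprocess

import six

# check_output() gives bytes on Python 3 and str on Python 2.
ret = subprocess.check_output(['echo', 'swift 2:2.15.1 pike'])
if six.PY3:
    ret = ret.decode('UTF-8')      # same conversion as the hunk above

print('pike' in ret)               # True; without decode() this line would
                                   # raise TypeError on Python 3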
@@ -377,12 +377,12 @@ def get_mon_map(service):
         try:
             return json.loads(mon_status)
         except ValueError as v:
-            log("Unable to parse mon_status json: {}. Error: {}".format(
-                mon_status, v.message))
+            log("Unable to parse mon_status json: {}. Error: {}"
+                .format(mon_status, str(v)))
             raise
     except CalledProcessError as e:
-        log("mon_status command failed with message: {}".format(
-            e.message))
+        log("mon_status command failed with message: {}"
+            .format(str(e)))
         raise
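The log changes above also drop e.message and v.message: the `message` attribute was deprecated in Python 2.6 and removed from exceptions in Python 3, while str(exc) works on both. A small demonstration:

import json

try:
    json.loads('definitely not json')
except ValueError as v:
    print(str(v))                  # portable on Python 2 and 3
    print(hasattr(v, 'message'))   # False on Python 3, hence the change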
@@ -549,6 +549,8 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
         with open(path, 'wb') as target:
             os.fchown(target.fileno(), uid, gid)
             os.fchmod(target.fileno(), perms)
+            if six.PY3 and isinstance(content, six.string_types):
+                content = content.encode('UTF-8')
             target.write(content)
         return
     # the contents were the same, but we might still need to change the
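The two added lines above are the write-side counterpart of the earlier decode change: the file is opened in binary mode, and on Python 3 target.write() rejects a str, so text content is encoded first. A minimal sketch (the file written here is a throwaway temp file, not anything from the charm):

import os
import tempfile

import six

content = u'some rendered configuration\n'
if six.PY3 and isinstance(content, six.string_types):
    content = content.encode('UTF-8')    # str -> bytes, as in the hunk

fd, path = tempfile.mkstemp()
with os.fdopen(fd, 'wb') as target:
    target.write(content)                # would raise TypeError for a str
os.remove(path)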
@@ -20,6 +20,7 @@ UBUNTU_RELEASES = (
     'yakkety',
     'zesty',
     'artful',
+    'bionic',
 )
@@ -64,7 +64,7 @@ class CinderBackupBasicDeployment(OpenStackAmuletDeployment):
         # Note: cinder-backup becomes a cinder subordinate unit.
         this_service = {'name': 'cinder-backup'}
         other_services = [
-            {'name': 'percona-cluster', 'constraints': {'mem': '3072M'}},
+            {'name': 'percona-cluster'},
             {'name': 'keystone'},
             {'name': 'rabbitmq-server'},
             {'name': 'ceph', 'units': 3},
@@ -96,10 +96,8 @@ class CinderBackupBasicDeployment(OpenStackAmuletDeployment):
             'admin-token': 'ubuntutesting'
         }
         pxc_config = {
-            'dataset-size': '25%',
+            'innodb-buffer-pool-size': '256M',
             'max-connections': 1000,
-            'root-password': 'ChangeMe123',
-            'sst-password': 'ChangeMe123',
         }
         cinder_config = {
             'block-device': 'None',
@@ -239,18 +237,32 @@ class CinderBackupBasicDeployment(OpenStackAmuletDeployment):
     def test_110_users(self):
         """Verify expected users."""
         u.log.debug('Checking keystone users...')
-        expected = [
-            {'name': 'cinder_cinderv2',
-             'enabled': True,
-             'tenantId': u.not_null,
-             'id': u.not_null,
-             'email': 'juju@localhost'},
-            {'name': 'admin',
-             'enabled': True,
-             'tenantId': u.not_null,
-             'id': u.not_null,
-             'email': 'juju@localhost'}
-        ]
+
+        if self._get_openstack_release() < self.xenial_pike:
+            expected = [{
+                'name': 'cinder_cinderv2',
+                'enabled': True,
+                'tenantId': u.not_null,
+                'id': u.not_null,
+                'email': 'juju@localhost',
+            }]
+        else:
+            expected = [{
+                'name': 'cinderv3_cinderv2',
+                'enabled': True,
+                'tenantId': u.not_null,
+                'id': u.not_null,
+                'email': 'juju@localhost',
+            }]
+
+        expected.append({
+            'name': 'admin',
+            'enabled': True,
+            'tenantId': u.not_null,
+            'id': u.not_null,
+            'email': 'juju@localhost',
+        })

         actual = self.keystone.users.list()
         ret = u.validate_user_data(expected, actual)
         if ret:
@@ -275,10 +287,15 @@ class CinderBackupBasicDeployment(OpenStackAmuletDeployment):
         endpoint_vol['id'] = u.not_null
         endpoint_id['id'] = u.not_null

-        expected = {
-            'identity': [endpoint_id],
-            'volume': [endpoint_id]
-        }
+        if self._get_openstack_release() >= self.xenial_pike:
+            # Pike and later
+            expected = {'identity': [endpoint_id],
+                        'volumev2': [endpoint_id]}
+        else:
+            # Ocata and prior
+            expected = {'identity': [endpoint_id],
+                        'volume': [endpoint_id]}

         actual = self.keystone.service_catalog.get_endpoints()

         ret = u.validate_svc_catalog_endpoint_data(expected, actual)
@@ -478,6 +495,8 @@ class CinderBackupBasicDeployment(OpenStackAmuletDeployment):
             'service_tenant_id': u.not_null,
             'service_host': u.valid_ip
         }
+        if self._get_openstack_release() >= self.xenial_pike:
+            expected['service_username'] = 'cinderv3_cinderv2'
         ret = u.validate_relation_data(unit, relation, expected)
         if ret:
             msg = u.relation_error('identity-service cinder', ret)
@@ -489,14 +508,29 @@ class CinderBackupBasicDeployment(OpenStackAmuletDeployment):
         unit = self.cinder_sentry
         relation = ['identity-service',
                     'keystone:identity-service']
-        expected = {
-            'cinder_service': 'cinder',
-            'cinder_region': 'RegionOne',
-            'cinder_public_url': u.valid_url,
-            'cinder_internal_url': u.valid_url,
-            'cinder_admin_url': u.valid_url,
-            'private-address': u.valid_ip
-        }
+        if self._get_openstack_release() < self.xenial_pike:
+            expected = {
+                'cinder_service': 'cinder',
+                'cinder_region': 'RegionOne',
+                'cinder_public_url': u.valid_url,
+                'cinder_internal_url': u.valid_url,
+                'cinder_admin_url': u.valid_url,
+                'private-address': u.valid_ip
+            }
+        else:
+            expected = {
+                'cinderv2_service': 'cinderv2',
+                'cinderv2_region': 'RegionOne',
+                'cinderv2_public_url': u.valid_url,
+                'cinderv2_internal_url': u.valid_url,
+                'cinderv2_admin_url': u.valid_url,
+                'cinderv3_service': 'cinderv3',
+                'cinderv3_region': 'RegionOne',
+                'cinderv3_public_url': u.valid_url,
+                'cinderv3_internal_url': u.valid_url,
+                'cinderv3_admin_url': u.valid_url,
+                'private-address': u.valid_ip
+            }
         ret = u.validate_relation_data(unit, relation, expected)
         if ret:
             msg = u.relation_error('cinder identity-service', ret)
@@ -816,11 +850,14 @@ class CinderBackupBasicDeployment(OpenStackAmuletDeployment):
         if ret:
             amulet.raise_status(amulet.FAIL, msg=ret)

-        # Validate ceph cinder pool disk space usage samples over time
-        ret = u.validate_ceph_pool_samples(pool_size_samples,
-                                           "cinder pool disk usage")
-        if ret:
-            amulet.raise_status(amulet.FAIL, msg=ret)
+        # Luminous (pike) ceph seems more efficient at disk usage so we cannot
+        # guarantee the ordering of kb_used
+        if self._get_openstack_release() < self.xenial_mitaka:
+            # Validate ceph cinder pool disk space usage samples over time
+            ret = u.validate_ceph_pool_samples(pool_size_samples,
+                                               "cinder pool disk usage")
+            if ret:
+                amulet.raise_status(amulet.FAIL, msg=ret)

     def test_499_ceph_cmds_exit_zero(self):
         """Check basic functionality of ceph cli commands against
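The comment in the new code explains why the sample check is now gated: on Luminous ceph (shipped with Pike) the reported kb_used for the cinder pool can no longer be expected to track the test actions in order, so the assertion can fail spuriously. The actual check is charm-helpers' validate_ceph_pool_samples(); a rough, hypothetical sketch of that kind of ordering check, under the assumption that samples are expected to be non-decreasing, might look like:

# Hypothetical sketch of an ordering check over usage samples; not the actual
# charm-helpers implementation, which may differ.
def samples_non_decreasing(samples):
    """True if every sample is >= the one taken before it."""
    return all(later >= earlier
               for earlier, later in zip(samples, samples[1:]))

print(samples_non_decreasing([1024, 2048, 4096]))   # True  -> check passes
print(samples_non_decreasing([1024, 896, 4096]))    # False -> check would fail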
@@ -858,9 +858,12 @@ class OpenStackAmuletUtils(AmuletUtils):
         :returns: List of pool name, object count, kb disk space used
         """
         df = self.get_ceph_df(sentry_unit)
-        pool_name = df['pools'][pool_id]['name']
-        obj_count = df['pools'][pool_id]['stats']['objects']
-        kb_used = df['pools'][pool_id]['stats']['kb_used']
+        for pool in df['pools']:
+            if pool['id'] == pool_id:
+                pool_name = pool['name']
+                obj_count = pool['stats']['objects']
+                kb_used = pool['stats']['kb_used']
+
         self.log.debug('Ceph {} pool (ID {}): {} objects, '
                        '{} kb used'.format(pool_name, pool_id,
                                            obj_count, kb_used))
@@ -549,6 +549,8 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
         with open(path, 'wb') as target:
             os.fchown(target.fileno(), uid, gid)
             os.fchmod(target.fileno(), perms)
+            if six.PY3 and isinstance(content, six.string_types):
+                content = content.encode('UTF-8')
             target.write(content)
         return
     # the contents were the same, but we might still need to change the
@@ -20,6 +20,7 @@ UBUNTU_RELEASES = (
     'yakkety',
     'zesty',
     'artful',
+    'bionic',
 )
@@ -21,3 +21,6 @@ from basic_deployment import CinderBackupBasicDeployment
 if __name__ == '__main__':
     deployment = CinderBackupBasicDeployment(series='artful')
     deployment.run_tests()
+
+# NOTE(beisner): Artful target disabled, pending bug:
+# https://bugs.launchpad.net/charm-percona-cluster/+bug/1728132
tests/gate-basic-xenial-pike: 0 changes (normal file → executable file)
tox.ini: 2 changes
@@ -60,7 +60,7 @@ basepython = python2.7
 deps = -r{toxinidir}/requirements.txt
        -r{toxinidir}/test-requirements.txt
 commands =
-    bundletester -vl DEBUG -r json -o func-results.json gate-basic-xenial-mitaka --no-destroy
+    bundletester -vl DEBUG -r json -o func-results.json gate-basic-xenial-pike --no-destroy

 [testenv:func27-dfs]
 # Charm Functional Test