Merge "Change ensure share to make startup faster"
commit c72557266b
@@ -1326,3 +1326,15 @@ def message_destroy(context, message_id):
 def cleanup_expired_messages(context):
     """Soft delete expired messages"""
     return IMPL.cleanup_expired_messages(context)
+
+
+def backend_info_get(context, host):
+    """Get hash info for given host."""
+    return IMPL.backend_info_get(context, host)
+
+
+def backend_info_update(context, host, value=None,
+                        delete_existing=False):
+    """Update hash info for host."""
+    return IMPL.backend_info_update(context, host=host, value=value,
+                                    delete_existing=delete_existing)

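A quick usage sketch (not part of the commit) of the new DB-API wrappers above, assuming an admin RequestContext and that the wrappers are reachable as manila.db.api; backend_info_update creates the row on first use and replaces the stored hash afterwards:

    from manila import context
    from manila.db import api as db_api

    ctxt = context.get_admin_context()

    # First call creates the row for this host; later calls replace the hash.
    db_api.backend_info_update(ctxt, 'hostname@backend', 'a1b2c3')
    info = db_api.backend_info_get(ctxt, 'hostname@backend')
    # info['info_hash'] == 'a1b2c3'

    # delete_existing=True soft-deletes the stored hash for the host.
    db_api.backend_info_update(ctxt, 'hostname@backend', delete_existing=True)
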
@@ -0,0 +1,62 @@
+# Copyright 2017 Huawei inc.
+# All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+"""add_backend_info_table
+
+Revision ID: 4a482571410f
+Revises: 27cb96d991fa
+Create Date: 2017-05-18 14:47:38.201658
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4a482571410f'
+down_revision = '27cb96d991fa'
+
+from alembic import op
+from oslo_log import log
+import sqlalchemy as sql
+
+LOG = log.getLogger(__name__)
+
+backend_info_table_name = 'backend_info'
+
+
+def upgrade():
+    try:
+        op.create_table(
+            backend_info_table_name,
+            sql.Column('created_at', sql.DateTime),
+            sql.Column('updated_at', sql.DateTime),
+            sql.Column('deleted_at', sql.DateTime),
+            sql.Column('deleted', sql.Integer, default=0),
+            sql.Column('host', sql.String(255),
+                       nullable=False, primary_key=True),
+            sql.Column('info_hash', sql.String(255),
+                       nullable=False),
+            mysql_engine='InnoDB',
+        )
+    except Exception:
+        LOG.error("Table |%s| not created!",
+                  backend_info_table_name)
+        raise
+
+
+def downgrade():
+    try:
+        op.drop_table(backend_info_table_name)
+    except Exception:
+        LOG.error("%s table not dropped", backend_info_table_name)
+        raise

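For reference, a small sketch (outside the commit) of inspecting the table this migration creates, using plain SQLAlchemy; the sqlite URL is only illustrative:

    import sqlalchemy as sql

    engine = sql.create_engine('sqlite:///manila.sqlite')  # illustrative URL
    inspector = sql.inspect(engine)
    for column in inspector.get_columns('backend_info'):
        print(column['name'], column['type'])
    # expected columns: created_at, updated_at, deleted_at, deleted,
    # host (primary key), info_hash
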
@@ -4911,3 +4911,56 @@ def cleanup_expired_messages(context):
     with session.begin():
         return session.query(models.Message).filter(
             models.Message.expires_at < now).delete()
+
+
+@require_context
+def backend_info_get(context, host):
+    """Get hash info for given host."""
+    session = get_session()
+
+    result = _backend_info_query(session, context, host)
+
+    return result
+
+
+@require_context
+def backend_info_create(context, host, value):
+    session = get_session()
+    with session.begin():
+        info_ref = models.BackendInfo()
+        info_ref.update({"host": host,
+                         "info_hash": value})
+        info_ref.save(session)
+        return info_ref
+
+
+@require_context
+def backend_info_update(context, host, value=None, delete_existing=False):
+    """Remove backend info for host name."""
+    session = get_session()
+
+    with session.begin():
+        info_ref = _backend_info_query(session, context, host)
+        if info_ref:
+            if value:
+                info_ref.update({"info_hash": value})
+            elif delete_existing and info_ref['deleted'] != 1:
+                info_ref.update({"deleted": 1,
+                                 "deleted_at": timeutils.utcnow()})
+        else:
+            info_ref = models.BackendInfo()
+            info_ref.update({"host": host,
+                             "info_hash": value})
+        info_ref.save(session)
+        return info_ref
+
+
+def _backend_info_query(session, context, host, read_deleted=False):
+    result = model_query(
+        context, models.BackendInfo, session=session,
+        read_deleted=read_deleted,
+    ).filter_by(
+        host=host,
+    ).first()
+
+    return result

@@ -1220,6 +1220,13 @@ class Message(BASE, ManilaBase):
     deleted = Column(String(36), default='False')
 
 
+class BackendInfo(BASE, ManilaBase):
+    """Represent Backend Info."""
+    __tablename__ = "backend_info"
+    host = Column(String(255), primary_key=True)
+    info_hash = Column(String(255))
+
+
 def register_models():
     """Register Models and create metadata.
 

@@ -2514,3 +2514,50 @@ class ShareDriver(object):
             data['ipv4_support'] = (4 in self.ip_versions)
             data['ipv6_support'] = (6 in self.ip_versions)
         return data
+
+    def get_backend_info(self, context):
+        """Get driver and array configuration parameters.
+
+        Driver can use this method to get the special configuration info and
+        return it for assessment.
+
+        :returns: A dictionary containing driver-specific info.
+
+        Example::
+
+            {
+             'version': '2.23',
+             'port': '80',
+             'logicalportip': '1.1.1.1',
+              ...
+            }
+
+        """
+        raise NotImplementedError()
+
+    def ensure_shares(self, context, shares):
+        """Invoked to ensure that shares are exported.
+
+        Driver can use this method to update the list of export locations of
+        the shares if it changes. To do that, a dictionary of shares should
+        be returned.
+        :param shares: None or a list of all shares for updates.
+        :returns: None or a dictionary of updates in the format below.
+
+        Example::
+
+            {
+                '09960614-8574-4e03-89cf-7cf267b0bd08': {
+                    'export_locations': [{...}, {...}],
+                    'status': 'error',
+                },
+
+                '28f6eabb-4342-486a-a7f4-45688f0c0295': {
+                    'export_locations': [{...}, {...}],
+                    'status': 'available',
+                },
+
+            }
+
+        """
+        raise NotImplementedError()

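To illustrate the contract described by the two docstrings above, here is a hypothetical driver sketch (class and helper names are invented for illustration; the LVM driver changes in the next hunk are the real in-tree example):

    from manila.share import driver


    class ExampleShareDriver(driver.ShareDriver):
        """Illustrative only; not part of this commit."""

        def get_backend_info(self, context):
            # Anything that, when changed, should force a full re-export.
            return {
                'backend_ip': self.configuration.safe_get(
                    'example_backend_ip'),  # hypothetical config option
                'driver_version': '1.0',
            }

        def ensure_shares(self, context, shares):
            # Return per-share updates keyed by share id; 'status' and
            # 'export_locations' are the fields the manager applies.
            return {
                share['id']: {
                    'export_locations': self._build_export_locations(share),
                    'status': 'available',
                }
                for share in shares
            }
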
@@ -280,11 +280,18 @@ class LVMShareDriver(LVMMixin, driver.ShareDriver):
         # remove dir
         self._execute('rmdir', mount_path, run_as_root=True)
 
+    def ensure_shares(self, context, shares):
+        updates = {}
+        for share in shares:
+            updates[share['id']] = {
+                'export_locations': self.ensure_share(context, share)}
+        return updates
+
     def ensure_share(self, ctx, share, share_server=None):
         """Ensure that storage are mounted and exported."""
         device_name = self._get_local_path(share)
         self._mount_device(share, device_name)
-        self._get_helper(share).create_exports(
+        return self._get_helper(share).create_exports(
             self.share_server, share['name'], recreate=True)
 
     def _delete_share(self, ctx, share):

@@ -513,3 +520,9 @@ class LVMShareDriver(LVMMixin, driver.ShareDriver):
                               share['id'])
 
         return updated_shares
+
+    def get_backend_info(self, context):
+        return {
+            'export_ips': ','.join(self.share_server['public_addresses']),
+            'db_version': utils.get_recent_db_migration_id(),
+        }

@@ -22,6 +22,7 @@
 import copy
 import datetime
 import functools
+import hashlib
 
 from oslo_config import cfg
 from oslo_log import log

@@ -312,9 +313,49 @@ class ShareManager(manager.SchedulerDependentManager):
 
         _driver_setup()
 
-        share_instances = self.db.share_instances_get_all_by_host(ctxt,
-                                                                   self.host)
+        self.ensure_driver_resources(ctxt)
+
+        self.publish_service_capabilities(ctxt)
+        LOG.info("Finished initialization of driver: '%(driver)s"
+                 "@%(host)s'",
+                 {"driver": self.driver.__class__.__name__,
+                  "host": self.host})
+
+    def ensure_driver_resources(self, ctxt):
+        old_backend_info_hash = self.db.backend_info_get(ctxt, self.host)
+        new_backend_info = None
+        new_backend_info_hash = None
+        update_share_instances = []
+        try:
+            new_backend_info = self.driver.get_backend_info(ctxt)
+        except Exception as e:
+            if not isinstance(e, NotImplementedError):
+                LOG.exception(
+                    ("The backend %(host)s could not get backend info."),
+                    {'host': self.host})
+                raise
+            else:
+                LOG.debug(
+                    ("The backend %(host)s does not support the get "
+                     "backend info method."),
+                    {'host': self.host})
+
+        if new_backend_info:
+            new_backend_info_hash = hashlib.sha1(six.text_type(
+                sorted(new_backend_info.items())).encode('utf-8')).hexdigest()
+
+        if (old_backend_info_hash and
+                old_backend_info_hash == new_backend_info_hash):
+            LOG.debug(
+                ("Ensure shares is skipped because the old backend info "
+                 "of %(host)s is the same as the new backend info."),
+                {'host': self.host})
+            return
+
+        share_instances = self.db.share_instances_get_all_by_host(
+            ctxt, self.host)
         LOG.debug("Re-exporting %s shares", len(share_instances))
+
         for share_instance in share_instances:
             share_ref = self.db.share_get(ctxt, share_instance['share_id'])
 

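As a standalone illustration of the comparison above (mirroring, not replacing, the code in ensure_driver_resources): the backend info dict is reduced to a SHA-1 of its sorted items, so the fingerprint does not depend on dict ordering and the re-export pass can be skipped whenever it matches the stored value:

    import hashlib

    import six


    def backend_info_hash(info):
        """Stable fingerprint for a backend info dict."""
        return hashlib.sha1(six.text_type(
            sorted(info.items())).encode('utf-8')).hexdigest()


    a = backend_info_hash({'db_version': '42', 'export_ips': '10.0.0.1'})
    b = backend_info_hash({'export_ips': '10.0.0.1', 'db_version': '42'})
    assert a == b  # same contents, same digest, so re-export is skipped
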
@@ -337,21 +378,44 @@ class ShareManager(manager.SchedulerDependentManager):
                 continue
 
             self._ensure_share_instance_has_pool(ctxt, share_instance)
-            share_server = self._get_share_server(ctxt, share_instance)
             share_instance = self.db.share_instance_get(
                 ctxt, share_instance['id'], with_share_data=True)
-            try:
-                export_locations = self.driver.ensure_share(
-                    ctxt, share_instance, share_server=share_server)
-            except Exception:
-                LOG.exception("Caught exception trying ensure "
-                              "share '%(s_id)s'.",
-                              {'s_id': share_instance['id']})
-                continue
+            update_share_instances.append(share_instance)
 
-            if export_locations:
+        try:
+            update_share_instances = self.driver.ensure_shares(
+                ctxt, update_share_instances)
+        except Exception as e:
+            if not isinstance(e, NotImplementedError):
+                LOG.exception("Caught exception trying to ensure "
+                              "share instances.")
+            else:
+                self._ensure_share(ctxt, update_share_instances)
+
+        if new_backend_info:
+            self.db.backend_info_update(
+                ctxt, self.host, new_backend_info_hash)
+
+        for share_instance in share_instances:
+            if share_instance['id'] not in update_share_instances:
+                continue
+            if update_share_instances[share_instance['id']].get('status'):
+                self.db.share_instance_update(
+                    ctxt, share_instance['id'],
+                    {'status': (
+                        update_share_instances[share_instance['id']].
+                        get('status')),
+                     'host': share_instance['host']}
+                )
+
+            update_export_location = (
+                update_share_instances[share_instance['id']]
+                .get('export_locations'))
+            if update_export_location:
                 self.db.share_export_locations_update(
-                    ctxt, share_instance['id'], export_locations)
+                    ctxt, share_instance['id'], update_export_location)
+
+            share_server = self._get_share_server(ctxt, share_instance)
 
             if share_instance['access_rules_status'] != (
                     constants.STATUS_ACTIVE):

@@ -396,11 +460,21 @@ class ShareManager(manager.SchedulerDependentManager):
                         "access rules for snapshot instance %s.",
                         snap_instance['id'])
 
-        self.publish_service_capabilities(ctxt)
-        LOG.info("Finished initialization of driver: '%(driver)s"
-                 "@%(host)s'",
-                 {"driver": self.driver.__class__.__name__,
-                  "host": self.host})
+    def _ensure_share(self, ctxt, share_instances):
+        for share_instance in share_instances:
+            try:
+                share_server = self._get_share_server(
+                    ctxt, share_instance)
+                export_locations = self.driver.ensure_share(
+                    ctxt, share_instance, share_server=share_server)
+            except Exception:
+                LOG.exception("Caught exception trying to ensure "
+                              "share '%(s_id)s'.",
+                              {'s_id': share_instance['id']})
+                continue
+            if export_locations:
+                self.db.share_export_locations_update(
+                    ctxt, share_instance['id'], export_locations)
 
     def _provide_share_server_for_share(self, context, share_network_id,
                                         share_instance, snapshot=None,

@@ -19,6 +19,7 @@
 from oslo_config import cfg
 
 from manila.common import constants
+from manila.db import migration
 from manila import rpc
 from manila import utils
 

@@ -147,3 +148,7 @@ def _usage_from_share(share_ref, share_instance_ref, **extra_usage_info):
     usage_info.update(extra_usage_info)
 
     return usage_info
+
+
+def get_recent_db_migration_id():
+    return migration.version()

@@ -2537,3 +2537,28 @@ class NewDescriptionColumnChecks(BaseMigrationChecks):
         db_result = engine.execute(table.select())
         for record in db_result:
             self.test_case.assertFalse(hasattr(record, 'description'))
+
+
+@map_to_migration('4a482571410f')
+class BackendInfoTableChecks(BaseMigrationChecks):
+    new_table_name = 'backend_info'
+
+    def setup_upgrade_data(self, engine):
+        pass
+
+    def check_upgrade(self, engine, data):
+        data = {
+            'host': 'test_host',
+            'info_hash': 'test_hash',
+            'created_at': datetime.datetime(2017, 7, 10, 18, 5, 58),
+            'updated_at': None,
+            'deleted_at': None,
+            'deleted': 0,
+        }
+
+        new_table = utils.load_table(self.new_table_name, engine)
+        engine.execute(new_table.insert(data))
+
+    def check_downgrade(self, engine):
+        self.test_case.assertRaises(sa_exc.NoSuchTableError, utils.load_table,
+                                    self.new_table_name, engine)

@@ -2925,3 +2925,61 @@ class MessagesDatabaseAPITestCase(test.TestCase):
         db_api.cleanup_expired_messages(adm_context)
         messages = db_api.message_get_all(adm_context)
         self.assertEqual(2, len(messages))
+
+
+class BackendInfoDatabaseAPITestCase(test.TestCase):
+
+    def setUp(self):
+        """Run before each test."""
+        super(BackendInfoDatabaseAPITestCase, self).setUp()
+        self.ctxt = context.get_admin_context()
+
+    def test_create(self):
+        host = "fake_host"
+        value = "fake_hash_value"
+
+        initial_data = db_api.backend_info_get(self.ctxt, host)
+        db_api.backend_info_update(self.ctxt, host, value)
+        actual_data = db_api.backend_info_get(self.ctxt, host)
+
+        self.assertIsNone(initial_data)
+        self.assertEqual(value, actual_data['info_hash'])
+        self.assertEqual(host, actual_data['host'])
+
+    def test_get(self):
+        host = "fake_host"
+        value = "fake_hash_value"
+
+        db_api.backend_info_update(self.ctxt, host, value, False)
+        actual_result = db_api.backend_info_get(self.ctxt, host)
+
+        self.assertEqual(value, actual_result['info_hash'])
+        self.assertEqual(host, actual_result['host'])
+
+    def test_delete(self):
+        host = "fake_host"
+        value = "fake_hash_value"
+
+        db_api.backend_info_update(self.ctxt, host, value)
+        initial_data = db_api.backend_info_get(self.ctxt, host)
+
+        db_api.backend_info_update(self.ctxt, host, delete_existing=True)
+        actual_data = db_api.backend_info_get(self.ctxt, host)
+
+        self.assertEqual(value, initial_data['info_hash'])
+        self.assertEqual(host, initial_data['host'])
+        self.assertIsNone(actual_data)
+
+    def test_double_update(self):
+        host = "fake_host"
+        value_1 = "fake_hash_value_1"
+        value_2 = "fake_hash_value_2"
+
+        initial_data = db_api.backend_info_get(self.ctxt, host)
+        db_api.backend_info_update(self.ctxt, host, value_1)
+        db_api.backend_info_update(self.ctxt, host, value_2)
+        actual_data = db_api.backend_info_get(self.ctxt, host)
+
+        self.assertIsNone(initial_data)
+        self.assertEqual(value_2, actual_data['info_hash'])
+        self.assertEqual(host, actual_data['host'])

@@ -681,3 +681,11 @@ class LVMShareDriverTestCase(test.TestCase):
                           self._driver.update_share_usage_size,
                           self._context,
                           [self.share])
+
+    def test_get_backend_info(self):
+        backend_info = self._driver.get_backend_info(self._context)
+
+        self.assertEqual(
+            {'export_ips': ','.join(self.server['public_addresses']),
+             'db_version': mock.ANY},
+            backend_info)

@@ -15,6 +15,7 @@
 
 """Test of Share Manager for Manila."""
 import datetime
+import hashlib
 import random
 
 import ddt

@@ -293,7 +294,12 @@ class ShareManagerTestCase(test.TestCase):
 
         return instances, rules
 
-    def test_init_host_with_shares_and_rules(self):
+    @ddt.data(("588569466613133740", {"db_version": "test_version"}),
+              (None, {"db_version": "test_version"}),
+              (None, None))
+    @ddt.unpack
+    def test_init_host_with_shares_and_rules(self, old_backend_info_hash,
+                                             new_backend_info):
 
         # initialization of test data
         def raise_share_access_exists(*args, **kwargs):

@@ -302,7 +308,20 @@ class ShareManagerTestCase(test.TestCase):
 
         instances, rules = self._setup_init_mocks()
         fake_export_locations = ['fake/path/1', 'fake/path']
+        fake_update_instances = {
+            instances[0]['id']: {'export_locations': fake_export_locations},
+            instances[2]['id']: {'export_locations': fake_export_locations}
+        }
+        instances[0]['access_rules_status'] = ''
+        instances[2]['access_rules_status'] = ''
         share_server = 'fake_share_server_type_does_not_matter'
+        self.mock_object(self.share_manager.db,
+                         'backend_info_get',
+                         mock.Mock(return_value=old_backend_info_hash))
+        mock_backend_info_update = self.mock_object(
+            self.share_manager.db, 'backend_info_update')
+        self.mock_object(self.share_manager.driver, 'get_backend_info',
+                         mock.Mock(return_value=new_backend_info))
         self.mock_object(self.share_manager.db,
                          'share_instances_get_all_by_host',
                          mock.Mock(return_value=instances))

@@ -311,8 +330,8 @@ class ShareManagerTestCase(test.TestCase):
                                                 instances[4]]))
         self.mock_object(self.share_manager.db,
                          'share_export_locations_update')
-        self.mock_object(self.share_manager.driver, 'ensure_share',
-                         mock.Mock(return_value=fake_export_locations))
+        self.mock_object(self.share_manager.driver, 'ensure_shares',
+                         mock.Mock(return_value=fake_update_instances))
         self.mock_object(self.share_manager, '_ensure_share_instance_has_pool')
         self.mock_object(self.share_manager, '_get_share_server',
                          mock.Mock(return_value=share_server))

@@ -343,30 +362,64 @@ class ShareManagerTestCase(test.TestCase):
             utils.IsAMatcher(context.RequestContext))
         (self.share_manager.driver.check_for_setup_error.
             assert_called_once_with())
+        if new_backend_info:
+            self.share_manager.db.backend_info_update.assert_called_once_with(
+                utils.IsAMatcher(context.RequestContext),
+                self.share_manager.host, hashlib.sha1(six.text_type(sorted(
+                    new_backend_info.items())).encode('utf-8')).hexdigest())
+        else:
+            mock_backend_info_update.assert_not_called()
         self.share_manager._ensure_share_instance_has_pool.assert_has_calls([
             mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
             mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
         ])
         self.share_manager._get_share_server.assert_has_calls([
-            mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
-            mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
-        ])
-        self.share_manager.driver.ensure_share.assert_has_calls([
-            mock.call(utils.IsAMatcher(context.RequestContext), instances[0],
-                      share_server=share_server),
-            mock.call(utils.IsAMatcher(context.RequestContext), instances[2],
-                      share_server=share_server),
+            mock.call(utils.IsAMatcher(context.RequestContext),
+                      instances[0]),
+            mock.call(utils.IsAMatcher(context.RequestContext),
+                      instances[2]),
         ])
+        self.share_manager.driver.ensure_shares.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext),
+            [instances[0], instances[2], instances[4]])
         (self.share_manager.publish_service_capabilities.
             assert_called_once_with(
                 utils.IsAMatcher(context.RequestContext)))
         self.share_manager.access_helper.update_access_rules.assert_has_calls([
-            mock.call(mock.ANY, instances[4]['id'], share_server=share_server),
+            mock.call(mock.ANY, instances[0]['id'], share_server=share_server),
+            mock.call(mock.ANY, instances[2]['id'], share_server=share_server),
         ])
 
-    def test_init_host_with_exception_on_ensure_share(self):
-        def raise_exception(*args, **kwargs):
-            raise exception.ManilaException(message="Fake raise")
+    def test_init_host_without_shares_and_rules(self):
+        new_backend_info = {"db_version": "sdfesxcv"}
+        self.mock_object(
+            self.share_manager.db, 'backend_info_get',
+            mock.Mock(return_value=hashlib.sha1(six.text_type(sorted(
+                new_backend_info.items())).encode('utf-8')).hexdigest()))
+        self.mock_object(self.share_manager.driver, 'get_backend_info',
+                         mock.Mock(return_value=new_backend_info))
+        self.mock_object(self.share_manager, 'publish_service_capabilities',
+                         mock.Mock())
+        mock_ensure_shares = self.mock_object(
+            self.share_manager.driver, 'ensure_shares')
+        mock_share_instances_get_all_by_host = self.mock_object(
+            self.share_manager.db, 'share_instances_get_all_by_host')
+
+        # call of 'init_host' method
+        self.share_manager.init_host()
+        self.share_manager.driver.do_setup.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext))
+        self.share_manager.db.backend_info_get.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext), self.share_manager.host)
+        self.share_manager.driver.get_backend_info.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext))
+        mock_ensure_shares.assert_not_called()
+        mock_share_instances_get_all_by_host.assert_not_called()
+
+    @ddt.data(exception.ManilaException, ['fake/path/1', 'fake/path'])
+    def test_init_host_with_ensure_share(self, expected_ensure_share_result):
+        def raise_NotImplementedError(*args, **kwargs):
+            raise NotImplementedError
 
         instances = self._setup_init_mocks(setup_access_rules=False)
         share_server = 'fake_share_server_type_does_not_matter'

@@ -376,9 +429,13 @@ class ShareManagerTestCase(test.TestCase):
         self.mock_object(self.share_manager.db, 'share_instance_get',
                          mock.Mock(side_effect=[instances[0], instances[2],
                                                 instances[3]]))
+        self.mock_object(
+            self.share_manager.driver, 'ensure_shares',
+            mock.Mock(side_effect=raise_NotImplementedError))
         self.mock_object(self.share_manager.driver, 'ensure_share',
-                         mock.Mock(side_effect=raise_exception))
-        self.mock_object(self.share_manager, '_ensure_share_instance_has_pool')
+                         mock.Mock(side_effect=expected_ensure_share_result))
+        self.mock_object(
+            self.share_manager, '_ensure_share_instance_has_pool')
         self.mock_object(self.share_manager, '_get_share_server',
                          mock.Mock(return_value=share_server))
         self.mock_object(self.share_manager, 'publish_service_capabilities')

@@ -399,6 +456,9 @@ class ShareManagerTestCase(test.TestCase):
             mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
             mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
         ])
+        self.share_manager.driver.ensure_shares.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext),
+            [instances[0], instances[2], instances[3]])
         self.share_manager._get_share_server.assert_has_calls([
             mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
             mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),

@@ -422,12 +482,83 @@ class ShareManagerTestCase(test.TestCase):
             {'id': instances[1]['id'], 'status': instances[1]['status']},
         )
 
+    def test_init_host_with_exception_on_ensure_shares(self):
+        def raise_exception(*args, **kwargs):
+            raise exception.ManilaException(message="Fake raise")
+
+        instances = self._setup_init_mocks(setup_access_rules=False)
+        mock_ensure_share = self.mock_object(
+            self.share_manager.driver, 'ensure_share')
+        self.mock_object(self.share_manager.db,
+                         'share_instances_get_all_by_host',
+                         mock.Mock(return_value=instances))
+        self.mock_object(self.share_manager.db, 'share_instance_get',
+                         mock.Mock(side_effect=[instances[0], instances[2],
+                                                instances[3]]))
+        self.mock_object(
+            self.share_manager.driver, 'ensure_shares',
+            mock.Mock(side_effect=raise_exception))
+        self.mock_object(
+            self.share_manager, '_ensure_share_instance_has_pool')
+
+        # call of 'init_host' method
+        self.share_manager.init_host()
+
+        # verification of call
+        (self.share_manager.db.share_instances_get_all_by_host.
+            assert_called_once_with(utils.IsAMatcher(context.RequestContext),
+                                    self.share_manager.host))
+        self.share_manager.driver.do_setup.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext))
+        self.share_manager.driver.check_for_setup_error.assert_called_with()
+        self.share_manager._ensure_share_instance_has_pool.assert_has_calls([
+            mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
+            mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
+        ])
+        self.share_manager.driver.ensure_shares.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext),
+            [instances[0], instances[2], instances[3]])
+        mock_ensure_share.assert_not_called()
+
+    def test_init_host_with_exception_on_get_backend_info(self):
+        def raise_exception(*args, **kwargs):
+            raise exception.ManilaException(message="Fake raise")
+
+        mock_ensure_share = self.mock_object(
+            self.share_manager.driver, 'ensure_share')
+        mock_ensure_shares = self.mock_object(
+            self.share_manager.driver, 'ensure_shares')
+        self.mock_object(self.share_manager.db,
+                         'backend_info_get',
+                         mock.Mock(return_value="test_backend_info"))
+        self.mock_object(
+            self.share_manager.driver, 'get_backend_info',
+            mock.Mock(side_effect=raise_exception))
+        # call of 'init_host' method
+        self.assertRaises(
+            exception.ManilaException,
+            self.share_manager.init_host,
+        )
+
+        # verification of call
+        self.share_manager.db.backend_info_get.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext), self.share_manager.host)
+        self.share_manager.driver.get_backend_info.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext))
+        mock_ensure_share.assert_not_called()
+        mock_ensure_shares.assert_not_called()
+
     def test_init_host_with_exception_on_update_access_rules(self):
         def raise_exception(*args, **kwargs):
             raise exception.ManilaException(message="Fake raise")
 
         instances, rules = self._setup_init_mocks()
         share_server = 'fake_share_server_type_does_not_matter'
+        fake_update_instances = {
+            instances[0]['id']: {'status': 'available'},
+            instances[2]['id']: {'status': 'available'},
+            instances[4]['id']: {'status': 'available'}
+        }
         smanager = self.share_manager
         self.mock_object(smanager.db, 'share_instances_get_all_by_host',
                          mock.Mock(return_value=instances))

@@ -436,6 +567,8 @@ class ShareManagerTestCase(test.TestCase):
                                                 instances[4]]))
         self.mock_object(self.share_manager.driver, 'ensure_share',
                          mock.Mock(return_value=None))
+        self.mock_object(self.share_manager.driver, 'ensure_shares',
+                         mock.Mock(return_value=fake_update_instances))
         self.mock_object(smanager, '_ensure_share_instance_has_pool')
         self.mock_object(smanager, '_get_share_server',
                          mock.Mock(return_value=share_server))

@@ -461,16 +594,9 @@ class ShareManagerTestCase(test.TestCase):
             mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
             mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
         ])
-        smanager._get_share_server.assert_has_calls([
-            mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
-            mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
-        ])
-        smanager.driver.ensure_share.assert_has_calls([
-            mock.call(utils.IsAMatcher(context.RequestContext), instances[0],
-                      share_server=share_server),
-            mock.call(utils.IsAMatcher(context.RequestContext), instances[2],
-                      share_server=share_server),
-        ])
+        smanager.driver.ensure_shares.assert_called_once_with(
+            utils.IsAMatcher(context.RequestContext),
+            [instances[0], instances[2], instances[4]])
         (self.share_manager.publish_service_capabilities.
             assert_called_once_with(
                 utils.IsAMatcher(context.RequestContext)))

|
@ -0,0 +1,4 @@
|
|||||||
|
---
|
||||||
|
fixes:
|
||||||
|
- Adds the ability to solve the potential problem of slow start
|
||||||
|
up, and deal with non-user-initiated state changes to shares.
|