De-duplicate properties_data between events, resources

Properties data (encrypted or not) is now stored in the
resource_properties_data table. We still support reading properties
data from the legacy locations (separate columns in the resource and
event tables) but all new properties data are written to
resource_properties_data.

Instead of duplicating properties data for a given resource
and its events, we refer to the same resource_properties_data
object from both resources and events.

Notes about encrypting/decrypting properties data:

1. ResourcePropertiesData takes care of encrypting/decrypting, so
   consumers (events and resources) don't have to worry about it.
2. Previously (not anymore), heat.engine.resource would take care of
   the encrypting in _store() and _store_or_update().
3. Previously and currently, heat.objects.resource decrypts the legacy
   properties_data field if needed.

Change-Id: I569e16d4a7d3f5ccc22d57fe881c4fd5994677ac
Closes-Bug: #1524013
This commit is contained in:
Crag Wolfe 2016-08-31 01:47:58 -04:00
parent a27b6cc1e4
commit 57c5aae88b
10 changed files with 655 additions and 329 deletions

View File

@ -919,17 +919,36 @@ def _delete_event_rows(context, stack_id, limit):
# So we must manually supply the IN() values.
# pgsql SHOULD work with the pure DELETE/JOIN below but that must be
# confirmed via integration tests.
query = _query_all_by_stack(context, stack_id)
session = context.session
res = session.query(models.Event.id).filter_by(
stack_id=stack_id).order_by(models.Event.id).limit(limit).all()
if not res:
id_pairs = [(e.id, e.rsrc_prop_data_id) for e in query.order_by(
models.Event.id).limit(limit).all()]
if id_pairs is None:
return 0
(max_id, ) = res[-1]
return session.query(models.Event).filter(
(ids, rsrc_prop_ids) = zip(*id_pairs)
max_id = ids[-1]
# delete the events
retval = session.query(models.Event.id).filter(
models.Event.id <= max_id).filter(
models.Event.stack_id == stack_id).delete(
synchronize_session=False)
# delete unreferenced resource_properties_data
rsrc_prop_ids = set(rsrc_prop_ids)
if rsrc_prop_ids:
still_ref_ids_from_events = [e.rsrc_prop_data_id for e
in _query_all_by_stack(
context, stack_id).all()]
still_ref_ids_from_rsrcs = [r.rsrc_prop_data_id for r
in context.session.query(models.Resource).
filter_by(stack_id=stack_id).all()]
rsrc_prop_ids = rsrc_prop_ids - set(still_ref_ids_from_events) \
- set(still_ref_ids_from_rsrcs)
q_rpd = session.query(models.ResourcePropertiesData.id).filter(
models.ResourcePropertiesData.id.in_(rsrc_prop_ids))
q_rpd.delete(synchronize_session=False)
return retval
def event_create(context, values):
if 'stack_id' in values and cfg.CONF.max_events_per_stack:
@ -1275,6 +1294,8 @@ def _purge_stacks(stack_infos, engine, meta):
stack_tag = sqlalchemy.Table('stack_tag', meta, autoload=True)
resource = sqlalchemy.Table('resource', meta, autoload=True)
resource_data = sqlalchemy.Table('resource_data', meta, autoload=True)
resource_properties_data = sqlalchemy.Table(
'resource_properties_data', meta, autoload=True)
event = sqlalchemy.Table('event', meta, autoload=True)
raw_template = sqlalchemy.Table('raw_template', meta, autoload=True)
raw_template_files = sqlalchemy.Table('raw_template_files', meta,
@ -1285,6 +1306,9 @@ def _purge_stacks(stack_infos, engine, meta):
stack_info_str = ','.join([str(i) for i in stack_infos])
LOG.info("Purging stacks %s" % stack_info_str)
# TODO(cwolfe): find a way to make this re-entrant with
# reasonably sized transactions (good luck), or add
# a cleanup for orphaned rows.
stack_ids = [stack_info[0] for stack_info in stack_infos]
# delete stack locks (just in case some got stuck)
stack_lock_del = stack_lock.delete().where(
@ -1300,68 +1324,94 @@ def _purge_stacks(stack_infos, engine, meta):
res_data_del = resource_data.delete().where(
resource_data.c.resource_id.in_(res_where))
engine.execute(res_data_del)
# delete resources (normally there shouldn't be any)
res_del = resource.delete().where(resource.c.stack_id.in_(stack_ids))
engine.execute(res_del)
# delete events
event_del = event.delete().where(event.c.stack_id.in_(stack_ids))
engine.execute(event_del)
# clean up any sync_points that may have lingered
sync_del = syncpoint.delete().where(
syncpoint.c.stack_id.in_(stack_ids))
engine.execute(sync_del)
conn = engine.connect()
with conn.begin(): # these deletes in a transaction
# delete the stacks
stack_del = stack.delete().where(stack.c.id.in_(stack_ids))
conn.execute(stack_del)
# delete orphaned raw templates
raw_template_ids = [i[1] for i in stack_infos if i[1] is not None]
raw_template_ids.extend(i[2] for i in stack_infos if i[2] is not None)
if raw_template_ids: # keep those still referenced
raw_tmpl_sel = sqlalchemy.select([stack.c.raw_template_id]).where(
stack.c.raw_template_id.in_(raw_template_ids))
raw_tmpl = [i[0] for i in conn.execute(raw_tmpl_sel)]
raw_template_ids = set(raw_template_ids) - set(raw_tmpl)
if raw_template_ids: # keep those still referenced (previous tmpl)
raw_tmpl_sel = sqlalchemy.select(
[stack.c.prev_raw_template_id]).where(
stack.c.prev_raw_template_id.in_(raw_template_ids))
raw_tmpl = [i[0] for i in conn.execute(raw_tmpl_sel)]
raw_template_ids = raw_template_ids - set(raw_tmpl)
if raw_template_ids: # delete raw_templates if we have any
# get rsrc_prop_data_ids to delete
rsrc_prop_data_where = sqlalchemy.select(
[resource.c.rsrc_prop_data_id]).where(
resource.c.stack_id.in_(stack_ids))
rsrc_prop_data_ids = set(
[i[0] for i in list(engine.execute(rsrc_prop_data_where))])
rsrc_prop_data_where = sqlalchemy.select(
[event.c.rsrc_prop_data_id]).where(
event.c.stack_id.in_(stack_ids))
rsrc_prop_data_ids.update(
[i[0] for i in list(engine.execute(rsrc_prop_data_where))])
# delete events
event_del = event.delete().where(event.c.stack_id.in_(stack_ids))
engine.execute(event_del)
# delete resources (normally there shouldn't be any)
res_del = resource.delete().where(resource.c.stack_id.in_(stack_ids))
engine.execute(res_del)
# delete resource_properties_data
if rsrc_prop_data_ids: # keep rpd's in events
rsrc_prop_data_where = sqlalchemy.select(
[event.c.rsrc_prop_data_id]).where(
event.c.rsrc_prop_data_id.in_(rsrc_prop_data_ids))
ids = list(engine.execute(rsrc_prop_data_where))
rsrc_prop_data_ids.difference_update([i[0] for i in ids])
if rsrc_prop_data_ids: # keep rpd's in resources
rsrc_prop_data_where = sqlalchemy.select(
[resource.c.rsrc_prop_data_id]).where(
resource.c.rsrc_prop_data_id.in_(rsrc_prop_data_ids))
ids = list(engine.execute(rsrc_prop_data_where))
rsrc_prop_data_ids.difference_update([i[0] for i in ids])
if rsrc_prop_data_ids: # delete if we have any
rsrc_prop_data_del = resource_properties_data.delete().where(
resource_properties_data.c.id.in_(rsrc_prop_data_ids))
engine.execute(rsrc_prop_data_del)
# delete the stacks
stack_del = stack.delete().where(stack.c.id.in_(stack_ids))
engine.execute(stack_del)
# delete orphaned raw templates
raw_template_ids = [i[1] for i in stack_infos if i[1] is not None]
raw_template_ids.extend(i[2] for i in stack_infos if i[2] is not None)
if raw_template_ids: # keep those still referenced
raw_tmpl_sel = sqlalchemy.select([stack.c.raw_template_id]).where(
stack.c.raw_template_id.in_(raw_template_ids))
raw_tmpl = [i[0] for i in engine.execute(raw_tmpl_sel)]
raw_template_ids = set(raw_template_ids) - set(raw_tmpl)
if raw_template_ids: # keep those still referenced (previous tmpl)
raw_tmpl_sel = sqlalchemy.select(
[stack.c.prev_raw_template_id]).where(
stack.c.prev_raw_template_id.in_(raw_template_ids))
raw_tmpl = [i[0] for i in engine.execute(raw_tmpl_sel)]
raw_template_ids = raw_template_ids - set(raw_tmpl)
if raw_template_ids: # delete raw_templates if we have any
raw_tmpl_file_sel = sqlalchemy.select(
[raw_template.c.files_id]).where(
raw_template.c.id.in_(raw_template_ids))
raw_tmpl_file_ids = [i[0] for i in engine.execute(
raw_tmpl_file_sel)]
raw_templ_del = raw_template.delete().where(
raw_template.c.id.in_(raw_template_ids))
engine.execute(raw_templ_del)
if raw_tmpl_file_ids: # keep _files still referenced
raw_tmpl_file_sel = sqlalchemy.select(
[raw_template.c.files_id]).where(
raw_template.c.id.in_(raw_template_ids))
raw_tmpl_file_ids = [i[0] for i in conn.execute(
raw_template.c.files_id.in_(raw_tmpl_file_ids))
raw_tmpl_files = [i[0] for i in engine.execute(
raw_tmpl_file_sel)]
raw_templ_del = raw_template.delete().where(
raw_template.c.id.in_(raw_template_ids))
conn.execute(raw_templ_del)
if raw_tmpl_file_ids: # keep _files still referenced
raw_tmpl_file_sel = sqlalchemy.select(
[raw_template.c.files_id]).where(
raw_template.c.files_id.in_(raw_tmpl_file_ids))
raw_tmpl_files = [i[0] for i in conn.execute(
raw_tmpl_file_sel)]
raw_tmpl_file_ids = set(raw_tmpl_file_ids) \
- set(raw_tmpl_files)
if raw_tmpl_file_ids: # delete _files if we have any
raw_tmpl_file_del = raw_template_files.delete().where(
raw_template_files.c.id.in_(raw_tmpl_file_ids))
conn.execute(raw_tmpl_file_del)
# purge any user creds that are no longer referenced
user_creds_ids = [i[3] for i in stack_infos if i[3] is not None]
if user_creds_ids: # keep those still referenced
user_sel = sqlalchemy.select([stack.c.user_creds_id]).where(
stack.c.user_creds_id.in_(user_creds_ids))
users = [i[0] for i in conn.execute(user_sel)]
user_creds_ids = set(user_creds_ids) - set(users)
if user_creds_ids: # delete if we have any
usr_creds_del = user_creds.delete().where(
user_creds.c.id.in_(user_creds_ids))
conn.execute(usr_creds_del)
raw_tmpl_file_ids = set(raw_tmpl_file_ids) \
- set(raw_tmpl_files)
if raw_tmpl_file_ids: # delete _files if we have any
raw_tmpl_file_del = raw_template_files.delete().where(
raw_template_files.c.id.in_(raw_tmpl_file_ids))
engine.execute(raw_tmpl_file_del)
# purge any user creds that are no longer referenced
user_creds_ids = [i[3] for i in stack_infos if i[3] is not None]
if user_creds_ids: # keep those still referenced
user_sel = sqlalchemy.select([stack.c.user_creds_id]).where(
stack.c.user_creds_id.in_(user_creds_ids))
users = [i[0] for i in engine.execute(user_sel)]
user_creds_ids = set(user_creds_ids) - set(users)
if user_creds_ids: # delete if we have any
usr_creds_del = user_creds.delete().where(
user_creds.c.id.in_(user_creds_ids))
engine.execute(usr_creds_del)
def sync_point_delete_all_by_stack_and_traversal(context, stack_id,
@ -1414,15 +1464,16 @@ def db_version(engine):
return migration.db_version(engine)
def _crypt_action(encrypt):
    """Return the translated name of the crypt operation being performed."""
    return _('encrypt') if encrypt else _('decrypt')
def _db_encrypt_or_decrypt_template_params(
ctxt, encryption_key, encrypt=False, batch_size=50, verbose=False):
from heat.engine import template
session = ctxt.session
if encrypt:
crypt_action = _('encrypt')
else:
crypt_action = _('decrypt')
excs = []
query = session.query(models.RawTemplate)
template_batches = _get_batch(
@ -1483,10 +1534,10 @@ def _db_encrypt_or_decrypt_template_params(
raw_template_update(ctxt, raw_template.id,
{'environment': newenv})
except Exception as exc:
LOG.exception(
_LE('Failed to %(crypt_action)s parameters of raw '
'template %(id)d'), {'id': raw_template.id,
'crypt_action': crypt_action})
LOG.exception(_LE('Failed to %(crypt_action)s parameters '
'of raw template %(id)d'),
{'id': raw_template.id,
'crypt_action': _crypt_action(encrypt)})
excs.append(exc)
continue
finally:
@ -1494,19 +1545,16 @@ def _db_encrypt_or_decrypt_template_params(
LOG.info(_LI("Finished %(crypt_action)s processing of "
"raw_template %(id)d."),
{'id': raw_template.id,
'crypt_action': crypt_action})
'crypt_action': _crypt_action(encrypt)})
next_batch = list(itertools.islice(template_batches, batch_size))
return excs
def _db_encrypt_or_decrypt_resource_prop_data(
def _db_encrypt_or_decrypt_resource_prop_data_legacy(
ctxt, encryption_key, encrypt=False, batch_size=50, verbose=False):
session = ctxt.session
excs = []
if encrypt:
crypt_action = _('encrypt')
else:
crypt_action = _('decrypt')
# Older resources may have properties_data in the legacy column,
# so update those as needed
query = session.query(models.Resource).filter(
@ -1538,7 +1586,7 @@ def _db_encrypt_or_decrypt_resource_prop_data(
LOG.exception(_LE('Failed to %(crypt_action)s '
'properties_data of resource %(id)d') %
{'id': resource.id,
'crypt_action': crypt_action})
'crypt_action': _crypt_action(encrypt)})
excs.append(exc)
continue
finally:
@ -1549,6 +1597,53 @@ def _db_encrypt_or_decrypt_resource_prop_data(
return excs
def _db_encrypt_or_decrypt_resource_prop_data(
        ctxt, encryption_key, encrypt=False, batch_size=50, verbose=False):
    """Encrypt or decrypt the data of resource_properties_data rows.

    Only rows whose ``encrypted`` flag differs from the requested state
    are selected.  Rows are processed in batches of ``batch_size``, each
    batch inside a subtransaction.  Failures on individual rows are
    logged and collected; processing continues.

    :returns: list of exceptions raised while processing rows.
    """
    session = ctxt.session
    errors = []
    # Only touch rows not already in the requested encryption state.
    query = session.query(models.ResourcePropertiesData).filter(
        models.ResourcePropertiesData.encrypted.isnot(encrypt))
    batches = _get_batch(
        session=session, ctxt=ctxt, query=query,
        model=models.ResourcePropertiesData, batch_size=batch_size)
    # Choose the transformation once, outside the loop.
    transform = crypt.encrypted_dict if encrypt else crypt.decrypted_dict
    batch = list(itertools.islice(batches, batch_size))
    while batch:
        with session.begin(subtransactions=True):
            for rpd in batch:
                if not rpd.data:
                    continue
                try:
                    if verbose:
                        LOG.info(_LI("Processing resource_properties_data "
                                     "%(id)d..."), {'id': rpd.id})
                    rpd.update({'data': transform(rpd.data, encryption_key),
                                'encrypted': encrypt})
                except Exception as exc:
                    LOG.exception(
                        _LE("Failed to %(crypt_action)s "
                            "data of resource_properties_data %(id)d") %
                        {'id': rpd.id,
                         'crypt_action': _crypt_action(encrypt)})
                    errors.append(exc)
                    continue
                finally:
                    if verbose:
                        LOG.info(
                            _LI("Finished processing resource_properties_data"
                                " %(id)d."), {'id': rpd.id})
        batch = list(itertools.islice(batches, batch_size))
    return errors
def db_encrypt_parameters_and_properties(ctxt, encryption_key, batch_size=50,
verbose=False):
"""Encrypt parameters and properties for all templates in db.
@ -1568,6 +1663,8 @@ def db_encrypt_parameters_and_properties(ctxt, encryption_key, batch_size=50,
ctxt, encryption_key, True, batch_size, verbose))
excs.extend(_db_encrypt_or_decrypt_resource_prop_data(
ctxt, encryption_key, True, batch_size, verbose))
excs.extend(_db_encrypt_or_decrypt_resource_prop_data_legacy(
ctxt, encryption_key, True, batch_size, verbose))
return excs
@ -1590,6 +1687,8 @@ def db_decrypt_parameters_and_properties(ctxt, encryption_key, batch_size=50,
ctxt, encryption_key, False, batch_size, verbose))
excs.extend(_db_encrypt_or_decrypt_resource_prop_data(
ctxt, encryption_key, False, batch_size, verbose))
excs.extend(_db_encrypt_or_decrypt_resource_prop_data_legacy(
ctxt, encryption_key, False, batch_size, verbose))
return excs

View File

@ -11,19 +11,9 @@
# License for the specific language governing permissions and limitations
# under the License.
import six
from sqlalchemy.util.compat import pickle
import oslo_db.exception
from oslo_log import log as logging
from heat.common import identifier
from heat.objects import event as event_object
LOG = logging.getLogger(__name__)
MAX_EVENT_RESOURCE_PROPERTIES_SIZE = (1 << 16) - 1
from heat.objects import resource_properties_data as rpd_objects
class Event(object):
@ -45,10 +35,17 @@ class Event(object):
self.physical_resource_id = physical_resource_id
self.resource_name = resource_name
self.resource_type = resource_type
try:
self.resource_properties = dict(resource_properties)
except ValueError as ex:
self.resource_properties = {'Error': six.text_type(ex)}
self.rsrc_prop_data = None
if isinstance(resource_properties,
rpd_objects.ResourcePropertiesData):
self.rsrc_prop_data = resource_properties
self.resource_properties = self.rsrc_prop_data.data
elif resource_properties is None:
self.resource_properties = {}
else:
raise AssertionError(
_('resource_properties is unexpected type %s'),
type(resource_properties))
self.uuid = uuid
self.timestamp = timestamp
self.id = id
@ -63,7 +60,6 @@ class Event(object):
'resource_status': self.status,
'resource_status_reason': self.reason,
'resource_type': self.resource_type,
'resource_properties': self.resource_properties,
}
if self.uuid is not None:
@ -72,39 +68,10 @@ class Event(object):
if self.timestamp is not None:
ev['created_at'] = self.timestamp
# Workaround: we don't want to attempt to store the
# event.resource_properties column if the data is too large
# (greater than permitted by BLOB). Otherwise, we end up with
# an unsightly log message.
rp_size = len(pickle.dumps(ev['resource_properties'],
pickle.HIGHEST_PROTOCOL))
if rp_size > MAX_EVENT_RESOURCE_PROPERTIES_SIZE:
LOG.debug('event\'s resource_properties too large to store at '
'%d bytes', rp_size)
# Try truncating the largest value and see if that gets us under
# the db column's size constraint.
max_key, max_val = max(ev['resource_properties'].items(),
key=lambda i: len(repr(i[1])))
err = 'Resource properties are too large to store fully'
ev['resource_properties'].update({'Error': err})
ev['resource_properties'][max_key] = '<Deleted, too large>'
rp_size = len(pickle.dumps(ev['resource_properties'],
pickle.HIGHEST_PROTOCOL))
if rp_size > MAX_EVENT_RESOURCE_PROPERTIES_SIZE:
LOG.debug('event\'s resource_properties STILL too large '
'after truncating largest key at %d bytes', rp_size)
err = 'Resource properties are too large to attempt to store'
ev['resource_properties'] = {'Error': err}
if self.rsrc_prop_data:
ev['rsrc_prop_data_id'] = self.rsrc_prop_data.id
# We should have worked around the issue, but let's be extra
# careful.
try:
new_ev = event_object.Event.create(self.context, ev)
except oslo_db.exception.DBError:
# Give up and drop all properties..
err = 'Resource properties are too large to store'
ev['resource_properties'] = {'Error': err}
new_ev = event_object.Event.create(self.context, ev)
new_ev = event_object.Event.create(self.context, ev)
self.id = new_ev.id
self.timestamp = new_ev.created_at

View File

@ -44,6 +44,7 @@ from heat.engine import scheduler
from heat.engine import support
from heat.objects import resource as resource_objects
from heat.objects import resource_data as resource_data_objects
from heat.objects import resource_properties_data as rpd_objects
from heat.objects import stack as stack_objects
from heat.rpc import client as rpc_client
@ -249,6 +250,7 @@ class Resource(object):
self.uuid = None
self._data = None
self._rsrc_metadata = None
self._rsrc_prop_data = None
self._stored_properties_data = None
self.created_time = stack.created_time
self.updated_time = stack.updated_time
@ -291,6 +293,7 @@ class Resource(object):
self._data = {}
self._rsrc_metadata = resource.rsrc_metadata
self._stored_properties_data = resource.properties_data
self._rsrc_prop_data = resource.rsrc_prop_data
self.created_time = resource.created_at
self.updated_time = resource.updated_at
self.needed_by = resource.needed_by
@ -350,7 +353,7 @@ class Resource(object):
# Don't set physical_resource_id so that a create is triggered.
rs = {'stack_id': self.stack.id,
'name': self.name,
'properties_data': self._stored_properties_data,
'rsrc_prop_data_id': self._create_or_replace_rsrc_prop_data(),
'needed_by': self.needed_by,
'requires': self.requires,
'replaces': self.id,
@ -858,7 +861,10 @@ class Resource(object):
yield self.action_handler_task(action, args=handler_args)
def _update_stored_properties(self):
old_props = self._stored_properties_data
self._stored_properties_data = function.resolve(self.properties.data)
if self._stored_properties_data != old_props:
self._rsrc_prop_data = None
def preview(self):
"""Default implementation of Resource.preview.
@ -1721,10 +1727,6 @@ class Resource(object):
def _store(self, metadata=None):
"""Create the resource in the database."""
properties_data_encrypted, properties_data = (
resource_objects.Resource.encrypt_properties_data(
self._stored_properties_data))
if not self.root_stack_id:
self.root_stack_id = self.stack.root_stack_id()
try:
@ -1735,8 +1737,8 @@ class Resource(object):
'physical_resource_id': self.resource_id,
'name': self.name,
'rsrc_metadata': metadata,
'properties_data': properties_data,
'properties_data_encrypted': properties_data_encrypted,
'rsrc_prop_data_id':
self._create_or_replace_rsrc_prop_data(),
'needed_by': self.needed_by,
'requires': self.requires,
'replaces': self.replaces,
@ -1744,7 +1746,6 @@ class Resource(object):
'current_template_id': self.current_template_id,
'stack_name': self.stack.name,
'root_stack_id': self.root_stack_id}
new_rs = resource_objects.Resource.create(self.context, rs)
self.id = new_rs.id
self.uuid = new_rs.uuid
@ -1757,7 +1758,7 @@ class Resource(object):
"""Add a state change event to the database."""
physical_res_id = self.resource_id or self.physical_resource_name()
ev = event.Event(self.context, self.stack, action, status, reason,
physical_res_id, self.properties,
physical_res_id, self._rsrc_prop_data,
self.name, self.type())
ev.store()
@ -1769,18 +1770,15 @@ class Resource(object):
self.status = status
self.status_reason = reason
properties_data_encrypted, properties_data = (
resource_objects.Resource.encrypt_properties_data(
self._stored_properties_data))
data = {
'action': self.action,
'status': self.status,
'status_reason': reason,
'stack_id': self.stack.id,
'updated_at': self.updated_time,
'properties_data': properties_data,
'properties_data_encrypted': properties_data_encrypted,
'needed_by': self.needed_by,
'rsrc_prop_data_id': self._create_or_replace_rsrc_prop_data(),
'properties_data': None,
'requires': self.requires,
'replaces': self.replaces,
'replaced_by': self.replaced_by,
@ -2286,6 +2284,18 @@ class Resource(object):
self._data = None
return True
def _create_or_replace_rsrc_prop_data(self):
if self._rsrc_prop_data is not None:
return self._rsrc_prop_data.id
if not self._stored_properties_data:
return None
self._rsrc_prop_data = \
rpd_objects.ResourcePropertiesData(self.context).create(
self.context, self._stored_properties_data)
return self._rsrc_prop_data.id
def is_using_neutron(self):
try:
sess_client = self.client('neutron').httpclient

View File

@ -887,7 +887,7 @@ class Stack(collections.Mapping):
def _add_event(self, action, status, reason):
"""Add a state change event to the database."""
ev = event.Event(self.context, self, action, status, reason,
self.id, {},
self.id, None,
self.name, 'OS::Heat::Stack')
ev.store()

View File

@ -34,7 +34,7 @@ def verify(test, reality, tmpl):
if isinstance(prop_def, scenario_template.GetAtt):
targs = reality.resources_by_logical_name(prop_def.target_name)
att_value = targs[0].properties_data[prop_def.attr]
att_value = targs[0].rsrc_prop_data.data[prop_def.attr]
test.assertEqual(att_value, real_value)
elif isinstance(prop_def, scenario_template.GetRes):
@ -44,7 +44,11 @@ def verify(test, reality, tmpl):
else:
test.assertEqual(prop_def, real_value)
test.assertEqual(len(defn.properties), len(phys_rsrc.properties_data))
len_rsrc_prop_data = 0
if phys_rsrc.rsrc_prop_data:
len_rsrc_prop_data = len(phys_rsrc.rsrc_prop_data.data)
test.assertEqual(len(defn.properties),
len_rsrc_prop_data)
test.assertEqual(len(tmpl.resources), len(all_rsrcs))

View File

@ -1396,10 +1396,13 @@ def create_stack(ctx, template, user_creds, **kwargs):
return db_api.stack_create(ctx, values)
def create_resource(ctx, stack, **kwargs):
def create_resource(ctx, stack, legacy_prop_data=False, **kwargs):
phy_res_id = UUID1
if 'phys_res_id' in kwargs:
phy_res_id = kwargs.pop('phys_res_id')
if not legacy_prop_data:
rpd = db_api.resource_prop_data_create(ctx, {'data': {'foo1': 'bar1'},
'encrypted': False})
values = {
'name': 'test_resource_name',
'physical_resource_id': phy_res_id,
@ -1408,8 +1411,11 @@ def create_resource(ctx, stack, **kwargs):
'status_reason': 'create_complete',
'rsrc_metadata': json.loads('{"foo": "123"}'),
'stack_id': stack.id,
'properties_data': {'foo1': 'bar1'}
}
if not legacy_prop_data:
values['rsrc_prop_data'] = rpd
else:
values['properties_data'] = {'foo1': 'bar1'}
values.update(kwargs)
return db_api.resource_create(ctx, values)
@ -1424,7 +1430,18 @@ def create_resource_data(ctx, resource, **kwargs):
return db_api.resource_data_set(ctx, resource.id, **values)
def create_resource_prop_data(ctx, **kwargs):
    """Create a resource_properties_data row, merging kwargs over defaults.

    :param ctx: request context used for the DB call.
    :param kwargs: overrides for the default values dict.
    :returns: the created ResourcePropertiesData DB object.
    """
    values = {
        'data': {'foo1': 'bar1'},
        'encrypted': False
    }
    values.update(kwargs)
    # Pass the merged dict positionally: resource_prop_data_create takes a
    # single values dict, and the other call sites in this module
    # (create_resource, create_event) pass it positionally as well.
    # The previous **values unpacking would not match that signature.
    return db_api.resource_prop_data_create(ctx, values)
def create_event(ctx, **kwargs):
rpd = db_api.resource_prop_data_create(ctx, {'data': {'name': 'foo'},
'encrypted': False})
values = {
'stack_id': 'test_stack_id',
'resource_action': 'create',
@ -1432,7 +1449,7 @@ def create_event(ctx, **kwargs):
'resource_name': 'res',
'physical_resource_id': UUID1,
'resource_status_reason': "create_complete",
'resource_properties': {'name': 'foo'}
'rsrc_prop_data': rpd,
}
values.update(kwargs)
return db_api.event_create(ctx, values)
@ -1998,23 +2015,26 @@ class DBAPIStackTest(common.HeatTestCase):
creds = [create_user_creds(self.ctx) for i in range(5)]
stacks = [create_stack(self.ctx, templates[i], creds[i],
deleted_at=deleted[i]) for i in range(5)]
resources = [create_resource(self.ctx, stacks[i]) for i in range(5)]
events = [create_event(self.ctx, stack_id=stacks[i].id)
for i in range(5)]
db_api.purge_deleted(age=1, granularity='days')
admin_ctx = utils.dummy_context(is_admin=True)
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (0, 1, 2), (3, 4))
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 2), (3, 4))
db_api.purge_deleted(age=22, granularity='hours')
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (0, 1, 2), (3, 4))
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 2), (3, 4))
db_api.purge_deleted(age=1100, granularity='minutes')
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (0, 1), (2, 3, 4))
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1), (2, 3, 4))
db_api.purge_deleted(age=3600, granularity='seconds')
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (), (0, 1, 2, 3, 4))
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (), (0, 1, 2, 3, 4))
def test_purge_project_deleted(self):
now = timeutils.utcnow()
@ -2037,31 +2057,34 @@ class DBAPIStackTest(common.HeatTestCase):
stacks = [create_stack(self.ctx, templates[i], creds[i],
deleted_at=deleted[i], **values[i]
) for i in range(5)]
resources = [create_resource(self.ctx, stacks[i]) for i in range(5)]
events = [create_event(self.ctx, stack_id=stacks[i].id)
for i in range(5)]
db_api.purge_deleted(age=1, granularity='days', project_id=UUID1)
admin_ctx = utils.dummy_context(is_admin=True)
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (0, 1, 2, 3, 4), ())
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 2, 3, 4), ())
db_api.purge_deleted(age=22, granularity='hours', project_id=UUID1)
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (0, 1, 2, 3, 4), ())
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 2, 3, 4), ())
db_api.purge_deleted(age=1100, granularity='minutes', project_id=UUID1)
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (0, 1, 3, 4), (2,))
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 3, 4), (2,))
db_api.purge_deleted(age=30, granularity='hours', project_id=UUID2)
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (0, 1, 3), (2, 4))
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (0, 1, 3), (2, 4))
db_api.purge_deleted(age=3600, granularity='seconds', project_id=UUID1)
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (3,), (0, 1, 2, 4))
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (3,), (0, 1, 2, 4))
db_api.purge_deleted(age=3600, granularity='seconds', project_id=UUID2)
self._deleted_stack_existance(admin_ctx, stacks,
tmpl_files, (), (0, 1, 2, 3, 4))
self._deleted_stack_existance(admin_ctx, stacks, resources,
events, tmpl_files, (), (0, 1, 2, 3, 4))
def test_purge_deleted_prev_raw_template(self):
now = timeutils.utcnow()
@ -2156,13 +2179,23 @@ class DBAPIStackTest(common.HeatTestCase):
db_api.raw_template_files_get,
self.ctx, tmpl_files[2].files_id)
def _deleted_stack_existance(self, ctx, stacks,
def _deleted_stack_existance(self, ctx, stacks, resources, events,
tmpl_files, existing, deleted):
for s in existing:
self.assertIsNotNone(db_api.stack_get(ctx, stacks[s].id,
show_deleted=True))
self.assertIsNotNone(db_api.raw_template_files_get(
ctx, tmpl_files[s].files_id))
self.assertIsNotNone(db_api.resource_get(
ctx, resources[s].id))
self.assertIsNotNone(db_api.event_get(
ctx, events[s].id))
self.assertIsNotNone(ctx.session.query(
models.ResourcePropertiesData).filter_by(
id=resources[s].rsrc_prop_data.id).first())
self.assertIsNotNone(ctx.session.query(
models.ResourcePropertiesData).filter_by(
id=events[s].rsrc_prop_data.id).first())
for s in deleted:
self.assertIsNone(db_api.stack_get(ctx, stacks[s].id,
show_deleted=True))
@ -2177,6 +2210,16 @@ class DBAPIStackTest(common.HeatTestCase):
self.assertEqual([],
db_api.event_get_all_by_stack(ctx,
stacks[s].id))
self.assertIsNone(db_api.event_get(ctx, events[s].id))
self.assertIsNone(ctx.session.query(
models.ResourcePropertiesData).filter_by(
id=resources[s].rsrc_prop_data.id).first())
self.assertIsNone(ctx.session.query(
models.ResourcePropertiesData).filter_by(
id=events[s].rsrc_prop_data.id).first())
self.assertEqual([],
db_api.event_get_all_by_stack(ctx,
stacks[s].id))
self.assertIsNone(db_api.user_creds_get(
self.ctx, stacks[s].user_creds_id))
@ -2220,6 +2263,7 @@ class DBAPIStackTest(common.HeatTestCase):
create_resource(
self.ctx,
stack,
False,
name='%s-%s' % (stack.name, i),
root_stack_id=root_stack_id
)
@ -2363,7 +2407,8 @@ class DBAPIResourceTest(common.HeatTestCase):
{'name': 'res2'},
{'name': 'res3'},
]
[create_resource(self.ctx, self.stack, **val) for val in values]
[create_resource(self.ctx, self.stack, False, **val)
for val in values]
resources = db_api.resource_get_all(self.ctx)
self.assertEqual(3, len(resources))
@ -2380,7 +2425,8 @@ class DBAPIResourceTest(common.HeatTestCase):
{'name': 'res3', 'stack_id': self.stack.id},
{'name': 'res4', 'stack_id': self.stack1.id},
]
[create_resource(self.ctx, self.stack, **val) for val in values]
[create_resource(self.ctx, self.stack, False, **val)
for val in values]
# Test for all resources in a stack
resources = db_api.resource_get_all_by_stack(self.ctx, self.stack.id)
@ -2675,7 +2721,7 @@ class DBAPIEventTest(common.HeatTestCase):
self.assertEqual('res', ret_event.resource_name)
self.assertEqual(UUID1, ret_event.physical_resource_id)
self.assertEqual('create_complete', ret_event.resource_status_reason)
self.assertEqual({'name': 'foo'}, ret_event.resource_properties)
self.assertEqual({'name': 'foo'}, ret_event.rsrc_prop_data.data)
def test_event_get_all(self):
self.stack1 = create_stack(self.ctx, self.template, self.user_creds,
@ -2965,7 +3011,7 @@ class DBAPIResourceUpdateTest(common.HeatTestCase):
template = create_raw_template(self.ctx)
user_creds = create_user_creds(self.ctx)
stack = create_stack(self.ctx, template, user_creds)
self.resource = create_resource(self.ctx, stack,
self.resource = create_resource(self.ctx, stack, False,
atomic_key=0)
def test_unlocked_resource_update(self):
@ -3324,7 +3370,8 @@ class DBAPICryptParamsPropsTest(common.HeatTestCase):
return db_api.raw_template_create(self.ctx, template)
def encrypt(self, enc_key=None, batch_size=50):
def encrypt(self, enc_key=None, batch_size=50,
legacy_prop_data=False):
session = self.ctx.session
if enc_key is None:
enc_key = cfg.CONF.auth_encryption_key
@ -3345,14 +3392,20 @@ class DBAPICryptParamsPropsTest(common.HeatTestCase):
enc_resources = session.query(models.Resource).all()
self.assertNotEqual([], enc_resources)
for enc_resource in enc_resources:
self.assertEqual('cryptography_decrypt_v1',
enc_resource.properties_data['foo1'][0])
if legacy_prop_data:
self.assertEqual(
'cryptography_decrypt_v1',
enc_resource.properties_data['foo1'][0])
else:
self.assertEqual(
'cryptography_decrypt_v1',
enc_resource.rsrc_prop_data.data['foo1'][0])
ev = enc_tmpl.environment['parameters']['param2'][1]
return ev
def decrypt(self, encrypt_value, enc_key=None,
batch_size=50):
batch_size=50, legacy_prop_data=False):
session = self.ctx.session
if enc_key is None:
enc_key = cfg.CONF.auth_encryption_key
@ -3383,11 +3436,16 @@ class DBAPICryptParamsPropsTest(common.HeatTestCase):
dec_resources = session.query(models.Resource).all()
self.assertNotEqual([], dec_resources)
for dec_resource in dec_resources:
self.assertEqual('bar1', dec_resource.properties_data['foo1'])
if legacy_prop_data:
self.assertEqual(
'bar1', dec_resource.properties_data['foo1'])
else:
self.assertEqual(
'bar1', dec_resource.rsrc_prop_data.data['foo1'])
return decrypt_value
def _test_db_encrypt_decrypt(self, batch_size=50):
def _test_db_encrypt_decrypt(self, batch_size=50, legacy_prop_data=False):
session = self.ctx.session
raw_templates = session.query(models.RawTemplate).all()
self.assertNotEqual([], raw_templates)
@ -3403,50 +3461,74 @@ class DBAPICryptParamsPropsTest(common.HeatTestCase):
self.assertEqual(len(resources), len(raw_templates))
for resource in resources:
resource = db_api.resource_get(self.ctx, resource.id)
self.assertEqual('bar1', resource.properties_data['foo1'])
if legacy_prop_data:
self.assertEqual(
'bar1', resource.properties_data['foo1'])
else:
self.assertEqual(
'bar1', resource.rsrc_prop_data.data['foo1'])
# Test encryption
encrypt_value = self.encrypt(batch_size=batch_size)
encrypt_value = self.encrypt(batch_size=batch_size,
legacy_prop_data=legacy_prop_data)
# Test that encryption is idempotent
encrypt_value2 = self.encrypt(batch_size=batch_size)
encrypt_value2 = self.encrypt(batch_size=batch_size,
legacy_prop_data=legacy_prop_data)
self.assertEqual(encrypt_value, encrypt_value2)
# Test decryption
decrypt_value = self.decrypt(encrypt_value, batch_size=batch_size)
decrypt_value = self.decrypt(encrypt_value, batch_size=batch_size,
legacy_prop_data=legacy_prop_data)
# Test that decryption is idempotent
decrypt_value2 = self.decrypt(encrypt_value, batch_size=batch_size)
decrypt_value2 = self.decrypt(encrypt_value, batch_size=batch_size,
legacy_prop_data=legacy_prop_data)
self.assertEqual(decrypt_value, decrypt_value2)
# Test using a different encryption key to encrypt & decrypt
encrypt_value3 = self.encrypt(
enc_key='774c15be099ea74123a9b9592ff12680',
batch_size=batch_size)
batch_size=batch_size, legacy_prop_data=legacy_prop_data)
decrypt_value3 = self.decrypt(
encrypt_value3, enc_key='774c15be099ea74123a9b9592ff12680',
batch_size=batch_size)
batch_size=batch_size, legacy_prop_data=legacy_prop_data)
self.assertEqual(decrypt_value, decrypt_value3)
self.assertNotEqual(encrypt_value, decrypt_value)
self.assertNotEqual(encrypt_value3, decrypt_value3)
self.assertNotEqual(encrypt_value, encrypt_value3)
def test_db_encrypt_decrypt(self):
"""Test encryption and decryption for single template"""
"""Test encryption and decryption for single template and resource."""
self._test_db_encrypt_decrypt()
def test_db_encrypt_decrypt_in_batches(self):
"""Test encryption and decryption in for several templates.
def test_db_encrypt_decrypt_legacy_prop_data(self):
"""Test encryption and decryption for res with legacy prop data."""
# delete what setUp created
[self.ctx.session.delete(r) for r in
self.ctx.session.query(models.Resource).all()]
[self.ctx.session.delete(s) for s in
self.ctx.session.query(models.Stack).all()]
[self.ctx.session.delete(t) for t in
self.ctx.session.query(models.RawTemplate).all()]
Test encryption and decryption when heat requests templates in batch:
predefined amount records.
tmpl = self._create_template()
stack = create_stack(self.ctx, tmpl, self.user_creds)
create_resource(self.ctx, stack, True, name='res1')
self._test_db_encrypt_decrypt(legacy_prop_data=True)
def test_db_encrypt_decrypt_in_batches(self):
"""Test encryption and decryption in for several templates and resources.
Test encryption and decryption with set batch size of
templates and resources.
"""
tmpl1 = self._create_template()
tmpl2 = self._create_template()
stack = create_stack(self.ctx, tmpl1, self.user_creds)
create_resource(self.ctx, stack, name='res1')
create_resource(self.ctx, stack, False, name='res1')
stack2 = create_stack(self.ctx, tmpl2, self.user_creds)
create_resource(self.ctx, stack2, name='res2')
create_resource(self.ctx, stack2, False, name='res2')
self._test_db_encrypt_decrypt(batch_size=1)
@ -3540,15 +3622,20 @@ class DBAPICryptParamsPropsTest(common.HeatTestCase):
template = self._create_template()
user_creds = create_user_creds(ctx)
stack = create_stack(ctx, template, user_creds)
create_resource(ctx, stack, name='res1')
create_resource(ctx, stack, legacy_prop_data=True, name='res2')
db_api.db_encrypt_parameters_and_properties(
ctx, cfg.CONF.auth_encryption_key, verbose=True)
self.assertIn("Processing raw_template 1", info_logger.output)
self.assertIn("Processing resource 1", info_logger.output)
self.assertIn("Finished encrypt processing of raw_template 1",
info_logger.output)
self.assertIn("Finished processing resource 1", info_logger.output)
self.assertIn("Processing resource_properties_data 1",
info_logger.output)
self.assertIn("Finished processing resource_properties_data 1",
info_logger.output)
# only the resource with legacy properties data is processed
self.assertIn("Processing resource 2", info_logger.output)
self.assertIn("Finished processing resource 2", info_logger.output)
info_logger2 = self.useFixture(
fixtures.FakeLogger(level=logging.INFO,
@ -3558,10 +3645,15 @@ class DBAPICryptParamsPropsTest(common.HeatTestCase):
db_api.db_decrypt_parameters_and_properties(
ctx, cfg.CONF.auth_encryption_key, verbose=True)
self.assertIn("Processing raw_template 1", info_logger2.output)
self.assertIn("Processing resource 1", info_logger2.output)
self.assertIn("Finished decrypt processing of raw_template 1",
info_logger2.output)
self.assertIn("Finished processing resource 1", info_logger2.output)
self.assertIn("Processing resource_properties_data 1",
info_logger.output)
self.assertIn("Finished processing resource_properties_data 1",
info_logger.output)
# only the resource with legacy properties data is processed
self.assertIn("Processing resource 2", info_logger2.output)
self.assertIn("Finished processing resource 2", info_logger2.output)
def test_db_encrypt_decrypt_verbose_off(self):
info_logger = self.useFixture(

View File

@ -87,6 +87,7 @@ class NeutronTest(common.HeatTestCase):
tmpl = template.Template(empty_tmpl)
stack_name = 'dummystack'
self.dummy_stack = stack.Stack(utils.dummy_context(), stack_name, tmpl)
self.dummy_stack.store()
tmpl = rsrc_defn.ResourceDefinition('test_res', 'Foo')
self.dummy_stack.has_cache_data = mock.Mock(return_value=False)

View File

@ -22,6 +22,7 @@ import six
from heat.common import exception
from heat.common import template_format
from heat.common import timeutils as heat_timeutils
from heat.db.sqlalchemy import models
from heat.engine import api
from heat.engine import event
from heat.engine import parameters
@ -42,7 +43,8 @@ class FormatTest(common.HeatTestCase):
tmpl = template.Template({
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'generic1': {'Type': 'GenericResourceType'},
'generic1': {'Type': 'GenericResourceType',
'Properties': {'k1': 'v1'}},
'generic2': {
'Type': 'GenericResourceType',
'DependsOn': 'generic1'},
@ -54,12 +56,12 @@ class FormatTest(common.HeatTestCase):
self.stack = parser.Stack(self.context, 'test_stack',
tmpl, stack_id=str(uuid.uuid4()))
def _dummy_event(self):
def _dummy_event(self, res_properties=None):
    """Store a canned CREATE/COMPLETE event and return its DB object.

    :param res_properties: optional ResourcePropertiesData to attach to
        the event; None exercises the no-properties path.
    """
    rsrc = self.stack['generic1']
    new_event = event.Event(
        self.context, self.stack, 'CREATE', 'COMPLETE', 'state changed',
        'z3455xyc-9f88-404d-a85b-5315293e67de',
        res_properties, rsrc.name, rsrc.type(),
        uuid='abc123yc-9f88-404d-a85b-531529456xyz')
    # load back through the object layer so the test sees exactly what
    # a consumer of event_object.Event would see
    return event_object.Event.get_by_id(self.context, new_event.store())
@ -312,6 +314,31 @@ class FormatTest(common.HeatTestCase):
'tenant': 'test_tenant_id'
}, event_id_formatted)
def test_format_event_prop_data(self):
    """An event referencing rsrc_prop_data formats those properties."""
    resource = self.stack['generic1']
    resource._update_stored_properties()
    resource._store()
    # NOTE: named 'ev', not 'event' — a local called 'event' would
    # shadow the imported heat.engine.event module
    ev = self._dummy_event(res_properties=resource._rsrc_prop_data)
    formatted = api.format_event(ev, self.stack.identifier())
    self.assertEqual({'k1': 'v1'}, formatted[rpc_api.EVENT_RES_PROPERTIES])
def test_format_event_legacy_prop_data(self):
    """Properties stored in the legacy event column are still formatted."""
    # NOTE: named 'ev', not 'event' — a local called 'event' would
    # shadow the imported heat.engine.event module
    ev = self._dummy_event(res_properties=None)
    # write directly to the legacy location (the resource_properties
    # column on the event row) to simulate pre-migration data
    db_obj = self.stack.context.session.query(
        models.Event).filter_by(id=ev.id).first()
    db_obj.update({'resource_properties': {'legacy_k1': 'legacy_v1'}})
    db_obj.save(self.stack.context.session)
    event_legacy = event_object.Event.get_by_id(self.context, ev.id)
    formatted = api.format_event(event_legacy, self.stack.identifier())
    self.assertEqual({'legacy_k1': 'legacy_v1'},
                     formatted[rpc_api.EVENT_RES_PROPERTIES])
def test_format_event_empty_prop_data(self):
    """An event with no properties data formats to an empty dict."""
    # NOTE: named 'ev', not 'event' — a local called 'event' would
    # shadow the imported heat.engine.event module
    ev = self._dummy_event(res_properties=None)
    formatted = api.format_event(ev, self.stack.identifier())
    self.assertEqual({}, formatted[rpc_api.EVENT_RES_PROPERTIES])
@mock.patch.object(api, 'format_stack_resource')
def test_format_stack_preview(self, mock_fmt_resource):
def mock_format_resources(res, **kwargs):

View File

@ -13,19 +13,17 @@
import mock
from oslo_config import cfg
import oslo_db.exception
import uuid
from heat.db.sqlalchemy import api as db_api
from heat.db.sqlalchemy import models
from heat.engine import event
from heat.engine import rsrc_defn
from heat.engine import stack
from heat.engine import template
from heat.objects import event as event_object
from heat.objects import resource_properties_data as rpd_object
from heat.objects import stack as stack_object
from heat.tests import common
from heat.tests import generic_resource as generic_rsrc
from heat.tests import utils
cfg.CONF.import_opt('event_purge_batch_size', 'heat.common.config')
@ -53,34 +51,13 @@ tmpl_multiple = {
}
}
tmpl_multiple_too_large = {
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'EventTestResource': {
'Type': 'ResourceWithMultipleRequiredProps',
'Properties': {'Foo1': 'zoo',
'Foo2': 'A' * (1 << 16),
'Foo3': '99999'}
}
}
}
tmpl_multiple_srsly_too_large = {
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'EventTestResource': {
'Type': 'ResourceWithMultipleRequiredProps',
'Properties': {'Foo1': 'Z' * (1 << 16),
'Foo2': 'A' * (1 << 16),
'Foo3': '99999'}
}
}
}
class EventCommon(common.HeatTestCase):
def _setup_stack(self, the_tmpl):
def _setup_stack(self, the_tmpl, encrypted=False):
if encrypted:
cfg.CONF.set_override('encrypt_parameters_and_properties', True)
self.username = 'event_test_user'
self.ctx = utils.dummy_context()
@ -92,6 +69,7 @@ class EventCommon(common.HeatTestCase):
self.stack.store()
self.resource = self.stack['EventTestResource']
self.resource._update_stored_properties()
self.resource._store()
self.addCleanup(stack_object.Stack.delete, self.ctx, self.stack.id)
@ -108,14 +86,14 @@ class EventTest(EventCommon):
self.resource.resource_id_set('resource_physical_id')
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'alabama', self.resource.properties,
'alabama', self.resource._rsrc_prop_data,
self.resource.name, self.resource.type())
e.store()
self.assertEqual(1, len(event_object.Event.get_all_by_stack(
self.ctx,
self.stack.id)))
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'arizona', self.resource.properties,
'arizona', self.resource._rsrc_prop_data,
self.resource.name, self.resource.type())
e.store()
events = event_object.Event.get_all_by_stack(self.ctx, self.stack.id)
@ -128,7 +106,7 @@ class EventTest(EventCommon):
self.resource.resource_id_set('resource_physical_id')
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'arkansas', self.resource.properties,
'arkansas', self.resource._rsrc_prop_data,
self.resource.name, self.resource.type())
e.store()
@ -137,7 +115,7 @@ class EventTest(EventCommon):
mock_random_uniform.return_value = 2.0 / 100 - .0001
e = event.Event(self.ctx, self.stack, 'TEST',
'IN_PROGRESS', 'Testing',
'alaska', self.resource.properties,
'alaska', self.resource._rsrc_prop_data,
self.resource.name, self.resource.type())
e.store()
events = event_object.Event.get_all_by_stack(self.ctx, self.stack.id)
@ -149,16 +127,68 @@ class EventTest(EventCommon):
mock_random_uniform.return_value = 2.0 / 100 + .0001
e = event.Event(self.ctx, self.stack, 'TEST',
'IN_PROGRESS', 'Testing',
'aardvark', self.resource.properties,
'aardvark', self.resource._rsrc_prop_data,
self.resource.name, self.resource.type())
e.store()
events = event_object.Event.get_all_by_stack(self.ctx, self.stack.id)
self.assertEqual(2, len(events))
def test_store_caps_resource_props_data(self):
    """Purging capped events also purges unreferenced rsrc_prop_data.

    With max_events_per_stack=3 and event_purge_batch_size=2, storing a
    4th event triggers a purge of the two oldest events.  The purge must
    delete resource_properties_data rows that only those events referred
    to, but must keep rows still referenced by the resource itself or by
    surviving events.
    """
    cfg.CONF.set_override('event_purge_batch_size', 2, enforce_type=True)
    cfg.CONF.set_override('max_events_per_stack', 3, enforce_type=True)
    self.resource.resource_id_set('resource_physical_id')

    # event 1 shares the resource's own rsrc_prop_data row
    e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
                    'alabama', self.resource._rsrc_prop_data,
                    self.resource.name, self.resource.type())
    e.store()
    rpd1_id = self.resource._rsrc_prop_data.id

    # events 2-4 each get their own dedicated rsrc_prop_data row
    rpd2 = rpd_object.ResourcePropertiesData.create(
        self.ctx, {'encrypted': False, 'data': {'foo': 'bar'}})
    rpd2_id = rpd2.id
    e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
                    'arizona', rpd2,
                    self.resource.name, self.resource.type())
    e.store()

    rpd3 = rpd_object.ResourcePropertiesData.create(
        self.ctx, {'encrypted': False, 'data': {'foo': 'bar'}})
    rpd3_id = rpd3.id
    e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
                    'arkansas', rpd3,
                    self.resource.name, self.resource.type())
    e.store()

    # storing this 4th event exceeds max_events_per_stack and triggers
    # the purge of the two oldest events
    rpd4 = rpd_object.ResourcePropertiesData.create(
        self.ctx, {'encrypted': False, 'data': {'foo': 'bar'}})
    rpd4_id = rpd4.id
    e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
                    'arkansas', rpd4,
                    self.resource.name, self.resource.type())
    e.store()

    events = event_object.Event.get_all_by_stack(self.ctx, self.stack.id)
    self.assertEqual(2, len(events))
    self.assertEqual('arkansas', events[0].physical_resource_id)

    # rpd1 should still exist since that is still referred to by
    # the resource. rpd2 should have been deleted along with the
    # 2nd event.
    self.assertIsNotNone(self.ctx.session.query(
        models.ResourcePropertiesData).get(rpd1_id))
    self.assertIsNone(self.ctx.session.query(
        models.ResourcePropertiesData).get(rpd2_id))

    # We didn't purge the last two events, so we ought to have
    # kept rsrc_prop_data for both.
    self.assertIsNotNone(self.ctx.session.query(
        models.ResourcePropertiesData).get(rpd3_id))
    self.assertIsNotNone(self.ctx.session.query(
        models.ResourcePropertiesData).get(rpd4_id))
def test_identifier(self):
event_uuid = 'abc123yc-9f88-404d-a85b-531529456xyz'
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'wibble', self.resource.properties,
'wibble', self.resource._rsrc_prop_data,
self.resource.name, self.resource.type(),
uuid=event_uuid)
@ -173,27 +203,16 @@ class EventTest(EventCommon):
def test_identifier_is_none(self):
    """An event has no identifier until it has been stored."""
    ev = event.Event(
        self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing', 'wibble',
        self.resource._rsrc_prop_data, self.resource.name,
        self.resource.type())
    # not stored yet, so no identifier is available
    self.assertIsNone(ev.identifier())
    ev.store()
    # once stored, the event can identify itself
    self.assertIsNotNone(ev.identifier())
def test_badprop(self):
    """Invalid resource properties are reported via an 'Error' entry.

    'IntFoo': False fails validation for ResourceWithRequiredProps, so
    the event's resource_properties should carry an 'Error' key rather
    than raising.
    """
    rname = 'bad_resource'
    defn = rsrc_defn.ResourceDefinition(rname,
                                        'ResourceWithRequiredProps',
                                        {'IntFoo': False})
    res = generic_rsrc.ResourceWithRequiredProps(rname, defn, self.stack)
    e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
                    'wibble', res.properties, res.name, res.type())
    self.assertIn('Error', e.resource_properties)
def test_as_dict(self):
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'wibble', self.resource.properties,
'wibble', self.resource._rsrc_prop_data,
self.resource.name, self.resource.type())
e.store()
@ -213,6 +232,23 @@ class EventTest(EventCommon):
'version': '0.1'}}
self.assertEqual(expected, e.as_dict())
def test_load_deprecated_prop_data(self):
    """Events still expose properties stored in the legacy DB column.

    New events reference resource_properties_data, but events written
    before the migration kept properties in the event row's own
    resource_properties column; loading must fall back to it.
    """
    e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
                    'wibble', self.resource._rsrc_prop_data,
                    self.resource.name, self.resource.type())
    e.store()

    # for test purposes, dress up the event to have the deprecated
    # properties_data field populated (and drop the modern reference)
    e_obj = db_api.event_get(self.resource.context, e.id)
    with self.ctx.session.begin():
        e_obj['resource_properties'] = {'Time': 'not enough'}
        e_obj['rsrc_prop_data'] = None

    # verify the deprecated data gets loaded
    ev = event_object.Event.get_by_id(self.ctx, e.id)
    self.assertEqual({'Time': 'not enough'}, ev.resource_properties)
def test_event_object_resource_properties_data(self):
cfg.CONF.set_override('encrypt_parameters_and_properties', True,
enforce_type=True)
@ -232,78 +268,29 @@ class EventTest(EventCommon):
self.assertEqual(data, e_obj.rsrc_prop_data.data)
class EventTestSingleLargeProp(EventCommon):
class EventEncryptedTest(EventCommon):
def setUp(self):
super(EventTestSingleLargeProp, self).setUp()
self._setup_stack(tmpl_multiple_too_large)
def test_too_large_single_prop(self):
self.resource.resource_id_set('resource_physical_id')
super(EventEncryptedTest, self).setUp()
self._setup_stack(tmpl, encrypted=True)
def test_props_encrypted(self):
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'alabama', self.resource.properties,
'wibble', self.resource._rsrc_prop_data,
self.resource.name, self.resource.type())
e.store()
self.assertIsNotNone(e.id)
# verify the resource_properties_data db data is encrypted
e_obj = event_object.Event.get_by_id(self.resource.context, e.id)
rpd_id = e_obj['rsrc_prop_data'].id
results = self.resource.context.session.query(
models.ResourcePropertiesData).filter_by(
id=rpd_id)
self.assertNotEqual('goo',
results[0]['data']['Foo'])
self.assertTrue(results[0]['encrypted'])
# verify encrypted data is decrypted when retrieved through
# heat object layer
ev = event_object.Event.get_by_id(self.ctx, e.id)
self.assertEqual(
{'Foo1': 'zoo',
'Foo2': '<Deleted, too large>',
'Foo3': '99999',
'Error': 'Resource properties are too large to store fully'},
ev['resource_properties'])
class EventTestMultipleLargeProp(EventCommon):
def setUp(self):
super(EventTestMultipleLargeProp, self).setUp()
self._setup_stack(tmpl_multiple_srsly_too_large)
def test_too_large_multiple_prop(self):
self.resource.resource_id_set('resource_physical_id')
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'alabama', self.resource.properties,
self.resource.name, self.resource.type())
e.store()
self.assertIsNotNone(e.id)
ev = event_object.Event.get_by_id(self.ctx, e.id)
self.assertEqual(
{'Error': 'Resource properties are too large to attempt to store'},
ev['resource_properties'])
class EventTestStoreProps(EventCommon):
def setUp(self):
super(EventTestStoreProps, self).setUp()
self._setup_stack(tmpl_multiple)
def test_store_fail_all_props(self):
self.resource.resource_id_set('resource_physical_id')
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'alabama', self.resource.properties,
self.resource.name, self.resource.type())
e.store()
self.assertIsNotNone(e.id)
ev = event_object.Event.get_by_id(self.ctx, e.id)
errors = [oslo_db.exception.DBError]
def side_effect(*args):
try:
raise errors.pop()
except IndexError:
self.assertEqual(
{'Error': 'Resource properties are too large to store'},
args[1]['resource_properties'])
return ev
with mock.patch("heat.objects.event.Event") as mock_event:
mock_event.create.side_effect = side_effect
e.store()
self.assertEqual({'Foo': 'goo'}, ev.rsrc_prop_data.data)

View File

@ -814,6 +814,79 @@ class ResourceTest(common.HeatTestCase):
scheduler.TaskRunner(res.create)()
self.assertEqual((res.CREATE, res.COMPLETE), res.state)
def test_deprecated_prop_data_updated(self):
    """Legacy properties_data columns are migrated to rsrc_prop_data.

    A resource whose row carries data in the deprecated properties_data
    / properties_data_encrypted columns should, after a load and a
    subsequent store, have its data in resource_properties_data instead.
    """
    tmpl = rsrc_defn.ResourceDefinition('test_resource', 'Foo',
                                        {'Foo': 'abc'})
    res = generic_rsrc.ResourceWithProps('test_resource', tmpl, self.stack)
    scheduler.TaskRunner(res.create)()
    res_obj = db_api.resource_get(self.stack.context, res.id)
    self.assertIsNone(res_obj.properties_data)
    self.assertIsNone(res_obj.properties_data_encrypted)

    # Now that we've established these couple of deprecated fields
    # are not populated, let's populate them.
    db_api.resource_update_and_save(self.stack.context, res_obj.id,
                                    {'properties_data': {'Foo': 'lucky'},
                                     'properties_data_encrypted': False,
                                     'rsrc_prop_data': None})
    res._rsrc_prop_data = None
    res._load_data(res_obj)

    # Legacy properties_data slurped into res._stored_properties_data
    self.assertEqual(res._stored_properties_data, {'Foo': 'lucky'})
    res._rsrc_prop_data = None
    res._store_or_update(res.CREATE, res.IN_PROGRESS, 'test_rpd')

    # Modernity, the data is where it belongs
    self.assertEqual(res._rsrc_prop_data.data, {'Foo': 'lucky'})
    self.assertFalse(hasattr(res, 'properties_data'))
    self.assertFalse(hasattr(res, 'properties_data_encrypted'))
def test_deprecated_encrypted_prop_data_updated(self):
    """Encrypted legacy properties_data migrates to rsrc_prop_data.

    Same as test_deprecated_prop_data_updated, but the legacy column
    holds encrypted data: loading must decrypt it, and re-storing must
    land it (re-encrypted) in resource_properties_data.
    """
    cfg.CONF.set_override('encrypt_parameters_and_properties', True)
    tmpl = rsrc_defn.ResourceDefinition('test_resource', 'Foo',
                                        {'Foo': 'abc'})
    res = generic_rsrc.ResourceWithProps('test_resource', tmpl, self.stack)
    scheduler.TaskRunner(res.create)()
    res_obj = db_api.resource_get(self.stack.context, res.id)
    self.assertIsNone(res_obj.properties_data)
    self.assertIsNone(res_obj.properties_data_encrypted)

    # Now that we've established these couple of deprecated fields
    # are not populated, let's populate them.
    encrypted_data = \
        rpd_object.ResourcePropertiesData.encrypt_properties_data(
            {'Foo': 'lucky'})[1]
    db_api.resource_update_and_save(self.stack.context, res_obj.id,
                                    {'properties_data': encrypted_data,
                                     'properties_data_encrypted': True,
                                     'rsrc_prop_data': None})

    # This is where the decrypting of legacy data happens
    res_obj = resource_objects.Resource._from_db_object(
        resource_objects.Resource(), self.stack.context, res_obj)
    self.assertEqual('lucky',
                     res_obj.properties_data['Foo'])

    res._rsrc_prop_data = None
    res._load_data(res_obj)
    # Legacy properties_data slurped into res._stored_properties_data
    self.assertEqual(res._stored_properties_data, {'Foo': 'lucky'})
    res._rsrc_prop_data = None
    res._store_or_update(res.CREATE, res.IN_PROGRESS, 'test_store')

    # Modernity, the data is where it belongs
    # The db object data is encrypted
    rsrc_prop_data_db_obj = db_api.resource_prop_data_get(
        self.stack.context, res._rsrc_prop_data.id)
    self.assertNotEqual(rsrc_prop_data_db_obj['data'], {'Foo': 'lucky'})
    self.assertEqual(rsrc_prop_data_db_obj.encrypted, True)
    # But the objects/ rsrc_prop_data.data is always unencrypted
    self.assertEqual(res._rsrc_prop_data.data, {'Foo': 'lucky'})
    self.assertFalse(hasattr(res, 'properties_data'))
    self.assertFalse(hasattr(res, 'properties_data_encrypted'))
def test_create_fail_missing_req_prop(self):
rname = 'test_resource'
tmpl = rsrc_defn.ResourceDefinition(rname, 'Foo', {})
@ -1824,39 +1897,40 @@ class ResourceTest(common.HeatTestCase):
'prop4': ['a', 'list'],
'prop5': True}
# The db data should be encrypted when _store() is called
res = generic_rsrc.GenericResource('test_res_enc', tmpl, self.stack)
res._stored_properties_data = stored_properties_data
res._rsrc_prop_data = None
res._store()
db_res = db_api.resource_get(res.context, res.id)
self.assertNotEqual('string',
db_res.rsrc_prop_data.data['prop1'])
# The db data should be encrypted when _store_or_update() is called
res = generic_rsrc.GenericResource('test_res_enc', tmpl, self.stack)
res._stored_properties_data = stored_properties_data
res._store_or_update(res.CREATE, res.IN_PROGRESS, 'test_store')
db_res = db_api.resource_get(res.context, res.id)
self.assertNotEqual('string',
db_res.properties_data['prop1'])
# The db data should be encrypted when _store() is called
res = generic_rsrc.GenericResource('test_res_enc', tmpl, self.stack)
res._stored_properties_data = stored_properties_data
res._store()
db_res = db_api.resource_get(res.context, res.id)
self.assertNotEqual('string',
db_res.properties_data['prop1'])
db_res.rsrc_prop_data.data['prop1'])
# The properties data should be decrypted when the object is
# loaded using get_obj
res_obj = resource_objects.Resource.get_obj(res.context, res.id)
self.assertEqual('string', res_obj.properties_data['prop1'])
self.assertEqual('string', res_obj.rsrc_prop_data.data['prop1'])
# The properties data should be decrypted when the object is
# _stored_properties_data should be decrypted when the object is
# loaded using get_all_by_stack
res_objs = resource_objects.Resource.get_all_by_stack(res.context,
self.stack.id)
res_obj = res_objs['test_res_enc']
self.assertEqual('string', res_obj.properties_data['prop1'])
self.assertEqual('string', res_obj.rsrc_prop_data.data['prop1'])
# The properties data should be decrypted when the object is
# refreshed
res_obj = resource_objects.Resource.get_obj(res.context, res.id)
res_obj.refresh()
self.assertEqual('string', res_obj.properties_data['prop1'])
self.assertEqual('string', res_obj.rsrc_prop_data.data['prop1'])
def test_properties_data_no_encryption(self):
cfg.CONF.set_override('encrypt_parameters_and_properties', False,
@ -1873,28 +1947,31 @@ class ResourceTest(common.HeatTestCase):
# is called
res = generic_rsrc.GenericResource('test_res_enc', tmpl, self.stack)
res._stored_properties_data = stored_properties_data
res._rsrc_prop_data = None
res._store_or_update(res.CREATE, res.IN_PROGRESS, 'test_store')
db_res = db_api.resource_get(res.context, res.id)
self.assertEqual('string', db_res.properties_data['prop1'])
self.assertEqual('string', db_res.rsrc_prop_data.data['prop1'])
# The db data should not be encrypted when _store() is called
res = generic_rsrc.GenericResource('test_res_enc', tmpl, self.stack)
res._stored_properties_data = stored_properties_data
res._store()
db_res = db_api.resource_get(res.context, res.id)
self.assertEqual('string', db_res.properties_data['prop1'])
self.assertEqual('string', db_res.rsrc_prop_data.data['prop1'])
# The properties data should not be modified when the object
# is loaded using get_obj
prev_rsrc_prop_data_id = db_res.rsrc_prop_data.id
res_obj = resource_objects.Resource.get_obj(res.context, res.id)
self.assertEqual('string', res_obj.properties_data['prop1'])
self.assertEqual('string', res_obj.rsrc_prop_data.data['prop1'])
self.assertEqual(prev_rsrc_prop_data_id, res_obj.rsrc_prop_data.id)
# The properties data should not be modified when the object
# is loaded using get_all_by_stack
res_objs = resource_objects.Resource.get_all_by_stack(res.context,
self.stack.id)
res_obj = res_objs['test_res_enc']
self.assertEqual('string', res_obj.properties_data['prop1'])
self.assertEqual('string', res_obj.rsrc_prop_data.data['prop1'])
self.assertEqual(prev_rsrc_prop_data_id, res_obj.rsrc_prop_data.id)
def _assert_resource_lock(self, res_id, engine_id, atomic_key):
rs = resource_objects.Resource.get_obj(self.stack.context, res_id)
@ -4332,3 +4409,65 @@ class TestResourceMapping(common.HeatTestCase):
# It's soft check and should not be a cause of the merge conflict
# Feel free to update it in some separate patch
self.assertGreaterEqual(num_of_types, 137)
class TestResourcePropDataUpdate(common.HeatTestCase):
    """Check when storing a resource replaces its rsrc_prop_data row.

    Each scenario stores the resource with old_rpd and then with
    new_rpd, and asserts whether a new resource_properties_data row was
    created (replaced=True) or the existing row was reused
    (replaced=False).
    """

    # NOTE: scenario names must be unique or testscenarios generates
    # colliding test ids; the second 's11' was renamed to 's12' and the
    # old 's12' became 's13'.
    scenarios = [
        ('s1', dict(
            old_rpd={1: 2}, new_rpd={3: 4}, replaced=True)),
        ('s2', dict(
            old_rpd={'1': '2'}, new_rpd={'3': '4'}, replaced=True)),
        ('s3', dict(
            old_rpd={'1': '2'}, new_rpd={'1': '4'}, replaced=True)),
        ('s4', dict(
            old_rpd={'1': '2'}, new_rpd={'1': '2'}, replaced=False)),
        ('s5', dict(
            old_rpd={'1': '2', 4: 3}, new_rpd={'1': '2'}, replaced=True)),
        ('s6', dict(
            old_rpd={'1': '2', 4: 3}, new_rpd={'1': '2', 4: 3},
            replaced=False)),
        ('s7', dict(
            old_rpd={'1': '2'}, new_rpd={'1': '2', 4: 3}, replaced=True)),
        ('s8', dict(
            old_rpd={'1': '2'}, new_rpd={}, replaced=True)),
        ('s9', dict(
            old_rpd={}, new_rpd={1: 2}, replaced=True)),
        ('s10', dict(
            old_rpd={}, new_rpd={}, replaced=False)),
        ('s11', dict(
            old_rpd=None, new_rpd={}, replaced=False)),
        ('s12', dict(
            old_rpd={}, new_rpd=None, replaced=False)),
        ('s13', dict(
            old_rpd={3: 4}, new_rpd=None, replaced=True)),
    ]

    def setUp(self):
        super(TestResourcePropDataUpdate, self).setUp()
        self.stack = parser.Stack(utils.dummy_context(), 'test_stack',
                                  template.Template(empty_template))
        self.stack.store()
        snippet = rsrc_defn.ResourceDefinition('aresource',
                                               'GenericResourceType')
        self.res = resource.Resource('aresource', snippet, self.stack)

    def _store_and_get_rpd_id(self, props_data):
        # Store the resource with the given properties data and return
        # the id of the resulting rsrc_prop_data row (-1 if none).
        # Clearing _rsrc_prop_data first mimics a fresh store when the
        # scenario expects a replacement.
        self.res._stored_properties_data = props_data
        if self.replaced:
            self.res._rsrc_prop_data = None
        self.res._store()
        if self.res._rsrc_prop_data is None:
            return -1
        return self.res._rsrc_prop_data.id

    def test_create_or_replace_rsrc_prop_data(self):
        old_rpd_id = self._store_and_get_rpd_id(self.old_rpd)
        new_rpd_id = self._store_and_get_rpd_id(self.new_rpd)
        # a replacement is observed as a change in the row id
        self.assertEqual(self.replaced, old_rpd_id != new_rpd_id)