Move base64-decoding/unpickling into DatabaseBroker

...which was the one responsible for pickling and encoding the data in
the first place.

Change-Id: Ia7fad7ddd7cf95c8a0168dd6c899c80ddfdd3521
Tim Burke 2018-10-05 14:58:35 -07:00
parent 6e7ca26169
commit 4ca605c51e
4 changed files with 20 additions and 20 deletions
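
As the hunks below show, each broker's .pending file is a colon-separated run of base64-encoded pickles; this commit moves the decode step out of the three _commit_puts_load() implementations and into DatabaseBroker._commit_puts(). A minimal, self-contained sketch of that round-trip (the helper names here are illustrative, not Swift's own API, and PICKLE_PROTOCOL is assumed to be 2, matching swift.common.db):

import base64
import pickle

PICKLE_PROTOCOL = 2  # assumed to match swift.common.db.PICKLE_PROTOCOL

def append_entry(pending_path, record):
    # Writer side: base64 output never contains ':', so it is a safe
    # record delimiter in the .pending file.
    with open(pending_path, 'ab') as fp:
        fp.write(b':' + base64.b64encode(
            pickle.dumps(record, protocol=PICKLE_PROTOCOL)))

def read_entries(pending_path):
    # Reader side: after this commit the decode happens once, here
    # (i.e. in DatabaseBroker._commit_puts()); the per-broker
    # _commit_puts_load() hooks now receive plain tuples.
    with open(pending_path, 'rb') as fp:
        for entry in fp.read().split(b':'):
            if entry:
                yield pickle.loads(base64.b64decode(entry))

With that picture in mind, the per-file changes are mostly mechanical: drop the duplicated decode and rename the loaded variable.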

View File

@@ -17,7 +17,6 @@ Pluggable Back-end for Account Server
 """
 from uuid import uuid4
-import six.moves.cPickle as pickle
 import sqlite3
@@ -204,12 +203,11 @@ class AccountBroker(DatabaseBroker):
     def _commit_puts_load(self, item_list, entry):
         """See :func:`swift.common.db.DatabaseBroker._commit_puts_load`"""
-        loaded = pickle.loads(entry.decode('base64'))
         # check to see if the update includes policy_index or not
         (name, put_timestamp, delete_timestamp, object_count, bytes_used,
-         deleted) = loaded[:6]
-        if len(loaded) > 6:
-            storage_policy_index = loaded[6]
+         deleted) = entry[:6]
+        if len(entry) > 6:
+            storage_policy_index = entry[6]
         else:
             # legacy support during upgrade until first non legacy storage
             # policy is defined

View File

@@ -730,7 +730,8 @@ class DatabaseBroker(object):
                 for entry in fp.read().split(b':'):
                     if entry:
                         try:
-                            self._commit_puts_load(item_list, entry)
+                            data = pickle.loads(base64.b64decode(entry))
+                            self._commit_puts_load(item_list, data)
                         except Exception:
                             self.logger.exception(
                                 _('Invalid pending entry %(file)s: %(entry)s'),
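
A side effect of decoding inside the existing try/except is that a corrupt entry is now logged as an "Invalid pending entry" and skipped in one place, instead of each subclass having to survive a bad base64/pickle blob on its own. A quick standalone illustration (decode_entry and the sample values are made up for this sketch):

import base64
import pickle

def decode_entry(entry):
    # Same decode the hunk above adds to DatabaseBroker._commit_puts()
    return pickle.loads(base64.b64decode(entry))

good = base64.b64encode(pickle.dumps(('obj', '1538000000.00000', 0)))
print(decode_entry(good))  # ('obj', '1538000000.00000', 0)

bad = base64.b64encode(b'not a pickle at all')
try:
    decode_entry(bad)
except Exception:
    # In _commit_puts() the surrounding `except Exception` swallows this
    # and logs the offending entry rather than aborting the whole flush.
    print('bad entry skipped')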
@@ -760,7 +761,7 @@ class DatabaseBroker(object):
     def _commit_puts_load(self, item_list, entry):
         """
-        Unmarshall the :param:entry and append it to :param:item_list.
+        Unmarshall the :param:entry tuple and append it to :param:item_list.
         This is implemented by a particular broker to be compatible
         with its :func:`merge_items`.
         """

View File

@@ -21,7 +21,6 @@ import os
 from uuid import uuid4
 import six
-import six.moves.cPickle as pickle
 from six.moves import range
 import sqlite3
 from eventlet import tpool
@@ -643,17 +642,16 @@ class ContainerBroker(DatabaseBroker):
     def _commit_puts_load(self, item_list, entry):
         """See :func:`swift.common.db.DatabaseBroker._commit_puts_load`"""
-        data = pickle.loads(entry.decode('base64'))
-        (name, timestamp, size, content_type, etag, deleted) = data[:6]
-        if len(data) > 6:
-            storage_policy_index = data[6]
+        (name, timestamp, size, content_type, etag, deleted) = entry[:6]
+        if len(entry) > 6:
+            storage_policy_index = entry[6]
         else:
             storage_policy_index = 0
         content_type_timestamp = meta_timestamp = None
-        if len(data) > 7:
-            content_type_timestamp = data[7]
-        if len(data) > 8:
-            meta_timestamp = data[8]
+        if len(entry) > 7:
+            content_type_timestamp = entry[7]
+        if len(entry) > 8:
+            meta_timestamp = entry[8]
         item_list.append({'name': name,
                           'created_at': timestamp,
                           'size': size,
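
The length checks above exist because container pending entries have grown over time: six fields originally, then a storage policy index, then content-type and metadata timestamps. A standalone illustration of the three tuple shapes the loader accepts (all values below are made up):

# Legacy entry, pre storage policies: exactly six fields.
legacy = ('obj', '1538000000.00000', 0, 'text/plain',
          'd41d8cd98f00b204e9800998ecf8427e', 0)

# Storage policy index appended as the seventh field.
with_policy = legacy + (1,)

# Current shape: adds content_type_timestamp and meta_timestamp.
current = with_policy + ('1538000001.00000', '1538000002.00000')

for entry in (legacy, with_policy, current):
    storage_policy_index = entry[6] if len(entry) > 6 else 0
    content_type_timestamp = entry[7] if len(entry) > 7 else None
    meta_timestamp = entry[8] if len(entry) > 8 else None
    print(len(entry), storage_policy_index,
          content_type_timestamp, meta_timestamp)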

View File

@@ -274,7 +274,7 @@ class ExampleBroker(DatabaseBroker):
             conn.commit()
 
     def _commit_puts_load(self, item_list, entry):
-        (name, timestamp, deleted) = pickle.loads(base64.b64decode(entry))
+        (name, timestamp, deleted) = entry
         item_list.append({
             'name': name,
             'created_at': timestamp,
@@ -1422,16 +1422,19 @@ class TestDatabaseBroker(unittest.TestCase):
         # load file and merge
         with open(broker.pending_file, 'wb') as fd:
-            fd.write(b':1:2:99')
+            for v in (1, 2, 99):
+                fd.write(b':' + base64.b64encode(pickle.dumps(
+                    v, protocol=PICKLE_PROTOCOL)))
         with patch.object(broker, 'merge_items') as mock_merge_items:
             broker._commit_puts_load = lambda l, e: l.append(e)
             broker._commit_puts()
-        mock_merge_items.assert_called_once_with([b'1', b'2', b'99'])
+        mock_merge_items.assert_called_once_with([1, 2, 99])
         self.assertEqual(0, os.path.getsize(broker.pending_file))
         # load file and merge with given list
         with open(broker.pending_file, 'wb') as fd:
-            fd.write(b':bad')
+            fd.write(b':' + base64.b64encode(pickle.dumps(
+                b'bad', protocol=PICKLE_PROTOCOL)))
         with patch.object(broker, 'merge_items') as mock_merge_items:
             broker._commit_puts_load = lambda l, e: l.append(e)
             broker._commit_puts([b'not'])