Merge "Update test infrastructure" into feature/ec_review

This commit is contained in:
Jenkins 2015-04-14 07:42:37 +00:00 committed by Gerrit Code Review
commit 8a58bbf75f
9 changed files with 415 additions and 255 deletions

View File

@ -1317,7 +1317,12 @@ class TestFile(Base):
self.assertEqual(file_types, file_types_read)
def testRangedGets(self):
file_length = 10000
# We set the file_length to a strange multiple here. This is to check
# that ranges still work in the EC case when the requested range
# spans EC segment boundaries. The 1 MiB base value is chosen because
# that's a common EC segment size. The 1.33 multiple is to ensure we
# aren't aligned on segment boundaries
file_length = int(1048576 * 1.33)
range_size = file_length / 10
file_item = self.env.container.file(Utils.create_name())
data = file_item.write_random(file_length)

View File

@ -22,24 +22,30 @@ import errno
import sys
from contextlib import contextmanager, closing
from collections import defaultdict, Iterable
import itertools
from numbers import Number
from tempfile import NamedTemporaryFile
import time
import eventlet
from eventlet.green import socket
from tempfile import mkdtemp
from shutil import rmtree
from swift.common.utils import Timestamp
from test import get_config
from swift.common import swob, utils
from swift.common.ring import Ring, RingData
from hashlib import md5
from eventlet import sleep, Timeout
import logging.handlers
from httplib import HTTPException
from swift.common import storage_policy
from swift.common.storage_policy import StoragePolicy, ECStoragePolicy
import functools
import cPickle as pickle
from gzip import GzipFile
import mock as mocklib
import inspect
EMPTY_ETAG = md5().hexdigest()
# try not to import this module from swift
if not os.path.basename(sys.argv[0]).startswith('swift'):
@ -47,26 +53,40 @@ if not os.path.basename(sys.argv[0]).startswith('swift'):
utils.HASH_PATH_SUFFIX = 'endcap'
def patch_policies(thing_or_policies=None, legacy_only=False):
if legacy_only:
default_policies = [storage_policy.StoragePolicy(
0, 'legacy', True, object_ring=FakeRing())]
else:
default_policies = [
storage_policy.StoragePolicy(
0, 'nulo', True, object_ring=FakeRing()),
storage_policy.StoragePolicy(
1, 'unu', object_ring=FakeRing()),
]
thing_or_policies = thing_or_policies or default_policies
def patch_policies(thing_or_policies=None, legacy_only=False,
with_ec_default=False, fake_ring_args=None):
if isinstance(thing_or_policies, (
Iterable, storage_policy.StoragePolicyCollection)):
return PatchPolicies(thing_or_policies)
return PatchPolicies(thing_or_policies, fake_ring_args=fake_ring_args)
if legacy_only:
default_policies = [
StoragePolicy(0, name='legacy', is_default=True),
]
default_ring_args = [{}]
elif with_ec_default:
default_policies = [
ECStoragePolicy(0, name='ec', is_default=True,
ec_type='jerasure_rs_vand', ec_ndata=4,
ec_nparity=2, ec_segment_size=4096),
StoragePolicy(1, name='unu'),
]
default_ring_args = [{'replicas': 6}, {}]
else:
# it's a thing!
return PatchPolicies(default_policies)(thing_or_policies)
default_policies = [
StoragePolicy(0, name='nulo', is_default=True),
StoragePolicy(1, name='unu'),
]
default_ring_args = [{}, {}]
fake_ring_args = fake_ring_args or default_ring_args
decorator = PatchPolicies(default_policies, fake_ring_args=fake_ring_args)
if not thing_or_policies:
return decorator
else:
# it's a thing, we return the wrapped thing instead of the decorator
return decorator(thing_or_policies)
class PatchPolicies(object):
@ -76,11 +96,33 @@ class PatchPolicies(object):
patched yet)
"""
def __init__(self, policies):
def __init__(self, policies, fake_ring_args=None):
if isinstance(policies, storage_policy.StoragePolicyCollection):
self.policies = policies
else:
self.policies = storage_policy.StoragePolicyCollection(policies)
self.fake_ring_args = fake_ring_args or [None] * len(self.policies)
def _setup_rings(self):
"""
Our tests tend to use the policies rings like their own personal
playground - which can be a problem in the particular case of a
patched TestCase class where the FakeRing objects are scoped in the
call to the patch_policies wrapper outside of the TestCase instance
which can lead to some bled state.
To help tests get better isolation without having to think about it,
here we're capturing the args required to *build* a new FakeRing
instances so we can ensure each test method gets a clean ring setup.
The TestCase can always "tweak" these fresh rings in setUp - or if
they'd prefer to get the same "reset" behavior with custom FakeRing's
they can pass in their own fake_ring_args to patch_policies instead of
setting the object_ring on the policy definitions.
"""
for policy, fake_ring_arg in zip(self.policies, self.fake_ring_args):
if fake_ring_arg is not None:
policy.object_ring = FakeRing(**fake_ring_arg)
def __call__(self, thing):
if isinstance(thing, type):
@ -89,24 +131,33 @@ class PatchPolicies(object):
return self._patch_method(thing)
def _patch_class(self, cls):
"""
Creating a new class that inherits from decorated class is the more
common way I've seen class decorators done - but it seems to cause
infinite recursion when super is called from inside methods in the
decorated class.
"""
class NewClass(cls):
already_patched = False
orig_setUp = cls.setUp
orig_tearDown = cls.tearDown
def setUp(cls_self):
self._orig_POLICIES = storage_policy._POLICIES
if not cls_self.already_patched:
if not getattr(cls_self, '_policies_patched', False):
storage_policy._POLICIES = self.policies
cls_self.already_patched = True
super(NewClass, cls_self).setUp()
self._setup_rings()
cls_self._policies_patched = True
orig_setUp(cls_self)
def tearDown(cls_self):
super(NewClass, cls_self).tearDown()
orig_tearDown(cls_self)
storage_policy._POLICIES = self._orig_POLICIES
NewClass.__name__ = cls.__name__
return NewClass
cls.setUp = setUp
cls.tearDown = tearDown
return cls
def _patch_method(self, f):
@functools.wraps(f)
@ -114,6 +165,7 @@ class PatchPolicies(object):
self._orig_POLICIES = storage_policy._POLICIES
try:
storage_policy._POLICIES = self.policies
self._setup_rings()
return f(*args, **kwargs)
finally:
storage_policy._POLICIES = self._orig_POLICIES
@ -178,7 +230,9 @@ class FakeRing(Ring):
for x in xrange(self.replicas, min(self.replicas + self.max_more_nodes,
self.replicas * self.replicas)):
yield {'ip': '10.0.0.%s' % x,
'replication_ip': '10.0.0.%s' % x,
'port': self._base_port + x,
'replication_port': self._base_port + x,
'device': 'sda',
'zone': x % 3,
'region': x % 2,
@ -206,6 +260,48 @@ def write_fake_ring(path, *devs):
pickle.dump(RingData(replica2part2dev_id, devs, part_shift), f)
class FabricatedRing(Ring):
    """
    When a FakeRing just won't do - you can fabricate one to meet
    your tests' needs.

    Builds a ring of ``replicas`` rows over ``devices`` devices spread
    across ``nodes`` distinct IPs, with ``2 ** part_power`` partitions
    assigned round-robin to the devices.
    """

    def __init__(self, replicas=6, devices=8, nodes=4, port=6000,
                 part_power=4):
        self.devices = devices
        self.nodes = nodes
        self.port = port
        # BUG FIX: was hard-coded to 6, silently ignoring the ``replicas``
        # argument; honor the caller's requested replica count.
        self.replicas = replicas
        self.part_power = part_power
        self._part_shift = 32 - self.part_power
        # Populate the fabricated device/partition tables immediately;
        # Ring.__init__ is deliberately not called (no serialized ring
        # file exists to load).
        self._reload()

    def _reload(self, *args, **kwargs):
        # Push the ring's reload timestamp into the future so the base
        # Ring class won't attempt to re-read a ring file from disk.
        self._rtime = time.time() * 2
        if hasattr(self, '_replica2part2dev_id'):
            # Already fabricated - keep the assignment table stable so
            # repeated reloads don't reshuffle partitions mid-test.
            return
        self._devs = [{
            'region': 1,
            'zone': 1,
            'weight': 1.0,
            'id': i,
            'device': 'sda%d' % i,
            'ip': '10.0.0.%d' % (i % self.nodes),
            'replication_ip': '10.0.0.%d' % (i % self.nodes),
            'port': self.port,
            'replication_port': self.port,
        } for i in range(self.devices)]
        self._replica2part2dev_id = [
            [None] * 2 ** self.part_power
            for i in range(self.replicas)
        ]
        # Assign each (replica, partition) slot to devices in a simple
        # round-robin so every device gets an even share.
        dev_ids = itertools.cycle(range(self.devices))
        for p in range(2 ** self.part_power):
            for r in range(self.replicas):
                self._replica2part2dev_id[r][p] = next(dev_ids)
class FakeMemcache(object):
def __init__(self):
@ -363,8 +459,8 @@ class UnmockTimeModule(object):
logging.time = UnmockTimeModule()
class FakeLogger(logging.Logger):
# a thread safe logger
class FakeLogger(logging.Logger, object):
# a thread safe fake logger
def __init__(self, *args, **kwargs):
self._clear()
@ -376,22 +472,31 @@ class FakeLogger(logging.Logger):
self.thread_locals = None
self.parent = None
store_in = {
logging.ERROR: 'error',
logging.WARNING: 'warning',
logging.INFO: 'info',
logging.DEBUG: 'debug',
logging.CRITICAL: 'critical',
}
def _log(self, level, msg, *args, **kwargs):
store_name = self.store_in[level]
cargs = [msg]
if any(args):
cargs.extend(args)
captured = dict(kwargs)
if 'exc_info' in kwargs and \
not isinstance(kwargs['exc_info'], tuple):
captured['exc_info'] = sys.exc_info()
self.log_dict[store_name].append((tuple(cargs), captured))
super(FakeLogger, self)._log(level, msg, *args, **kwargs)
def _clear(self):
self.log_dict = defaultdict(list)
self.lines_dict = {'critical': [], 'error': [], 'info': [],
'warning': [], 'debug': []}
def _store_in(store_name):
def stub_fn(self, *args, **kwargs):
self.log_dict[store_name].append((args, kwargs))
return stub_fn
def _store_and_log_in(store_name, level):
def stub_fn(self, *args, **kwargs):
self.log_dict[store_name].append((args, kwargs))
self._log(level, args[0], args[1:], **kwargs)
return stub_fn
def get_lines_for_level(self, level):
if level not in self.lines_dict:
raise KeyError(
@ -404,16 +509,10 @@ class FakeLogger(logging.Logger):
return dict((level, msgs) for level, msgs in self.lines_dict.items()
if len(msgs) > 0)
error = _store_and_log_in('error', logging.ERROR)
info = _store_and_log_in('info', logging.INFO)
warning = _store_and_log_in('warning', logging.WARNING)
warn = _store_and_log_in('warning', logging.WARNING)
debug = _store_and_log_in('debug', logging.DEBUG)
def exception(self, *args, **kwargs):
self.log_dict['exception'].append((args, kwargs,
str(sys.exc_info()[1])))
print 'FakeLogger Exception: %s' % self.log_dict
def _store_in(store_name):
def stub_fn(self, *args, **kwargs):
self.log_dict[store_name].append((args, kwargs))
return stub_fn
# mock out the StatsD logging methods:
update_stats = _store_in('update_stats')
@ -605,19 +704,53 @@ def mock(update):
delattr(module, attr)
class SlowBody(object):
    """
    A str-ish response body whose reads are artificially delayed.

    Hand one of these to fake_http_connect in place of a plain string
    and each consumption of the body (via __radd__) sleeps for
    ``slowness`` seconds first. This is easier to extend than the
    ``slow`` kwarg (which pads the response with whitespace), and a
    FakeConn could cheaply isinstance-check for it if we ever want
    smarter handling than duck-typing the str/buffer API.
    """

    def __init__(self, body, slowness):
        self.body = body
        self.slowness = slowness

    def __len__(self):
        return len(self.body)

    def __getitem__(self, s):
        # Slicing yields another SlowBody carrying the same delay.
        sliced = self.body[s]
        return SlowBody(sliced, self.slowness)

    def __radd__(self, other):
        # Concatenation (prefix + body) is where the delay is applied.
        self.slowdown()
        result = other + self.body
        return result

    def slowdown(self):
        eventlet.sleep(self.slowness)
def fake_http_connect(*code_iter, **kwargs):
class FakeConn(object):
def __init__(self, status, etag=None, body='', timestamp='1',
headers=None):
headers=None, expect_headers=None, connection_id=None,
give_send=None):
# connect exception
if isinstance(status, (Exception, Timeout)):
if isinstance(status, (Exception, eventlet.Timeout)):
raise status
if isinstance(status, tuple):
self.expect_status, self.status = status
self.expect_status = list(status[:-1])
self.status = status[-1]
self.explicit_expect_list = True
else:
self.expect_status, self.status = (None, status)
self.expect_status, self.status = ([], status)
self.explicit_expect_list = False
if not self.expect_status:
# when a swift backend service returns a status before reading
# from the body (mostly an error response) eventlet.wsgi will
@ -628,9 +761,9 @@ def fake_http_connect(*code_iter, **kwargs):
# our backend services and return certain types of responses
# as expect statuses just like a real backend server would do.
if self.status in (507, 412, 409):
self.expect_status = status
self.expect_status = [status]
else:
self.expect_status = 100
self.expect_status = [100, 100]
self.reason = 'Fake'
self.host = '1.2.3.4'
self.port = '1234'
@ -639,32 +772,41 @@ def fake_http_connect(*code_iter, **kwargs):
self.etag = etag
self.body = body
self.headers = headers or {}
self.expect_headers = expect_headers or {}
self.timestamp = timestamp
self.connection_id = connection_id
self.give_send = give_send
if 'slow' in kwargs and isinstance(kwargs['slow'], list):
try:
self._next_sleep = kwargs['slow'].pop(0)
except IndexError:
self._next_sleep = None
# be nice to trixy bits with node_iter's
eventlet.sleep()
def getresponse(self):
if isinstance(self.status, (Exception, Timeout)):
if self.expect_status and self.explicit_expect_list:
raise Exception('Test did not consume all fake '
'expect status: %r' % (self.expect_status,))
if isinstance(self.status, (Exception, eventlet.Timeout)):
raise self.status
exc = kwargs.get('raise_exc')
if exc:
if isinstance(exc, (Exception, Timeout)):
if isinstance(exc, (Exception, eventlet.Timeout)):
raise exc
raise Exception('test')
if kwargs.get('raise_timeout_exc'):
raise Timeout()
raise eventlet.Timeout()
return self
def getexpect(self):
if isinstance(self.expect_status, (Exception, Timeout)):
expect_status = self.expect_status.pop(0)
if isinstance(self.expect_status, (Exception, eventlet.Timeout)):
raise self.expect_status
headers = {}
if self.expect_status == 409:
headers = dict(self.expect_headers)
if expect_status == 409:
headers['X-Backend-Timestamp'] = self.timestamp
return FakeConn(self.expect_status, headers=headers)
return FakeConn(expect_status, headers=headers)
def getheaders(self):
etag = self.etag
@ -717,18 +859,20 @@ def fake_http_connect(*code_iter, **kwargs):
if am_slow:
if self.sent < 4:
self.sent += 1
sleep(value)
eventlet.sleep(value)
return ' '
rv = self.body[:amt]
self.body = self.body[amt:]
return rv
def send(self, amt=None):
if self.give_send:
self.give_send(self.connection_id, amt)
am_slow, value = self.get_slow()
if am_slow:
if self.received < 4:
self.received += 1
sleep(value)
eventlet.sleep(value)
def getheader(self, name, default=None):
return swob.HeaderKeyDict(self.getheaders()).get(name, default)
@ -738,16 +882,22 @@ def fake_http_connect(*code_iter, **kwargs):
timestamps_iter = iter(kwargs.get('timestamps') or ['1'] * len(code_iter))
etag_iter = iter(kwargs.get('etags') or [None] * len(code_iter))
if isinstance(kwargs.get('headers'), list):
if isinstance(kwargs.get('headers'), (list, tuple)):
headers_iter = iter(kwargs['headers'])
else:
headers_iter = iter([kwargs.get('headers', {})] * len(code_iter))
if isinstance(kwargs.get('expect_headers'), (list, tuple)):
expect_headers_iter = iter(kwargs['expect_headers'])
else:
expect_headers_iter = iter([kwargs.get('expect_headers', {})] *
len(code_iter))
x = kwargs.get('missing_container', [False] * len(code_iter))
if not isinstance(x, (tuple, list)):
x = [x] * len(code_iter)
container_ts_iter = iter(x)
code_iter = iter(code_iter)
conn_id_and_code_iter = enumerate(code_iter)
static_body = kwargs.get('body', None)
body_iter = kwargs.get('body_iter', None)
if body_iter:
@ -755,17 +905,22 @@ def fake_http_connect(*code_iter, **kwargs):
def connect(*args, **ckwargs):
if kwargs.get('slow_connect', False):
sleep(0.1)
eventlet.sleep(0.1)
if 'give_content_type' in kwargs:
if len(args) >= 7 and 'Content-Type' in args[6]:
kwargs['give_content_type'](args[6]['Content-Type'])
else:
kwargs['give_content_type']('')
i, status = conn_id_and_code_iter.next()
if 'give_connect' in kwargs:
kwargs['give_connect'](*args, **ckwargs)
status = code_iter.next()
give_conn_fn = kwargs['give_connect']
argspec = inspect.getargspec(give_conn_fn)
if argspec.keywords or 'connection_id' in argspec.args:
ckwargs['connection_id'] = i
give_conn_fn(*args, **ckwargs)
etag = etag_iter.next()
headers = headers_iter.next()
expect_headers = expect_headers_iter.next()
timestamp = timestamps_iter.next()
if status <= 0:
@ -775,7 +930,8 @@ def fake_http_connect(*code_iter, **kwargs):
else:
body = body_iter.next()
return FakeConn(status, etag, body=body, timestamp=timestamp,
headers=headers)
headers=headers, expect_headers=expect_headers,
connection_id=i, give_send=kwargs.get('give_send'))
connect.code_iter = code_iter
@ -806,3 +962,7 @@ def mocked_http_conn(*args, **kwargs):
left_over_status = list(fake_conn.code_iter)
if left_over_status:
raise AssertionError('left over status %r' % left_over_status)
def make_timestamp_iter():
    """
    Return an iterator yielding monotonically increasing Timestamps,
    one second apart, starting from the current wall-clock time.
    """
    # A generator expression is already an iterator, so no iter() wrapper
    # is needed.
    return (Timestamp(t) for t in itertools.count(int(time.time())))

View File

@ -564,9 +564,10 @@ class TestDloGetManifest(DloTestCase):
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_dlo(req)
self.assertEqual(status, "409 Conflict")
err_log = self.dlo.logger.log_dict['exception'][0][0][0]
self.assertTrue(err_log.startswith('ERROR: An error occurred '
'while retrieving segments'))
err_lines = self.dlo.logger.get_lines_for_level('error')
self.assertEqual(len(err_lines), 1)
self.assertTrue(err_lines[0].startswith(
'ERROR: An error occurred while retrieving segments'))
def test_error_fetching_second_segment(self):
self.app.register(
@ -581,9 +582,10 @@ class TestDloGetManifest(DloTestCase):
self.assertTrue(isinstance(exc, exceptions.SegmentError))
self.assertEqual(status, "200 OK")
self.assertEqual(''.join(body), "aaaaa") # first segment made it out
err_log = self.dlo.logger.log_dict['exception'][0][0][0]
self.assertTrue(err_log.startswith('ERROR: An error occurred '
'while retrieving segments'))
err_lines = self.dlo.logger.get_lines_for_level('error')
self.assertEqual(len(err_lines), 1)
self.assertTrue(err_lines[0].startswith(
'ERROR: An error occurred while retrieving segments'))
def test_error_listing_container_first_listing_request(self):
self.app.register(

View File

@ -1431,9 +1431,10 @@ class TestSloGetManifest(SloTestCase):
self.assertEqual(status, '409 Conflict')
self.assertEqual(self.app.call_count, 10)
err_log = self.slo.logger.log_dict['exception'][0][0][0]
self.assertTrue(err_log.startswith('ERROR: An error occurred '
'while retrieving segments'))
error_lines = self.slo.logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
self.assertTrue(error_lines[0].startswith(
'ERROR: An error occurred while retrieving segments'))
def test_get_with_if_modified_since(self):
# It's important not to pass the If-[Un]Modified-Since header to the
@ -1508,9 +1509,10 @@ class TestSloGetManifest(SloTestCase):
status, headers, body = self.call_slo(req)
self.assertEqual('409 Conflict', status)
err_log = self.slo.logger.log_dict['exception'][0][0][0]
self.assertTrue(err_log.startswith('ERROR: An error occurred '
'while retrieving segments'))
error_lines = self.slo.logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
self.assertTrue(error_lines[0].startswith(
'ERROR: An error occurred while retrieving segments'))
def test_invalid_json_submanifest(self):
self.app.register(
@ -1585,9 +1587,10 @@ class TestSloGetManifest(SloTestCase):
status, headers, body = self.call_slo(req)
self.assertEqual('409 Conflict', status)
err_log = self.slo.logger.log_dict['exception'][0][0][0]
self.assertTrue(err_log.startswith('ERROR: An error occurred '
'while retrieving segments'))
error_lines = self.slo.logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
self.assertTrue(error_lines[0].startswith(
'ERROR: An error occurred while retrieving segments'))
def test_first_segment_mismatched_size(self):
self.app.register('GET', '/v1/AUTH_test/gettest/manifest-badsize',
@ -1603,9 +1606,10 @@ class TestSloGetManifest(SloTestCase):
status, headers, body = self.call_slo(req)
self.assertEqual('409 Conflict', status)
err_log = self.slo.logger.log_dict['exception'][0][0][0]
self.assertTrue(err_log.startswith('ERROR: An error occurred '
'while retrieving segments'))
error_lines = self.slo.logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
self.assertTrue(error_lines[0].startswith(
'ERROR: An error occurred while retrieving segments'))
def test_download_takes_too_long(self):
the_time = [time.time()]
@ -1657,9 +1661,10 @@ class TestSloGetManifest(SloTestCase):
status, headers, body = self.call_slo(req)
self.assertEqual('409 Conflict', status)
err_log = self.slo.logger.log_dict['exception'][0][0][0]
self.assertTrue(err_log.startswith('ERROR: An error occurred '
'while retrieving segments'))
error_lines = self.slo.logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
self.assertTrue(error_lines[0].startswith(
'ERROR: An error occurred while retrieving segments'))
class TestSloBulkLogger(unittest.TestCase):

View File

@ -939,9 +939,9 @@ class TestContainerSync(unittest.TestCase):
{'account': 'a', 'container': 'c', 'storage_policy_index': 0},
realm, realm_key))
self.assertEquals(cs.container_puts, 2)
self.assertTrue(
cs.logger.log_dict['exception'][0][0][0].startswith(
'ERROR Syncing '))
error_lines = cs.logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
self.assertTrue(error_lines[0].startswith('ERROR Syncing '))
finally:
sync.uuid = orig_uuid
sync.shuffle = orig_shuffle

View File

@ -2484,8 +2484,9 @@ class TestDiskFile(unittest.TestCase):
self.fail("Expected exception DiskFileNoSpace")
self.assertTrue(_m_fallocate.called)
self.assertTrue(_m_unlink.called)
self.assert_(self.df_mgr.logger.log_dict['exception'][0][0][0].
startswith("Error removing tempfile:"))
error_lines = self.df_mgr.logger.get_lines_for_level('error')
for line in error_lines:
self.assertTrue(line.startswith("Error removing tempfile:"))
if __name__ == '__main__':
unittest.main()

View File

@ -16,7 +16,7 @@
import urllib
from time import time
from unittest import main, TestCase
from test.unit import FakeLogger, FakeRing, mocked_http_conn
from test.unit import FakeRing, mocked_http_conn, debug_logger
from copy import deepcopy
from tempfile import mkdtemp
from shutil import rmtree
@ -53,7 +53,8 @@ class TestObjectExpirer(TestCase):
internal_client.sleep = not_sleep
self.rcache = mkdtemp()
self.logger = FakeLogger()
self.conf = {'recon_cache_path': self.rcache}
self.logger = debug_logger('test-recon')
def tearDown(self):
rmtree(self.rcache)
@ -167,7 +168,7 @@ class TestObjectExpirer(TestCase):
'2': set('5-five 6-six'.split()),
'3': set(u'7-seven\u2661'.split()),
}
x = ObjectExpirer({})
x = ObjectExpirer(self.conf)
x.swift = InternalClient(containers)
deleted_objects = {}
@ -233,31 +234,32 @@ class TestObjectExpirer(TestCase):
x = expirer.ObjectExpirer({}, logger=self.logger)
x.report()
self.assertEqual(x.logger.log_dict['info'], [])
self.assertEqual(x.logger.get_lines_for_level('info'), [])
x.logger._clear()
x.report(final=True)
self.assertTrue('completed' in x.logger.log_dict['info'][-1][0][0],
x.logger.log_dict['info'])
self.assertTrue('so far' not in x.logger.log_dict['info'][-1][0][0],
x.logger.log_dict['info'])
self.assertTrue(
'completed' in str(x.logger.get_lines_for_level('info')))
self.assertTrue(
'so far' not in str(x.logger.get_lines_for_level('info')))
x.logger._clear()
x.report_last_time = time() - x.report_interval
x.report()
self.assertTrue('completed' not in x.logger.log_dict['info'][-1][0][0],
x.logger.log_dict['info'])
self.assertTrue('so far' in x.logger.log_dict['info'][-1][0][0],
x.logger.log_dict['info'])
self.assertTrue(
'completed' not in str(x.logger.get_lines_for_level('info')))
self.assertTrue(
'so far' in str(x.logger.get_lines_for_level('info')))
def test_run_once_nothing_to_do(self):
x = expirer.ObjectExpirer({}, logger=self.logger)
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
x.swift = 'throw error because a string does not have needed methods'
x.run_once()
self.assertEqual(x.logger.log_dict['exception'],
[(("Unhandled exception",), {},
"'str' object has no attribute "
"'get_account_info'")])
self.assertEqual(x.logger.get_lines_for_level('error'),
["Unhandled exception: "])
log_args, log_kwargs = x.logger.log_dict['error'][0]
self.assertEqual(str(log_kwargs['exc_info'][1]),
"'str' object has no attribute 'get_account_info'")
def test_run_once_calls_report(self):
class InternalClient(object):
@ -267,14 +269,14 @@ class TestObjectExpirer(TestCase):
def iter_containers(*a, **kw):
return []
x = expirer.ObjectExpirer({}, logger=self.logger)
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
x.swift = InternalClient()
x.run_once()
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 0 objects expired',), {})])
x.logger.get_lines_for_level('info'), [
'Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 0 objects expired',
])
def test_run_once_unicode_problem(self):
class InternalClient(object):
@ -296,7 +298,7 @@ class TestObjectExpirer(TestCase):
def delete_container(*a, **kw):
pass
x = expirer.ObjectExpirer({}, logger=self.logger)
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
x.swift = InternalClient()
requests = []
@ -323,27 +325,28 @@ class TestObjectExpirer(TestCase):
def iter_objects(*a, **kw):
raise Exception('This should not have been called')
x = expirer.ObjectExpirer({'recon_cache_path': self.rcache},
x = expirer.ObjectExpirer(self.conf,
logger=self.logger)
x.swift = InternalClient([{'name': str(int(time() + 86400))}])
x.run_once()
for exccall in x.logger.log_dict['exception']:
self.assertTrue(
'This should not have been called' not in exccall[0][0])
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 0 objects expired',), {})])
logs = x.logger.all_log_lines()
self.assertEqual(logs['info'], [
'Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 0 objects expired',
])
self.assertTrue('error' not in logs)
# Reverse test to be sure it still would blow up the way expected.
fake_swift = InternalClient([{'name': str(int(time() - 86400))}])
x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.run_once()
self.assertEqual(
x.logger.log_dict['exception'],
[(('Unhandled exception',), {},
str(Exception('This should not have been called')))])
x.logger.get_lines_for_level('error'), [
'Unhandled exception: '])
log_args, log_kwargs = x.logger.log_dict['error'][-1]
self.assertEqual(str(log_kwargs['exc_info'][1]),
'This should not have been called')
def test_object_timestamp_break(self):
class InternalClient(object):
@ -369,33 +372,27 @@ class TestObjectExpirer(TestCase):
fake_swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % int(time() + 86400)}])
x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.run_once()
for exccall in x.logger.log_dict['exception']:
self.assertTrue(
'This should not have been called' not in exccall[0][0])
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 0 objects expired',), {})])
self.assertTrue('error' not in x.logger.all_log_lines())
self.assertEqual(x.logger.get_lines_for_level('info'), [
'Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 0 objects expired',
])
# Reverse test to be sure it still would blow up the way expected.
ts = int(time() - 86400)
fake_swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % ts}])
x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.delete_actual_object = should_not_be_called
x.run_once()
excswhiledeleting = []
for exccall in x.logger.log_dict['exception']:
if exccall[0][0].startswith('Exception while deleting '):
excswhiledeleting.append(exccall[0][0])
self.assertEqual(
excswhiledeleting,
x.logger.get_lines_for_level('error'),
['Exception while deleting object %d %d-actual-obj '
'This should not have been called' % (ts, ts)])
'This should not have been called: ' % (ts, ts)])
def test_failed_delete_keeps_entry(self):
class InternalClient(object):
@ -428,24 +425,22 @@ class TestObjectExpirer(TestCase):
fake_swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % ts}])
x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.iter_containers = lambda: [str(int(time() - 86400))]
x.delete_actual_object = deliberately_blow_up
x.pop_queue = should_not_get_called
x.run_once()
excswhiledeleting = []
for exccall in x.logger.log_dict['exception']:
if exccall[0][0].startswith('Exception while deleting '):
excswhiledeleting.append(exccall[0][0])
error_lines = x.logger.get_lines_for_level('error')
self.assertEqual(
excswhiledeleting,
error_lines,
['Exception while deleting object %d %d-actual-obj '
'failed to delete actual object' % (ts, ts)])
'failed to delete actual object: ' % (ts, ts)])
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 0 objects expired',), {})])
x.logger.get_lines_for_level('info'), [
'Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 0 objects expired',
])
# Reverse test to be sure it still would blow up the way expected.
ts = int(time() - 86400)
@ -453,18 +448,15 @@ class TestObjectExpirer(TestCase):
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % ts}])
self.logger._clear()
x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.delete_actual_object = lambda o, t: None
x.pop_queue = should_not_get_called
x.run_once()
excswhiledeleting = []
for exccall in x.logger.log_dict['exception']:
if exccall[0][0].startswith('Exception while deleting '):
excswhiledeleting.append(exccall[0][0])
self.assertEqual(
excswhiledeleting,
self.logger.get_lines_for_level('error'),
['Exception while deleting object %d %d-actual-obj This should '
'not have been called' % (ts, ts)])
'not have been called: ' % (ts, ts)])
def test_success_gets_counted(self):
class InternalClient(object):
@ -493,7 +485,8 @@ class TestObjectExpirer(TestCase):
fake_swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-acc/c/actual-obj' % int(time() - 86400)}])
x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.delete_actual_object = lambda o, t: None
x.pop_queue = lambda c, o: None
self.assertEqual(x.report_objects, 0)
@ -501,10 +494,9 @@ class TestObjectExpirer(TestCase):
x.run_once()
self.assertEqual(x.report_objects, 1)
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 1 objects expired',), {})])
x.logger.get_lines_for_level('info'),
['Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 1 objects expired'])
def test_delete_actual_object_does_not_get_unicode(self):
class InternalClient(object):
@ -539,17 +531,18 @@ class TestObjectExpirer(TestCase):
fake_swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': u'%d-actual-obj' % int(time() - 86400)}])
x = expirer.ObjectExpirer({}, logger=self.logger, swift=fake_swift)
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.delete_actual_object = delete_actual_object_test_for_unicode
x.pop_queue = lambda c, o: None
self.assertEqual(x.report_objects, 0)
x.run_once()
self.assertEqual(x.report_objects, 1)
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 1 objects expired',), {})])
x.logger.get_lines_for_level('info'), [
'Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 1 objects expired',
])
self.assertFalse(got_unicode[0])
def test_failed_delete_continues_on(self):
@ -579,7 +572,7 @@ class TestObjectExpirer(TestCase):
def fail_delete_actual_object(actual_obj, timestamp):
raise Exception('failed to delete actual object')
x = expirer.ObjectExpirer({}, logger=self.logger)
x = expirer.ObjectExpirer(self.conf, logger=self.logger)
cts = int(time() - 86400)
ots = int(time() - 86400)
@ -597,28 +590,24 @@ class TestObjectExpirer(TestCase):
x.swift = InternalClient(containers, objects)
x.delete_actual_object = fail_delete_actual_object
x.run_once()
excswhiledeleting = []
for exccall in x.logger.log_dict['exception']:
if exccall[0][0].startswith('Exception while deleting '):
excswhiledeleting.append(exccall[0][0])
self.assertEqual(sorted(excswhiledeleting), sorted([
error_lines = x.logger.get_lines_for_level('error')
self.assertEqual(sorted(error_lines), sorted([
'Exception while deleting object %d %d-actual-obj failed to '
'delete actual object' % (cts, ots),
'delete actual object: ' % (cts, ots),
'Exception while deleting object %d %d-next-obj failed to '
'delete actual object' % (cts, ots),
'delete actual object: ' % (cts, ots),
'Exception while deleting object %d %d-actual-obj failed to '
'delete actual object' % (cts + 1, ots),
'delete actual object: ' % (cts + 1, ots),
'Exception while deleting object %d %d-next-obj failed to '
'delete actual object' % (cts + 1, ots),
'delete actual object: ' % (cts + 1, ots),
'Exception while deleting container %d failed to delete '
'container' % (cts,),
'container: ' % (cts,),
'Exception while deleting container %d failed to delete '
'container' % (cts + 1,)]))
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 0 objects expired',), {})])
'container: ' % (cts + 1,)]))
self.assertEqual(x.logger.get_lines_for_level('info'), [
'Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 0 objects expired',
])
def test_run_forever_initial_sleep_random(self):
global last_not_sleep
@ -664,9 +653,11 @@ class TestObjectExpirer(TestCase):
finally:
expirer.sleep = orig_sleep
self.assertEqual(str(err), 'exiting exception 2')
self.assertEqual(x.logger.log_dict['exception'],
[(('Unhandled exception',), {},
'exception 1')])
self.assertEqual(x.logger.get_lines_for_level('error'),
['Unhandled exception: '])
log_args, log_kwargs = x.logger.log_dict['error'][0]
self.assertEqual(str(log_kwargs['exc_info'][1]),
'exception 1')
def test_delete_actual_object(self):
got_env = [None]

View File

@ -27,7 +27,7 @@ from errno import ENOENT, ENOTEMPTY, ENOTDIR
from eventlet.green import subprocess
from eventlet import Timeout, tpool
from test.unit import FakeLogger, debug_logger, patch_policies
from test.unit import debug_logger, patch_policies
from swift.common import utils
from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \
storage_directory
@ -190,7 +190,7 @@ class TestObjectReplicator(unittest.TestCase):
swift_dir=self.testdir, devices=self.devices, mount_check='false',
timeout='300', stats_interval='1', sync_method='rsync')
self.replicator = object_replicator.ObjectReplicator(self.conf)
self.replicator.logger = FakeLogger()
self.logger = self.replicator.logger = debug_logger('test-replicator')
self.df_mgr = diskfile.DiskFileManager(self.conf,
self.replicator.logger)
@ -280,22 +280,20 @@ class TestObjectReplicator(unittest.TestCase):
def test_collect_jobs_mkdirs_error(self):
non_local = {}
def blowup_mkdirs(path):
non_local['path'] = path
raise OSError('Ow!')
with mock.patch.object(object_replicator, 'mkdirs', blowup_mkdirs):
rmtree(self.objects, ignore_errors=1)
object_replicator.mkdirs = blowup_mkdirs
self.replicator.collect_jobs()
self.assertTrue('exception' in self.replicator.logger.log_dict)
self.assertEquals(
len(self.replicator.logger.log_dict['exception']), 1)
exc_args, exc_kwargs, exc_str = \
self.replicator.logger.log_dict['exception'][0]
self.assertEquals(len(exc_args), 1)
self.assertTrue(exc_args[0].startswith('ERROR creating '))
self.assertEquals(exc_kwargs, {})
self.assertEquals(exc_str, 'Ow!')
self.assertEqual(self.logger.get_lines_for_level('error'), [
'ERROR creating %s: ' % non_local['path']])
log_args, log_kwargs = self.logger.log_dict['error'][0]
self.assertEqual(str(log_kwargs['exc_info'][1]), 'Ow!')
def test_collect_jobs(self):
jobs = self.replicator.collect_jobs()
@ -383,14 +381,13 @@ class TestObjectReplicator(unittest.TestCase):
self.assertFalse(os.path.exists(pol_0_part_1_path))
self.assertFalse(os.path.exists(pol_1_part_1_path))
logged_warnings = sorted(self.replicator.logger.log_dict['warning'])
self.assertEquals(
(('Removing partition directory which was a file: %s',
pol_1_part_1_path), {}), logged_warnings[0])
self.assertEquals(
(('Removing partition directory which was a file: %s',
pol_0_part_1_path), {}), logged_warnings[1])
self.assertEqual(
sorted(self.logger.get_lines_for_level('warning')), [
('Removing partition directory which was a file: %s'
% pol_1_part_1_path),
('Removing partition directory which was a file: %s'
% pol_0_part_1_path),
])
def test_delete_partition(self):
with mock.patch('swift.obj.replicator.http_connect',
@ -833,7 +830,8 @@ class TestObjectReplicator(unittest.TestCase):
def test_delete_partition_ssync_with_cleanup_failure(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.logger = mock_logger = mock.MagicMock()
self.replicator.logger = mock_logger = \
debug_logger('test-replicator')
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o')
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
@ -886,7 +884,7 @@ class TestObjectReplicator(unittest.TestCase):
with mock.patch('os.rmdir',
raise_exception_rmdir(OSError, ENOENT)):
self.replicator.replicate()
self.assertEquals(mock_logger.exception.call_count, 0)
self.assertFalse(mock_logger.get_lines_for_level('error'))
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
@ -895,7 +893,7 @@ class TestObjectReplicator(unittest.TestCase):
with mock.patch('os.rmdir',
raise_exception_rmdir(OSError, ENOTEMPTY)):
self.replicator.replicate()
self.assertEquals(mock_logger.exception.call_count, 0)
self.assertFalse(mock_logger.get_lines_for_level('error'))
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
@ -904,7 +902,7 @@ class TestObjectReplicator(unittest.TestCase):
with mock.patch('os.rmdir',
raise_exception_rmdir(OSError, ENOTDIR)):
self.replicator.replicate()
self.assertEquals(mock_logger.exception.call_count, 1)
self.assertEqual(len(mock_logger.get_lines_for_level('error')), 1)
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))

View File

@ -77,6 +77,8 @@ class TestObjectController(unittest.TestCase):
self.df_mgr = diskfile.DiskFileManager(conf,
self.object_controller.logger)
self.logger = debug_logger('test-object-controller')
def tearDown(self):
"""Tear down for testing swift.object.server.ObjectController"""
rmtree(self.tmpdir)
@ -3305,7 +3307,7 @@ class TestObjectController(unittest.TestCase):
given_args.extend(args)
self.object_controller.async_update = fake_async_update
self.object_controller.logger = FakeLogger()
self.object_controller.logger = self.logger
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
@ -3317,10 +3319,10 @@ class TestObjectController(unittest.TestCase):
self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o',
req, 'sda1', 0)
self.assertEquals(
self.object_controller.logger.log_dict['warning'],
[(('X-Delete-At-Container header must be specified for expiring '
self.logger.get_lines_for_level('warning'),
['X-Delete-At-Container header must be specified for expiring '
'objects background PUT to work properly. Making best guess as '
'to the container name for now.',), {})])
'to the container name for now.'])
def test_delete_at_update_delete(self):
given_args = []
@ -4167,7 +4169,7 @@ class TestObjectController(unittest.TestCase):
outbuf = StringIO()
self.object_controller = object_server.ObjectController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'}, logger=FakeLogger())
'replication_server': 'false'}, logger=self.logger)
def start_response(*args):
# Sends args to outbuf
@ -4207,11 +4209,10 @@ class TestObjectController(unittest.TestCase):
env, start_response)
self.assertEqual(response, answer)
self.assertEqual(
self.object_controller.logger.log_dict['info'],
[(('None - - [01/Jan/1970:02:46:41 +0000] "PUT'
self.logger.get_lines_for_level('info'),
['None - - [01/Jan/1970:02:46:41 +0000] "PUT'
' /sda1/p/a/c/o" 405 - "-" "-" "-" 1.0000 "-"'
' 1234 -',),
{})])
' 1234 -'])
def test_call_incorrect_replication_method(self):
inbuf = StringIO()
@ -4281,17 +4282,17 @@ class TestObjectController(unittest.TestCase):
new=mock_method):
response = self.object_controller.__call__(env, start_response)
self.assertEqual(response, answer)
self.assertEqual(self.object_controller.logger.log_dict['info'],
[])
self.assertEqual(self.logger.get_lines_for_level('info'), [])
def test__call__returns_500(self):
inbuf = StringIO()
errbuf = StringIO()
outbuf = StringIO()
self.logger = debug_logger('test')
self.object_controller = object_server.ObjectController(
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false', 'log_requests': 'false'},
logger=FakeLogger())
logger=self.logger)
def start_response(*args):
# Sends args to outbuf
@ -4323,14 +4324,11 @@ class TestObjectController(unittest.TestCase):
response = self.object_controller.__call__(env, start_response)
self.assertTrue(response[0].startswith(
'Traceback (most recent call last):'))
self.assertEqual(
self.object_controller.logger.log_dict['exception'],
[(('ERROR __call__ error with %(method)s %(path)s ',
{'method': 'PUT', 'path': '/sda1/p/a/c/o'}),
{},
'')])
self.assertEqual(self.object_controller.logger.log_dict['INFO'],
[])
self.assertEqual(self.logger.get_lines_for_level('error'), [
'ERROR __call__ error with %(method)s %(path)s : ' % {
'method': 'PUT', 'path': '/sda1/p/a/c/o'},
])
self.assertEqual(self.logger.get_lines_for_level('info'), [])
def test_PUT_slow(self):
inbuf = StringIO()
@ -4340,7 +4338,7 @@ class TestObjectController(unittest.TestCase):
{'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false', 'log_requests': 'false',
'slow': '10'},
logger=FakeLogger())
logger=self.logger)
def start_response(*args):
# Sends args to outbuf
@ -4373,14 +4371,14 @@ class TestObjectController(unittest.TestCase):
mock.MagicMock()) as ms:
self.object_controller.__call__(env, start_response)
ms.assert_called_with(9)
self.assertEqual(
self.object_controller.logger.log_dict['info'], [])
self.assertEqual(self.logger.get_lines_for_level('info'),
[])
def test_log_line_format(self):
req = Request.blank(
'/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
self.object_controller.logger = FakeLogger()
self.object_controller.logger = self.logger
with mock.patch(
'time.gmtime', mock.MagicMock(side_effect=[gmtime(10001.0)])):
with mock.patch(
@ -4390,9 +4388,9 @@ class TestObjectController(unittest.TestCase):
'os.getpid', mock.MagicMock(return_value=1234)):
req.get_response(self.object_controller)
self.assertEqual(
self.object_controller.logger.log_dict['info'],
[(('1.2.3.4 - - [01/Jan/1970:02:46:41 +0000] "HEAD /sda1/p/a/c/o" '
'404 - "-" "-" "-" 2.0000 "-" 1234 -',), {})])
self.logger.get_lines_for_level('info'),
['1.2.3.4 - - [01/Jan/1970:02:46:41 +0000] "HEAD /sda1/p/a/c/o" '
'404 - "-" "-" "-" 2.0000 "-" 1234 -'])
@patch_policies([storage_policy.StoragePolicy(0, 'zero', True),
storage_policy.StoragePolicy(1, 'one', False)])