Pep8 unit test modules w/ <= 20 violations (6 of 12)

Change-Id: I7317beb97e1530cb18c62da55ccf4c64206ff362
Signed-off-by: Peter Portante <peter.portante@redhat.com>
Peter Portante 2013-08-31 23:42:43 -04:00
parent be1cff4f1f
commit 56593a1323
7 changed files with 231 additions and 134 deletions
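The hunks below all make the same kind of change: long expressions are re-wrapped so that continuation lines either line up under the opening delimiter or use a hanging indent, long lines are split to stay within 79 columns, and blank lines are added between top-level definitions. As a rough illustration only (this snippet is not taken from the commit; make_request is a hypothetical stand-in for the long Request.blank/assert-style calls seen in the hunks), the two wrapping patterns look like this:

# Illustrative sketch, not code from this commit. `make_request` is a
# hypothetical helper standing in for the long test calls in the diff below.
def make_request(path, environ=None, headers=None):
    # Return the arguments unchanged; only the call-site formatting matters.
    return path, environ or {}, headers or {}

# Pattern 1: continuation lines aligned under the opening delimiter
# (the visual-indent style pycodestyle checks as E127/E128).
req = make_request('/v1/a/c/o',
                   environ={'REQUEST_METHOD': 'PUT',
                            'CONTENT_LENGTH': '100'})

# Pattern 2: when aligned continuations would still run past 79 columns
# (E501), break right after the opening parenthesis and indent the whole
# argument list one level.
req = make_request(
    '/v1/a/c/o',
    environ={'REQUEST_METHOD': 'PUT', 'CONTENT_LENGTH': '100'},
    headers={'X-Auth-Token': 'token'})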

View File

@ -36,7 +36,7 @@ class FakeApp(object):
return resp(env, start_response)
status, headers, body = self.status_headers_body_iter.next()
return Response(status=status, headers=headers,
body=body)(env, start_response)
body=body)(env, start_response)
class SwiftAuth(unittest.TestCase):
@ -174,7 +174,8 @@ class TestAuthorize(unittest.TestCase):
def _get_account(self, identity=None):
if not identity:
identity = self._get_identity()
return self.test_auth._get_account_for_tenant(identity['HTTP_X_TENANT_ID'])
return self.test_auth._get_account_for_tenant(
identity['HTTP_X_TENANT_ID'])
def _get_identity(self, tenant_id='tenant_id', tenant_name='tenant_name',
user_id='user_id', user_name='user_name', roles=[]):
@ -244,7 +245,8 @@ class TestAuthorize(unittest.TestCase):
self.assertTrue(req.environ.get('swift_owner'))
def _check_authorize_for_tenant_owner_match(self, exception=None):
identity = self._get_identity(user_name='same_name', tenant_name='same_name')
identity = self._get_identity(user_name='same_name',
tenant_name='same_name')
req = self._check_authenticate(identity=identity, exception=exception)
expected = bool(exception is None)
self.assertEqual(bool(req.environ.get('swift_owner')), expected)
@ -314,30 +316,55 @@ class TestAuthorize(unittest.TestCase):
self._check_authenticate(identity=identity, acl=acl)
def test_cross_tenant_authorization_success(self):
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['tenantID:userA']), 'tenantID:userA')
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['tenantNAME:userA']), 'tenantNAME:userA')
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['*:userA']), '*:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userA']),
'tenantID:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userA']),
'tenantNAME:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['*:userA']),
'*:userA')
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['tenantID:userID']), 'tenantID:userID')
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['tenantNAME:userID']), 'tenantNAME:userID')
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['*:userID']), '*:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userID']),
'tenantID:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userID']),
'tenantNAME:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['*:userID']),
'*:userID')
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['tenantID:*']), 'tenantID:*')
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['tenantNAME:*']), 'tenantNAME:*')
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['*:*']), '*:*')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['tenantID:*']),
'tenantID:*')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['tenantNAME:*']),
'tenantNAME:*')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['*:*']),
'*:*')
def test_cross_tenant_authorization_failure(self):
self.assertEqual(self.test_auth._authorize_cross_tenant('userID',
'userA', 'tenantID', 'tenantNAME', ['tenantXYZ:userA']), None)
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantXYZ:userA']),
None)
def test_delete_own_account_not_allowed(self):
roles = self.test_auth.operator_roles.split(',')

View File

@ -26,6 +26,7 @@ from swift.common.swob import Request
class FakeApp(object):
def __init__(self, body=['FAKE APP'], response_str='200 OK'):
self.body = body
self.response_str = response_str
@ -40,6 +41,7 @@ class FakeApp(object):
class FakeAppNoContentLengthNoTransferEncoding(object):
def __init__(self, body=['FAKE APP']):
self.body = body
@ -51,6 +53,7 @@ class FakeAppNoContentLengthNoTransferEncoding(object):
class FileLikeExceptor(object):
def __init__(self):
pass
@ -62,9 +65,10 @@ class FileLikeExceptor(object):
class FakeAppReadline(object):
def __call__(self, env, start_response):
start_response('200 OK', [('Content-Type', 'text/plain'),
('Content-Length', '8')])
('Content-Length', '8')])
env['wsgi.input'].readline()
return ["FAKE APP"]
@ -153,6 +157,7 @@ class TestProxyLogging(unittest.TestCase):
function), but only for GET requests.
"""
stub_times = []
def stub_time():
return stub_times.pop(0)
@ -201,8 +206,10 @@ class TestProxyLogging(unittest.TestCase):
iter_response = app(req.environ, lambda *_: None)
self.assertEqual('7654321', ''.join(iter_response))
self.assertEqual([], app.access_logger.log_dict['timing'])
self.assertEqual([], app.access_logger.log_dict['timing_since'])
self.assertEqual([], app.access_logger.log_dict['update_stats'])
self.assertEqual([],
app.access_logger.log_dict['timing_since'])
self.assertEqual([],
app.access_logger.log_dict['update_stats'])
# PUT (no first-byte timing!)
app = proxy_logging.ProxyLoggingMiddleware(
@ -221,7 +228,8 @@ class TestProxyLogging(unittest.TestCase):
'%s.GET.314.first-byte.timing' % exp_type, app)
self.assertNotTiming(
'%s.PUT.314.first-byte.timing' % exp_type, app)
self.assertUpdateStats('%s.PUT.314.xfer' % exp_type, 6 + 8, app)
self.assertUpdateStats(
'%s.PUT.314.xfer' % exp_type, 6 + 8, app)
finally:
time.time = orig_time
@ -267,12 +275,13 @@ class TestProxyLogging(unittest.TestCase):
conf_key: 'SPECIAL, GET,PUT ', # crazy spaces ok
})
app.access_logger = FakeLogger()
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method})
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': method})
app.log_request(req, 911, 4, 43, 1.01)
self.assertTiming('container.%s.911.timing' % exp_method, app,
exp_timing=1.01 * 1000)
exp_timing=1.01 * 1000)
self.assertUpdateStats('container.%s.911.xfer' % exp_method,
4 + 43, app)
4 + 43, app)
def test_basic_req(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
@ -318,7 +327,7 @@ class TestProxyLogging(unittest.TestCase):
def test_log_headers(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(),
{'log_headers': 'yes'})
{'log_headers': 'yes'})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, start_response)
@ -330,10 +339,12 @@ class TestProxyLogging(unittest.TestCase):
def test_upload_size(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(),
{'log_headers': 'yes'})
{'log_headers': 'yes'})
app.access_logger = FakeLogger()
req = Request.blank('/v1/a/c/o/foo', environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': StringIO.StringIO('some stuff')})
req = Request.blank(
'/v1/a/c/o/foo',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': StringIO.StringIO('some stuff')})
resp = app(req.environ, start_response)
# exhaust generator
[x for x in resp]
@ -346,11 +357,13 @@ class TestProxyLogging(unittest.TestCase):
def test_upload_line(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeAppReadline(),
{'log_headers': 'yes'})
{'log_headers': 'yes'})
app.access_logger = FakeLogger()
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'POST',
'wsgi.input': StringIO.StringIO(
'some stuff\nsome other stuff\n')})
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'wsgi.input': StringIO.StringIO(
'some stuff\nsome other stuff\n')})
resp = app(req.environ, start_response)
# exhaust generator
[x for x in resp]
@ -365,7 +378,7 @@ class TestProxyLogging(unittest.TestCase):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'QUERY_STRING': 'x=3'})
'QUERY_STRING': 'x=3'})
resp = app(req.environ, start_response)
# exhaust generator
[x for x in resp]
@ -376,7 +389,7 @@ class TestProxyLogging(unittest.TestCase):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'REMOTE_ADDR': '1.2.3.4'})
'REMOTE_ADDR': '1.2.3.4'})
resp = app(req.environ, start_response)
# exhaust generator
[x for x in resp]
@ -412,9 +425,10 @@ class TestProxyLogging(unittest.TestCase):
self.assertEquals(log_parts[1], '1.2.3.4') # remote addr
def test_facility(self):
app = proxy_logging.ProxyLoggingMiddleware(FakeApp(),
{'log_headers': 'yes',
'access_log_facility': 'LOG_LOCAL7'})
app = proxy_logging.ProxyLoggingMiddleware(
FakeApp(),
{'log_headers': 'yes',
'access_log_facility': 'LOG_LOCAL7'})
handler = get_logger.handler4logger[app.access_logger.logger]
self.assertEquals(SysLogHandler.LOG_LOCAL7, handler.facility)
@ -440,7 +454,7 @@ class TestProxyLogging(unittest.TestCase):
app = proxy_logging.ProxyLoggingMiddleware(FakeAppReadline(), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'wsgi.input': FileLikeExceptor()})
'wsgi.input': FileLikeExceptor()})
try:
resp = app(req.environ, start_response)
# read body
@ -456,7 +470,7 @@ class TestProxyLogging(unittest.TestCase):
FakeApp(['some', 'stuff']), {})
app.access_logger = FakeLogger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'wsgi.input': FileLikeExceptor()})
'wsgi.input': FileLikeExceptor()})
try:
resp = app(req.environ, start_response)
# read body
@ -642,6 +656,9 @@ class TestProxyLogging(unittest.TestCase):
log_parts = self._log_parts(app)
self.assertEquals(log_parts[9], '...')
# Avoids pyflakes error, "local variable 'resp_body' is assigned to
# but never used
self.assertTrue(resp_body is not None)
if __name__ == '__main__':
unittest.main()

View File

@ -18,7 +18,9 @@ import unittest
from swift.common.swob import Request, HTTPUnauthorized
from swift.common.middleware import container_quotas
class FakeCache(object):
def __init__(self, val):
if 'status' not in val:
val['status'] = 200
@ -27,7 +29,9 @@ class FakeCache(object):
def get(self, *args):
return self.val
class FakeApp(object):
def __init__(self):
pass
@ -35,7 +39,9 @@ class FakeApp(object):
start_response('200 OK', [])
return []
class FakeMissingApp(object):
def __init__(self):
pass
@ -43,16 +49,18 @@ class FakeMissingApp(object):
start_response('404 Not Found', [])
return []
def start_response(*args):
pass
class TestContainerQuotas(unittest.TestCase):
def test_split_path_empty_container_path_segment(self):
app = container_quotas.ContainerQuotaMiddleware(FakeApp(), {})
req = Request.blank('/v1/a//something/something_else',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': {'key':'value'}})
'swift.cache': {'key': 'value'}})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
@ -69,7 +77,8 @@ class TestContainerQuotas(unittest.TestCase):
def test_no_quotas(self):
app = container_quotas.ContainerQuotaMiddleware(FakeApp(), {})
req = Request.blank('/v1/a/c/o',
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': FakeCache({}),
'CONTENT_LENGTH': '100'})
res = req.get_response(app)
@ -78,7 +87,8 @@ class TestContainerQuotas(unittest.TestCase):
def test_exceed_bytes_quota(self):
app = container_quotas.ContainerQuotaMiddleware(FakeApp(), {})
cache = FakeCache({'bytes': 0, 'meta': {'quota-bytes': '2'}})
req = Request.blank('/v1/a/c/o',
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
res = req.get_response(app)
@ -87,7 +97,8 @@ class TestContainerQuotas(unittest.TestCase):
def test_not_exceed_bytes_quota(self):
app = container_quotas.ContainerQuotaMiddleware(FakeApp(), {})
cache = FakeCache({'bytes': 0, 'meta': {'quota-bytes': '100'}})
req = Request.blank('/v1/a/c/o',
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
res = req.get_response(app)
@ -96,7 +107,8 @@ class TestContainerQuotas(unittest.TestCase):
def test_exceed_counts_quota(self):
app = container_quotas.ContainerQuotaMiddleware(FakeApp(), {})
cache = FakeCache({'object_count': 1, 'meta': {'quota-count': '1'}})
req = Request.blank('/v1/a/c/o',
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
res = req.get_response(app)
@ -105,21 +117,24 @@ class TestContainerQuotas(unittest.TestCase):
def test_not_exceed_counts_quota(self):
app = container_quotas.ContainerQuotaMiddleware(FakeApp(), {})
cache = FakeCache({'object_count': 1, 'meta': {'quota-count': '2'}})
req = Request.blank('/v1/a/c/o',
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
res = req.get_response(app)
self.assertEquals(res.status_int, 200)
def test_invalid_quotas(self):
req = Request.blank('/v1/a/c',
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_CONTAINER_META_QUOTA_BYTES': 'abc'})
res = req.get_response(
container_quotas.ContainerQuotaMiddleware(FakeApp(), {}))
self.assertEquals(res.status_int, 400)
req = Request.blank('/v1/a/c',
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_CONTAINER_META_QUOTA_COUNT': 'abc'})
res = req.get_response(
@ -127,14 +142,16 @@ class TestContainerQuotas(unittest.TestCase):
self.assertEquals(res.status_int, 400)
def test_valid_quotas(self):
req = Request.blank('/v1/a/c',
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_CONTAINER_META_QUOTA_BYTES': '123'})
res = req.get_response(
container_quotas.ContainerQuotaMiddleware(FakeApp(), {}))
self.assertEquals(res.status_int, 200)
req = Request.blank('/v1/a/c',
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_CONTAINER_META_QUOTA_COUNT': '123'})
res = req.get_response(
@ -142,7 +159,8 @@ class TestContainerQuotas(unittest.TestCase):
self.assertEquals(res.status_int, 200)
def test_delete_quotas(self):
req = Request.blank('/v1/a/c',
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'POST',
'HTTP_X_CONTAINER_META_QUOTA_BYTES': None})
res = req.get_response(
@ -152,7 +170,8 @@ class TestContainerQuotas(unittest.TestCase):
def test_missing_container(self):
app = container_quotas.ContainerQuotaMiddleware(FakeMissingApp(), {})
cache = FakeCache({'bytes': 0, 'meta': {'quota-bytes': '100'}})
req = Request.blank('/v1/a/c/o',
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
res = req.get_response(app)
@ -162,7 +181,8 @@ class TestContainerQuotas(unittest.TestCase):
app = container_quotas.ContainerQuotaMiddleware(FakeApp(), {})
cache = FakeCache({'object_count': 1, 'meta': {'quota-count': '1'},
'write_acl': None})
req = Request.blank('/v1/a/c/o',
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100',
'swift.authorize': lambda *args: HTTPUnauthorized()})

View File

@ -238,7 +238,7 @@ class TestDBReplicator(unittest.TestCase):
node = {'replication_ip': '127.0.0.1', 'replication_port': 80,
'device': 'sdb1'}
conn = db_replicator.ReplConnection(node, '1234567890', 'abcdefg',
logging.getLogger())
logging.getLogger())
def req(method, path, body, headers):
self.assertEquals(method, 'REPLICATE')
@ -260,11 +260,13 @@ class TestDBReplicator(unittest.TestCase):
def test_rsync_file(self):
replicator = TestReplicator({})
with _mock_process(-1):
self.assertEquals(False,
replicator._rsync_file('/some/file', 'remote:/some/file'))
self.assertEquals(
False,
replicator._rsync_file('/some/file', 'remote:/some/file'))
with _mock_process(0):
self.assertEquals(True,
replicator._rsync_file('/some/file', 'remote:/some/file'))
self.assertEquals(
True,
replicator._rsync_file('/some/file', 'remote:/some/file'))
def test_rsync_file_popen_args(self):
replicator = TestReplicator({})
@ -383,7 +385,8 @@ class TestDBReplicator(unittest.TestCase):
with patch('os.path.getmtime', ChangingMtimesOs()):
broker = FakeBroker()
replicator = MyTestReplicator(broker)
fake_device = {'ip': '127.0.0.1', 'replication_ip': '127.0.0.1',
fake_device = {'ip': '127.0.0.1',
'replication_ip': '127.0.0.1',
'device': 'sda1'}
replicator._rsync_db(broker, fake_device, ReplHttp(), 'abcd')
self.assertEquals(2, replicator._rsync_file_call_count)
@ -518,7 +521,7 @@ class TestDBReplicator(unittest.TestCase):
os.mkdir(temp_hash_dir)
temp_file = NamedTemporaryFile(dir=temp_hash_dir, delete=False)
temp_hash_dir2 = os.path.join(temp_suf_dir,
'266e33924a08ede4204871468c11e16e')
'266e33924a08ede4204871468c11e16e')
os.mkdir(temp_hash_dir2)
temp_file2 = NamedTemporaryFile(dir=temp_hash_dir2, delete=False)
@ -773,9 +776,10 @@ class TestReplToNode(unittest.TestCase):
self.replicator = TestReplicator({})
self.fake_node = {'ip': '127.0.0.1', 'device': 'sda1', 'port': 1000}
self.fake_info = {'id': 'a', 'point': -1, 'max_row': 10, 'hash': 'b',
'created_at': 100, 'put_timestamp': 0,
'delete_timestamp': 0, 'count': 0,
'metadata': {'Test': ('Value', normalize_timestamp(1))}}
'created_at': 100, 'put_timestamp': 0,
'delete_timestamp': 0, 'count': 0,
'metadata': {
'Test': ('Value', normalize_timestamp(1))}}
self.replicator.logger = mock.Mock()
self.replicator._rsync_db = mock.Mock(return_value=True)
self.replicator._usync_db = mock.Mock(return_value=True)
@ -790,7 +794,7 @@ class TestReplToNode(unittest.TestCase):
self.fake_node, self.broker, '0', self.fake_info), True)
self.replicator._usync_db.assert_has_calls([
mock.call(max(rinfo['point'], local_sync), self.broker,
self.http, rinfo['id'], self.fake_info['id'])
self.http, rinfo['id'], self.fake_info['id'])
])
def test_repl_to_node_rsync_success(self):
@ -803,9 +807,10 @@ class TestReplToNode(unittest.TestCase):
mock.call.increment('remote_merges')
])
self.replicator._rsync_db.assert_has_calls([
mock.call(self.broker, self.fake_node, self.http, self.fake_info['id'],
replicate_method='rsync_then_merge',
replicate_timeout=(self.fake_info['count'] / 2000))
mock.call(self.broker, self.fake_node, self.http,
self.fake_info['id'],
replicate_method='rsync_then_merge',
replicate_timeout=(self.fake_info['count'] / 2000))
])
def test_repl_to_node_already_in_sync(self):
@ -825,14 +830,15 @@ class TestReplToNode(unittest.TestCase):
mock.call.increment('rsyncs')
])
self.replicator._rsync_db.assert_has_calls([
mock.call(self.broker, self.fake_node, self.http, self.fake_info['id'])
mock.call(self.broker, self.fake_node, self.http,
self.fake_info['id'])
])
def test_repl_to_node_drive_not_mounted(self):
self.http = ReplHttp('{"id": 3, "point": -1}', set_status=507)
self.assertRaises(DriveNotMounted, self.replicator._repl_to_node,
self.fake_node, FakeBroker(), '0', self.fake_info)
self.fake_node, FakeBroker(), '0', self.fake_info)
def test_repl_to_node_300_status(self):
self.http = ReplHttp('{"id": 3, "point": -1}', set_status=300)

View File

@ -114,7 +114,8 @@ class TestDirectClient(unittest.TestCase):
was_http_connector = direct_client.http_connect
direct_client.http_connect = mock_http_connect(200, fake_headers, body)
resp_headers, resp = direct_client.direct_get_account(node, part, account)
resp_headers, resp = direct_client.direct_get_account(node, part,
account)
fake_headers.update({'user-agent': 'direct-client %s' % os.getpid()})
self.assertEqual(fake_headers, resp_headers)
@ -122,7 +123,8 @@ class TestDirectClient(unittest.TestCase):
direct_client.http_connect = mock_http_connect(204, fake_headers, body)
resp_headers, resp = direct_client.direct_get_account(node, part, account)
resp_headers, resp = direct_client.direct_get_account(node, part,
account)
fake_headers.update({'user-agent': 'direct-client %s' % os.getpid()})
self.assertEqual(fake_headers, resp_headers)
@ -140,7 +142,8 @@ class TestDirectClient(unittest.TestCase):
was_http_connector = direct_client.http_connect
direct_client.http_connect = mock_http_connect(200, headers)
resp = direct_client.direct_head_container(node, part, account, container)
resp = direct_client.direct_head_container(node, part, account,
container)
headers.update({'user-agent': 'direct-client %s' % os.getpid()})
self.assertEqual(headers, resp)
@ -219,7 +222,8 @@ class TestDirectClient(unittest.TestCase):
direct_client.http_connect = mock_http_connect(200, body=contents)
resp_header, obj_body = (
direct_client.direct_get_object(node, part, account, container, name))
direct_client.direct_get_object(node, part, account, container,
name))
self.assertEqual(obj_body, contents)
direct_client.http_connect = was_http_connector
@ -240,7 +244,7 @@ class TestDirectClient(unittest.TestCase):
direct_client.http_connect = mock_http_connect(200, fake_headers)
direct_client.direct_post_object(node, part, account,
container, name, headers)
container, name, headers)
self.assertEqual(headers['Key'], fake_headers[0].get('Key'))
direct_client.http_connect = was_http_connector
@ -255,7 +259,8 @@ class TestDirectClient(unittest.TestCase):
was_http_connector = direct_client.http_connect
direct_client.http_connect = mock_http_connect(200)
direct_client.direct_delete_object(node, part, account, container, name)
direct_client.direct_delete_object(node, part, account, container,
name)
direct_client.http_connect = was_http_connector
@ -271,7 +276,7 @@ class TestDirectClient(unittest.TestCase):
direct_client.http_connect = mock_http_connect(200)
resp = direct_client.direct_put_object(node, part, account,
container, name, contents, 6)
container, name, contents, 6)
self.assertEqual(md5('123456').hexdigest(), resp)
direct_client.http_connect = was_http_connector
@ -288,8 +293,8 @@ class TestDirectClient(unittest.TestCase):
direct_client.http_connect = mock_http_connect(500)
self.assertRaises(direct_client.ClientException,
direct_client.direct_put_object, node, part, account,
container, name, contents)
direct_client.direct_put_object, node, part, account,
container, name, contents)
direct_client.http_connect = was_http_connector
@ -305,7 +310,7 @@ class TestDirectClient(unittest.TestCase):
direct_client.http_connect = mock_http_connect(200)
resp = direct_client.direct_put_object(node, part, account,
container, name, contents)
container, name, contents)
self.assertEqual(md5('6\r\n123456\r\n0\r\n\r\n').hexdigest(), resp)
direct_client.http_connect = was_http_connector
@ -322,7 +327,8 @@ class TestDirectClient(unittest.TestCase):
direct_client.http_connect = mock_http_connect(200, headers)
attempts, resp = direct_client.retry(direct_client.direct_head_object,
node, part, account, container, name)
node, part, account, container,
name)
headers.update({'user-agent': 'direct-client %s' % os.getpid()})
self.assertEqual(headers, resp)
self.assertEqual(attempts, 1)

View File

@ -116,7 +116,9 @@ class TestObjectExpirer(TestCase):
self.assertRaises(ValueError, expirer.ObjectExpirer, conf)
def test_process_based_concurrency(self):
class ObjectExpirer(expirer.ObjectExpirer):
def __init__(self, conf):
super(ObjectExpirer, self).__init__(conf)
self.processes = 3
@ -128,6 +130,7 @@ class TestObjectExpirer(TestCase):
self.deleted_objects[container].add(obj)
class InternalClient(object):
def __init__(self, containers):
self.containers = containers
@ -232,9 +235,9 @@ class TestObjectExpirer(TestCase):
x.swift = 'throw error because a string does not have needed methods'
x.run_once()
self.assertEqual(x.logger.log_dict['exception'],
[(("Unhandled exception",), {},
"'str' object has no attribute "
"'get_account_info'")])
[(("Unhandled exception",), {},
"'str' object has no attribute "
"'get_account_info'")])
def test_run_once_calls_report(self):
class InternalClient(object):
@ -286,7 +289,8 @@ class TestObjectExpirer(TestCase):
x.logger = FakeLogger()
x.swift = InternalClient([{'name': str(int(time() - 86400))}])
x.run_once()
self.assertEqual(x.logger.log_dict['exception'],
self.assertEqual(
x.logger.log_dict['exception'],
[(('Unhandled exception',), {},
str(Exception('This should not have been called')))])
@ -313,13 +317,15 @@ class TestObjectExpirer(TestCase):
x = expirer.ObjectExpirer({})
x.logger = FakeLogger()
x.swift = InternalClient([{'name': str(int(time() - 86400))}],
x.swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % int(time() + 86400)}])
x.run_once()
for exccall in x.logger.log_dict['exception']:
self.assertTrue(
'This should not have been called' not in exccall[0][0])
self.assertEqual(x.logger.log_dict['info'],
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 0 objects expired',), {})])
@ -328,7 +334,8 @@ class TestObjectExpirer(TestCase):
x = expirer.ObjectExpirer({})
x.logger = FakeLogger()
ts = int(time() - 86400)
x.swift = InternalClient([{'name': str(int(time() - 86400))}],
x.swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % ts}])
x.delete_actual_object = should_not_be_called
x.run_once()
@ -336,7 +343,8 @@ class TestObjectExpirer(TestCase):
for exccall in x.logger.log_dict['exception']:
if exccall[0][0].startswith('Exception while deleting '):
excswhiledeleting.append(exccall[0][0])
self.assertEqual(excswhiledeleting,
self.assertEqual(
excswhiledeleting,
['Exception while deleting object %d %d-actual-obj '
'This should not have been called' % (ts, ts)])
@ -372,17 +380,20 @@ class TestObjectExpirer(TestCase):
x.iter_containers = lambda: [str(int(time() - 86400))]
ts = int(time() - 86400)
x.delete_actual_object = deliberately_blow_up
x.swift = InternalClient([{'name': str(int(time() - 86400))}],
x.swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % ts}])
x.run_once()
excswhiledeleting = []
for exccall in x.logger.log_dict['exception']:
if exccall[0][0].startswith('Exception while deleting '):
excswhiledeleting.append(exccall[0][0])
self.assertEqual(excswhiledeleting,
self.assertEqual(
excswhiledeleting,
['Exception while deleting object %d %d-actual-obj '
'failed to delete actual object' % (ts, ts)])
self.assertEqual(x.logger.log_dict['info'],
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 0 objects expired',), {})])
@ -392,14 +403,16 @@ class TestObjectExpirer(TestCase):
x.logger = FakeLogger()
ts = int(time() - 86400)
x.delete_actual_object = lambda o, t: None
x.swift = InternalClient([{'name': str(int(time() - 86400))}],
x.swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % ts}])
x.run_once()
excswhiledeleting = []
for exccall in x.logger.log_dict['exception']:
if exccall[0][0].startswith('Exception while deleting '):
excswhiledeleting.append(exccall[0][0])
self.assertEqual(excswhiledeleting,
self.assertEqual(
excswhiledeleting,
['Exception while deleting object %d %d-actual-obj This should '
'not have been called' % (ts, ts)])
@ -428,11 +441,13 @@ class TestObjectExpirer(TestCase):
x.logger = FakeLogger()
x.delete_actual_object = lambda o, t: None
self.assertEqual(x.report_objects, 0)
x.swift = InternalClient([{'name': str(int(time() - 86400))}],
x.swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': '%d-actual-obj' % int(time() - 86400)}])
x.run_once()
self.assertEqual(x.report_objects, 1)
self.assertEqual(x.logger.log_dict['info'],
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 1 objects expired',), {})])
@ -468,11 +483,13 @@ class TestObjectExpirer(TestCase):
x.logger = FakeLogger()
x.delete_actual_object = delete_actual_object_test_for_unicode
self.assertEqual(x.report_objects, 0)
x.swift = InternalClient([{'name': str(int(time() - 86400))}],
x.swift = InternalClient(
[{'name': str(int(time() - 86400))}],
[{'name': u'%d-actual-obj' % int(time() - 86400)}])
x.run_once()
self.assertEqual(x.report_objects, 1)
self.assertEqual(x.logger.log_dict['info'],
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 1 objects expired',), {})])
@ -538,7 +555,8 @@ class TestObjectExpirer(TestCase):
'container' % (cts,),
'Exception while deleting container %d failed to delete '
'container' % (cts + 1,)]))
self.assertEqual(x.logger.log_dict['info'],
self.assertEqual(
x.logger.log_dict['info'],
[(('Pass beginning; 1 possible containers; '
'2 possible objects',), {}),
(('Pass completed in 0s; 0 objects expired',), {})])
@ -589,8 +607,8 @@ class TestObjectExpirer(TestCase):
expirer.sleep = orig_sleep
self.assertEqual(str(err), 'exiting exception 2')
self.assertEqual(x.logger.log_dict['exception'],
[(('Unhandled exception',), {},
'exception 1')])
[(('Unhandled exception',), {},
'exception 1')])
def test_delete_actual_object(self):
got_env = [None]
@ -651,7 +669,8 @@ class TestObjectExpirer(TestCase):
def test_delete_actual_object_does_not_handle_odd_stuff(self):
def fake_app(env, start_response):
start_response('503 Internal Server Error',
start_response(
'503 Internal Server Error',
[('Content-Length', '0')])
return []
@ -675,7 +694,7 @@ class TestObjectExpirer(TestCase):
x.delete_actual_object(name, timestamp)
x.swift.make_request.assert_called_once()
self.assertEqual(x.swift.make_request.call_args[0][1],
'/v1/' + urllib.quote(name))
'/v1/' + urllib.quote(name))
if __name__ == '__main__':

View File

@ -126,10 +126,12 @@ def _create_test_ring(path):
intended_part_shift = 30
intended_reload_time = 15
with closing(GzipFile(testgz, 'wb')) as f:
pickle.dump(ring.RingData(intended_replica2part2dev_id,
intended_devs, intended_part_shift),
pickle.dump(
ring.RingData(intended_replica2part2dev_id,
intended_devs, intended_part_shift),
f)
return ring.Ring(path, ring_name='object', reload_time=intended_reload_time)
return ring.Ring(path, ring_name='object',
reload_time=intended_reload_time)
class TestObjectReplicator(unittest.TestCase):
@ -164,7 +166,7 @@ class TestObjectReplicator(unittest.TestCase):
def test_run_once(self):
replicator = object_replicator.ObjectReplicator(
dict(swift_dir=self.testdir, devices=self.devices,
mount_check='false', timeout='300', stats_interval='1'))
mount_check='false', timeout='300', stats_interval='1'))
was_connector = object_replicator.http_connect
object_replicator.http_connect = mock_http_connect(200)
cur_part = '0'
@ -182,7 +184,7 @@ class TestObjectReplicator(unittest.TestCase):
process_arg_checker = []
nodes = [node for node in
self.ring.get_part_nodes(int(cur_part))
if node['ip'] not in _ips()]
if node['ip'] not in _ips()]
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], cur_part)
process_arg_checker.append(
@ -314,15 +316,15 @@ class TestObjectReplicator(unittest.TestCase):
def test_run_once_recover_from_failure(self):
replicator = object_replicator.ObjectReplicator(
dict(swift_dir=self.testdir, devices=self.devices,
mount_check='false', timeout='300', stats_interval='1'))
mount_check='false', timeout='300', stats_interval='1'))
was_connector = object_replicator.http_connect
try:
object_replicator.http_connect = mock_http_connect(200)
# Write some files into '1' and run replicate- they should be moved
# to the other partitions and then node should get deleted.
cur_part = '1'
df = diskfile.DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o',
FakeLogger())
df = diskfile.DiskFile(
self.devices, 'sda', cur_part, 'a', 'c', 'o', FakeLogger())
mkdirs(df.datadir)
f = open(os.path.join(df.datadir,
normalize_timestamp(time.time()) + '.data'),
@ -335,7 +337,7 @@ class TestObjectReplicator(unittest.TestCase):
process_arg_checker = []
nodes = [node for node in
self.ring.get_part_nodes(int(cur_part))
if node['ip'] not in _ips()]
if node['ip'] not in _ips()]
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'],
cur_part)
@ -350,16 +352,16 @@ class TestObjectReplicator(unittest.TestCase):
for i, result in [('0', True), ('1', False),
('2', True), ('3', True)]:
self.assertEquals(os.access(
os.path.join(self.objects,
i, diskfile.HASH_FILE),
os.F_OK), result)
os.path.join(self.objects,
i, diskfile.HASH_FILE),
os.F_OK), result)
finally:
object_replicator.http_connect = was_connector
def test_run_once_recover_from_timeout(self):
replicator = object_replicator.ObjectReplicator(
dict(swift_dir=self.testdir, devices=self.devices,
mount_check='false', timeout='300', stats_interval='1'))
mount_check='false', timeout='300', stats_interval='1'))
was_connector = object_replicator.http_connect
was_get_hashes = object_replicator.get_hashes
was_execute = tpool.execute
@ -385,8 +387,8 @@ class TestObjectReplicator(unittest.TestCase):
# Write some files into '1' and run replicate- they should be moved
# to the other partitions and then node should get deleted.
cur_part = '1'
df = diskfile.DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o',
FakeLogger())
df = diskfile.DiskFile(
self.devices, 'sda', cur_part, 'a', 'c', 'o', FakeLogger())
mkdirs(df.datadir)
f = open(os.path.join(df.datadir,
normalize_timestamp(time.time()) + '.data'),
@ -399,7 +401,7 @@ class TestObjectReplicator(unittest.TestCase):
process_arg_checker = []
nodes = [node for node in
self.ring.get_part_nodes(int(cur_part))
if node['ip'] not in _ips()]
if node['ip'] not in _ips()]
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'],
cur_part)
@ -464,9 +466,9 @@ class TestObjectReplicator(unittest.TestCase):
self.assertEquals(mock_http.call_count, len(self.ring._devs) - 1)
reqs = []
for node in job['nodes']:
reqs.append(mock.call(node['ip'], node['port'], node['device'],
job['partition'], 'REPLICATE', '',
headers=self.headers))
reqs.append(mock.call(node['ip'], node['port'], node['device'],
job['partition'], 'REPLICATE', '',
headers=self.headers))
if job['partition'] == '0':
self.assertEquals(self.replicator.suffix_hash, 0)
mock_http.assert_has_calls(reqs, any_order=True)
@ -542,14 +544,14 @@ class TestObjectReplicator(unittest.TestCase):
self.replicator.update(repl_job)
reqs = []
for node in repl_job['nodes']:
reqs.append(mock.call(node['replication_ip'],
node['replication_port'], node['device'],
repl_job['partition'], 'REPLICATE',
'', headers=self.headers))
reqs.append(mock.call(node['replication_ip'],
node['replication_port'], node['device'],
repl_job['partition'], 'REPLICATE',
'/a83', headers=self.headers))
reqs.append(mock.call(node['replication_ip'],
node['replication_port'], node['device'],
repl_job['partition'], 'REPLICATE',
'', headers=self.headers))
reqs.append(mock.call(node['replication_ip'],
node['replication_port'], node['device'],
repl_job['partition'], 'REPLICATE',
'/a83', headers=self.headers))
mock_http.assert_has_calls(reqs, any_order=True)