Merge "Add FakeStatsdClient to unit tests"
commit a57479a023
@@ -1234,21 +1234,7 @@ class StatsdClient(object):
         self.logger = logger
 
         # Determine if host is IPv4 or IPv6
-        addr_info = None
-        try:
-            addr_info = socket.getaddrinfo(host, port, socket.AF_INET)
-            self._sock_family = socket.AF_INET
-        except socket.gaierror:
-            try:
-                addr_info = socket.getaddrinfo(host, port, socket.AF_INET6)
-                self._sock_family = socket.AF_INET6
-            except socket.gaierror:
-                # Don't keep the server from starting from what could be a
-                # transient DNS failure. Any hostname will get re-resolved as
-                # necessary in the .sendto() calls.
-                # However, we don't know if we're IPv4 or IPv6 in this case, so
-                # we assume legacy IPv4.
-                self._sock_family = socket.AF_INET
+        addr_info, self._sock_family = self._determine_sock_family(host, port)
 
         # NOTE: we use the original host value, not the DNS-resolved one
         # because if host is a hostname, we don't want to cache the DNS
@@ -1268,6 +1254,24 @@ class StatsdClient(object):
         else:
             self._target = (host, port)
 
+    def _determine_sock_family(self, host, port):
+        addr_info = sock_family = None
+        try:
+            addr_info = socket.getaddrinfo(host, port, socket.AF_INET)
+            sock_family = socket.AF_INET
+        except socket.gaierror:
+            try:
+                addr_info = socket.getaddrinfo(host, port, socket.AF_INET6)
+                sock_family = socket.AF_INET6
+            except socket.gaierror:
+                # Don't keep the server from starting from what could be a
+                # transient DNS failure. Any hostname will get re-resolved as
+                # necessary in the .sendto() calls.
+                # However, we don't know if we're IPv4 or IPv6 in this case, so
+                # we assume legacy IPv4.
+                sock_family = socket.AF_INET
+        return addr_info, sock_family
+
     def _set_prefix(self, tail_prefix):
         """
         Modifies the prefix that is added to metric names. The resulting prefix
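Factoring the lookup into _determine_sock_family() gives the constructor a single seam that subclasses can override; the FakeStatsdClient added below stubs it out to avoid real DNS lookups. A minimal sketch of the fallback contract as a standalone function (hypothetical name, mirroring the method above; the IPv6 assert assumes the host has IPv6 enabled):

    import socket

    def determine_sock_family(host, port):
        # Prefer IPv4; fall back to IPv6; on a (possibly transient) DNS
        # failure assume legacy IPv4 and let sendto() re-resolve later.
        try:
            socket.getaddrinfo(host, port, socket.AF_INET)
            return socket.AF_INET
        except socket.gaierror:
            try:
                socket.getaddrinfo(host, port, socket.AF_INET6)
                return socket.AF_INET6
            except socket.gaierror:
                return socket.AF_INET

    assert determine_sock_family('127.0.0.1', 8125) == socket.AF_INET
    assert determine_sock_family('::1', 8125) == socket.AF_INET6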
@@ -1342,11 +1346,15 @@ class StatsdClient(object):
     def decrement(self, metric, sample_rate=None):
         return self.update_stats(metric, -1, sample_rate)
 
-    def timing(self, metric, timing_ms, sample_rate=None):
+    def _timing(self, metric, timing_ms, sample_rate):
+        # This method was added to disaggregate timing metrics when testing
         return self._send(metric, timing_ms, 'ms', sample_rate)
 
+    def timing(self, metric, timing_ms, sample_rate=None):
+        return self._timing(metric, timing_ms, sample_rate)
+
     def timing_since(self, metric, orig_time, sample_rate=None):
-        return self.timing(metric, (time.time() - orig_time) * 1000,
-                           sample_rate)
+        return self._timing(metric, (time.time() - orig_time) * 1000,
+                            sample_rate)
 
     def transfer_rate(self, metric, elapsed_time, byte_xfer, sample_rate=None):
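Internal callers now route through the private _timing() instead of the public timing(). That matters for a capturing fake: FakeStatsdClient (below) wraps the public methods, so if timing_since() still delegated via timing(), one timing_since() call would be captured twice. A toy sketch of the pattern (class name hypothetical):

    import time

    class Client(object):
        def _timing(self, metric, ms):
            pass  # the real client serializes and sends the sample here

        def timing(self, metric, ms):
            return self._timing(metric, ms)

        def timing_since(self, metric, orig):
            # delegating to _timing() means a subclass that wraps the
            # public timing() records exactly one call per invocation
            return self._timing(metric, (time.time() - orig) * 1000)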
@@ -29,6 +29,74 @@ class WARN_DEPRECATED(Exception):
         print(self.msg)
 
 
+class FakeStatsdClient(utils.StatsdClient):
+    def __init__(self, host, port, base_prefix='', tail_prefix='',
+                 default_sample_rate=1, sample_rate_factor=1, logger=None):
+        super(FakeStatsdClient, self).__init__(
+            host, port, base_prefix, tail_prefix, default_sample_rate,
+            sample_rate_factor, logger)
+        self.clear()
+
+        # Capture then call parent public stat functions
+        self.update_stats = self._capture("update_stats")
+        self.increment = self._capture("increment")
+        self.decrement = self._capture("decrement")
+        self.timing = self._capture("timing")
+        self.timing_since = self._capture("timing_since")
+        self.transfer_rate = self._capture("transfer_rate")
+
+    def _capture(self, func_name):
+        func = getattr(super(FakeStatsdClient, self), func_name)
+
+        def wrapper(*args, **kwargs):
+            self.calls[func_name].append((args, kwargs))
+            return func(*args, **kwargs)
+        return wrapper
+
+    def _determine_sock_family(self, host, port):
+        return None, None
+
+    def _open_socket(self):
+        return self
+
+    # sendto and close are mimicking the socket calls.
+    def sendto(self, msg, target):
+        self.sendto_calls.append((msg, target))
+
+    def close(self):
+        pass
+
+    def _send(self, *args, **kwargs):
+        self.send_calls.append((args, kwargs))
+        super(FakeStatsdClient, self)._send(*args, **kwargs)
+
+    def clear(self):
+        self.send_calls = []
+        self.calls = defaultdict(list)
+        self.sendto_calls = []
+
+    def get_increments(self):
+        return [call[0][0] for call in self.calls['increment']]
+
+    def get_increment_counts(self):
+        # note: this method reports the sum of stats sent via the increment
+        # method only; consider using get_stats_counts instead to get the sum
+        # of stats sent via both the increment and update_stats methods
+        counts = defaultdict(int)
+        for metric in self.get_increments():
+            counts[metric] += 1
+        return counts
+
+    def get_update_stats(self):
+        return [call[0][:2] for call in self.calls['update_stats']]
+
+    def get_stats_counts(self):
+        counts = defaultdict(int)
+        for metric, step in self.get_update_stats():
+            counts[metric] += step
+        return counts
+
+
 class CaptureLog(object):
     """
     Captures log records passed to the ``handle`` method and provides accessor
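A sketch of driving the new fake directly (host and port are arbitrary; the fake never opens a real socket, so nothing is sent anywhere):

    client = FakeStatsdClient('host', 8125)
    client.increment('errors')
    client.increment('errors')
    client.update_stats('bytes', 42)

    # public calls are captured per method name...
    assert client.get_increment_counts() == {'errors': 2}
    assert len(client.calls['increment']) == 2
    # ...and increment() funnels through update_stats(), so
    # get_stats_counts() aggregates both kinds of call
    assert client.get_stats_counts() == {'errors': 2, 'bytes': 42}
    # the "wire-level" view is captured too, one datagram per stat
    assert len(client.sendto_calls) == 3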
@@ -79,7 +147,7 @@ class FakeLogger(logging.Logger, CaptureLog):
         self.level = logging.NOTSET
         if 'facility' in kwargs:
             self.facility = kwargs['facility']
-        self.statsd_client = None
+        self.statsd_client = FakeStatsdClient("host", 8125)
         self.thread_locals = None
         self.parent = None
 
@@ -92,6 +160,13 @@ class FakeLogger(logging.Logger, CaptureLog):
         NOTICE: 'notice',
     }
 
+    def clear(self):
+        self._clear()
+        self.statsd_client.clear()
+
+    def close(self):
+        self.clear()
+
     def warn(self, *args, **kwargs):
         raise WARN_DEPRECATED("Deprecated Method warn use warning instead")
 
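With the fake wired in, a single clear() resets both the captured log records and the captured metrics, e.g.:

    logger = FakeLogger()
    logger.statsd_client.increment('hits')
    assert logger.statsd_client.get_increment_counts() == {'hits': 1}
    logger.clear()  # drops log records and statsd captures together
    assert logger.statsd_client.get_increment_counts() == {}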
@@ -116,53 +191,9 @@ class FakeLogger(logging.Logger, CaptureLog):
             self.log_dict[store_name].append((tuple(cargs), captured))
         super(FakeLogger, self)._log(level, msg, *args, **kwargs)
 
-    def _store_in(store_name):
-        def stub_fn(self, *args, **kwargs):
-            self.log_dict[store_name].append((args, kwargs))
-        return stub_fn
-
-    # mock out the StatsD logging methods:
-    update_stats = _store_in('update_stats')
-    increment = _store_in('increment')
-    decrement = _store_in('decrement')
-    timing = _store_in('timing')
-    timing_since = _store_in('timing_since')
-    transfer_rate = _store_in('transfer_rate')
-    set_statsd_prefix = _store_in('set_statsd_prefix')
-
-    def get_increments(self):
-        return [call[0][0] for call in self.log_dict['increment']]
-
-    def get_increment_counts(self):
-        # note: this method reports the sum of stats sent via the increment
-        # method only; consider using get_stats_counts instead to get the sum
-        # of stats sent via both the increment and update_stats methods
-        counts = {}
-        for metric in self.get_increments():
-            if metric not in counts:
-                counts[metric] = 0
-            counts[metric] += 1
-        return counts
-
-    def get_update_stats(self):
-        return [call[0] for call in self.log_dict['update_stats']]
-
-    def get_stats_counts(self):
-        # return dict key->count for stats, aggregating calls to both the
-        # increment and update methods
-        counts = self.get_increment_counts()
-        for metric, step in self.get_update_stats():
-            if metric not in counts:
-                counts[metric] = 0
-            counts[metric] += step
-        return counts
-
     def setFormatter(self, obj):
         self.formatter = obj
 
-    def close(self):
-        self._clear()
-
     def set_name(self, name):
         # don't touch _handlers
         self._name = name
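The _store_in() stubs and the log_dict-based accessors are gone from FakeLogger; captured metrics now live on the attached fake client. The mechanical before/after access pattern applied throughout the rest of this change:

    # before
    counts = logger.get_increment_counts()
    timing_calls = logger.log_dict['timing']

    # after
    counts = logger.statsd_client.get_increment_counts()
    timing_calls = logger.statsd_client.calls['timing']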
@@ -214,20 +245,6 @@ class DebugLogger(FakeLogger):
 
 class DebugLogAdapter(utils.LogAdapter):
 
-    def _send_to_logger(name):
-        def stub_fn(self, *args, **kwargs):
-            return getattr(self.logger, name)(*args, **kwargs)
-        return stub_fn
-
-    # delegate to FakeLogger's mocks
-    update_stats = _send_to_logger('update_stats')
-    increment = _send_to_logger('increment')
-    decrement = _send_to_logger('decrement')
-    timing = _send_to_logger('timing')
-    timing_since = _send_to_logger('timing_since')
-    transfer_rate = _send_to_logger('transfer_rate')
-    set_statsd_prefix = _send_to_logger('set_statsd_prefix')
-
     def __getattribute__(self, name):
         try:
             return object.__getattribute__(self, name)
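DebugLogAdapter.__getattribute__ already falls through to the wrapped logger, so the explicit _send_to_logger delegates were redundant. A condensed sketch of that fallback (the except branch is assumed from the context shown above):

    class Adapter(object):
        def __init__(self, logger):
            self.logger = logger

        def __getattribute__(self, name):
            try:
                return object.__getattribute__(self, name)
            except AttributeError:
                # anything not defined on the adapter, e.g. statsd_client,
                # resolves against the wrapped logger instead
                return getattr(self.logger, name)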
@@ -130,7 +130,8 @@ class TestAuditorRealBroker(unittest.TestCase):
             'The total container_count for the account a (%d) does not match '
             'the sum of container_count across policies (%d)'
             % (num_containers, num_containers - 1), error_message)
-        self.assertEqual(test_auditor.logger.get_increment_counts(),
+        self.assertEqual(
+            test_auditor.logger.statsd_client.get_increment_counts(),
             {'failures': 1})
 
 
@@ -469,7 +469,8 @@ class TestReaper(unittest.TestCase):
                 patch('swift.account.reaper.AccountReaper.reap_object',
                       self.fake_reap_object):
             r.reap_container('a', 'partition', acc_nodes, 'c')
-        self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 1)
+        self.assertEqual(
+            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 1)
         self.assertEqual(r.stats_containers_deleted, 1)
 
     def test_reap_container_partial_fail(self):
@@ -488,7 +489,8 @@ class TestReaper(unittest.TestCase):
                 patch('swift.account.reaper.AccountReaper.reap_object',
                       self.fake_reap_object):
             r.reap_container('a', 'partition', acc_nodes, 'c')
-        self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 4)
+        self.assertEqual(
+            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 4)
         self.assertEqual(r.stats_containers_possibly_remaining, 1)
 
     def test_reap_container_full_fail(self):
@@ -507,7 +509,8 @@ class TestReaper(unittest.TestCase):
                 patch('swift.account.reaper.AccountReaper.reap_object',
                       self.fake_reap_object):
             r.reap_container('a', 'partition', acc_nodes, 'c')
-        self.assertEqual(r.logger.get_increment_counts()['return_codes.4'], 5)
+        self.assertEqual(
+            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 5)
         self.assertEqual(r.stats_containers_remaining, 1)
 
     def test_reap_container_get_object_timeout(self):
@@ -2649,7 +2649,7 @@ class TestAccountController(unittest.TestCase):
         with mock.patch(
                 'time.time',
                 mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0,
-                                            10002.0])):
+                                            10002.0, 10002.0])):
             with mock.patch(
                     'os.getpid', mock.MagicMock(return_value=1234)):
                 req.get_response(self.controller)
@@ -243,8 +243,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                      'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.invalid_header_auth': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_header_auth': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_bad_method(self):
         req = Request.blank('/',
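The doubled .logger.logger in these assertions reflects how the test helpers wire things up: the s3api middleware's logger is an adapter wrapping the fake, roughly:

    # self.s3api.logger          -> debug log adapter wrapping the fake
    # self.s3api.logger.logger   -> FakeLogger, which owns statsd_client
    # ...statsd_client           -> FakeStatsdClient capturing the counts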
@@ -253,8 +254,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                      'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
-        self.assertEqual({'405.MethodNotAllowed': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'405.MethodNotAllowed': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_bad_method_but_method_exists_in_controller(self):
         req = Request.blank(
@@ -264,8 +266,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                      'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
-        self.assertEqual({'405.MethodNotAllowed': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'405.MethodNotAllowed': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_path_info_encode(self):
         bucket_name = 'b%75cket'
@@ -397,8 +400,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         req.content_type = 'text/plain'
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.expired': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.expired': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signed_urls(self):
         # Set expire to last 32b timestamp value
@@ -445,8 +449,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         req.content_type = 'text/plain'
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.invalid_expires': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_expires': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signed_urls_no_sign(self):
         expire = '2147483647'  # 19 Jan 2038 03:14:07
@@ -458,8 +463,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         req.content_type = 'text/plain'
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.invalid_query_auth': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_query_auth': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signed_urls_no_access(self):
         expire = '2147483647'  # 19 Jan 2038 03:14:07
@@ -470,8 +476,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         req.content_type = 'text/plain'
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.invalid_query_auth': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_query_auth': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signed_urls_v4(self):
         req = Request.blank(
@@ -518,8 +525,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                              'AuthorizationQueryParametersError')
             self.assertEqual(self._get_error_message(body), message)
             self.assertIn(extra, body)
-            self.assertEqual({'400.AuthorizationQueryParametersError': 1},
-                             self.s3api.logger.logger.get_increment_counts())
+            self.assertEqual(
+                {'400.AuthorizationQueryParametersError': 1},
+                self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         dt = self.get_v4_amz_date_header().split('T', 1)[0]
         test('test:tester/not-a-date/us-east-1/s3/aws4_request',
@@ -548,8 +556,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         req.content_type = 'text/plain'
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.invalid_date': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_date': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signed_urls_v4_invalid_algorithm(self):
         req = Request.blank(
@@ -565,8 +574,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         req.content_type = 'text/plain'
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidArgument')
-        self.assertEqual({'400.InvalidArgument': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidArgument': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signed_urls_v4_missing_signed_headers(self):
         req = Request.blank(
@@ -582,8 +592,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body),
                          'AuthorizationHeaderMalformed')
-        self.assertEqual({'400.AuthorizationHeaderMalformed': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.AuthorizationHeaderMalformed': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signed_urls_v4_invalid_credentials(self):
         req = Request.blank('/bucket/object'
@@ -598,8 +609,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         req.content_type = 'text/plain'
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.invalid_credential': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_credential': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signed_urls_v4_missing_signature(self):
         req = Request.blank(
@@ -614,8 +626,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         req.content_type = 'text/plain'
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.invalid_query_auth': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_query_auth': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_bucket_virtual_hosted_style(self):
         req = Request.blank('/',
@@ -712,8 +725,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                      'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidURI')
-        self.assertEqual({'400.InvalidURI': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidURI': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_object_create_bad_md5_unreadable(self):
         req = Request.blank('/bucket/object',
@@ -723,8 +737,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                             headers={'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidDigest')
-        self.assertEqual({'400.InvalidDigest': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidDigest': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_object_create_bad_md5_too_short(self):
         too_short_digest = md5(b'hey', usedforsecurity=False).digest()[:-1]
@@ -739,8 +754,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                             headers={'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidDigest')
-        self.assertEqual({'400.InvalidDigest': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidDigest': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_object_create_bad_md5_bad_padding(self):
         too_short_digest = md5(b'hey', usedforsecurity=False).digest()
@@ -755,8 +771,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                             headers={'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidDigest')
-        self.assertEqual({'400.InvalidDigest': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidDigest': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_object_create_bad_md5_too_long(self):
         too_long_digest = md5(
@@ -772,8 +789,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                             headers={'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidDigest')
-        self.assertEqual({'400.InvalidDigest': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidDigest': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_invalid_metadata_directive(self):
         req = Request.blank('/',
@@ -784,8 +802,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                             headers={'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidArgument')
-        self.assertEqual({'400.InvalidArgument': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidArgument': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_invalid_storage_class(self):
         req = Request.blank('/',
@@ -795,8 +814,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                             headers={'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidStorageClass')
-        self.assertEqual({'400.InvalidStorageClass': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidStorageClass': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_invalid_ssc(self):
         req = Request.blank('/',
@@ -806,8 +826,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                      'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'InvalidArgument')
-        self.assertEqual({'400.InvalidArgument': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidArgument': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def _test_unsupported_header(self, header, value=None):
         if value is None:
@@ -820,8 +841,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         self.s3api.logger.logger.clear()
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'NotImplemented')
-        self.assertEqual({'501.NotImplemented': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'501.NotImplemented': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_mfa(self):
         self._test_unsupported_header('x-amz-mfa')
@@ -881,8 +903,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                             headers={'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'NotImplemented')
-        self.assertEqual({'501.NotImplemented': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'501.NotImplemented': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_notification(self):
         self._test_unsupported_resource('notification')
@@ -909,8 +932,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
                                      'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         self.assertEqual(status.split()[0], '200')
-        self.assertEqual({},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         req = Request.blank('/bucket?tagging',
                             environ={'REQUEST_METHOD': 'PUT'},
@@ -919,8 +943,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         self.s3api.logger.logger.clear()
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'NotImplemented')
-        self.assertEqual({'501.NotImplemented': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'501.NotImplemented': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         req = Request.blank('/bucket?tagging',
                             environ={'REQUEST_METHOD': 'DELETE'},
@@ -929,8 +954,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         self.s3api.logger.logger.clear()
         status, headers, body = self.call_s3api(req)
         self.assertEqual(self._get_error_code(body), 'NotImplemented')
-        self.assertEqual({'501.NotImplemented': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'501.NotImplemented': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_restore(self):
         self._test_unsupported_resource('restore')
@@ -945,8 +971,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         self.assertEqual(elem.find('./Code').text, 'MethodNotAllowed')
         self.assertEqual(elem.find('./Method').text, 'POST')
         self.assertEqual(elem.find('./ResourceType').text, 'ACL')
-        self.assertEqual({'405.MethodNotAllowed': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'405.MethodNotAllowed': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     @mock.patch.object(registry, '_sensitive_headers', set())
     @mock.patch.object(registry, '_sensitive_params', set())
@@ -1079,8 +1106,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         status, headers, body = self.call_s3api(req)
         self.assertEqual(status.split()[0], '403')
         self.assertEqual(self._get_error_code(body), 'AccessDenied')
-        self.assertEqual({'403.AccessDenied.invalid_date': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_date': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signature_v4_no_payload(self):
         environ = {
@@ -1100,8 +1128,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         self.assertEqual(
            self._get_error_message(body),
            'Missing required header for this request: x-amz-content-sha256')
-        self.assertEqual({'400.InvalidRequest': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.InvalidRequest': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_signature_v4_bad_authorization_string(self):
         def test(auth_str, error, msg, metric, extra=b''):
@@ -1119,8 +1148,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
             self.assertEqual(self._get_error_code(body), error)
             self.assertEqual(self._get_error_message(body), msg)
             self.assertIn(extra, body)
-            self.assertEqual({metric: 1},
-                             self.s3api.logger.logger.get_increment_counts())
+            self.assertEqual(
+                {metric: 1},
+                self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         auth_str = ('AWS4-HMAC-SHA256 '
                     'SignedHeaders=host;x-amz-date,'
@@ -1382,8 +1412,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         # FIXME: should this failed as 400 or pass via query auth?
         # for now, 403 forbidden for safety
         self.assertEqual(status.split()[0], '403', body)
-        self.assertEqual({'403.AccessDenied.invalid_expires': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_expires': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         # But if we are missing Signature in query param
         req = Request.blank(
@@ -1397,8 +1428,9 @@ class TestS3ApiMiddleware(S3ApiTestCase):
         self.s3api.logger.logger.clear()
         status, headers, body = self.call_s3api(req)
         self.assertEqual(status.split()[0], '403', body)
-        self.assertEqual({'403.AccessDenied.invalid_expires': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_expires': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_s3api_with_only_s3_token(self):
         self.swift = FakeSwift()
@@ -1510,9 +1542,10 @@ class TestS3ApiMiddleware(S3ApiTestCase):
             self.assertEqual(1, mock_req.call_count)
             # it never even tries to contact keystone
             self.assertEqual(0, mock_fetch.call_count)
+        statsd_client = self.s3api.logger.logger.statsd_client
         self.assertEqual(
             {'403.SignatureDoesNotMatch': 1},
-            self.s3api.logger.logger.get_increment_counts())
+            statsd_client.get_increment_counts())
 
     def test_s3api_with_only_s3_token_in_s3acl(self):
         self.swift = FakeSwift()
@@ -1564,30 +1597,35 @@ class TestS3ApiMiddleware(S3ApiTestCase):
 
         status, _, body = do_test(800)
         self.assertEqual('200 OK', status)
-        self.assertFalse(self.s3api.logger.logger.get_increment_counts())
+        self.assertFalse(
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         status, _, body = do_test(-800)
         self.assertEqual('200 OK', status)
-        self.assertFalse(self.s3api.logger.logger.get_increment_counts())
+        self.assertFalse(
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         status, _, body = do_test(1000)
         self.assertEqual('403 Forbidden', status)
         self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
-        self.assertEqual({'403.RequestTimeTooSkewed': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.RequestTimeTooSkewed': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         status, _, body = do_test(-1000)
         self.assertEqual('403 Forbidden', status)
         self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
-        self.assertEqual({'403.RequestTimeTooSkewed': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.RequestTimeTooSkewed': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         self.s3api.conf.allowable_clock_skew = 100
         status, _, body = do_test(800)
         self.assertEqual('403 Forbidden', status)
         self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
-        self.assertEqual({'403.RequestTimeTooSkewed': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.RequestTimeTooSkewed': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
     def test_s3api_error_metric(self):
         class KaboomResponse(ErrorResponse):
@@ -1607,17 +1645,20 @@ class TestS3ApiMiddleware(S3ApiTestCase):
             self.call_s3api(req)
 
         do_test(ErrorResponse(status=403, msg='not good', reason='bad'))
-        self.assertEqual({'403.ErrorResponse.bad': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.ErrorResponse.bad': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         do_test(AccessDenied(msg='no entry', reason='invalid_date'))
-        self.assertEqual({'403.AccessDenied.invalid_date': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'403.AccessDenied.invalid_date': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
         # check whitespace replaced with underscore
         do_test(KaboomResponse(status=400, msg='boom', reason='boom boom'))
-        self.assertEqual({'400.ka_boom.boom_boom': 1},
-                         self.s3api.logger.logger.get_increment_counts())
+        self.assertEqual(
+            {'400.ka_boom.boom_boom': 1},
+            self.s3api.logger.logger.statsd_client.get_increment_counts())
 
 
 if __name__ == '__main__':
@@ -107,7 +107,8 @@ class TestBackendRatelimitMiddleware(unittest.TestCase):
                 fake_time[0] += 0.01
             self.assertEqual(
                 ratelimited,
-                logger.get_increment_counts().get('backend.ratelimit', 0))
+                logger.statsd_client.get_increment_counts().get(
+                    'backend.ratelimit', 0))
             return success
 
     def test_ratelimited(self):
@@ -163,7 +164,8 @@ class TestBackendRatelimitMiddleware(unittest.TestCase):
             resp = req.get_response(rl)
             self.assertEqual(200, resp.status_int)
             self.assertEqual(
-                0, logger.get_increment_counts().get('backend.ratelimit', 0))
+                0, logger.statsd_client.get_increment_counts().get(
+                    'backend.ratelimit', 0))
 
         do_test('/recon/version')
         do_test('/healthcheck')
@@ -1553,7 +1553,7 @@ class TestFormPost(unittest.TestCase):
             do_test(digest, False)
 
         # NB: one increment per *upload*, not client request
-        self.assertEqual(self.logger.get_increment_counts(), {
+        self.assertEqual(self.logger.statsd_client.get_increment_counts(), {
            'formpost.digests.sha1': 4,
            'formpost.digests.sha256': 4,
            'formpost.digests.sha512': 4,
@@ -123,7 +123,7 @@ class TestProxyLogging(unittest.TestCase):
         return info_calls[0][0][0].split(' ')
 
     def assertTiming(self, exp_metric, app, exp_timing=None):
-        timing_calls = app.access_logger.log_dict['timing']
+        timing_calls = app.access_logger.statsd_client.calls['timing']
         found = False
         for timing_call in timing_calls:
             self.assertEqual({}, timing_call[1])
@@ -138,12 +138,13 @@ class TestProxyLogging(unittest.TestCase):
                 exp_metric, timing_calls))
 
     def assertNotTiming(self, not_exp_metric, app):
-        timing_calls = app.access_logger.log_dict['timing']
+        timing_calls = app.access_logger.statsd_client.calls['timing']
         for timing_call in timing_calls:
             self.assertNotEqual(not_exp_metric, timing_call[0][0])
 
     def assertUpdateStats(self, exp_metrics_and_values, app):
-        update_stats_calls = sorted(app.access_logger.log_dict['update_stats'])
+        update_stats_calls = sorted(
+            app.access_logger.statsd_client.calls['update_stats'])
         got_metrics_values_and_kwargs = [(usc[0][0], usc[0][1], usc[1])
                                          for usc in update_stats_calls]
         exp_metrics_values_and_kwargs = [(emv[0], emv[1], {})
@@ -335,8 +336,8 @@ class TestProxyLogging(unittest.TestCase):
                 self.assertNotTiming(
                     '%s.PUT.314.first-byte.timing' % exp_type, app)
                 # No results returned for the non-existent policy
-                self.assertUpdateStats([('object.PUT.314.xfer', 6 + 8)],
-                                       app)
+                self.assertUpdateStats(
+                    [('object.PUT.314.xfer', 6 + 8)], app)
 
     def test_log_request_stat_method_filtering_default(self):
         method_map = {
@@ -506,7 +507,8 @@ class TestProxyLogging(unittest.TestCase):
         self.assertEqual(log_parts[6], '200')
         self.assertEqual(resp_body, b'somechunksof data')
         self.assertEqual(log_parts[11], str(len(resp_body)))
-        self.assertUpdateStats([('SOS.GET.200.xfer', len(resp_body))], app)
+        self.assertUpdateStats([('SOS.GET.200.xfer', len(resp_body))],
+                               app)
 
     def test_log_headers(self):
         for conf_key in ['access_log_headers', 'log_headers']:
@@ -173,7 +173,7 @@ class TestTempURL(unittest.TestCase):
             key, hmac_body, hashlib.sha512).digest())
         self.assert_valid_sig(expires, path, [key], b'sha512:' + sig)
 
-        self.assertEqual(self.logger.get_increment_counts(), {
+        self.assertEqual(self.logger.statsd_client.get_increment_counts(), {
            'tempurl.digests.sha1': 1,
            'tempurl.digests.sha256': 2,
            'tempurl.digests.sha512': 1
@@ -1155,8 +1155,9 @@ class TestDBReplicator(unittest.TestCase):
             self.assertFalse(os.path.exists(temp_file.name))
             self.assertTrue(os.path.exists(temp_hash_dir2))
             self.assertTrue(os.path.exists(temp_file2.name))
-            self.assertEqual([(('removes.some_device',), {})],
-                             replicator.logger.log_dict['increment'])
+            self.assertEqual(
+                [(('removes.some_device',), {})],
+                replicator.logger.statsd_client.calls['increment'])
             self.assertEqual(1, replicator.stats['remove'])
 
             temp_file2.db_file = temp_file2.name
@@ -1169,8 +1170,9 @@ class TestDBReplicator(unittest.TestCase):
             self.assertFalse(os.path.exists(temp_file.name))
             self.assertFalse(os.path.exists(temp_hash_dir2))
             self.assertFalse(os.path.exists(temp_file2.name))
-            self.assertEqual([(('removes.some_device',), {})] * 2,
-                             replicator.logger.log_dict['increment'])
+            self.assertEqual(
+                [(('removes.some_device',), {})] * 2,
+                replicator.logger.statsd_client.calls['increment'])
             self.assertEqual(2, replicator.stats['remove'])
         finally:
             rmtree(temp_dir)
@@ -1134,28 +1134,28 @@ class TestMemcached(unittest.TestCase):
         with patch('time.time',) as mock_time:
             mock_time.return_value = 1000.99
             memcache_client.set('some_key', [1, 2, 3])
-            last_stats = self.logger.log_dict['timing_since'][-1]
+            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
             self.assertEqual('memcached.set.timing', last_stats[0][0])
             self.assertEqual(last_stats[0][1], 1000.99)
             mock_time.return_value = 2000.99
             self.assertEqual(memcache_client.get('some_key'), [1, 2, 3])
-            last_stats = self.logger.log_dict['timing_since'][-1]
+            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
             self.assertEqual('memcached.get.timing', last_stats[0][0])
             self.assertEqual(last_stats[0][1], 2000.99)
             mock_time.return_value = 3000.99
             self.assertEqual(memcache_client.decr('decr_key', delta=5), 0)
-            last_stats = self.logger.log_dict['timing_since'][-1]
+            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
             self.assertEqual('memcached.decr.timing', last_stats[0][0])
             self.assertEqual(last_stats[0][1], 3000.99)
             mock_time.return_value = 4000.99
             self.assertEqual(memcache_client.incr('decr_key', delta=5), 5)
-            last_stats = self.logger.log_dict['timing_since'][-1]
+            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
             self.assertEqual('memcached.incr.timing', last_stats[0][0])
             self.assertEqual(last_stats[0][1], 4000.99)
             mock_time.return_value = 5000.99
             memcache_client.set_multi(
                 {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key')
-            last_stats = self.logger.log_dict['timing_since'][-1]
+            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
             self.assertEqual('memcached.set_multi.timing', last_stats[0][0])
             self.assertEqual(last_stats[0][1], 5000.99)
             mock_time.return_value = 6000.99
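Each captured entry is an (args, kwargs) tuple, so last_stats[0][0] is the metric name and last_stats[0][1] is the start time handed to timing_since(). A self-contained sketch:

    fake = FakeStatsdClient('host', 8125)
    fake.timing_since('memcached.set.timing', 1000.99)
    args, kwargs = fake.calls['timing_since'][-1]
    assert args == ('memcached.set.timing', 1000.99)
    assert kwargs == {}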
@@ -1165,12 +1165,12 @@ class TestMemcached(unittest.TestCase):
                 'multi_key'),
                 [[4, 5, 6],
                  [1, 2, 3]])
-            last_stats = self.logger.log_dict['timing_since'][-1]
+            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
             self.assertEqual('memcached.get_multi.timing', last_stats[0][0])
             self.assertEqual(last_stats[0][1], 6000.99)
             mock_time.return_value = 7000.99
             memcache_client.delete('some_key')
-            last_stats = self.logger.log_dict['timing_since'][-1]
+            last_stats = self.logger.statsd_client.calls['timing_since'][-1]
             self.assertEqual('memcached.delete.timing', last_stats[0][0])
             self.assertEqual(last_stats[0][1], 7000.99)
 
@ -1190,8 +1190,10 @@ class TestMemcached(unittest.TestCase):
|
|||||||
mock_time.return_value = 4000.99
|
mock_time.return_value = 4000.99
|
||||||
with self.assertRaises(MemcacheConnectionError):
|
with self.assertRaises(MemcacheConnectionError):
|
||||||
memcache_client.incr('incr_key', delta=5)
|
memcache_client.incr('incr_key', delta=5)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
self.logger.statsd_client.calls['timing_since'])
|
||||||
|
last_stats = \
|
||||||
|
self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.incr.errors.timing',
|
self.assertEqual('memcached.incr.errors.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4000.99)
|
self.assertEqual(last_stats[0][1], 4000.99)
|
||||||
@ -1218,8 +1220,10 @@ class TestMemcached(unittest.TestCase):
|
|||||||
memcache_client.set(
|
memcache_client.set(
|
||||||
'set_key', [1, 2, 3],
|
'set_key', [1, 2, 3],
|
||||||
raise_on_error=True)
|
raise_on_error=True)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
self.logger.statsd_client.calls['timing_since'])
|
||||||
|
last_stats = \
|
||||||
|
self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.set.errors.timing',
|
self.assertEqual('memcached.set.errors.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4000.99)
|
self.assertEqual(last_stats[0][1], 4000.99)
|
||||||
@ -1244,8 +1248,10 @@ class TestMemcached(unittest.TestCase):
|
|||||||
mock_time.return_value = 4000.99
|
mock_time.return_value = 4000.99
|
||||||
with self.assertRaises(MemcacheConnectionError):
|
with self.assertRaises(MemcacheConnectionError):
|
||||||
memcache_client.get('get_key', raise_on_error=True)
|
memcache_client.get('get_key', raise_on_error=True)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
self.logger.statsd_client.calls['timing_since'])
|
||||||
|
last_stats = \
|
||||||
|
self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.get.errors.timing',
|
self.assertEqual('memcached.get.errors.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4000.99)
|
self.assertEqual(last_stats[0][1], 4000.99)
|
||||||
@ -1270,8 +1276,10 @@ class TestMemcached(unittest.TestCase):
|
|||||||
mock_time.return_value = 4000.99
|
mock_time.return_value = 4000.99
|
||||||
with self.assertRaises(MemcacheConnectionError):
|
with self.assertRaises(MemcacheConnectionError):
|
||||||
memcache_client.get('get_key', raise_on_error=True)
|
memcache_client.get('get_key', raise_on_error=True)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
self.logger.statsd_client.calls['timing_since'])
|
||||||
|
last_stats = \
|
||||||
|
self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.get.conn_err.timing',
|
self.assertEqual('memcached.get.conn_err.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4000.99)
|
self.assertEqual(last_stats[0][1], 4000.99)
|
||||||
@ -1297,8 +1305,10 @@ class TestMemcached(unittest.TestCase):
|
|||||||
mock_time.side_effect = itertools.count(4000.99, 1.0)
|
mock_time.side_effect = itertools.count(4000.99, 1.0)
|
||||||
with self.assertRaises(MemcacheConnectionError):
|
with self.assertRaises(MemcacheConnectionError):
|
||||||
memcache_client.incr('nvratelimit/v2/wf/124593', delta=5)
|
memcache_client.incr('nvratelimit/v2/wf/124593', delta=5)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
self.logger.statsd_client.calls['timing_since'])
|
||||||
|
last_stats = \
|
||||||
|
self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.incr.timeout.timing',
|
self.assertEqual('memcached.incr.timeout.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4002.99)
|
self.assertEqual(last_stats[0][1], 4002.99)
|
||||||
@ -1330,8 +1340,10 @@ class TestMemcached(unittest.TestCase):
|
|||||||
memcache_client.set(
|
memcache_client.set(
|
||||||
'shard-updating-v2/acc/container', [1, 2, 3],
|
'shard-updating-v2/acc/container', [1, 2, 3],
|
||||||
raise_on_error=True)
|
raise_on_error=True)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
self.logger.statsd_client.calls['timing_since'])
|
||||||
|
last_stats = \
|
||||||
|
self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.set.timeout.timing',
|
self.assertEqual('memcached.set.timeout.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4002.99)
|
self.assertEqual(last_stats[0][1], 4002.99)
|
||||||
@ -1362,8 +1374,10 @@ class TestMemcached(unittest.TestCase):
|
|||||||
with self.assertRaises(MemcacheConnectionError):
|
with self.assertRaises(MemcacheConnectionError):
|
||||||
memcache_client.get(
|
memcache_client.get(
|
||||||
'shard-updating-v2/acc/container', raise_on_error=True)
|
'shard-updating-v2/acc/container', raise_on_error=True)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
self.logger.statsd_client.calls['timing_since'])
|
||||||
|
last_stats = \
|
||||||
|
self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.get.timeout.timing',
|
self.assertEqual('memcached.get.timeout.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4002.99)
|
self.assertEqual(last_stats[0][1], 4002.99)
|
||||||
@ -1407,8 +1421,8 @@ class TestMemcached(unittest.TestCase):
|
|||||||
with self.assertRaises(MemcacheConnectionError):
|
with self.assertRaises(MemcacheConnectionError):
|
||||||
memcache_client.incr(
|
memcache_client.incr(
|
||||||
'shard-updating-v2/acc/container', time=1.23)
|
'shard-updating-v2/acc/container', time=1.23)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(self.logger.statsd_client.calls['timing_since'])
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
last_stats = self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.incr.conn_err.timing',
|
self.assertEqual('memcached.incr.conn_err.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4002.99)
|
self.assertEqual(last_stats[0][1], 4002.99)
|
||||||
@ -1440,8 +1454,8 @@ class TestMemcached(unittest.TestCase):
|
|||||||
with self.assertRaises(MemcacheConnectionError):
|
with self.assertRaises(MemcacheConnectionError):
|
||||||
memcache_client.incr(
|
memcache_client.incr(
|
||||||
'shard-updating-v2/acc/container', time=1.23)
|
'shard-updating-v2/acc/container', time=1.23)
|
||||||
self.assertTrue(self.logger.log_dict['timing_since'])
|
self.assertTrue(self.logger.statsd_client.calls['timing_since'])
|
||||||
last_stats = self.logger.log_dict['timing_since'][-1]
|
last_stats = self.logger.statsd_client.calls['timing_since'][-1]
|
||||||
self.assertEqual('memcached.incr.errors.timing',
|
self.assertEqual('memcached.incr.errors.timing',
|
||||||
last_stats[0][0])
|
last_stats[0][0])
|
||||||
self.assertEqual(last_stats[0][1], 4002.99)
|
self.assertEqual(last_stats[0][1], 4002.99)
|
||||||
@ -1451,7 +1465,7 @@ class TestMemcached(unittest.TestCase):
|
|||||||
self.assertIn("with key_prefix shard-updating-v2/acc, method incr, "
|
self.assertIn("with key_prefix shard-updating-v2/acc, method incr, "
|
||||||
"time_spent 1.0" % resp.split(), error_logs[0])
|
"time_spent 1.0" % resp.split(), error_logs[0])
|
||||||
self.assertIn('1.2.3.4:11211', memcache_client._errors)
|
self.assertIn('1.2.3.4:11211', memcache_client._errors)
|
||||||
self.assertEqual([4004.99], memcache_client._errors['1.2.3.4:11211'])
|
self.assertEqual([4005.99], memcache_client._errors['1.2.3.4:11211'])
|
||||||
|
|
||||||
|
|
||||||
class ExcConfigParser(object):
|
class ExcConfigParser(object):
|
||||||
|
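Across these memcached hunks the assertions switch from reading self.logger.log_dict['timing_since'] to reading self.logger.statsd_client.calls['timing_since']: the fake logger now delegates stats recording to a fake statsd client that stores every call as an (args, kwargs) tuple keyed by method name. A minimal sketch of that recording shape, inferred from the assertions and illustrative only (the real FakeStatsdClient in the test helpers has more to it):

    from collections import defaultdict


    class RecordingStatsdClient(object):
        # Hypothetical stand-in for the fake statsd client: every stats
        # method appends its raw (args, kwargs) to calls[method_name].
        def __init__(self):
            self.calls = defaultdict(list)

        def _record(self, name, *args, **kwargs):
            self.calls[name].append((args, kwargs))

        def timing_since(self, *args, **kwargs):
            # args == (metric, orig_time), which is why the assertions
            # above check calls['timing_since'][-1][0][0] for the metric
            # name and calls['timing_since'][-1][0][1] for the mocked
            # time.time() value
            self._record('timing_since', *args, **kwargs)

        def increment(self, *args, **kwargs):
            self._record('increment', *args, **kwargs)

        def update_stats(self, *args, **kwargs):
            self._record('update_stats', *args, **kwargs)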
@@ -884,9 +884,11 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
         # push to remote, and third node was missing (also maybe reconciler)
         self.assertTrue(2 < daemon.stats['rsync'] <= 3, daemon.stats['rsync'])
         self.assertEqual(
-            1, self.logger.get_stats_counts().get('reconciler_db_created'))
+            1, self.logger.statsd_client.get_stats_counts().get(
+                'reconciler_db_created'))
         self.assertFalse(
-            self.logger.get_stats_counts().get('reconciler_db_exists'))
+            self.logger.statsd_client.get_stats_counts().get(
+                'reconciler_db_exists'))
 
         # grab the rsynced instance of remote_broker
         remote_broker = self._get_broker('a', 'c', node_index=1)
@@ -909,9 +911,11 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
         self.logger.clear()
         reconciler = daemon.get_reconciler_broker(misplaced[0]['created_at'])
         self.assertFalse(
-            self.logger.get_stats_counts().get('reconciler_db_created'))
+            self.logger.statsd_client.get_stats_counts().get(
+                'reconciler_db_created'))
         self.assertEqual(
-            1, self.logger.get_stats_counts().get('reconciler_db_exists'))
+            1, self.logger.statsd_client.get_stats_counts().get(
+                'reconciler_db_exists'))
         # but it may not be on the same node as us anymore though...
         reconciler = self._get_broker(reconciler.account,
                                       reconciler.container, node_index=0)
@@ -1482,7 +1486,7 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
         self.assertEqual(0, daemon.stats['rsync'])
         self.assertEqual(1, daemon.stats['diff'])
         self.assertEqual({'diffs': 1},
-                         daemon.logger.get_increment_counts())
+                         daemon.logger.statsd_client.get_increment_counts())
 
         # update one shard range
         shard_ranges[1].update_meta(50, 50)
@@ -1494,7 +1498,7 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
         self.assertEqual(0, daemon.stats['rsync'])
         self.assertEqual(0, daemon.stats['diff'])
         self.assertEqual({'no_changes': 1},
-                         daemon.logger.get_increment_counts())
+                         daemon.logger.statsd_client.get_increment_counts())
 
         # now enable local broker for sharding
         own_sr = broker.enable_sharding(Timestamp.now())
@@ -1509,7 +1513,7 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
         self.assertEqual(0, daemon.stats['no_change'])
         self.assertEqual(0, daemon.stats['rsync'])
         self.assertEqual(0, daemon.stats['diff'])
-        self.assertFalse(daemon.logger.get_increments())
+        self.assertFalse(daemon.logger.statsd_client.get_increments())
 
         daemon = check_replicate(broker_ranges, broker, remote_broker)
         check_stats(daemon)
@@ -1614,7 +1618,8 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
         self.assertIn('Bad response 500', error_lines[0])
         self.assertFalse(error_lines[1:])
         self.assertEqual(1, daemon.stats['diff'])
-        self.assertEqual(1, daemon.logger.get_increment_counts()['diffs'])
+        self.assertEqual(
+            1, daemon.logger.statsd_client.get_increment_counts()['diffs'])
 
     def test_sync_shard_ranges_timeout_in_fetch(self):
         # verify that replication is not considered successful if
@@ -1652,9 +1657,11 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
         self.assertIn('ERROR syncing /', error_lines[0])
         self.assertFalse(error_lines[1:])
         self.assertEqual(0, daemon.stats['diff'])
-        self.assertNotIn('diffs', daemon.logger.get_increment_counts())
+        self.assertNotIn(
+            'diffs', daemon.logger.statsd_client.get_increment_counts())
         self.assertEqual(1, daemon.stats['failure'])
-        self.assertEqual(1, daemon.logger.get_increment_counts()['failures'])
+        self.assertEqual(
+            1, daemon.logger.statsd_client.get_increment_counts()['failures'])
 
     def test_sync_shard_ranges_none_to_sync(self):
         # verify that merge_shard_ranges is not sent if there are no shard
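The replicator assertions above reach the aggregation helpers (get_stats_counts, get_increment_counts, get_increments) through logger.statsd_client instead of the logger itself. Assuming the recording shape sketched earlier, a plausible reading of what the counting helper computes (a sketch, not the actual implementation):

    def get_increment_counts(self):
        # Fold recorded increment/decrement/update_stats calls into net
        # per-metric counters. Only metrics that were actually emitted
        # become keys, so counts.get('never_emitted') returns None and
        # assertFalse(...) passes for them.
        counts = {}
        for (metric,), _kwargs in self.calls.get('increment', ()):
            counts[metric] = counts.get(metric, 0) + 1
        for (metric,), _kwargs in self.calls.get('decrement', ()):
            counts[metric] = counts.get(metric, 0) - 1
        for (metric, step), _kwargs in self.calls.get('update_stats', ()):
            counts[metric] = counts.get(metric, 0) + step
        return counts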
@@ -5530,7 +5530,7 @@ class TestContainerController(unittest.TestCase):
             environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
         with mock.patch('time.time',
                         mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0,
-                                                    10002.0])), \
+                                                    10002.0, 10002.0])), \
                 mock.patch('os.getpid', mock.MagicMock(return_value=1234)):
             req.get_response(self.controller)
         info_lines = self.controller.logger.get_lines_for_level('info')
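The only change in this hunk is one extra 10002.0 in the time.time side_effect list. A mock configured with side_effect yields one value per call and raises StopIteration once the list is exhausted, so when the code under test gains an extra time.time() read (here, presumably one more read on the stats-timing path of the new fake client), the canned list has to grow by one value too. For example:

    from unittest import mock

    fake_time = mock.MagicMock(side_effect=[10000.0, 10001.0])
    fake_time()  # -> 10000.0
    fake_time()  # -> 10001.0
    # a third call would raise StopIteration, hence the extra 10002.0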
@@ -557,7 +557,7 @@ class TestSharder(BaseTestSharder):
                     'failure': 2,
                     'completed': 1}
         self._assert_stats(expected, sharder, 'visited')
-        counts = sharder.logger.get_stats_counts()
+        counts = sharder.logger.statsd_client.get_stats_counts()
         self.assertEqual(2, counts.get('visited_success'))
         self.assertEqual(1, counts.get('visited_failure'))
         self.assertIsNone(counts.get('visited_completed'))
@@ -572,7 +572,7 @@ class TestSharder(BaseTestSharder):
         self._assert_stats({'found': 4}, sharder, 'scanned')
         self._assert_stats({'placed': 456}, sharder, 'misplaced')
         self.assertEqual({'misplaced_placed': 456},
-                         sharder.logger.get_stats_counts())
+                         sharder.logger.statsd_client.get_stats_counts())
 
     def test_run_forever(self):
         conf = {'recon_cache_path': self.tempdir,
@@ -1569,9 +1569,11 @@ class TestSharder(BaseTestSharder):
         self.assertIsInstance(stats['max_time'], float)
         self.assertLessEqual(stats['min_time'], stats['max_time'])
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('cleaved_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_exists'))
         self.assertEqual(SHARDING, broker.get_db_state())
         sharder._replicate_object.assert_called_once_with(
             0, expected_shard_dbs[0], 0)
@@ -1634,9 +1636,11 @@ class TestSharder(BaseTestSharder):
                     'db_created': 1, 'db_exists': 0}
         self._assert_stats(expected, sharder, 'cleaved')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('cleaved_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_exists'))
 
         # cleaving state is unchanged
         updated_shard_ranges = broker.get_shard_ranges()
@@ -1673,9 +1677,11 @@ class TestSharder(BaseTestSharder):
         self.assertIsInstance(stats['max_time'], float)
         self.assertLessEqual(stats['min_time'], stats['max_time'])
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_created'))
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('cleaved_db_exists'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_exists'))
 
         self.assertEqual(SHARDING, broker.get_db_state())
         sharder._replicate_object.assert_has_calls(
@@ -1739,9 +1745,11 @@ class TestSharder(BaseTestSharder):
         self.assertIsInstance(stats['max_time'], float)
         self.assertLessEqual(stats['min_time'], stats['max_time'])
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('cleaved_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_exists'))
 
         self.assertEqual(SHARDING, broker.get_db_state())
         sharder._replicate_object.assert_called_once_with(
@@ -1812,9 +1820,11 @@ class TestSharder(BaseTestSharder):
         self.assertIsInstance(stats['max_time'], float)
         self.assertLessEqual(stats['min_time'], stats['max_time'])
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('cleaved_db_created'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('cleaved_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'cleaved_db_exists'))
 
         sharder._replicate_object.assert_called_once_with(
             0, expected_shard_dbs[4], 0)
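A small point about the cleaving assertions: emitted counters are checked with assertEqual(1, ...get('cleaved_db_created')) while absent ones are checked with assertFalse(...). dict.get returns None for a metric that was never emitted, so assertFalse covers both "no such metric" and "a count of zero". Roughly:

    counts = {'cleaved_db_created': 1}  # e.g. from get_stats_counts()
    assert counts.get('cleaved_db_created') == 1
    assert not counts.get('cleaved_db_exists')  # missing key -> None -> falsy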
@@ -2466,19 +2476,22 @@ class TestSharder(BaseTestSharder):
         self.assertEqual('', context.cursor)
         self.assertEqual(10, context.cleave_to_row)
         self.assertEqual(12, context.max_row)  # note that max row increased
-        self.assertTrue(self.logger.log_dict['timing_since'])
-        self.assertEqual('sharder.sharding.move_misplaced',
-                         self.logger.log_dict['timing_since'][-3][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-3][0][1], 0)
-        self.assertEqual('sharder.sharding.set_state',
-                         self.logger.log_dict['timing_since'][-2][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-2][0][1], 0)
-        self.assertEqual('sharder.sharding.cleave',
-                         self.logger.log_dict['timing_since'][-1][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-1][0][1], 0)
-        for call in self.logger.log_dict['timing_since']:
-            self.assertNotIsInstance(call[0][1], Timestamp)
-            self.assertIsInstance(call[0][1], float)
+        self.assertTrue(self.logger.statsd_client.calls['timing_since'])
+        self.assertEqual(
+            'sharder.sharding.move_misplaced',
+            self.logger.statsd_client.calls['timing_since'][-3][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-3][0][1], 0)
+        self.assertEqual(
+            'sharder.sharding.set_state',
+            self.logger.statsd_client.calls['timing_since'][-2][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-2][0][1], 0)
+        self.assertEqual(
+            'sharder.sharding.cleave',
+            self.logger.statsd_client.calls['timing_since'][-1][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-1][0][1], 0)
         lines = sharder.logger.get_lines_for_level('info')
         self.assertEqual(
             ["Kick off container cleaving, own shard range in state "
@@ -2524,22 +2537,27 @@ class TestSharder(BaseTestSharder):
             'Completed cleaving, DB set to sharded state, path: a/c, db: %s'
             % broker.db_file, lines[1:])
         self.assertFalse(sharder.logger.get_lines_for_level('warning'))
-        self.assertTrue(self.logger.log_dict['timing_since'])
-        self.assertEqual('sharder.sharding.move_misplaced',
-                         self.logger.log_dict['timing_since'][-4][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-4][0][1], 0)
-        self.assertEqual('sharder.sharding.cleave',
-                         self.logger.log_dict['timing_since'][-3][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-3][0][1], 0)
-        self.assertEqual('sharder.sharding.completed',
-                         self.logger.log_dict['timing_since'][-2][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-2][0][1], 0)
-        self.assertEqual('sharder.sharding.send_sr',
-                         self.logger.log_dict['timing_since'][-1][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-1][0][1], 0)
-        for call in self.logger.log_dict['timing_since']:
-            self.assertNotIsInstance(call[0][1], Timestamp)
-            self.assertIsInstance(call[0][1], float)
+        self.assertTrue(self.logger.statsd_client.calls['timing_since'])
+        self.assertEqual(
+            'sharder.sharding.move_misplaced',
+            self.logger.statsd_client.calls['timing_since'][-4][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-4][0][1], 0)
+        self.assertEqual(
+            'sharder.sharding.cleave',
+            self.logger.statsd_client.calls['timing_since'][-3][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-3][0][1], 0)
+        self.assertEqual(
+            'sharder.sharding.completed',
+            self.logger.statsd_client.calls['timing_since'][-2][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-2][0][1], 0)
+        self.assertEqual(
+            'sharder.sharding.send_sr',
+            self.logger.statsd_client.calls['timing_since'][-1][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-1][0][1], 0)
 
     def test_cleave_timing_metrics(self):
         broker = self._make_broker()
@@ -2578,19 +2596,27 @@ class TestSharder(BaseTestSharder):
             'Completed cleaving, DB set to sharded state, path: a/c, db: %s'
             % broker.db_file, lines[1:])
 
-        self.assertTrue(self.logger.log_dict['timing_since'])
-        self.assertEqual('sharder.sharding.move_misplaced',
-                         self.logger.log_dict['timing_since'][-4][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-4][0][1], 0)
-        self.assertEqual('sharder.sharding.cleave',
-                         self.logger.log_dict['timing_since'][-3][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-3][0][1], 0)
-        self.assertEqual('sharder.sharding.completed',
-                         self.logger.log_dict['timing_since'][-2][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-2][0][1], 0)
-        self.assertEqual('sharder.sharding.send_sr',
-                         self.logger.log_dict['timing_since'][-1][0][0])
-        self.assertGreater(self.logger.log_dict['timing_since'][-1][0][1], 0)
+        self.assertTrue(self.logger.statsd_client.calls['timing_since'])
+        self.assertEqual(
+            'sharder.sharding.move_misplaced',
+            self.logger.statsd_client.calls['timing_since'][-4][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-4][0][1], 0)
+        self.assertEqual(
+            'sharder.sharding.cleave',
+            self.logger.statsd_client.calls['timing_since'][-3][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-3][0][1], 0)
+        self.assertEqual(
+            'sharder.sharding.completed',
+            self.logger.statsd_client.calls['timing_since'][-2][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-2][0][1], 0)
+        self.assertEqual(
+            'sharder.sharding.send_sr',
+            self.logger.statsd_client.calls['timing_since'][-1][0][0])
+        self.assertGreater(
+            self.logger.statsd_client.calls['timing_since'][-1][0][1], 0)
 
         # check shard ranges were updated to ACTIVE
         self.assertEqual([ShardRange.ACTIVE] * 2,
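The negative indexes in these timing assertions rely on the fake client appending calls in emission order, so [-1] is the most recent timing_since call and each entry unpacks as (args, kwargs). Inside one of these test methods, reading the tail of the list would look roughly like:

    calls = self.logger.statsd_client.calls['timing_since']
    metric, start_time = calls[-1][0]  # args of the latest call
    # e.g. metric == 'sharder.sharding.cleave'; start_time is the raw
    # time.time() value handed to timing_since, a plain float > 0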
@@ -3822,15 +3848,20 @@ class TestSharder(BaseTestSharder):
                           'db_created': 0, 'db_exists': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_found'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_found'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_placed'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_created'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_exists'))
 
         # sharding - no misplaced objects
         self.assertTrue(broker.set_sharding_state())
@@ -3839,15 +3870,20 @@ class TestSharder(BaseTestSharder):
         sharder._replicate_object.assert_not_called()
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_found'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_found'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_placed'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_created'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_exists'))
 
         # pretend we cleaved up to end of second shard range
         context = CleavingContext.load(broker)
@@ -3858,15 +3894,20 @@ class TestSharder(BaseTestSharder):
         sharder._replicate_object.assert_not_called()
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_found'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_found'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_placed'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_created'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_exists'))
 
         # sharding - misplaced objects
         for obj in objects:
@@ -3879,15 +3920,20 @@ class TestSharder(BaseTestSharder):
         sharder._replicate_object.assert_not_called()
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_found'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_found'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_placed'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_created'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_exists'))
         self.assertFalse(os.path.exists(expected_shard_dbs[0]))
         self.assertFalse(os.path.exists(expected_shard_dbs[1]))
         self.assertFalse(os.path.exists(expected_shard_dbs[2]))
@@ -3907,13 +3953,17 @@ class TestSharder(BaseTestSharder):
                           'db_created': 1, 'db_exists': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         self.assertEqual(
-            2, sharder.logger.get_stats_counts()['misplaced_placed'])
+            2, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_placed'])
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_db_created'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_db_created'])
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_exists'))
 
         # check misplaced objects were moved
         self._check_objects(objects[:2], expected_shard_dbs[1])
@@ -3950,13 +4000,17 @@ class TestSharder(BaseTestSharder):
         )
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         self.assertEqual(
-            4, sharder.logger.get_stats_counts()['misplaced_placed'])
+            4, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_placed'])
         self.assertEqual(
-            3, sharder.logger.get_stats_counts()['misplaced_db_created'])
+            3, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_db_created'])
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_exists'))
 
         # check misplaced objects were moved
         self._check_objects(new_objects, expected_shard_dbs[0])
@@ -3977,13 +4031,17 @@ class TestSharder(BaseTestSharder):
                           'db_created': 0, 'db_exists': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_found'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_found'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_placed'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_created'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_created'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_db_exists'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_db_exists'))
 
         # and then more misplaced updates arrive
         newer_objects = [
@@ -4012,13 +4070,17 @@ class TestSharder(BaseTestSharder):
                           'db_created': 1, 'db_exists': 1}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         self.assertEqual(
-            3, sharder.logger.get_stats_counts()['misplaced_placed'])
+            3, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_placed'])
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_db_created'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_db_created'])
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_db_exists'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_db_exists'])
 
         # check new misplaced objects were moved
         self._check_objects(newer_objects[:1] + new_objects,
@@ -4212,7 +4274,8 @@ class TestSharder(BaseTestSharder):
                           'found': 1, 'placed': 4, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         # check misplaced objects were moved
         self._check_objects(objects[:2], expected_dbs[1])
         self._check_objects(objects[2:3], expected_dbs[2])
@@ -4245,7 +4308,8 @@ class TestSharder(BaseTestSharder):
                           'placed': 4, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         # check misplaced objects were moved to shard dbs
         self._check_objects(objects[:2], expected_dbs[1])
         self._check_objects(objects[2:3], expected_dbs[2])
@@ -4278,7 +4342,8 @@ class TestSharder(BaseTestSharder):
                           'placed': 4, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         # check misplaced objects were moved to shard dbs
         self._check_objects(objects[:2], expected_dbs[1])
         self._check_objects(objects[2:3], expected_dbs[2])
@@ -4311,7 +4376,8 @@ class TestSharder(BaseTestSharder):
                           'placed': 4, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         # check misplaced objects were moved to shard dbs
         self._check_objects(objects[:2], expected_dbs[1])
         self._check_objects(objects[2:3], expected_dbs[2])
@@ -4365,15 +4431,20 @@ class TestSharder(BaseTestSharder):
                           'found': 0, 'placed': 0, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('misplaced_success'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_success'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_failure'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_failure'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_found'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_found'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_placed'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
         self.assertFalse(sharder.logger.get_lines_for_level('warning'))
 
         # now put objects
@@ -4401,15 +4472,20 @@ class TestSharder(BaseTestSharder):
                           'found': 1, 'placed': 2, 'unplaced': 2}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_success'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_success'))
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('misplaced_failure'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_failure'))
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         self.assertEqual(
-            2, sharder.logger.get_stats_counts().get('misplaced_placed'))
+            2, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertEqual(
-            2, sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            2, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
         # some misplaced objects could not be moved...
         warning_lines = sharder.logger.get_lines_for_level('warning')
         self.assertIn(
@@ -4445,15 +4521,20 @@ class TestSharder(BaseTestSharder):
                           'found': 1, 'placed': 2, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('misplaced_success'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_success'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_failure'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_failure'))
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         self.assertEqual(
-            2, sharder.logger.get_stats_counts().get('misplaced_placed'))
+            2, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
         self.assertFalse(sharder.logger.get_lines_for_level('warning'))
 
         # check misplaced objects were moved
@@ -4477,16 +4558,21 @@ class TestSharder(BaseTestSharder):
                           'found': 0, 'placed': 0, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('misplaced_success'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_success'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_failure'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_failure'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_found'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_found'))
         self.assertFalse(sharder.logger.get_lines_for_level('warning'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_placed'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
 
         # and then more misplaced updates arrive
         new_objects = [
@@ -4520,15 +4606,20 @@ class TestSharder(BaseTestSharder):
                           'found': 1, 'placed': 2, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts().get('misplaced_success'))
+            1, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_success'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_failure'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_failure'))
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         self.assertEqual(
-            2, sharder.logger.get_stats_counts().get('misplaced_placed'))
+            2, sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_placed'))
         self.assertFalse(
-            sharder.logger.get_stats_counts().get('misplaced_unplaced'))
+            sharder.logger.statsd_client.get_stats_counts().get(
+                'misplaced_unplaced'))
         self.assertFalse(sharder.logger.get_lines_for_level('warning'))
 
         # check new misplaced objects were moved
@@ -4615,7 +4706,8 @@ class TestSharder(BaseTestSharder):
                           'found': 1, 'placed': 1, 'unplaced': 2}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         warning_lines = sharder.logger.get_lines_for_level('warning')
         self.assertIn(
             'Failed to find destination for at least 2 misplaced objects',
@@ -4650,7 +4742,8 @@ class TestSharder(BaseTestSharder):
                           'found': 1, 'placed': 2, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         self.assertFalse(sharder.logger.get_lines_for_level('warning'))
 
         # check misplaced objects were moved
@@ -4701,7 +4794,8 @@ class TestSharder(BaseTestSharder):
                           'found': 1, 'placed': 5, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
         self.assertFalse(sharder.logger.get_lines_for_level('warning'))
 
         # check *all* the misplaced objects were moved
@@ -4757,7 +4851,8 @@ class TestSharder(BaseTestSharder):
                           'found': 1, 'placed': 2, 'unplaced': 0}
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
 
         # check new misplaced objects were moved
         self._check_objects(objects[:1], expected_shard_dbs[0])
@@ -4784,7 +4879,8 @@ class TestSharder(BaseTestSharder):
         )
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
 
         # check older misplaced objects were not merged to shard brokers
         self._check_objects(objects[:1], expected_shard_dbs[0])
@@ -4823,7 +4919,8 @@ class TestSharder(BaseTestSharder):
         )
         self._assert_stats(expected_stats, sharder, 'misplaced')
         self.assertEqual(
-            1, sharder.logger.get_stats_counts()['misplaced_found'])
+            1, sharder.logger.statsd_client.get_stats_counts()[
+                'misplaced_found'])
 
         # check only the newer misplaced object was moved
         self._check_objects([newer_object], expected_shard_dbs[0])
@ -140,9 +140,9 @@ def _create_test_rings(path, next_part_power=None):
|
|||||||
|
|
||||||
def count_stats(logger, key, metric):
|
def count_stats(logger, key, metric):
|
||||||
count = 0
|
count = 0
|
||||||
for record in logger.log_dict[key]:
|
for record in logger.statsd_client.calls[key]:
|
||||||
log_args, log_kwargs = record
|
stat_args, stat_kwargs = record
|
||||||
m = log_args[0]
|
m = stat_args[0]
|
||||||
if re.match(metric, m):
|
if re.match(metric, m):
|
||||||
count += 1
|
count += 1
|
||||||
return count
|
return count
|
||||||
@@ -1704,7 +1704,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
                 part_info)
             found_jobs.extend(jobs)
             for job in jobs:
-                self.logger._clear()
+                self.logger.clear()
                 node_count = len(job['sync_to'])
                 rehash_count = node_count * rehash_per_job_type[
                     job['job_type']]
@@ -1745,7 +1745,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
                 part_info)
             found_jobs.extend(jobs)
             for job in jobs:
-                self.logger._clear()
+                self.logger.clear()
                 self.reconstructor.process_job(job)
                 for line in self.logger.get_lines_for_level('error'):
                     self.assertIn('responded as unmounted', line)
@@ -1778,7 +1778,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
                 part_info)
             found_jobs.extend(jobs)
             for job in jobs:
-                self.logger._clear()
+                self.logger.clear()
                 self.reconstructor.process_job(job)
                 for line in self.logger.get_lines_for_level('error'):
                     self.assertIn('Invalid response 400', line)
@@ -1810,7 +1810,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
                 part_info)
             found_jobs.extend(jobs)
             for job in jobs:
-                self.logger._clear()
+                self.logger.clear()
                 self.reconstructor.process_job(job)
                 for line in self.logger.get_lines_for_level('error'):
                     self.assertIn('Timeout (Nones)', line)
@@ -1881,9 +1881,9 @@ class TestObjectReplicator(unittest.TestCase):
 
         # attempt to 16 times but succeeded only 15 times due to Timeout
         suffix_hashes = sum(
-            count for (metric, count), _junk in
-            replicator.logger.logger.log_dict['update_stats']
-            if metric == 'suffix.hashes')
+            call[0][1] for call in
+            replicator.logger.logger.statsd_client.calls['update_stats']
+            if call[0][0] == 'suffix.hashes')
         self.assertEqual(15, suffix_hashes)
 
     def test_run(self):
@@ -8155,7 +8155,7 @@ class TestObjectController(BaseTestCase):
         self.object_controller.logger = self.logger
         with mock.patch('time.time',
                         side_effect=[10000.0, 10000.0, 10001.0, 10002.0,
-                                     10002.0]), \
+                                     10002.0, 10002.0]), \
                 mock.patch('os.getpid', return_value=1234):
             req.get_response(self.object_controller)
         self.assertEqual(
@@ -461,7 +461,7 @@ class TestObjectUpdater(unittest.TestCase):
         self.assertEqual([
             mock.call(self.devices_dir, 'sda1', True),
         ], mock_check_drive.mock_calls)
-        self.assertEqual(ou.logger.get_increment_counts(), {})
+        self.assertEqual(ou.logger.statsd_client.get_increment_counts(), {})
 
     @mock.patch('swift.obj.updater.dump_recon_cache')
     @mock.patch.object(object_updater, 'check_drive')
@@ -525,16 +525,16 @@ class TestObjectUpdater(unittest.TestCase):
         ou.run_once()
         self.assertTrue(not os.path.exists(older_op_path))
         self.assertTrue(os.path.exists(op_path))
-        self.assertEqual(ou.logger.get_increment_counts(),
+        self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
                          {'failures': 1, 'unlinks': 1})
         self.assertIsNone(pickle.load(open(op_path, 'rb')).get('successes'))
         self.assertEqual(
             ['ERROR with remote server 127.0.0.1:67890/sda1: '
              'Connection refused'] * 3,
             ou.logger.get_lines_for_level('error'))
-        self.assertEqual([args for args, kw in ou.logger.log_dict['timing']], [
-            ('updater.timing.status.500', mock.ANY),
-        ] * 3)
+        self.assertEqual(
+            sorted(ou.logger.statsd_client.calls['timing']),
+            sorted([(('updater.timing.status.500', mock.ANY), {}), ] * 3))
         ou.logger.clear()
 
         bindsock = listen_zero()
@@ -590,17 +590,17 @@ class TestObjectUpdater(unittest.TestCase):
         if err:
             raise err
         self.assertTrue(os.path.exists(op_path))
-        self.assertEqual(ou.logger.get_increment_counts(),
+        self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
                          {'failures': 1})
         self.assertEqual([0],
                          pickle.load(open(op_path, 'rb')).get('successes'))
         self.assertEqual([], ou.logger.get_lines_for_level('error'))
         self.assertEqual(
-            sorted([args for args, kw in ou.logger.log_dict['timing']]),
+            sorted(ou.logger.statsd_client.calls['timing']),
             sorted([
-                ('updater.timing.status.201', mock.ANY),
-                ('updater.timing.status.500', mock.ANY),
-                ('updater.timing.status.500', mock.ANY),
+                (('updater.timing.status.201', mock.ANY), {}),
+                (('updater.timing.status.500', mock.ANY), {}),
+                (('updater.timing.status.500', mock.ANY), {}),
             ]))
 
         # only 1/2 updates succeeds
@@ -611,16 +611,16 @@ class TestObjectUpdater(unittest.TestCase):
         if err:
             raise err
         self.assertTrue(os.path.exists(op_path))
-        self.assertEqual(ou.logger.get_increment_counts(),
+        self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
                          {'failures': 1})
         self.assertEqual([0, 2],
                          pickle.load(open(op_path, 'rb')).get('successes'))
         self.assertEqual([], ou.logger.get_lines_for_level('error'))
         self.assertEqual(
-            sorted([args for args, kw in ou.logger.log_dict['timing']]),
+            sorted(ou.logger.statsd_client.calls['timing']),
             sorted([
-                ('updater.timing.status.404', mock.ANY),
-                ('updater.timing.status.201', mock.ANY),
+                (('updater.timing.status.404', mock.ANY), {}),
+                (('updater.timing.status.201', mock.ANY), {}),
             ]))
 
         # final update has Timeout
@@ -630,7 +630,7 @@ class TestObjectUpdater(unittest.TestCase):
             mock_connect.return_value.getresponse.side_effect = exc
             ou.run_once()
         self.assertTrue(os.path.exists(op_path))
-        self.assertEqual(ou.logger.get_increment_counts(),
+        self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
                          {'failures': 1})
         self.assertEqual([0, 2],
                          pickle.load(open(op_path, 'rb')).get('successes'))
@@ -638,9 +638,10 @@ class TestObjectUpdater(unittest.TestCase):
         self.assertIn(
             'Timeout waiting on remote server 127.0.0.1:%d/sda1: 99 seconds'
             % bindsock.getsockname()[1], ou.logger.get_lines_for_level('info'))
-        self.assertEqual([args for args, kw in ou.logger.log_dict['timing']], [
-            ('updater.timing.status.499', mock.ANY),
-        ])
+        self.assertEqual(
+            sorted(ou.logger.statsd_client.calls['timing']),
+            sorted([
+                (('updater.timing.status.499', mock.ANY), {})]))
 
         # final update has ConnectionTimeout
         ou.logger.clear()
@@ -649,7 +650,7 @@ class TestObjectUpdater(unittest.TestCase):
            mock_connect.return_value.getresponse.side_effect = exc
            ou.run_once()
         self.assertTrue(os.path.exists(op_path))
-        self.assertEqual(ou.logger.get_increment_counts(),
+        self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
                          {'failures': 1})
         self.assertEqual([0, 2],
                          pickle.load(open(op_path, 'rb')).get('successes'))
@@ -657,9 +658,11 @@ class TestObjectUpdater(unittest.TestCase):
         self.assertIn(
             'Timeout connecting to remote server 127.0.0.1:%d/sda1: 9 seconds'
             % bindsock.getsockname()[1], ou.logger.get_lines_for_level('info'))
-        self.assertEqual([args for args, kw in ou.logger.log_dict['timing']], [
-            ('updater.timing.status.500', mock.ANY),
-        ])
+        self.assertEqual(
+            sorted(ou.logger.statsd_client.calls['timing']),
+            sorted([
+                (('updater.timing.status.500', mock.ANY), {})
+            ]))
 
         # final update succeeds
         event = spawn(accept, [201])
@@ -676,11 +679,13 @@ class TestObjectUpdater(unittest.TestCase):
         self.assertTrue(os.path.exists(os.path.dirname(os.path.dirname(
             op_path))))
         self.assertEqual([], ou.logger.get_lines_for_level('error'))
-        self.assertEqual(ou.logger.get_increment_counts(),
+        self.assertEqual(ou.logger.statsd_client.get_increment_counts(),
                          {'unlinks': 1, 'successes': 1})
-        self.assertEqual([args for args, kw in ou.logger.log_dict['timing']], [
-            ('updater.timing.status.201', mock.ANY),
-        ])
+        self.assertEqual(
+            sorted(ou.logger.statsd_client.calls['timing']),
+            sorted([
+                (('updater.timing.status.201', mock.ANY), {}),
+            ]))
 
     def test_obj_put_legacy_updates(self):
         ts = (normalize_timestamp(t) for t in
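Note the shape the rewritten timing assertions above expect: each captured
entry is the full (args, kwargs) pair rather than a bare args tuple, which is
why every expected value gains an enclosing tuple and a trailing empty dict.
With the illustrative sketch from earlier (values hypothetical):

    client = CapturingStatsdClient()
    client.timing('updater.timing.status.201', 12.5)
    assert client.calls['timing'] == [
        (('updater.timing.status.201', 12.5), {})]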
@@ -698,7 +703,7 @@ class TestObjectUpdater(unittest.TestCase):
         account, container, obj = 'a', 'c', 'o'
         # write an async
         for op in ('PUT', 'DELETE'):
-            self.logger._clear()
+            self.logger.clear()
             daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
             dfmanager = DiskFileManager(conf, daemon.logger)
             # don't include storage-policy-index in headers_out pickle
@@ -728,9 +733,9 @@ class TestObjectUpdater(unittest.TestCase):
             self.assertEqual(method, op)
             self.assertEqual(headers['X-Backend-Storage-Policy-Index'],
                              str(int(policy)))
-            self.assertEqual(daemon.logger.get_increment_counts(),
-                             {'successes': 1, 'unlinks': 1,
-                              'async_pendings': 1})
+            self.assertEqual(
+                daemon.logger.statsd_client.get_increment_counts(),
+                {'successes': 1, 'unlinks': 1, 'async_pendings': 1})
 
     def _write_async_update(self, dfmanager, timestamp, policy,
                             headers=None, container_path=None):
@@ -791,7 +796,7 @@ class TestObjectUpdater(unittest.TestCase):
             self.assertEqual(method, 'PUT')
             self.assertDictEqual(expected, headers)
         self.assertEqual(
-            daemon.logger.get_increment_counts(),
+            daemon.logger.statsd_client.get_increment_counts(),
             {'successes': 1, 'unlinks': 1, 'async_pendings': 1})
         self.assertFalse(os.listdir(async_dir))
         daemon.logger.clear()
@@ -908,7 +913,7 @@ class TestObjectUpdater(unittest.TestCase):
         self.assertEqual(
             {'redirects': 1, 'successes': 1,
              'unlinks': 1, 'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         self.assertFalse(os.listdir(async_dir))  # no async file
 
     def test_obj_put_async_root_update_redirected_previous_success(self):
@@ -940,7 +945,7 @@ class TestObjectUpdater(unittest.TestCase):
             [req['path'] for req in conn.requests])
         self.assertEqual(
             {'failures': 1, 'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         async_path, async_data = self._check_async_file(async_dir)
         self.assertEqual(dict(orig_async_data, successes=[1]), async_data)
 
@@ -968,7 +973,7 @@ class TestObjectUpdater(unittest.TestCase):
         self.assertEqual(
             {'redirects': 1, 'successes': 1, 'failures': 1, 'unlinks': 1,
              'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         self.assertFalse(os.listdir(async_dir))  # no async file
 
     def _check_async_file(self, async_dir):
@@ -1016,7 +1021,7 @@ class TestObjectUpdater(unittest.TestCase):
             [req['path'] for req in conn.requests])
         self.assertEqual(
             {'failures': 1, 'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         # async file still intact
         async_path, async_data = self._check_async_file(async_dir)
         self.assertEqual(orig_async_path, async_path)
@@ -1095,7 +1100,7 @@ class TestObjectUpdater(unittest.TestCase):
             [req['path'] for req in conn.requests])
         self.assertEqual(
             {'redirects': 2, 'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         # update failed, we still have pending file with most recent redirect
         # response Location header value added to data
         async_path, async_data = self._check_async_file(async_dir)
@@ -1121,7 +1126,7 @@ class TestObjectUpdater(unittest.TestCase):
         self.assertEqual(
             {'redirects': 2, 'successes': 1, 'unlinks': 1,
              'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         self.assertFalse(os.listdir(async_dir))  # no async file
 
     def test_obj_put_async_update_redirection_loop(self):
@@ -1169,7 +1174,7 @@ class TestObjectUpdater(unittest.TestCase):
             [req['path'] for req in conn.requests])
         self.assertEqual(
             {'redirects': 2, 'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         # update failed, we still have pending file with most recent redirect
         # response Location header value added to data
         async_path, async_data = self._check_async_file(async_dir)
@@ -1201,7 +1206,7 @@ class TestObjectUpdater(unittest.TestCase):
             [req['path'] for req in conn.requests])
         self.assertEqual(
             {'redirects': 4, 'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         # update failed, we still have pending file with most recent redirect
         # response Location header value from root added to persisted data
         async_path, async_data = self._check_async_file(async_dir)
@@ -1231,7 +1236,7 @@ class TestObjectUpdater(unittest.TestCase):
             [req['path'] for req in conn.requests])
         self.assertEqual(
             {'redirects': 6, 'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         # update failed, we still have pending file, but container_path is None
         # because most recent redirect location was a repeat
         async_path, async_data = self._check_async_file(async_dir)
@@ -1255,7 +1260,7 @@ class TestObjectUpdater(unittest.TestCase):
         self.assertEqual(
             {'redirects': 6, 'successes': 1, 'unlinks': 1,
              'async_pendings': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         self.assertFalse(os.listdir(async_dir))  # no async file
 
     def test_obj_update_quarantine(self):
@@ -1287,7 +1292,7 @@ class TestObjectUpdater(unittest.TestCase):
 
         self.assertEqual(
             {'quarantines': 1},
-            daemon.logger.get_increment_counts())
+            daemon.logger.statsd_client.get_increment_counts())
         self.assertFalse(os.listdir(async_dir))  # no asyncs
 
     def test_obj_update_gone_missing(self):
@@ -1319,7 +1324,8 @@ class TestObjectUpdater(unittest.TestCase):
         with mocked_http_conn():
             with mock.patch('swift.obj.updater.dump_recon_cache'):
                 daemon._load_update(self.sda1, op_path)
-        self.assertEqual({}, daemon.logger.get_increment_counts())
+        self.assertEqual(
+            {}, daemon.logger.statsd_client.get_increment_counts())
         self.assertEqual(os.listdir(async_dir), [ohash[-3:]])
         self.assertFalse(os.listdir(odir))
 
@@ -1399,7 +1405,7 @@ class TestObjectUpdater(unittest.TestCase):
             info_lines[-1])
         self.assertEqual({'skips': 9, 'successes': 2, 'unlinks': 2,
                           'deferrals': 9},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
 
     @mock.patch('swift.obj.updater.dump_recon_cache')
     def test_per_container_rate_limit_unlimited(self, mock_recon):
@@ -1437,7 +1443,7 @@ class TestObjectUpdater(unittest.TestCase):
             '0 errors, 0 redirects, 0 skips, 0 deferrals, 0 drains',
             info_lines[-1])
         self.assertEqual({'successes': 11, 'unlinks': 11},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
 
     @mock.patch('swift.obj.updater.dump_recon_cache')
     def test_per_container_rate_limit_some_limited(self, mock_recon):
@@ -1506,7 +1512,7 @@ class TestObjectUpdater(unittest.TestCase):
             info_lines[-1])
         self.assertEqual({'skips': 2, 'successes': 2, 'unlinks': 2,
                           'deferrals': 2},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
 
     @mock.patch('swift.obj.updater.dump_recon_cache')
     def test_per_container_rate_limit_defer_2_skip_1(self, mock_recon):
@@ -1556,7 +1562,8 @@ class TestObjectUpdater(unittest.TestCase):
 
         def fake_get_time(bucket_iter):
             captured_skips_stats.append(
-                daemon.logger.get_increment_counts().get('skips', 0))
+                daemon.logger.statsd_client.get_increment_counts().get(
+                    'skips', 0))
             captured_queues.append(list(bucket_iter.buckets[0].deque))
             # make each update delay before the iter being called again
             now[0] += latencies.pop(0)
@@ -1623,7 +1630,7 @@ class TestObjectUpdater(unittest.TestCase):
             info_lines[-1])
         self.assertEqual(
             {'skips': 1, 'successes': 3, 'unlinks': 3, 'deferrals': 2,
-             'drains': 1}, self.logger.get_increment_counts())
+             'drains': 1}, self.logger.statsd_client.get_increment_counts())
 
     @mock.patch('swift.obj.updater.dump_recon_cache')
     def test_per_container_rate_limit_defer_3_skip_1(self, mock_recon):
@@ -1673,7 +1680,8 @@ class TestObjectUpdater(unittest.TestCase):
 
         def fake_get_time(bucket_iter):
             captured_skips_stats.append(
-                daemon.logger.get_increment_counts().get('skips', 0))
+                daemon.logger.statsd_client.get_increment_counts().get(
+                    'skips', 0))
             captured_queues.append(list(bucket_iter.buckets[0].deque))
             # make each update delay before the iter being called again
             now[0] += latencies.pop(0)
@@ -1743,7 +1751,7 @@ class TestObjectUpdater(unittest.TestCase):
             info_lines[-1])
         self.assertEqual(
             {'skips': 1, 'successes': 4, 'unlinks': 4, 'deferrals': 3,
-             'drains': 2}, self.logger.get_increment_counts())
+             'drains': 2}, self.logger.statsd_client.get_increment_counts())
 
     @mock.patch('swift.obj.updater.dump_recon_cache')
     def test_per_container_rate_limit_unsent_deferrals(self, mock_recon):
@@ -1799,7 +1807,8 @@ class TestObjectUpdater(unittest.TestCase):
             if not captured_skips_stats:
                 daemon.begin = now[0]
             captured_skips_stats.append(
-                daemon.logger.get_increment_counts().get('skips', 0))
+                daemon.logger.statsd_client.get_increment_counts().get(
+                    'skips', 0))
             captured_queues.append(list(bucket_iter.buckets[0].deque))
             # insert delay each time iter is called
             now[0] += latencies.pop(0)
@@ -1870,8 +1879,9 @@ class TestObjectUpdater(unittest.TestCase):
             info_lines[-1])
         self.assertEqual(
             {'successes': 5, 'unlinks': 5, 'deferrals': 4, 'drains': 2},
-            self.logger.get_increment_counts())
-        self.assertEqual([('skips', 2)], self.logger.get_update_stats())
+            self.logger.statsd_client.get_increment_counts())
+        self.assertEqual(
+            2, self.logger.statsd_client.get_stats_counts()['skips'])
 
 
 class TestObjectUpdaterFunctions(unittest.TestCase):
@@ -555,7 +555,8 @@ class TestFuncs(BaseTest):
         self.assertEqual(resp['object_count'], 0)
         self.assertEqual(resp['versions'], None)
         self.assertEqual(
-            [x[0][0] for x in self.logger.logger.log_dict['increment']],
+            [x[0][0] for x in
+             self.logger.logger.statsd_client.calls['increment']],
             ['container.info.cache.miss'])
 
         # container info is cached in cache.
@@ -585,7 +586,8 @@ class TestFuncs(BaseTest):
             [(k, str, v, str)
              for k, v in subdict.items()])
         self.assertEqual(
-            [x[0][0] for x in self.logger.logger.log_dict['increment']],
+            [x[0][0] for x in
+             self.logger.logger.statsd_client.calls['increment']],
             ['container.info.cache.hit'])
 
     def test_get_cache_key(self):
@@ -670,27 +672,27 @@ class TestFuncs(BaseTest):
         record_cache_op_metrics(
             self.logger, 'shard_listing', 'infocache_hit')
         self.assertEqual(
-            self.logger.get_increment_counts().get(
+            self.logger.statsd_client.get_increment_counts().get(
                 'shard_listing.infocache.hit'),
             1)
         record_cache_op_metrics(
             self.logger, 'shard_listing', 'hit')
         self.assertEqual(
-            self.logger.get_increment_counts().get(
+            self.logger.statsd_client.get_increment_counts().get(
                 'shard_listing.cache.hit'),
             1)
         resp = FakeResponse(status_int=200)
         record_cache_op_metrics(
             self.logger, 'shard_updating', 'skip', resp)
         self.assertEqual(
-            self.logger.get_increment_counts().get(
+            self.logger.statsd_client.get_increment_counts().get(
                 'shard_updating.cache.skip.200'),
             1)
         resp = FakeResponse(status_int=503)
         record_cache_op_metrics(
             self.logger, 'shard_updating', 'disabled', resp)
         self.assertEqual(
-            self.logger.get_increment_counts().get(
+            self.logger.statsd_client.get_increment_counts().get(
                 'shard_updating.cache.disabled.503'),
             1)
 
@@ -2767,7 +2767,8 @@ class TestContainerController(TestRingBase):
         self.assertEqual(self.ns_bound_list,
                          req.environ['swift.infocache'][cache_key])
         self.assertEqual(
-            [x[0][0] for x in self.logger.logger.log_dict['increment']],
+            [x[0][0] for x in
+             self.logger.logger.statsd_client.calls['increment']],
             ['container.info.cache.miss',
              'container.shard_listing.cache.bypass.200'])
 
@@ -2805,7 +2806,8 @@ class TestContainerController(TestRingBase):
         self.assertEqual(self.ns_bound_list,
                          req.environ['swift.infocache'][cache_key])
         self.assertEqual(
-            [x[0][0] for x in self.logger.logger.log_dict['increment']],
+            [x[0][0] for x in
+             self.logger.logger.statsd_client.calls['increment']],
             ['container.info.cache.hit',
              'container.shard_listing.cache.miss.200'])
 
@@ -2829,7 +2831,8 @@ class TestContainerController(TestRingBase):
         self.assertEqual(self.ns_bound_list,
                          req.environ['swift.infocache'][cache_key])
         self.assertEqual(
-            [x[0][0] for x in self.logger.logger.log_dict['increment']],
+            [x[0][0] for x in
+             self.logger.logger.statsd_client.calls['increment']],
             ['container.info.cache.hit',
              'container.shard_listing.cache.hit'])
 
@@ -2866,7 +2869,8 @@ class TestContainerController(TestRingBase):
         self.assertEqual(self.ns_bound_list,
                          req.environ['swift.infocache'][cache_key])
         self.assertEqual(
-            [x[0][0] for x in self.logger.logger.log_dict['increment']],
+            [x[0][0] for x in
+             self.logger.logger.statsd_client.calls['increment']],
             ['container.info.cache.hit',
              'container.shard_listing.cache.skip.200'])
 
@@ -2890,7 +2894,8 @@ class TestContainerController(TestRingBase):
         self.assertEqual(self.ns_bound_list,
                          req.environ['swift.infocache'][cache_key])
         self.assertEqual(
-            [x[0][0] for x in self.logger.logger.log_dict['increment']],
+            [x[0][0] for x in
+             self.logger.logger.statsd_client.calls['increment']],
             ['container.info.cache.hit',
              'container.shard_listing.cache.hit'])
 
@@ -2913,7 +2918,8 @@ class TestContainerController(TestRingBase):
         self.assertEqual(self.ns_bound_list,
                          req.environ['swift.infocache'][cache_key])
         self.assertEqual(
-            [x[0][0] for x in self.logger.logger.log_dict['increment']],
+            [x[0][0] for x in
+             self.logger.logger.statsd_client.calls['increment']],
             ['container.info.infocache.hit',
              'container.shard_listing.infocache.hit'])
 
@@ -3068,7 +3074,7 @@ class TestContainerController(TestRingBase):
         self.assertEqual(404, resp.status_int)
         self.assertEqual({'container.info.cache.hit': 1,
                           'container.shard_listing.cache.miss.404': 1},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
 
     def test_GET_shard_ranges_read_from_cache_error(self):
         self._setup_shard_range_stubs()
@@ -3101,7 +3107,7 @@ class TestContainerController(TestRingBase):
         self.assertEqual(404, resp.status_int)
         self.assertEqual({'container.info.cache.hit': 1,
                           'container.shard_listing.cache.error.404': 1},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
 
     def _do_test_GET_shard_ranges_read_from_cache(self, params, record_type):
         # pre-warm cache with container metadata and shard ranges and verify
@@ -3124,7 +3130,7 @@ class TestContainerController(TestRingBase):
             self.memcache.calls)
         self.assertEqual({'container.info.cache.hit': 1,
                           'container.shard_listing.cache.hit': 1},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
         return resp
 
     def test_GET_shard_ranges_read_from_cache(self):
@@ -3220,7 +3226,7 @@ class TestContainerController(TestRingBase):
             self.memcache.calls[2][1][1]['sharding_state'])
         self.assertEqual({'container.info.cache.miss': 1,
                           'container.shard_listing.cache.bypass.200': 1},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
         return resp
 
     def test_GET_shard_ranges_write_to_cache(self):
@@ -3315,7 +3321,7 @@ class TestContainerController(TestRingBase):
             self.memcache.calls[2][1][1]['sharding_state'])
         self.assertEqual({'container.info.cache.miss': 1,
                           'container.shard_listing.cache.force_skip.200': 1},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
 
     def _do_test_GET_shard_ranges_no_cache_write(self, resp_hdrs):
         # verify that there is a cache lookup to check container info but then
@@ -3488,7 +3494,7 @@ class TestContainerController(TestRingBase):
             self.memcache.calls[1][1][1]['sharding_state'])
         self.assertEqual({'container.info.cache.miss': 1,
                           'container.shard_listing.cache.bypass.200': 1},
-                         self.logger.get_increment_counts())
+                         self.logger.statsd_client.get_increment_counts())
         self.memcache.delete_all()
 
     def test_GET_shard_ranges_bad_response_body(self):
@@ -1257,7 +1257,7 @@ class TestProxyServer(unittest.TestCase):
             self.assertTrue(log_kwargs['exc_info'])
             self.assertIs(caught_exc, log_kwargs['exc_info'][1])
             incremented_limit_samples.append(
-                logger.get_increment_counts().get(
+                logger.statsd_client.get_increment_counts().get(
                     'error_limiter.incremented_limit', 0))
         self.assertEqual([0] * 10 + [1], incremented_limit_samples)
         self.assertEqual(
@@ -1294,7 +1294,7 @@ class TestProxyServer(unittest.TestCase):
             self.assertIn(expected_msg, line)
             self.assertIn(node_to_string(node), line)
             incremented_limit_samples.append(
-                logger.get_increment_counts().get(
+                logger.statsd_client.get_increment_counts().get(
                     'error_limiter.incremented_limit', 0))
 
         self.assertEqual([0] * 10 + [1], incremented_limit_samples)
@@ -1310,7 +1310,8 @@ class TestProxyServer(unittest.TestCase):
         line = logger.get_lines_for_level('error')[-2]
         self.assertIn(expected_msg, line)
         self.assertIn(node_to_string(node), line)
-        self.assertEqual(2, logger.get_increment_counts().get(
+        self.assertEqual(
+            2, logger.statsd_client.get_increment_counts().get(
                 'error_limiter.incremented_limit', 0))
         self.assertEqual(
             ('Node will be error limited for 60.00s: %s' %
@@ -3609,7 +3610,7 @@ class TestReplicatedObjectController(
         error_node = object_ring.get_part_nodes(1)[0]
         self.app.error_limit(error_node, 'test')
         self.assertEqual(
-            1, self.logger.get_increment_counts().get(
+            1, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.forced_limit', 0))
         line = self.logger.get_lines_for_level('error')[-1]
         self.assertEqual(
@@ -3618,7 +3619,7 @@ class TestReplicatedObjectController(
 
         # no error limited checking yet.
         self.assertEqual(
-            0, self.logger.get_increment_counts().get(
+            0, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.is_limited', 0))
         set_http_connect(200, 200,  # account, container
                          201, 201, 201,  # 3 working backends
@@ -3630,7 +3631,7 @@ class TestReplicatedObjectController(
         self.assertTrue(res.status.startswith('201 '))
         # error limited happened during PUT.
         self.assertEqual(
-            1, self.logger.get_increment_counts().get(
+            1, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.is_limited', 0))
 
         # this is kind of a hokey test, but in FakeRing, the port is even when
@@ -4328,7 +4329,7 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)
 
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(
             {'account.info.cache.disabled.200': 1,
              'account.info.infocache.hit': 2,
@@ -4425,7 +4426,7 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)
 
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.miss.200': 1,
                           'account.info.infocache.hit': 2,
                           'container.info.cache.miss.200': 1,
@@ -4538,7 +4539,8 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)
 
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.miss.200': 1,
                           'account.info.infocache.hit': 1,
                           'container.info.cache.miss.200': 1,
@@ -4636,7 +4638,8 @@ class TestReplicatedObjectController(
 
         # verify request hitted infocache.
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.disabled.200': 1,
                           'account.info.infocache.hit': 1,
                           'container.info.cache.disabled.200': 1,
@@ -4737,7 +4740,8 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)
 
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.miss.200': 1,
                           'account.info.infocache.hit': 1,
                           'container.info.cache.miss.200': 1,
@@ -4782,7 +4786,8 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)
 
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual({'account.info.cache.miss.200': 1,
                           'account.info.infocache.hit': 1,
                           'container.info.cache.miss.200': 1,
@@ -4850,7 +4855,7 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)
 
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(stats, {
             'account.info.cache.hit': 2,
             'account.info.cache.miss.200': 1,
@@ -4896,7 +4901,7 @@ class TestReplicatedObjectController(
         resp = req.get_response(self.app)
 
         self.assertEqual(resp.status_int, 202)
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(
             {'account.info.cache.disabled.200': 1,
              'account.info.infocache.hit': 2,
@@ -5500,7 +5505,8 @@ class TestReplicatedObjectController(
             collected_nodes.append(node)
         self.assertEqual(len(collected_nodes), 7)
         self.assertEqual(self.app.logger.log_dict['warning'], [])
-        self.assertEqual(self.app.logger.get_increments(), [])
+        self.assertEqual(
+            self.app.logger.statsd_client.get_increments(), [])
 
         # one error-limited primary node -> one handoff warning
         self.app.log_handoffs = True
@@ -5520,7 +5526,7 @@ class TestReplicatedObjectController(
             self.app.logger.get_lines_for_level('warning'), [
                 'Handoff requested (5)'])
         self.assertEqual(
-            self.app.logger.get_increments(),
+            self.app.logger.statsd_client.get_increments(),
             ['error_limiter.is_limited', 'handoff_count'])
 
         # two error-limited primary nodes -> two handoff warnings
@@ -5543,7 +5549,7 @@ class TestReplicatedObjectController(
                 'Handoff requested (5)',
                 'Handoff requested (6)',
             ])
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(2, stats.get('error_limiter.is_limited', 0))
         self.assertEqual(2, stats.get('handoff_count', 0))
 
@@ -5571,7 +5577,7 @@ class TestReplicatedObjectController(
                 'Handoff requested (9)',
                 'Handoff requested (10)',
             ])
-        stats = self.app.logger.get_increment_counts()
+        stats = self.app.logger.statsd_client.get_increment_counts()
         self.assertEqual(4, stats.get('error_limiter.is_limited', 0))
         self.assertEqual(4, stats.get('handoff_count', 0))
         self.assertEqual(1, stats.get('handoff_all_count', 0))
@@ -5608,15 +5614,15 @@ class TestReplicatedObjectController(
         self.assertIn(first_nodes[0], second_nodes)
 
         self.assertEqual(
-            0, self.logger.get_increment_counts().get(
+            0, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.is_limited', 0))
         self.assertEqual(
-            0, self.logger.get_increment_counts().get(
+            0, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.forced_limit', 0))
 
         self.app.error_limit(first_nodes[0], 'test')
         self.assertEqual(
-            1, self.logger.get_increment_counts().get(
+            1, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.forced_limit', 0))
         line = self.logger.get_lines_for_level('error')[-1]
         self.assertEqual(
@@ -5627,13 +5633,13 @@ class TestReplicatedObjectController(
             object_ring, 0, self.logger, request=Request.blank('')))
         self.assertNotIn(first_nodes[0], second_nodes)
         self.assertEqual(
-            1, self.logger.get_increment_counts().get(
+            1, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.is_limited', 0))
         third_nodes = list(self.app.iter_nodes(
             object_ring, 0, self.logger, request=Request.blank('')))
         self.assertNotIn(first_nodes[0], third_nodes)
         self.assertEqual(
-            2, self.logger.get_increment_counts().get(
+            2, self.logger.statsd_client.get_increment_counts().get(
                 'error_limiter.is_limited', 0))
 
     def test_iter_nodes_gives_extra_if_error_limited_inline(self):
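Taken together, the test-side changes in this commit reduce to the same
mechanical substitutions in every file: stats accessors move from the logger
onto its statsd_client attribute, raw log_dict inspection of stats moves to
statsd_client.calls, and the private _clear() becomes the public clear(). In
summary, per the hunks above:

    logger.get_increment_counts()  ->  logger.statsd_client.get_increment_counts()
    logger.get_increments()        ->  logger.statsd_client.get_increments()
    logger.get_stats_counts()      ->  logger.statsd_client.get_stats_counts()
    logger.log_dict['increment']   ->  logger.statsd_client.calls['increment']
    logger.log_dict['timing']      ->  logger.statsd_client.calls['timing']
    logger._clear()                ->  logger.clear()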