Updates to remove _ usage that is not i18n related
parent 56791413b8
commit 9dd1e2ae84
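Context for the rename: the scripts touched here call gettext.install('swift', unicode=1) (visible in one of the hunks below), which installs the translation function under the builtin name _. Rebinding _ as a throwaway variable shadows that builtin, so non-i18n uses of _ are renamed to _junk. A minimal sketch of the conflict, in the Python 2 style of this codebase; the tuple values below are illustrative only, not part of the commit:

import gettext

gettext.install('swift', unicode=1)    # installs the translation function as the builtin _

print _('hello')                       # _ is still the gettext lookup here

part, _ = ('partition', 'nodes')       # throwaway unpacking rebinds _ to a plain string
# print _('hello')                     # would now fail: 'str' object is not callable

part, _junk = ('partition', 'nodes')   # the convention this commit adopts
print _('hello')                       # the gettext builtin is left intact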
bin/st (19 lines changed)
@@ -80,7 +80,7 @@ except ImportError:
 res = []
 consts = {'true': True, 'false': False, 'null': None}
 string = '(' + comments.sub('', string) + ')'
-for type, val, _, _, _ in \
+for type, val, _junk, _junk, _junk in \
 generate_tokens(StringIO(string).readline):
 if (type == OP and val not in '[]{}:,()-') or \
 (type == NAME and val not in consts):
@@ -914,7 +914,7 @@ def st_delete(parser, args, print_queue, error_queue):
 segment_queue.put((scontainer, delobj['name']))
 if not segment_queue.empty():
 segment_threads = [QueueFunctionThread(segment_queue,
-_delete_segment, create_connection()) for _ in
+_delete_segment, create_connection()) for _junk in
 xrange(10)]
 for thread in segment_threads:
 thread.start()
@@ -972,11 +972,11 @@ def st_delete(parser, args, print_queue, error_queue):
 create_connection = lambda: Connection(options.auth, options.user,
 options.key, preauthurl=url, preauthtoken=token, snet=options.snet)
 object_threads = [QueueFunctionThread(object_queue, _delete_object,
-create_connection()) for _ in xrange(10)]
+create_connection()) for _junk in xrange(10)]
 for thread in object_threads:
 thread.start()
 container_threads = [QueueFunctionThread(container_queue,
-_delete_container, create_connection()) for _ in xrange(10)]
+_delete_container, create_connection()) for _junk in xrange(10)]
 for thread in container_threads:
 thread.start()
 if not args:
@@ -1142,11 +1142,11 @@ def st_download(options, args, print_queue, error_queue):
 create_connection = lambda: Connection(options.auth, options.user,
 options.key, preauthurl=url, preauthtoken=token, snet=options.snet)
 object_threads = [QueueFunctionThread(object_queue, _download_object,
-create_connection()) for _ in xrange(10)]
+create_connection()) for _junk in xrange(10)]
 for thread in object_threads:
 thread.start()
 container_threads = [QueueFunctionThread(container_queue,
-_download_container, create_connection()) for _ in xrange(10)]
+_download_container, create_connection()) for _junk in xrange(10)]
 for thread in container_threads:
 thread.start()
 if not args:
@@ -1525,7 +1525,8 @@ def st_upload(options, args, print_queue, error_queue):
 full_size = getsize(path)
 segment_queue = Queue(10000)
 segment_threads = [QueueFunctionThread(segment_queue,
-_segment_job, create_connection()) for _ in xrange(10)]
+_segment_job, create_connection()) for _junk in
+xrange(10)]
 for thread in segment_threads:
 thread.start()
 segment = 0
@@ -1569,7 +1570,7 @@ def st_upload(options, args, print_queue, error_queue):
 'container': scontainer, 'obj': delobj['name']})
 if not segment_queue.empty():
 segment_threads = [QueueFunctionThread(segment_queue,
-_segment_job, create_connection()) for _ in
+_segment_job, create_connection()) for _junk in
 xrange(10)]
 for thread in segment_threads:
 thread.start()
@@ -1603,7 +1604,7 @@ def st_upload(options, args, print_queue, error_queue):
 create_connection = lambda: Connection(options.auth, options.user,
 options.key, preauthurl=url, preauthtoken=token, snet=options.snet)
 object_threads = [QueueFunctionThread(object_queue, _object_job,
-create_connection()) for _ in xrange(10)]
+create_connection()) for _junk in xrange(10)]
 for thread in object_threads:
 thread.start()
 conn = create_connection()
@@ -25,7 +25,7 @@ if __name__ == '__main__':
 gettext.install('swift', unicode=1)
 if len(argv) != 4 or argv[1] != '-K':
 exit('Syntax: %s -K <super_admin_key> <path to auth.db>' % argv[0])
-_, _, super_admin_key, auth_db = argv
+_junk, _junk, super_admin_key, auth_db = argv
 call(['swauth-prep', '-K', super_admin_key])
 conn = sqlite3.connect(auth_db)
 for account, cfaccount, user, password, admin, reseller_admin in \
@@ -105,7 +105,7 @@ if __name__ == '__main__':
 else:
 conf = CONF_DEFAULTS
 parser.set_defaults(**conf)
-options, _ = parser.parse_args()
+options, _junk = parser.parse_args()
 if options.concurrency is not '':
 options.put_concurrency = options.concurrency
 options.get_concurrency = options.concurrency
@@ -32,7 +32,7 @@ GRACEFUL_SHUTDOWN_SERVERS = ['account-server', 'container-server',
 MAX_DESCRIPTORS = 32768
 MAX_MEMORY = (1024 * 1024 * 1024) * 2 # 2 GB

-_, server, command = sys.argv
+_junk, server, command = sys.argv
 if server == 'all':
 servers = ALL_SERVERS
 else:
@@ -155,7 +155,7 @@ def do_stop(server, graceful=False):
 except OSError:
 pass
 for pid_file, pid in pfiles:
-for _ in xrange(150): # 15 seconds
+for _junk in xrange(150): # 15 seconds
 if not os.path.exists('/proc/%s' % pid):
 break
 time.sleep(0.1)
@@ -127,7 +127,7 @@ if __name__ == '__main__':
 next_report += 2
 while need_to_queue >= 1:
 container = 'stats_container_dispersion_%s' % uuid4()
-part, _ = container_ring.get_nodes(account, container)
+part, _junk = container_ring.get_nodes(account, container)
 if part in parts_left:
 coropool.spawn(put_container, connpool, container, report)
 sleep()
@@ -152,7 +152,7 @@ if __name__ == '__main__':
 next_report += 2
 while need_to_queue >= 1:
 obj = 'stats_object_dispersion_%s' % uuid4()
-part, _ = object_ring.get_nodes(account, container, obj)
+part, _junk = object_ring.get_nodes(account, container, obj)
 if part in parts_left:
 coropool.spawn(put_object, connpool, container, obj, report)
 sleep()
@@ -107,7 +107,7 @@ def audit(coropool, connpool, account, container_ring, object_ring, options):
 found = False
 error_log = get_error_log('%(ip)s:%(port)s/%(device)s' % node)
 try:
-attempts, _ = direct_client.retry(
+attempts, _junk = direct_client.retry(
 direct_client.direct_head_object, node, part,
 account, container, obj, error_log=error_log,
 retries=options.retries)
@@ -160,7 +160,7 @@ def audit(coropool, connpool, account, container_ring, object_ring, options):
 print 'Containers Missing'
 print '-' * 78
 for container in sorted(containers_missing_replicas.keys()):
-part, _ = container_ring.get_nodes(account, container)
+part, _junk = container_ring.get_nodes(account, container)
 for node in containers_missing_replicas[container]:
 print 'http://%s:%s/%s/%s/%s/%s' % (node['ip'], node['port'],
 node['device'], part, account, container)
@@ -170,8 +170,8 @@ def audit(coropool, connpool, account, container_ring, object_ring, options):
 print 'Objects Missing'
 print '-' * 78
 for opath in sorted(objects_missing_replicas.keys()):
-_, container, obj = opath.split('/', 2)
-part, _ = object_ring.get_nodes(account, container, obj)
+_junk, container, obj = opath.split('/', 2)
+part, _junk = object_ring.get_nodes(account, container, obj)
 for node in objects_missing_replicas[opath]:
 print 'http://%s:%s/%s/%s/%s/%s/%s' % (node['ip'],
 node['port'], node['device'], part, account, container,
@@ -200,7 +200,7 @@ def container_dispersion_report(coropool, connpool, account, container_ring,
 for node in nodes:
 error_log = get_error_log('%(ip)s:%(port)s/%(device)s' % node)
 try:
-attempts, _ = direct_client.retry(
+attempts, _junk = direct_client.retry(
 direct_client.direct_head_container, node,
 part, account, container, error_log=error_log,
 retries=options.retries)
@@ -284,7 +284,7 @@ def object_dispersion_report(coropool, connpool, account, object_ring, options):
 for node in nodes:
 error_log = get_error_log('%(ip)s:%(port)s/%(device)s' % node)
 try:
-attempts, _ = direct_client.retry(
+attempts, _junk = direct_client.retry(
 direct_client.direct_head_object, node, part,
 account, container, obj, error_log=error_log,
 retries=options.retries)
@@ -229,7 +229,7 @@ class AccountReaper(Daemon):
 if not containers:
 break
 try:
-for (container, _, _, _) in containers:
+for (container, _junk, _junk, _junk) in containers:
 self.container_pool.spawn(self.reap_container, account,
 partition, nodes, container)
 self.container_pool.waitall()
@@ -435,7 +435,7 @@ YOU HAVE A FEW OPTIONS:
 :param request: webob.Request object
 """
 try:
-_, token = split_path(request.path, minsegs=2)
+_junk, token = split_path(request.path, minsegs=2)
 except ValueError:
 return HTTPBadRequest()
 # Retrieves (TTL, account, user, cfaccount) if valid, False otherwise
@@ -478,7 +478,8 @@ YOU HAVE A FEW OPTIONS:
 :param request: webob.Request object
 """
 try:
-_, account_name, user_name = split_path(request.path, minsegs=3)
+_junk, account_name, user_name = \
+split_path(request.path, minsegs=3)
 except ValueError:
 return HTTPBadRequest()
 create_reseller_admin = \
@@ -76,7 +76,7 @@ except ImportError:
 res = []
 consts = {'true': True, 'false': False, 'null': None}
 string = '(' + comments.sub('', string) + ')'
-for type, val, _, _, _ in \
+for type, val, _junk, _junk, _junk in \
 generate_tokens(StringIO(string).readline):
 if (type == OP and val not in '[]{}:,()-') or \
 (type == NAME and val not in consts):
@@ -932,7 +932,7 @@ class ContainerBroker(DatabaseBroker):
 if not row:
 return []
 max_rowid = row['ROWID']
-for _ in xrange(min(max_count, max_rowid)):
+for _junk in xrange(min(max_count, max_rowid)):
 row = conn.execute('''
 SELECT name FROM object WHERE ROWID >= ? AND +deleted = 0
 LIMIT 1
@@ -1435,7 +1435,7 @@ class AccountBroker(DatabaseBroker):
 if not row:
 return []
 max_rowid = row['ROWID']
-for _ in xrange(min(max_count, max_rowid)):
+for _junk in xrange(min(max_count, max_rowid)):
 row = conn.execute('''
 SELECT name FROM container WHERE
 ROWID >= ? AND +deleted = 0
@@ -299,8 +299,8 @@ class Swauth(object):
 req.start_time = time()
 handler = None
 try:
-version, account, user, _ = split_path(req.path_info, minsegs=1,
-maxsegs=4, rest_with_last=True)
+version, account, user, _junk = split_path(req.path_info,
+minsegs=1, maxsegs=4, rest_with_last=True)
 except ValueError:
 return HTTPNotFound(request=req)
 if version in ('v1', 'v1.0', 'auth'):
@@ -399,7 +399,8 @@ class Swift3Middleware(object):
 h += header.lower() + ":" + str(req.headers[header]) + "\n"
 h += req.path
 try:
-account, user, _ = req.headers['Authorization'].split(' ')[-1].split(':')
+account, user, _junk = \
+req.headers['Authorization'].split(' ')[-1].split(':')
 except:
 return None, None
 token = base64.urlsafe_b64encode(h)
@@ -239,7 +239,7 @@ class RingBuilder(object):
 (sum(d['parts'] for d in self.devs if d is not None),
 self.parts * self.replicas))
 if stats:
-dev_usage = array('I', (0 for _ in xrange(len(self.devs))))
+dev_usage = array('I', (0 for _junk in xrange(len(self.devs))))
 for part in xrange(self.parts):
 zones = {}
 for replica in xrange(self.replicas):
@@ -342,8 +342,9 @@ class RingBuilder(object):
 '%08x.%04x' % (dev['parts_wanted'], randint(0, 0xffff))
 available_devs = sorted((d for d in self.devs if d is not None),
 key=lambda x: x['sort_key'])
-self._replica2part2dev = [array('H') for _ in xrange(self.replicas)]
-for _ in xrange(self.parts):
+self._replica2part2dev = \
+[array('H') for _junk in xrange(self.replicas)]
+for _junk in xrange(self.parts):
 other_zones = array('H')
 for replica in xrange(self.replicas):
 index = len(available_devs) - 1
@@ -365,7 +366,7 @@ class RingBuilder(object):
 index = mid + 1
 available_devs.insert(index, dev)
 other_zones.append(dev['zone'])
-self._last_part_moves = array('B', (0 for _ in xrange(self.parts)))
+self._last_part_moves = array('B', (0 for _junk in xrange(self.parts)))
 self._last_part_moves_epoch = int(time())
 for dev in self.devs:
 del dev['sort_key']
@@ -577,7 +577,7 @@ class ObjectController(object):
 if suffix:
 recalculate_hashes(path, suffix.split('-'))
 return Response()
-_, hashes = get_hashes(path, do_listdir=False)
+_junk, hashes = get_hashes(path, do_listdir=False)
 return Response(body=pickle.dumps(hashes))

 def __call__(self, env, start_response):
@@ -87,11 +87,11 @@ class AccountStat(Daemon):
 broker = AccountBroker(db_path)
 if not broker.is_deleted():
 (account_name,
-_, _, _,
+_junk, _junk, _junk,
 container_count,
 object_count,
 bytes_used,
-_, _) = broker.get_info()
+_junk, _junk) = broker.get_info()
 line_data = '"%s",%d,%d,%d\n' % (
 account_name, container_count,
 object_count, bytes_used)
@@ -365,7 +365,7 @@ def multiprocess_collate(processor_args, logs_to_process, worker_count):
 results = []
 in_queue = multiprocessing.Queue()
 out_queue = multiprocessing.Queue()
-for _ in range(worker_count):
+for _junk in range(worker_count):
 p = multiprocessing.Process(target=collate_worker,
 args=(processor_args,
 in_queue,
@@ -374,7 +374,7 @@ def multiprocess_collate(processor_args, logs_to_process, worker_count):
 results.append(p)
 for x in logs_to_process:
 in_queue.put(x)
-for _ in range(worker_count):
+for _junk in range(worker_count):
 in_queue.put(None)
 count = 0
 while True:
@@ -26,7 +26,7 @@ class StatsLogProcessor(object):
 data_object_name):
 '''generate hourly groupings of data from one stats log file'''
 account_totals = {}
-year, month, day, hour, _ = data_object_name.split('/')
+year, month, day, hour, _junk = data_object_name.split('/')
 for line in obj_stream:
 if not line:
 continue
@@ -119,7 +119,7 @@ class TestAuthServer(unittest.TestCase):
 headers={'X-Storage-User': 'tester',
 'X-Storage-Pass': 'testing'}))
 token = res.headers['x-storage-token']
-ttl, _, _, _ = self.controller.validate_token(token)
+ttl, _junk, _junk, _junk = self.controller.validate_token(token)
 self.assert_(ttl > 0, repr(ttl))

 def test_validate_token_expired(self):
@@ -134,7 +134,7 @@ class TestAuthServer(unittest.TestCase):
 headers={'X-Storage-User': 'tester',
 'X-Storage-Pass': 'testing'}))
 token = res.headers['x-storage-token']
-ttl, _, _, _ = self.controller.validate_token(token)
+ttl, _junk, _junk, _junk = self.controller.validate_token(token)
 self.assert_(ttl > 0, repr(ttl))
 auth_server.time = lambda: 1 + self.controller.token_life
 self.assertEquals(self.controller.validate_token(token), False)
@@ -318,7 +318,7 @@ class TestAuthServer(unittest.TestCase):
 headers={'X-Storage-User': 'tester',
 'X-Storage-Pass': 'testing'}))
 token = res.headers['x-storage-token']
-ttl, _, _, _ = self.controller.validate_token(token)
+ttl, _junk, _junk, _junk = self.controller.validate_token(token)
 self.assert_(ttl > 0, repr(ttl))

 def test_auth_SOSO_good_Mosso_headers(self):
@@ -330,7 +330,7 @@ class TestAuthServer(unittest.TestCase):
 headers={'X-Auth-User': 'test:tester',
 'X-Auth-Key': 'testing'}))
 token = res.headers['x-storage-token']
-ttl, _, _, _ = self.controller.validate_token(token)
+ttl, _junk, _junk, _junk = self.controller.validate_token(token)
 self.assert_(ttl > 0, repr(ttl))

 def test_auth_SOSO_bad_Mosso_headers(self):
@@ -438,7 +438,7 @@ class TestAuthServer(unittest.TestCase):
 headers={'X-Auth-User': 'test:tester',
 'X-Auth-Key': 'testing'}))
 token = res.headers['x-storage-token']
-ttl, _, _, _ = self.controller.validate_token(token)
+ttl, _junk, _junk, _junk = self.controller.validate_token(token)
 self.assert_(ttl > 0, repr(ttl))

 def test_auth_Mosso_good_SOSO_header_names(self):
@@ -450,7 +450,7 @@ class TestAuthServer(unittest.TestCase):
 headers={'X-Storage-User': 'test:tester',
 'X-Storage-Pass': 'testing'}))
 token = res.headers['x-storage-token']
-ttl, _, _, _ = self.controller.validate_token(token)
+ttl, _junk, _junk, _junk = self.controller.validate_token(token)
 self.assert_(ttl > 0, repr(ttl))

 def test_basic_logging(self):
@@ -712,7 +712,7 @@ class TestAuthServer(unittest.TestCase):
 res = self.controller.handle_auth(Request.blank('/v1.0',
 environ={'REQUEST_METHOD': 'GET'},
 headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'}))
-_, _, _, stgact = \
+_junk, _junk, _junk, stgact = \
 self.controller.validate_token(res.headers['x-auth-token'])
 self.assertEquals(stgact, '')

@@ -723,7 +723,7 @@ class TestAuthServer(unittest.TestCase):
 res = self.controller.handle_auth(Request.blank('/v1.0',
 environ={'REQUEST_METHOD': 'GET'},
 headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'}))
-_, _, _, vstgact = \
+_junk, _junk, _junk, vstgact = \
 self.controller.validate_token(res.headers['x-auth-token'])
 self.assertEquals(stgact, vstgact)

@@ -734,7 +734,7 @@ class TestAuthServer(unittest.TestCase):
 res = self.controller.handle_auth(Request.blank('/v1.0',
 environ={'REQUEST_METHOD': 'GET'},
 headers={'X-Auth-User': 'act:usr', 'X-Auth-Key': 'pas'}))
-_, _, _, stgact = \
+_junk, _junk, _junk, stgact = \
 self.controller.validate_token(res.headers['x-auth-token'])
 self.assertEquals(stgact, '.reseller_admin')

@@ -95,7 +95,7 @@ class Logger(object):
 self.error_value = (msg, args, kwargs)

 def exception(self, msg, *args, **kwargs):
-_, exc, _ = sys.exc_info()
+_junk, exc, _junk = sys.exc_info()
 self.exception_value = (msg,
 '%s %s' % (exc.__class__.__name__, str(exc)), args, kwargs)

@@ -35,10 +35,10 @@ class TestHttpHelpers(unittest.TestCase):

 def test_http_connection(self):
 url = 'http://www.test.com'
-_, conn = c.http_connection(url)
+_junk, conn = c.http_connection(url)
 self.assertTrue(isinstance(conn, c.HTTPConnection))
 url = 'https://www.test.com'
-_, conn = c.http_connection(url)
+_junk, conn = c.http_connection(url)
 self.assertTrue(isinstance(conn, c.HTTPSConnection))
 url = 'ftp://www.test.com'
 self.assertRaises(c.ClientException, c.http_connection, url)
@@ -142,7 +142,7 @@ class TestContainerUpdater(unittest.TestCase):
 bindsock = listen(('127.0.0.1', 0))
 def spawn_accepts():
 events = []
-for _ in xrange(2):
+for _junk in xrange(2):
 sock, addr = bindsock.accept()
 events.append(spawn(accept, sock, addr, 201))
 return events
@@ -195,7 +195,7 @@ class TestContainerUpdater(unittest.TestCase):
 bindsock = listen(('127.0.0.1', 0))
 def spawn_accepts():
 events = []
-for _ in xrange(2):
+for _junk in xrange(2):
 with Timeout(3):
 sock, addr = bindsock.accept()
 events.append(spawn(accept, sock, addr))
@@ -1154,7 +1154,7 @@ class TestObjectController(unittest.TestCase):
 self.assert_status_map(controller.HEAD, (503, 200, 200), 200)
 self.assertEquals(controller.app.object_ring.devs[0]['errors'], 2)
 self.assert_('last_error' in controller.app.object_ring.devs[0])
-for _ in xrange(self.app.error_suppression_limit):
+for _junk in xrange(self.app.error_suppression_limit):
 self.assert_status_map(controller.HEAD, (503, 503, 503), 503)
 self.assertEquals(controller.app.object_ring.devs[0]['errors'],
 self.app.error_suppression_limit + 1)
@@ -2590,7 +2590,7 @@ class TestContainerController(unittest.TestCase):
 self.assertEquals(
 controller.app.container_ring.devs[0]['errors'], 2)
 self.assert_('last_error' in controller.app.container_ring.devs[0])
-for _ in xrange(self.app.error_suppression_limit):
+for _junk in xrange(self.app.error_suppression_limit):
 self.assert_status_map(controller.HEAD,
 (200, 503, 503, 503), 503)
 self.assertEquals(controller.app.container_ring.devs[0]['errors'],