Random pep8 fixes!

This patch merely fixes a selection of files to the point where
pep8 1.3.3 is happy. Most of the errors are continuation-line
indentation (E126, E127, E128), closing-bracket position (E124) and
redundant backslashes (E502).
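
For reference, here is a minimal sketch of the kind of rewrite each of
these codes asks for. It is illustrative only - the function and variable
names below are made up and are not taken from the Swift tree:

    # Illustrative only - names are made up, not from the Swift tree.

    # E127/E128: with a visual indent, continuation lines should line up
    # with the opening bracket.
    result = some_function(first_arg,
                           second_arg)

    # E126: with a hanging indent, the continuation should not be
    # over-indented.
    result = some_function(
        first_arg, second_arg)

    # E124: the closing bracket should match the visual indentation of
    # the line that opened it (or of its first element).
    servers = [
        'account-server',
        'container-server',
    ]

    # E502: inside brackets the expression already continues onto the
    # next line, so a trailing backslash is redundant.
    total = (first_arg +
             second_arg)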

Patch set 2 fixes David's review comments regarding the backslash and an
odd comment - thanks David!

Change-Id: I4fbd77ecf5395743cb96acb95fa946c322c16560
Tom Fifield 2012-10-13 00:15:18 +11:00
parent 4cf96b3791
commit 9344a4a582
9 changed files with 65 additions and 52 deletions
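
To double-check a touched file locally, something along these lines
should work, assuming the pep8 1.3.x StyleGuide API (the path below is
only a placeholder, not a file name taken from this commit):

    import pep8

    # Limit the check to the codes this patch is concerned with.
    style = pep8.StyleGuide(select=['E124', 'E126', 'E127', 'E128', 'E502'])
    # Placeholder path - substitute one of the files changed here.
    report = style.check_files(['path/to/changed_file.py'])
    print('%d pep8 errors found' % report.total_errors)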

@@ -38,7 +38,7 @@ setup(
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Environment :: No Input/Output (Daemon)',
],
],
install_requires=[], # removed for better compat
scripts=[
'bin/swift-account-audit',
@@ -79,7 +79,7 @@ setup(
'object=swift.obj.server:app_factory',
'container=swift.container.server:app_factory',
'account=swift.account.server:app_factory',
],
],
'paste.filter_factory': [
'healthcheck=swift.common.middleware.healthcheck:filter_factory',
'memcache=swift.common.middleware.memcache:filter_factory',
@@ -95,7 +95,7 @@ setup(
'formpost=swift.common.middleware.formpost:filter_factory',
'name_check=swift.common.middleware.name_check:filter_factory',
'proxy_logging=swift.common.middleware.proxy_logging:'
'filter_factory',
],
},
)
'filter_factory',
],
},
)

@@ -58,7 +58,8 @@ def delete_containers(logger, conf):
except client.ClientException, e:
if e.http_status != HTTP_CONFLICT:
logger.warn("Unable to delete container '%s'. "
"Got http status '%d'." % (container, e.http_status))
"Got http status '%d'."
% (container, e.http_status))
_func_on_containers(logger, conf, 'del_concurrency', _deleter)
@@ -179,15 +180,17 @@ class Bench(object):
self.devices = conf.devices.split()
self.names = names
self.conn_pool = ConnectionPool(self.url,
max(self.put_concurrency, self.get_concurrency,
self.del_concurrency))
max(self.put_concurrency,
self.get_concurrency,
self.del_concurrency))
def _log_status(self, title):
total = time.time() - self.beginbeat
self.logger.info(_('%(complete)s %(title)s [%(fail)s failures], '
'%(rate).01f/s'),
{'title': title, 'complete': self.complete, 'fail': self.failures,
'rate': (float(self.complete) / total)})
{'title': title, 'complete': self.complete,
'fail': self.failures,
'rate': (float(self.complete) / total)})
@contextmanager
def connection(self):
@@ -362,11 +365,12 @@ class BenchDELETE(Bench):
try:
if self.use_proxy:
client.delete_object(self.url, self.token,
container_name, name, http_conn=conn)
container_name, name, http_conn=conn)
else:
node = {'ip': self.ip, 'port': self.port, 'device': device}
direct_client.direct_delete_object(node, partition,
self.account, container_name, name)
self.account,
container_name, name)
except client.ClientException, e:
self.logger.debug(str(e))
self.failures += 1
@@ -390,11 +394,12 @@ class BenchGET(Bench):
try:
if self.use_proxy:
client.get_object(self.url, self.token,
container_name, name, http_conn=conn)
container_name, name, http_conn=conn)
else:
node = {'ip': self.ip, 'port': self.port, 'device': device}
direct_client.direct_get_object(node, partition,
self.account, container_name, name)
self.account,
container_name, name)
except client.ClientException, e:
self.logger.debug(str(e))
self.failures += 1
@@ -419,7 +424,7 @@ class BenchPUT(Bench):
source = random.choice(self.files)
elif self.upper_object_size > self.lower_object_size:
source = '0' * random.randint(self.lower_object_size,
self.upper_object_size)
self.upper_object_size)
else:
source = '0' * self.object_size
device = random.choice(self.devices)
@@ -429,13 +434,16 @@ class BenchPUT(Bench):
try:
if self.use_proxy:
client.put_object(self.url, self.token,
container_name, name, source,
content_length=len(source), http_conn=conn)
container_name, name, source,
content_length=len(source),
http_conn=conn)
else:
node = {'ip': self.ip, 'port': self.port, 'device': device}
direct_client.direct_put_object(node, partition,
self.account, container_name, name, source,
content_length=len(source))
self.account,
container_name, name,
source,
content_length=len(source))
except client.ClientException, e:
self.logger.debug(str(e))
self.failures += 1

@@ -94,7 +94,7 @@ class BufferedHTTPConnection(HTTPConnection):
def getexpect(self):
response = BufferedHTTPResponse(self.sock, strict=self.strict,
method=self._method)
method=self._method)
response.expect_response()
return response
@@ -102,8 +102,9 @@ class BufferedHTTPConnection(HTTPConnection):
response = HTTPConnection.getresponse(self)
logging.debug(_("HTTP PERF: %(time).5f seconds to %(method)s "
"%(host)s:%(port)s %(path)s)"),
{'time': time.time() - self._connected_time, 'method': self._method,
'host': self.host, 'port': self.port, 'path': self._path})
{'time': time.time() - self._connected_time,
'method': self._method, 'host': self.host,
'port': self.port, 'path': self._path})
return response

@@ -76,14 +76,15 @@ def run_daemon(klass, conf_file, section_name='', once=False, **kwargs):
# once on command line (i.e. daemonize=false) will over-ride config
once = once or \
conf.get('daemonize', 'true').lower() not in utils.TRUE_VALUES
conf.get('daemonize', 'true').lower() not in utils.TRUE_VALUES
# pre-configure logger
if 'logger' in kwargs:
logger = kwargs.pop('logger')
else:
logger = utils.get_logger(conf, conf.get('log_name', section_name),
log_to_console=kwargs.pop('verbose', False), log_route=section_name)
log_to_console=kwargs.pop('verbose', False),
log_route=section_name)
# disable fallocate if desired
if conf.get('disable_fallocate', 'no').lower() in utils.TRUE_VALUES:

@@ -30,10 +30,10 @@ RUN_DIR = '/var/run/swift'
# auth-server has been removed from ALL_SERVERS, start it explicitly
ALL_SERVERS = ['account-auditor', 'account-server', 'container-auditor',
'container-replicator', 'container-server', 'container-sync',
'container-updater', 'object-auditor', 'object-server', 'object-expirer',
'object-replicator', 'object-updater', 'proxy-server',
'account-replicator', 'account-reaper']
'container-replicator', 'container-server', 'container-sync',
'container-updater', 'object-auditor', 'object-server',
'object-expirer', 'object-replicator', 'object-updater',
'proxy-server', 'account-replicator', 'account-reaper']
MAIN_SERVERS = ['proxy-server', 'account-server', 'container-server',
'object-server']
REST_SERVERS = [s for s in ALL_SERVERS if s not in MAIN_SERVERS]
@@ -55,9 +55,9 @@ def setup_env():
"""
try:
resource.setrlimit(resource.RLIMIT_NOFILE,
(MAX_DESCRIPTORS, MAX_DESCRIPTORS))
(MAX_DESCRIPTORS, MAX_DESCRIPTORS))
resource.setrlimit(resource.RLIMIT_DATA,
(MAX_MEMORY, MAX_MEMORY))
(MAX_MEMORY, MAX_MEMORY))
except ValueError:
print _("WARNING: Unable to increase file descriptor limit. "
"Running as non-root?")
@@ -219,7 +219,8 @@ class Manager():
# keep track of the pids yeiled back as killed for all servers
killed_pids = set()
for server, killed_pid in watch_server_pids(server_pids,
interval=KILL_WAIT, **kwargs):
interval=KILL_WAIT,
**kwargs):
print _("%s (%s) appears to have stopped") % (server, killed_pid)
killed_pids.add(killed_pid)
if not killed_pids.symmetric_difference(signaled_pids):
@@ -360,8 +361,8 @@ class Server():
"""
if self.server in STANDALONE_SERVERS:
return pid_file.replace(
os.path.normpath(RUN_DIR), SWIFT_DIR, 1).rsplit(
'.pid', 1)[0] + '.conf'
os.path.normpath(RUN_DIR), SWIFT_DIR, 1)\
.rsplit('.pid', 1)[0] + '.conf'
else:
return pid_file.replace(
os.path.normpath(RUN_DIR), SWIFT_DIR, 1).replace(
@@ -380,7 +381,7 @@ class Server():
'.conf')
else:
found_conf_files = search_tree(SWIFT_DIR, '%s-server*' % self.type,
'.conf')
'.conf')
number = kwargs.get('number')
if number:
try:

@@ -82,15 +82,15 @@ class MemcacheRing(object):
def _exception_occurred(self, server, e, action='talking'):
if isinstance(e, socket.timeout):
logging.error(_("Timeout %(action)s to memcached: %(server)s"),
{'action': action, 'server': server})
{'action': action, 'server': server})
else:
logging.exception(_("Error %(action)s to memcached: %(server)s"),
{'action': action, 'server': server})
{'action': action, 'server': server})
now = time.time()
self._errors[server].append(time.time())
if len(self._errors[server]) > ERROR_LIMIT_COUNT:
self._errors[server] = [err for err in self._errors[server]
if err > now - ERROR_LIMIT_TIME]
if err > now - ERROR_LIMIT_TIME]
if len(self._errors[server]) > ERROR_LIMIT_COUNT:
self._error_limited[server] = now + ERROR_LIMIT_DURATION
logging.error(_('Error limiting server %s'), server)
@@ -156,8 +156,8 @@ class MemcacheRing(object):
flags |= JSON_FLAG
for (server, fp, sock) in self._get_conns(key):
try:
sock.sendall('set %s %d %d %s noreply\r\n%s\r\n' % \
(key, flags, timeout, len(value), value))
sock.sendall('set %s %d %d %s noreply\r\n%s\r\n' %
(key, flags, timeout, len(value), value))
self._return_conn(server, fp, sock)
return
except Exception, e:
@@ -225,8 +225,8 @@ class MemcacheRing(object):
add_val = delta
if command == 'decr':
add_val = '0'
sock.sendall('add %s %d %d %s\r\n%s\r\n' % \
(key, 0, timeout, len(add_val), add_val))
sock.sendall('add %s %d %d %s\r\n%s\r\n' %
(key, 0, timeout, len(add_val), add_val))
line = fp.readline().strip().split()
if line[0].upper() == 'NOT_STORED':
sock.sendall('%s %s %s\r\n' % (command, key, delta))

@@ -108,7 +108,8 @@ def clean_acl(name, value):
second = second[1:].strip()
if not second or second == '.':
raise ValueError('No host/domain value after referrer '
'designation in ACL: %s' % repr(raw_value))
'designation in ACL: %s' %
repr(raw_value))
values.append('.r:%s%s' % (negate and '-' or '', second))
else:
raise ValueError('Unknown designator %s in ACL: %s' %
@@ -156,8 +157,8 @@ def referrer_allowed(referrer, referrer_acl):
for mhost in referrer_acl:
if mhost[0] == '-':
mhost = mhost[1:]
if mhost == rhost or \
(mhost[0] == '.' and rhost.endswith(mhost)):
if mhost == rhost or (mhost[0] == '.' and
rhost.endswith(mhost)):
allow = False
elif mhost == '*' or mhost == rhost or \
(mhost[0] == '.' and rhost.endswith(mhost)):

@@ -67,6 +67,7 @@ for k in default_constraints:
# tests.
config[k] = '%s constraint is not defined' % k
def load_constraint(name):
c = config[name]
if not isinstance(c, int):

@@ -93,8 +93,8 @@ class TestEmptyDevice(TestCase):
for node in onodes[1:]:
start_server(node['port'], self.port2server, self.pids)
self.assertFalse(os.path.exists(obj_dir))
# We've indirectly verified the handoff node has the object, but let's
# directly verify it.
# We've indirectly verified the handoff node has the object, but
# let's directly verify it.
another_onode = self.object_ring.get_more_nodes(opart).next()
odata = direct_client.direct_get_object(
another_onode, opart, self.account, container, obj)[-1]
@@ -118,28 +118,28 @@ class TestEmptyDevice(TestCase):
exc = None
try:
direct_client.direct_get_object(onode, opart, self.account,
container, obj)
container, obj)
except direct_client.ClientException, err:
exc = err
self.assertEquals(exc.http_status, 404)
self.assertFalse(os.path.exists(obj_dir))
call(['swift-object-replicator',
'/etc/swift/object-server/%d.conf' %
((onode['port'] - 6000) / 10), 'once'])
'/etc/swift/object-server/%d.conf' %
((onode['port'] - 6000) / 10), 'once'])
call(['swift-object-replicator',
'/etc/swift/object-server/%d.conf' %
((another_onode['port'] - 6000) / 10), 'once'])
odata = direct_client.direct_get_object(onode, opart, self.account,
container, obj)[-1]
container, obj)[-1]
if odata != 'VERIFY':
raise Exception('Direct object GET did not return VERIFY, instead '
'it returned: %s' % repr(odata))
exc = None
try:
direct_client.direct_get_object(another_onode, opart, self.account,
container, obj)
container, obj)
except direct_client.ClientException, err:
exc = err
self.assertEquals(exc.http_status, 404)