Merge "Delay string interpolations at logging calls"
commit bfcab95ff2
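The rationale behind every hunk below is the standard Python logging idiom: pass the interpolation arguments to the logger rather than pre-formatting the message with %, so the string is only built if the record is actually emitted. A minimal standalone sketch of the before/after pattern (illustrative only, not Glance code; the Expensive class is a made-up stand-in):

import logging

logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger(__name__)


class Expensive(object):
    """Stand-in for a value whose string conversion is costly."""

    def __str__(self):
        print('str() was called')
        return 'expensive-value'


value = Expensive()

# Eager: the message (and str(value)) is built up front, even though the
# DEBUG record is filtered out at INFO level.
LOG.debug('value is %s' % value)

# Delayed: the logger checks the level first and only interpolates the
# arguments when the record is actually handled, so str(value) never runs.
LOG.debug('value is %s', value)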
@@ -65,7 +65,7 @@ class CacheController(object):
                 req.context, image=image, enforcer=self.policy,
                 policy_str=new_policy).manage_image_cache()
         except exception.Forbidden:
-            LOG.debug("User not permitted by '%s' policy" % new_policy)
+            LOG.debug("User not permitted by '%s' policy", new_policy)
             raise webob.exc.HTTPForbidden()
 
         if not CONF.image_cache_dir:
@@ -281,7 +281,7 @@ class ImagesController(object):
             raise webob.exc.HTTPBadGateway('Stage host is unavailable')
         req_id_hdr = 'x-openstack-request-id'
         if req_id_hdr in r.headers:
-            LOG.debug('Replying with remote request id %s' % (
+            LOG.debug('Replying with remote request id %s', (
                 r.headers[req_id_hdr]))
             req.context.request_id = r.headers[req_id_hdr]
         if r.status_code // 100 != 2:
@@ -391,7 +391,7 @@ class ImagesController(object):
             if existing_store in stores:
                 LOG.debug("Removing store '%s' from all stores as "
                           "image is already available in that "
-                          "store." % existing_store)
+                          "store.", existing_store)
                 stores.remove(existing_store)
 
             if len(stores) == 0:
@@ -103,9 +103,9 @@ class ThreadPoolModel(object):
 
     def spawn(self, fn, *args, **kwargs):
         """Spawn a function with args using the thread pool."""
-        LOG.debug('Spawning with %s: %s(%s, %s)' % (
-            self.get_threadpool_executor_class().__name__,
-            fn, args, kwargs))
+        LOG.debug('Spawning with %s: %s(%s, %s)',
+                  self.get_threadpool_executor_class().__name__,
+                  fn, args, kwargs)
         return self.pool.submit(fn, *args, **kwargs)
 
     def map(self, fn, iterable):
@@ -274,7 +274,7 @@ def get_flow(**kwargs):
     task_type = kwargs.get('task_type')
     image_repo = kwargs.get('image_repo')
 
-    LOG.debug("Flow: %(task_type)s with ID %(id)s on %(repo)s" %
+    LOG.debug("Flow: %(task_type)s with ID %(id)s on %(repo)s",
               {'task_type': task_type, 'id': task_id, 'repo': image_repo})
 
     return lf.Flow(task_type).add(
@@ -150,7 +150,7 @@ class TaskExecutor(glance.async_.TaskExecutor):
         try:
             super(TaskExecutor, self).begin_processing(task_id)
         except exception.ImportTaskError as exc:
-            LOG.error(_LE('Failed to execute task %(task_id)s: %(exc)s') %
+            LOG.error(_LE('Failed to execute task %(task_id)s: %(exc)s'),
                       {'task_id': task_id, 'exc': exc.msg})
             task = self.task_repo.get(task_id)
             task.fail(exc.msg)
@@ -183,7 +183,7 @@ class TaskExecutor(glance.async_.TaskExecutor):
             self.task_repo.save(task)
         except Exception as exc:
             with excutils.save_and_reraise_exception():
-                LOG.error(_LE('Failed to execute task %(task_id)s: %(exc)s') %
+                LOG.error(_LE('Failed to execute task %(task_id)s: %(exc)s'),
                           {'task_id': task_id,
                            'exc': encodeutils.exception_to_unicode(exc)})
                 # TODO(sabari): Check for specific exceptions and update the
@@ -634,12 +634,12 @@ def replication_compare(options, args):
             if image[key] != headers.get(key):
                 LOG.warning(_LW('%(image_id)s: field %(key)s differs '
                                 '(source is %(source_value)s, destination '
-                                'is %(target_value)s)')
-                            % {'image_id': image['id'],
-                               'key': key,
-                               'source_value': image[key],
-                               'target_value': headers.get(key,
-                                                           'undefined')})
+                                'is %(target_value)s)'),
+                            {'image_id': image['id'],
+                             'key': key,
+                             'source_value': image[key],
+                             'target_value': headers.get(key,
+                                                         'undefined')})
                 differences[image['id']] = 'diff'
             else:
                 LOG.debug('%(image_id)s is identical',
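The dict-style placeholders in the hunk above (and in several later hunks) defer the same way: the mapping is passed as the single logging argument and %(name)s substitution is applied lazily by the logging machinery. A small illustrative sketch, not taken from the Glance tree (the image dict and key are invented):

import logging

logging.basicConfig(level=logging.WARNING)
LOG = logging.getLogger(__name__)

image = {'id': 'deadbeef', 'size': 42}
key = 'size'

# The dict is handed to the logger untouched; '%(image_id)s' and '%(key)s'
# are only substituted if and when the warning record is formatted.
LOG.warning('%(image_id)s: field %(key)s differs',
            {'image_id': image['id'], 'key': key})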
@@ -647,9 +647,9 @@ def replication_compare(options, args):
 
         elif image['status'] == 'active':
             LOG.warning(_LW('Image %(image_id)s ("%(image_name)s") '
-                            'entirely missing from the destination')
-                        % {'image_id': image['id'],
-                           'image_name': image.get('name', '--unnamed')})
+                            'entirely missing from the destination'),
+                        {'image_id': image['id'],
+                         'image_name': image.get('name', '--unnamed')})
             differences[image['id']] = 'missing'
 
     return differences
@@ -170,7 +170,7 @@ class PropertyRules(object):
                 LOG.warning(
                     _LW('Property protection on operation %(operation)s'
                         ' for rule %(rule)s is not found. No role will be'
-                        ' allowed to perform this operation.') %
+                        ' allowed to perform this operation.'),
                     {'operation': operation,
                      'rule': property_exp})
 
@@ -124,7 +124,7 @@ def set_image_data(image, uri, task_id, backend=None):
         image.set_data(data_iter, backend=backend)
     except Exception as e:
         with excutils.save_and_reraise_exception():
-            LOG.warning("Task %(task_id)s failed with exception %(error)s" %
+            LOG.warning("Task %(task_id)s failed with exception %(error)s",
                         {"error": encodeutils.exception_to_unicode(e),
                          "task_id": task_id})
             LOG.info("Task %(task_id)s: Could not import image file"
@@ -152,7 +152,7 @@ def set_image_data(image, uri, task_id, backend=None, set_active=True,
     except Exception as e:
         with excutils.save_and_reraise_exception():
             LOG.warning(_LW("Task %(task_id)s failed with exception "
-                            "%(error)s") %
+                            "%(error)s"),
                         {"error": encodeutils.exception_to_unicode(e),
                          "task_id": task_id})
             LOG.info(_LI("Task %(task_id)s: Could not import image file"
@@ -673,14 +673,14 @@ class PosixServer(BaseServer):
                 self.stale_children.remove(pid)
                 LOG.info(_LI('Removed stale child %s'), pid)
             else:
-                LOG.warning(_LW('Unrecognised child %s') % pid)
+                LOG.warning(_LW('Unrecognised child %s'), pid)
 
     def _verify_and_respawn_children(self, pid, status):
         if len(self.stale_children) == 0:
             LOG.debug('No stale children')
         if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
             LOG.error(_LE('Not respawning child %d, cannot '
-                          'recover from termination') % pid)
+                          'recover from termination'), pid)
             if not self.children and not self.stale_children:
                 LOG.info(
                     _LI('All workers have terminated. Exiting'))
@@ -537,7 +537,7 @@ class TaskExecutorFactory(object):
         except ImportError:
             with excutils.save_and_reraise_exception():
                 LOG.exception(_LE("Failed to load the %s executor provided "
-                                  "in the config.") % CONF.task.task_executor)
+                                  "in the config."), CONF.task.task_executor)
 
 
 class MetadefNamespace(object):
@@ -371,7 +371,7 @@ class ImageCache(object):
         except Exception as e:
             LOG.exception(_LE("Exception encountered while tee'ing "
                               "image '%(image_id)s' into cache: %(error)s. "
-                              "Continuing with response.") %
+                              "Continuing with response."),
                           {'image_id': image_id,
                            'error': encodeutils.exception_to_unicode(e)})
 
@@ -348,7 +348,7 @@ class Driver(base.Driver):
 
             msg = (_LW("Fetch of cache file failed (%(e)s), rolling "
                        "back by moving '%(incomplete_path)s' to "
-                       "'%(invalid_path)s'") %
+                       "'%(invalid_path)s'"),
                    {'e': e,
                     'incomplete_path': incomplete_path,
                     'invalid_path': invalid_path})
@@ -51,7 +51,7 @@ class Prefetcher(base.CacheApp):
             return False
 
         if image.status != 'active':
-            LOG.warning(_LW("Image '%s' is not active. Not caching.") %
+            LOG.warning(_LW("Image '%s' is not active. Not caching."),
                         image_id)
             return False
 
@@ -410,7 +410,7 @@ class Scrubber(object):
             LOG.info(_LI("Image %s is scrubbed from a location."), image_id)
         except Exception as e:
             LOG.error(_LE("Unable to scrub image %(id)s from a location. "
-                          "Reason: %(exc)s ") %
+                          "Reason: %(exc)s "),
                       {'id': image_id,
                        'exc': encodeutils.exception_to_unicode(e)})
             raise
@@ -51,10 +51,10 @@ class TestImageImportLocking(functional.SynchronousAPIBase):
                           set_active=True):
             me = str(uuid.uuid4())
             while state['want_run'] == True:
-                LOG.info('fake_set_data running %s' % me)
+                LOG.info('fake_set_data running %s', me)
                 state['running'] = True
                 time.sleep(0.1)
-            LOG.info('fake_set_data ended %s' % me)
+            LOG.info('fake_set_data ended %s', me)
 
         # Constrain oslo timeutils time so we can manipulate it
         tf = time_fixture.TimeFixture()
@@ -314,16 +314,16 @@ def wait_for_fork(pid,
                 pass
             elif not nice_timer.expired():
                 # Politely ask the process to GTFO
-                LOG.warning('Killing child %i with SIGTERM' % pid)
+                LOG.warning('Killing child %i with SIGTERM', pid)
                 os.kill(pid, signal.SIGTERM)
             else:
                 # No more Mr. Nice Guy
-                LOG.warning('Killing child %i with SIGKILL' % pid)
+                LOG.warning('Killing child %i with SIGKILL', pid)
                 os.kill(pid, signal.SIGKILL)
                 expected_exitcode = signal.SIGKILL
             time.sleep(1)
             continue
-        LOG.info('waitpid(%i) returned %i,%i' % (pid, cpid, rc))
+        LOG.info('waitpid(%i) returned %i,%i', pid, cpid, rc)
         if rc != expected_exitcode:
             raise RuntimeError('The exit code %d is not %d'
                                % (rc, expected_exitcode))
@@ -332,7 +332,7 @@ def wait_for_fork(pid,
             # Nothing to wait for
             return 0
         except Exception as e:
-            LOG.error('Got wait error: %s' % e)
+            LOG.error('Got wait error: %s', e)
             if raise_error:
                 raise
 
tox.ini (+1)
@@ -131,6 +131,7 @@ ignore-path = .venv,.git,.tox,.eggs,*glance/locale*,*lib/python*,glance.egg*,api
 # W503 line break before binary operator - conflicting guidance
 # W504 line break after binary operator - conflicting guidance
 ignore = E402,E711,E712,H404,H405,W503,W504
+enable-extensions = H904
 exclude = .venv,.git,.tox,dist,doc,etc,*glance/locale*,*lib/python*,*egg,build
 
 [hacking]
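The tox.ini change is presumably what enforces the pattern going forward: enable-extensions opts in to the otherwise-off hacking/flake8 check H904, whose description matches this commit's subject. An illustrative, self-contained pair showing the kind of call such a check is meant to flag versus the form adopted throughout this change (the names are invented, and this is not the check's actual output):

import logging

LOG = logging.getLogger(__name__)
image_id = 'deadbeef'

# Eager interpolation at the call site; this is the pattern being removed.
LOG.info('processing image %s' % image_id)

# Interpolation delegated to the logger; this is the pattern being adopted.
LOG.info('processing image %s', image_id)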