Remove log translations
Log messages are no longer being translated. This removes all use of
the _LE, _LI, and _LW translation markers to simplify logging and to
avoid confusion with new contributions.

See:
http://lists.openstack.org/pipermail/openstack-i18n/2016-November/002574.html
http://lists.openstack.org/pipermail/openstack-dev/2017-March/113365.html

Closes-Bug: #1674577
Change-Id: I7553ae7c222f61b6796b72ac4a2b744d3e08fee6
commit 443b3e3262 (parent 231913a153)
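For context, a minimal sketch of the logging pattern this change removes versus the one it keeps. It is illustrative only and assumes an oslo.i18n release that still exposes the per-log-level translators (they were later deprecated); the logger name and message text are just examples taken from the diff below.

    import logging

    import oslo_i18n

    LOG = logging.getLogger(__name__)

    # Old style: a TranslatorFactory provides per-level markers (_LI/_LW/_LE/_LC)
    # that wrap every log message, as in the removed oslo_db/_i18n helpers.
    _translators = oslo_i18n.TranslatorFactory(domain='oslo_db')
    _ = _translators.primary        # still kept, for translatable exception text
    _LW = _translators.log_warning  # removed by this change

    LOG.warning(_LW('Unable to detect effective SQL mode'))

    # New style: log messages are plain, untranslated strings; only "_" remains
    # for user-facing exception messages.
    LOG.warning('Unable to detect effective SQL mode')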
@@ -23,13 +23,3 @@ _translators = oslo_i18n.TranslatorFactory(domain='oslo_db')
 
 # The primary translation function using the well-known name "_"
 _ = _translators.primary
-
-# Translators for log levels.
-#
-# The abbreviated names are meant to reflect the usual use of a short
-# name like '_'. The "L" is for "log" and the other letter comes from
-# the level.
-_LI = _translators.log_info
-_LW = _translators.log_warning
-_LE = _translators.log_error
-_LC = _translators.log_critical
@@ -33,7 +33,6 @@ from oslo_utils import importutils
 from oslo_utils import reflection
 import six
 
-from oslo_db._i18n import _LE
 from oslo_db import exception
 from oslo_db import options
 
@@ -144,7 +143,7 @@ class wrap_db_retry(object):
                             ectxt.reraise = not expected
                         else:
                             if expected:
-                                LOG.exception(_LE('DB exceeded retry limit.'))
+                                LOG.exception('DB exceeded retry limit.')
                             # if it's a RetryRequest, we need to unpack it
                             if isinstance(e, exception.RetryRequest):
                                 ectxt.type_ = type(e.inner_exc)
@@ -19,7 +19,6 @@ import threading
 
 from oslo_config import cfg
 
-from oslo_db._i18n import _LE
 from oslo_db import api
 
 
@@ -61,8 +60,8 @@ class TpoolDbapiWrapper(object):
                         try:
                             from eventlet import tpool
                         except ImportError:
-                            LOG.exception(_LE("'eventlet' is required for "
-                                              "TpoolDbapiWrapper."))
+                            LOG.exception("'eventlet' is required for "
+                                          "TpoolDbapiWrapper.")
                             raise
                         self._db_api = tpool.Proxy(db_api)
                     else:
@@ -29,7 +29,6 @@ from sqlalchemy import exc
 from sqlalchemy import pool
 from sqlalchemy.sql.expression import select
 
-from oslo_db._i18n import _LW
 from oslo_db import exception
 
 from oslo_db.sqlalchemy import exc_filters
@@ -255,16 +254,15 @@ def _init_events(engine, mysql_sql_mode=None, **kw):
         realmode = cursor.fetchone()
 
         if realmode is None:
-            LOG.warning(_LW('Unable to detect effective SQL mode'))
+            LOG.warning('Unable to detect effective SQL mode')
         else:
             realmode = realmode[1]
             LOG.debug('MySQL server mode set to %s', realmode)
             if 'TRADITIONAL' not in realmode.upper() and \
               'STRICT_ALL_TABLES' not in realmode.upper():
                 LOG.warning(
-                    _LW(
-                        "MySQL SQL mode is '%s', "
-                        "consider enabling TRADITIONAL or STRICT_ALL_TABLES"),
+                    "MySQL SQL mode is '%s', "
+                    "consider enabling TRADITIONAL or STRICT_ALL_TABLES",
                     realmode)
 
 
@@ -330,7 +328,7 @@ def _test_connection(engine, max_retries, retry_interval):
         try:
             return engine.connect()
         except exception.DBConnectionError as de:
-            msg = _LW('SQL connection failed. %s attempts left.')
+            msg = 'SQL connection failed. %s attempts left.'
             LOG.warning(msg, max_retries - attempt)
             time.sleep(retry_interval)
             de_ref = de
@@ -355,10 +353,10 @@ def _add_process_guards(engine):
     def checkout(dbapi_connection, connection_record, connection_proxy):
         pid = os.getpid()
         if connection_record.info['pid'] != pid:
-            LOG.debug(_LW(
+            LOG.debug(
                 "Parent process %(orig)s forked (%(newproc)s) with an open "
                 "database connection, "
-                "which is being discarded and recreated."),
+                "which is being discarded and recreated.",
                 {"newproc": pid, "orig": connection_record.info['pid']})
             connection_record.connection = connection_proxy.connection = None
             raise exc.DisconnectionError(
@@ -19,7 +19,6 @@ import sys
 from sqlalchemy import event
 from sqlalchemy import exc as sqla_exc
 
-from oslo_db._i18n import _LE
 from oslo_db import exception
 
 
@@ -407,7 +406,7 @@ def _raise_for_remaining_DBAPIError(error, match, engine_name, is_disconnect):
         raise exception.DBConnectionError(error)
     else:
         LOG.exception(
-            _LE('DBAPIError exception wrapped from %s') % error)
+            'DBAPIError exception wrapped from %s' % error)
         raise exception.DBError(error)
 
 
@@ -418,7 +417,7 @@ def _raise_for_unicode_encode(error, match, engine_name, is_disconnect):
 
 @filters("*", Exception, r".*")
 def _raise_for_all_others(error, match, engine_name, is_disconnect):
-    LOG.exception(_LE('DB exception wrapped.'))
+    LOG.exception('DB exception wrapped.')
     raise exception.DBError(error)
 
 ROLLBACK_CAUSE_KEY = 'oslo.db.sp_rollback_cause'
@@ -15,7 +15,6 @@ import os
 
 from migrate.versioning import version as migrate_version
 
-from oslo_db._i18n import _LE
 from oslo_db.sqlalchemy import migration
 from oslo_db.sqlalchemy.migration_cli import ext_base
 
@@ -58,9 +57,9 @@ class MigrateExtension(ext_base.MigrationExtensionBase):
                 init_version=self.init_version)
         except ValueError:
             LOG.error(
-                _LE('Migration number for migrate plugin must be valid '
-                    'integer or empty, if you want to downgrade '
-                    'to initial state')
+                'Migration number for migrate plugin must be valid '
+                'integer or empty, if you want to downgrade '
+                'to initial state'
             )
             raise
 
@@ -31,7 +31,6 @@ from sqlalchemy.engine import url as sa_url
 from sqlalchemy import schema
 import testresources
 
-from oslo_db._i18n import _LI
 from oslo_db import exception
 from oslo_db.sqlalchemy import enginefacade
 from oslo_db.sqlalchemy import session
@@ -292,7 +291,7 @@ class Backend(object):
             # within create_engine(). So if ibm_db_sa, cx_oracle etc.
             # isn't installed, we get an ImportError here.
             LOG.info(
-                _LI("The %(dbapi)s backend is unavailable: %(err)s"),
+                "The %(dbapi)s backend is unavailable: %(err)s",
                 dict(dbapi=url.drivername, err=i_e))
             raise exception.BackendNotAvailable(
                 "Backend '%s' is unavailable: No DBAPI installed" %
@@ -305,7 +304,7 @@ class Backend(object):
             # usually raises OperationalError and should always at
             # least raise a SQLAlchemy-wrapped DBAPI Error.
             LOG.info(
-                _LI("The %(dbapi)s backend is unavailable: %(err)s"),
+                "The %(dbapi)s backend is unavailable: %(err)s",
                 dict(dbapi=url.drivername, err=d_e)
             )
             raise exception.BackendNotAvailable(
@@ -30,7 +30,6 @@ import sqlalchemy.exc
 import sqlalchemy.sql.expression as expr
 import sqlalchemy.types as types
 
-from oslo_db._i18n import _LE
 from oslo_db import exception as exc
 from oslo_db.sqlalchemy import provision
 from oslo_db.sqlalchemy import utils
@@ -270,7 +269,7 @@ class WalkVersionsMixin(object):
             if check:
                 check(self.migrate_engine, data)
         except exc.DBMigrationError:
-            msg = _LE("Failed to migrate to version %(ver)s on engine %(eng)s")
+            msg = "Failed to migrate to version %(ver)s on engine %(eng)s"
             LOG.error(msg, {"ver": version, "eng": self.migrate_engine})
             raise
 
@@ -45,8 +45,8 @@ from sqlalchemy import String
 from sqlalchemy import Table
 from sqlalchemy.types import NullType
 
+from oslo_db._i18n import _
 from oslo_db import exception
-from oslo_db._i18n import _, _LI, _LW
 from oslo_db.sqlalchemy import models
 
 # NOTE(ochuprykov): Add references for backwards compatibility
@@ -175,8 +175,8 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
     :return: The query with sorting/pagination added.
     """
     if _stable_sorting_order(model, sort_keys) is False:
-        LOG.warning(_LW('Unique keys not in sort_keys. '
-                        'The sorting order may be unstable.'))
+        LOG.warning('Unique keys not in sort_keys. '
+                    'The sorting order may be unstable.')
 
     assert(not (sort_dir and sort_dirs))
 
@@ -524,8 +524,8 @@ def drop_old_duplicate_entries_from_table(engine, table_name,
     rows_to_delete_select = sqlalchemy.sql.select(
         [table.c.id]).where(delete_condition)
     for row in engine.execute(rows_to_delete_select).fetchall():
-        LOG.info(_LI("Deleting duplicated row with id: %(id)s from table: "
-                     "%(table)s"), dict(id=row[0], table=table_name))
+        LOG.info("Deleting duplicated row with id: %(id)s from table: "
+                 "%(table)s", dict(id=row[0], table=table_name))
 
     if use_soft_delete:
         delete_statement = table.update().\