Remove the log module and all dependencies
This also removes the log test, but given we don't even have the oslo log module, there is no chance of a conflict.

Change-Id: I19fd454f860df7714b5ca901bdc0b134e58e0b4b
parent 84ed871797
commit 17b38e525f
@@ -1,186 +0,0 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


'''
JSON related utilities.

This module provides a few things:

    1) A handy function for getting an object down to something that can be
    JSON serialized. See to_primitive().

    2) Wrappers around loads() and dumps(). The dumps() wrapper will
    automatically use to_primitive() for you if needed.

    3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
    is available.
'''


import codecs
import datetime
import functools
import inspect
import itertools
import sys

if sys.version_info < (2, 7):
    # On Python <= 2.6, json module is not C boosted, so try to use
    # simplejson module if available
    try:
        import simplejson as json
    except ImportError:
        import json
else:
    import json

import six
import six.moves.xmlrpc_client as xmlrpclib

from mistralclient.openstack.common import gettextutils
from mistralclient.openstack.common import importutils
from mistralclient.openstack.common import strutils
from mistralclient.openstack.common import timeutils

netaddr = importutils.try_import("netaddr")

_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
                     inspect.isfunction, inspect.isgeneratorfunction,
                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
                     inspect.isabstract]

_simple_types = (six.string_types + six.integer_types
                 + (type(None), bool, float))


def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.

    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.

    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.

    Therefore, convert_instances=True is lossy ... be aware.

    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    #  51817 <type 'bool'>
    #  26164 <type 'list'>
    #   6491 <type 'float'>
    #    283 <type 'tuple'>
    #     19 <type 'long'>
    if isinstance(value, _simple_types):
        return value

    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value

    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)

    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'

    if level > max_depth:
        return '?'

    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in six.iteritems(value))
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]

        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])

        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif isinstance(value, gettextutils.Message):
            return value.data
        elif hasattr(value, 'iteritems'):
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif netaddr and isinstance(value, netaddr.IPAddress):
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)


def dumps(value, default=to_primitive, **kwargs):
    return json.dumps(value, default=default, **kwargs)


def loads(s, encoding='utf-8'):
    return json.loads(strutils.safe_decode(s, encoding))


def load(fp, encoding='utf-8'):
    return json.load(codecs.getreader(encoding)(fp))


try:
    import anyjson
except ImportError:
    pass
else:
    anyjson._modules.append((__name__, 'dumps', TypeError,
                             'loads', ValueError, 'load'))
    anyjson.force_implementation(__name__)
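For reference, the jsonutils helpers removed above exist so that objects the stdlib json module cannot handle on its own (most commonly datetimes) can still be serialized by passing a fallback converter as dumps(default=...). A minimal standalone sketch of that same pattern, using only the standard library rather than the removed module (the helper and sample data below are illustrative, not code from this repository):

import datetime
import json


def to_primitive(value):
    # Fallback used by json.dumps for anything it cannot serialize natively;
    # this mirrors the convert_datetime branch of the removed helper.
    if isinstance(value, datetime.datetime):
        return value.strftime('%Y-%m-%dT%H:%M:%S.%f')
    return str(value)


payload = {'name': 'wf1', 'created_at': datetime.datetime(2014, 1, 1, 12, 0)}
print(json.dumps(payload, default=to_primitive))
# {"name": "wf1", "created_at": "2014-01-01T12:00:00.000000"}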
@@ -1,45 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Local storage of variables using weak references"""

import threading
import weakref


class WeakLocal(threading.local):
    def __getattribute__(self, attr):
        rval = super(WeakLocal, self).__getattribute__(attr)
        if rval:
            # NOTE(mikal): this bit is confusing. What is stored is a weak
            # reference, not the value itself. We therefore need to lookup
            # the weak reference and return the inner value here.
            rval = rval()
        return rval

    def __setattr__(self, attr, value):
        value = weakref.ref(value)
        return super(WeakLocal, self).__setattr__(attr, value)


# NOTE(mikal): the name "store" should be deprecated in the future
store = WeakLocal()

# A "weak" store uses weak references and allows an object to fall out of scope
# when it falls out of scope in the code that uses the thread local storage. A
# "strong" store will hold a reference to the object so that it never falls out
# of scope.
weak_store = WeakLocal()
strong_store = threading.local()
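The local module removed above provides thread-local stores whose attributes are held only as weak references, so a context object can vanish as soon as the calling code drops its own reference. A small self-contained sketch of that behaviour, reusing the same WeakLocal idea (immediate collection assumes CPython's reference counting; the Context class is made up for the demo):

import threading
import weakref


class WeakLocal(threading.local):
    # Same idea as the removed class: attributes are stored as weak references.
    def __getattribute__(self, attr):
        rval = super(WeakLocal, self).__getattribute__(attr)
        if rval:
            rval = rval()  # dereference the weakref to get the real object
        return rval

    def __setattr__(self, attr, value):
        return super(WeakLocal, self).__setattr__(attr, weakref.ref(value))


class Context(object):
    pass


weak_store = WeakLocal()

ctx = Context()
weak_store.context = ctx
print(weak_store.context is ctx)  # True while a strong reference still exists
del ctx                           # drop the only strong reference
print(weak_store.context)         # None: the weak reference has expired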
@@ -1,723 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""OpenStack logging handler.

This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.

It also allows setting of formatting information through conf.

"""

import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import re
import sys
import traceback

from oslo.config import cfg
import six
from six import moves

from mistralclient.openstack.common.gettextutils import _
from mistralclient.openstack.common import importutils
from mistralclient.openstack.common import jsonutils
from mistralclient.openstack.common import local


_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"

_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']

# NOTE(ldbragst): Let's build a list of regex objects using the list of
# _SANITIZE_KEYS we already have. This way, we only have to add the new key
# to the list of _SANITIZE_KEYS and we can generate regular expressions
# for XML and JSON automatically.
_SANITIZE_PATTERNS = []
_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
                    r'(<%(key)s>).*?(</%(key)s>)',
                    r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
                    r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
                    r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])'
                    '.*?([\'"])',
                    r'(%(key)s\s*--?[A-z]+\s*).*?([\s])']

for key in _SANITIZE_KEYS:
    for pattern in _FORMAT_PATTERNS:
        reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
        _SANITIZE_PATTERNS.append(reg_ex)


common_cli_opts = [
    cfg.BoolOpt('debug',
                short='d',
                default=False,
                help='Print debugging output (set logging level to '
                     'DEBUG instead of default WARNING level).'),
    cfg.BoolOpt('verbose',
                short='v',
                default=False,
                help='Print more verbose output (set logging level to '
                     'INFO instead of default WARNING level).'),
]

logging_cli_opts = [
    cfg.StrOpt('log-config-append',
               metavar='PATH',
               deprecated_name='log-config',
               help='The name of a logging configuration file. This file '
                    'is appended to any existing logging configuration '
                    'files. For details about logging configuration files, '
                    'see the Python logging module documentation.'),
    cfg.StrOpt('log-format',
               metavar='FORMAT',
               help='DEPRECATED. '
                    'A logging.Formatter log message format string which may '
                    'use any of the available logging.LogRecord attributes. '
                    'This option is deprecated. Please use '
                    'logging_context_format_string and '
                    'logging_default_format_string instead.'),
    cfg.StrOpt('log-date-format',
               default=_DEFAULT_LOG_DATE_FORMAT,
               metavar='DATE_FORMAT',
               help='Format string for %%(asctime)s in log records. '
                    'Default: %(default)s .'),
    cfg.StrOpt('log-file',
               metavar='PATH',
               deprecated_name='logfile',
               help='(Optional) Name of log file to output to. '
                    'If no default is set, logging will go to stdout.'),
    cfg.StrOpt('log-dir',
               deprecated_name='logdir',
               help='(Optional) The base directory used for relative '
                    '--log-file paths.'),
    cfg.BoolOpt('use-syslog',
                default=False,
                help='Use syslog for logging. '
                     'Existing syslog format is DEPRECATED during I, '
                     'and will change in J to honor RFC5424.'),
    cfg.BoolOpt('use-syslog-rfc-format',
                # TODO(bogdando) remove or use True after existing
                # syslog format deprecation in J
                default=False,
                help='(Optional) Enables or disables syslog rfc5424 format '
                     'for logging. If enabled, prefixes the MSG part of the '
                     'syslog message with APP-NAME (RFC5424). The '
                     'format without the APP-NAME is deprecated in I, '
                     'and will be removed in J.'),
    cfg.StrOpt('syslog-log-facility',
               default='LOG_USER',
               help='Syslog facility to receive log lines.')
]

generic_log_opts = [
    cfg.BoolOpt('use_stderr',
                default=True,
                help='Log output to standard error.')
]

log_opts = [
    cfg.StrOpt('logging_context_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [%(request_id)s %(user_identity)s] '
                       '%(instance)s%(message)s',
               help='Format string to use for log messages with context.'),
    cfg.StrOpt('logging_default_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [-] %(instance)s%(message)s',
               help='Format string to use for log messages without context.'),
    cfg.StrOpt('logging_debug_format_suffix',
               default='%(funcName)s %(pathname)s:%(lineno)d',
               help='Data to append to log format when level is DEBUG.'),
    cfg.StrOpt('logging_exception_prefix',
               default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                       '%(instance)s',
               help='Prefix each line of exception output with this format.'),
    cfg.ListOpt('default_log_levels',
                default=[
                    'amqp=WARN',
                    'amqplib=WARN',
                    'boto=WARN',
                    'qpid=WARN',
                    'sqlalchemy=WARN',
                    'suds=INFO',
                    'oslo.messaging=INFO',
                    'iso8601=WARN',
                    'requests.packages.urllib3.connectionpool=WARN'
                ],
                help='List of logger=LEVEL pairs.'),
    cfg.BoolOpt('publish_errors',
                default=False,
                help='Enables or disables publication of error events.'),
    cfg.BoolOpt('fatal_deprecations',
                default=False,
                help='Enables or disables fatal status of deprecations.'),

    # NOTE(mikal): there are two options here because sometimes we are handed
    # a full instance (and could include more information), and other times we
    # are just handed a UUID for the instance.
    cfg.StrOpt('instance_format',
               default='[instance: %(uuid)s] ',
               help='The format for an instance that is passed with the log '
                    'message. '),
    cfg.StrOpt('instance_uuid_format',
               default='[instance: %(uuid)s] ',
               help='The format for an instance UUID that is passed with the '
                    'log message. '),
]

CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)

# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
#                module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')


try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            self.lock = None


def _dictify_context(context):
    if context is None:
        return None
    if not isinstance(context, dict) and getattr(context, 'to_dict', None):
        context = context.to_dict()
    return context


def _get_binary_name():
    return os.path.basename(inspect.stack()[-1][1])


def _get_log_file_path(binary=None):
    logfile = CONF.log_file
    logdir = CONF.log_dir

    if logfile and not logdir:
        return logfile

    if logfile and logdir:
        return os.path.join(logdir, logfile)

    if logdir:
        binary = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, binary),)

    return None


def mask_password(message, secret="***"):
    """Replace password with 'secret' in message.

    :param message: The string which includes security information.
    :param secret: value with which to replace passwords.
    :returns: The unicode value of message with the password fields masked.

    For example:

    >>> mask_password("'adminPass' : 'aaaaa'")
    "'adminPass' : '***'"
    >>> mask_password("'admin_pass' : 'aaaaa'")
    "'admin_pass' : '***'"
    >>> mask_password('"password" : "aaaaa"')
    '"password" : "***"'
    >>> mask_password("'original_password' : 'aaaaa'")
    "'original_password' : '***'"
    >>> mask_password("u'original_password' : u'aaaaa'")
    "u'original_password' : u'***'"
    """
    message = six.text_type(message)

    # NOTE(ldbragst): Check to see if anything in message contains any key
    # specified in _SANITIZE_KEYS, if not then just return the message since
    # we don't have to mask any passwords.
    if not any(key in message for key in _SANITIZE_KEYS):
        return message

    secret = r'\g<1>' + secret + r'\g<2>'
    for pattern in _SANITIZE_PATTERNS:
        message = re.sub(pattern, secret, message)
    return message


class BaseLoggerAdapter(logging.LoggerAdapter):

    def audit(self, msg, *args, **kwargs):
        self.log(logging.AUDIT, msg, *args, **kwargs)


class LazyAdapter(BaseLoggerAdapter):
    def __init__(self, name='unknown', version='unknown'):
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version

    @property
    def logger(self):
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
        return self._logger


class ContextAdapter(BaseLoggerAdapter):
    warn = logging.LoggerAdapter.warning

    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
        self._deprecated_messages_sent = dict()

    @property
    def handlers(self):
        return self.logger.handlers

    def deprecated(self, msg, *args, **kwargs):
        """Call this method when a deprecated feature is used.

        If the system is configured for fatal deprecations then the message
        is logged at the 'critical' level and :class:`DeprecatedConfig` will
        be raised.

        Otherwise, the message will be logged (once) at the 'warn' level.

        :raises: :class:`DeprecatedConfig` if the system is configured for
                 fatal deprecations.

        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)

        # Using a list because a tuple with dict can't be stored in a set.
        sent_args = self._deprecated_messages_sent.setdefault(msg, list())

        if args in sent_args:
            # Already logged this message, so don't log it again.
            return

        sent_args.append(args)
        self.warn(stdmsg, *args, **kwargs)

    def process(self, msg, kwargs):
        # NOTE(mrodden): catch any Message/other object and
        #                coerce to unicode before they can get
        #                to the python logging and possibly
        #                cause string encoding trouble
        if not isinstance(msg, six.string_types):
            msg = six.text_type(msg)

        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']

        context = kwargs.pop('context', None)
        if not context:
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))

        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid') or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra['instance'] = instance_extra

        extra.setdefault('user_identity', kwargs.pop('user_identity', None))

        extra['project'] = self.project
        extra['version'] = self.version
        extra['extra'] = extra.copy()
        return msg, kwargs


class JSONFormatter(logging.Formatter):
    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt

    def formatException(self, ei, strip_newlines=True):
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            lines = [moves.filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines

    def format(self, record):
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}

        if hasattr(record, 'extra'):
            message['extra'] = record.extra

        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)

        return jsonutils.dumps(message)


def _create_logging_excepthook(product_name):
    def logging_excepthook(exc_type, value, tb):
        extra = {'exc_info': (exc_type, value, tb)}
        getLogger(product_name).critical(
            "".join(traceback.format_exception_only(exc_type, value)),
            **extra)
    return logging_excepthook


class LogConfigError(Exception):

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % dict(log_config=self.log_config,
                                   err_msg=self.err_msg)


def _load_log_config(log_config_append):
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except moves.configparser.Error as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))


def setup(product_name, version='unknown'):
    """Setup logging."""
    if CONF.log_config_append:
        _load_log_config(CONF.log_config_append)
    else:
        _setup_logging_from_conf(product_name, version)
    sys.excepthook = _create_logging_excepthook(product_name)


def set_defaults(logging_context_format_string):
    cfg.set_defaults(log_opts,
                     logging_context_format_string=
                     logging_context_format_string)


def _find_facility_from_conf():
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)

    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)

    if facility is None:
        valid_facilities = facility_names.keys()
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))

    return facility


class RFCSysLogHandler(logging.handlers.SysLogHandler):
    def __init__(self, *args, **kwargs):
        self.binary_name = _get_binary_name()
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        # is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)

    def format(self, record):
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        # is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        msg = logging.handlers.SysLogHandler.format(self, record)
        msg = self.binary_name + ' ' + msg
        return msg


def _setup_logging_from_conf(project, version):
    log_root = getLogger(None).logger
    for handler in log_root.handlers:
        log_root.removeHandler(handler)

    if CONF.use_syslog:
        facility = _find_facility_from_conf()
        # TODO(bogdando) use the format provided by RFCSysLogHandler
        #   after existing syslog format deprecation in J
        if CONF.use_syslog_rfc_format:
            syslog = RFCSysLogHandler(address='/dev/log',
                                      facility=facility)
        else:
            syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                    facility=facility)
        log_root.addHandler(syslog)

    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)

    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)

    elif not logpath:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)

    if CONF.publish_errors:
        handler = importutils.import_object(
            "mistralclient.openstack.common.log_handler.PublishErrorsHandler",
            logging.ERROR)
        log_root.addHandler(handler)

    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(project=project,
                                                  version=version,
                                                  datefmt=datefmt))

    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)

    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
        # to integer code.
        if sys.version_info < (2, 7):
            level = logging.getLevelName(level_name)
            logger.setLevel(level)
        else:
            logger.setLevel(level_name)


_loggers = {}


def getLogger(name='unknown', version='unknown'):
    if name not in _loggers:
        _loggers[name] = ContextAdapter(logging.getLogger(name),
                                        name,
                                        version)
    return _loggers[name]


def getLazyLogger(name='unknown', version='unknown'):
    """Returns lazy logger.

    Creates a pass-through logger that does not create the real logger
    until it is really needed and delegates all calls to the real logger
    once it is created.
    """
    return LazyAdapter(name, version)


class WritableLogger(object):
    """A thin wrapper that responds to `write` and logs."""

    def __init__(self, logger, level=logging.INFO):
        self.logger = logger
        self.level = level

    def write(self, msg):
        self.logger.log(self.level, msg.rstrip())


class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string. You can also specify
    logging_debug_format_suffix to append extra formatting if the log level is
    debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter

    If available, uses the context value stored in TLS - local.store.context

    """

    def __init__(self, *args, **kwargs):
        """Initialize ContextFormatter instance

        Takes additional keyword arguments which can be used in the message
        format string.

        :keyword project: project name
        :type project: string
        :keyword version: project version
        :type version: string

        """

        self.project = kwargs.pop('project', 'unknown')
        self.version = kwargs.pop('version', 'unknown')

        logging.Formatter.__init__(self, *args, **kwargs)

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""

        # store project info
        record.project = self.project
        record.version = self.version

        # store request info
        context = getattr(local.store, 'context', None)
        if context:
            d = _dictify_context(context)
            for k, v in d.items():
                setattr(record, k, v)

        # NOTE(sdague): default the fancier formatting params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color', 'user_identity'):
            if key not in record.__dict__:
                record.__dict__[key] = ''

        if record.__dict__.get('request_id'):
            self._fmt = CONF.logging_context_format_string
        else:
            self._fmt = CONF.logging_default_format_string

        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            self._fmt += " " + CONF.logging_debug_format_suffix

        # Cache this on the record, Logger will respect our formatted copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        if not record:
            return logging.Formatter.formatException(self, exc_info)

        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()

        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)

        formatted_lines = []
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)


class ColorHandler(logging.StreamHandler):
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)


class DeprecatedConfig(Exception):
    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        super(Exception, self).__init__(self.message % dict(msg=msg))
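Among the helpers deleted with the log module above, mask_password() is worth a short illustration: it rewrites anything that looks like a credential assignment before it reaches a log line. Below is a trimmed, self-contained sketch that reuses two of the regex patterns and the key list from the module above; the example strings are made up:

import re

# Same key list and two of the patterns from the removed module.
_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
                    r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])']

_SANITIZE_PATTERNS = [re.compile(p % {'key': k}, re.DOTALL)
                      for k in _SANITIZE_KEYS for p in _FORMAT_PATTERNS]


def mask_password(message, secret="***"):
    # Keep group 1 (the key plus its delimiter) and group 2 (the closing
    # quote), and replace whatever sat between them with the placeholder.
    if not any(key in message for key in _SANITIZE_KEYS):
        return message
    replacement = r'\g<1>' + secret + r'\g<2>'
    for pattern in _SANITIZE_PATTERNS:
        message = re.sub(pattern, replacement, message)
    return message


print(mask_password('"password" : "aaaaa"'))   # "password" : "***"
print(mask_password("'adminPass' : 'bbbbb'"))  # 'adminPass' : '***'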
@@ -1,210 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Time related utilities and helper functions.
"""

import calendar
import datetime
import time

import iso8601
import six


# ISO 8601 extended time format with microseconds
_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND


def isotime(at=None, subsecond=False):
    """Stringify time in ISO 8601 format."""
    if not at:
        at = utcnow()
    st = at.strftime(_ISO8601_TIME_FORMAT
                     if not subsecond
                     else _ISO8601_TIME_FORMAT_SUBSECOND)
    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    st += ('Z' if tz == 'UTC' else tz)
    return st


def parse_isotime(timestr):
    """Parse time from ISO 8601 format."""
    try:
        return iso8601.parse_date(timestr)
    except iso8601.ParseError as e:
        raise ValueError(six.text_type(e))
    except TypeError as e:
        raise ValueError(six.text_type(e))


def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
    """Returns formatted utcnow."""
    if not at:
        at = utcnow()
    return at.strftime(fmt)


def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
    """Turn a formatted time back into a datetime."""
    return datetime.datetime.strptime(timestr, fmt)


def normalize_time(timestamp):
    """Normalize time in arbitrary timezone to UTC naive object."""
    offset = timestamp.utcoffset()
    if offset is None:
        return timestamp
    return timestamp.replace(tzinfo=None) - offset


def is_older_than(before, seconds):
    """Return True if before is older than seconds."""
    if isinstance(before, six.string_types):
        before = parse_strtime(before).replace(tzinfo=None)
    else:
        before = before.replace(tzinfo=None)

    return utcnow() - before > datetime.timedelta(seconds=seconds)


def is_newer_than(after, seconds):
    """Return True if after is newer than seconds."""
    if isinstance(after, six.string_types):
        after = parse_strtime(after).replace(tzinfo=None)
    else:
        after = after.replace(tzinfo=None)

    return after - utcnow() > datetime.timedelta(seconds=seconds)


def utcnow_ts():
    """Timestamp version of our utcnow function."""
    if utcnow.override_time is None:
        # NOTE(kgriffs): This is several times faster
        # than going through calendar.timegm(...)
        return int(time.time())

    return calendar.timegm(utcnow().timetuple())


def utcnow():
    """Overridable version of utils.utcnow."""
    if utcnow.override_time:
        try:
            return utcnow.override_time.pop(0)
        except AttributeError:
            return utcnow.override_time
    return datetime.datetime.utcnow()


def iso8601_from_timestamp(timestamp):
    """Returns a iso8601 formatted date from timestamp."""
    return isotime(datetime.datetime.utcfromtimestamp(timestamp))


utcnow.override_time = None


def set_time_override(override_time=None):
    """Overrides utils.utcnow.

    Make it return a constant time or a list thereof, one at a time.

    :param override_time: datetime instance or list thereof. If not
                          given, defaults to the current UTC time.
    """
    utcnow.override_time = override_time or datetime.datetime.utcnow()


def advance_time_delta(timedelta):
    """Advance overridden time using a datetime.timedelta."""
    assert(not utcnow.override_time is None)
    try:
        for dt in utcnow.override_time:
            dt += timedelta
    except TypeError:
        utcnow.override_time += timedelta


def advance_time_seconds(seconds):
    """Advance overridden time by seconds."""
    advance_time_delta(datetime.timedelta(0, seconds))


def clear_time_override():
    """Remove the overridden time."""
    utcnow.override_time = None


def marshall_now(now=None):
    """Make an rpc-safe datetime with microseconds.

    Note: tzinfo is stripped, but not required for relative times.
    """
    if not now:
        now = utcnow()
    return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
                minute=now.minute, second=now.second,
                microsecond=now.microsecond)


def unmarshall_time(tyme):
    """Unmarshall a datetime dict."""
    return datetime.datetime(day=tyme['day'],
                             month=tyme['month'],
                             year=tyme['year'],
                             hour=tyme['hour'],
                             minute=tyme['minute'],
                             second=tyme['second'],
                             microsecond=tyme['microsecond'])


def delta_seconds(before, after):
    """Return the difference between two timing objects.

    Compute the difference in seconds between two date, time, or
    datetime objects (as a float, to microsecond resolution).
    """
    delta = after - before
    return total_seconds(delta)


def total_seconds(delta):
    """Return the total seconds of datetime.timedelta object.

    Compute total seconds of datetime.timedelta, datetime.timedelta
    doesn't have method total_seconds in Python2.6, calculate it manually.
    """
    try:
        return delta.total_seconds()
    except AttributeError:
        return ((delta.days * 24 * 3600) + delta.seconds +
                float(delta.microseconds) / (10 ** 6))


def is_soon(dt, window):
    """Determines if time is going to happen in the next window seconds.

    :param dt: the time
    :param window: minimum seconds to remain to consider the time not soon

    :return: True if expiration is within the given duration
    """
    soon = (utcnow() + datetime.timedelta(seconds=window))
    return normalize_time(dt) <= soon
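One detail of the timeutils module removed above is the total_seconds() fallback, which reproduces datetime.timedelta.total_seconds() by hand because Python 2.6 lacked that method. A quick self-contained check of that arithmetic against the stdlib method (the sample delta is made up):

import datetime

delta = datetime.timedelta(days=1, seconds=30, microseconds=500000)

# Manual computation used by the removed fallback for Python 2.6.
manual = ((delta.days * 24 * 3600) + delta.seconds +
          float(delta.microseconds) / (10 ** 6))

print(manual)                 # 86430.5
print(delta.total_seconds())  # 86430.5, the same value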
@@ -1,26 +0,0 @@
# Copyright 2014 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from mistralclient.openstack.common import log as logging
from mistralclient.tests import base


LOG = logging.getLogger(__name__)


class TestSolumclient(base.BaseCommandTest):

    def test_can_use_oslo_logging(self):
        # Just showing that we can import and use logging
        LOG.info('Nothing to see here.')
@@ -3,7 +3,6 @@
 # The list of modules to copy from oslo-incubator.git
 # TODO(rakhmerov): We'll need to use apiclient later.
 module=cliutils
-module=log

 # The base module to hold the copy of openstack.common
 base=mistralclient