Sync with oslo-incubator
Remove obsoleted modules from the tree and sync with oslo-incubator at commit
838a2a31fe009feb2501716d6656eeea426ea357. The updated lockfile module requires
the lock_path option to be set, so make sure it is set in the test suite.

Closes-Bug: #1382189
Change-Id: I43ca76a21bd5bdb45f23af9b99b96aefa635ccc1
parent 2c127a0b57
commit 0c93feeb5b
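The lock_path requirement mentioned in the commit message can be satisfied in tests by pointing the option at a throwaway directory. A minimal sketch, assuming a testtools/fixtures based test case; the class name and setup helpers here are illustrative, not manila's actual test utilities:

import fixtures
import testtools
from oslo.config import cfg

CONF = cfg.CONF


class LockRequiringTestCase(testtools.TestCase):  # hypothetical base class
    def setUp(self):
        super(LockRequiringTestCase, self).setUp()
        # Give external locks somewhere to live so lockutils does not
        # raise RequiredOptError('lock_path') during the test run.
        lock_dir = self.useFixture(fixtures.TempDir()).path
        CONF.set_override('lock_path', lock_dir)
        self.addCleanup(CONF.clear_override, 'lock_path')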
@@ -1,17 +0,0 @@
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import six
-
-
-six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))
manila/openstack/common/_i18n.py (new file, 40 lines)
@@ -0,0 +1,40 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""oslo.i18n integration module.
+
+See http://docs.openstack.org/developer/oslo.i18n/usage.html
+
+"""
+
+import oslo.i18n
+
+
+# NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
+# application name when this module is synced into the separate
+# repository. It is OK to have more than one translation function
+# using the same domain, since there will still only be one message
+# catalog.
+_translators = oslo.i18n.TranslatorFactory(domain='manila')
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
+
+# Translators for log levels.
+#
+# The abbreviated names are meant to reflect the usual use of a short
+# name like '_'. The "L" is for "log" and the other letter comes from
+# the level.
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
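For context, the translation markers defined in the new _i18n module are used as shown below; a hedged sketch, with the function and logger names being illustrative only:

from manila.openstack.common._i18n import _, _LE, _LI
from manila.openstack.common import log as logging

LOG = logging.getLogger(__name__)


def create_share(share_id):
    # _LI marks log-info messages, _LE log-error messages, _ user-facing text
    LOG.info(_LI('Creating share %s'), share_id)
    try:
        pass  # driver call would go here
    except Exception:
        LOG.exception(_LE('Failed to create share %s'), share_id)
        raise ValueError(_('share %s could not be created') % share_id)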
@@ -29,7 +29,7 @@ import eventlet.backdoor
 import greenlet
 from oslo.config import cfg
 
-from manila.openstack.common.gettextutils import _LI
+from manila.openstack.common._i18n import _LI
 from manila.openstack.common import log as logging
 
 help_for_backdoor_port = (
@@ -1,113 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# Copyright 2012, Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Exception related utilities.
-"""
-
-import logging
-import sys
-import time
-import traceback
-
-import six
-
-from manila.openstack.common.gettextutils import _LE
-
-
-class save_and_reraise_exception(object):
-    """Save current exception, run some code and then re-raise.
-
-    In some cases the exception context can be cleared, resulting in None
-    being attempted to be re-raised after an exception handler is run. This
-    can happen when eventlet switches greenthreads or when running an
-    exception handler, code raises and catches an exception. In both
-    cases the exception context will be cleared.
-
-    To work around this, we save the exception state, run handler code, and
-    then re-raise the original exception. If another exception occurs, the
-    saved exception is logged and the new exception is re-raised.
-
-    In some cases the caller may not want to re-raise the exception, and
-    for those circumstances this context provides a reraise flag that
-    can be used to suppress the exception. For example::
-
-      except Exception:
-          with save_and_reraise_exception() as ctxt:
-              decide_if_need_reraise()
-              if not should_be_reraised:
-                  ctxt.reraise = False
-
-    If another exception occurs and reraise flag is False,
-    the saved exception will not be logged.
-
-    If the caller wants to raise new exception during exception handling
-    he/she sets reraise to False initially with an ability to set it back to
-    True if needed::
-
-      except Exception:
-          with save_and_reraise_exception(reraise=False) as ctxt:
-              [if statements to determine whether to raise a new exception]
-              # Not raising a new exception, so reraise
-              ctxt.reraise = True
-    """
-    def __init__(self, reraise=True):
-        self.reraise = reraise
-
-    def __enter__(self):
-        self.type_, self.value, self.tb, = sys.exc_info()
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        if exc_type is not None:
-            if self.reraise:
-                logging.error(_LE('Original exception being dropped: %s'),
-                              traceback.format_exception(self.type_,
-                                                         self.value,
-                                                         self.tb))
-            return False
-        if self.reraise:
-            six.reraise(self.type_, self.value, self.tb)
-
-
-def forever_retry_uncaught_exceptions(infunc):
-    def inner_func(*args, **kwargs):
-        last_log_time = 0
-        last_exc_message = None
-        exc_count = 0
-        while True:
-            try:
-                return infunc(*args, **kwargs)
-            except Exception as exc:
-                this_exc_message = six.u(str(exc))
-                if this_exc_message == last_exc_message:
-                    exc_count += 1
-                else:
-                    exc_count = 1
-                # Do not log any more frequently than once a minute unless
-                # the exception message changes
-                cur_time = int(time.time())
-                if (cur_time - last_log_time > 60 or
-                        this_exc_message != last_exc_message):
-                    logging.exception(
-                        _LE('Unexpected exception occurred %d time(s)... '
-                            'retrying.') % exc_count)
-                    last_log_time = cur_time
-                    last_exc_message = this_exc_message
-                    exc_count = 0
-                # This should be a very rare event. In case it isn't, do
-                # a sleep.
-                time.sleep(1)
-    return inner_func
@@ -18,7 +18,8 @@ import errno
 import os
 import tempfile
 
-from manila.openstack.common import excutils
+from oslo.utils import excutils
+
 from manila.openstack.common import log as logging
 
 LOG = logging.getLogger(__name__)
@@ -1,73 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Import related utilities and helper functions.
-"""
-
-import sys
-import traceback
-
-
-def import_class(import_str):
-    """Returns a class from a string including module and class."""
-    mod_str, _sep, class_str = import_str.rpartition('.')
-    __import__(mod_str)
-    try:
-        return getattr(sys.modules[mod_str], class_str)
-    except AttributeError:
-        raise ImportError('Class %s cannot be found (%s)' %
-                          (class_str,
-                           traceback.format_exception(*sys.exc_info())))
-
-
-def import_object(import_str, *args, **kwargs):
-    """Import a class and return an instance of it."""
-    return import_class(import_str)(*args, **kwargs)
-
-
-def import_object_ns(name_space, import_str, *args, **kwargs):
-    """Tries to import object from default namespace.
-
-    Imports a class and return an instance of it, first by trying
-    to find the class in a default namespace, then failing back to
-    a full path if not found in the default namespace.
-    """
-    import_value = "%s.%s" % (name_space, import_str)
-    try:
-        return import_class(import_value)(*args, **kwargs)
-    except ImportError:
-        return import_class(import_str)(*args, **kwargs)
-
-
-def import_module(import_str):
-    """Import a module."""
-    __import__(import_str)
-    return sys.modules[import_str]
-
-
-def import_versioned_module(version, submodule=None):
-    module = 'manila.v%s' % version
-    if submodule:
-        module = '.'.join((module, submodule))
-    return import_module(module)
-
-
-def try_import(import_str, default=None):
-    """Try to import a module and if it fails return default."""
-    try:
-        return import_module(import_str)
-    except ImportError:
-        return default
@@ -1,190 +0,0 @@
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Justin Santa Barbara
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-'''
-JSON related utilities.
-
-This module provides a few things:
-
-    1) A handy function for getting an object down to something that can be
-    JSON serialized. See to_primitive().
-
-    2) Wrappers around loads() and dumps(). The dumps() wrapper will
-    automatically use to_primitive() for you if needed.
-
-    3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
-    is available.
-'''
-
-
-import codecs
-import datetime
-import functools
-import inspect
-import itertools
-import sys
-
-if sys.version_info < (2, 7):
-    # On Python <= 2.6, json module is not C boosted, so try to use
-    # simplejson module if available
-    try:
-        import simplejson as json
-    except ImportError:
-        import json
-else:
-    import json
-
-import six
-import six.moves.xmlrpc_client as xmlrpclib
-
-from manila.openstack.common import gettextutils
-from manila.openstack.common import importutils
-from manila.openstack.common import strutils
-from manila.openstack.common import timeutils
-
-netaddr = importutils.try_import("netaddr")
-
-_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
-                     inspect.isfunction, inspect.isgeneratorfunction,
-                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
-                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
-                     inspect.isabstract]
-
-_simple_types = (six.string_types + six.integer_types
-                 + (type(None), bool, float))
-
-
-def to_primitive(value, convert_instances=False, convert_datetime=True,
-                 level=0, max_depth=3):
-    """Convert a complex object into primitives.
-
-    Handy for JSON serialization. We can optionally handle instances,
-    but since this is a recursive function, we could have cyclical
-    data structures.
-
-    To handle cyclical data structures we could track the actual objects
-    visited in a set, but not all objects are hashable. Instead we just
-    track the depth of the object inspections and don't go too deep.
-
-    Therefore, convert_instances=True is lossy ... be aware.
-
-    """
-    # handle obvious types first - order of basic types determined by running
-    # full tests on nova project, resulting in the following counts:
-    # 572754 <type 'NoneType'>
-    # 460353 <type 'int'>
-    # 379632 <type 'unicode'>
-    # 274610 <type 'str'>
-    # 199918 <type 'dict'>
-    # 114200 <type 'datetime.datetime'>
-    #  51817 <type 'bool'>
-    #  26164 <type 'list'>
-    #   6491 <type 'float'>
-    #    283 <type 'tuple'>
-    #     19 <type 'long'>
-    if isinstance(value, _simple_types):
-        return value
-
-    if isinstance(value, datetime.datetime):
-        if convert_datetime:
-            return timeutils.strtime(value)
-        else:
-            return value
-
-    # value of itertools.count doesn't get caught by nasty_type_tests
-    # and results in infinite loop when list(value) is called.
-    if type(value) == itertools.count:
-        return six.text_type(value)
-
-    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
-    #              tests that raise an exception in a mocked method that
-    #              has a @wrap_exception with a notifier will fail. If
-    #              we up the dependency to 0.5.4 (when it is released) we
-    #              can remove this workaround.
-    if getattr(value, '__module__', None) == 'mox':
-        return 'mock'
-
-    if level > max_depth:
-        return '?'
-
-    # The try block may not be necessary after the class check above,
-    # but just in case ...
-    try:
-        recursive = functools.partial(to_primitive,
-                                      convert_instances=convert_instances,
-                                      convert_datetime=convert_datetime,
-                                      level=level,
-                                      max_depth=max_depth)
-        if isinstance(value, dict):
-            return dict((k, recursive(v)) for k, v in six.iteritems(value))
-        elif isinstance(value, (list, tuple)):
-            return [recursive(lv) for lv in value]
-
-        # It's not clear why xmlrpclib created their own DateTime type, but
-        # for our purposes, make it a datetime type which is explicitly
-        # handled
-        if isinstance(value, xmlrpclib.DateTime):
-            value = datetime.datetime(*tuple(value.timetuple())[:6])
-
-        if convert_datetime and isinstance(value, datetime.datetime):
-            return timeutils.strtime(value)
-        elif isinstance(value, gettextutils.Message):
-            return value.data
-        elif hasattr(value, 'iteritems'):
-            return recursive(dict(value.iteritems()), level=level + 1)
-        elif hasattr(value, '__iter__'):
-            return recursive(list(value))
-        elif convert_instances and hasattr(value, '__dict__'):
-            # Likely an instance of something. Watch for cycles.
-            # Ignore class member vars.
-            return recursive(value.__dict__, level=level + 1)
-        elif netaddr and isinstance(value, netaddr.IPAddress):
-            return six.text_type(value)
-        else:
-            if any(test(value) for test in _nasty_type_tests):
-                return six.text_type(value)
-            return value
-    except TypeError:
-        # Class objects are tricky since they may define something like
-        # __iter__ defined but it isn't callable as list().
-        return six.text_type(value)
-
-
-def dumps(value, default=to_primitive, **kwargs):
-    return json.dumps(value, default=default, **kwargs)
-
-
-def dump(obj, fp, *args, **kwargs):
-    return json.dump(obj, fp, *args, **kwargs)
-
-
-def loads(s, encoding='utf-8', **kwargs):
-    return json.loads(strutils.safe_decode(s, encoding), **kwargs)
-
-
-def load(fp, encoding='utf-8', **kwargs):
-    return json.load(codecs.getreader(encoding)(fp), **kwargs)
-
-
-try:
-    import anyjson
-except ImportError:
-    pass
-else:
-    anyjson._modules.append((__name__, 'dumps', TypeError,
-                             'loads', ValueError, 'load'))
-    anyjson.force_implementation(__name__)
@@ -16,6 +16,7 @@
 import contextlib
 import errno
 import functools
+import logging
 import os
 import shutil
 import subprocess
@@ -28,8 +29,7 @@ import weakref
 from oslo.config import cfg
 
 from manila.openstack.common import fileutils
-from manila.openstack.common.gettextutils import _, _LE, _LI
-from manila.openstack.common import log as logging
+from manila.openstack.common._i18n import _, _LE, _LI
 
 
 LOG = logging.getLogger(__name__)
@@ -101,10 +101,8 @@ class _FileLock(object):
                 raise threading.ThreadError(_("Unable to acquire lock on"
                                               " `%(filename)s` due to"
                                               " %(exception)s") %
-                                            {
-                                                'filename': self.fname,
-                                                'exception': e,
-                                            })
+                                            {'filename': self.fname,
+                                             'exception': e})
 
     def __enter__(self):
         self.acquire()
@@ -148,58 +146,12 @@ class _FcntlLock(_FileLock):
         fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
 
 
-class _PosixLock(object):
-    def __init__(self, name):
-        # Hash the name because it's not valid to have POSIX semaphore
-        # names with things like / in them. Then use base64 to encode
-        # the digest() instead taking the hexdigest() because the
-        # result is shorter and most systems can't have shm sempahore
-        # names longer than 31 characters.
-        h = hashlib.sha1()
-        h.update(name.encode('ascii'))
-        self.name = str((b'/' + base64.urlsafe_b64encode(
-            h.digest())).decode('ascii'))
-
-    def acquire(self, timeout=None):
-        self.semaphore = posix_ipc.Semaphore(self.name,
-                                             flags=posix_ipc.O_CREAT,
-                                             initial_value=1)
-        self.semaphore.acquire(timeout)
-        return self
-
-    def __enter__(self):
-        self.acquire()
-        return self
-
-    def release(self):
-        self.semaphore.release()
-        self.semaphore.close()
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.release()
-
-    def exists(self):
-        try:
-            semaphore = posix_ipc.Semaphore(self.name)
-        except posix_ipc.ExistentialError:
-            return False
-        else:
-            semaphore.close()
-        return True
-
-
 if os.name == 'nt':
     import msvcrt
     InterProcessLock = _WindowsLock
-    FileLock = _WindowsLock
 else:
-    import base64
     import fcntl
-    import hashlib
-
-    import posix_ipc
-    InterProcessLock = _PosixLock
-    FileLock = _FcntlLock
+    InterProcessLock = _FcntlLock
 
 _semaphores = weakref.WeakValueDictionary()
 _semaphores_lock = threading.Lock()
@@ -216,11 +168,7 @@ def _get_lock_path(name, lock_file_prefix, lock_path=None):
     local_lock_path = lock_path or CONF.lock_path
 
     if not local_lock_path:
-        # NOTE(bnemec): Create a fake lock path for posix locks so we don't
-        # unnecessarily raise the RequiredOptError below.
-        if InterProcessLock is not _PosixLock:
-            raise cfg.RequiredOptError('lock_path')
-        local_lock_path = 'posixlock:/'
+        raise cfg.RequiredOptError('lock_path')
 
     return os.path.join(local_lock_path, name)
 
@@ -231,11 +179,6 @@ def external_lock(name, lock_file_prefix=None, lock_path=None):
 
     lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path)
 
-    # NOTE(bnemec): If an explicit lock_path was passed to us then it
-    # means the caller is relying on file-based locking behavior, so
-    # we can't use posix locks for those calls.
-    if lock_path:
-        return FileLock(lock_file_path)
     return InterProcessLock(lock_file_path)
 
 
@@ -256,11 +199,12 @@ def internal_lock(name):
     with _semaphores_lock:
         try:
             sem = _semaphores[name]
+            LOG.debug('Using existing semaphore "%s"', name)
         except KeyError:
             sem = threading.Semaphore()
             _semaphores[name] = sem
+            LOG.debug('Created new semaphore "%s"', name)
 
-    LOG.debug('Got semaphore "%(lock)s"', {'lock': name})
     return sem
 
 
@@ -282,13 +226,16 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None):
     """
     int_lock = internal_lock(name)
     with int_lock:
-        if external and not CONF.disable_process_locking:
-            ext_lock = external_lock(name, lock_file_prefix, lock_path)
-            with ext_lock:
-                yield ext_lock
-        else:
-            yield int_lock
-    LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
+        LOG.debug('Acquired semaphore "%(lock)s"', {'lock': name})
+        try:
+            if external and not CONF.disable_process_locking:
+                ext_lock = external_lock(name, lock_file_prefix, lock_path)
+                with ext_lock:
+                    yield ext_lock
+            else:
+                yield int_lock
+        finally:
+            LOG.debug('Releasing semaphore "%(lock)s"', {'lock': name})
 
 
 def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
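A hedged usage sketch of the reworked lock() context manager above; the lock name, prefix, and guarded callable are illustrative, and lock_path is passed explicitly because the synced module now raises RequiredOptError when it is unset:

from manila.openstack.common import lockutils  # in-tree copy per this sync


def update_export_location():
    """Placeholder for work that must not run concurrently."""
    pass


# external=True takes both the thread semaphore and a file lock under lock_path
with lockutils.lock('export-location', lock_file_prefix='manila-',
                    external=True, lock_path='/tmp/manila-locks'):
    update_export_location()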
@@ -33,20 +33,20 @@ import logging
 import logging.config
 import logging.handlers
 import os
+import socket
 import sys
 import traceback
 
 from oslo.config import cfg
+from oslo.serialization import jsonutils
+from oslo.utils import importutils
 import six
 from six import moves
 
-from manila.openstack.common.gettextutils import _
-from manila.openstack.common import importutils
-from manila.openstack.common import jsonutils
+_PY26 = sys.version_info[0:2] == (2, 6)
+
+from manila.openstack.common._i18n import _
 from manila.openstack.common import local
-# NOTE(flaper87): Pls, remove when graduating this module
-# from the incubator.
-from manila.openstack.common.strutils import mask_password  # noqa
 
 
 _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
@@ -124,7 +124,9 @@ DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
                       'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
                       'oslo.messaging=INFO', 'iso8601=WARN',
                       'requests.packages.urllib3.connectionpool=WARN',
-                      'urllib3.connectionpool=WARN', 'websocket=WARN']
+                      'urllib3.connectionpool=WARN', 'websocket=WARN',
+                      "keystonemiddleware=WARN", "routes.middleware=WARN",
+                      "stevedore=WARN"]
 
 log_opts = [
     cfg.StrOpt('logging_context_format_string',
@@ -227,6 +229,15 @@ class BaseLoggerAdapter(logging.LoggerAdapter):
     def audit(self, msg, *args, **kwargs):
         self.log(logging.AUDIT, msg, *args, **kwargs)
 
+    def isEnabledFor(self, level):
+        if _PY26:
+            # This method was added in python 2.7 (and it does the exact
+            # same logic, so we need to do the exact same logic so that
+            # python 2.6 has this capability as well).
+            return self.logger.isEnabledFor(level)
+        else:
+            return super(BaseLoggerAdapter, self).isEnabledFor(level)
+
 
 class LazyAdapter(BaseLoggerAdapter):
     def __init__(self, name='unknown', version='unknown'):
@@ -289,11 +300,10 @@ class ContextAdapter(BaseLoggerAdapter):
         self.warn(stdmsg, *args, **kwargs)
 
     def process(self, msg, kwargs):
-        # NOTE(mrodden): catch any Message/other object and
-        #                coerce to unicode before they can get
-        #                to the python logging and possibly
-        #                cause string encoding trouble
-        if not isinstance(msg, six.string_types):
+        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
+        #                before it can get to the python logging and
+        #                possibly cause string encoding trouble
+        if not isinstance(msg, six.text_type):
             msg = six.text_type(msg)
 
         if 'extra' not in kwargs:
@@ -418,12 +428,12 @@ def set_defaults(logging_context_format_string=None,
     # later in a backwards in-compatible change
     if default_log_levels is not None:
         cfg.set_defaults(
             log_opts,
             default_log_levels=default_log_levels)
     if logging_context_format_string is not None:
         cfg.set_defaults(
             log_opts,
             logging_context_format_string=logging_context_format_string)
 
 
 def _find_facility_from_conf():
@@ -472,18 +482,6 @@ def _setup_logging_from_conf(project, version):
     for handler in log_root.handlers:
         log_root.removeHandler(handler)
 
-    if CONF.use_syslog:
-        facility = _find_facility_from_conf()
-        # TODO(bogdando) use the format provided by RFCSysLogHandler
-        # after existing syslog format deprecation in J
-        if CONF.use_syslog_rfc_format:
-            syslog = RFCSysLogHandler(address='/dev/log',
-                                      facility=facility)
-        else:
-            syslog = logging.handlers.SysLogHandler(address='/dev/log',
-                                                    facility=facility)
-        log_root.addHandler(syslog)
-
     logpath = _get_log_file_path()
     if logpath:
         filelog = logging.handlers.WatchedFileHandler(logpath)
@@ -542,6 +540,20 @@ def _setup_logging_from_conf(project, version):
         else:
             logger.setLevel(level_name)
 
+    if CONF.use_syslog:
+        try:
+            facility = _find_facility_from_conf()
+            # TODO(bogdando) use the format provided by RFCSysLogHandler
+            # after existing syslog format deprecation in J
+            if CONF.use_syslog_rfc_format:
+                syslog = RFCSysLogHandler(facility=facility)
+            else:
+                syslog = logging.handlers.SysLogHandler(facility=facility)
+            log_root.addHandler(syslog)
+        except socket.error:
+            log_root.error('Unable to add syslog handler. Verify that syslog '
+                           'is running.')
+
 
 _loggers = {}
 
@@ -611,6 +623,12 @@ class ContextFormatter(logging.Formatter):
     def format(self, record):
         """Uses contextstring if request_id is set, otherwise default."""
 
+        # NOTE(jecarey): If msg is not unicode, coerce it into unicode
+        #                before it can get to the python logging and
+        #                possibly cause string encoding trouble
+        if not isinstance(record.msg, six.text_type):
+            record.msg = six.text_type(record.msg)
+
         # store project info
         record.project = self.project
         record.version = self.version
|
@ -21,7 +21,7 @@ import time
|
|||||||
from eventlet import event
|
from eventlet import event
|
||||||
from eventlet import greenthread
|
from eventlet import greenthread
|
||||||
|
|
||||||
from manila.openstack.common.gettextutils import _LE, _LW
|
from manila.openstack.common._i18n import _LE, _LW
|
||||||
from manila.openstack.common import log as logging
|
from manila.openstack.common import log as logging
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
LOG = logging.getLogger(__name__)
|
||||||
|
@@ -1,163 +0,0 @@
-# Copyright 2012 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Network-related utilities and helper functions.
-"""
-
-import logging
-import socket
-
-from six.moves.urllib import parse
-
-from manila.openstack.common.gettextutils import _LW
-
-LOG = logging.getLogger(__name__)
-
-
-def parse_host_port(address, default_port=None):
-    """Interpret a string as a host:port pair.
-
-    An IPv6 address MUST be escaped if accompanied by a port,
-    because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334
-    means both [2001:db8:85a3::8a2e:370:7334] and
-    [2001:db8:85a3::8a2e:370]:7334.
-
-    >>> parse_host_port('server01:80')
-    ('server01', 80)
-    >>> parse_host_port('server01')
-    ('server01', None)
-    >>> parse_host_port('server01', default_port=1234)
-    ('server01', 1234)
-    >>> parse_host_port('[::1]:80')
-    ('::1', 80)
-    >>> parse_host_port('[::1]')
-    ('::1', None)
-    >>> parse_host_port('[::1]', default_port=1234)
-    ('::1', 1234)
-    >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234)
-    ('2001:db8:85a3::8a2e:370:7334', 1234)
-    >>> parse_host_port(None)
-    (None, None)
-    """
-    if not address:
-        return (None, None)
-
-    if address[0] == '[':
-        # Escaped ipv6
-        _host, _port = address[1:].split(']')
-        host = _host
-        if ':' in _port:
-            port = _port.split(':')[1]
-        else:
-            port = default_port
-    else:
-        if address.count(':') == 1:
-            host, port = address.split(':')
-        else:
-            # 0 means ipv4, >1 means ipv6.
-            # We prohibit unescaped ipv6 addresses with port.
-            host = address
-            port = default_port
-
-    return (host, None if port is None else int(port))
-
-
-class ModifiedSplitResult(parse.SplitResult):
-    """Split results class for urlsplit."""
-
-    # NOTE(dims): The functions below are needed for Python 2.6.x.
-    # We can remove these when we drop support for 2.6.x.
-    @property
-    def hostname(self):
-        netloc = self.netloc.split('@', 1)[-1]
-        host, port = parse_host_port(netloc)
-        return host
-
-    @property
-    def port(self):
-        netloc = self.netloc.split('@', 1)[-1]
-        host, port = parse_host_port(netloc)
-        return port
-
-
-def urlsplit(url, scheme='', allow_fragments=True):
-    """Parse a URL using urlparse.urlsplit(), splitting query and fragments.
-    This function papers over Python issue9374 when needed.
-
-    The parameters are the same as urlparse.urlsplit.
-    """
-    scheme, netloc, path, query, fragment = parse.urlsplit(
-        url, scheme, allow_fragments)
-    if allow_fragments and '#' in path:
-        path, fragment = path.split('#', 1)
-        if '?' in path:
-            path, query = path.split('?', 1)
-    return ModifiedSplitResult(scheme, netloc,
-                               path, query, fragment)
-
-
-def set_tcp_keepalive(sock, tcp_keepalive=True,
-                      tcp_keepidle=None,
-                      tcp_keepalive_interval=None,
-                      tcp_keepalive_count=None):
-    """Set values for tcp keepalive parameters
-
-    This function configures tcp keepalive parameters if users wish to do
-    so.
-
-    :param tcp_keepalive: Boolean, turn on or off tcp_keepalive. If users are
-      not sure, this should be True, and default values will be used.
-
-    :param tcp_keepidle: time to wait before starting to send keepalive probes
-    :param tcp_keepalive_interval: time between successive probes, once the
-      initial wait time is over
-    :param tcp_keepalive_count: number of probes to send before the connection
-      is killed
-    """
-
-    # NOTE(praneshp): Despite keepalive being a tcp concept, the level is
-    # still SOL_SOCKET. This is a quirk.
-    if isinstance(tcp_keepalive, bool):
-        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, tcp_keepalive)
-    else:
-        raise TypeError("tcp_keepalive must be a boolean")
-
-    if not tcp_keepalive:
-        return
-
-    # These options aren't available in the OS X version of eventlet,
-    # Idle + Count * Interval effectively gives you the total timeout.
-    if tcp_keepidle is not None:
-        if hasattr(socket, 'TCP_KEEPIDLE'):
-            sock.setsockopt(socket.IPPROTO_TCP,
-                            socket.TCP_KEEPIDLE,
-                            tcp_keepidle)
-        else:
-            LOG.warning(_LW('tcp_keepidle not available on your system'))
-    if tcp_keepalive_interval is not None:
-        if hasattr(socket, 'TCP_KEEPINTVL'):
-            sock.setsockopt(socket.IPPROTO_TCP,
-                            socket.TCP_KEEPINTVL,
-                            tcp_keepalive_interval)
-        else:
-            LOG.warning(_LW('tcp_keepintvl not available on your system'))
-    if tcp_keepalive_count is not None:
-        if hasattr(socket, 'TCP_KEEPCNT'):
-            sock.setsockopt(socket.IPPROTO_TCP,
-                            socket.TCP_KEEPCNT,
-                            tcp_keepalive_count)
-        else:
-            LOG.warning(_LW('tcp_keepknt not available on your system'))
@@ -77,16 +77,17 @@ as it allows particular rules to be explicitly disabled.
 
 import abc
 import ast
+import os
 import re
 
 from oslo.config import cfg
+from oslo.serialization import jsonutils
 import six
 import six.moves.urllib.parse as urlparse
 import six.moves.urllib.request as urlrequest
 
 from manila.openstack.common import fileutils
-from manila.openstack.common.gettextutils import _, _LE
-from manila.openstack.common import jsonutils
+from manila.openstack.common._i18n import _, _LE, _LW
 from manila.openstack.common import log as logging
 
 
@@ -98,6 +99,10 @@ policy_opts = [
                default='default',
                help=_('Default rule. Enforced when a requested rule is not '
                       'found.')),
+    cfg.MultiStrOpt('policy_dirs',
+                    default=['policy.d'],
+                    help=_('Directories where policy configuration files are '
+                           'stored')),
 ]
 
 CONF = cfg.CONF
@@ -188,8 +193,8 @@ class Enforcer(object):
 
     def __init__(self, policy_file=None, rules=None,
                  default_rule=None, use_conf=True):
-        self.rules = Rules(rules, default_rule)
         self.default_rule = default_rule or CONF.policy_default_rule
+        self.rules = Rules(rules, self.default_rule)
 
         self.policy_path = None
         self.policy_file = policy_file or CONF.policy_file
@@ -233,31 +238,53 @@ class Enforcer(object):
 
         if self.use_conf:
            if not self.policy_path:
-                self.policy_path = self._get_policy_path()
-
-            reloaded, data = fileutils.read_cached_file(
-                self.policy_path, force_reload=force_reload)
-            if reloaded or not self.rules:
-                rules = Rules.load_json(data, self.default_rule)
-                self.set_rules(rules)
-                LOG.debug("Rules successfully reloaded")
-
-    def _get_policy_path(self):
-        """Locate the policy json data file.
-
-        :param policy_file: Custom policy file to locate.
-
-        :returns: The policy path
-
-        :raises: ConfigFilesNotFoundError if the file couldn't
-                 be located.
-        """
-        policy_file = CONF.find_file(self.policy_file)
-
-        if policy_file:
-            return policy_file
-
-        raise cfg.ConfigFilesNotFoundError((self.policy_file,))
+                self.policy_path = self._get_policy_path(self.policy_file)
+
+            self._load_policy_file(self.policy_path, force_reload)
+            for path in CONF.policy_dirs:
+                try:
+                    path = self._get_policy_path(path)
+                except cfg.ConfigFilesNotFoundError:
+                    LOG.warn(_LW("Can not find policy directories %s"), path)
+                    continue
+                self._walk_through_policy_directory(path,
+                                                    self._load_policy_file,
+                                                    force_reload, False)
+
+    def _walk_through_policy_directory(self, path, func, *args):
+        # We do not iterate over sub-directories.
+        policy_files = next(os.walk(path))[2]
+        policy_files.sort()
+        for policy_file in [p for p in policy_files if not p.startswith('.')]:
+            func(os.path.join(path, policy_file), *args)
+
+    def _load_policy_file(self, path, force_reload, overwrite=True):
+        reloaded, data = fileutils.read_cached_file(
+            path, force_reload=force_reload)
+        if reloaded or not self.rules:
+            rules = Rules.load_json(data, self.default_rule)
+            self.set_rules(rules, overwrite)
+            LOG.debug("Rules successfully reloaded")
+
+    def _get_policy_path(self, path):
+        """Locate the policy json data file/path.
+
+        :param path: It's value can be a full path or related path. When
+                     full path specified, this function just returns the full
+                     path. When related path specified, this function will
+                     search configuration directories to find one that exists.
+
+        :returns: The policy path
+
+        :raises: ConfigFilesNotFoundError if the file/path couldn't
+                 be located.
+        """
+        policy_path = CONF.find_file(path)
+
+        if policy_path:
+            return policy_path
+
+        raise cfg.ConfigFilesNotFoundError((path,))
 
     def enforce(self, rule, target, creds, do_raise=False,
                 exc=None, *args, **kwargs):
@@ -272,7 +299,7 @@ class Enforcer(object):
         :param do_raise: Whether to raise an exception or not if check
                         fails.
         :param exc: Class of the exception to raise if the check fails.
-                    Any remaining arguments passed to check() (both
+                    Any remaining arguments passed to enforce() (both
                     positional and keyword arguments) will be passed to
                     the exception class. If not specified, PolicyNotAuthorized
                     will be used.
@@ -785,7 +812,7 @@ def _parse_text_rule(rule):
         return state.result
     except ValueError:
         # Couldn't parse the rule
-        LOG.exception(_LE("Failed to understand rule %r") % rule)
+        LOG.exception(_LE("Failed to understand rule %s") % rule)
 
         # Fail closed
         return FalseCheck()
@@ -876,7 +903,6 @@ class GenericCheck(Check):
             'Member':%(role.name)s
         """
 
-        # TODO(termie): do dict inspection via dot syntax
        try:
            match = self.match % target
        except KeyError:
@@ -889,7 +915,10 @@ class GenericCheck(Check):
             leftval = ast.literal_eval(self.kind)
         except ValueError:
             try:
-                leftval = creds[self.kind]
+                kind_parts = self.kind.split('.')
+                leftval = creds
+                for kind_part in kind_parts:
+                    leftval = leftval[kind_part]
             except KeyError:
                 return False
         return match == six.text_type(leftval)
@@ -27,10 +27,10 @@ import signal
 
 from eventlet.green import subprocess
 from eventlet import greenthread
+from oslo.utils import strutils
 import six
 
-from manila.openstack.common.gettextutils import _
-from manila.openstack.common import strutils
+from manila.openstack.common._i18n import _
 
 
 LOG = logging.getLogger(__name__)
@@ -150,12 +150,12 @@ def execute(*cmd, **kwargs):
         cmd = shlex.split(root_helper) + list(cmd)
 
     cmd = map(str, cmd)
+    sanitized_cmd = strutils.mask_password(' '.join(cmd))
 
     while attempts > 0:
         attempts -= 1
         try:
-            LOG.log(loglevel, 'Running cmd (subprocess): %s',
-                    strutils.mask_password(' '.join(cmd)))
+            LOG.log(loglevel, _('Running cmd (subprocess): %s'), sanitized_cmd)
             _PIPE = subprocess.PIPE  # pylint: disable=E1101
 
             if os.name == 'nt':
@@ -192,16 +192,18 @@ def execute(*cmd, **kwargs):
             LOG.log(loglevel, 'Result was %s' % _returncode)
             if not ignore_exit_code and _returncode not in check_exit_code:
                 (stdout, stderr) = result
+                sanitized_stdout = strutils.mask_password(stdout)
+                sanitized_stderr = strutils.mask_password(stderr)
                 raise ProcessExecutionError(exit_code=_returncode,
-                                            stdout=stdout,
-                                            stderr=stderr,
-                                            cmd=' '.join(cmd))
+                                            stdout=sanitized_stdout,
+                                            stderr=sanitized_stderr,
+                                            cmd=sanitized_cmd)
             return result
         except ProcessExecutionError:
             if not attempts:
                 raise
             else:
-                LOG.log(loglevel, '%r failed. Retrying.', cmd)
+                LOG.log(loglevel, _('%r failed. Retrying.'), sanitized_cmd)
                 if delay_on_retry:
                     greenthread.sleep(random.randint(20, 200) / 100.0)
         finally:
@@ -240,7 +242,8 @@ def trycmd(*args, **kwargs):
 
 def ssh_execute(ssh, cmd, process_input=None,
                 addl_env=None, check_exit_code=True):
-    LOG.debug('Running cmd (SSH): %s', cmd)
+    sanitized_cmd = strutils.mask_password(cmd)
+    LOG.debug('Running cmd (SSH): %s', sanitized_cmd)
     if addl_env:
         raise InvalidArgumentError(_('Environment not supported over SSH'))
 
@@ -254,7 +257,10 @@ def ssh_execute(ssh, cmd, process_input=None,
     # NOTE(justinsb): This seems suspicious...
     # ...other SSH clients have buffering issues with this approach
     stdout = stdout_stream.read()
+    sanitized_stdout = strutils.mask_password(stdout)
     stderr = stderr_stream.read()
+    sanitized_stderr = strutils.mask_password(stderr)
 
     stdin_stream.close()
 
     exit_status = channel.recv_exit_status()
@@ -264,11 +270,11 @@ def ssh_execute(ssh, cmd, process_input=None,
     LOG.debug('Result was %s' % exit_status)
     if check_exit_code and exit_status != 0:
         raise ProcessExecutionError(exit_code=exit_status,
-                                    stdout=stdout,
-                                    stderr=stderr,
-                                    cmd=cmd)
+                                    stdout=sanitized_stdout,
+                                    stderr=sanitized_stderr,
+                                    cmd=sanitized_cmd)
 
-    return (stdout, stderr)
+    return (sanitized_stdout, sanitized_stderr)
 
 
 def get_worker_count():
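The sanitizing used above comes from oslo's strutils, imported in this sync as from oslo.utils import strutils. A hedged illustration; the exact masking token is an implementation detail of that library:

from oslo.utils import strutils

raw = "mysqladmin -u root --password='s3cret' status"
# the secret value is replaced by a masking token (e.g. '***'), so the
# command line, stdout, and stderr can be logged safely
print(strutils.mask_password(raw))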
@@ -17,7 +17,7 @@
 Filter support
 """
 
-from manila.openstack.common.gettextutils import _LI
+from manila.openstack.common._i18n import _LI
 from manila.openstack.common import log as logging
 from manila.openstack.common.scheduler import base_handler
 
@@ -15,7 +15,7 @@
 
 import operator
 
-from manila.openstack.common import strutils
+from oslo.utils import strutils
 
 # 1. The following operations are supported:
 #  =, s==, s!=, s>=, s>, s<=, s<, <in>, <is>, <or>, ==, !=, >=, <=
@@ -15,9 +15,9 @@
 
 import operator
 
+from oslo.serialization import jsonutils
 import six
 
-from manila.openstack.common import jsonutils
 from manila.openstack.common.scheduler import filters
 
 
@@ -38,14 +38,12 @@ from eventlet import event
 from oslo.config import cfg
 
 from manila.openstack.common import eventlet_backdoor
-from manila.openstack.common.gettextutils import _LE, _LI, _LW
-from manila.openstack.common import importutils
+from manila.openstack.common._i18n import _LE, _LI, _LW
 from manila.openstack.common import log as logging
 from manila.openstack.common import systemd
 from manila.openstack.common import threadgroup
 
 
-rpc = importutils.try_import('manila.openstack.common.rpc')
 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
@@ -180,12 +178,6 @@ class ServiceLauncher(Launcher):
             status = exc.code
         finally:
             self.stop()
-            if rpc:
-                try:
-                    rpc.cleanup()
-                except Exception:
-                    # We're shutting down, so it doesn't matter at this point.
-                    LOG.exception(_LE('Exception during rpc cleanup.'))
 
         return status, signo
 
@@ -17,7 +17,7 @@ import ssl
 
 from oslo.config import cfg
 
-from manila.openstack.common.gettextutils import _
+from manila.openstack.common._i18n import _
 
 
 ssl_opts = [
@ -1,295 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
System-level utilities and helper functions.
"""

import math
import re
import sys
import unicodedata

import six

from manila.openstack.common.gettextutils import _


UNIT_PREFIX_EXPONENT = {
    'k': 1,
    'K': 1,
    'Ki': 1,
    'M': 2,
    'Mi': 2,
    'G': 3,
    'Gi': 3,
    'T': 4,
    'Ti': 4,
}
UNIT_SYSTEM_INFO = {
    'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')),
    'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')),
}

TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')

SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")


# NOTE(flaper87): The following 3 globals are used by `mask_password`
_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']

# NOTE(ldbragst): Let's build a list of regex objects using the list of
# _SANITIZE_KEYS we already have. This way, we only have to add the new key
# to the list of _SANITIZE_KEYS and we can generate regular expressions
# for XML and JSON automatically.
_SANITIZE_PATTERNS = []
_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
                    r'(<%(key)s>).*?(</%(key)s>)',
                    r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
                    r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
                    r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])'
                    '.*?([\'"])',
                    r'(%(key)s\s*--?[A-z]+\s*)\S+(\s*)']

for key in _SANITIZE_KEYS:
    for pattern in _FORMAT_PATTERNS:
        reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
        _SANITIZE_PATTERNS.append(reg_ex)


def int_from_bool_as_string(subject):
    """Interpret a string as a boolean and return either 1 or 0.

    Any string value in:

        ('True', 'true', 'On', 'on', '1')

    is interpreted as a boolean True.

    Useful for JSON-decoded stuff and config file parsing
    """
    return bool_from_string(subject) and 1 or 0


def bool_from_string(subject, strict=False, default=False):
    """Interpret a string as a boolean.

    A case-insensitive match is performed such that strings matching 't',
    'true', 'on', 'y', 'yes', or '1' are considered True and, when
    `strict=False`, anything else returns the value specified by 'default'.

    Useful for JSON-decoded stuff and config file parsing.

    If `strict=True`, unrecognized values, including None, will raise a
    ValueError which is useful when parsing values passed in from an API call.
    Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
    """
    if not isinstance(subject, six.string_types):
        subject = six.text_type(subject)

    lowered = subject.strip().lower()

    if lowered in TRUE_STRINGS:
        return True
    elif lowered in FALSE_STRINGS:
        return False
    elif strict:
        acceptable = ', '.join(
            "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
        msg = _("Unrecognized value '%(val)s', acceptable values are:"
                " %(acceptable)s") % {'val': subject,
                                      'acceptable': acceptable}
        raise ValueError(msg)
    else:
        return default


def safe_decode(text, incoming=None, errors='strict'):
    """Decodes incoming text/bytes string using `incoming` if they're not
    already unicode.

    :param incoming: Text's current encoding
    :param errors: Errors handling policy. See here for valid
        values http://docs.python.org/2/library/codecs.html
    :returns: text or a unicode `incoming` encoded
        representation of it.
    :raises TypeError: If text is not an instance of str
    """
    if not isinstance(text, (six.string_types, six.binary_type)):
        raise TypeError("%s can't be decoded" % type(text))

    if isinstance(text, six.text_type):
        return text

    if not incoming:
        incoming = (sys.stdin.encoding or
                    sys.getdefaultencoding())

    try:
        return text.decode(incoming, errors)
    except UnicodeDecodeError:
        # Note(flaper87) If we get here, it means that
        # sys.stdin.encoding / sys.getdefaultencoding
        # didn't return a suitable encoding to decode
        # text. This happens mostly when global LANG
        # var is not set correctly and there's no
        # default encoding. In this case, most likely
        # python will use ASCII or ANSI encoders as
        # default encodings but they won't be capable
        # of decoding non-ASCII characters.
        #
        # Also, UTF-8 is being used since it's an ASCII
        # extension.
        return text.decode('utf-8', errors)


def safe_encode(text, incoming=None,
                encoding='utf-8', errors='strict'):
    """Encodes incoming text/bytes string using `encoding`.

    If incoming is not specified, text is expected to be encoded with
    current python's default encoding. (`sys.getdefaultencoding`)

    :param incoming: Text's current encoding
    :param encoding: Expected encoding for text (Default UTF-8)
    :param errors: Errors handling policy. See here for valid
        values http://docs.python.org/2/library/codecs.html
    :returns: text or a bytestring `encoding` encoded
        representation of it.
    :raises TypeError: If text is not an instance of str
    """
    if not isinstance(text, (six.string_types, six.binary_type)):
        raise TypeError("%s can't be encoded" % type(text))

    if not incoming:
        incoming = (sys.stdin.encoding or
                    sys.getdefaultencoding())

    if isinstance(text, six.text_type):
        return text.encode(encoding, errors)
    elif text and encoding != incoming:
        # Decode text before encoding it with `encoding`
        text = safe_decode(text, incoming, errors)
        return text.encode(encoding, errors)
    else:
        return text


def string_to_bytes(text, unit_system='IEC', return_int=False):
    """Converts a string into an float representation of bytes.

    The units supported for IEC ::

        Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
        KB, KiB, MB, MiB, GB, GiB, TB, TiB

    The units supported for SI ::

        kb(it), Mb(it), Gb(it), Tb(it)
        kB, MB, GB, TB

    Note that the SI unit system does not support capital letter 'K'

    :param text: String input for bytes size conversion.
    :param unit_system: Unit system for byte size conversion.
    :param return_int: If True, returns integer representation of text
                       in bytes. (default: decimal)
    :returns: Numerical representation of text in bytes.
    :raises ValueError: If text has an invalid value.

    """
    try:
        base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
    except KeyError:
        msg = _('Invalid unit system: "%s"') % unit_system
        raise ValueError(msg)
    match = reg_ex.match(text)
    if match:
        magnitude = float(match.group(1))
        unit_prefix = match.group(2)
        if match.group(3) in ['b', 'bit']:
            magnitude /= 8
    else:
        msg = _('Invalid string format: %s') % text
        raise ValueError(msg)
    if not unit_prefix:
        res = magnitude
    else:
        res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
    if return_int:
        return int(math.ceil(res))
    return res


def to_slug(value, incoming=None, errors="strict"):
    """Normalize string.

    Convert to lowercase, remove non-word characters, and convert spaces
    to hyphens.

    Inspired by Django's `slugify` filter.

    :param value: Text to slugify
    :param incoming: Text's current encoding
    :param errors: Errors handling policy. See here for valid
        values http://docs.python.org/2/library/codecs.html
    :returns: slugified unicode representation of `value`
    :raises TypeError: If text is not an instance of str
    """
    value = safe_decode(value, incoming, errors)
    # NOTE(aababilov): no need to use safe_(encode|decode) here:
    # encodings are always "ascii", error handling is always "ignore"
    # and types are always known (first: unicode; second: str)
    value = unicodedata.normalize("NFKD", value).encode(
        "ascii", "ignore").decode("ascii")
    value = SLUGIFY_STRIP_RE.sub("", value).strip().lower()
    return SLUGIFY_HYPHENATE_RE.sub("-", value)


def mask_password(message, secret="***"):
    """Replace password with 'secret' in message.

    :param message: The string which includes security information.
    :param secret: value with which to replace passwords.
    :returns: The unicode value of message with the password fields masked.

    For example:

    >>> mask_password("'adminPass' : 'aaaaa'")
    "'adminPass' : '***'"
    >>> mask_password("'admin_pass' : 'aaaaa'")
    "'admin_pass' : '***'"
    >>> mask_password('"password" : "aaaaa"')
    '"password" : "***"'
    >>> mask_password("'original_password' : 'aaaaa'")
    "'original_password' : '***'"
    >>> mask_password("u'original_password' : u'aaaaa'")
    "u'original_password' : u'***'"
    """
    message = six.text_type(message)

    # NOTE(ldbragst): Check to see if anything in message contains any key
    # specified in _SANITIZE_KEYS, if not then just return the message since
    # we don't have to mask any passwords.
    if not any(key in message for key in _SANITIZE_KEYS):
        return message

    secret = r'\g<1>' + secret + r'\g<2>'
    for pattern in _SANITIZE_PATTERNS:
        message = re.sub(pattern, secret, message)
    return message
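The helpers deleted above remain available to consumers; a short behaviour sketch, assuming the graduated oslo.utils strutils module is used as the replacement (the import path is that assumption, the values follow the docstrings in the removed file):

# Assumes oslo.utils provides the replacement for the removed incubator copy.
from oslo.utils import strutils

strutils.bool_from_string('yes')                  # True
strutils.bool_from_string('banana')               # False (falls back to default)
strutils.string_to_bytes('1KB')                   # 1024.0 under the default IEC system
strutils.mask_password("'adminPass' : 'aaaaa'")   # "'adminPass' : '***'"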
@ -1,210 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Time related utilities and helper functions.
"""

import calendar
import datetime
import time

import iso8601
import six


# ISO 8601 extended time format with microseconds
_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND


def isotime(at=None, subsecond=False):
    """Stringify time in ISO 8601 format."""
    if not at:
        at = utcnow()
    st = at.strftime(_ISO8601_TIME_FORMAT
                     if not subsecond
                     else _ISO8601_TIME_FORMAT_SUBSECOND)
    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    st += ('Z' if tz == 'UTC' else tz)
    return st


def parse_isotime(timestr):
    """Parse time from ISO 8601 format."""
    try:
        return iso8601.parse_date(timestr)
    except iso8601.ParseError as e:
        raise ValueError(six.text_type(e))
    except TypeError as e:
        raise ValueError(six.text_type(e))


def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
    """Returns formatted utcnow."""
    if not at:
        at = utcnow()
    return at.strftime(fmt)


def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
    """Turn a formatted time back into a datetime."""
    return datetime.datetime.strptime(timestr, fmt)


def normalize_time(timestamp):
    """Normalize time in arbitrary timezone to UTC naive object."""
    offset = timestamp.utcoffset()
    if offset is None:
        return timestamp
    return timestamp.replace(tzinfo=None) - offset


def is_older_than(before, seconds):
    """Return True if before is older than seconds."""
    if isinstance(before, six.string_types):
        before = parse_strtime(before).replace(tzinfo=None)
    else:
        before = before.replace(tzinfo=None)

    return utcnow() - before > datetime.timedelta(seconds=seconds)


def is_newer_than(after, seconds):
    """Return True if after is newer than seconds."""
    if isinstance(after, six.string_types):
        after = parse_strtime(after).replace(tzinfo=None)
    else:
        after = after.replace(tzinfo=None)

    return after - utcnow() > datetime.timedelta(seconds=seconds)


def utcnow_ts():
    """Timestamp version of our utcnow function."""
    if utcnow.override_time is None:
        # NOTE(kgriffs): This is several times faster
        # than going through calendar.timegm(...)
        return int(time.time())

    return calendar.timegm(utcnow().timetuple())


def utcnow():
    """Overridable version of utils.utcnow."""
    if utcnow.override_time:
        try:
            return utcnow.override_time.pop(0)
        except AttributeError:
            return utcnow.override_time
    return datetime.datetime.utcnow()


def iso8601_from_timestamp(timestamp):
    """Returns an iso8601 formatted date from timestamp."""
    return isotime(datetime.datetime.utcfromtimestamp(timestamp))


utcnow.override_time = None


def set_time_override(override_time=None):
    """Overrides utils.utcnow.

    Make it return a constant time or a list thereof, one at a time.

    :param override_time: datetime instance or list thereof. If not
                          given, defaults to the current UTC time.
    """
    utcnow.override_time = override_time or datetime.datetime.utcnow()


def advance_time_delta(timedelta):
    """Advance overridden time using a datetime.timedelta."""
    assert utcnow.override_time is not None
    try:
        for dt in utcnow.override_time:
            dt += timedelta
    except TypeError:
        utcnow.override_time += timedelta


def advance_time_seconds(seconds):
    """Advance overridden time by seconds."""
    advance_time_delta(datetime.timedelta(0, seconds))


def clear_time_override():
    """Remove the overridden time."""
    utcnow.override_time = None


def marshall_now(now=None):
    """Make an rpc-safe datetime with microseconds.

    Note: tzinfo is stripped, but not required for relative times.
    """
    if not now:
        now = utcnow()
    return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
                minute=now.minute, second=now.second,
                microsecond=now.microsecond)


def unmarshall_time(tyme):
    """Unmarshall a datetime dict."""
    return datetime.datetime(day=tyme['day'],
                             month=tyme['month'],
                             year=tyme['year'],
                             hour=tyme['hour'],
                             minute=tyme['minute'],
                             second=tyme['second'],
                             microsecond=tyme['microsecond'])


def delta_seconds(before, after):
    """Return the difference between two timing objects.

    Compute the difference in seconds between two date, time, or
    datetime objects (as a float, to microsecond resolution).
    """
    delta = after - before
    return total_seconds(delta)


def total_seconds(delta):
    """Return the total seconds of datetime.timedelta object.

    Compute total seconds of datetime.timedelta, datetime.timedelta
    doesn't have method total_seconds in Python2.6, calculate it manually.
    """
    try:
        return delta.total_seconds()
    except AttributeError:
        return ((delta.days * 24 * 3600) + delta.seconds +
                float(delta.microseconds) / (10 ** 6))


def is_soon(dt, window):
    """Determines if time is going to happen in the next window seconds.

    :param dt: the time
    :param window: minimum seconds to remain to consider the time not soon

    :return: True if expiration is within the given duration
    """
    soon = (utcnow() + datetime.timedelta(seconds=window))
    return normalize_time(dt) <= soon
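The override helpers in the file removed above were used mainly by unit tests; a short sketch of how they behaved (the import path shown is the pre-removal incubator copy, which no longer resolves after this commit):

# Sketch of the removed time-override helpers in a test-style usage.
import datetime

from manila.openstack.common import timeutils

timeutils.set_time_override(datetime.datetime(2014, 10, 17, 12, 0, 0))
assert timeutils.utcnow() == datetime.datetime(2014, 10, 17, 12, 0, 0)
timeutils.advance_time_seconds(30)
assert timeutils.utcnow().second == 30
timeutils.clear_time_override()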
@ -18,10 +18,12 @@ Helpers for comparing version strings.
 """
 
 import functools
+import inspect
 
 import pkg_resources
+import six
 
-from manila.openstack.common.gettextutils import _
+from manila.openstack.common._i18n import _
 from manila.openstack.common import log as logging
 
 
@ -116,16 +118,34 @@ class deprecated(object):
         self.remove_in = remove_in
         self.what = what
 
-    def __call__(self, func):
+    def __call__(self, func_or_cls):
         if not self.what:
-            self.what = func.__name__ + '()'
+            self.what = func_or_cls.__name__ + '()'
+        msg, details = self._build_message()
 
-        @functools.wraps(func)
-        def wrapped(*args, **kwargs):
-            msg, details = self._build_message()
-            LOG.deprecated(msg, details)
-            return func(*args, **kwargs)
-        return wrapped
+        if inspect.isfunction(func_or_cls):
+
+            @six.wraps(func_or_cls)
+            def wrapped(*args, **kwargs):
+                LOG.deprecated(msg, details)
+                return func_or_cls(*args, **kwargs)
+            return wrapped
+        elif inspect.isclass(func_or_cls):
+            orig_init = func_or_cls.__init__
+
+            # TODO(tsufiev): change `functools` module to `six` as
+            # soon as six 1.7.4 (with fix for passing `assigned`
+            # argument to underlying `functools.wraps`) is released
+            # and added to the manila-incubator requrements
+            @functools.wraps(orig_init, assigned=('__name__', '__doc__'))
+            def new_init(self, *args, **kwargs):
+                LOG.deprecated(msg, details)
+                orig_init(self, *args, **kwargs)
+            func_or_cls.__init__ = new_init
+            return func_or_cls
+        else:
+            raise TypeError('deprecated can be used only with functions or '
+                            'classes')
 
     def _get_safe_to_remove_release(self, release):
         # TODO(dstanek): this method will have to be reimplemented once
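With the rewritten `__call__`, `@deprecated` can now wrap classes as well as plain functions. A hedged usage sketch (the class, driver, and function names below are made up, not from the Manila tree):

# Illustrative names only; behaviour follows the decorator above.
from manila.openstack.common import versionutils


@versionutils.deprecated(as_of=versionutils.deprecated.ICEHOUSE,
                         in_favor_of='NewDriver')
class OldDriver(object):
    def __init__(self):
        self.ready = True


@versionutils.deprecated(as_of=versionutils.deprecated.ICEHOUSE, remove_in=1)
def old_helper():
    return 42


OldDriver()   # logs the deprecation message from the wrapped __init__
old_helper()  # logs the deprecation message, then returns 42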
@ -23,6 +23,7 @@ inline callbacks.
 
 import os
 import shutil
+import tempfile
 import uuid
 
 import fixtures
@ -128,6 +129,8 @@ class TestCase(testtools.TestCase):
         self.start = timeutils.utcnow()
 
         self.log_fixture = self.useFixture(fixtures.FakeLogger())
+        self.useFixture(fixtures.NestedTempfile())
+        self.useFixture(fixtures.TempHomeDir())
 
         global _DB_CACHE
         if not _DB_CACHE:
@ -144,6 +147,8 @@ class TestCase(testtools.TestCase):
         self.injected = []
         self._services = []
         CONF.set_override('fatal_exception_format_errors', True)
+        # This will be cleaned up by the NestedTempfile fixture
+        CONF.set_override('lock_path', tempfile.mkdtemp())
 
         rpc.add_extra_exmods('manila.tests')
         self.addCleanup(rpc.clear_extra_exmods)
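The `lock_path` override matters because external locks in the synced `lockutils` create their lock files under `CONF.lock_path`; pointing it at a throw-away directory (cleaned up by `NestedTempfile`) keeps tests isolated. A minimal sketch of the kind of code that needs the option set (the function and lock names are illustrative):

# Illustrative sketch; any externally synchronized code path needs lock_path.
from manila.openstack.common import lockutils

synchronized = lockutils.synchronized_with_prefix('manila-')


@synchronized('share-create', external=True)
def create_share():
    # With external=True a lock file is created under CONF.lock_path,
    # so the option must point at a writable directory.
    pass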
@ -3,19 +3,11 @@
 # The list of modules to copy from openstack-common
 module=context
 module=eventlet_backdoor
-# TODO(jaegerandi) remove excutils with next sync of oslo-incubator
-module=excutils
 module=fileutils
-# TODO(jaegerandi) remove importutils with next sync of oslo-incubator
-module=importutils
-# TODO(jaegerandi) remove jsonutils with next sync of oslo-incubator
-module=jsonutils
 module=local
 module=lockutils
 module=log
 module=loopingcall
-# TODO(jaegerandi) remove network_utils with next sync of oslo-incubator
-module=network_utils
 module=policy
 module=processutils
 module=scheduler
@ -23,12 +15,8 @@ module=scheduler.filters
 module=scheduler.weights
 module=service
 module=sslutils
-# TODO(jaegerandi) remove strutils with next sync of oslo-incubator
-module=strutils
 module=systemd
 module=threadgroup
-# TODO(jaegerandi) remove timeutils with next sync of oslo-incubator
-module=timeutils
 module=uuidutils
 module=versionutils
 
@ -51,8 +51,8 @@ def main(argv):
     if os.environ.get('tools_path'):
         root = os.environ['tools_path']
     venv = os.path.join(root, '.venv')
-    if os.environ.get('venv'):
-        venv = os.environ['venv']
+    if os.environ.get('VENV'):
+        venv = os.environ['VENV']
 
     pip_requires = os.path.join(root, 'requirements.txt')
     test_requires = os.path.join(root, 'test-requirements.txt')