commit e954184693 (parent 9e2515aad3)

Remove six usage and basestring check

Remove the basestring hacking check and remove six. Replace the following
items with Python 3 style code:

- six.string_types
- six.int2byte
- six.indexbytes
- six.add_metaclass
- six.StringIO
- six.text_type
- six.integer_types
- six.binary_type
- six.BytesIO
- six.reraise

Change-Id: I4fb9033d152963c504ceb4d5c4d08f934ee4accb
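As a quick orientation (this snippet is not part of the commit), every six
helper listed above has a direct Python 3 builtin or stdlib equivalent, which
is what the hunks below substitute. A minimal, illustrative sketch:

    # Illustration only: Python 3 equivalents of the six helpers removed here.
    import io

    assert isinstance(u"text", str)       # six.string_types / six.text_type -> str
    assert isinstance(42, int)            # six.integer_types -> int
    assert isinstance(b"raw", bytes)      # six.binary_type -> bytes
    assert bytes((7,)) == b"\x07"         # six.int2byte(7) -> bytes((7,))
    assert b"abc"[-1] == 99               # six.indexbytes(b"abc", -1) -> b"abc"[-1]
    buf = io.StringIO("a=1\n")            # six.StringIO / six.BytesIO -> io.StringIO / io.BytesIO
    assert buf.read() == "a=1\n"

    class Base(metaclass=type):           # @six.add_metaclass(Meta) -> class C(metaclass=Meta)
        pass

    err = ValueError("boom")
    try:
        raise err.with_traceback(None)    # six.reraise(type(e), e, tb) -> raise e.with_traceback(tb)
    except ValueError:
        pass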
@@ -2,6 +2,4 @@ Trove Library Specific Commandments
 -------------------------------------
 
 - [T103] Exception messages should be translated
-- [T104] Python 3 is not support basestring,replace basestring with
-  six.string_types
 - [T105] Validate no LOG translations
@@ -9,7 +9,6 @@ python-swiftclient>=2.2.0
 python-cinderclient>=1.1.0
 python-keystoneclient>=2.0.0,!=2.1.0 # Apache-2.0
 kombu>=2.5.0
-six>=1.7.0
 babel
 python-heatclient>=0.2.9
 passlib
@@ -41,7 +41,6 @@ be used.
 import atexit
 import gettext
 import os
-import six
 import sys
 import proboscis
 
@@ -135,7 +135,6 @@ restructuredtext-lint==1.1.3
 rfc3986==1.1.0
 Routes==2.3.1
 simplejson==3.13.2
-six==1.10.0
 smmap2==2.0.3
 snowballstemmer==1.2.1
 Sphinx==1.6.2
@@ -37,7 +37,6 @@ oslo.upgradecheck>=0.1.0 # Apache-2.0
 oslo.utils>=3.33.0 # Apache-2.0
 oslo.concurrency>=3.26.0 # Apache-2.0
 PyMySQL>=0.7.6 # MIT License
-six>=1.10.0 # MIT
 stevedore>=1.20.0 # Apache-2.0
 oslo.messaging>=5.29.0 # Apache-2.0
 osprofiler>=1.4.0 # Apache-2.0
@@ -18,7 +18,6 @@ from collections import OrderedDict
 import io
 import os
 import re
-import six
 import sys
 
 from pylint import lint
@@ -60,8 +59,7 @@ class Config(object):
         sorted_config = OrderedDict()
         for key in sorted(self.config.keys()):
             value = self.get(key)
-            if isinstance(value, list) and not isinstance(value,
-                                                          six.string_types):
+            if isinstance(value, list) and not isinstance(value, str):
                 sorted_config[key] = sorted(value)
             else:
                 sorted_config[key] = value

tox.ini
@@ -67,7 +67,6 @@ import_exceptions = trove.common.i18n
 [flake8:local-plugins]
 extension =
     # T103= checks:check_raised_localized_exceptions
-    T104 = checks:check_no_basestring
     T105 = checks:no_translate_logs
     N335 = checks:assert_raises_regexp
 paths = ./trove/hacking
@@ -13,8 +13,6 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-import six
-
 from oslo_log import log as logging
 
 from neutronclient.common import exceptions as neutron_exceptions
@@ -324,7 +322,7 @@ class Cluster(object):
                     node['availability_zone'])
             if 'type' in node:
                 instance_type = node['type']
-                if isinstance(instance_type, six.string_types):
+                if isinstance(instance_type, str):
                     instance_type = instance_type.split(',')
                 instance['instance_type'] = instance_type
             instances.append(instance)
@@ -20,7 +20,6 @@ import hashlib
 import os
 from oslo_utils import encodeutils
 import random
-import six
 import string
 
 from cryptography.hazmat.backends import default_backend
@@ -67,12 +66,12 @@ def decode_data(data):
 # Pad the data string to an multiple of pad_size
 def pad_for_encryption(data, pad_size=IV_BYTE_COUNT):
     pad_count = pad_size - (len(data) % pad_size)
-    return data + six.int2byte(pad_count) * pad_count
+    return data + bytes((pad_count,)) * pad_count
 
 
 # Unpad the data string by stripping off excess characters
 def unpad_after_decryption(data):
-    return data[:len(data) - six.indexbytes(data, -1)]
+    return data[:len(data) - data[-1]]
 
 
 def encrypt_data(data, key, iv_byte_count=IV_BYTE_COUNT):
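The padding hunk above swaps six.int2byte and six.indexbytes for native bytes
operations. A sketch of the same round trip, with PAD_SIZE standing in for the
module's IV_BYTE_COUNT (assumed to be 16 here):

    PAD_SIZE = 16

    def pad(data: bytes, pad_size: int = PAD_SIZE) -> bytes:
        pad_count = pad_size - (len(data) % pad_size)
        return data + bytes((pad_count,)) * pad_count   # bytes((n,)) == six.int2byte(n)

    def unpad(data: bytes) -> bytes:
        return data[:len(data) - data[-1]]              # data[-1] == six.indexbytes(data, -1)

    assert unpad(pad(b"secret")) == b"secret"
    assert len(pad(b"secret")) % PAD_SIZE == 0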
@@ -14,7 +14,6 @@
 # under the License.
 
 import abc
-import six
 
 from trove.common import cfg
 from trove.common.i18n import _
@@ -85,7 +84,7 @@ class DatastoreModelsBase(object):
         :param desc: Description for exception message.
         :raises: ValueError if not a string/unicode.
         """
-        if not isinstance(value, six.string_types):
+        if not isinstance(value, str):
             raise ValueError(_("%(desc)s is not a string. Type = %(t)s.")
                              % {'desc': desc, 't': type(value)})
 
@@ -15,15 +15,13 @@
 
 import re
 
-from six import u
-
 from trove.common.db import models
 
 
 class PostgreSQLSchema(models.DatastoreSchema):
     """Represents a PostgreSQL schema and its associated properties."""
 
-    name_regex = re.compile(u(r'^[\u0001-\u007F\u0080-\uFFFF]+[^\s]$'))
+    name_regex = re.compile(str(r'^[\u0001-\u007F\u0080-\uFFFF]+[^\s]$'))
 
     def __init__(self, name=None, collate=None, character_set=None,
                  deserializing=False):
@@ -19,7 +19,6 @@ from lxml import etree
 from oslo_log import log as logging
 from oslo_utils import encodeutils
 import routes
-import six
 import stevedore
 import webob.dec
 import webob.exc
@@ -34,8 +33,7 @@ DEFAULT_XMLNS = "http://docs.openstack.org/trove"
 XMLNS_ATOM = "http://www.w3.org/2005/Atom"
 
 
-@six.add_metaclass(abc.ABCMeta)
-class ExtensionDescriptor(object):
+class ExtensionDescriptor(object, metaclass=abc.ABCMeta):
     """Base class that defines the contract for extensions.
 
     Note that you don't have to derive from this class to have a valid
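The metaclass change above is repeated across many of the hunks that follow:
the @six.add_metaclass decorator becomes the Python 3 metaclass keyword. A
small sketch of the equivalence (Example and do_work are made-up names):

    import abc

    # Old spelling removed by this change:
    #     @six.add_metaclass(abc.ABCMeta)
    #     class Example(object): ...
    class Example(object, metaclass=abc.ABCMeta):

        @abc.abstractmethod
        def do_work(self):
            pass

    try:
        Example()
    except TypeError:
        pass  # still abstract: instantiation is refused, same as with the decorator
    else:
        raise AssertionError("abstract class should not be instantiable")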
@@ -14,8 +14,6 @@
 # under the License.
 #
 
-import six
-
 from oslo_log import log as logging
 
 from trove.common.clients import create_nova_client
@@ -84,7 +82,7 @@ class ServerGroup(object):
         scheduler_hint = None
         if locality:
             # Build the scheduler hint, but only if locality's a string
-            if isinstance(locality, six.string_types):
+            if isinstance(locality, str):
                 server_group = cls.create(
                     context, locality, name_suffix)
                 scheduler_hint = cls.convert_to_hint(
@@ -17,7 +17,6 @@
 import abc
 
 from oslo_log import log as logging
-import six
 
 from trove.common.i18n import _
 from trove.common import utils
@@ -26,8 +25,7 @@ from trove.common import utils
 LOG = logging.getLogger(__name__)
 
 
-@six.add_metaclass(abc.ABCMeta)
-class Strategy(object):
+class Strategy(object, metaclass=abc.ABCMeta):
 
     __strategy_ns__ = None
 
@@ -21,7 +21,6 @@ import io
 import re
 import sys
 
-import six
 import xmltodict
 import yaml
 
@@ -74,7 +73,7 @@ class StringConverter(object):
         # Return known mappings and quoted strings right away.
         if value in self._object_mappings:
             return self._object_mappings[value]
-        elif (isinstance(value, six.string_types) and
+        elif (isinstance(value, str) and
               re.match("^'(.*)'|\"(.*)\"$", value)):
             return value
 
@@ -84,8 +83,7 @@ class StringConverter(object):
         return value
 
 
-@six.add_metaclass(abc.ABCMeta)
-class StreamCodec(object):
+class StreamCodec(object, metaclass=abc.ABCMeta):
 
     @abc.abstractmethod
     def serialize(self, data):
@@ -202,7 +200,7 @@ class IniCodec(StreamCodec):
 
     def serialize(self, dict_data):
         parser = self._init_config_parser(dict_data)
-        output = six.StringIO()
+        output = io.StringIO()
         parser.write(output)
 
         return output.getvalue()
@@ -217,8 +215,8 @@ class IniCodec(StreamCodec):
                 for s in parser.sections()}
 
     def _pre_parse(self, stream):
-        buf = six.StringIO()
-        for line in six.StringIO(stream):
+        buf = io.StringIO()
+        for line in io.StringIO(stream):
             # Ignore commented lines.
             if not line.startswith(self._comment_markers):
                 # Strip leading and trailing whitespaces from each line.
@@ -297,7 +295,7 @@ class PropertiesCodec(StreamCodec):
         self._unpack_singletons = unpack_singletons
 
     def serialize(self, dict_data):
-        output = six.StringIO()
+        output = io.StringIO()
         writer = csv.writer(output, delimiter=self._delimiter,
                             quoting=self.QUOTING_MODE,
                             strict=self.STRICT_MODE,
@@ -309,7 +307,7 @@ class PropertiesCodec(StreamCodec):
         return output.getvalue()
 
     def deserialize(self, stream):
-        reader = csv.reader(six.StringIO(stream),
+        reader = csv.reader(io.StringIO(stream),
                             delimiter=self._delimiter,
                             quoting=self.QUOTING_MODE,
                             strict=self.STRICT_MODE,
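The codec hunks above only swap the in-memory buffer type: io.StringIO drops in
wherever six.StringIO was used to write a parser or csv writer to a string. A
short sketch with stdlib modules only (the buffer was six.StringIO() before):

    import configparser
    import csv
    import io

    parser = configparser.ConfigParser()
    parser["DEFAULT"] = {"key": "value"}
    out = io.StringIO()
    parser.write(out)                  # write the config into the in-memory buffer
    assert "key = value" in out.getvalue()

    rows_out = io.StringIO()
    csv.writer(rows_out, delimiter=",").writerow(["a", "b"])
    rows_in = csv.reader(io.StringIO(rows_out.getvalue()), delimiter=",")
    assert next(rows_in) == ["a", "b"]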
@@ -30,7 +30,6 @@ from oslo_utils.encodeutils import safe_encode
 from oslo_utils import importutils
 from oslo_utils import strutils
 from passlib import pwd
-import six
 
 from trove.common import cfg
 from trove.common import exception
@@ -333,7 +332,7 @@ def is_collection(item):
     """Return True is a given item is an iterable collection, but not a string.
     """
     return (isinstance(item, collections.Iterable) and
-            not isinstance(item, (bytes, six.text_type)))
+            not isinstance(item, (bytes, str)))
 
 
 def format_output(message, format_len=79, truncate_len=None, replace_index=0):
@@ -14,7 +14,6 @@
 # under the License.
 
 from oslo_log import log as logging
-import six
 
 from trove.cluster import models as cluster_models
 import trove.common.apischema as apischema
@@ -307,7 +306,7 @@ class ConfigurationsController(wsgi.Controller):
             raise exception.UnprocessableEntity(message=msg)
 
         # integer min/max checking
-        if isinstance(v, six.integer_types) and not isinstance(v, bool):
+        if isinstance(v, int) and not isinstance(v, bool):
             if rule.min_size is not None:
                 try:
                     min_value = int(rule.min_size)
@@ -345,9 +344,9 @@ class ConfigurationsController(wsgi.Controller):
         if value_type == "boolean":
             return bool
         elif value_type == "string":
-            return six.string_types
+            return str
         elif value_type == "integer":
-            return six.integer_types
+            return int
        elif value_type == "float":
             return float
         else:
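The integer checks above keep the explicit bool exclusion: bool is a subclass
of int in Python 3, just as it was covered by six.integer_types, so replacing
the tuple with plain int does not change which values pass. A tiny sketch
(is_bounded_integer is a made-up helper name):

    def is_bounded_integer(v):
        return isinstance(v, int) and not isinstance(v, bool)

    assert is_bounded_integer(10)
    assert not is_bounded_integer(True)    # True is an int subclass, still rejected
    assert not is_bounded_integer("10")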
@@ -19,7 +19,6 @@ import abc
 from oslo_config.cfg import NoSuchOptError
 from oslo_log import log as logging
 from oslo_utils import importutils
-import six
 
 from trove.cluster import models as cluster_models
 from trove.cluster.models import DBCluster
@@ -62,8 +61,7 @@ class ExtensionController(wsgi.Controller):
                 {'tenant': target.tenant_id})
 
 
-@six.add_metaclass(abc.ABCMeta)
-class BaseDatastoreRootController(ExtensionController):
+class BaseDatastoreRootController(ExtensionController, metaclass=abc.ABCMeta):
     """Base class that defines the contract for root controllers."""
 
     @abc.abstractmethod
@@ -18,7 +18,6 @@ import os
 import re
 
 from oslo_log import log as logging
-import six
 
 from trove.guestagent.common import guestagent_utils
 from trove.guestagent.common import operating_system
@@ -224,8 +223,7 @@ class ConfigurationManager(object):
         self._value_cache = self.parse_configuration()
 
 
-@six.add_metaclass(abc.ABCMeta)
-class ConfigurationOverrideStrategy(object):
+class ConfigurationOverrideStrategy(object, metaclass=abc.ABCMeta):
     """ConfigurationOverrideStrategy handles configuration files.
     The strategy provides functionality to enumerate, apply and remove
     configuration overrides.
@@ -17,8 +17,6 @@ import collections
 import os
 import re
 
-import six
-
 from trove.common import cfg
 from trove.common import pagination
 from trove.common import utils
@@ -112,7 +110,7 @@ def build_file_path(base_dir, base_name, *extensions):
 def to_bytes(value):
     """Convert numbers with a byte suffix to bytes.
     """
-    if isinstance(value, six.string_types):
+    if isinstance(value, str):
         pattern = re.compile(r'^(\d+)([K,M,G]{1})$')
         match = pattern.match(value)
         if match:
@@ -17,7 +17,6 @@ import re
 
 from oslo_log import log as logging
 from oslo_utils import encodeutils
-import six
 import sqlalchemy
 from sqlalchemy import exc
 from sqlalchemy.sql.expression import text
@@ -93,8 +92,7 @@ class BaseMySqlAppStatus(service.BaseDbStatus):
         return service_status.ServiceStatuses.UNKNOWN
 
 
-@six.add_metaclass(abc.ABCMeta)
-class BaseMySqlAdmin(object):
+class BaseMySqlAdmin(object, metaclass=abc.ABCMeta):
     """Handles administrative tasks on the MySQL database."""
 
     def __init__(self, mysql_root_access, mysql_app):
@@ -17,7 +17,6 @@
 import abc
 import functools
 import re
-import six
 
 from oslo_log import log as logging
 
@@ -27,8 +26,7 @@ from trove.common import exception
 LOG = logging.getLogger(__name__)
 
 
-@six.add_metaclass(abc.ABCMeta)
-class ModuleDriver(object):
+class ModuleDriver(object, metaclass=abc.ABCMeta):
     """Base class that defines the contract for module drivers.
 
     Note that you don't have to derive from this class to have a valid
@@ -24,7 +24,6 @@ from tempfile import NamedTemporaryFile
 from oslo_log import log as logging
 from oslo_utils import encodeutils
 import pexpect
-import six
 
 from trove.common import exception
 from trove.common.exception import ProcessExecutionError
@@ -52,7 +51,7 @@ def getoutput(*cmd):
                                 stderr=subprocess.STDOUT)
     except OSError:
         # ignore errors like program not found
-        return six.text_type("")
+        return str("")
 
     stdout = proc.communicate()[0]
     return encodeutils.safe_decode(stdout)
@@ -16,12 +16,10 @@
 
 import abc
 
-import six
 from trove.common.strategies.strategy import Strategy
 
 
-@six.add_metaclass(abc.ABCMeta)
-class Replication(Strategy):
+class Replication(Strategy, metaclass=abc.ABCMeta):
     """Base class for Replication Strategy implementation."""
 
     __strategy_type__ = 'replication'
@@ -16,7 +16,6 @@
 import abc
 import os
 import shlex
-import six
 from tempfile import NamedTemporaryFile
 import traceback
 
@@ -51,8 +50,7 @@ def log_and_raise(log_fmt, exc_fmt, fmt_content=None):
     raise exception.GuestError(original_message=raise_msg)
 
 
-@six.add_metaclass(abc.ABCMeta)
-class FSBase(object):
+class FSBase(object, metaclass=abc.ABCMeta):
 
     def __init__(self, fstype, format_options):
         self.fstype = fstype
@@ -58,18 +58,6 @@ def check_raised_localized_exceptions(logical_line, filename):
             yield (logical_line.index(exception_msg), msg)
 
 
-@core.flake8ext
-def check_no_basestring(logical_line):
-    """T104 - Don't use basestring, use six.string_types instead
-    basestring is not supported by py3, using six.string_types to ensure
-    py3 and py2 compatibility
-    """
-    if re.search(r"\, basestring\)", logical_line):
-        msg = ("T104: basestring is not Python3-compatible, use "
-               "six.string_types instead.")
-        yield(0, msg)
-
-
 @core.flake8ext
 def no_translate_logs(physical_line, logical_line, filename):
     """T105 - Log messages shouldn't be translated from the
@@ -26,7 +26,6 @@ from oslo_config.cfg import NoSuchOptError
 from oslo_log import log as logging
 from oslo_utils import encodeutils
 from oslo_utils import netutils
-import six
 from sqlalchemy import func
 
 from trove.backup.models import Backup
@@ -757,7 +756,7 @@ class BaseInstance(SimpleInstance):
         except Exception as e:
             LOG.warning("Failed to stop the database before attempting "
                         "to delete trove instance %s, error: %s", self.id,
-                        six.text_type(e))
+                        str(e))
 
         # Nova VM
         if old_server:
@@ -766,7 +765,7 @@ class BaseInstance(SimpleInstance):
                 self.server.delete()
             except Exception as e:
                 LOG.warning("Failed to delete compute server %s",
-                            self.server_id, six.text_type(e))
+                            self.server_id, str(e))
 
         # Neutron ports (floating IP)
         try:
@@ -778,7 +777,7 @@ class BaseInstance(SimpleInstance):
                 neutron.delete_port(self.neutron_client, port["id"])
         except Exception as e:
             LOG.warning("Failed to delete ports for instance %s, "
-                        "error: %s", self.id, six.text_type(e))
+                        "error: %s", self.id, str(e))
 
         # Neutron security groups
         try:
@@ -791,7 +790,7 @@ class BaseInstance(SimpleInstance):
                     self.neutron_client.delete_security_group(sg["id"])
         except Exception as e:
             LOG.warning("Failed to delete security groups for instance %s, "
-                        "error: %s", self.id, six.text_type(e))
+                        "error: %s", self.id, str(e))
 
         # DNS resources, e.g. Designate
         try:
@@ -801,14 +800,14 @@ class BaseInstance(SimpleInstance):
                 dns_api.delete_instance_entry(instance_id=self.id)
         except Exception as e:
             LOG.warning("Failed to delete dns entry of instance %s, error: %s",
-                        self.id, six.text_type(e))
+                        self.id, str(e))
 
         # Nova server group
         try:
             srv_grp.ServerGroup.delete(self.context, self.server_group)
         except Exception as e:
             LOG.warning("Failed to delete server group for %s, error: %s",
-                        self.id, six.text_type(e))
+                        self.id, str(e))
 
         def server_is_finished():
             try:
@@ -844,7 +843,7 @@ class BaseInstance(SimpleInstance):
             volume.delete()
         except Exception as e:
             LOG.warning("Failed to delete volume for instance %s, error: %s",
-                        self.id, six.text_type(e))
+                        self.id, str(e))
 
         notification.TroveInstanceDelete(
             instance=self,
@@ -1942,7 +1941,7 @@ class instance_encryption_key_cache(object):
             return val
 
         # We need string anyway
-        if isinstance(val, six.binary_type):
+        if isinstance(val, bytes):
             val = encodeutils.safe_decode(val)
 
         if len(self._lru) == self._lru_cache_size:
@@ -17,7 +17,6 @@
 """Model classes that form the core of Module functionality."""
 
 import hashlib
-import six
 from sqlalchemy.sql.expression import or_
 
 from oslo_log import log as logging
@@ -247,7 +246,7 @@ class Module(object):
     @staticmethod
     def process_contents(contents):
         md5 = contents
-        if isinstance(md5, six.text_type):
+        if isinstance(md5, str):
             md5 = md5.encode('utf-8')
         md5 = hashlib.md5(md5).hexdigest()
         encrypted_contents = crypto_utils.encrypt_data(
@@ -15,11 +15,8 @@
 #
 import abc
 
-import six
-
 
-@six.add_metaclass(abc.ABCMeta)
-class NetworkDriver(object):
+class NetworkDriver(object, metaclass=abc.ABCMeta):
     """Base Network Driver class to abstract the network driver used."""
 
     @abc.abstractmethod
@@ -18,7 +18,6 @@
 from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_utils import importutils
-import six
 
 from trove.common import exception
 from trove.quota.models import Quota
@@ -230,7 +229,7 @@ class QuotaEngine(object):
 
         if not quota_driver_class:
            quota_driver_class = CONF.quota_driver
-        if isinstance(quota_driver_class, six.string_types):
+        if isinstance(quota_driver_class, str):
            quota_driver_class = importutils.import_object(quota_driver_class,
                                                           self._resources)
        self._driver = quota_driver_class
@@ -30,7 +30,6 @@ from proboscis import before_class
 from proboscis.decorators import time_out
 from proboscis import SkipTest
 from proboscis import test
-import six
 from troveclient.compat import exceptions
 
 from trove.common.utils import poll_until
@@ -230,12 +229,12 @@ class CreateConfigurations(ConfigurationsTestBase):
                         msg="Get Configuration parameter")
            assert_equal(param_name, config_parameter_dict['name'])
            with TypeCheck('ConfigurationParameter', param) as parameter:
-                parameter.has_field('name', six.string_types)
+                parameter.has_field('name', str)
                parameter.has_field('restart_required', bool)
-                parameter.has_field('max', six.integer_types)
-                parameter.has_field('min', six.integer_types)
-                parameter.has_field('type', six.string_types)
-                parameter.has_field('datastore_version_id', six.text_type)
+                parameter.has_field('max', int)
+                parameter.has_field('min', int)
+                parameter.has_field('type', str)
+                parameter.has_field('datastore_version_id', str)
 
     @test
     def test_configurations_create_invalid_values(self):
@@ -283,12 +282,12 @@ class CreateConfigurations(ConfigurationsTestBase):
         resp, body = instance_info.dbaas.client.last_response
         assert_equal(resp.status, 200)
         with TypeCheck('Configuration', result) as configuration:
-            configuration.has_field('name', six.string_types)
-            configuration.has_field('description', six.string_types)
+            configuration.has_field('name', str)
+            configuration.has_field('description', str)
             configuration.has_field('values', dict)
-            configuration.has_field('datastore_name', six.string_types)
-            configuration.has_field('datastore_version_id', six.text_type)
-            configuration.has_field('datastore_version_name', six.string_types)
+            configuration.has_field('datastore_name', str)
+            configuration.has_field('datastore_version_id', str)
+            configuration.has_field('datastore_version_name', str)
         global configuration_info
         configuration_info = result
         assert_equal(configuration_info.name, CONFIG_NAME)
@@ -367,12 +366,12 @@ class AfterConfigurationsCreation(ConfigurationsTestBase):
 
         # check the result field types
         with TypeCheck("configuration", result) as check:
-            check.has_field("id", six.string_types)
-            check.has_field("name", six.string_types)
-            check.has_field("description", six.string_types)
+            check.has_field("id", str)
+            check.has_field("name", str)
+            check.has_field("description", str)
             check.has_field("values", dict)
-            check.has_field("created", six.string_types)
-            check.has_field("updated", six.string_types)
+            check.has_field("created", str)
+            check.has_field("updated", str)
             check.has_field("instance_count", int)
 
         print(result.values)
@@ -402,7 +401,7 @@ class AfterConfigurationsCreation(ConfigurationsTestBase):
                 if param.type == 'integer':
                     check.has_element(item_key, int)
                 if param.type == 'string':
-                    check.has_element(item_key, six.string_types)
+                    check.has_element(item_key, str)
                 if param.type == 'boolean':
                     check.has_element(item_key, bool)
 
@@ -433,12 +432,12 @@ class ListConfigurations(ConfigurationsTestBase):
         result = instance_info.dbaas.configurations.list()
         for conf in result:
             with TypeCheck("Configuration", conf) as check:
-                check.has_field('id', six.string_types)
-                check.has_field('name', six.string_types)
-                check.has_field('description', six.string_types)
-                check.has_field('datastore_version_id', six.string_types)
-                check.has_field('datastore_version_name', six.string_types)
-                check.has_field('datastore_name', six.string_types)
+                check.has_field('id', str)
+                check.has_field('name', str)
+                check.has_field('description', str)
+                check.has_field('datastore_version_id', str)
+                check.has_field('datastore_version_name', str)
+                check.has_field('datastore_name', str)
 
         exists = [config for config in result if
                   config.id == configuration_info.id]
@@ -19,7 +19,6 @@ from proboscis.asserts import assert_raises
 from proboscis.asserts import assert_true
 from proboscis import before_class
 from proboscis import test
-import six
 from troveclient.compat import exceptions
 
 from trove import tests
@@ -48,8 +47,8 @@ class Datastores(object):
         datastores = self.rd_client.datastores.list()
         for datastore in datastores:
             with TypeCheck('Datastore', datastore) as check:
-                check.has_field("id", six.string_types)
-                check.has_field("name", six.string_types)
+                check.has_field("id", str)
+                check.has_field("name", str)
                 check.has_field("links", list)
                 check.has_field("versions", list)
 
@@ -59,8 +58,8 @@ class Datastores(object):
         datastore_by_name = self.rd_client.datastores.get(
             test_config.dbaas_datastore)
         with TypeCheck('Datastore', datastore_by_name) as check:
-            check.has_field("id", six.string_types)
-            check.has_field("name", six.string_types)
+            check.has_field("id", str)
+            check.has_field("name", str)
             check.has_field("links", list)
         assert_equal(datastore_by_name.name, test_config.dbaas_datastore)
 
@@ -68,8 +67,8 @@ class Datastores(object):
         datastore_by_id = self.rd_client.datastores.get(
             datastore_by_name.id)
         with TypeCheck('Datastore', datastore_by_id) as check:
-            check.has_field("id", six.string_types)
-            check.has_field("name", six.string_types)
+            check.has_field("id", str)
+            check.has_field("name", str)
             check.has_field("links", list)
             check.has_field("versions", list)
         assert_equal(datastore_by_id.id, datastore_by_name.id)
@@ -134,8 +133,8 @@ class DatastoreVersions(object):
             self.datastore_active.name)
         for version in versions:
             with TypeCheck('DatastoreVersion', version) as check:
-                check.has_field("id", six.string_types)
-                check.has_field("name", six.string_types)
+                check.has_field("id", str)
+                check.has_field("name", str)
                 check.has_field("links", list)
 
     @test
@@ -143,9 +142,9 @@ class DatastoreVersions(object):
         version = self.rd_client.datastore_versions.get(
             self.datastore_active.name, self.datastore_version_active.name)
         with TypeCheck('DatastoreVersion', version) as check:
-            check.has_field("id", six.string_types)
-            check.has_field("name", six.string_types)
-            check.has_field("datastore", six.string_types)
+            check.has_field("id", str)
+            check.has_field("name", str)
+            check.has_field("datastore", str)
             check.has_field("links", list)
         assert_equal(version.name, self.datastore_version_active.name)
 
@@ -154,9 +153,9 @@ class DatastoreVersions(object):
         version = self.rd_client.datastore_versions.get_by_uuid(
             self.datastore_version_active.id)
         with TypeCheck('DatastoreVersion', version) as check:
-            check.has_field("id", six.string_types)
-            check.has_field("name", six.string_types)
-            check.has_field("datastore", six.string_types)
+            check.has_field("id", str)
+            check.has_field("name", str)
+            check.has_field("datastore", str)
             check.has_field("links", list)
         assert_equal(version.name, self.datastore_version_active.name)
 
@@ -176,8 +175,8 @@ class DatastoreVersions(object):
             self.datastore_active.id)
         for version in versions:
             with TypeCheck('DatastoreVersion', version) as check:
-                check.has_field("id", six.string_types)
-                check.has_field("name", six.string_types)
+                check.has_field("id", str)
+                check.has_field("name", str)
                 check.has_field("links", list)
 
     @test
@@ -185,9 +184,9 @@ class DatastoreVersions(object):
         version = self.rd_client.datastore_versions.get(
             self.datastore_active.id, self.datastore_version_active.id)
         with TypeCheck('DatastoreVersion', version) as check:
-            check.has_field("id", six.string_types)
-            check.has_field("name", six.string_types)
-            check.has_field("datastore", six.string_types)
+            check.has_field("id", str)
+            check.has_field("name", str)
+            check.has_field("datastore", str)
             check.has_field("links", list)
         assert_equal(version.name, self.datastore_version_active.name)
 
@@ -24,7 +24,6 @@ import swiftclient.client as swift_client
 import uuid
 
 from oslo_log import log as logging
-import six
 from swiftclient import client as swift
 
 
@@ -172,7 +171,7 @@ class FakeSwiftConnection(object):
         # container is where the object segments are in and prefix is the
         # common prefix for all segments.
         self.manifest_name = name
-        if isinstance(contents, six.text_type):
+        if isinstance(contents, str):
             object_checksum.update(contents.encode('utf-8'))
         else:
             object_checksum.update(contents)
@@ -14,11 +14,9 @@
 # under the License.
 
 import abc
-import six
 
 
-@six.add_metaclass(abc.ABCMeta)
-class TestGroup(object):
+class TestGroup(object, metaclass=abc.ABCMeta):
 
     def __init__(self, test_runner):
         self._test_runner = test_runner
@@ -17,7 +17,6 @@ import json
 import os
 
 from proboscis import SkipTest
-import six
 import time as timer
 
 from trove.common import exception
@@ -517,17 +516,17 @@ class ClusterRunner(TestRunner):
     def _assert_cluster_values(self, cluster, expected_task_name,
                                check_locality=True):
         with TypeCheck('Cluster', cluster) as check:
-            check.has_field("id", six.string_types)
-            check.has_field("name", six.string_types)
+            check.has_field("id", str)
+            check.has_field("name", str)
             check.has_field("datastore", dict)
             check.has_field("instances", list)
             check.has_field("links", list)
-            check.has_field("created", six.text_type)
-            check.has_field("updated", six.text_type)
+            check.has_field("created", str)
+            check.has_field("updated", str)
             if check_locality:
-                check.has_field("locality", six.text_type)
+                check.has_field("locality", str)
             if self.active_config_group_id:
-                check.has_field("configuration", six.text_type)
+                check.has_field("configuration", str)
         for instance in cluster.instances:
             isinstance(instance, dict)
             self.assert_is_not_none(instance['id'])
@@ -16,7 +16,6 @@
 from datetime import datetime
 import json
 from proboscis import SkipTest
-import six
 
 from trove.common.utils import generate_uuid
 from trove.tests.scenario.runners.test_runners import TestRunner
@@ -140,12 +139,12 @@ class ConfigurationRunner(TestRunner):
         self.assert_client_code(client, expected_http_code)
 
         with TypeCheck('Configuration', result) as configuration:
-            configuration.has_field('name', six.string_types)
-            configuration.has_field('description', six.string_types)
+            configuration.has_field('name', str)
+            configuration.has_field('description', str)
             configuration.has_field('values', dict)
-            configuration.has_field('datastore_name', six.string_types)
-            configuration.has_field('datastore_version_id', six.text_type)
-            configuration.has_field('datastore_version_name', six.string_types)
+            configuration.has_field('datastore_name', str)
+            configuration.has_field('datastore_version_id', str)
+            configuration.has_field('datastore_version_name', str)
 
         self.assert_equal(name, result.name)
         self.assert_equal(description, result.description)
@@ -217,12 +216,12 @@ class ConfigurationRunner(TestRunner):
 
         # check the result field types
         with TypeCheck("configuration", result) as check:
-            check.has_field("id", six.string_types)
-            check.has_field("name", six.string_types)
-            check.has_field("description", six.string_types)
+            check.has_field("id", str)
+            check.has_field("name", str)
+            check.has_field("description", str)
             check.has_field("values", dict)
-            check.has_field("created", six.string_types)
-            check.has_field("updated", six.string_types)
+            check.has_field("created", str)
+            check.has_field("updated", str)
             check.has_field("instance_count", int)
 
         # check for valid timestamps
@@ -244,7 +243,7 @@ class ConfigurationRunner(TestRunner):
                 if param.type == 'integer':
                     check.has_element(item_key, int)
                 if param.type == 'string':
-                    check.has_element(item_key, six.string_types)
+                    check.has_element(item_key, str)
                 if param.type == 'boolean':
                     check.has_element(item_key, bool)
 
@@ -17,7 +17,6 @@
 import os
 from proboscis import SkipTest
 import re
-import six
 import tempfile
 import time
 
@@ -1341,7 +1340,7 @@ class ModuleRunner(TestRunner):
             contents = fh.read()
 
         expected = expected['contents']
-        if isinstance(expected, six.string_types):
+        if isinstance(expected, str):
             expected = expected.encode()
 
         self.assert_equal(expected, contents,
@@ -19,7 +19,6 @@ import json
 import netaddr
 import os
 import proboscis
-import six
 import sys
 import time as timer
 import types
@@ -284,13 +283,12 @@ class LogOnFail(type):
 
             # Only report on the first error that occurs
             mcs.reset_inst_ids()
-            six.reraise(extype, exvalue, extb)
+            raise exvalue.with_traceback(extb)
 
         return wrapper
 
 
-@six.add_metaclass(LogOnFail)
-class TestRunner(object):
+class TestRunner(object, metaclass=LogOnFail):
 
     """
     Base class for all 'Runner' classes.
@@ -18,10 +18,10 @@ Tests dealing with HTTP rate-limiting.
 """
 
 
+import io
 from http import client as http_client
 from unittest.mock import Mock, MagicMock, patch
 from oslo_serialization import jsonutils
-import six
 import webob
 
 from trove.common import limits
@@ -564,7 +564,7 @@ class FakeHttplibSocket(object):
 
     def __init__(self, response_string):
         """Initialize new `FakeHttplibSocket`."""
-        self._buffer = six.BytesIO(response_string)
+        self._buffer = io.BytesIO(response_string)
 
     def makefile(self, _mode, *args):
         """Returns the socket's internal buffer."""
@@ -17,7 +17,6 @@
 import os
 from unittest import mock
 
-import six
 
 from trove.common import crypto_utils
 from trove.tests.unittests import trove_testtools
@@ -39,7 +38,7 @@ class TestEncryptUtils(trove_testtools.TestCase):
         for datum in data:
             encoded_data = crypto_utils.encode_data(datum)
             decoded_data = crypto_utils.decode_data(encoded_data)
-            if isinstance(datum, six.text_type):
+            if isinstance(datum, str):
                 decoded_data = decoded_data.decode('utf-8')
             self. assertEqual(datum, decoded_data,
                               "Encode/decode failed")
@@ -76,14 +76,6 @@ class HackingTestCase(trove_testtools.TestCase):
         self.assertLinePasses(f, "raise KeyError('Error text')",
                               'neutron_lib/tests/unit/mytest.py')
 
-    def test_no_basestring(self):
-        self.assertEqual(
-            1,
-            len(list(tc.check_no_basestring("isinstance(x, basestring)"))))
-        self.assertEqual(
-            0,
-            len(list(tc.check_no_basestring("this basestring is good)"))))
-
     # We are patching pycodestyle so that only the check under test is actually
     # installed.
     @mock.patch('pycodestyle._checks',
@@ -23,7 +23,6 @@ from proboscis.asserts import assert_not_equal
 from proboscis.asserts import assert_true
 from proboscis.asserts import ASSERTION_ERROR
 from proboscis.asserts import Check
-import six
 
 
 def get_stack_trace_of_caller(level_up):
@@ -40,7 +39,7 @@ def get_stack_trace_of_caller(level_up):
 def raise_blame_caller(level_up, ex):
     """Raises an exception, changing the stack trace to point to the caller."""
     new_st = get_stack_trace_of_caller(level_up + 2)
-    six.reraise(type(ex), ex, new_st)
+    raise ex.with_traceback(new_st)
 
 
 class Checker(object):