Update hacking for Python3

The repo is Python 3 only now, so update hacking to version 3.0, which
supports Python 3, and fix the problems it found. Update the local
hacking checks for the new flake8. Remove hacking and friends from
lower-constraints; they do not need to be installed at run time. Fix
flake8wrap to take its arguments from tox.ini instead of looking at the
.tox directory.

Change-Id: I1c5f5a85af02d9f11e475b6088fe1d9d9d252083

parent 977b5ef7ce
commit 3fe89808c6
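Note: most of the problems the new toolchain found fall into a few
recurring buckets, visible throughout the diff below: regex patterns
rewritten as raw strings (W605), `except ... as e` clauses that never
use `e` (F841), lambda assignments (E731), missing blank lines before
top-level code (E305), and the rename of the pep8 module to
pycodestyle. On the raw strings: Python 3.6+ emits a DeprecationWarning
(a SyntaxWarning in newer releases) for unrecognized escape sequences
such as `\d` in ordinary string literals, so patterns meant for `re`
should carry the r'' prefix. A minimal illustration, not taken from the
commit itself:

    import re

    # "\d" is not a recognized string escape: Python 3 keeps the
    # backslash but warns, and pycodestyle reports it as W605.
    bad = "virtfn(\d+)"

    # A raw string leaves the backslash alone, which is what re expects.
    good = re.compile(r"virtfn(\d+)")
    print(good.match("virtfn7").group(1))  # -> 7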
@@ -32,7 +32,6 @@ eventlet==0.18.2
 extras==1.0.0
 fasteners==0.14.1
 fixtures==3.0.0
-flake8==2.5.5
 future==0.16.0
 futurist==1.6.0
 Flask==0.12.3
@@ -40,7 +39,6 @@ gitdb2==2.0.3
 GitPython==2.1.8
 greenlet==0.4.13
 grpcio==1.12.0
-hacking==0.12.0
 idna==2.6
 imagesize==1.0.0
 iso8601==0.1.12
@@ -102,7 +100,6 @@ Paste==2.0.3
 PasteDeploy==1.5.2
 pbr==2.0.0
 pecan==1.0.0
-pep8==1.5.7
 pika==0.10.0
 pika-pool==0.1.3
 prettytable==0.7.2
@@ -111,7 +108,6 @@ psutil==3.2.2
 pyasn1==0.4.2
 pycadf==2.7.0
 pycparser==2.18
-pyflakes==0.8.1
 pyroute2==0.5.7
 Pygments==2.2.0
 pyinotify==0.9.6
@@ -8,7 +8,7 @@ doc8>=0.6.0 # Apache-2.0
 coverage!=4.4,>=4.0 # Apache-2.0
 mock>=2.0.0 # BSD
 fixtures>=3.0.0 # Apache-2.0/BSD
-hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
+hacking>=3.0,<3.1.0 # Apache-2.0
 oslotest>=3.2.0 # Apache-2.0
 osprofiler>=1.4.0 # Apache-2.0
 os-testr>=1.0.0 # Apache-2.0
@@ -16,5 +16,5 @@ if test "x$1" = "x-HEAD" ; then
-    diff -u --from-file /dev/null ${files} | flake8 --max-complexity 34 --exclude zun/criapi --diff "$@"
+    diff -u --from-file /dev/null ${files} | flake8 --diff "$@"
 else
     echo "Running flake8 on all files"
-    exec flake8 --max-complexity 34 --exclude zun/criapi "$@"
+    exec flake8 "$@"
 fi
tox.ini
@@ -94,12 +94,26 @@ enable-extensions = H203,H106,H904
 # H405 is another one that is good as a guideline, but sometimes
 # multiline docstrings just don't have a natural summary line.
 # Rejecting code for this reason is wrong.
-ignore = E123,E125,H405
+# W503 line break before binary operator
+# W504 line break after binary operator
+# E402 module level import not at top of file
+ignore = E123,E125,H405,W503,W504,E402
 builtins = _
-exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build
+exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,zun/criapi/
 
-[hacking]
-local-check-factory = zun.hacking.checks.factory
+[flake8:local-plugins]
+extension =
+  Z322 = checks:no_mutable_default_args
+  Z323 = checks:assert_equal_true_or_false
+  Z302 = checks:assert_equal_not_none
+  Z316 = checks:assert_true_isinstance
+  Z338 = checks:assert_equal_in
+  Z310 = checks:use_timeutils_utcnow
+  Z336 = checks:dict_constructor_with_list_copy
+  Z339 = checks:no_xrange
+  Z352 = checks:no_log_warn
+  Z353 = checks:no_translate_logs
+paths = ./zun/hacking
 
 [testenv:fast8]
 basepython = python3
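Note: with hacking 3.x, local checks are plain flake8 plugins. The
[flake8:local-plugins] table above maps each Z-code to a function in
zun/hacking/checks.py, and each function carries the @core.flake8ext
decorator, replacing the old hacking-specific local-check-factory
hook. A minimal sketch of the pattern, condensed from
zun/hacking/checks.py:

    import re

    from hacking import core

    mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")


    @core.flake8ext
    def no_mutable_default_args(logical_line):
        """Z322: method default arguments must not be mutable."""
        if mutable_default_args.match(logical_line):
            yield (0, "Z322: Method's default argument shouldn't be mutable!")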
@@ -618,7 +618,7 @@ class ContainersController(base.Controller):
         try:
             return neutron_api.find_resourceid_by_name_or_id(
                 'security_group', security_group['name'], context.project_id)
-        except n_exc.NeutronClientNoUniqueMatch as e:
+        except n_exc.NeutronClientNoUniqueMatch:
             msg = _("Multiple security group matches found for name "
                     "%(name)s, use an ID to be more specific.") % {
                 'name': security_group['name']}
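Note: dropping the unused `as e` here (and in similar hunks below)
addresses pyflakes' F841, "local variable is assigned to but never
used". A minimal illustration, not from the commit:

    import logging

    LOG = logging.getLogger(__name__)


    def risky():
        raise ValueError("boom")


    try:
        risky()
    except ValueError:  # no "as e": the exception object is never referenced
        LOG.exception('risky() failed')  # still logs the full traceback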
@@ -95,7 +95,7 @@ auto_remove = {
 
 cpu = {
     'type': ['number', 'string', 'null'],
-    'pattern': '^[0-9]*(\.([0-9]+))?$',
+    'pattern': r'^[0-9]*(\.([0-9]+))?$',
     'minLength': 1,
     'minimum': CONF.minimum_cpus,
     'maximum': CONF.maximum_cpus,
@@ -33,7 +33,7 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
     def __init__(self, app, conf, public_api_routes=None):
         if public_api_routes is None:
             public_api_routes = []
-        route_pattern_tpl = '%s(\.json)?$'
+        route_pattern_tpl = r'%s(\.json)?$'
 
         try:
             self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl)
@@ -46,5 +46,6 @@ def main():
     launcher.launch_service(server, workers=server.workers)
     launcher.wait()
 
+
 if __name__ == '__main__':
     sys.exit(main())
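Note: the added blank line here (and in the next two hunks) satisfies
pycodestyle E305, "expected 2 blank lines after class or function
definition", which the updated flake8 enforces for top-level code
following a def. Illustration:

    def main():
        return 0


    # Two blank lines above the top-level statement keep E305 quiet.
    if __name__ == '__main__':
        main()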
@@ -32,5 +32,6 @@ def main():
 
     service.CNIDaemonServiceManager().run()
 
+
 if __name__ == '__main__':
     sys.exit(main())
@@ -16,7 +16,8 @@ import re
 
 def _quote_meta(s):
     special_chars = frozenset("()[]{}?*+|^$\\.-#&~")
-    escape = lambda c: r'\{}'.format(c) if c in special_chars else c
+    escape = lambda c: (  # noqa: E731
+        r'\{}'.format(c) if c in special_chars else c)
     sp = (escape(c) for c in s)
     return r''.join(sp)
 
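Note: E731 ("do not assign a lambda expression, use a def") is raised
by the newer pycodestyle. The commit keeps these lambdas and waives the
check with `# noqa: E731`, rewrapping the lines that were also too
long. The def-based alternative would look like this; a hypothetical
rewrite, not what the commit chose:

    special_chars = frozenset("()[]{}?*+|^$\\.-#&~")


    def escape(c):
        # Equivalent to the lambda, and passes E731 without a noqa.
        return r'\{}'.format(c) if c in special_chars else c


    print(''.join(escape(c) for c in "a.b(c)"))  # -> a\.b\(c\)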
@@ -42,7 +42,7 @@ CONF = zun.conf.CONF
 try:
     CONF.import_opt('fatal_exception_format_errors',
                     'oslo_versionedobjects.exception')
-except cfg.NoSuchOptError as e:
+except cfg.NoSuchOptError:
     # Note:work around for zun run against master branch
     # in devstack gate job, as zun not branched yet
     # versionobjects kilo/master different version can
@@ -32,7 +32,8 @@ def _to_byte_string(value, num_bits):
     """
 
     shifts = six.moves.xrange(num_bits - 8, -8, -8)
-    byte_at = lambda off: (value >> off if off >= 0 else value << -off) & 0xff
+    byte_at = lambda off: (  # noqa: E731
+        (value >> off if off >= 0 else value << -off) & 0xff)
     return ''.join(six.int2byte(byte_at(offset)) for offset in shifts)
 
 
@@ -1111,7 +1111,8 @@ class Manager(periodic_task.PeriodicTasks):
                 "different from %(tar_tag)s the tag in tar",
                 {'glance_tag': pulled_image['tags'],
                  'tar_tag': pulled_image['tag']})
-        repo_tag = ':'.join([pulled_image['repo'], pulled_image['tag']]) \
+        repo_tag = ':'.join([pulled_image['repo'],
+                             pulled_image['tag']]) \
             if pulled_image['tag'] else pulled_image['repo']
         image_dict = self.driver.inspect_image(repo_tag)
 
@@ -76,9 +76,10 @@ class Host(object):
         if backing_filesystem == 'xfs':
             # Check project quota mount option
             try:
-                cmd = "mount |grep $(df " + host_info['docker_root_dir'] + \
-                    " |awk 'FNR==2 {print $1}') | grep 'xfs'" \
-                    " |grep -E 'pquota|prjquota'"
+                cmd = ("mount |grep $(df " +
+                       host_info['docker_root_dir'] +
+                       " |awk 'FNR==2 {print $1}') | grep 'xfs'"
+                       " |grep -E 'pquota|prjquota'")
                 utils.execute(cmd, shell=True)
             except exception.CommandError:
                 sp_disk_quota = False
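Note: this hunk (like the repo_tag one above) trades backslash line
continuations for implicit continuation inside parentheses, the style
PEP 8 recommends. A minimal before/after sketch, assuming a
hypothetical host_info dict:

    host_info = {'docker_root_dir': '/var/lib/docker'}

    # Old style: backslash continuation.
    cmd = "mount |grep $(df " + host_info['docker_root_dir'] + \
        " |awk 'FNR==2 {print $1}')"

    # New style: the open parenthesis continues the line implicitly.
    cmd = ("mount |grep $(df " +
           host_info['docker_root_dir'] +
           " |awk 'FNR==2 {print $1}')")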
@@ -42,9 +42,9 @@ class LinuxHost(host_capability.Host):
             old_lscpu = True
 
         if old_lscpu:
-            cpu_sock_pair = re.findall("\d+(?:,\d+)?", str(output))
+            cpu_sock_pair = re.findall(r"\d+(?:,\d+)?", str(output))
         else:
-            cpu_sock_pair = re.findall("\d+(?:,\d+,[Y/N])?", str(output))
+            cpu_sock_pair = re.findall(r"\d+(?:,\d+,[Y/N])?", str(output))
         sock_map = defaultdict(list)
         for value in cpu_sock_pair:
             val = value.split(",")
@@ -64,7 +64,7 @@ class LinuxHost(host_capability.Host):
         else:
             raise
 
-        sizes = re.findall("size\: \d*", str(output))
+        sizes = re.findall(r"size\: \d*", str(output))
         mem_numa = []
         for size in sizes:
             mem_numa.append(int(size.split(' ')[1]))
@@ -55,4 +55,5 @@ def run_migrations_online():
     with context.begin_transaction():
         context.run_migrations()
 
+
 run_migrations_online()
@@ -15,6 +15,8 @@
 
 import re
 
+from hacking import core
+
 """
 Guidelines for writing new hacking checks
 
@@ -43,7 +45,7 @@ assert_equal_with_is_not_none_re = re.compile(
     r"assertEqual\(.*?\s+is+\s+not+\s+None\)$")
 assert_true_isinstance_re = re.compile(
     r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, "
-    "(\w|\.|\'|\"|\[|\])+\)\)")
+    r"(\w|\.|\'|\"|\[|\])+\)\)")
 dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
 assert_xrange_re = re.compile(
     r"\s*xrange\s*\(")
@@ -53,12 +55,14 @@ translated_log = re.compile(r"(.)*LOG\.(%(levels)s)\(\s*_\(" %
     {'levels': '|'.join(log_levels)})
 
 
+@core.flake8ext
 def no_mutable_default_args(logical_line):
     msg = "Z322: Method's default argument shouldn't be mutable!"
     if mutable_default_args.match(logical_line):
         yield (0, msg)
 
 
+@core.flake8ext
 def assert_equal_true_or_false(logical_line):
     """Check for assertEqual(True, A) or assertEqual(False, A) sentences
 
@@ -71,6 +75,7 @@ def assert_equal_true_or_false(logical_line):
             "sentences not allowed")
 
 
+@core.flake8ext
 def assert_equal_not_none(logical_line):
     """Check for assertEqual(A is not None) sentences Z302"""
     msg = "Z302: assertEqual(A is not None) sentences not allowed."
@@ -79,6 +84,7 @@ def assert_equal_not_none(logical_line):
         yield (0, msg)
 
 
+@core.flake8ext
 def assert_true_isinstance(logical_line):
     """Check for assertTrue(isinstance(a, b)) sentences
 
@@ -88,6 +94,7 @@ def assert_true_isinstance(logical_line):
         yield (0, "Z316: assertTrue(isinstance(a, b)) sentences not allowed")
 
 
+@core.flake8ext
 def assert_equal_in(logical_line):
     """Check for assertEqual(True|False, A in B), assertEqual(A in B, True|False)
 
@@ -101,6 +108,7 @@ def assert_equal_in(logical_line):
             "contents.")
 
 
+@core.flake8ext
 def no_xrange(logical_line):
     """Disallow 'xrange()'
 
@@ -110,6 +118,7 @@ def no_xrange(logical_line):
         yield(0, "Z339: Do not use xrange().")
 
 
+@core.flake8ext
 def use_timeutils_utcnow(logical_line, filename):
     # tools are OK to use the standard datetime module
     if "/tools/" in filename:
@@ -123,6 +132,7 @@ def use_timeutils_utcnow(logical_line, filename):
         yield (pos, msg % f)
 
 
+@core.flake8ext
 def dict_constructor_with_list_copy(logical_line):
     msg = ("Z336: Must use a dict comprehension instead of a dict constructor"
            " with a sequence of key-value pairs.")
@@ -130,6 +140,7 @@ def dict_constructor_with_list_copy(logical_line):
         yield (0, msg)
 
 
+@core.flake8ext
 def no_log_warn(logical_line):
     """Disallow 'LOG.warn('
 
@@ -144,6 +155,7 @@ def no_log_warn(logical_line):
         yield (0, msg)
 
 
+@core.flake8ext
 def no_translate_logs(logical_line):
     """Check for 'LOG.*(_('
 
@@ -155,16 +167,3 @@ def no_translate_logs(logical_line):
     msg = "Z353: Log messages should not be translated!"
     if translated_log.match(logical_line):
         yield (0, msg)
-
-
-def factory(register):
-    register(no_mutable_default_args)
-    register(assert_equal_true_or_false)
-    register(assert_equal_not_none)
-    register(assert_true_isinstance)
-    register(assert_equal_in)
-    register(use_timeutils_utcnow)
-    register(dict_constructor_with_list_copy)
-    register(no_xrange)
-    register(no_log_warn)
-    register(no_translate_logs)
@@ -83,7 +83,7 @@ class DockerDriver(driver.ContainerImageDriver):
             docker.pull(repo, tag=tag, auth_config=auth_config)
         except errors.NotFound as e:
             raise exception.ImageNotFound(message=six.text_type(e))
-        except errors.APIError as e:
+        except errors.APIError:
             LOG.exception('Error on pulling image')
             message = _('Error on pulling image: %(repo)s:%(tag)s') % {
                 'repo': repo, 'tag': tag}
@@ -189,8 +189,8 @@ class NeutronAPI(object):
     def _refresh_neutron_extensions_cache(self):
         """Refresh the neutron extensions cache when necessary."""
         if (not self.last_neutron_extension_sync or
-            ((time.time() - self.last_neutron_extension_sync)
-             >= CONF.neutron.extension_sync_interval)):
+            ((time.time() - self.last_neutron_extension_sync) >=
+             CONF.neutron.extension_sync_interval)):
             extensions_list = self.neutron_api.list_extensions()['extensions']
             self.last_neutron_extension_sync = time.time()
             self.extensions.clear()
@@ -26,10 +26,10 @@ from zun.common import exception
 
 LOG = logging.getLogger(__name__)
 
-PCI_VENDOR_PATTERN = "^(hex{4})$".replace("hex", "[\da-fA-F]")
+PCI_VENDOR_PATTERN = "^(hex{4})$".replace("hex", r"[\da-fA-F]")
 _PCI_ADDRESS_PATTERN = ("^(hex{4}):(hex{2}):(hex{2}).(oct{1})$".
-                        replace("hex", "[\da-fA-F]").
-                        replace("oct", "[0-7]"))
+                        replace("hex", r"[\da-fA-F]").
+                        replace("oct", r"[0-7]"))
 _PCI_ADDRESS_REGEX = re.compile(_PCI_ADDRESS_PATTERN)
 
 _SRIOV_TOTALVFS = "sriov_totalvfs"
@@ -167,7 +167,7 @@ def get_vf_num_by_pci_address(pci_addr):
     A VF is associated with an VF number, which ip link command uses to
     configure it. This number can be obtained from the PCI device filesystem.
     """
-    VIRTFN_RE = re.compile("virtfn(\d+)")
+    VIRTFN_RE = re.compile(r"virtfn(\d+)")
     virtfns_path = "/sys/bus/pci/devices/%s/physfn/virtfn*" % (pci_addr)
     vf_num = None
     try:
@@ -40,8 +40,8 @@ class Scheduler(object):
         services = objects.ZunService.list_by_binary(context, 'zun-compute')
         return [service.host
                 for service in services
-                if self.servicegroup_api.service_is_up(service)
-                and not service.disabled]
+                if self.servicegroup_api.service_is_up(service) and
+                not service.disabled]
 
     @abc.abstractmethod
     def select_destinations(self, context, containers, extra_specs,
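Note: this hunk and the neutron one above move binary operators to the
end of the broken line. W503 (line break before a binary operator) and
W504 (line break after one) are mutually contradictory, which is why
tox.ini now ignores both; the code here is simply normalized to one
style. Illustration of the pair:

    # W503 would flag the break *before* the operator:
    total = (1
             + 2)

    # W504 would flag the break *after* it:
    total = (1 +
             2)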
@@ -21,30 +21,31 @@ class TestRegexp(base.BaseTestCase):
         separatorRegexp = r'(?:[._]|__|[-]*)'
         nameComponentRegexp = r'[a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)+)?'
         hostnameComponentRegexp = r'(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*'\
-                                  '[a-zA-Z0-9])'
-        hostnameRegexp = r'(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])'\
-                         '(?:(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*'\
-                         '[a-zA-Z0-9]))+)?(?::[0-9]+)?'
+                                  r'[a-zA-Z0-9])'
+        hostnameRegexp = r'(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*'\
+                         r'[a-zA-Z0-9])'\
+                         r'(?:(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*'\
+                         r'[a-zA-Z0-9]))+)?(?::[0-9]+)?'
         TagRegexp = r'[\w][\w.-]{0,127}'
         anchoredTagRegexp = r'^[\w][\w.-]{0,127}$'
         NameRegexp = r'(?:(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])'\
-                     '(?:(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*'\
-                     '[a-zA-Z0-9]))+)?(?::[0-9]+)?/)?[a-z0-9]+(?:(?:'\
-                     '(?:[._]|__|[-]*)[a-z0-9]+)+)?(?:(?:/[a-z0-9]+'\
-                     '(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)+)?)+)?'
+                     r'(?:(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*'\
+                     r'[a-zA-Z0-9]))+)?(?::[0-9]+)?/)?[a-z0-9]+(?:(?:'\
+                     r'(?:[._]|__|[-]*)[a-z0-9]+)+)?(?:(?:/[a-z0-9]+'\
+                     r'(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)+)?)+)?'
         anchoredNameRegexp = r'^(?:((?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*'\
-                             '[a-zA-Z0-9])(?:(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9]'\
-                             '[a-zA-Z0-9-]*[a-zA-Z0-9]))+)?(?::[0-9]+)?)/)?'\
-                             '([a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)+)?'\
-                             '(?:(?:/[a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]'\
-                             '+)+)?)+)?)$'
+                             r'[a-zA-Z0-9])(?:(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9]'\
+                             r'[a-zA-Z0-9-]*[a-zA-Z0-9]))+)?(?::[0-9]+)?)/)?'\
+                             r'([a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)+)?'\
+                             r'(?:(?:/[a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]'\
+                             r'+)+)?)+)?)$'
         ReferenceRegexp = r'^((?:(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*'\
-                          '[a-zA-Z0-9])(?:(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9]'\
-                          '[a-zA-Z0-9-]*[a-zA-Z0-9]))+)?(?::[0-9]+)?/)?'\
-                          '[a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)+)?'\
-                          '(?:(?:/[a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)'\
-                          '+)?)+)?)(?::([\w][\w.-]{0,127}))?(?:@'\
-                          '([a-zA-Z0-9-_+.]+:[a-fA-F0-9]+))?$'
+                          r'[a-zA-Z0-9])(?:(?:\.(?:[a-zA-Z0-9]|[a-zA-Z0-9]'\
+                          r'[a-zA-Z0-9-]*[a-zA-Z0-9]))+)?(?::[0-9]+)?/)?'\
+                          r'[a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)+)?'\
+                          r'(?:(?:/[a-z0-9]+(?:(?:(?:[._]|__|[-]*)[a-z0-9]+)'\
+                          r'+)?)+)?)(?::([\w][\w.-]{0,127}))?(?:@'\
+                          r'([a-zA-Z0-9-_+.]+:[a-fA-F0-9]+))?$'
 
         ImageRegexps = regexp.ImageRegexps
         self.assertEqual(alphaNumericRegexp,
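Note: only the continuation fragments needed fixing here. Adjacent
string literals are concatenated at compile time, but a string prefix
applies to a single literal only: the r on the first fragment does not
carry over to the next one. Illustration:

    # The second fragment needs its own r prefix; the first fragment's
    # prefix does not extend across the concatenation.
    pattern = r'(?:[a-zA-Z0-9]'\
              r'[a-zA-Z0-9-]*)'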
@@ -89,16 +89,17 @@ class ModelsObjectComparatorMixin(object):
             self.assertEqual(value, obj2[key])
 
     def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
-        obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
-        sort_key = lambda d: [d[k] for k in sorted(d)]
-        conv_and_sort = lambda obj: sorted(map(obj_to_dict, obj), key=sort_key)
+        obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)  # noqa
+        sort_key = lambda d: [d[k] for k in sorted(d)]  # noqa: E731
+        conv_and_sort = lambda obj: (  # noqa: E731
+            sorted(map(obj_to_dict, obj), key=sort_key))
 
         self.assertEqual(conv_and_sort(objs1), conv_and_sort(objs2))
 
     def _assertEqualOrderedListOfObjects(self, objs1, objs2,
                                          ignored_keys=None):
-        obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
-        conv = lambda objs: [obj_to_dict(obj) for obj in objs]
+        obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)  # noqa
+        conv = lambda objs: [obj_to_dict(obj) for obj in objs]  # noqa: E731
 
         self.assertEqual(conv(objs1), conv(objs2))
 
@@ -102,7 +102,7 @@ fake_db_devs_tree = [fake_db_dev_3, fake_db_dev_4, fake_db_dev_5]
 
 class PciDevTrackerTestCase(base.DbTestCase):
     def _fake_get_pci_devices(self, node_id):
-            return self.fake_devs
+        return self.fake_devs
 
     def _fake_pci_device_update(self, node_id, address, value):
         self.update_called += 1
@@ -15,7 +15,7 @@
 import textwrap
 
 import mock
-import pep8
+import pycodestyle
 
 from zun.hacking import checks
 from zun.tests import base
@@ -25,10 +25,10 @@ class HackingTestCase(base.BaseTestCase):
     """Hacking test class.
 
     This class tests the hacking checks zun.hacking.checks by passing
-    strings to the check methods like the pep8/flake8 parser would. The parser
-    loops over each line in the file and then passes the parameters to the
-    check method. The parameter names in the check method dictate what type of
-    object is passed to the check method. The parameter types are::
+    strings to the check methods like the pycodestyle/flake8 parser would. The
+    parser loops over each line in the file and then passes the parameters to
+    the check method. The parameter names in the check method dictate what
+    type of object is passed to the check method. The parameter types are::
 
     logical_line: A processed line with the following modifications:
         - Multi-line statements converted to a single line.
@@ -45,7 +45,7 @@ class HackingTestCase(base.BaseTestCase):
         indent_level: indentation (with tabs expanded to multiples of 8)
         previous_indent_level: indentation on previous line
         previous_logical: previous logical line
-    filename: Path of the file being run through pep8
+    filename: Path of the file being run through pycodestyle
 
     When running a test on a check method the return will be False/None if
     there is no violation in the sample input. If there is an error a tuple is
@@ -53,17 +53,17 @@ class HackingTestCase(base.BaseTestCase):
     just assertTrue if the check is expected to fail and assertFalse if it
     should pass.
     """
-    # We are patching pep8 so that only the check under test is actually
+    # We are patching pycodestyle so that only the check under test is actually
     # installed.
 
-    @mock.patch('pep8._checks',
+    @mock.patch('pycodestyle._checks',
                 {'physical_line': {}, 'logical_line': {}, 'tree': {}})
     def _run_check(self, code, checker, filename=None):
-        pep8.register_check(checker)
+        pycodestyle.register_check(checker)
 
         lines = textwrap.dedent(code).strip().splitlines(True)
 
-        checker = pep8.Checker(filename=filename, lines=lines)
+        checker = pycodestyle.Checker(filename=filename, lines=lines)
         checker.check_all()
         checker.report._deferred_print.sort()
         return checker.report._deferred_print
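Note: the pep8 package was renamed to pycodestyle in 2016, and hacking
3.x pulls in the new name; the module API is unchanged, so this port is
a mechanical rename. A sketch of the equivalence, assuming default
options:

    import pycodestyle  # formerly "import pep8"; same API

    checker = pycodestyle.Checker(filename=None, lines=["x=1\n"])
    # Reports E225 (missing whitespace around operator), just as pep8 did.
    checker.check_all()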