cgcs-patch: update for stx 5.0
* Add dependency on DNF
* Rebased patch:
  - 0001-patch_agent-do-not-do-the-packages_iter-if-pkggrp-is.patch
* Removed the following patches since they are already in upstream:
  - 0001-Remove-use-of-rpmUtils.miscutils-from-cgcs-patch.patch
  - 0003-Cleaning-up-pylint-settings-for-cgcs-patch.patch
  - 0004-Address-python3-pylint-errors-and-warnings.patch
  - 0005-Clean-up-pylint-W1201-logging-not-lazy-in-cgcs-patch.patch
  - 0006-Migrate-patch-agent-to-use-DNF-for-swmgmt.patch

Story: 2008952
Task: 42576

Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
Change-Id: I01b1795e30036d78097915a07870bc950d13c3d0
commit e8fe442a6b (parent 81faeff279)
@@ -8,16 +8,13 @@ SUBPATH0 = "cgcs-patch/cgcs-patch"
LICENSE = "Apache-2.0"
LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57"

SRC_URI += "file://0001-Remove-use-of-rpmUtils.miscutils-from-cgcs-patch.patch;striplevel=3 \
            file://0003-Cleaning-up-pylint-settings-for-cgcs-patch.patch;striplevel=3 \
            file://0004-Address-python3-pylint-errors-and-warnings.patch;striplevel=3 \
            file://0005-Clean-up-pylint-W1201-logging-not-lazy-in-cgcs-patch.patch;striplevel=3 \
            file://0006-Migrate-patch-agent-to-use-DNF-for-swmgmt.patch;striplevel=3 \
            file://0007-patch_agent-do-not-do-the-packages_iter-if-pkggrp-is.patch;striplevel=3 \
SRC_URI += "\
            file://0001-patch_agent-do-not-do-the-packages_iter-if-pkggrp-is.patch;striplevel=3 \
            "

RDEPENDS_${PN}_append = " \
    bash \
    dnf \
    "
RDEPENDS_${PN}-agent_append = " \
    bash \
Removed: 0001-Remove-use-of-rpmUtils.miscutils-from-cgcs-patch.patch

@@ -1,171 +0,0 @@
From 80ee2e342d1854f439a1ec25c2f6a3a3625a0720 Mon Sep 17 00:00:00 2001
From: Don Penney <don.penney@windriver.com>
Date: Sun, 22 Dec 2019 22:45:18 -0500
Subject: [PATCH] Remove use of rpmUtils.miscutils from cgcs-patch

The rpmUtils.miscutils.stringToVersion function will not be available
in CentOS8, as it is not currently provided for python3. A similar
function exists in cgcs_patch.patch_functions, using regex to parse
the version from an RPM filename. This update adds a new function,
parse_pkgver, implemented in a similar fashion using regex, providing
the same capability as rpmUtils.miscutils.stringToVersion.

Change-Id: I2a04f3dbf85d62c87ca1afcf988b672aafceb642
Story: 2006228
Task: 37871
Signed-off-by: Don Penney <don.penney@windriver.com>

---
 cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py       | 11 +++++------
 cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py  |  6 +++---
 cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py   | 18 ++++++++++++++++++
 .../cgcs-patch/cgcs_patch/tests/test_patch_agent.py   |  2 --
 .../cgcs_patch/tests/test_patch_controller.py         |  2 --
 .../cgcs-patch/cgcs_patch/tests/test_patch_utils.py   | 14 ++++++++++++++
 6 files changed, 40 insertions(+), 13 deletions(-)
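As a quick illustration of the replacement described above, the following sketch mirrors the parse_pkgver() added by this patch (same regex as in the diff below; expected outputs taken from the test case at the end of the patch):

    import re

    # [<epoch>:]<version>-<release>, as documented in parse_pkgver()
    pattern = re.compile(r'((([^:]):)?)([^-]+)((-(.*))?)$')

    def parse_pkgver(pkgver):
        m = pattern.match(pkgver)
        if m is None:
            raise ValueError("Package version does not match expected format: %s" % pkgver)
        # group 3 = epoch, group 4 = version, group 7 = release
        return (m.group(3), m.group(4), m.group(7))

    print(parse_pkgver('0:1.2.3-r4'))  # ('0', '1.2.3', 'r4')
    print(parse_pkgver('4.3.2-1'))     # (None, '4.3.2', '1')
    print(parse_pkgver('8.1.4'))       # (None, '8.1.4', None)

Callers feed these tuples straight to rpm.labelCompare(), which returns 1, 0 or -1 depending on whether the first version is higher than, equal to or lower than the second.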
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
index b95b79d..77930d7 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
@@ -19,9 +19,8 @@ import sys
 import yaml
 import shutil

-from rpmUtils.miscutils import stringToVersion  # pylint: disable=import-error
-
 from cgcs_patch.patch_functions import configure_logging
+from cgcs_patch.patch_functions import parse_pkgver
 from cgcs_patch.patch_functions import LOG
 import cgcs_patch.config as cfg
 from cgcs_patch.base import PatchService
@@ -519,8 +518,8 @@ class PatchAgent(PatchService):
         # 1, if first arg is higher version
         # 0, if versions are same
         # -1, if first arg is lower version
-        rc = rpm.labelCompare(stringToVersion(version),
-                              stringToVersion(stored_ver))
+        rc = rpm.labelCompare(parse_pkgver(version),
+                              parse_pkgver(stored_ver))

         if rc > 0:
             # Update version
@@ -709,8 +708,8 @@ class PatchAgent(PatchService):
             compare_version = base_version

         # Compare the installed version to what's in the repo
-        rc = rpm.labelCompare(stringToVersion(installed_version),
-                              stringToVersion(compare_version))
+        rc = rpm.labelCompare(parse_pkgver(installed_version),
+                              parse_pkgver(compare_version))
         if rc == 0:
             # Versions match, nothing to do.
             continue
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
index 1ba8f5e..4b94a5f 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
@@ -17,7 +17,7 @@ import rpm
 import os
 import gc

-from rpmUtils.miscutils import stringToVersion  # pylint: disable=import-error
+from cgcs_patch.patch_functions import parse_pkgver

 from wsgiref import simple_server
 from cgcs_patch.api import app
@@ -776,8 +776,8 @@ class PatchController(PatchService):
                 # Ignore epoch
                 patch_ver = patch_ver.split(':')[1]

-            rc = rpm.labelCompare(stringToVersion(installed_ver),
-                                  stringToVersion(patch_ver))
+            rc = rpm.labelCompare(parse_pkgver(installed_ver),
+                                  parse_pkgver(patch_ver))

             if self.patch_data.metadata[patch_id]["repostate"] == constants.AVAILABLE:
                 # The RPM is not expected to be installed.
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
index 832e4e9..281a286 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
@@ -176,6 +176,24 @@ def parse_rpm_filename(filename):
     return (pkgname, arch, PackageVersion(epoch, version, release))


+def parse_pkgver(pkgver):
+    # Version format is:
+    #    [<epoch>:]<version>-<release>
+    #
+    pattern = re.compile(r'((([^:]):)?)([^-]+)((-(.*))?)$')
+
+    m = pattern.match(pkgver)
+
+    if m is None:
+        raise ValueError("Package version does not match expected format: %s" % pkgver)
+
+    epoch = m.group(3)
+    version = m.group(4)
+    release = m.group(7)
+
+    return (epoch, version, release)
+
+
 class PackageVersion(object):
     """
     The PackageVersion class provides a structure for RPM version information,
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py
index c953e4f..bd1eef9 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py
@@ -10,8 +10,6 @@ import sys
 import testtools

 sys.modules['rpm'] = mock.Mock()
-sys.modules['rpmUtils'] = mock.Mock()
-sys.modules['rpmUtils.miscutils'] = mock.Mock()

 import cgcs_patch.patch_agent  # noqa: E402

diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py
index d11623f..e2b02c0 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py
@@ -10,8 +10,6 @@ import sys
 import testtools

 sys.modules['rpm'] = mock.Mock()
-sys.modules['rpmUtils'] = mock.Mock()
-sys.modules['rpmUtils.miscutils'] = mock.Mock()

 import cgcs_patch.patch_controller  # noqa: E402

diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_utils.py b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_utils.py
index a5eb8d4..653c65a 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_utils.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_utils.py
@@ -9,6 +9,7 @@ import socket
 import testtools

 import cgcs_patch.constants
+import cgcs_patch.patch_functions
 import cgcs_patch.utils


@@ -130,3 +131,16 @@ class CgcsPatchUtilsTestCase(testtools.TestCase):

         result = cgcs_patch.utils.ip_to_versioned_localhost(ip)
         self.assertEqual(expected_result, result)
+
+    def test_parse_pkgver(self):
+        versions = {
+            '0:1.2.3-r4': ('0', '1.2.3', 'r4'),
+            '4.3.2-1': (None, '4.3.2', '1'),
+            '8.1.4': (None, '8.1.4', None),
+            '5:7.5.3': ('5', '7.5.3', None),
+            'This is a weird version string': (None, 'This is a weird version string', None),
+        }
+
+        for ver, expected in versions.items():
+            result = cgcs_patch.patch_functions.parse_pkgver(ver)
+            self.assertEqual(result, expected)
Rebased: 0001-patch_agent-do-not-do-the-packages_iter-if-pkggrp-is.patch

@@ -1,21 +1,25 @@
From 059984de897fe2c8c48811ceb76a0331f94b3557 Mon Sep 17 00:00:00 2001
From b97b3a2f35f8cf0a9eae975ece510c2bb27ceef9 Mon Sep 17 00:00:00 2001
From: Jackie Huang <jackie.huang@windriver.com>
Date: Wed, 13 May 2020 22:10:01 +0800
Date: Fri, 21 May 2021 15:24:13 +0800
Subject: [PATCH] patch_agent: do not do the packages_iter if pkggrp is None

Addn the handling of packages_iter to the else block to avoid:
Add the handling of packages_iter to the else block to avoid:
AttributeError: 'NoneType' object has no attribute 'packages_iter'

Upstream-Status: Inappropriate [poky-stx specific]

Rebased for stx 5.0

Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
---
 cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py | 44 ++++++++++++-------------
 1 file changed, 22 insertions(+), 22 deletions(-)
 cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py | 45 +++++++++++++------------
 1 file changed, 23 insertions(+), 22 deletions(-)

diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
index d8bc375..489d484 100644
index e895e00..8de3c0a 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
@@ -490,29 +490,29 @@ class PatchAgent(PatchService):
@@ -536,29 +536,30 @@ class PatchAgent(PatchService):
                 break

             if pkggrp is None:
@@ -64,8 +68,9 @@ index d8bc375..489d484 100644
+                LOG.info("To install: %s", self.to_install)
+                LOG.info("To remove: %s", self.to_remove)
+                LOG.info("Missing: %s", self.missing_pkgs)

             return True
+
         if len(self.duplicated_pkgs) > 0:
             LOG.info("Duplicated: %s", self.duplicated_pkgs)

--
2.7.4
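The guard this patch rebases is easier to see outside the diff context; a minimal sketch of the failure mode and the fix (FakeGroup is a hypothetical stand-in for the package-group object, not code from cgcs-patch):

    class FakeGroup(object):
        # Hypothetical stand-in for the group object whose packages_iter()
        # the agent walks when the package group exists.
        def packages_iter(self):
            return iter(["cgcs-patch", "dnf"])

    def list_group_packages(pkggrp):
        if pkggrp is None:
            # Without this branch, pkggrp.packages_iter() raises:
            #   AttributeError: 'NoneType' object has no attribute 'packages_iter'
            return []
        return list(pkggrp.packages_iter())

    print(list_group_packages(None))         # []
    print(list_group_packages(FakeGroup()))  # ['cgcs-patch', 'dnf']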
Removed: 0003-Cleaning-up-pylint-settings-for-cgcs-patch.patch

@@ -1,441 +0,0 @@
From de774c85653692b2a901123b5653d0e2101c5353 Mon Sep 17 00:00:00 2001
From: Al Bailey <Al.Bailey@windriver.com>
Date: Fri, 4 Oct 2019 12:29:03 -0500
Subject: [PATCH] Cleaning up pylint settings for cgcs patch

This also adds cgcs_make_patch folder for pylint

pylint is invoked with two different pylint.rc files
so that different codes can be suppressed for the
two different code structures.

Change-Id: I0d7a87ed435ed716a3c1ea98f5d7badfd2adac7d
Story: 2004515
Task: 37701
Signed-off-by: Al Bailey <Al.Bailey@windriver.com>

---
 cgcs-patch/cgcs-patch/pylint.rc            |  14 +-
 cgcs-patch/cgcs-patch/pylint_make_patch.rc | 352 +++++++++++++++++++++++++++++
 cgcs-patch/cgcs-patch/tox.ini              |   6 +-
 3 files changed, 365 insertions(+), 7 deletions(-)
 create mode 100644 cgcs-patch/cgcs-patch/pylint_make_patch.rc
|
||||
|
||||
diff --git a/cgcs-patch/cgcs-patch/pylint.rc b/cgcs-patch/cgcs-patch/pylint.rc
|
||||
index dc20bb0..812b6b5 100644
|
||||
--- a/cgcs-patch/cgcs-patch/pylint.rc
|
||||
+++ b/cgcs-patch/cgcs-patch/pylint.rc
|
||||
@@ -44,8 +44,16 @@ symbols=no
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
-#disable=
|
||||
-disable=C, R, W0603, W0613, W0702, W0703, W1201
|
||||
+# E1111 assignment-from-no-return
|
||||
+# W0107 unnecessary-pass
|
||||
+# W0603 global-statement
|
||||
+# W0612 unused-variable
|
||||
+# W0613 unused-argument
|
||||
+# W0703 broad-except
|
||||
+# W0705 duplicate-except
|
||||
+# W1201 logging-not-lazy
|
||||
+# W1505, deprecated-method
|
||||
+disable=C, R, E1111, W0107, W0603, W0612, W0613, W0703, W0705, W1201, W1505
|
||||
|
||||
|
||||
[REPORTS]
|
||||
@@ -61,7 +69,7 @@ output-format=text
|
||||
files-output=no
|
||||
|
||||
# Tells whether to display a full report or only the messages
|
||||
-reports=yes
|
||||
+reports=no
|
||||
|
||||
# Python expression which should return a note less than 10 (10 is the highest
|
||||
# note). You have access to the variables errors warning, statement which
|
||||
diff --git a/cgcs-patch/cgcs-patch/pylint_make_patch.rc b/cgcs-patch/cgcs-patch/pylint_make_patch.rc
|
||||
new file mode 100644
|
||||
index 0000000..ef4e838
|
||||
--- /dev/null
|
||||
+++ b/cgcs-patch/cgcs-patch/pylint_make_patch.rc
|
||||
@@ -0,0 +1,352 @@
|
||||
+[MASTER]
|
||||
+
|
||||
+# Specify a configuration file.
|
||||
+#rcfile=
|
||||
+
|
||||
+# Python code to execute, usually for sys.path manipulation such as
|
||||
+# pygtk.require().
|
||||
+#init-hook=
|
||||
+
|
||||
+# Profiled execution.
|
||||
+profile=no
|
||||
+
|
||||
+# Add files or directories to the blacklist. They should be base names, not
|
||||
+# paths.
|
||||
+ignore=CVS
|
||||
+
|
||||
+# Pickle collected data for later comparisons.
|
||||
+persistent=yes
|
||||
+
|
||||
+# List of plugins (as comma separated values of python modules names) to load,
|
||||
+# usually to register additional checkers.
|
||||
+load-plugins=
|
||||
+
|
||||
+# DEPRECATED
|
||||
+include-ids=no
|
||||
+
|
||||
+# DEPRECATED
|
||||
+symbols=no
|
||||
+
|
||||
+
|
||||
+[MESSAGES CONTROL]
|
||||
+
|
||||
+# Enable the message, report, category or checker with the given id(s). You can
|
||||
+# either give multiple identifier separated by comma (,) or put this option
|
||||
+# multiple time. See also the "--disable" option for examples.
|
||||
+#enable=
|
||||
+
|
||||
+# Disable the message, report, category or checker with the given id(s). You
|
||||
+# can either give multiple identifiers separated by comma (,) or put this
|
||||
+# option multiple times (only on the command line, not in the configuration
|
||||
+# file where it should appear only once).You can also use "--disable=all" to
|
||||
+# disable everything first and then reenable specific checks. For example, if
|
||||
+# you want to run only the similarities checker, you can use "--disable=all
|
||||
+# --enable=similarities". If you want to run only the classes checker, but have
|
||||
+# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
+# --disable=W"
|
||||
+# The following are suppressed due to pylint warnings in cgcs_make_patch
|
||||
+# fixme Use of fixme, todo, etc..
|
||||
+# E1101 no-member
|
||||
+# W0101 unreachable
|
||||
+# W0104 pointless-statement
|
||||
+# W0107 unnecessary-pass
|
||||
+# W0212 protected-access
|
||||
+# W0231 super-init-not-called
|
||||
+# W0603 global-statement
|
||||
+# W0612 unused-variable
|
||||
+# W0613 unused-argument
|
||||
+# W0622 redefined-builtin
|
||||
+# W0703 broad-except
|
||||
+# W1401 anomalous-backslash-in-string
|
||||
+# W1505, deprecated-method
|
||||
+disable=C, R, fixme, E1101,
|
||||
+ W0101, W0104, W0107, W0212, W0231, W0603, W0612, W0613, W0622, W0703,
|
||||
+ W1401, W1505
|
||||
+
|
||||
+[REPORTS]
|
||||
+
|
||||
+# Set the output format. Available formats are text, parseable, colorized, msvs
|
||||
+# (visual studio) and html. You can also give a reporter class, eg
|
||||
+# mypackage.mymodule.MyReporterClass.
|
||||
+output-format=text
|
||||
+
|
||||
+# Put messages in a separate file for each module / package specified on the
|
||||
+# command line instead of printing them on stdout. Reports (if any) will be
|
||||
+# written in a file name "pylint_global.[txt|html]".
|
||||
+files-output=no
|
||||
+
|
||||
+# Tells whether to display a full report or only the messages
|
||||
+reports=no
|
||||
+
|
||||
+# Python expression which should return a note less than 10 (10 is the highest
|
||||
+# note). You have access to the variables errors warning, statement which
|
||||
+# respectively contain the number of errors / warnings messages and the total
|
||||
+# number of statements analyzed. This is used by the global evaluation report
|
||||
+# (RP0004).
|
||||
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||
+
|
||||
+# Add a comment according to your evaluation note. This is used by the global
|
||||
+# evaluation report (RP0004).
|
||||
+comment=no
|
||||
+
|
||||
+# Template used to display messages. This is a python new-style format string
|
||||
+# used to format the message information. See doc for all details
|
||||
+#msg-template=
|
||||
+
|
||||
+
|
||||
+[BASIC]
|
||||
+
|
||||
+# Required attributes for module, separated by a comma
|
||||
+required-attributes=
|
||||
+
|
||||
+# List of builtins function names that should not be used, separated by a comma
|
||||
+bad-functions=map,filter,apply,input,file
|
||||
+
|
||||
+# Good variable names which should always be accepted, separated by a comma
|
||||
+good-names=i,j,k,ex,Run,_
|
||||
+
|
||||
+# Bad variable names which should always be refused, separated by a comma
|
||||
+bad-names=foo,bar,baz,toto,tutu,tata
|
||||
+
|
||||
+# Colon-delimited sets of names that determine each other's naming style when
|
||||
+# the name regexes allow several styles.
|
||||
+name-group=
|
||||
+
|
||||
+# Include a hint for the correct naming format with invalid-name
|
||||
+include-naming-hint=no
|
||||
+
|
||||
+# Regular expression matching correct function names
|
||||
+function-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Naming hint for function names
|
||||
+function-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Regular expression matching correct variable names
|
||||
+variable-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Naming hint for variable names
|
||||
+variable-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Regular expression matching correct constant names
|
||||
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
+
|
||||
+# Naming hint for constant names
|
||||
+const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||
+
|
||||
+# Regular expression matching correct attribute names
|
||||
+attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Naming hint for attribute names
|
||||
+attr-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Regular expression matching correct argument names
|
||||
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Naming hint for argument names
|
||||
+argument-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Regular expression matching correct class attribute names
|
||||
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
+
|
||||
+# Naming hint for class attribute names
|
||||
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||
+
|
||||
+# Regular expression matching correct inline iteration names
|
||||
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||
+
|
||||
+# Naming hint for inline iteration names
|
||||
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
|
||||
+
|
||||
+# Regular expression matching correct class names
|
||||
+class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||
+
|
||||
+# Naming hint for class names
|
||||
+class-name-hint=[A-Z_][a-zA-Z0-9]+$
|
||||
+
|
||||
+# Regular expression matching correct module names
|
||||
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
+
|
||||
+# Naming hint for module names
|
||||
+module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||
+
|
||||
+# Regular expression matching correct method names
|
||||
+method-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Naming hint for method names
|
||||
+method-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||
+
|
||||
+# Regular expression which should only match function or class names that do
|
||||
+# not require a docstring.
|
||||
+no-docstring-rgx=__.*__
|
||||
+
|
||||
+# Minimum line length for functions/classes that require docstrings, shorter
|
||||
+# ones are exempt.
|
||||
+docstring-min-length=-1
|
||||
+
|
||||
+
|
||||
+[FORMAT]
|
||||
+
|
||||
+# Maximum number of characters on a single line.
|
||||
+max-line-length=80
|
||||
+
|
||||
+# Regexp for a line that is allowed to be longer than the limit.
|
||||
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||
+
|
||||
+# Allow the body of an if to be on the same line as the test if there is no
|
||||
+# else.
|
||||
+single-line-if-stmt=no
|
||||
+
|
||||
+# List of optional constructs for which whitespace checking is disabled
|
||||
+no-space-check=trailing-comma,dict-separator
|
||||
+
|
||||
+# Maximum number of lines in a module
|
||||
+max-module-lines=1000
|
||||
+
|
||||
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||
+# tab).
|
||||
+indent-string=' '
|
||||
+
|
||||
+# Number of spaces of indent required inside a hanging or continued line.
|
||||
+indent-after-paren=4
|
||||
+
|
||||
+
|
||||
+[LOGGING]
|
||||
+
|
||||
+# Logging modules to check that the string format arguments are in logging
|
||||
+# function parameter format
|
||||
+logging-modules=logging
|
||||
+
|
||||
+
|
||||
+[MISCELLANEOUS]
|
||||
+
|
||||
+# List of note tags to take in consideration, separated by a comma.
|
||||
+notes=FIXME,XXX,TODO
|
||||
+
|
||||
+
|
||||
+[SIMILARITIES]
|
||||
+
|
||||
+# Minimum lines number of a similarity.
|
||||
+min-similarity-lines=4
|
||||
+
|
||||
+# Ignore comments when computing similarities.
|
||||
+ignore-comments=yes
|
||||
+
|
||||
+# Ignore docstrings when computing similarities.
|
||||
+ignore-docstrings=yes
|
||||
+
|
||||
+# Ignore imports when computing similarities.
|
||||
+ignore-imports=no
|
||||
+
|
||||
+
|
||||
+[TYPECHECK]
|
||||
+
|
||||
+# Tells whether missing members accessed in mixin class should be ignored. A
|
||||
+# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||
+ignore-mixin-members=yes
|
||||
+
|
||||
+# List of module names for which member attributes should not be checked
|
||||
+# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
+# and thus existing member attributes cannot be deduced by static analysis
|
||||
+ignored-modules=
|
||||
+
|
||||
+# List of classes names for which member attributes should not be checked
|
||||
+# (useful for classes with attributes dynamically set).
|
||||
+ignored-classes=rpm,PKCS1_PSS
|
||||
+
|
||||
+# When zope mode is activated, add a predefined set of Zope acquired attributes
|
||||
+# to generated-members.
|
||||
+zope=no
|
||||
+
|
||||
+# List of members which are set dynamically and missed by pylint inference
|
||||
+# system, and so shouldn't trigger E0201 when accessed. Python regular
|
||||
+# expressions are accepted.
|
||||
+generated-members=REQUEST,acl_users,aq_parent
|
||||
+
|
||||
+
|
||||
+[VARIABLES]
|
||||
+
|
||||
+# Tells whether we should check for unused import in __init__ files.
|
||||
+init-import=no
|
||||
+
|
||||
+# A regular expression matching the name of dummy variables (i.e. expectedly
|
||||
+# not used).
|
||||
+dummy-variables-rgx=_$|dummy
|
||||
+
|
||||
+# List of additional names supposed to be defined in builtins. Remember that
|
||||
+# you should avoid to define new builtins when possible.
|
||||
+additional-builtins=
|
||||
+
|
||||
+
|
||||
+[CLASSES]
|
||||
+
|
||||
+# List of interface methods to ignore, separated by a comma. This is used for
|
||||
+# instance to not check methods defines in Zope's Interface base class.
|
||||
+ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
|
||||
+
|
||||
+# List of method names used to declare (i.e. assign) instance attributes.
|
||||
+defining-attr-methods=__init__,__new__,setUp
|
||||
+
|
||||
+# List of valid names for the first argument in a class method.
|
||||
+valid-classmethod-first-arg=cls
|
||||
+
|
||||
+# List of valid names for the first argument in a metaclass class method.
|
||||
+valid-metaclass-classmethod-first-arg=mcs
|
||||
+
|
||||
+
|
||||
+[DESIGN]
|
||||
+
|
||||
+# Maximum number of arguments for function / method
|
||||
+max-args=5
|
||||
+
|
||||
+# Argument names that match this expression will be ignored. Default to name
|
||||
+# with leading underscore
|
||||
+ignored-argument-names=_.*
|
||||
+
|
||||
+# Maximum number of locals for function / method body
|
||||
+max-locals=15
|
||||
+
|
||||
+# Maximum number of return / yield for function / method body
|
||||
+max-returns=6
|
||||
+
|
||||
+# Maximum number of branch for function / method body
|
||||
+max-branches=12
|
||||
+
|
||||
+# Maximum number of statements in function / method body
|
||||
+max-statements=50
|
||||
+
|
||||
+# Maximum number of parents for a class (see R0901).
|
||||
+max-parents=7
|
||||
+
|
||||
+# Maximum number of attributes for a class (see R0902).
|
||||
+max-attributes=7
|
||||
+
|
||||
+# Minimum number of public methods for a class (see R0903).
|
||||
+min-public-methods=2
|
||||
+
|
||||
+# Maximum number of public methods for a class (see R0904).
|
||||
+max-public-methods=20
|
||||
+
|
||||
+
|
||||
+[IMPORTS]
|
||||
+
|
||||
+# Deprecated modules which should not be used, separated by a comma
|
||||
+deprecated-modules=regsub,TERMIOS,Bastion,rexec
|
||||
+
|
||||
+# Create a graph of every (i.e. internal and external) dependencies in the
|
||||
+# given file (report RP0402 must not be disabled)
|
||||
+import-graph=
|
||||
+
|
||||
+# Create a graph of external dependencies in the given file (report RP0402 must
|
||||
+# not be disabled)
|
||||
+ext-import-graph=
|
||||
+
|
||||
+# Create a graph of internal dependencies in the given file (report RP0402 must
|
||||
+# not be disabled)
|
||||
+int-import-graph=
|
||||
+
|
||||
+
|
||||
+[EXCEPTIONS]
|
||||
+
|
||||
+# Exceptions that will emit a warning when being caught. Defaults to
|
||||
+# "Exception"
|
||||
+overgeneral-exceptions=Exception
|
||||
diff --git a/cgcs-patch/cgcs-patch/tox.ini b/cgcs-patch/cgcs-patch/tox.ini
|
||||
index ba9c568..88e5723 100644
|
||||
--- a/cgcs-patch/cgcs-patch/tox.ini
|
||||
+++ b/cgcs-patch/cgcs-patch/tox.ini
|
||||
@@ -76,7 +76,6 @@ exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,release-tag-*
|
||||
enable-extensions = H106 H203 H904
|
||||
max-line-length = 120
|
||||
|
||||
-
|
||||
[testenv:flake8]
|
||||
basepython = python3
|
||||
usedevelop = False
|
||||
@@ -85,13 +84,12 @@ commands =
|
||||
flake8 {posargs} .
|
||||
|
||||
[testenv:pylint]
|
||||
+basepython = python3
|
||||
deps = {[testenv]deps}
|
||||
pylint
|
||||
-
|
||||
-basepython = python2.7
|
||||
sitepackages = False
|
||||
-
|
||||
commands = pylint cgcs_patch --rcfile=./pylint.rc
|
||||
+ pylint cgcs_make_patch --rcfile=./pylint_make_patch.rc
|
||||
|
||||
[testenv:cover]
|
||||
setenv =
|
Removed: 0004-Address-python3-pylint-errors-and-warnings.patch

@@ -1,213 +0,0 @@
From d6675196199ddcefccba0d5d745ac4e93aaecd0f Mon Sep 17 00:00:00 2001
From: Don Penney <don.penney@windriver.com>
Date: Wed, 4 Dec 2019 22:26:52 -0500
Subject: [PATCH] Address python3 pylint errors and warnings

This commit addresses issues detected by the updated python3 pylint:
- Added a return code to the report_app_dependencies function to
  satisfy the E1111 error reported.
- Added line-specific pylint disable for unused-argument for cases
  where the inclusion of such arguments in the function signature was
  intentional.
- Added line-specific pylint disable for the duplicate-except case
  found, as python3 has merged IOError into OSError, while these are
  separate exceptions in python2. Once we're running solely on python3,
  this duplicate exception handling can be dropped.

Change-Id: I96a521288e71948f06ad0c88a12c8f475ed8bc99
Closes-Bug: 1855180
Signed-off-by: Don Penney <don.penney@windriver.com>

---
 cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py        | 4 ++--
 cgcs-patch/cgcs-patch/cgcs_patch/messages.py                    | 2 +-
 cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py                 | 6 +++---
 cgcs-patch/cgcs-patch/cgcs_patch/patch_client.py                | 6 +++---
 cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py            | 8 +++++---
 cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py             | 2 +-
 cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py | 2 +-
 cgcs-patch/cgcs-patch/pylint.rc                                 | 6 +-----
 8 files changed, 17 insertions(+), 19 deletions(-)
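The line-scoped suppressions described above look like this in practice; a minimal, self-contained sketch (the function names are illustrative, not taken from cgcs-patch):

    import logging

    LOG = logging.getLogger(__name__)

    def handle(sock, addr):  # pylint: disable=unused-argument
        # Both arguments are required by the message-handler signature,
        # even though this default handler only logs.
        LOG.info("Unhandled message type")

    def read_patch_metadata(path):
        try:
            with open(path) as f:
                return f.read()
        except OSError:
            LOG.exception("Failed reading %s", path)
        except IOError:  # pylint: disable=duplicate-except
            # python3 merged IOError into OSError, so pylint flags this as a
            # duplicate; it is kept only for python2 compatibility.
            LOG.exception("Failed reading %s", path)
        return None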
|
||||
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py b/cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py
|
||||
index f1e0262..4c7bd7f 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py
|
||||
@@ -182,7 +182,7 @@ class PatchAPIController(object):
|
||||
|
||||
@expose('json')
|
||||
@expose('query_hosts.xml', content_type='application/xml')
|
||||
- def query_hosts(self, *args):
|
||||
+ def query_hosts(self, *args): # pylint: disable=unused-argument
|
||||
return dict(data=pc.query_host_cache())
|
||||
|
||||
@expose('json')
|
||||
@@ -197,7 +197,7 @@ class PatchAPIController(object):
|
||||
|
||||
@expose('json')
|
||||
@expose('query.xml', content_type='application/xml')
|
||||
- def host_install(self, *args):
|
||||
+ def host_install(self, *args): # pylint: disable=unused-argument
|
||||
return dict(error="Deprecated: Use host_install_async")
|
||||
|
||||
@expose('json')
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/messages.py b/cgcs-patch/cgcs-patch/cgcs_patch/messages.py
|
||||
index a57ea28..6abc29d 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/messages.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/messages.py
|
||||
@@ -60,5 +60,5 @@ class PatchMessage(object):
|
||||
return PATCHMSG_STR[self.msgtype]
|
||||
return "invalid-type"
|
||||
|
||||
- def handle(self, sock, addr):
|
||||
+ def handle(self, sock, addr): # pylint: disable=unused-argument
|
||||
LOG.info("Unhandled message type: %s" % self.msgtype)
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
|
||||
index 77930d7..547db52 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
|
||||
@@ -150,7 +150,7 @@ class PatchMessageHelloAgent(messages.PatchMessage):
|
||||
resp = PatchMessageHelloAgentAck()
|
||||
resp.send(sock)
|
||||
|
||||
- def send(self, sock):
|
||||
+ def send(self, sock): # pylint: disable=unused-argument
|
||||
LOG.error("Should not get here")
|
||||
|
||||
|
||||
@@ -196,7 +196,7 @@ class PatchMessageQueryDetailed(messages.PatchMessage):
|
||||
resp = PatchMessageQueryDetailedResp()
|
||||
resp.send(sock)
|
||||
|
||||
- def send(self, sock):
|
||||
+ def send(self, sock): # pylint: disable=unused-argument
|
||||
LOG.error("Should not get here")
|
||||
|
||||
|
||||
@@ -258,7 +258,7 @@ class PatchMessageAgentInstallReq(messages.PatchMessage):
|
||||
resp.status = pa.handle_install()
|
||||
resp.send(sock, addr)
|
||||
|
||||
- def send(self, sock):
|
||||
+ def send(self, sock): # pylint: disable=unused-argument
|
||||
LOG.error("Should not get here")
|
||||
|
||||
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_client.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_client.py
|
||||
index 705590c..af189fc 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_client.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_client.py
|
||||
@@ -960,7 +960,7 @@ def wait_for_install_complete(agent_ip):
|
||||
return rc
|
||||
|
||||
|
||||
-def host_install(debug, args):
|
||||
+def host_install(debug, args): # pylint: disable=unused-argument
|
||||
force = False
|
||||
rc = 0
|
||||
|
||||
@@ -1072,7 +1072,7 @@ def patch_upload_dir_req(debug, args):
|
||||
return check_rc(req)
|
||||
|
||||
|
||||
-def patch_install_local(debug, args):
|
||||
+def patch_install_local(debug, args): # pylint: disable=unused-argument
|
||||
""" This function is used to trigger patch installation prior to configuration """
|
||||
# Check to see if initial configuration has completed
|
||||
if os.path.isfile(INITIAL_CONTROLLER_CONFIG_COMPLETE):
|
||||
@@ -1214,7 +1214,7 @@ def patch_is_available_req(args):
|
||||
return rc
|
||||
|
||||
|
||||
-def patch_report_app_dependencies_req(debug, args):
|
||||
+def patch_report_app_dependencies_req(debug, args): # pylint: disable=unused-argument
|
||||
if len(args) < 2:
|
||||
print_help()
|
||||
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
|
||||
index 4b94a5f..79a6401 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
|
||||
@@ -392,7 +392,7 @@ class PatchMessageHelloAgentAck(messages.PatchMessage):
|
||||
self.agent_state)
|
||||
pc.hosts_lock.release()
|
||||
|
||||
- def send(self, sock):
|
||||
+ def send(self, sock): # pylint: disable=unused-argument
|
||||
LOG.error("Should not get here")
|
||||
|
||||
|
||||
@@ -469,7 +469,7 @@ class PatchMessageQueryDetailedResp(messages.PatchMessage):
|
||||
else:
|
||||
pc.hosts_lock.release()
|
||||
|
||||
- def send(self, sock):
|
||||
+ def send(self, sock): # pylint: disable=unused-argument
|
||||
LOG.error("Should not get here")
|
||||
|
||||
|
||||
@@ -525,7 +525,7 @@ class PatchMessageAgentInstallResp(messages.PatchMessage):
|
||||
pc.hosts[addr[0]].install_reject_reason = self.reject_reason
|
||||
pc.hosts_lock.release()
|
||||
|
||||
- def send(self, sock):
|
||||
+ def send(self, sock): # pylint: disable=unused-argument
|
||||
LOG.error("Should not get here")
|
||||
|
||||
|
||||
@@ -2298,6 +2298,8 @@ class PatchController(PatchService):
|
||||
finally:
|
||||
self.patch_data_lock.release()
|
||||
|
||||
+ return True
|
||||
+
|
||||
def query_app_dependencies(self):
|
||||
"""
|
||||
Query application dependencies
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
|
||||
index 281a286..e9017f2 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
|
||||
@@ -1253,7 +1253,7 @@ class PatchFile(object):
|
||||
msg = "Failed during patch extraction"
|
||||
LOG.exception(msg)
|
||||
raise PatchFail(msg)
|
||||
- except IOError:
|
||||
+ except IOError: # pylint: disable=duplicate-except
|
||||
msg = "Failed during patch extraction"
|
||||
LOG.exception(msg)
|
||||
raise PatchFail(msg)
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py
|
||||
index e2b02c0..1db4b68 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py
|
||||
@@ -17,6 +17,6 @@ import cgcs_patch.patch_controller # noqa: E402
|
||||
class CgcsPatchControllerTestCase(testtools.TestCase):
|
||||
|
||||
@mock.patch('six.moves.builtins.open')
|
||||
- def test_cgcs_patch_controller_instantiate(self, mock_open):
|
||||
+ def test_cgcs_patch_controller_instantiate(self, mock_open): # pylint: disable=unused-argument
|
||||
# pylint: disable=unused-variable
|
||||
pc = cgcs_patch.patch_controller.PatchController() # noqa: F841
|
||||
diff --git a/cgcs-patch/cgcs-patch/pylint.rc b/cgcs-patch/cgcs-patch/pylint.rc
|
||||
index 812b6b5..a2d888b 100644
|
||||
--- a/cgcs-patch/cgcs-patch/pylint.rc
|
||||
+++ b/cgcs-patch/cgcs-patch/pylint.rc
|
||||
@@ -44,16 +44,12 @@ symbols=no
|
||||
# --enable=similarities". If you want to run only the classes checker, but have
|
||||
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||
# --disable=W"
|
||||
-# E1111 assignment-from-no-return
|
||||
# W0107 unnecessary-pass
|
||||
# W0603 global-statement
|
||||
-# W0612 unused-variable
|
||||
-# W0613 unused-argument
|
||||
# W0703 broad-except
|
||||
-# W0705 duplicate-except
|
||||
# W1201 logging-not-lazy
|
||||
# W1505, deprecated-method
|
||||
-disable=C, R, E1111, W0107, W0603, W0612, W0613, W0703, W0705, W1201, W1505
|
||||
+disable=C, R, W0107, W0603, W0703, W1201, W1505
|
||||
|
||||
|
||||
[REPORTS]
|
Removed: 0005-Clean-up-pylint-W1201-logging-not-lazy-in-cgcs-patch.patch

@@ -1,673 +0,0 @@
From b206b6574a75dfc3793886529064e3d938759be8 Mon Sep 17 00:00:00 2001
From: Don Penney <don.penney@windriver.com>
Date: Mon, 23 Dec 2019 14:36:08 -0500
Subject: [PATCH] Clean up pylint W1201 logging-not-lazy in cgcs-patch

Change-Id: Ib461890ddf7635645d42660dc07a153e2449b09e
Story: 2007050
Task: 37874
Signed-off-by: Don Penney <don.penney@windriver.com>

---
 .../cgcs-patch/cgcs_patch/api/controllers/root.py |  2 +-
 cgcs-patch/cgcs-patch/cgcs_patch/base.py          |  4 +-
 cgcs-patch/cgcs-patch/cgcs_patch/messages.py      |  2 +-
 cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py   | 76 +++++++++----------
 .../cgcs-patch/cgcs_patch/patch_controller.py     | 86 +++++++++++-----------
 cgcs-patch/cgcs-patch/pylint.rc                   |  3 +-
 6 files changed, 86 insertions(+), 87 deletions(-)
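The W1201 cleanup swaps eager %-formatting for logging's deferred argument passing, as in this small before/after sketch (illustrative only; the real changes follow in the diff):

    import logging

    LOG = logging.getLogger(__name__)
    path = "/opt/patching"

    # logging-not-lazy (W1201): the string is built even if INFO is filtered out
    LOG.info("Retrieving patches from %s" % path)

    # lazy form used throughout this patch: formatting happens only if the
    # record is actually emitted
    LOG.info("Retrieving patches from %s", path)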
|
||||
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py b/cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py
|
||||
index 4c7bd7f..883b58d 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py
|
||||
@@ -135,7 +135,7 @@ class PatchAPIController(object):
|
||||
def upload_dir(self, **kwargs):
|
||||
files = []
|
||||
for path in kwargs.values():
|
||||
- LOG.info("upload-dir: Retrieving patches from %s" % path)
|
||||
+ LOG.info("upload-dir: Retrieving patches from %s", path)
|
||||
for f in glob.glob(path + '/*.patch'):
|
||||
if os.path.isfile(f):
|
||||
files.append(f)
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/base.py b/cgcs-patch/cgcs-patch/cgcs_patch/base.py
|
||||
index 8e47905..e12e26c 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/base.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/base.py
|
||||
@@ -160,11 +160,11 @@ class PatchService(object):
|
||||
if result == self.mcast_addr:
|
||||
return
|
||||
except subprocess.CalledProcessError as e:
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
return
|
||||
|
||||
# Close the socket and set it up again
|
||||
- LOG.info("Detected missing multicast addr (%s). Reconfiguring" % self.mcast_addr)
|
||||
+ LOG.info("Detected missing multicast addr (%s). Reconfiguring", self.mcast_addr)
|
||||
while self.setup_socket() is None:
|
||||
LOG.info("Unable to setup sockets. Waiting to retry")
|
||||
time.sleep(5)
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/messages.py b/cgcs-patch/cgcs-patch/cgcs_patch/messages.py
|
||||
index 6abc29d..86ff99f 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/messages.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/messages.py
|
||||
@@ -61,4 +61,4 @@ class PatchMessage(object):
|
||||
return "invalid-type"
|
||||
|
||||
def handle(self, sock, addr): # pylint: disable=unused-argument
|
||||
- LOG.info("Unhandled message type: %s" % self.msgtype)
|
||||
+ LOG.info("Unhandled message type: %s", self.msgtype)
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
|
||||
index 547db52..3abd891 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
|
||||
@@ -70,7 +70,7 @@ def setflag(fname):
|
||||
with open(fname, "w") as f:
|
||||
f.write("%d\n" % os.getpid())
|
||||
except Exception:
|
||||
- LOG.exception("Failed to update %s flag" % fname)
|
||||
+ LOG.exception("Failed to update %s flag", fname)
|
||||
|
||||
|
||||
def clearflag(fname):
|
||||
@@ -78,7 +78,7 @@ def clearflag(fname):
|
||||
try:
|
||||
os.remove(fname)
|
||||
except Exception:
|
||||
- LOG.exception("Failed to clear %s flag" % fname)
|
||||
+ LOG.exception("Failed to clear %s flag", fname)
|
||||
|
||||
|
||||
def check_install_uuid():
|
||||
@@ -101,7 +101,7 @@ def check_install_uuid():
|
||||
controller_install_uuid = str(req.text).rstrip()
|
||||
|
||||
if install_uuid != controller_install_uuid:
|
||||
- LOG.error("Local install_uuid=%s doesn't match controller=%s" % (install_uuid, controller_install_uuid))
|
||||
+ LOG.error("Local install_uuid=%s doesn't match controller=%s", install_uuid, controller_install_uuid)
|
||||
return False
|
||||
|
||||
return True
|
||||
@@ -239,7 +239,7 @@ class PatchMessageAgentInstallReq(messages.PatchMessage):
|
||||
messages.PatchMessage.encode(self)
|
||||
|
||||
def handle(self, sock, addr):
|
||||
- LOG.info("Handling host install request, force=%s" % self.force)
|
||||
+ LOG.info("Handling host install request, force=%s", self.force)
|
||||
global pa
|
||||
resp = PatchMessageAgentInstallResp()
|
||||
|
||||
@@ -354,7 +354,7 @@ class PatchAgent(PatchService):
|
||||
config = yaml.load(output)
|
||||
except subprocess.CalledProcessError as e:
|
||||
LOG.exception("Failed to query channels")
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
return False
|
||||
except Exception:
|
||||
LOG.exception("Failed to query channels")
|
||||
@@ -390,23 +390,23 @@ class PatchAgent(PatchService):
|
||||
config[channel].get('baseurl') != ch_baseurl):
|
||||
# Config is invalid
|
||||
add_channel = True
|
||||
- LOG.warning("Invalid smart config found for %s" % channel)
|
||||
+ LOG.warning("Invalid smart config found for %s", channel)
|
||||
try:
|
||||
output = subprocess.check_output(smart_cmd +
|
||||
["channel", "--yes",
|
||||
"--remove", channel],
|
||||
stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
- LOG.exception("Failed to configure %s channel" % channel)
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.exception("Failed to configure %s channel", channel)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
return False
|
||||
else:
|
||||
# Channel is missing
|
||||
add_channel = True
|
||||
- LOG.warning("Channel %s is missing from config" % channel)
|
||||
+ LOG.warning("Channel %s is missing from config", channel)
|
||||
|
||||
if add_channel:
|
||||
- LOG.info("Adding channel %s" % channel)
|
||||
+ LOG.info("Adding channel %s", channel)
|
||||
cmd_args = ["channel", "--yes", "--add", channel,
|
||||
"type=%s" % ch_type,
|
||||
"name=%s" % ch_name]
|
||||
@@ -417,8 +417,8 @@ class PatchAgent(PatchService):
|
||||
output = subprocess.check_output(smart_cmd + cmd_args,
|
||||
stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
- LOG.exception("Failed to configure %s channel" % channel)
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.exception("Failed to configure %s channel", channel)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
return False
|
||||
|
||||
updated = True
|
||||
@@ -431,7 +431,7 @@ class PatchAgent(PatchService):
|
||||
config = yaml.load(output)
|
||||
except subprocess.CalledProcessError as e:
|
||||
LOG.exception("Failed to query smart config")
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
return False
|
||||
except Exception:
|
||||
LOG.exception("Failed to query smart config")
|
||||
@@ -441,15 +441,15 @@ class PatchAgent(PatchService):
|
||||
nolinktos = 'rpm-nolinktos'
|
||||
if config.get(nolinktos) is not True:
|
||||
# Set the flag
|
||||
- LOG.warning("Setting %s option" % nolinktos)
|
||||
+ LOG.warning("Setting %s option", nolinktos)
|
||||
try:
|
||||
output = subprocess.check_output(smart_cmd +
|
||||
["config", "--set",
|
||||
"%s=true" % nolinktos],
|
||||
stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
- LOG.exception("Failed to configure %s option" % nolinktos)
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.exception("Failed to configure %s option", nolinktos)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
return False
|
||||
|
||||
updated = True
|
||||
@@ -458,15 +458,15 @@ class PatchAgent(PatchService):
|
||||
nosignature = 'rpm-check-signatures'
|
||||
if config.get(nosignature) is not False:
|
||||
# Set the flag
|
||||
- LOG.warning("Setting %s option" % nosignature)
|
||||
+ LOG.warning("Setting %s option", nosignature)
|
||||
try:
|
||||
output = subprocess.check_output(smart_cmd +
|
||||
["config", "--set",
|
||||
"%s=false" % nosignature],
|
||||
stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
- LOG.exception("Failed to configure %s option" % nosignature)
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.exception("Failed to configure %s option", nosignature)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
return False
|
||||
|
||||
updated = True
|
||||
@@ -476,7 +476,7 @@ class PatchAgent(PatchService):
|
||||
subprocess.check_output(smart_update, stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
LOG.exception("Failed to update smartpm")
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
return False
|
||||
|
||||
# Reset the patch op counter to force a detailed query
|
||||
@@ -584,7 +584,7 @@ class PatchAgent(PatchService):
|
||||
self.installed[pkgname] = version.split('@')[0]
|
||||
break
|
||||
except subprocess.CalledProcessError:
|
||||
- LOG.error("Failed to query installed version of %s" % pkgname)
|
||||
+ LOG.error("Failed to query installed version of %s", pkgname)
|
||||
|
||||
self.changes = True
|
||||
|
||||
@@ -641,7 +641,7 @@ class PatchAgent(PatchService):
|
||||
subprocess.check_output(smart_update, stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
LOG.error("Failed to update smartpm")
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
# Set a state to "unknown"?
|
||||
return False
|
||||
|
||||
@@ -663,7 +663,7 @@ class PatchAgent(PatchService):
|
||||
output = subprocess.check_output(smart_query_installed)
|
||||
pkgs_installed = self.parse_smart_pkglist(output)
|
||||
except subprocess.CalledProcessError as e:
|
||||
- LOG.error("Failed to query installed pkgs: %s" % e.output)
|
||||
+ LOG.error("Failed to query installed pkgs: %s", e.output)
|
||||
# Set a state to "unknown"?
|
||||
return False
|
||||
|
||||
@@ -671,7 +671,7 @@ class PatchAgent(PatchService):
|
||||
output = subprocess.check_output(smart_query_base)
|
||||
pkgs_base = self.parse_smart_pkglist(output)
|
||||
except subprocess.CalledProcessError as e:
|
||||
- LOG.error("Failed to query base pkgs: %s" % e.output)
|
||||
+ LOG.error("Failed to query base pkgs: %s", e.output)
|
||||
# Set a state to "unknown"?
|
||||
return False
|
||||
|
||||
@@ -679,7 +679,7 @@ class PatchAgent(PatchService):
|
||||
output = subprocess.check_output(smart_query_updates)
|
||||
pkgs_updates = self.parse_smart_pkglist(output)
|
||||
except subprocess.CalledProcessError as e:
|
||||
- LOG.error("Failed to query patched pkgs: %s" % e.output)
|
||||
+ LOG.error("Failed to query patched pkgs: %s", e.output)
|
||||
# Set a state to "unknown"?
|
||||
return False
|
||||
|
||||
@@ -722,11 +722,11 @@ class PatchAgent(PatchService):
|
||||
# Look for new packages
|
||||
self.check_groups()
|
||||
|
||||
- LOG.info("Patch state query returns %s" % self.changes)
|
||||
- LOG.info("Installed: %s" % self.installed)
|
||||
- LOG.info("To install: %s" % self.to_install)
|
||||
- LOG.info("To remove: %s" % self.to_remove)
|
||||
- LOG.info("Missing: %s" % self.missing_pkgs)
|
||||
+ LOG.info("Patch state query returns %s", self.changes)
|
||||
+ LOG.info("Installed: %s", self.installed)
|
||||
+ LOG.info("To install: %s", self.to_install)
|
||||
+ LOG.info("To remove: %s", self.to_remove)
|
||||
+ LOG.info("Missing: %s", self.missing_pkgs)
|
||||
|
||||
return True
|
||||
|
||||
@@ -794,16 +794,16 @@ class PatchAgent(PatchService):
|
||||
try:
|
||||
if verbose_to_stdout:
|
||||
print("Installing software updates...")
|
||||
- LOG.info("Installing: %s" % ", ".join(install_set))
|
||||
+ LOG.info("Installing: %s", ", ".join(install_set))
|
||||
output = subprocess.check_output(smart_install_cmd + install_set, stderr=subprocess.STDOUT)
|
||||
changed = True
|
||||
for line in output.split('\n'):
|
||||
- LOG.info("INSTALL: %s" % line)
|
||||
+ LOG.info("INSTALL: %s", line)
|
||||
if verbose_to_stdout:
|
||||
print("Software updated.")
|
||||
except subprocess.CalledProcessError as e:
|
||||
LOG.exception("Failed to install RPMs")
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
rc = False
|
||||
if verbose_to_stdout:
|
||||
print("WARNING: Software update failed.")
|
||||
@@ -820,16 +820,16 @@ class PatchAgent(PatchService):
|
||||
try:
|
||||
if verbose_to_stdout:
|
||||
print("Handling patch removal...")
|
||||
- LOG.info("Removing: %s" % ", ".join(remove_set))
|
||||
+ LOG.info("Removing: %s", ", ".join(remove_set))
|
||||
output = subprocess.check_output(smart_remove_cmd + remove_set, stderr=subprocess.STDOUT)
|
||||
changed = True
|
||||
for line in output.split('\n'):
|
||||
- LOG.info("REMOVE: %s" % line)
|
||||
+ LOG.info("REMOVE: %s", line)
|
||||
if verbose_to_stdout:
|
||||
print("Patch removal complete.")
|
||||
except subprocess.CalledProcessError as e:
|
||||
LOG.exception("Failed to remove RPMs")
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
rc = False
|
||||
if verbose_to_stdout:
|
||||
print("WARNING: Patch removal failed.")
|
||||
@@ -862,7 +862,7 @@ class PatchAgent(PatchService):
|
||||
self.node_is_patched = False
|
||||
except subprocess.CalledProcessError as e:
|
||||
LOG.exception("In-Service patch scripts failed")
|
||||
- LOG.error("Command output: %s" % e.output)
|
||||
+ LOG.error("Command output: %s", e.output)
|
||||
# Fail the patching operation
|
||||
rc = False
|
||||
|
||||
@@ -1071,7 +1071,7 @@ def main():
|
||||
# In certain cases, the lighttpd server could still be running using
|
||||
# its default port 80, as opposed to the port configured in platform.conf
|
||||
global http_port_real
|
||||
- LOG.info("Failed install_uuid check via http_port=%s. Trying with default port 80" % http_port_real)
|
||||
+ LOG.info("Failed install_uuid check via http_port=%s. Trying with default port 80", http_port_real)
|
||||
http_port_real = 80
|
||||
|
||||
pa.handle_install(verbose_to_stdout=True, disallow_insvc_patch=True)
|
||||
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
|
||||
index 79a6401..f2b24c8 100644
|
||||
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
|
||||
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
|
||||
@@ -137,11 +137,11 @@ class AgentNeighbour(object):
|
||||
if out_of_date != self.out_of_date or requires_reboot != self.requires_reboot:
|
||||
self.out_of_date = out_of_date
|
||||
self.requires_reboot = requires_reboot
|
||||
- LOG.info("Agent %s (%s) reporting out_of_date=%s, requires_reboot=%s" % (
|
||||
- self.hostname,
|
||||
- self.ip,
|
||||
- self.out_of_date,
|
||||
- self.requires_reboot))
|
||||
+ LOG.info("Agent %s (%s) reporting out_of_date=%s, requires_reboot=%s",
|
||||
+ self.hostname,
|
||||
+ self.ip,
|
||||
+ self.out_of_date,
|
||||
+ self.requires_reboot)
|
||||
|
||||
if self.last_query_id != query_id:
|
||||
self.last_query_id = query_id
|
||||
@@ -488,7 +488,7 @@ class PatchMessageAgentInstallReq(messages.PatchMessage):
|
||||
LOG.error("Should not get here")
|
||||
|
||||
def send(self, sock):
|
||||
- LOG.info("sending install request to node: %s" % self.ip)
|
||||
+ LOG.info("sending install request to node: %s", self.ip)
|
||||
self.encode()
|
||||
message = json.dumps(self.message)
sock.sendto(message, (self.ip, cfg.agent_port))
@@ -512,7 +512,7 @@ class PatchMessageAgentInstallResp(messages.PatchMessage):
messages.PatchMessage.encode(self)

def handle(self, sock, addr):
- LOG.info("Handling install resp from %s" % addr[0])
+ LOG.info("Handling install resp from %s", addr[0])
global pc
# LOG.info("Handling hello ack")

@@ -551,7 +551,7 @@ class PatchMessageDropHostReq(messages.PatchMessage):
return

if self.ip is None:
- LOG.error("Received PATCHMSG_DROP_HOST_REQ with no ip: %s" % json.dumps(self.data))
+ LOG.error("Received PATCHMSG_DROP_HOST_REQ with no ip: %s", json.dumps(self.data))
return

pc.drop_host(self.ip, sync_nbr=False)
@@ -602,7 +602,7 @@ class PatchController(PatchService):
with open(app_dependency_filename, 'r') as f:
self.app_dependencies = json.loads(f.read())
except Exception:
- LOG.exception("Failed to read app dependencies: %s" % app_dependency_filename)
+ LOG.exception("Failed to read app dependencies: %s", app_dependency_filename)
else:
self.app_dependencies = {}

@@ -658,7 +658,7 @@ class PatchController(PatchService):
counter = config.getint('runtime', 'patch_op_counter')
self.patch_op_counter = counter

- LOG.info("patch_op_counter is: %d" % self.patch_op_counter)
+ LOG.info("patch_op_counter is: %d", self.patch_op_counter)
except configparser.Error:
LOG.exception("Failed to read state info")

@@ -679,9 +679,9 @@ class PatchController(PatchService):
"rsync://%s/patching/" % host_url,
"%s/" % patch_dir],
stderr=subprocess.STDOUT)
- LOG.info("Synced to mate patching via rsync: %s" % output)
+ LOG.info("Synced to mate patching via rsync: %s", output)
except subprocess.CalledProcessError as e:
- LOG.error("Failed to rsync: %s" % e.output)
+ LOG.error("Failed to rsync: %s", e.output)
return False

try:
@@ -691,9 +691,9 @@ class PatchController(PatchService):
"rsync://%s/repo/" % host_url,
"%s/" % repo_root_dir],
stderr=subprocess.STDOUT)
- LOG.info("Synced to mate repo via rsync: %s" % output)
+ LOG.info("Synced to mate repo via rsync: %s", output)
except subprocess.CalledProcessError:
- LOG.error("Failed to rsync: %s" % output)
+ LOG.error("Failed to rsync: %s", output)
return False

self.read_state_file()
@@ -710,7 +710,7 @@ class PatchController(PatchService):
with open(app_dependency_filename, 'r') as f:
self.app_dependencies = json.loads(f.read())
except Exception:
- LOG.exception("Failed to read app dependencies: %s" % app_dependency_filename)
+ LOG.exception("Failed to read app dependencies: %s", app_dependency_filename)
else:
self.app_dependencies = {}

@@ -757,7 +757,7 @@ class PatchController(PatchService):
continue

if patch_id not in self.patch_data.metadata:
- LOG.error("Patch data missing for %s" % patch_id)
+ LOG.error("Patch data missing for %s", patch_id)
continue

# If the patch is on a different release than the host, skip it.
@@ -811,7 +811,7 @@ class PatchController(PatchService):
continue

if patch_id not in self.patch_data.metadata:
- LOG.error("Patch data missing for %s" % patch_id)
+ LOG.error("Patch data missing for %s", patch_id)
continue

if personality not in self.patch_data.metadata[patch_id]:
@@ -835,7 +835,7 @@ class PatchController(PatchService):
continue

if patch_id not in self.patch_data.metadata:
- LOG.error("Patch data missing for %s" % patch_id)
+ LOG.error("Patch data missing for %s", patch_id)
continue

if personality not in self.patch_data.metadata[patch_id]:
@@ -902,10 +902,10 @@ class PatchController(PatchService):

if os.path.exists(semchk):
try:
- LOG.info("Running semantic check: %s" % semchk)
+ LOG.info("Running semantic check: %s", semchk)
subprocess.check_output([semchk] + patch_state_args,
stderr=subprocess.STDOUT)
- LOG.info("Semantic check %s passed" % semchk)
+ LOG.info("Semantic check %s passed", semchk)
except subprocess.CalledProcessError as e:
msg = "Semantic check failed for %s:\n%s" % (patch_id, e.output)
LOG.exception(msg)
@@ -1158,7 +1158,7 @@ class PatchController(PatchService):
# Copy the RPMs. If a failure occurs, clean up copied files.
copied = []
for rpmfile in rpmlist:
- LOG.info("Copy %s to %s" % (rpmfile, rpmlist[rpmfile]))
+ LOG.info("Copy %s to %s", rpmfile, rpmlist[rpmfile])
try:
shutil.copy(rpmfile, rpmlist[rpmfile])
copied.append(rpmlist[rpmfile])
@@ -1167,7 +1167,7 @@ class PatchController(PatchService):
LOG.exception(msg)
# Clean up files
for filename in copied:
- LOG.info("Cleaning up %s" % filename)
+ LOG.info("Cleaning up %s", filename)
os.remove(filename)

raise RpmFail(msg)
@@ -1206,7 +1206,7 @@ class PatchController(PatchService):
"comps.xml",
rdir],
stderr=subprocess.STDOUT)
- LOG.info("Repo[%s] updated:\n%s" % (ver, output))
+ LOG.info("Repo[%s] updated:\n%s", ver, output)
except subprocess.CalledProcessError:
msg = "Failed to update the repo for %s" % ver
LOG.exception(msg)
@@ -1387,7 +1387,7 @@ class PatchController(PatchService):
"comps.xml",
rdir],
stderr=subprocess.STDOUT)
- LOG.info("Repo[%s] updated:\n%s" % (ver, output))
+ LOG.info("Repo[%s] updated:\n%s", ver, output)
except subprocess.CalledProcessError:
msg = "Failed to update the repo for %s" % ver
LOG.exception(msg)
@@ -1529,7 +1529,7 @@ class PatchController(PatchService):
"comps.xml",
repo_dir[release]],
stderr=subprocess.STDOUT)
- LOG.info("Repo[%s] updated:\n%s" % (release, output))
+ LOG.info("Repo[%s] updated:\n%s", release, output)
except subprocess.CalledProcessError:
msg = "Failed to update the repo for %s" % release
LOG.exception(msg)
@@ -1844,7 +1844,7 @@ class PatchController(PatchService):
for patch_id in sorted(patch_ids):
if patch_id not in self.patch_data.metadata.keys():
errormsg = "%s is unrecognized\n" % patch_id
- LOG.info("patch_query_dependencies: %s" % errormsg)
+ LOG.info("patch_query_dependencies: %s", errormsg)
results["error"] += errormsg
failure = True
self.patch_data_lock.release()
@@ -1892,7 +1892,7 @@ class PatchController(PatchService):
errormsg = "A commit cannot be performed with non-REL status patches in the system:\n"
for patch_id in non_rel_list:
errormsg += " %s\n" % patch_id
- LOG.info("patch_commit rejected: %s" % errormsg)
+ LOG.info("patch_commit rejected: %s", errormsg)
results["error"] += errormsg
return results

@@ -1901,7 +1901,7 @@ class PatchController(PatchService):
for patch_id in sorted(patch_ids):
if patch_id not in self.patch_data.metadata.keys():
errormsg = "%s is unrecognized\n" % patch_id
- LOG.info("patch_commit: %s" % errormsg)
+ LOG.info("patch_commit: %s", errormsg)
results["error"] += errormsg
failure = True
self.patch_data_lock.release()
@@ -1925,7 +1925,7 @@ class PatchController(PatchService):
errormsg = "The following patches are not applied and cannot be committed:\n"
for patch_id in avail_list:
errormsg += " %s\n" % patch_id
- LOG.info("patch_commit rejected: %s" % errormsg)
+ LOG.info("patch_commit rejected: %s", errormsg)
results["error"] += errormsg
return results

@@ -2039,7 +2039,7 @@ class PatchController(PatchService):
"comps.xml",
rdir],
stderr=subprocess.STDOUT)
- LOG.info("Repo[%s] updated:\n%s" % (ver, output))
+ LOG.info("Repo[%s] updated:\n%s", ver, output)
except subprocess.CalledProcessError:
msg = "Failed to update the repo for %s" % ver
LOG.exception(msg)
@@ -2100,7 +2100,7 @@ class PatchController(PatchService):
self.hosts_lock.release()
msg = "Unknown host specified: %s" % host_ip
msg_error += msg + "\n"
- LOG.error("Error in host-install: " + msg)
+ LOG.error("Error in host-install: %s", msg)
return dict(info=msg_info, warning=msg_warning, error=msg_error)

msg = "Running host-install for %s (%s), force=%s, async_req=%s" % (host_ip, ip, force, async_req)
@@ -2128,7 +2128,7 @@ class PatchController(PatchService):
# async_req install requested, so return now
msg = "Patch installation request sent to %s." % self.hosts[ip].hostname
msg_info += msg + "\n"
- LOG.info("host-install async_req: " + msg)
+ LOG.info("host-install async_req: %s", msg)
return dict(info=msg_info, warning=msg_warning, error=msg_error)

# Now we wait, up to ten mins... TODO: Wait on a condition
@@ -2141,7 +2141,7 @@ class PatchController(PatchService):
self.hosts_lock.release()
msg = "Agent expired while waiting: %s" % ip
msg_error += msg + "\n"
- LOG.error("Error in host-install: " + msg)
+ LOG.error("Error in host-install: %s", msg)
break

if not self.hosts[ip].install_pending:
@@ -2150,17 +2150,17 @@ class PatchController(PatchService):
if self.hosts[ip].install_status:
msg = "Patch installation was successful on %s." % self.hosts[ip].hostname
msg_info += msg + "\n"
- LOG.info("host-install: " + msg)
+ LOG.info("host-install: %s", msg)
elif self.hosts[ip].install_reject_reason:
msg = "Patch installation rejected by %s. %s" % (
self.hosts[ip].hostname,
self.hosts[ip].install_reject_reason)
msg_error += msg + "\n"
- LOG.error("Error in host-install: " + msg)
+ LOG.error("Error in host-install: %s", msg)
else:
msg = "Patch installation failed on %s." % self.hosts[ip].hostname
msg_error += msg + "\n"
- LOG.error("Error in host-install: " + msg)
+ LOG.error("Error in host-install: %s", msg)

self.hosts_lock.release()
break
@@ -2172,7 +2172,7 @@ class PatchController(PatchService):
if not resp_rx:
msg = "Timeout occurred while waiting response from %s." % ip
msg_error += msg + "\n"
- LOG.error("Error in host-install: " + msg)
+ LOG.error("Error in host-install: %s", msg)

return dict(info=msg_info, warning=msg_warning, error=msg_error)

@@ -2203,7 +2203,7 @@ class PatchController(PatchService):
self.hosts_lock.release()
msg = "Unknown host specified: %s" % host_ip
msg_error += msg + "\n"
- LOG.error("Error in drop-host: " + msg)
+ LOG.error("Error in drop-host: %s", msg)
return dict(info=msg_info, warning=msg_warning, error=msg_error)

msg = "Running drop-host for %s (%s)" % (host_ip, ip)
@@ -2272,8 +2272,8 @@ class PatchController(PatchService):

appname = kwargs.get("app")

- LOG.info("Handling app dependencies report: app=%s, patch_ids=%s" %
- (appname, ','.join(patch_ids)))
+ LOG.info("Handling app dependencies report: app=%s, patch_ids=%s",
+ appname, ','.join(patch_ids))

self.patch_data_lock.acquire()

@@ -2516,7 +2516,7 @@ class PatchControllerMainThread(threading.Thread):
inputs = [pc.sock_in] + agent_query_conns
outputs = []

- # LOG.info("Running select, remaining=%d" % remaining)
+ # LOG.info("Running select, remaining=%d", remaining)
rlist, wlist, xlist = select.select(inputs, outputs, inputs, remaining)

if (len(rlist) == 0 and
@@ -2641,7 +2641,7 @@ class PatchControllerMainThread(threading.Thread):
for n in nbrs:
# Age out controllers after 2 minutes
if pc.controller_neighbours[n].get_age() >= 120:
- LOG.info("Aging out controller %s from table" % n)
+ LOG.info("Aging out controller %s from table", n)
del pc.controller_neighbours[n]
pc.controller_neighbours_lock.release()

@@ -2650,7 +2650,7 @@ class PatchControllerMainThread(threading.Thread):
for n in nbrs:
# Age out hosts after 1 hour
if pc.hosts[n].get_age() >= 3600:
- LOG.info("Aging out host %s from table" % n)
+ LOG.info("Aging out host %s from table", n)
del pc.hosts[n]
for patch_id in pc.interim_state.keys():
if n in pc.interim_state[patch_id]:
diff --git a/cgcs-patch/cgcs-patch/pylint.rc b/cgcs-patch/cgcs-patch/pylint.rc
index a2d888b..57a9829 100644
--- a/cgcs-patch/cgcs-patch/pylint.rc
+++ b/cgcs-patch/cgcs-patch/pylint.rc
@@ -47,9 +47,8 @@ symbols=no
# W0107 unnecessary-pass
# W0603 global-statement
# W0703 broad-except
-# W1201 logging-not-lazy
# W1505, deprecated-method
-disable=C, R, W0107, W0603, W0703, W1201, W1505
+disable=C, R, W0107, W0603, W0703, W1505


[REPORTS]
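
Aside: every hunk in the removed pylint-cleanup patch above makes the same mechanical change, swapping eager %-interpolation in log calls for lazy, argument-based formatting (pylint W1201, logging-not-lazy). A minimal standalone Python sketch of the difference; the logger name and patch id below are illustrative only and not taken from the patch:

import logging

LOG = logging.getLogger('w1201_example')   # hypothetical logger
patch_id = 'EXAMPLE_PATCH_0001'            # hypothetical value

# Not lazy: the string is built even when INFO records are being discarded
LOG.info("Patch data missing for %s" % patch_id)

# Lazy: the logging framework formats the message only if the record is emitted
LOG.info("Patch data missing for %s", patch_id)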
@@ -1,841 +0,0 @@
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
index 3abd891..d8bc375 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
@@ -5,22 +5,26 @@ SPDX-License-Identifier: Apache-2.0

"""

-import os
-import time
-import socket
+import dnf
+import dnf.callback
+import dnf.comps
+import dnf.exceptions
+import dnf.rpm
+import dnf.sack
+import dnf.transaction
import json
-import select
-import subprocess
+import libdnf.transaction
+import os
import random
import requests
-import xml.etree.ElementTree as ElementTree
-import rpm
-import sys
-import yaml
+import select
import shutil
+import socket
+import subprocess
+import sys
+import time

from cgcs_patch.patch_functions import configure_logging
-from cgcs_patch.patch_functions import parse_pkgver
from cgcs_patch.patch_functions import LOG
import cgcs_patch.config as cfg
from cgcs_patch.base import PatchService
@@ -50,19 +54,13 @@ pa = None

http_port_real = http_port

-# Smart commands
-smart_cmd = ["/usr/bin/smart"]
-smart_quiet = smart_cmd + ["--quiet"]
-smart_update = smart_quiet + ["update"]
-smart_newer = smart_quiet + ["newer"]
-smart_orphans = smart_quiet + ["query", "--orphans", "--show-format", "$name\n"]
-smart_query = smart_quiet + ["query"]
-smart_query_repos = smart_quiet + ["query", "--channel=base", "--channel=updates"]
-smart_install_cmd = smart_cmd + ["install", "--yes", "--explain"]
-smart_remove_cmd = smart_cmd + ["remove", "--yes", "--explain"]
-smart_query_installed = smart_quiet + ["query", "--installed", "--show-format", "$name $version\n"]
-smart_query_base = smart_quiet + ["query", "--channel=base", "--show-format", "$name $version\n"]
-smart_query_updates = smart_quiet + ["query", "--channel=updates", "--show-format", "$name $version\n"]
+# DNF commands
+dnf_cmd = ['/usr/bin/dnf']
+dnf_quiet = dnf_cmd + ['--quiet']
+dnf_makecache = dnf_quiet + ['makecache',
+ '--disablerepo="*"',
+ '--enablerepo', 'platform-base',
+ '--enablerepo', 'platform-updates']


def setflag(fname):
@@ -123,10 +121,6 @@ class PatchMessageHelloAgent(messages.PatchMessage):
def handle(self, sock, addr):
# Send response

- # Run the smart config audit
- global pa
- pa.timed_audit_smart_config()
-
#
# If a user tries to do a host-install on an unlocked node,
# without bypassing the lock check (either via in-service
@@ -289,6 +283,46 @@ class PatchMessageAgentInstallResp(messages.PatchMessage):
resp.send(sock)


+class PatchAgentDnfTransLogCB(dnf.callback.TransactionProgress):
+ def __init__(self):
+ dnf.callback.TransactionProgress.__init__(self)
+
+ self.log_prefix = 'dnf trans'
+
+ def progress(self, package, action, ti_done, ti_total, ts_done, ts_total):
+ if action in dnf.transaction.ACTIONS:
+ action_str = dnf.transaction.ACTIONS[action]
+ elif action == dnf.transaction.TRANS_POST:
+ action_str = 'Post transaction'
+ else:
+ action_str = 'unknown(%d)' % action
+
+ if ti_done is not None:
+ # To reduce the volume of logs, only log 0% and 100%
+ if ti_done == 0 or ti_done == ti_total:
+ LOG.info('%s PROGRESS %s: %s %0.1f%% [%s/%s]',
+ self.log_prefix, action_str, package,
+ (ti_done * 100 / ti_total),
+ ts_done, ts_total)
+ else:
+ LOG.info('%s PROGRESS %s: %s [%s/%s]',
+ self.log_prefix, action_str, package, ts_done, ts_total)
+
+ def filelog(self, package, action):
+ if action in dnf.transaction.FILE_ACTIONS:
+ msg = '%s: %s' % (dnf.transaction.FILE_ACTIONS[action], package)
+ else:
+ msg = '%s: %s' % (package, action)
+ LOG.info('%s FILELOG %s', self.log_prefix, msg)
+
+ def scriptout(self, msgs):
+ if msgs:
+ LOG.info("%s SCRIPTOUT :\n%s", self.log_prefix, msgs)
+
+ def error(self, message):
+ LOG.error("%s ERROR: %s", self.log_prefix, message)
+
+
class PatchAgent(PatchService):
def __init__(self):
PatchService.__init__(self)
@@ -298,9 +332,14 @@ class PatchAgent(PatchService):
self.listener = None
self.changes = False
self.installed = {}
+ self.installed_dnf = []
self.to_install = {}
+ self.to_install_dnf = []
+ self.to_downgrade_dnf = []
self.to_remove = []
+ self.to_remove_dnf = []
self.missing_pkgs = []
+ self.missing_pkgs_dnf = []
self.patch_op_counter = 0
self.node_is_patched = os.path.exists(node_is_patched_file)
self.node_is_patched_timestamp = 0
@@ -308,6 +347,7 @@ class PatchAgent(PatchService):
self.state = constants.PATCH_AGENT_STATE_IDLE
self.last_config_audit = 0
self.rejection_timestamp = 0
+ self.dnfb = None

# Check state flags
if os.path.exists(patch_installing_file):
@@ -343,289 +383,40 @@ class PatchAgent(PatchService):
self.listener.bind(('', self.port))
self.listener.listen(2) # Allow two connections, for two controllers

- def audit_smart_config(self):
- LOG.info("Auditing smart configuration")
-
- # Get the current channel config
- try:
- output = subprocess.check_output(smart_cmd +
- ["channel", "--yaml"],
- stderr=subprocess.STDOUT)
- config = yaml.load(output)
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to query channels")
- LOG.error("Command output: %s", e.output)
- return False
- except Exception:
- LOG.exception("Failed to query channels")
- return False
-
- expected = [{'channel': 'rpmdb',
- 'type': 'rpm-sys',
- 'name': 'RPM Database',
- 'baseurl': None},
- {'channel': 'base',
- 'type': 'rpm-md',
- 'name': 'Base',
- 'baseurl': "http://controller:%s/feed/rel-%s" % (http_port_real, SW_VERSION)},
- {'channel': 'updates',
- 'type': 'rpm-md',
- 'name': 'Patches',
- 'baseurl': "http://controller:%s/updates/rel-%s" % (http_port_real, SW_VERSION)}]
-
- updated = False
-
- for item in expected:
- channel = item['channel']
- ch_type = item['type']
- ch_name = item['name']
- ch_baseurl = item['baseurl']
-
- add_channel = False
-
- if channel in config:
- # Verify existing channel config
- if (config[channel].get('type') != ch_type or
- config[channel].get('name') != ch_name or
- config[channel].get('baseurl') != ch_baseurl):
- # Config is invalid
- add_channel = True
- LOG.warning("Invalid smart config found for %s", channel)
- try:
- output = subprocess.check_output(smart_cmd +
- ["channel", "--yes",
- "--remove", channel],
- stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to configure %s channel", channel)
- LOG.error("Command output: %s", e.output)
- return False
- else:
- # Channel is missing
- add_channel = True
- LOG.warning("Channel %s is missing from config", channel)
-
- if add_channel:
- LOG.info("Adding channel %s", channel)
- cmd_args = ["channel", "--yes", "--add", channel,
- "type=%s" % ch_type,
- "name=%s" % ch_name]
- if ch_baseurl is not None:
- cmd_args += ["baseurl=%s" % ch_baseurl]
-
- try:
- output = subprocess.check_output(smart_cmd + cmd_args,
- stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to configure %s channel", channel)
- LOG.error("Command output: %s", e.output)
- return False
-
- updated = True
-
- # Validate the smart config
- try:
- output = subprocess.check_output(smart_cmd +
- ["config", "--yaml"],
- stderr=subprocess.STDOUT)
- config = yaml.load(output)
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to query smart config")
- LOG.error("Command output: %s", e.output)
- return False
- except Exception:
- LOG.exception("Failed to query smart config")
- return False
-
- # Check for the rpm-nolinktos flag
- nolinktos = 'rpm-nolinktos'
- if config.get(nolinktos) is not True:
- # Set the flag
- LOG.warning("Setting %s option", nolinktos)
- try:
- output = subprocess.check_output(smart_cmd +
- ["config", "--set",
- "%s=true" % nolinktos],
- stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to configure %s option", nolinktos)
- LOG.error("Command output: %s", e.output)
- return False
-
- updated = True
-
- # Check for the rpm-check-signatures flag
- nosignature = 'rpm-check-signatures'
- if config.get(nosignature) is not False:
- # Set the flag
- LOG.warning("Setting %s option", nosignature)
- try:
- output = subprocess.check_output(smart_cmd +
- ["config", "--set",
- "%s=false" % nosignature],
- stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to configure %s option", nosignature)
- LOG.error("Command output: %s", e.output)
- return False
-
- updated = True
-
- if updated:
- try:
- subprocess.check_output(smart_update, stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to update smartpm")
- LOG.error("Command output: %s", e.output)
- return False
-
- # Reset the patch op counter to force a detailed query
- self.patch_op_counter = 0
-
- self.last_config_audit = time.time()
- return True
-
- def timed_audit_smart_config(self):
- rc = True
- if (time.time() - self.last_config_audit) > 1800:
- # It's been 30 minutes since the last completed audit
- LOG.info("Kicking timed audit")
- rc = self.audit_smart_config()
-
- return rc
-
@staticmethod
- def parse_smart_pkglist(output):
- pkglist = {}
- for line in output.splitlines():
- if line == '':
- continue
-
- fields = line.split()
- pkgname = fields[0]
- (version, arch) = fields[1].split('@')
-
- if pkgname not in pkglist:
- pkglist[pkgname] = {}
- pkglist[pkgname][arch] = version
- elif arch not in pkglist[pkgname]:
- pkglist[pkgname][arch] = version
+ def pkgobjs_to_list(pkgobjs):
+ # Transform pkgobj list to format used by patch-controller
+ output = {}
+ for pkg in pkgobjs:
+ if pkg.epoch != 0:
+ output[pkg.name] = "%s:%s-%s@%s" % (pkg.epoch, pkg.version, pkg.release, pkg.arch)
else:
- stored_ver = pkglist[pkgname][arch]
-
- # The rpm.labelCompare takes version broken into 3 components
- # It returns:
- # 1, if first arg is higher version
- # 0, if versions are same
- # -1, if first arg is lower version
- rc = rpm.labelCompare(parse_pkgver(version),
- parse_pkgver(stored_ver))
+ output[pkg.name] = "%s-%s@%s" % (pkg.version, pkg.release, pkg.arch)

- if rc > 0:
- # Update version
- pkglist[pkgname][arch] = version
+ return output

- return pkglist
+ def dnf_reset_client(self):
+ if self.dnfb is not None:
+ self.dnfb.close()
+ self.dnfb = None

- @staticmethod
- def get_pkg_version(pkglist, pkg, arch):
- if pkg not in pkglist:
- return None
- if arch not in pkglist[pkg]:
- return None
- return pkglist[pkg][arch]
-
- def parse_smart_newer(self, output):
- # Skip the first two lines, which are headers
- for line in output.splitlines()[2:]:
- if line == '':
- continue
-
- fields = line.split()
- pkgname = fields[0]
- installedver = fields[2]
- newver = fields[5]
+ self.dnfb = dnf.Base()
+ self.dnfb.conf.substitutions['infra'] = 'stock'

- self.installed[pkgname] = installedver
- self.to_install[pkgname] = newver
-
- def parse_smart_orphans(self, output):
- for pkgname in output.splitlines():
- if pkgname == '':
- continue
+ # Reset default installonlypkgs list
+ self.dnfb.conf.installonlypkgs = []

- highest_version = None
+ self.dnfb.read_all_repos()

- try:
- query = subprocess.check_output(smart_query_repos + ["--show-format", '$version\n', pkgname])
- # The last non-blank version is the highest
- for version in query.splitlines():
- if version == '':
- continue
- highest_version = version.split('@')[0]
-
- except subprocess.CalledProcessError:
- # Package is not in the repo
- highest_version = None
-
- if highest_version is None:
- # Package is to be removed
- self.to_remove.append(pkgname)
+ # Ensure only platform repos are enabled for transaction
+ for repo in self.dnfb.repos.all():
+ if repo.id == 'platform-base' or repo.id == 'platform-updates':
+ repo.enable()
else:
- # Rollback to the highest version
- self.to_install[pkgname] = highest_version
+ repo.disable()

- # Get the installed version
- try:
- query = subprocess.check_output(smart_query + ["--installed", "--show-format", '$version\n', pkgname])
- for version in query.splitlines():
- if version == '':
- continue
- self.installed[pkgname] = version.split('@')[0]
- break
- except subprocess.CalledProcessError:
- LOG.error("Failed to query installed version of %s", pkgname)
-
- self.changes = True
-
- def check_groups(self):
- # Get the groups file
- mygroup = "updates-%s" % "-".join(subfunctions)
- self.missing_pkgs = []
- installed_pkgs = []
-
- groups_url = "http://controller:%s/updates/rel-%s/comps.xml" % (http_port_real, SW_VERSION)
- try:
- req = requests.get(groups_url)
- if req.status_code != 200:
- LOG.error("Failed to get groups list from server")
- return False
- except requests.ConnectionError:
- LOG.error("Failed to connect to server")
- return False
-
- # Get list of installed packages
- try:
- query = subprocess.check_output(["rpm", "-qa", "--queryformat", "%{NAME}\n"])
- installed_pkgs = query.split()
- except subprocess.CalledProcessError:
- LOG.exception("Failed to query RPMs")
- return False
-
- root = ElementTree.fromstring(req.text)
- for child in root:
- group_id = child.find('id')
- if group_id is None or group_id.text != mygroup:
- continue
-
- pkglist = child.find('packagelist')
- if pkglist is None:
- continue
-
- for pkg in pkglist.findall('packagereq'):
- if pkg.text not in installed_pkgs and pkg.text not in self.missing_pkgs:
- self.missing_pkgs.append(pkg.text)
- self.changes = True
+ # Read repo info
+ self.dnfb.fill_sack()

def query(self):
""" Check current patch state """
@@ -633,14 +424,15 @@ class PatchAgent(PatchService):
LOG.info("Failed install_uuid check. Skipping query")
return False

- if not self.audit_smart_config():
- # Set a state to "unknown"?
- return False
+ if self.dnfb is not None:
+ self.dnfb.close()
+ self.dnfb = None

+ # TODO(dpenney): Use python APIs for makecache
try:
- subprocess.check_output(smart_update, stderr=subprocess.STDOUT)
+ subprocess.check_output(dnf_makecache, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
- LOG.error("Failed to update smartpm")
+ LOG.error("Failed to run dnf makecache")
LOG.error("Command output: %s", e.output)
# Set a state to "unknown"?
return False
@@ -649,78 +441,72 @@ class PatchAgent(PatchService):
self.query_id = random.random()

self.changes = False
+ self.installed_dnf = []
self.installed = {}
- self.to_install = {}
+ self.to_install_dnf = []
+ self.to_downgrade_dnf = []
self.to_remove = []
+ self.to_remove_dnf = []
self.missing_pkgs = []
+ self.missing_pkgs_dnf = []

- # Get the repo data
- pkgs_installed = {}
- pkgs_base = {}
- pkgs_updates = {}
-
- try:
- output = subprocess.check_output(smart_query_installed)
- pkgs_installed = self.parse_smart_pkglist(output)
- except subprocess.CalledProcessError as e:
- LOG.error("Failed to query installed pkgs: %s", e.output)
- # Set a state to "unknown"?
- return False
-
- try:
- output = subprocess.check_output(smart_query_base)
- pkgs_base = self.parse_smart_pkglist(output)
- except subprocess.CalledProcessError as e:
- LOG.error("Failed to query base pkgs: %s", e.output)
- # Set a state to "unknown"?
- return False
+ self.dnf_reset_client()

- try:
- output = subprocess.check_output(smart_query_updates)
- pkgs_updates = self.parse_smart_pkglist(output)
- except subprocess.CalledProcessError as e:
- LOG.error("Failed to query patched pkgs: %s", e.output)
- # Set a state to "unknown"?
- return False
+ # Get the repo data
+ pkgs_installed = dnf.sack._rpmdb_sack(self.dnfb).query().installed() # pylint: disable=protected-access
+ avail = self.dnfb.sack.query().available().latest()

- # There are four possible actions:
- # 1. If installed pkg is not in base or updates, remove it.
- # 2. If installed pkg version is higher than highest in base
- # or updates, downgrade it.
- # 3. If installed pkg version is lower than highest in updates,
- # upgrade it.
- # 4. If pkg in grouplist is not in installed, install it.
+ # There are three possible actions:
+ # 1. If installed pkg is not in a repo, remove it.
+ # 2. If installed pkg version does not match newest repo version, update it.
+ # 3. If a package in the grouplist is not installed, install it.

for pkg in pkgs_installed:
- for arch in pkgs_installed[pkg]:
- installed_version = pkgs_installed[pkg][arch]
- updates_version = self.get_pkg_version(pkgs_updates, pkg, arch)
- base_version = self.get_pkg_version(pkgs_base, pkg, arch)
-
- if updates_version is None and base_version is None:
- # Remove it
- self.to_remove.append(pkg)
- self.changes = True
- continue
+ highest = avail.filter(name=pkg.name, arch=pkg.arch)
+ if highest:
+ highest_pkg = highest[0]

- compare_version = updates_version
- if compare_version is None:
- compare_version = base_version
-
- # Compare the installed version to what's in the repo
- rc = rpm.labelCompare(parse_pkgver(installed_version),
- parse_pkgver(compare_version))
- if rc == 0:
- # Versions match, nothing to do.
+ if pkg.evr_eq(highest_pkg):
continue
+
+ if pkg.evr_gt(highest_pkg):
+ self.to_downgrade_dnf.append(highest_pkg)
else:
- # Install the version from the repo
- self.to_install[pkg] = "@".join([compare_version, arch])
- self.installed[pkg] = "@".join([installed_version, arch])
- self.changes = True
+ self.to_install_dnf.append(highest_pkg)
+ else:
+ self.to_remove_dnf.append(pkg)
+ self.to_remove.append(pkg.name)
+
+ self.installed_dnf.append(pkg)
+ self.changes = True

# Look for new packages
- self.check_groups()
+ self.dnfb.read_comps()
+ grp_id = 'updates-%s' % '-'.join(subfunctions)
+ pkggrp = None
+ for grp in self.dnfb.comps.groups_iter():
+ if grp.id == grp_id:
+ pkggrp = grp
+ break
+
+ if pkggrp is None:
+ LOG.error("Could not find software group: %s", grp_id)
+
+ for pkg in pkggrp.packages_iter():
+ try:
+ res = pkgs_installed.filter(name=pkg.name)
+ if len(res) == 0:
+ found_pkg = avail.filter(name=pkg.name)
+ self.missing_pkgs_dnf.append(found_pkg[0])
+ self.missing_pkgs.append(found_pkg[0].name)
+ self.changes = True
+ except dnf.exceptions.PackageNotFoundError:
+ self.missing_pkgs_dnf.append(pkg)
+ self.missing_pkgs.append(pkg.name)
+ self.changes = True
+
+ self.installed = self.pkgobjs_to_list(self.installed_dnf)
+ self.to_install = self.pkgobjs_to_list(self.to_install_dnf + self.to_downgrade_dnf)

LOG.info("Patch state query returns %s", self.changes)
LOG.info("Installed: %s", self.installed)
@@ -730,6 +516,35 @@ class PatchAgent(PatchService):

return True

+ def resolve_dnf_transaction(self, undo_failure=True):
+ LOG.info("Starting to process transaction: undo_failure=%s", undo_failure)
+ self.dnfb.resolve()
+ self.dnfb.download_packages(self.dnfb.transaction.install_set)
+
+ tid = self.dnfb.do_transaction(display=PatchAgentDnfTransLogCB())
+
+ transaction_rc = True
+ for t in self.dnfb.transaction:
+ if t.state != libdnf.transaction.TransactionItemState_DONE:
+ transaction_rc = False
+ break
+
+ self.dnf_reset_client()
+
+ if not transaction_rc:
+ if undo_failure:
+ LOG.error("Failure occurred... Undoing last transaction (%s)", tid)
+ old = self.dnfb.history.old((tid,))[0]
+ mobj = dnf.db.history.MergedTransactionWrapper(old)
+
+ self.dnfb._history_undo_operations(mobj, old.tid, True) # pylint: disable=protected-access
+
+ if not self.resolve_dnf_transaction(undo_failure=False):
+ LOG.error("Failed to undo transaction")
+
+ LOG.info("Transaction complete: undo_failure=%s, success=%s", undo_failure, transaction_rc)
+ return transaction_rc
+
def handle_install(self, verbose_to_stdout=False, disallow_insvc_patch=False):
#
# The disallow_insvc_patch parameter is set when we're installing
@@ -781,64 +596,54 @@ class PatchAgent(PatchService):
if verbose_to_stdout:
print("Checking for software updates...")
self.query()
- install_set = []
- for pkg, version in self.to_install.items():
- install_set.append("%s-%s" % (pkg, version))
-
- install_set += self.missing_pkgs

changed = False
rc = True

- if len(install_set) > 0:
+ if len(self.to_install_dnf) > 0 or len(self.to_downgrade_dnf) > 0:
+ LOG.info("Adding pkgs to installation set: %s", self.to_install)
+ for pkg in self.to_install_dnf:
+ self.dnfb.package_install(pkg)
+
+ for pkg in self.to_downgrade_dnf:
+ self.dnfb.package_downgrade(pkg)
+
+ changed = True
+
+ if len(self.missing_pkgs_dnf) > 0:
+ LOG.info("Adding missing pkgs to installation set: %s", self.missing_pkgs)
+ for pkg in self.missing_pkgs_dnf:
+ self.dnfb.package_install(pkg)
+ changed = True
+
+ if len(self.to_remove_dnf) > 0:
+ LOG.info("Adding pkgs to be removed: %s", self.to_remove)
+ for pkg in self.to_remove_dnf:
+ self.dnfb.package_remove(pkg)
+ changed = True
+
+ if changed:
+ # Run the transaction set
+ transaction_rc = False
try:
- if verbose_to_stdout:
- print("Installing software updates...")
- LOG.info("Installing: %s", ", ".join(install_set))
- output = subprocess.check_output(smart_install_cmd + install_set, stderr=subprocess.STDOUT)
- changed = True
- for line in output.split('\n'):
- LOG.info("INSTALL: %s", line)
- if verbose_to_stdout:
- print("Software updated.")
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to install RPMs")
- LOG.error("Command output: %s", e.output)
+ transaction_rc = self.resolve_dnf_transaction()
+ except dnf.exceptions.DepsolveError:
+ LOG.error("Failures resolving dependencies in transaction")
+ except dnf.exceptions.DownloadError:
+ LOG.error("Failures downloading in transaction")
+
+ if not transaction_rc:
+ LOG.error("Failures occurred during transaction")
rc = False
if verbose_to_stdout:
print("WARNING: Software update failed.")
+
else:
if verbose_to_stdout:
print("Nothing to install.")
LOG.info("Nothing to install")

- if rc:
- self.query()
- remove_set = self.to_remove
-
- if len(remove_set) > 0:
- try:
- if verbose_to_stdout:
- print("Handling patch removal...")
- LOG.info("Removing: %s", ", ".join(remove_set))
- output = subprocess.check_output(smart_remove_cmd + remove_set, stderr=subprocess.STDOUT)
- changed = True
- for line in output.split('\n'):
- LOG.info("REMOVE: %s", line)
- if verbose_to_stdout:
- print("Patch removal complete.")
- except subprocess.CalledProcessError as e:
- LOG.exception("Failed to remove RPMs")
- LOG.error("Command output: %s", e.output)
- rc = False
- if verbose_to_stdout:
- print("WARNING: Patch removal failed.")
- else:
- if verbose_to_stdout:
- print("Nothing to remove.")
- LOG.info("Nothing to remove")
-
- if changed:
+ if changed and rc:
# Update the node_is_patched flag
setflag(node_is_patched_file)

@@ -1057,7 +862,7 @@ class PatchAgent(PatchService):
def main():
global pa

- configure_logging()
+ configure_logging(dnf_log=True)

cfg.read_config()
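
Aside: the configure_logging(dnf_log=True) change above pairs with the patch_functions.py hunk below, which simply routes the dnf library's own logger into the agent's existing file handler. A rough standalone sketch under assumed names; the log path, format string and logger name here are illustrative, not taken from the patch:

import logging

main_log_handler = logging.FileHandler('/tmp/patch-agent-example.log')  # hypothetical path
main_log_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))

LOG = logging.getLogger('patch_agent_example')  # hypothetical logger
LOG.addHandler(main_log_handler)

# Attach the same handler to the 'dnf' logger so library messages land in the same file
logging.getLogger('dnf').addHandler(main_log_handler)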

diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py b/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
index e9017f2..2ee9fce 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
@@ -69,7 +69,7 @@ def handle_exception(exc_type, exc_value, exc_traceback):
sys.__excepthook__(exc_type, exc_value, exc_traceback)


-def configure_logging(logtofile=True, level=logging.INFO):
+def configure_logging(logtofile=True, level=logging.INFO, dnf_log=False):
if logtofile:
my_exec = os.path.basename(sys.argv[0])

@@ -84,6 +84,11 @@ def configure_logging(logtofile=True, level=logging.INFO):
main_log_handler = logging.FileHandler(logfile)
main_log_handler.setFormatter(formatter)
LOG.addHandler(main_log_handler)
+
+ if dnf_log:
+ dnf_logger = logging.getLogger('dnf')
+ dnf_logger.addHandler(main_log_handler)
+
try:
os.chmod(logfile, 0o640)
except Exception:
diff --git a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py
index bd1eef9..7e30fc5 100644
--- a/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py
+++ b/cgcs-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py
@@ -10,6 +10,15 @@ import sys
import testtools

sys.modules['rpm'] = mock.Mock()
+sys.modules['dnf'] = mock.Mock()
+sys.modules['dnf.callback'] = mock.Mock()
+sys.modules['dnf.comps'] = mock.Mock()
+sys.modules['dnf.exceptions'] = mock.Mock()
+sys.modules['dnf.rpm'] = mock.Mock()
+sys.modules['dnf.sack'] = mock.Mock()
+sys.modules['dnf.transaction'] = mock.Mock()
+sys.modules['libdnf'] = mock.Mock()
+sys.modules['libdnf.transaction'] = mock.Mock()

import cgcs_patch.patch_agent # noqa: E402

diff --git a/cgcs-patch/cgcs-patch/pylint.rc b/cgcs-patch/cgcs-patch/pylint.rc
index 57a9829..f511718 100644
--- a/cgcs-patch/cgcs-patch/pylint.rc
+++ b/cgcs-patch/cgcs-patch/pylint.rc
@@ -45,10 +45,11 @@ symbols=no
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
# W0107 unnecessary-pass
+# W0511 fixme
# W0603 global-statement
# W0703 broad-except
# W1505, deprecated-method
-disable=C, R, W0107, W0603, W0703, W1505
+disable=C, R, W0107, W0511, W0603, W0703, W1505


[REPORTS]
@@ -235,7 +236,7 @@ ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis
-ignored-modules=
+ignored-modules=dnf,libdnf

# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
diff --git a/cgcs-patch/cgcs-patch/test-requirements.txt b/cgcs-patch/cgcs-patch/test-requirements.txt
index 3f4e581..56e4806 100644
--- a/cgcs-patch/cgcs-patch/test-requirements.txt
+++ b/cgcs-patch/cgcs-patch/test-requirements.txt
@@ -8,4 +8,3 @@ coverage!=4.4,>=4.0 # Apache-2.0
mock>=2.0.0 # BSD
stestr>=1.0.0 # Apache-2.0
testtools>=2.2.0 # MIT
-
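
Aside: the removed 0006 patch shown above (now carried upstream) drives package updates through the DNF Python API rather than an external package-manager CLI. Stripped of the agent's state tracking and undo handling, the flow reduces to roughly the sketch below; the function name and package-name argument are illustrative, while the repo ids 'platform-base' and 'platform-updates' and the API calls come from the patch itself:

import dnf
import libdnf.transaction

def apply_platform_updates(pkg_names):
    base = dnf.Base()
    base.conf.installonlypkgs = []          # treat every package as upgradeable in place
    base.read_all_repos()
    for repo in base.repos.all():           # only the platform repos take part
        if repo.id in ('platform-base', 'platform-updates'):
            repo.enable()
        else:
            repo.disable()
    base.fill_sack()                        # load repo and rpmdb metadata

    avail = base.sack.query().available().latest()
    for name in pkg_names:
        pkgs = avail.filter(name=name)
        if pkgs:
            base.package_install(pkgs[0])   # mark the newest available package

    base.resolve()                          # depsolve the marked changes
    base.download_packages(base.transaction.install_set)
    base.do_transaction()

    # The transaction succeeded only if every item reached the DONE state
    ok = all(t.state == libdnf.transaction.TransactionItemState_DONE
             for t in base.transaction)
    base.close()
    return ok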