Merge "Update hacking for Python3"
commit 3cceec2ee5
@@ -159,8 +159,8 @@ if 'SOURCE_DATE_EPOCH' in os.environ:
     now = float(os.environ.get('SOURCE_DATE_EPOCH'))
     html_last_updated_fmt = datetime.datetime.utcfromtimestamp(now).isoformat()
 else:
-    git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
-               "-n1"]
+    git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'",
+               "--date=local", "-n1"]
     try:
         html_last_updated_fmt = subprocess.Popen(
             git_cmd, stdout=subprocess.PIPE).communicate()[0]

@@ -21,12 +21,10 @@ enum-compat==0.0.2
 eventlet==0.25.0
 extras==1.0.0
 fixtures==3.0.0
-flake8==2.5.5
 future==0.16.0
 gitdb2==2.0.3
 GitPython==2.1.8
 greenlet==0.3.2
-hacking==0.11.0
 idna==2.6
 imagesize==1.0.0
 iso8601==0.1.12

@@ -56,12 +54,10 @@ oslo.serialization==2.25.0
 oslo.utils==3.36.0
 PasteDeploy==1.3.3
 pbr==3.1.1
-pep8==1.5.7
 prettytable==0.7.2
 pycparser==2.18
 pyeclib==1.3.1
 pykmip==0.7.0
-pyflakes==0.8.1
 Pygments==2.2.0
 pyparsing==2.2.0
 pyperclip==1.6.0

@@ -158,8 +158,8 @@ html_theme = 'openstackdocs'
 # html_logo = None
 
 # The name of an image file (relative to this directory) to use as a favicon of
-# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
+# the docs. This file should be a Windows icon file (.ico) being 16x16 or
+# 32x32 pixels large.
 #
 # html_favicon = None
 
@@ -382,7 +382,7 @@ class AccountReaper(Daemon):
                     self.stats_return_codes.get(err.http_status // 100, 0) + 1
                 self.logger.increment(
                     'return_codes.%d' % (err.http_status // 100,))
-            except (Timeout, socket.error) as err:
+            except (Timeout, socket.error):
                 self.logger.error(
                     'Timeout Exception with %(ip)s:%(port)s/%(device)s',
                     node)

@@ -433,7 +433,7 @@ class AccountReaper(Daemon):
                     self.stats_return_codes.get(err.http_status // 100, 0) + 1
                 self.logger.increment(
                     'return_codes.%d' % (err.http_status // 100,))
-            except (Timeout, socket.error) as err:
+            except (Timeout, socket.error):
                 self.logger.error(
                     'Timeout Exception with %(ip)s:%(port)s/%(device)s',
                     node)

@@ -509,7 +509,7 @@ class AccountReaper(Daemon):
                     self.stats_return_codes.get(err.http_status // 100, 0) + 1
                 self.logger.increment(
                     'return_codes.%d' % (err.http_status // 100,))
-            except (Timeout, socket.error) as err:
+            except (Timeout, socket.error):
                 failures += 1
                 self.logger.increment('objects_failures')
                 self.logger.error(
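The three hunks above apply one cleanup that recurs throughout this change: when the bound exception object is never used inside the handler, the newer flake8 pulled in by hacking 2.0 reports it as F841 (local variable assigned but never used), so the "as err" binding is dropped. A minimal stand-alone sketch of the pattern, with hypothetical names rather than Swift code:

    import socket

    def probe_node(node):
        # Illustrative helper only; "node" and "connect" are made up.
        try:
            return node.connect()
        # Before: "except (Timeout, socket.error) as err:" -- err was never
        # referenced, which pyflakes flags as F841 under hacking 2.0.
        except (socket.error, socket.timeout):
            return None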
@@ -1164,7 +1164,7 @@ swift-ring-builder <builder_file> rebalance [options]
 
     @staticmethod
     def dispersion():
-        """
+        r"""
 swift-ring-builder <builder_file> dispersion <search_filter> [options]
 
 Output report on dispersion.

@@ -937,8 +937,7 @@ class SimpleClient(object):
                 elif self.attempts > retries:
                     raise ClientException('Raise too many retries',
                                           http_status=err.getcode())
-            except (socket.error, httplib.HTTPException, urllib2.URLError) \
-                    as err:
+            except (socket.error, httplib.HTTPException, urllib2.URLError):
                 if self.attempts > retries:
                     raise
                 sleep(backoff)

@@ -614,7 +614,7 @@ class Server(object):
                               '%(signal)s') %
                             {'server': self.server, 'pid': pid, 'signal': sig})
                 safe_kill(pid, sig, 'swift-%s' % self.server)
-            except InvalidPidFileException as e:
+            except InvalidPidFileException:
                 if kwargs.get('verbose'):
                     print(_('Removing pid file %(pid_file)s with wrong pid '
                             '%(pid)d') % {'pid_file': pid_file, 'pid': pid})

@@ -48,7 +48,7 @@ from swift.common.swob import Request, HTTPBadRequest
 
 FORBIDDEN_CHARS = "\'\"`<>"
 MAX_LENGTH = 255
-FORBIDDEN_REGEXP = "/\./|/\.\./|/\.$|/\.\.$"
+FORBIDDEN_REGEXP = r"/\./|/\.\./|/\.$|/\.\.$"
 
 
 class NameCheckMiddleware(object):
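Most of the literal churn in this commit follows from a single rule: on Python 3, a backslash that does not form a recognised escape (\d, \., \[ and so on) is deprecated inside an ordinary string literal, and the pycodestyle bundled with hacking 2.0 reports it as W605. Adding the r prefix leaves the regular expression unchanged while keeping the literal valid. A short, self-contained illustration (not part of the commit):

    import re
    import warnings

    # Compiling source that contains "\d" in a plain string literal raises the
    # invalid-escape warning (DeprecationWarning on 3.6/3.7, SyntaxWarning on 3.8+).
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        compile('"\\d"', "<demo>", "eval")  # the compiled source text is: "\d"
    print([str(w.message) for w in caught])

    # The pattern itself is identical either way; only the literal form changes.
    assert re.match(r"/\./", "/./") and re.match("/\\./", "/./")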
@@ -82,16 +82,16 @@ Multiple Reseller Prefix Items
 
 The reseller prefix specifies which parts of the account namespace this
 middleware is responsible for managing authentication and authorization.
-By default, the prefix is 'AUTH' so accounts and tokens are prefixed
-by 'AUTH\_'. When a request's token and/or path start with 'AUTH\_', this
+By default, the prefix is ``AUTH`` so accounts and tokens are prefixed
+by ``AUTH_``. When a request's token and/or path start with ``AUTH_``, this
 middleware knows it is responsible.
 
 We allow the reseller prefix to be a list. In tempauth, the first item
 in the list is used as the prefix for tokens and user groups. The
 other prefixes provide alternate accounts that user's can access. For
-example if the reseller prefix list is 'AUTH, OTHER', a user with
-admin access to 'AUTH_account' also has admin access to
-'OTHER_account'.
+example if the reseller prefix list is ``AUTH, OTHER``, a user with
+admin access to ``AUTH_account`` also has admin access to
+``OTHER_account``.
 
 Required Group
 ^^^^^^^^^^^^^^

@@ -112,7 +112,7 @@ derived from the token are appended to the roles derived from
 
 The ``X-Service-Token`` is useful when combined with multiple reseller
 prefix items. In the following configuration, accounts prefixed
-``SERVICE\_`` are only accessible if ``X-Auth-Token`` is from the end-user
+``SERVICE_`` are only accessible if ``X-Auth-Token`` is from the end-user
 and ``X-Service-Token`` is from the ``glance`` user::
 
     [filter:tempauth]
@@ -323,7 +323,7 @@ class HTMLViewer(object):
         if not fulldirs:
             stats.strip_dirs()
         stats.sort_stats(sort)
-        nfl_filter_esc = nfl_filter.replace('(', '\(').replace(')', '\)')
+        nfl_filter_esc = nfl_filter.replace(r'(', r'\(').replace(r')', r'\)')
         amount = [nfl_filter_esc, limit] if nfl_filter_esc else [limit]
         profile_html = self.generate_stats_html(stats, self.app_path,
                                                 profile_id, *amount)

@@ -371,7 +371,7 @@ class HTMLViewer(object):
         if len(log_files) == 0:
             raise NotFoundException(_('no log file found'))
         try:
-            nfl_esc = nfl_filter.replace('(', '\(').replace(')', '\)')
+            nfl_esc = nfl_filter.replace(r'(', r'\(').replace(r')', r'\)')
             # remove the slash that is intentionally added in the URL
             # to avoid failure of filtering stats data.
             if nfl_esc.startswith('/'):

@@ -454,15 +454,15 @@ class HTMLViewer(object):
             fmt = '<span id="L%d" rel="#L%d">%' + max_width\
                 + 'd|<code>%s</code></span>'
             for line in lines:
-                l = html_escape(line)
+                el = html_escape(line)
                 i = i + 1
                 if i == lineno:
                     fmt2 = '<span id="L%d" style="background-color: \
                         rgb(127,255,127)">%' + max_width +\
                         'd|<code>%s</code></span>'
-                    data.append(fmt2 % (i, i, l))
+                    data.append(fmt2 % (i, i, el))
                 else:
-                    data.append(fmt % (i, i, i, l))
+                    data.append(fmt % (i, i, i, el))
             data = ''.join(data)
         except Exception:
             return _('Can not access the file %s.') % file_path
@@ -79,7 +79,7 @@ rebuilding of the composite ring.
 
 The ``id`` of each component RingBuilder is therefore stored in metadata of
 the composite and used to check for the component ordering when the same
-composite ring is re-composed. RingBuilder ``id``\s are normally assigned
+composite ring is re-composed. RingBuilder ``id``\\s are normally assigned
 when a RingBuilder instance is first saved. Older RingBuilder instances
 loaded from file may not have an ``id`` assigned and will need to be saved
 before they can be used as components of a composite ring. This can be

@@ -199,7 +199,7 @@ def is_valid_hostname(hostname):
     if hostname.endswith('.'):
         # strip exactly one dot from the right, if present
         hostname = hostname[:-1]
-    allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
+    allowed = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
     return all(allowed.match(x) for x in hostname.split("."))
 
 
@@ -403,7 +403,7 @@ class BaseStoragePolicy(object):
                 (self.diskfile_module, self.name, err))
         try:
             dfm_cls.check_policy(self)
-        except ValueError as err:
+        except ValueError:
             raise PolicyError(
                 'Invalid diskfile_module %s for policy %s:%s (%s)' %
                 (self.diskfile_module, int(self), self.name, self.policy_type))

@@ -186,7 +186,7 @@ F_SETPIPE_SZ = getattr(fcntl, 'F_SETPIPE_SZ', 1031)
 O_TMPFILE = getattr(os, 'O_TMPFILE', 0o20000000 | os.O_DIRECTORY)
 
 # Used by the parse_socket_string() function to validate IPv6 addresses
-IPV6_RE = re.compile("^\[(?P<address>.*)\](:(?P<port>[0-9]+))?$")
+IPV6_RE = re.compile(r"^\[(?P<address>.*)\](:(?P<port>[0-9]+))?$")
 
 MD5_OF_EMPTY_STRING = 'd41d8cd98f00b204e9800998ecf8427e'
 RESERVED_BYTE = b'\x00'
@@ -3506,7 +3506,7 @@ def affinity_key_function(affinity_str):
     pieces = [s.strip() for s in affinity_str.split(',')]
     for piece in pieces:
         # matches r<number>=<number> or r<number>z<number>=<number>
-        match = re.match("r(\d+)(?:z(\d+))?=(\d+)$", piece)
+        match = re.match(r"r(\d+)(?:z(\d+))?=(\d+)$", piece)
         if match:
             region, zone, priority = match.groups()
             region = int(region)

@@ -3559,7 +3559,7 @@ def affinity_locality_predicate(write_affinity_str):
     pieces = [s.strip() for s in affinity_str.split(',')]
     for piece in pieces:
         # matches r<number> or r<number>z<number>
-        match = re.match("r(\d+)(?:z(\d+))?$", piece)
+        match = re.match(r"r(\d+)(?:z(\d+))?$", piece)
         if match:
             region, zone = match.groups()
             region = int(region)
@@ -1302,7 +1302,7 @@ def run_wsgi(conf_path, app_section, *args, **kwargs):
                           os.getpid(), orig_server_pid)
             try:
                 got_pid = os.read(read_fd, 30)
-            except Exception as e:
+            except Exception:
                 logger.warning('Unexpected exception while reading from '
                                'pipe:', exc_info=True)
             else:
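Dropping the binding in "except Exception as e:" is safe here because the handler logs with exc_info=True, which pulls the active exception and traceback from the interpreter state rather than from the bound name. A hedged, stand-alone sketch of that behaviour:

    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("demo")

    try:
        raise OSError("pipe closed")  # stand-in for the failing os.read()
    except Exception:
        # No "as e" needed: exc_info=True gathers sys.exc_info() when the
        # record is emitted, so the traceback still appears in the log.
        logger.warning('Unexpected exception while reading from '
                       'pipe:', exc_info=True)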
@@ -664,7 +664,7 @@ class ContainerSync(Daemon):
                     self.container_failures += 1
                     self.logger.increment('failures')
                     return False
-        except (Exception, Timeout) as err:
+        except (Exception, Timeout):
             self.logger.exception(
                 _('ERROR Syncing %(db_file)s %(row)s'),
                 {'db_file': str(broker), 'row': row})

@@ -3,7 +3,7 @@
 # process, which may cause wedges in the gate later.
 
 # Hacking already pins down pep8, pyflakes and flake8
-hacking>=0.11.0,<0.12 # Apache-2.0
+hacking>=2.0,<2.1.0 # Apache-2.0
 coverage>=3.6 # Apache-2.0
 nose>=1.3.7 # LGPL
 nosexcover>=1.0.10 # BSD
@@ -248,7 +248,7 @@ def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
     try:
         ring_file_src = _in_process_find_conf_file(conf_src_dir, ring_file_src,
                                                    use_sample=False)
-    except InProcessException as e:
+    except InProcessException:
         if policy_specified:
             raise InProcessException('Failed to find ring file %s'
                                      % ring_file_src)

@@ -104,7 +104,7 @@ class Connection(object):
                 # 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
                 if e.status != 404:
                     raise
-            except Exception as e:
+            except Exception:
                 exceptions.append(''.join(
                     traceback.format_exception(*sys.exc_info())))
         if exceptions:

@@ -186,7 +186,7 @@ def tear_down_s3(conn):
             # 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
             if e.response['ResponseMetadata']['HTTPStatusCode'] != 404:
                 raise
-        except Exception as e:
+        except Exception:
             exceptions.append(''.join(
                 traceback.format_exception(*sys.exc_info())))
     if exceptions:
@@ -16,8 +16,8 @@
 import unittest
 import os
 import test.functional as tf
-from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
-    SubElement
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+    Element, SubElement
 
 from test.functional.s3api import S3ApiBase
 from test.functional.s3api.s3_test_client import Connection

@@ -27,8 +27,8 @@ from hashlib import md5
 from six.moves import zip, zip_longest
 
 import test.functional as tf
-from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
-    SubElement
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+    Element, SubElement
 from swift.common.middleware.s3api.utils import mktime
 
 from test.functional.s3api import S3ApiBase
@@ -260,12 +260,12 @@ class TestAccount(Base):
 
     def testListingLimit(self):
         limit = load_constraint('account_listing_limit')
-        for l in (1, 100, limit / 2, limit - 1, limit, limit + 1, limit * 2):
-            p = {'limit': l}
+        for lim in (1, 100, limit / 2, limit - 1, limit, limit + 1, limit * 2):
+            p = {'limit': lim}
 
-            if l <= limit:
+            if lim <= limit:
                 self.assertLessEqual(len(self.env.account.containers(parms=p)),
-                                     l)
+                                     lim)
                 self.assert_status(200)
             else:
                 self.assertRaises(ResponseError,
|
|||||||
def testContainerNameLimit(self):
|
def testContainerNameLimit(self):
|
||||||
limit = load_constraint('max_container_name_length')
|
limit = load_constraint('max_container_name_length')
|
||||||
|
|
||||||
for l in (limit - 100, limit - 10, limit - 1, limit,
|
for lim in (limit - 100, limit - 10, limit - 1, limit,
|
||||||
limit + 1, limit + 10, limit + 100):
|
limit + 1, limit + 10, limit + 100):
|
||||||
cont = self.env.account.container('a' * l)
|
cont = self.env.account.container('a' * lim)
|
||||||
if l <= limit:
|
if lim <= limit:
|
||||||
self.assertTrue(cont.create())
|
self.assertTrue(cont.create())
|
||||||
self.assert_status((201, 202))
|
self.assert_status((201, 202))
|
||||||
else:
|
else:
|
||||||
@ -1949,10 +1949,10 @@ class TestFile(Base):
|
|||||||
def testNameLimit(self):
|
def testNameLimit(self):
|
||||||
limit = load_constraint('max_object_name_length')
|
limit = load_constraint('max_object_name_length')
|
||||||
|
|
||||||
for l in (1, 10, limit // 2, limit - 1, limit, limit + 1, limit * 2):
|
for lim in (1, 10, limit // 2, limit - 1, limit, limit + 1, limit * 2):
|
||||||
file_item = self.env.container.file('a' * l)
|
file_item = self.env.container.file('a' * lim)
|
||||||
|
|
||||||
if l <= limit:
|
if lim <= limit:
|
||||||
self.assertTrue(file_item.write())
|
self.assertTrue(file_item.write())
|
||||||
self.assert_status(201)
|
self.assert_status(201)
|
||||||
else:
|
else:
|
||||||
|
@@ -98,7 +98,7 @@ class TestContainerMergePolicyIndex(ReplProbeTest):
                     self.object_name,
                     headers={'X-Backend-Storage-Policy-Index':
                              policy_index})
-            except direct_client.ClientException as err:
+            except direct_client.ClientException:
                 continue
             orig_policy_index = policy_index
             break

@@ -29,7 +29,7 @@ from test.probe.common import ReplProbeTest
 from swift.common.request_helpers import get_reserved_name
 from swift.common.utils import readconf
 
-EXCLUDE_FILES = re.compile('^(hashes\.(pkl|invalid)|lock(-\d+)?)$')
+EXCLUDE_FILES = re.compile(r'^(hashes\.(pkl|invalid)|lock(-\d+)?)$')
 
 
 def collect_info(path_list):
@@ -438,7 +438,7 @@ aliases = %s
         self.recon_instance.quarantine_check(hosts)
 
         output = stdout.getvalue()
-        r = re.compile("\[quarantined_(.*)\](.*)")
+        r = re.compile(r"\[quarantined_(.*)\](.*)")
         for line in output.splitlines():
             m = r.match(line)
             if m:

@@ -473,7 +473,7 @@ aliases = %s
         self.recon_instance.async_check(hosts)
 
         output = stdout.getvalue()
-        r = re.compile("\[async_pending(.*)\](.*)")
+        r = re.compile(r"\[async_pending(.*)\](.*)")
         lines = output.splitlines()
         self.assertTrue(lines)
         for line in lines:

@@ -514,7 +514,7 @@ aliases = %s
         self.recon_instance.umount_check(hosts)
 
         output = stdout.getvalue()
-        r = re.compile("^Not mounted:|Device errors: .*")
+        r = re.compile(r"^Not mounted:|Device errors: .*")
        lines = output.splitlines()
         self.assertTrue(lines)
         for line in lines:

@@ -548,7 +548,7 @@ aliases = %s
         self.recon_instance.driveaudit_check(hosts)
 
         output = stdout.getvalue()
-        r = re.compile("\[drive_audit_errors(.*)\](.*)")
+        r = re.compile(r"\[drive_audit_errors(.*)\](.*)")
         lines = output.splitlines()
         self.assertTrue(lines)
         for line in lines:
@@ -2192,7 +2192,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
         with mock.patch("sys.stdout", mock_stdout):
             with mock.patch("sys.stderr", mock_stderr):
                 self.assertSystemExit(EXIT_SUCCESS, ringbuilder.main, argv)
-        ring_not_found_re = re.compile("Ring file .*\.ring\.gz not found")
+        ring_not_found_re = re.compile(r"Ring file .*\.ring\.gz not found")
         self.assertTrue(ring_not_found_re.findall(mock_stdout.getvalue()))
 
         # write ring file

@@ -2204,7 +2204,9 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
         with mock.patch("sys.stdout", mock_stdout):
             with mock.patch("sys.stderr", mock_stderr):
                 self.assertSystemExit(EXIT_SUCCESS, ringbuilder.main, argv)
-        ring_up_to_date_re = re.compile("Ring file .*\.ring\.gz is up-to-date")
+        ring_up_to_date_re = re.compile(
+            r"Ring file .*\.ring\.gz is up-to-date"
+        )
         self.assertTrue(ring_up_to_date_re.findall(mock_stdout.getvalue()))
 
         # change builder (set weight)

@@ -2216,7 +2218,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
         with mock.patch("sys.stdout", mock_stdout):
             with mock.patch("sys.stderr", mock_stderr):
                 self.assertSystemExit(EXIT_SUCCESS, ringbuilder.main, argv)
-        ring_obsolete_re = re.compile("Ring file .*\.ring\.gz is obsolete")
+        ring_obsolete_re = re.compile(r"Ring file .*\.ring\.gz is obsolete")
         self.assertTrue(ring_obsolete_re.findall(mock_stdout.getvalue()))
 
         # write ring file

@@ -2238,7 +2240,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
         with mock.patch("sys.stdout", mock_stdout):
             with mock.patch("sys.stderr", mock_stderr):
                 self.assertSystemExit(EXIT_SUCCESS, ringbuilder.main, argv)
-        ring_invalid_re = re.compile("Ring file .*\.ring\.gz is invalid")
+        ring_invalid_re = re.compile(r"Ring file .*\.ring\.gz is invalid")
         self.assertTrue(ring_invalid_re.findall(mock_stdout.getvalue()))
 
     def test_default_no_device_ring_without_exception(self):
@@ -26,8 +26,8 @@ from swift.common.swob import Request
 from test.unit import make_timestamp_iter
 from test.unit.common.middleware.s3api import S3ApiTestCase
 from test.unit.common.middleware.s3api.helpers import UnreadableInput
-from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
-    SubElement
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+    Element, SubElement
 from test.unit.common.middleware.s3api.test_s3_acl import s3acl
 
 

@@ -30,7 +30,7 @@ from swift.common import utils
 
 MAX_LENGTH = 255
 FORBIDDEN_CHARS = '\'\"<>`'
-FORBIDDEN_REGEXP = "/\./|/\.\./|/\.$|/\.\.$"
+FORBIDDEN_REGEXP = r"/\./|/\.\./|/\.$|/\.\.$"
 
 
 class FakeApp(object):
@@ -94,7 +94,7 @@ class TestNameCheckMiddleware(unittest.TestCase):
         self.assertEqual(resp.status_int, 400)
 
     def test_invalid_regexp(self):
-        for s in ['/.', '/..', '/./foo', '/../foo']:
+        for s in [r'/.', r'/..', r'/./foo', r'/../foo']:
             path = '/V1.0/' + s
             resp = Request.blank(
                 path, environ={'REQUEST_METHOD': 'PUT'}).get_response(

@@ -107,7 +107,7 @@ class TestNameCheckMiddleware(unittest.TestCase):
         self.assertEqual(resp.status_int, 400)
 
     def test_valid_regexp(self):
-        for s in ['/...', '/.\.', '/foo']:
+        for s in [r'/...', r'/.\.', r'/foo']:
             path = '/V1.0/' + s
             resp = Request.blank(
                 path, environ={'REQUEST_METHOD': 'PUT'}).get_response(

@@ -137,7 +137,7 @@ class TestSwiftInfo(unittest.TestCase):
     def test_registered_configured_options(self):
         conf = {'maximum_length': 512,
                 'forbidden_chars': '\'\"`',
-                'forbidden_regexp': "/\./|/\.\./|/\.$"}
+                'forbidden_regexp': r"/\./|/\.\./|/\.$"}
         name_check.filter_factory(conf)(FakeApp())
         swift_info = utils.get_swift_info()
         self.assertTrue('name_check' in swift_info)

@@ -145,7 +145,7 @@ class TestSwiftInfo(unittest.TestCase):
         self.assertEqual(set(swift_info['name_check'].get('forbidden_chars')),
                          set('\'\"`'))
         self.assertEqual(swift_info['name_check'].get('forbidden_regexp'),
-                         "/\./|/\.\./|/\.$")
+                         r"/\./|/\.\./|/\.$")
 
 if __name__ == '__main__':
     unittest.main()
@@ -2281,7 +2281,6 @@ class ObjectVersioningTestContainerOperations(ObjectVersioningBaseTestCase):
             'name': 'unexpected-symlink',
             'hash': 'd41d8cd98f00b204e9800998ecf8427e',
             'last_modified': '2019-07-26T15:09:54.518990',
-            'content_type': 'application/foo',
             'symlink_bytes': 9,
             'symlink_path': '/v1/a/tgt_container/tgt_obj',
             'symlink_etag': 'e55cedc11adb39c404b7365f7d6291fa',
@@ -5688,7 +5688,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
                         'some.counter')
         self.assertStat('some-name.some.operation:4900.0|ms',
                         self.logger.timing, 'some.operation', 4.9 * 1000)
-        self.assertStatMatches('some-name\.another\.operation:\d+\.\d+\|ms',
+        self.assertStatMatches(r'some-name\.another\.operation:\d+\.\d+\|ms',
                                self.logger.timing_since, 'another.operation',
                                time.time())
         self.assertStat('some-name.another.counter:42|c',

@@ -5703,7 +5703,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
         self.assertStat('pfx.some.operation:4900.0|ms|@0.972',
                         self.logger.timing, 'some.operation', 4.9 * 1000,
                         sample_rate=0.972)
-        self.assertStatMatches('pfx\.another\.op:\d+\.\d+\|ms|@0.972',
+        self.assertStatMatches(r'pfx\.another\.op:\d+\.\d+\|ms|@0.972',
                                self.logger.timing_since, 'another.op',
                                time.time(), sample_rate=0.972)
         self.assertStat('pfx.another.counter:3|c|@0.972',

@@ -5719,7 +5719,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
         self.assertStat('some.operation:4900.0|ms|@0.939',
                         self.logger.timing, 'some.operation',
                         4.9 * 1000, 0.939)
-        self.assertStatMatches('another\.op:\d+\.\d+\|ms|@0.939',
+        self.assertStatMatches(r'another\.op:\d+\.\d+\|ms|@0.939',
                                self.logger.timing_since, 'another.op',
                                time.time(), 0.939)
         self.assertStat('another.counter:3|c|@0.939',

@@ -5737,7 +5737,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
                         'some.counter')
         self.assertStat('pfx.some.operation:4760.0|ms|@0.93',
                         self.logger.timing, 'some.operation', 4.76 * 1000)
-        self.assertStatMatches('pfx\.another\.op:\d+\.\d+\|ms|@0.93',
+        self.assertStatMatches(r'pfx\.another\.op:\d+\.\d+\|ms|@0.93',
                                self.logger.timing_since, 'another.op',
                                time.time())
         self.assertStat('pfx.another.counter:3|c|@0.93',
@@ -5751,7 +5751,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
         self.assertStat('pfx.some.operation:4900.0|ms|@0.9912',
                         self.logger.timing, 'some.operation', 4.9 * 1000,
                         sample_rate=0.9912)
-        self.assertStatMatches('pfx\.another\.op:\d+\.\d+\|ms|@0.9912',
+        self.assertStatMatches(r'pfx\.another\.op:\d+\.\d+\|ms|@0.9912',
                                self.logger.timing_since, 'another.op',
                                time.time(), sample_rate=0.9912)
         self.assertStat('pfx.another.counter:3|c|@0.9912',

@@ -5767,7 +5767,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
         self.assertStat('some.operation:4900.0|ms|@0.987654',
                         self.logger.timing, 'some.operation',
                         4.9 * 1000, 0.987654)
-        self.assertStatMatches('another\.op:\d+\.\d+\|ms|@0.987654',
+        self.assertStatMatches(r'another\.op:\d+\.\d+\|ms|@0.987654',
                                self.logger.timing_since, 'another.op',
                                time.time(), 0.987654)
         self.assertStat('another.counter:3|c|@0.987654',

@@ -5787,7 +5787,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
         self.assertStat('alpha.beta.pfx.some.operation:4760.0|ms',
                         self.logger.timing, 'some.operation', 4.76 * 1000)
         self.assertStatMatches(
-            'alpha\.beta\.pfx\.another\.op:\d+\.\d+\|ms',
+            r'alpha\.beta\.pfx\.another\.op:\d+\.\d+\|ms',
             self.logger.timing_since, 'another.op', time.time())
         self.assertStat('alpha.beta.pfx.another.counter:3|c',
                         self.logger.update_stats, 'another.counter', 3)

@@ -5801,7 +5801,8 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
         self.assertStat('alpha.beta.some.operation:4900.0|ms|@0.9912',
                         self.logger.timing, 'some.operation', 4.9 * 1000,
                         sample_rate=0.9912)
-        self.assertStatMatches('alpha\.beta\.another\.op:\d+\.\d+\|ms|@0.9912',
+        self.assertStatMatches(
+            r'alpha\.beta\.another\.op:\d+\.\d+\|ms|@0.9912',
                                self.logger.timing_since, 'another.op',
                                time.time(), sample_rate=0.9912)
         self.assertStat('alpha.beta.another.counter:3|c|@0.9912',
@@ -1262,7 +1262,7 @@ class TestContainerController(unittest.TestCase):
         try:
             with Timeout(3):
                 resp = req.get_response(self.controller)
-        except BaseException as err:
+        except BaseException:
             got_exc = True
         finally:
             err = event.wait()

@@ -2337,7 +2337,7 @@ class TestContainerController(unittest.TestCase):
         try:
             with Timeout(3):
                 resp = req.get_response(self.controller)
-        except BaseException as err:
+        except BaseException:
             got_exc = True
         finally:
             err = event.wait()
@@ -5027,7 +5027,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
 
         with open('/dev/null', 'w') as devnull:
             exc_re = (r'tee\(\) failed: tried to move \d+ bytes, but only '
-                      'moved -?\d+')
+                      r'moved -?\d+')
             try:
                 reader.zero_copy_send(devnull.fileno())
             except Exception as e:
@@ -381,8 +381,8 @@ class TestReceiver(unittest.TestCase):
             b':UPDATES: START', b':UPDATES: END'])
         self.assertRegexpMatches(
             b''.join(body_lines2),
-            b"^:ERROR: 0 '0\.0[0-9]+ seconds: "
-            b"/.+/sda1/objects/1/.lock-replication'$")
+            br"^:ERROR: 0 '0\.0[0-9]+ seconds: "
+            br"/.+/sda1/objects/1/.lock-replication'$")
 
     def test_SSYNC_initial_path(self):
         with mock.patch.object(
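Here the b prefix is kept and r is simply added in front of it: br"..." is a bytes literal whose backslashes are left untouched, which is what the bytes pattern passed to assertRegexpMatches expects. A quick check, independent of the test above:

    import re

    assert br"\d" == b"\\d"  # raw bytes literal: the backslash survives intact
    assert re.search(br"0\.0[0-9]+", b"0.042 seconds")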
@@ -2030,7 +2030,7 @@ class BaseTestObjectController(object):
         self.app.update_request(req)
         try:
             res = method(req)
-        except HTTPException as res:
+        except HTTPException as res:  # noqa: F841
             pass
         self.assertEqual(res.status_int, expected)
 

@@ -2043,7 +2043,7 @@ class BaseTestObjectController(object):
         self.app.update_request(req)
         try:
             res = method(req)
-        except HTTPException as res:
+        except HTTPException as res:  # noqa: F841
             pass
         self.assertEqual(res.status_int, expected)
 

@@ -3596,7 +3596,7 @@ class TestReplicatedObjectController(
         self.app.update_request(req)
         try:
             res = controller.PUT(req)
-        except HTTPException as res:
+        except HTTPException as res:  # noqa: F841
             pass
         expected = str(expected)
         self.assertEqual(res.status[:len(expected)], expected)

@@ -3628,7 +3628,7 @@ class TestReplicatedObjectController(
         self.app.update_request(req)
         try:
             res = controller.PUT(req)
-        except HTTPException as res:
+        except HTTPException as res:  # noqa: F841
             pass
         expected = str(expected)
         self.assertEqual(res.status[:len(expected)], expected)

@@ -3673,7 +3673,7 @@ class TestReplicatedObjectController(
         self.app.update_request(req)
         try:
             res = controller.PUT(req)
-        except HTTPException as res:
+        except HTTPException as res:  # noqa: F841
             pass
         expected = str(expected)
         self.assertEqual(res.status[:len(str(expected))],
@@ -9935,7 +9935,7 @@ class TestContainerController(unittest.TestCase):
         self.assertEqual(3, len(timestamps))
         for timestamp in timestamps:
             self.assertEqual(timestamp, timestamps[0])
-            self.assertTrue(re.match('[0-9]{10}\.[0-9]{5}', timestamp))
+            self.assertTrue(re.match(r'[0-9]{10}\.[0-9]{5}', timestamp))
 
     def test_DELETE_backed_x_timestamp_header(self):
         timestamps = []

@@ -9961,7 +9961,7 @@ class TestContainerController(unittest.TestCase):
         self.assertEqual(3, len(timestamps))
         for timestamp in timestamps:
             self.assertEqual(timestamp, timestamps[0])
-            self.assertTrue(re.match('[0-9]{10}\.[0-9]{5}', timestamp))
+            self.assertTrue(re.match(r'[0-9]{10}\.[0-9]{5}', timestamp))
 
     def test_node_read_timeout_retry_to_container(self):
         with save_globals():
tox.ini
@@ -127,7 +127,15 @@ commands = bandit -c bandit.yaml -r swift -n 5
 # H404: multi line docstring should start without a leading new line
 # H405: multi line docstring summary not separated with an empty line
 # H501: Do not use self.__dict__ for string formatting
-ignore = H101,H202,H301,H306,H404,H405,H501
+# Disabled with going to hacking 2.0, needs further investigation and
+# changes to enable:
+# E305 expected 2 blank lines after class or function definition, found 1
+# E402: module level import not at top of file
+# E731 do not assign a lambda expression, use a def
+# Swift team needs to decide if they want to enable either of these:
+# W503: line break before binary operator
+# W504: line break after binary operator
+ignore = H101,H202,H301,H306,H404,H405,H501,W503,W504,E305,E402,E731
 exclude = .venv,.tox,dist,*egg
 filename = *.py,bin/*
 show-source = True
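The ignore list grows because hacking 2.0 pulls in a newer flake8/pycodestyle that enables additional checks; rather than reworking the whole tree in this commit, the new warnings are documented and silenced. Hedged examples of the kind of code each newly ignored check would flag (illustrative only, not taken from Swift):

    # E731: assigning a lambda expression instead of using a def
    status_class = lambda status: status // 100

    # W503 vs. W504: one of the pair always fires when a binary operator is
    # split across lines, so both stay ignored for now
    total = (1 +
             2)

    # E402: module level import not at the top of the file (code precedes it)
    import sys

    print(status_class(204), total, sys.version_info[0])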