Merge "Get functional/tests.py running under py3"
This commit is contained in: commit 72a0115514
@@ -3,7 +3,7 @@
 # How-To debug functional tests:
 # SWIFT_TEST_IN_PROCESS=1 tox -e func -- --pdb test.functional.tests.TestFile.testCopy
 
-SRC_DIR=$(python -c "import os; print os.path.dirname(os.path.realpath('$0'))")
+SRC_DIR=$(python -c "import os; print(os.path.dirname(os.path.realpath('$0')))")
 
 cd ${SRC_DIR} > /dev/null
 export TESTS_DIR=${SRC_DIR}/test/functional
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-SRC_DIR=$(python -c "import os; print os.path.dirname(os.path.realpath('$0'))")
+SRC_DIR=$(python -c "import os; print(os.path.dirname(os.path.realpath('$0')))")
 
 cd ${SRC_DIR}/test/probe
 nosetests --exe $@
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-TOP_DIR=$(python -c "import os; print os.path.dirname(os.path.realpath('$0'))")
+TOP_DIR=$(python -c "import os; print(os.path.dirname(os.path.realpath('$0')))")
 
 python -c 'from distutils.version import LooseVersion as Ver; import nose, sys; sys.exit(0 if Ver(nose.__version__) >= Ver("1.2.0") else 1)'
 if [ $? != 0 ]; then
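The only change these wrapper scripts needed was the function form of print in the inline Python, which both interpreters accept. A quick illustration (the path argument below is made up):

    # print(...) parses as a function call on python3 and as a parenthesized
    # expression statement on python2, so one spelling runs under both
    import os
    print(os.path.dirname(os.path.realpath('/opt/swift/.unittests')))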
@@ -41,7 +41,11 @@ if six.PY2:
     httplib = eventlet.import_patched('httplib')
 else:
     httplib = eventlet.import_patched('http.client')
-httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT
+# Apparently http.server uses this to decide when/whether to send a 431.
+# Give it some slack, so the app is more likely to get the chance to reject
+# with a 400 instead.
+httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6
 
 
 class BufferedHTTPResponse(HTTPResponse):
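The 1.6 factor simply buys headroom between Swift's own header-count limit and the one enforced deep inside http.client/http.server. A rough sketch of the arithmetic, assuming the stock default of MAX_HEADER_COUNT = 32 (that value is an assumption, not part of this diff):

    MAX_HEADER_COUNT = 32                          # assumed Swift default
    patched_maxheaders = MAX_HEADER_COUNT * 1.6    # 51.2
    incoming_header_count = 40
    # at the old limit the stdlib would answer 431 before Swift ever saw the
    # request; with the slack, Swift's constraint check can reply 400 instead
    assert incoming_header_count > MAX_HEADER_COUNT
    assert incoming_header_count < patched_maxheaders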
@@ -67,7 +67,7 @@ import time
 
 import six
 
-from swift.common.swob import Range
+from swift.common.swob import Range, bytes_to_wsgi
 from swift.common.utils import json, public, reiterate
 from swift.common.db import utf8encode
 
@@ -529,7 +529,8 @@ class UploadController(Controller):
         objects = json.loads(resp.body)
         for o in objects:
             container = req.container_name + MULTIUPLOAD_SUFFIX
-            req.get_response(self.app, container=container, obj=o['name'])
+            obj = bytes_to_wsgi(o['name'].encode('utf-8'))
+            req.get_response(self.app, container=container, obj=obj)
 
         return HTTPNoContent()
 
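Object names come out of the JSON listing as native (unicode) strings, but the sub-request path has to be built from WSGI strings, so each name is re-encoded to UTF-8 and wrapped with bytes_to_wsgi. A minimal sketch of the intended py3 behaviour (the helper body is an assumption based on Swift's WSGI-string convention, not copied from swob):

    def bytes_to_wsgi(value):
        # assumed py3 behaviour: latin-1 decode, so each byte becomes exactly
        # one character of the resulting WSGI str
        return value.decode('latin-1')

    name = u'v\u00e9lo/part-0001'          # native str from json.loads()
    wsgi_obj = bytes_to_wsgi(name.encode('utf-8'))
    assert wsgi_obj.encode('latin-1') == name.encode('utf-8')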
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from swift.common.swob import bytes_to_wsgi
 from swift.common.utils import json, public
 
 from swift.common.middleware.s3api.controllers.base import Controller
@@ -51,8 +52,9 @@ class ServiceController(Controller):
         buckets = SubElement(elem, 'Buckets')
         for c in containers:
             if self.conf.s3_acl and self.conf.check_bucket_owner:
+                container = bytes_to_wsgi(c['name'].encode('utf8'))
                 try:
-                    req.get_response(self.app, 'HEAD', c['name'])
+                    req.get_response(self.app, 'HEAD', container)
                 except AccessDenied:
                     continue
                 except NoSuchBucket:
@@ -942,16 +942,11 @@ class Request(object):
                        'https': 443}.get(parsed_path.scheme, 80)
         if parsed_path.scheme and parsed_path.scheme not in ['http', 'https']:
             raise TypeError('Invalid scheme: %s' % parsed_path.scheme)
-        if six.PY2:
-            path_info = urllib.parse.unquote(parsed_path.path)
-        else:
-            path_info = urllib.parse.unquote(parsed_path.path,
-                                             encoding='latin-1')
         env = {
             'REQUEST_METHOD': 'GET',
             'SCRIPT_NAME': '',
             'QUERY_STRING': parsed_path.query,
-            'PATH_INFO': path_info,
+            'PATH_INFO': wsgi_unquote(parsed_path.path),
             'SERVER_NAME': server_name,
             'SERVER_PORT': str(server_port),
             'HTTP_HOST': '%s:%d' % (server_name, server_port),
@@ -1038,13 +1033,8 @@ class Request(object):
     @property
     def path(self):
         "Provides the full path of the request, excluding the QUERY_STRING"
-        if six.PY2:
-            return urllib.parse.quote(self.environ.get('SCRIPT_NAME', '') +
-                                      self.environ['PATH_INFO'])
-        else:
-            return urllib.parse.quote(self.environ.get('SCRIPT_NAME', '') +
-                                      self.environ['PATH_INFO'],
-                                      encoding='latin-1')
+        return wsgi_quote(self.environ.get('SCRIPT_NAME', '') +
+                          self.environ['PATH_INFO'])
 
     @property
     def swift_entity_path(self):
@@ -1482,7 +1472,7 @@ class Response(object):
                 realm = 'unknown'
         except (AttributeError, ValueError):
             realm = 'unknown'
-        return 'Swift realm="%s"' % urllib.parse.quote(realm)
+        return 'Swift realm="%s"' % wsgi_quote(realm)
 
     @property
     def is_success(self):
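The repeated six.PY2 branches collapse into the wsgi_quote/wsgi_unquote helpers: under the WSGI-string convention a py3 str stands in for raw bytes, so quoting has to treat it as latin-1 to keep the round trip byte-for-byte. A sketch of what those helpers are assumed to do, consistent with the branches removed above (not the exact swob implementations):

    import six
    from six.moves import urllib

    def wsgi_quote(wsgi_str):
        if six.PY2:
            return urllib.parse.quote(wsgi_str)
        return urllib.parse.quote(wsgi_str, encoding='latin-1')

    def wsgi_unquote(wsgi_str):
        if six.PY2:
            return urllib.parse.unquote(wsgi_str)
        return urllib.parse.unquote(wsgi_str, encoding='latin-1')

    path = '/v1/AUTH_test/caf\xc3\xa9'     # WSGI str carrying UTF-8 bytes
    assert wsgi_unquote(wsgi_quote(path)) == path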
@@ -57,7 +57,7 @@ from swift.common.http import is_informational, is_success, is_redirection, \
     HTTP_INSUFFICIENT_STORAGE, HTTP_UNAUTHORIZED, HTTP_CONTINUE, HTTP_GONE
 from swift.common.swob import Request, Response, Range, \
     HTTPException, HTTPRequestedRangeNotSatisfiable, HTTPServiceUnavailable, \
-    status_map
+    status_map, wsgi_to_str, str_to_wsgi, wsgi_quote
 from swift.common.request_helpers import strip_sys_meta_prefix, \
     strip_user_meta_prefix, is_user_meta, is_sys_meta, is_sys_or_user_meta, \
     http_response_to_document_iters, is_object_transient_sysmeta, \
@@ -327,8 +327,10 @@ def get_container_info(env, app, swift_source=None):
     This call bypasses auth. Success does not imply that the request has
     authorization to the container.
     """
-    (version, account, container, unused) = \
+    (version, wsgi_account, wsgi_container, unused) = \
         split_path(env['PATH_INFO'], 3, 4, True)
+    account = wsgi_to_str(wsgi_account)
+    container = wsgi_to_str(wsgi_container)
 
     # Check in environment cache and in memcache (in that order)
     info = _get_info_from_caches(app, env, account, container)
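The pattern throughout this file: split the WSGI PATH_INFO, keep the WSGI-str pieces for building further WSGI paths, and convert to native strings only where the values are used as cache keys or returned. A sketch of the assumed py3 behaviour of the two swob helpers (UTF-8 carried over a latin-1, one-character-per-byte str):

    def wsgi_to_str(wsgi_str):
        return wsgi_str.encode('latin-1').decode('utf-8', 'surrogateescape')

    def str_to_wsgi(native_str):
        return native_str.encode('utf-8', 'surrogateescape').decode('latin-1')

    wsgi_path = '/v1/AUTH_test/caf\xc3\xa9'   # as found in environ['PATH_INFO']
    assert wsgi_to_str(wsgi_path) == u'/v1/AUTH_test/caf\u00e9'
    assert str_to_wsgi(wsgi_to_str(wsgi_path)) == wsgi_path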
@@ -350,7 +352,7 @@ def get_container_info(env, app, swift_source=None):
         return headers_to_container_info({}, 0)
 
     req = _prepare_pre_auth_info_request(
-        env, ("/%s/%s/%s" % (version, account, container)),
+        env, ("/%s/%s/%s" % (version, wsgi_account, wsgi_container)),
         (swift_source or 'GET_CONTAINER_INFO'))
     resp = req.get_response(app)
     # Check in infocache to see if the proxy (or anyone else) already
@@ -395,7 +397,8 @@ def get_account_info(env, app, swift_source=None):
 
     :raises ValueError: when path doesn't contain an account
     """
-    (version, account, _junk, _junk) = split_path(env['PATH_INFO'], 2, 4, True)
+    (version, wsgi_account, _junk) = split_path(env['PATH_INFO'], 2, 3, True)
+    account = wsgi_to_str(wsgi_account)
 
     # Check in environment cache and in memcache (in that order)
     info = _get_info_from_caches(app, env, account)
@@ -404,7 +407,7 @@ def get_account_info(env, app, swift_source=None):
     if not info:
         env.setdefault('swift.infocache', {})
         req = _prepare_pre_auth_info_request(
-            env, "/%s/%s" % (version, account),
+            env, "/%s/%s" % (version, wsgi_account),
             (swift_source or 'GET_ACCOUNT_INFO'))
         resp = req.get_response(app)
     # Check in infocache to see if the proxy (or anyone else) already
@@ -625,7 +628,7 @@ def _prepare_pre_auth_info_request(env, path, swift_source):
     Prepares a pre authed request to obtain info using a HEAD.
 
     :param env: the environment used by the current request
-    :param path: The unquoted request path
+    :param path: The unquoted, WSGI-str request path
     :param swift_source: value for swift.source in WSGI environment
     :returns: the pre authed request
     """
@@ -641,7 +644,7 @@ def _prepare_pre_auth_info_request(env, path, swift_source):
     newenv['swift_owner'] = True
 
     # Note that Request.blank expects quoted path
-    return Request.blank(quote(path), environ=newenv)
+    return Request.blank(wsgi_quote(path), environ=newenv)
 
 
 def get_info(app, env, account, container=None, swift_source=None):
@@ -685,9 +688,9 @@ def _get_object_info(app, env, account, container, obj, swift_source=None):
 
     :param app: the application object
     :param env: the environment used by the current request
-    :param account: The unquoted name of the account
-    :param container: The unquoted name of the container
-    :param obj: The unquoted name of the object
+    :param account: The unquoted, WSGI-str name of the account
+    :param container: The unquoted, WSGI-str name of the container
+    :param obj: The unquoted, WSGI-str name of the object
     :returns: the cached info or None if cannot be retrieved
     """
     cache_key = get_cache_key(account, container, obj)
@@ -1584,7 +1587,7 @@ class Controller(object):
        """
        Get account information, and also verify that the account exists.
 
-        :param account: name of the account to get the info for
+        :param account: native str name of the account to get the info for
        :param req: caller's HTTP request context object (optional)
        :returns: tuple of (account partition, account nodes, container_count)
                  or (None, None, None) if it does not exist
@@ -1596,7 +1599,7 @@ class Controller(object):
             env = {}
         env.setdefault('swift.infocache', {})
         path_env = env.copy()
-        path_env['PATH_INFO'] = "/v1/%s" % (account,)
+        path_env['PATH_INFO'] = "/v1/%s" % (str_to_wsgi(account),)
 
         info = get_account_info(path_env, self.app)
         if (not info
@@ -1611,8 +1614,8 @@ class Controller(object):
        Get container information and thusly verify container existence.
        This will also verify account existence.
 
-        :param account: account name for the container
-        :param container: container name to look up
+        :param account: native-str account name for the container
+        :param container: native-str container name to look up
        :param req: caller's HTTP request context object (optional)
        :returns: dict containing at least container partition ('partition'),
                  container nodes ('containers'), container read
@@ -1627,7 +1630,8 @@ class Controller(object):
             env = {}
         env.setdefault('swift.infocache', {})
         path_env = env.copy()
-        path_env['PATH_INFO'] = "/v1/%s/%s" % (account, container)
+        path_env['PATH_INFO'] = "/v1/%s/%s" % (
+            str_to_wsgi(account), str_to_wsgi(container))
         info = get_container_info(path_env, self.app)
         if not info or not is_success(info.get('status')):
             info = headers_to_container_info({}, 0)
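On the controller side the conversion runs the other way: account and container names arrive as native strings, and str_to_wsgi is applied only at the moment a PATH_INFO is assembled. Reusing the str_to_wsgi sketch above with illustrative values:

    account, container = u'AUTH_test', u'caf\u00e9'
    path_info = "/v1/%s/%s" % (str_to_wsgi(account), str_to_wsgi(container))
    assert path_info == '/v1/AUTH_test/caf\xc3\xa9'   # one character per byte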
@@ -574,7 +574,7 @@ def in_process_setup(the_object_server=object_server):
             "Content-Language, Expires, X-Robots-Tag",
         # Below are values used by the functional test framework, as well as
         # by the various in-process swift servers
-        'auth_uri': 'http://127.0.0.1:%d/auth/v1.0' % prolis.getsockname()[1],
+        'auth_uri': 'http://127.0.0.1:%d/auth/v1.0/' % prolis.getsockname()[1],
         # Primary functional test account (needs admin access to the
         # account)
         'account': 'test',
@@ -848,9 +848,10 @@ def setup_package():
         # improve it to take a s3_storage_url option
         parsed = urlsplit(config['auth_uri'])
         config.update({
-            'auth_ssl': parsed.scheme == 'https',
+            'auth_ssl': str(parsed.scheme == 'https'),
             'auth_host': parsed.hostname,
-            'auth_port': (parsed.port if parsed.port is not None else
+            'auth_port': str(
+                parsed.port if parsed.port is not None else
                 443 if parsed.scheme == 'https' else 80),
             'auth_prefix': parsed.path,
         })
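The stringified auth_ssl and auth_port values are presumably there so the in-process config looks like one read from test.conf, where every option is a string. For example (URL below is illustrative):

    from six.moves.urllib.parse import urlsplit

    parsed = urlsplit('http://127.0.0.1:8080/auth/v1.0/')
    auth_port = str(parsed.port if parsed.port is not None
                    else 443 if parsed.scheme == 'https' else 80)
    assert auth_port == '8080'
    assert str(parsed.scheme == 'https') == 'False'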
@@ -32,6 +32,7 @@ from swiftclient import get_auth
 
 from swift.common import constraints
 from swift.common.http import is_success
+from swift.common.swob import str_to_wsgi, wsgi_to_str
 from swift.common.utils import config_true_value
 
 from test import safe_repr
@@ -324,7 +325,7 @@ class Connection(object):
         if path:
             quote = urllib.parse.quote
             if cfg.get('no_quote') or cfg.get('no_path_quote'):
-                quote = lambda x: x
+                quote = str_to_wsgi
             return '%s/%s' % (self.storage_path,
                               '/'.join([quote(i) for i in path]))
         else:
@@ -342,7 +343,8 @@ class Connection(object):
             headers['X-Auth-Token'] = cfg.get('use_token')
 
         if isinstance(hdrs, dict):
-            headers.update(hdrs)
+            headers.update((str_to_wsgi(h), str_to_wsgi(v))
+                           for h, v in hdrs.items())
         return headers
 
     def make_request(self, method, path=None, data=b'', hdrs=None, parms=None,
@@ -489,7 +491,10 @@ class Base(object):
             'x-container-bytes-used',
         )
 
-        headers = dict(self.conn.response.getheaders())
+        # NB: on py2, headers are always lower; on py3, they match the bytes
+        # on the wire
+        headers = dict((wsgi_to_str(h).lower(), wsgi_to_str(v))
+                       for h, v in self.conn.response.getheaders())
         ret = {}
 
         for return_key, header in required_fields:
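Header lookups in the test client all get the same treatment: py2's httplib lower-cases header names, while py3's http.client reports them as they appeared on the wire, so names are normalized (and values converted back to native strings) before any dict lookup. A small illustration with made-up header data:

    raw_headers = [('Content-Type', 'text/plain'),
                   ('Etag', 'd41d8cd98f00b204e9800998ecf8427e')]
    headers = dict((h.lower(), v) for h, v in raw_headers)
    assert headers['content-type'] == 'text/plain'
    assert headers['etag'] == 'd41d8cd98f00b204e9800998ecf8427e'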
@@ -954,17 +959,19 @@ class File(Base):
             raise ResponseError(self.conn.response, 'HEAD',
                                 self.conn.make_path(self.path))
 
-        for hdr in self.conn.response.getheaders():
-            if hdr[0].lower() == 'content-type':
-                self.content_type = hdr[1]
-            if hdr[0].lower().startswith('x-object-meta-'):
-                self.metadata[hdr[0][14:]] = hdr[1]
-            if hdr[0].lower() == 'etag':
-                self.etag = hdr[1]
-            if hdr[0].lower() == 'content-length':
-                self.size = int(hdr[1])
-            if hdr[0].lower() == 'last-modified':
-                self.last_modified = hdr[1]
+        for hdr, val in self.conn.response.getheaders():
+            hdr = wsgi_to_str(hdr).lower()
+            val = wsgi_to_str(val)
+            if hdr == 'content-type':
+                self.content_type = val
+            if hdr.startswith('x-object-meta-'):
+                self.metadata[hdr[14:]] = val
+            if hdr == 'etag':
+                self.etag = val
+            if hdr == 'content-length':
+                self.size = int(val)
+            if hdr == 'last-modified':
+                self.last_modified = val
 
         return True
 
@@ -1007,11 +1014,11 @@ class File(Base):
             raise ResponseError(self.conn.response, 'GET',
                                 self.conn.make_path(self.path))
 
-        for hdr in self.conn.response.getheaders():
-            if hdr[0].lower() == 'content-type':
-                self.content_type = hdr[1]
-            if hdr[0].lower() == 'content-range':
-                self.content_range = hdr[1]
+        for hdr, val in self.conn.response.getheaders():
+            if hdr.lower() == 'content-type':
+                self.content_type = wsgi_to_str(val)
+            if hdr.lower() == 'content-range':
+                self.content_range = val
 
         if hasattr(buffer, 'write'):
             scratch = self.conn.response.read(8192)
@@ -15,7 +15,6 @@
 # limitations under the License.
 
 from datetime import datetime
-import email.parser
 import hashlib
 import locale
 import random
@@ -29,6 +28,11 @@ import eventlet
 from swift.common.http import is_success, is_client_error
 from email.utils import parsedate
 
+if six.PY2:
+    from email.parser import FeedParser
+else:
+    from email.parser import BytesFeedParser as FeedParser
+
 import mock
 
 from test.functional import normalized_urls, load_constraint, cluster_info
@@ -244,7 +248,8 @@ class TestAccount(Base):
                 self.assertGreaterEqual(a['count'], 0)
                 self.assertGreaterEqual(a['bytes'], 0)
 
-            headers = dict(self.env.conn.response.getheaders())
+            headers = dict((k.lower(), v)
+                           for k, v in self.env.conn.response.getheaders())
             if format_type == 'json':
                 self.assertEqual(headers['content-type'],
                                  'application/json; charset=utf-8')
@@ -402,7 +407,7 @@ class TestAccount(Base):
         quoted_hax = urllib.parse.quote(hax)
         conn.connection.request('GET', '/v1/' + quoted_hax, None, {})
         resp = conn.connection.getresponse()
-        resp_headers = dict(resp.getheaders())
+        resp_headers = dict((h.lower(), v) for h, v in resp.getheaders())
         self.assertIn('www-authenticate', resp_headers)
         actual = resp_headers['www-authenticate']
         expected = 'Swift realm="%s"' % quoted_hax
@@ -1271,7 +1276,7 @@ class TestFile(Base):
             if k.lower() in unexpected_hdrs:
                 errors.append('Found unexpected header %s: %s' % (k, v))
         for k, v in expected_hdrs.items():
-            matches = [hdr for hdr in resp_headers if hdr[0] == k]
+            matches = [hdr for hdr in resp_headers if hdr[0].lower() == k]
             if not matches:
                 errors.append('Missing expected header %s' % k)
             for (got_k, got_v) in matches:
@@ -1941,7 +1946,11 @@ class TestFile(Base):
 
             if len(key) > j:
                 key = key[:j]
-            val = val[:j]
+            if isinstance(val, bytes):
+                val = val[:j]
+            else:
+                val = val.encode('utf8')[:j].decode(
+                    'utf8', 'surrogateescape')
 
             metadata[key] = val
 
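Truncating at a byte budget can now split a multi-byte UTF-8 sequence, and surrogateescape keeps the dangling bytes representable in a str instead of raising. A worked example of the expression added above:

    val = u'caf\u00e9'                 # b'caf\xc3\xa9' in UTF-8, five bytes
    j = 4
    truncated = val.encode('utf8')[:j].decode('utf8', 'surrogateescape')
    assert truncated == u'caf\udcc3'   # the orphaned lead byte survives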
@@ -2071,8 +2080,8 @@ class TestFile(Base):
         # HTTP response bodies don't). We fake it out by constructing a
         # one-header preamble containing just the Content-Type, then
         # feeding in the response body.
-        parser = email.parser.FeedParser()
-        parser.feed("Content-Type: %s\r\n\r\n" % content_type)
+        parser = FeedParser()
+        parser.feed(b"Content-Type: %s\r\n\r\n" % content_type.encode())
         parser.feed(fetched)
         root_message = parser.close()
         self.assertTrue(root_message.is_multipart())
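With the conditional import earlier in the file, FeedParser is BytesFeedParser on py3, so both the fabricated Content-Type preamble and the fetched body are fed in as bytes. A self-contained illustration (boundary and payload are made up):

    parser = FeedParser()
    parser.feed(b"Content-Type: multipart/byteranges; boundary=xyzzy\r\n\r\n")
    parser.feed(b"--xyzzy\r\n"
                b"Content-Type: text/plain\r\n\r\n"
                b"hello\r\n"
                b"--xyzzy--\r\n")
    message = parser.close()
    assert message.is_multipart()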
@@ -2086,7 +2095,7 @@ class TestFile(Base):
                 byteranges[0]['Content-Range'],
                 "bytes %d-%d/%d" % (i, i + subrange_size - 1, file_length))
             self.assertEqual(
-                byteranges[0].get_payload(),
+                byteranges[0].get_payload(decode=True),
                 data[i:(i + subrange_size)])
 
             self.assertEqual(byteranges[1]['Content-Type'],
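get_payload() on a py3 email.message.Message returns text, while the test data is bytes; decode=True asks the email module to hand back the payload decoded per its Content-Transfer-Encoding, i.e. as bytes. A minimal illustration on a fabricated message:

    import email

    msg = email.message_from_bytes(b"Content-Type: text/plain\r\n\r\nabc")
    assert msg.get_payload() == "abc"               # text on py3
    assert msg.get_payload(decode=True) == b"abc"   # bytes, comparable to data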
@@ -2096,7 +2105,7 @@ class TestFile(Base):
                 "bytes %d-%d/%d" % (i + 2 * subrange_size,
                                     i + 3 * subrange_size - 1, file_length))
             self.assertEqual(
-                byteranges[1].get_payload(),
+                byteranges[1].get_payload(decode=True),
                 data[(i + 2 * subrange_size):(i + 3 * subrange_size)])
 
             self.assertEqual(byteranges[2]['Content-Type'],
@@ -2106,7 +2115,7 @@ class TestFile(Base):
                 "bytes %d-%d/%d" % (i + 4 * subrange_size,
                                     i + 5 * subrange_size - 1, file_length))
             self.assertEqual(
-                byteranges[2].get_payload(),
+                byteranges[2].get_payload(decode=True),
                 data[(i + 4 * subrange_size):(i + 5 * subrange_size)])
 
         # The first two ranges are satisfiable but the third is not; the
@@ -2123,8 +2132,8 @@ class TestFile(Base):
         self.assertTrue(content_type.startswith("multipart/byteranges"))
         self.assertIsNone(file_item.content_range)
 
-        parser = email.parser.FeedParser()
-        parser.feed("Content-Type: %s\r\n\r\n" % content_type)
+        parser = FeedParser()
+        parser.feed(b"Content-Type: %s\r\n\r\n" % content_type.encode())
         parser.feed(fetched)
         root_message = parser.close()
 
@@ -2137,7 +2146,8 @@ class TestFile(Base):
         self.assertEqual(
             byteranges[0]['Content-Range'],
             "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
-        self.assertEqual(byteranges[0].get_payload(), data[:subrange_size])
+        self.assertEqual(byteranges[0].get_payload(decode=True),
+                         data[:subrange_size])
 
         self.assertEqual(byteranges[1]['Content-Type'],
                          "lovecraft/rugose; squamous=true")
@@ -2146,7 +2156,7 @@ class TestFile(Base):
             "bytes %d-%d/%d" % (2 * subrange_size, 3 * subrange_size - 1,
                                 file_length))
         self.assertEqual(
-            byteranges[1].get_payload(),
+            byteranges[1].get_payload(decode=True),
             data[(2 * subrange_size):(3 * subrange_size)])
 
         # The first range is satisfiable but the second is not; the
@@ -2161,8 +2171,8 @@ class TestFile(Base):
         content_type = file_item.content_type
         if content_type.startswith("multipart/byteranges"):
             self.assertIsNone(file_item.content_range)
-            parser = email.parser.FeedParser()
-            parser.feed("Content-Type: %s\r\n\r\n" % content_type)
+            parser = FeedParser()
+            parser.feed(b"Content-Type: %s\r\n\r\n" % content_type.encode())
             parser.feed(fetched)
             root_message = parser.close()
 
@@ -2175,7 +2185,8 @@ class TestFile(Base):
             self.assertEqual(
                 byteranges[0]['Content-Range'],
                 "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
-            self.assertEqual(byteranges[0].get_payload(), data[:subrange_size])
+            self.assertEqual(byteranges[0].get_payload(decode=True),
+                             data[:subrange_size])
         else:
             self.assertEqual(
                 file_item.content_range,
@@ -2494,7 +2505,8 @@ class TestFile(Base):
                 found, 'Unexpected file %s found in '
                        '%s listing' % (file_item['name'], format_type))
 
-            headers = dict(self.env.conn.response.getheaders())
+            headers = dict((h.lower(), v)
+                           for h, v in self.env.conn.response.getheaders())
             if format_type == 'json':
                 self.assertEqual(headers['content-type'],
                                  'application/json; charset=utf-8')
@@ -2536,7 +2548,8 @@ class TestFile(Base):
         data = six.BytesIO(file_item.write_random(512))
         etag = File.compute_md5sum(data)
 
-        headers = dict(self.env.conn.response.getheaders())
+        headers = dict((h.lower(), v)
+                       for h, v in self.env.conn.response.getheaders())
         self.assertIn('etag', headers.keys())
 
         header_etag = headers['etag'].strip('"')
@@ -99,7 +99,7 @@ class PatchedObjControllerApp(proxy_server.Application):
 
     def _fake_get_container_info(env, app, swift_source=None):
         _vrs, account, container, _junk = utils.split_path(
-            env['PATH_INFO'], 3, 4)
+            swob.wsgi_to_str(env['PATH_INFO']), 3, 4)
 
         # Seed the cache with our container info so that the real
         # get_container_info finds it.
tox.ini
@@ -13,7 +13,7 @@ deps =
   -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
   -r{toxinidir}/requirements.txt
   -r{toxinidir}/test-requirements.txt
-commands = find . ( -type f -o -type l ) -name "*.py[c|o]" -delete
+commands = find . ( -type f -o -type l ) -name "*.py[co]" -delete
   find . -type d -name "__pycache__" -delete
   nosetests {posargs:test/unit}
 whitelist_externals = find
@@ -119,6 +119,15 @@ commands = {[testenv:pep8]commands}
 basepython = python2.7
 commands = ./.functests {posargs}
 
+[testenv:func-py3]
+basepython = python3
+# Need to pick up (unreleased as of 2019-03) commit:
+# https://github.com/eventlet/eventlet/commit/f0bc79e
+commands =
+    pip install -U eventlet@git+https://github.com/eventlet/eventlet.git
+    nosetests {posargs: \
+        test/functional/tests.py}
+
 [testenv:func-encryption]
 commands = ./.functests {posargs}
 setenv = SWIFT_TEST_IN_PROCESS=1