Merge "Start transition to boto3 in s3api tests."

This commit is contained in:
Zuul 2019-05-22 05:25:13 +00:00 committed by Gerrit Code Review
commit 5be0e9ff09
6 changed files with 322 additions and 310 deletions

View File

@ -2,7 +2,8 @@
# of appearance. Changing the order has an impact on the overall integration # of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later. # process, which may cause wedges in the gate later.
# this is required for the docs build jobs # this is required for the docs build jobs
sphinx>=1.6.2 # BSD sphinx>=1.6.2,<2.0.0;python_version=='2.7' # BSD
sphinx>=1.6.2;python_version>='3.4' # BSD
openstackdocstheme>=1.11.0 # Apache-2.0 openstackdocstheme>=1.11.0 # Apache-2.0
reno>=1.8.0 # Apache-2.0 reno>=1.8.0 # Apache-2.0
os-api-ref>=1.0.0 # Apache-2.0 os-api-ref>=1.0.0 # Apache-2.0

View File

@ -3,6 +3,8 @@ asn1crypto==0.24.0
Babel==2.5.3 Babel==2.5.3
bandit==1.1.0 bandit==1.1.0
boto==2.32.1 boto==2.32.1
boto3==1.9
botocore==1.12
castellan==0.13.0 castellan==0.13.0
certifi==2018.1.18 certifi==2018.1.18
cffi==1.11.5 cffi==1.11.5

View File

@ -15,6 +15,8 @@ python-keystoneclient!=2.1.0,>=2.0.0 # Apache-2.0
reno>=1.8.0 # Apache-2.0 reno>=1.8.0 # Apache-2.0
python-openstackclient>=3.12.0 python-openstackclient>=3.12.0
boto>=2.32.1 boto>=2.32.1
boto3>=1.9
botocore>=1.12
requests-mock>=1.2.0 # Apache-2.0 requests-mock>=1.2.0 # Apache-2.0
fixtures>=3.0.0 # Apache-2.0/BSD fixtures>=3.0.0 # Apache-2.0/BSD
keystonemiddleware>=4.17.0 # Apache-2.0 keystonemiddleware>=4.17.0 # Apache-2.0

View File

@ -16,7 +16,8 @@
import unittest2 import unittest2
import traceback import traceback
import test.functional as tf import test.functional as tf
from test.functional.s3api.s3_test_client import Connection from test.functional.s3api.s3_test_client import (
Connection, get_boto3_conn, tear_down_s3)
def setUpModule(): def setUpModule():
@ -59,3 +60,23 @@ class S3ApiBase(unittest2.TestCase):
if etag is not None: if etag is not None:
self.assertTrue('etag' in headers) # sanity self.assertTrue('etag' in headers) # sanity
self.assertEqual(etag, headers['etag'].strip('"')) self.assertEqual(etag, headers['etag'].strip('"'))
class S3ApiBaseBoto3(S3ApiBase):
    """Base class for s3api functional tests driven through boto3.

    Overrides setUp to build a boto3 client instead of the legacy boto
    Connection, and scrubs the account before and after every test.
    """

    def setUp(self):
        if 's3api' not in tf.cluster_info:
            raise tf.SkipTest('s3api middleware is not enabled')
        try:
            conn = get_boto3_conn()
            self.conn = conn
            # Cache connection details that individual tests assert on.
            # NOTE(review): these reach into private botocore attributes;
            # they may break on a botocore upgrade.
            self.endpoint_url = conn._endpoint.host
            self.region = conn._client_config.region_name
            self.access_key = conn._request_signer._credentials.access_key
            # Start from a clean account so listings are predictable.
            tear_down_s3(conn)
        except Exception:
            # self.method_name is assumed to be provided by S3ApiBase.
            msg = '%s got an error during initialize process.\n\n%s' % (
                self.method_name, traceback.format_exc())
            # TODO: Find a way to make this go to FAIL instead of Error
            self.fail(msg)

    def tearDown(self):
        # Leave no buckets/objects behind for the next test.
        tear_down_s3(self.conn)

View File

@ -15,6 +15,8 @@
import os import os
import test.functional as tf import test.functional as tf
import boto3
from botocore.exceptions import ClientError
from boto.s3.connection import S3Connection, OrdinaryCallingFormat, \ from boto.s3.connection import S3Connection, OrdinaryCallingFormat, \
S3ResponseError S3ResponseError
import six import six
@ -135,6 +137,53 @@ class Connection(object):
return url, {} return url, {}
def get_boto3_conn(aws_access_key='test:tester', aws_secret_key='testing'):
    """Return a boto3 S3 client pointed at the cluster under test.

    :param aws_access_key: S3 access key (defaults to the test account)
    :param aws_secret_key: S3 secret key (defaults to the test password)
    """
    endpoint = 'http://{}:{}'.format(
        tf.config['auth_host'], int(tf.config['auth_port']))
    # Path-style addressing: virtual-host style would require DNS tricks.
    s3_config = boto3.session.Config(s3={'addressing_style': 'path'})
    return boto3.client(
        's3',
        endpoint_url=endpoint,
        use_ssl=False,
        region_name='us-east-1',
        config=s3_config,
        aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_key)
def tear_down_s3(conn):
    """
    Reset all swift environment to keep clean. As a result by calling this
    method, we can assume the backend swift keeps no containers and no
    objects on this connection's account.

    Retries up to RETRY_COUNT times; returns as soon as one full pass
    completes without error. Raises only if every attempt failed.

    :param conn: a boto3 S3 client for the account to scrub
    :raises Exception: aggregated tracebacks when all attempts errored
    """
    exceptions = []
    for _ in range(RETRY_COUNT):
        try:
            resp = conn.list_buckets()
            buckets = [bucket['Name'] for bucket in resp.get('Buckets', [])]
            for bucket in buckets:
                try:
                    # Abort in-flight multipart uploads first; a bucket
                    # with pending uploads cannot be deleted.
                    resp = conn.list_multipart_uploads(Bucket=bucket)
                    for upload in resp.get('Uploads', []):
                        conn.abort_multipart_upload(
                            Bucket=bucket,
                            Key=upload['Key'],
                            UploadId=upload['UploadId'])
                    resp = conn.list_objects(Bucket=bucket)
                    for obj in resp.get('Contents', []):
                        conn.delete_object(Bucket=bucket, Key=obj['Key'])
                    conn.delete_bucket(Bucket=bucket)
                except ClientError as e:
                    # 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
                    if e.response['ResponseMetadata']['HTTPStatusCode'] != 404:
                        raise
        except Exception:
            exceptions.append(''.join(
                traceback.format_exception(*sys.exc_info())))
        else:
            # A clean pass means the account is empty; earlier transient
            # failures (if any) no longer matter, so don't fall through
            # to the raise below.
            return
    if exceptions:
        exceptions.insert(0, 'Too many errors to continue:')
        raise Exception('\n========\n'.join(exceptions))
# TODO: make sure where this function is used # TODO: make sure where this function is used
def get_admin_connection(): def get_admin_connection():
""" """

View File

@ -13,16 +13,15 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import botocore
import datetime
import unittest2 import unittest2
import os import os
import test.functional as tf import test.functional as tf
from swift.common.utils import config_true_value from swift.common.utils import config_true_value
from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \ from test.functional.s3api import S3ApiBaseBoto3
SubElement from test.functional.s3api.s3_test_client import get_boto3_conn
from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection
from test.functional.s3api.utils import get_error_code
def setUpModule(): def setUpModule():
@ -33,14 +32,21 @@ def tearDownModule():
tf.teardown_package() tf.teardown_package()
class TestS3ApiBucket(S3ApiBase): class TestS3ApiBucket(S3ApiBaseBoto3):
def setUp(self): def _validate_object_listing(self, resp_objects, req_objects,
super(TestS3ApiBucket, self).setUp() expect_owner=True):
self.assertEqual(len(resp_objects), len(req_objects))
def _gen_location_xml(self, location): for i, obj in enumerate(resp_objects):
elem = Element('CreateBucketConfiguration') self.assertEqual(obj['Key'], req_objects[i])
SubElement(elem, 'LocationConstraint').text = location self.assertEqual(type(obj['LastModified']), datetime.datetime)
return tostring(elem) self.assertIn('ETag', obj)
self.assertIn('Size', obj)
self.assertEqual(obj['StorageClass'], 'STANDARD')
if expect_owner:
self.assertEqual(obj['Owner']['ID'], self.access_key)
self.assertEqual(obj['Owner']['DisplayName'], self.access_key)
else:
self.assertNotIn('Owner', obj)
def test_bucket(self): def test_bucket(self):
bucket = 'bucket' bucket = 'bucket'
@ -48,112 +54,95 @@ class TestS3ApiBucket(S3ApiBase):
'max_bucket_listing', 1000) 'max_bucket_listing', 1000)
# PUT Bucket # PUT Bucket
status, headers, body = self.conn.make_request('PUT', bucket) resp = self.conn.create_bucket(Bucket=bucket)
self.assertEqual(status, 200) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
headers = resp['ResponseMetadata']['HTTPHeaders']
self.assertCommonResponseHeaders(headers) self.assertCommonResponseHeaders(headers)
self.assertIn(headers['location'], ( self.assertIn(headers['location'], (
'/' + bucket, # swob won't touch it... '/' + bucket, # swob won't touch it...
# but webob (which we get because of auth_token) *does* # but webob (which we get because of auth_token) *does*
'http://%s%s/%s' % ( '%s/%s' % (self.endpoint_url, bucket),
self.conn.host,
'' if self.conn.port == 80 else ':%d' % self.conn.port,
bucket),
# This is all based on the Host header the client provided,
# and boto will double-up ports for sig v4. See
# - https://github.com/boto/boto/issues/2623
# - https://github.com/boto/boto/issues/3716
# with proposed fixes at
# - https://github.com/boto/boto/pull/3513
# - https://github.com/boto/boto/pull/3676
'http://%s%s:%d/%s' % (
self.conn.host,
'' if self.conn.port == 80 else ':%d' % self.conn.port,
self.conn.port,
bucket),
)) ))
self.assertEqual(headers['content-length'], '0') self.assertEqual(headers['content-length'], '0')
# GET Bucket(Without Object) # GET Bucket(Without Object)
status, headers, body = self.conn.make_request('GET', bucket) resp = self.conn.list_objects(Bucket=bucket)
self.assertEqual(status, 200) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
headers = resp['ResponseMetadata']['HTTPHeaders']
self.assertCommonResponseHeaders(headers) self.assertCommonResponseHeaders(headers)
self.assertIsNotNone(headers['content-type']) self.assertIsNotNone(headers['content-type'])
self.assertEqual(headers['content-length'], str(len(body)))
# TODO; requires consideration # TODO; requires consideration
# self.assertEqual(headers['transfer-encoding'], 'chunked') # self.assertEqual(headers['transfer-encoding'], 'chunked')
elem = fromstring(body, 'ListBucketResult') self.assertEqual(resp['Name'], bucket)
self.assertEqual(elem.find('Name').text, bucket) self.assertEqual(resp['Prefix'], '')
self.assertIsNone(elem.find('Prefix').text) self.assertEqual(resp['Marker'], '')
self.assertIsNone(elem.find('Marker').text) self.assertEqual(resp['MaxKeys'], max_bucket_listing)
self.assertEqual( self.assertFalse(resp['IsTruncated'])
elem.find('MaxKeys').text, str(max_bucket_listing)) self.assertNotIn('Contents', bucket)
self.assertEqual(elem.find('IsTruncated').text, 'false')
objects = elem.findall('./Contents')
self.assertEqual(list(objects), [])
# GET Bucket(With Object) # GET Bucket(With Object)
req_objects = ('object', 'object2') req_objects = ['object', 'object2']
for obj in req_objects: for obj in req_objects:
self.conn.make_request('PUT', bucket, obj) self.conn.put_object(Bucket=bucket, Key=obj, Body=b'')
status, headers, body = self.conn.make_request('GET', bucket) resp = self.conn.list_objects(Bucket=bucket)
self.assertEqual(status, 200) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
elem = fromstring(body, 'ListBucketResult') self.assertEqual(resp['Name'], bucket)
self.assertEqual(elem.find('Name').text, bucket) self.assertEqual(resp['Prefix'], '')
self.assertIsNone(elem.find('Prefix').text) self.assertEqual(resp['Marker'], '')
self.assertIsNone(elem.find('Marker').text) self.assertEqual(resp['MaxKeys'], max_bucket_listing)
self.assertEqual(elem.find('MaxKeys').text, self.assertFalse(resp['IsTruncated'])
str(max_bucket_listing)) self._validate_object_listing(resp['Contents'], req_objects)
self.assertEqual(elem.find('IsTruncated').text, 'false')
resp_objects = elem.findall('./Contents')
self.assertEqual(len(list(resp_objects)), 2)
for o in resp_objects:
self.assertIn(o.find('Key').text, req_objects)
self.assertIsNotNone(o.find('LastModified').text)
self.assertRegexpMatches(
o.find('LastModified').text,
r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
self.assertIsNotNone(o.find('ETag').text)
self.assertIsNotNone(o.find('Size').text)
self.assertIsNotNone(o.find('StorageClass').text)
self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
self.assertEqual(o.find('Owner/DisplayName').text,
self.conn.user_id)
# HEAD Bucket # HEAD Bucket
status, headers, body = self.conn.make_request('HEAD', bucket) resp = self.conn.head_bucket(Bucket=bucket)
self.assertEqual(status, 200) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
headers = resp['ResponseMetadata']['HTTPHeaders']
self.assertCommonResponseHeaders(headers) self.assertCommonResponseHeaders(headers)
self.assertIsNotNone(headers['content-type']) self.assertIsNotNone(headers['content-type'])
self.assertEqual(headers['content-length'], str(len(body)))
# TODO; requires consideration # TODO; requires consideration
# self.assertEqual(headers['transfer-encoding'], 'chunked') # self.assertEqual(headers['transfer-encoding'], 'chunked')
# DELETE Bucket # DELETE Bucket
for obj in req_objects: for obj in req_objects:
self.conn.make_request('DELETE', bucket, obj) self.conn.delete_object(Bucket=bucket, Key=obj)
status, headers, body = self.conn.make_request('DELETE', bucket) resp = self.conn.delete_bucket(Bucket=bucket)
self.assertEqual(status, 204) self.assertEqual(204, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertCommonResponseHeaders(headers) self.assertCommonResponseHeaders(
resp['ResponseMetadata']['HTTPHeaders'])
def test_put_bucket_error(self): def test_put_bucket_error(self):
status, headers, body = \ event_system = self.conn.meta.events
self.conn.make_request('PUT', 'bucket+invalid') event_system.unregister(
self.assertEqual(get_error_code(body), 'InvalidBucketName') 'before-parameter-build.s3',
botocore.handlers.validate_bucket_name)
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.conn.create_bucket(Bucket='bucket+invalid')
self.assertEqual(
ctx.exception.response['ResponseMetadata']['HTTPStatusCode'], 400)
self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
auth_error_conn = Connection(aws_secret_key='invalid') auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
status, headers, body = auth_error_conn.make_request('PUT', 'bucket') with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch') auth_error_conn.create_bucket(Bucket='bucket')
self.assertEqual(
ctx.exception.response['ResponseMetadata']['HTTPStatusCode'], 403)
self.assertEqual(ctx.exception.response['Error']['Code'],
'SignatureDoesNotMatch')
self.conn.make_request('PUT', 'bucket') self.conn.create_bucket(Bucket='bucket')
status, headers, body = self.conn.make_request('PUT', 'bucket') with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.assertEqual(status, 409) self.conn.create_bucket(Bucket='bucket')
self.assertEqual(get_error_code(body), 'BucketAlreadyOwnedByYou') self.assertEqual(
ctx.exception.response['ResponseMetadata']['HTTPStatusCode'], 409)
self.assertEqual(
ctx.exception.response['Error']['Code'], 'BucketAlreadyOwnedByYou')
def test_put_bucket_error_key2(self): def test_put_bucket_error_key2(self):
if config_true_value(tf.cluster_info['s3api'].get('s3_acl')): if config_true_value(tf.cluster_info['s3api'].get('s3_acl')):
@ -163,15 +152,18 @@ class TestS3ApiBucket(S3ApiBase):
'Cannot test for BucketAlreadyExists with second user; ' 'Cannot test for BucketAlreadyExists with second user; '
'need s3_access_key2 and s3_secret_key2 configured') 'need s3_access_key2 and s3_secret_key2 configured')
self.conn.make_request('PUT', 'bucket') self.conn.create_bucket(Bucket='bucket')
# Other users of the same account get the same 409 error # Other users of the same account get the same 409 error
conn2 = Connection(tf.config['s3_access_key2'], conn2 = get_boto3_conn(tf.config['s3_access_key2'],
tf.config['s3_secret_key2'], tf.config['s3_secret_key2'])
tf.config['s3_access_key2']) with self.assertRaises(botocore.exceptions.ClientError) as ctx:
status, headers, body = conn2.make_request('PUT', 'bucket') conn2.create_bucket(Bucket='bucket')
self.assertEqual(status, 409) self.assertEqual(
self.assertEqual(get_error_code(body), 'BucketAlreadyExists') ctx.exception.response['ResponseMetadata']['HTTPStatusCode'],
409)
self.assertEqual(
ctx.exception.response['Error']['Code'], 'BucketAlreadyExists')
def test_put_bucket_error_key3(self): def test_put_bucket_error_key3(self):
if 's3_access_key3' not in tf.config or \ if 's3_access_key3' not in tf.config or \
@ -179,41 +171,51 @@ class TestS3ApiBucket(S3ApiBase):
raise tf.SkipTest('Cannot test for AccessDenied; need ' raise tf.SkipTest('Cannot test for AccessDenied; need '
's3_access_key3 and s3_secret_key3 configured') 's3_access_key3 and s3_secret_key3 configured')
self.conn.make_request('PUT', 'bucket') self.conn.create_bucket(Bucket='bucket')
# If the user can't create buckets, they shouldn't even know # If the user can't create buckets, they shouldn't even know
# whether the bucket exists. # whether the bucket exists.
conn3 = Connection(tf.config['s3_access_key3'], conn3 = get_boto3_conn(tf.config['s3_access_key3'],
tf.config['s3_secret_key3'], tf.config['s3_secret_key3'])
tf.config['s3_access_key3']) with self.assertRaises(botocore.exceptions.ClientError) as ctx:
status, headers, body = conn3.make_request('PUT', 'bucket') conn3.create_bucket(Bucket='bucket')
self.assertEqual(status, 403) self.assertEqual(
self.assertEqual(get_error_code(body), 'AccessDenied') ctx.exception.response['ResponseMetadata']['HTTPStatusCode'], 403)
self.assertEqual(
ctx.exception.response['Error']['Code'], 'AccessDenied')
def test_put_bucket_with_LocationConstraint(self): def test_put_bucket_with_LocationConstraint(self):
bucket = 'bucket' resp = self.conn.create_bucket(
xml = self._gen_location_xml(self.conn.conn.auth_region_name) Bucket='bucket',
status, headers, body = \ CreateBucketConfiguration={'LocationConstraint': self.region})
self.conn.make_request('PUT', bucket, body=xml) self.assertEqual(resp['ResponseMetadata']['HTTPStatusCode'], 200)
self.assertEqual(status, 200)
def test_get_bucket_error(self): def test_get_bucket_error(self):
self.conn.make_request('PUT', 'bucket') event_system = self.conn.meta.events
event_system.unregister(
'before-parameter-build.s3',
botocore.handlers.validate_bucket_name)
self.conn.create_bucket(Bucket='bucket')
status, headers, body = \ with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.conn.make_request('GET', 'bucket+invalid') self.conn.list_objects(Bucket='bucket+invalid')
self.assertEqual(get_error_code(body), 'InvalidBucketName') self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
auth_error_conn = Connection(aws_secret_key='invalid') auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
status, headers, body = auth_error_conn.make_request('GET', 'bucket') with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch') auth_error_conn.list_objects(Bucket='bucket')
self.assertEqual(
ctx.exception.response['Error']['Code'], 'SignatureDoesNotMatch')
status, headers, body = self.conn.make_request('GET', 'nothing') with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.assertEqual(get_error_code(body), 'NoSuchBucket') self.conn.list_objects(Bucket='nothing')
self.assertEqual(
ctx.exception.response['Error']['Code'], 'NoSuchBucket')
def _prepare_test_get_bucket(self, bucket, objects): def _prepare_test_get_bucket(self, bucket, objects):
self.conn.make_request('PUT', bucket) self.conn.create_bucket(Bucket=bucket)
for obj in objects: for obj in objects:
self.conn.make_request('PUT', bucket, obj) self.conn.put_object(Bucket=bucket, Key=obj, Body=b'')
def test_get_bucket_with_delimiter(self): def test_get_bucket_with_delimiter(self):
bucket = 'bucket' bucket = 'bucket'
@ -222,32 +224,16 @@ class TestS3ApiBucket(S3ApiBase):
self._prepare_test_get_bucket(bucket, put_objects) self._prepare_test_get_bucket(bucket, put_objects)
delimiter = '/' delimiter = '/'
query = 'delimiter=%s' % delimiter
expect_objects = ('object', 'object2') expect_objects = ('object', 'object2')
expect_prefixes = ('dir/', 'subdir/', 'subdir2/') expect_prefixes = ('dir/', 'subdir/', 'subdir2/')
status, headers, body = \ resp = self.conn.list_objects(Bucket=bucket, Delimiter=delimiter)
self.conn.make_request('GET', bucket, query=query) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(status, 200) self.assertEqual(resp['Delimiter'], delimiter)
elem = fromstring(body, 'ListBucketResult') self._validate_object_listing(resp['Contents'], expect_objects)
self.assertEqual(elem.find('Delimiter').text, delimiter) resp_prefixes = resp['CommonPrefixes']
resp_objects = elem.findall('./Contents') self.assertEqual(
self.assertEqual(len(list(resp_objects)), len(expect_objects)) resp_prefixes,
for i, o in enumerate(resp_objects): [{'Prefix': p} for p in expect_prefixes])
self.assertEqual(o.find('Key').text, expect_objects[i])
self.assertIsNotNone(o.find('LastModified').text)
self.assertRegexpMatches(
o.find('LastModified').text,
r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
self.assertIsNotNone(o.find('ETag').text)
self.assertIsNotNone(o.find('Size').text)
self.assertEqual(o.find('StorageClass').text, 'STANDARD')
self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
self.assertEqual(o.find('Owner/DisplayName').text,
self.conn.user_id)
resp_prefixes = elem.findall('CommonPrefixes')
self.assertEqual(len(resp_prefixes), len(expect_prefixes))
for i, p in enumerate(resp_prefixes):
self.assertEqual(p.find('./Prefix').text, expect_prefixes[i])
def test_get_bucket_with_encoding_type(self): def test_get_bucket_with_encoding_type(self):
bucket = 'bucket' bucket = 'bucket'
@ -255,12 +241,10 @@ class TestS3ApiBucket(S3ApiBase):
self._prepare_test_get_bucket(bucket, put_objects) self._prepare_test_get_bucket(bucket, put_objects)
encoding_type = 'url' encoding_type = 'url'
query = 'encoding-type=%s' % encoding_type resp = self.conn.list_objects(
status, headers, body = \ Bucket=bucket, EncodingType=encoding_type)
self.conn.make_request('GET', bucket, query=query) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(status, 200) self.assertEqual(resp['EncodingType'], encoding_type)
elem = fromstring(body, 'ListBucketResult')
self.assertEqual(elem.find('EncodingType').text, encoding_type)
def test_get_bucket_with_marker(self): def test_get_bucket_with_marker(self):
bucket = 'bucket' bucket = 'bucket'
@ -269,27 +253,11 @@ class TestS3ApiBucket(S3ApiBase):
self._prepare_test_get_bucket(bucket, put_objects) self._prepare_test_get_bucket(bucket, put_objects)
marker = 'object' marker = 'object'
query = 'marker=%s' % marker
expect_objects = ('object2', 'subdir/object', 'subdir2/object') expect_objects = ('object2', 'subdir/object', 'subdir2/object')
status, headers, body = \ resp = self.conn.list_objects(Bucket=bucket, Marker=marker)
self.conn.make_request('GET', bucket, query=query) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(status, 200) self.assertEqual(resp['Marker'], marker)
elem = fromstring(body, 'ListBucketResult') self._validate_object_listing(resp['Contents'], expect_objects)
self.assertEqual(elem.find('Marker').text, marker)
resp_objects = elem.findall('./Contents')
self.assertEqual(len(list(resp_objects)), len(expect_objects))
for i, o in enumerate(resp_objects):
self.assertEqual(o.find('Key').text, expect_objects[i])
self.assertIsNotNone(o.find('LastModified').text)
self.assertRegexpMatches(
o.find('LastModified').text,
r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
self.assertIsNotNone(o.find('ETag').text)
self.assertIsNotNone(o.find('Size').text)
self.assertEqual(o.find('StorageClass').text, 'STANDARD')
self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
self.assertEqual(o.find('Owner/DisplayName').text,
self.conn.user_id)
def test_get_bucket_with_max_keys(self): def test_get_bucket_with_max_keys(self):
bucket = 'bucket' bucket = 'bucket'
@ -297,28 +265,12 @@ class TestS3ApiBucket(S3ApiBase):
'dir/subdir/object') 'dir/subdir/object')
self._prepare_test_get_bucket(bucket, put_objects) self._prepare_test_get_bucket(bucket, put_objects)
max_keys = '2' max_keys = 2
query = 'max-keys=%s' % max_keys
expect_objects = ('dir/subdir/object', 'object') expect_objects = ('dir/subdir/object', 'object')
status, headers, body = \ resp = self.conn.list_objects(Bucket=bucket, MaxKeys=max_keys)
self.conn.make_request('GET', bucket, query=query) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(status, 200) self.assertEqual(resp['MaxKeys'], max_keys)
elem = fromstring(body, 'ListBucketResult') self._validate_object_listing(resp['Contents'], expect_objects)
self.assertEqual(elem.find('MaxKeys').text, max_keys)
resp_objects = elem.findall('./Contents')
self.assertEqual(len(list(resp_objects)), len(expect_objects))
for i, o in enumerate(resp_objects):
self.assertEqual(o.find('Key').text, expect_objects[i])
self.assertIsNotNone(o.find('LastModified').text)
self.assertRegexpMatches(
o.find('LastModified').text,
r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
self.assertIsNotNone(o.find('ETag').text)
self.assertIsNotNone(o.find('Size').text)
self.assertEqual(o.find('StorageClass').text, 'STANDARD')
self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
self.assertEqual(o.find('Owner/DisplayName').text,
self.conn.user_id)
def test_get_bucket_with_prefix(self): def test_get_bucket_with_prefix(self):
bucket = 'bucket' bucket = 'bucket'
@ -327,27 +279,11 @@ class TestS3ApiBucket(S3ApiBase):
self._prepare_test_get_bucket(bucket, req_objects) self._prepare_test_get_bucket(bucket, req_objects)
prefix = 'object' prefix = 'object'
query = 'prefix=%s' % prefix
expect_objects = ('object', 'object2') expect_objects = ('object', 'object2')
status, headers, body = \ resp = self.conn.list_objects(Bucket=bucket, Prefix=prefix)
self.conn.make_request('GET', bucket, query=query) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(status, 200) self.assertEqual(resp['Prefix'], prefix)
elem = fromstring(body, 'ListBucketResult') self._validate_object_listing(resp['Contents'], expect_objects)
self.assertEqual(elem.find('Prefix').text, prefix)
resp_objects = elem.findall('./Contents')
self.assertEqual(len(list(resp_objects)), len(expect_objects))
for i, o in enumerate(resp_objects):
self.assertEqual(o.find('Key').text, expect_objects[i])
self.assertIsNotNone(o.find('LastModified').text)
self.assertRegexpMatches(
o.find('LastModified').text,
r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
self.assertIsNotNone(o.find('ETag').text)
self.assertIsNotNone(o.find('Size').text)
self.assertEqual(o.find('StorageClass').text, 'STANDARD')
self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
self.assertEqual(o.find('Owner/DisplayName').text,
self.conn.user_id)
def test_get_bucket_v2_with_start_after(self): def test_get_bucket_v2_with_start_after(self):
bucket = 'bucket' bucket = 'bucket'
@ -356,26 +292,13 @@ class TestS3ApiBucket(S3ApiBase):
self._prepare_test_get_bucket(bucket, put_objects) self._prepare_test_get_bucket(bucket, put_objects)
marker = 'object' marker = 'object'
query = 'list-type=2&start-after=%s' % marker
expect_objects = ('object2', 'subdir/object', 'subdir2/object') expect_objects = ('object2', 'subdir/object', 'subdir2/object')
status, headers, body = \ resp = self.conn.list_objects_v2(Bucket=bucket, StartAfter=marker)
self.conn.make_request('GET', bucket, query=query) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(status, 200) self.assertEqual(resp['StartAfter'], marker)
elem = fromstring(body, 'ListBucketResult') self.assertEqual(resp['KeyCount'], 3)
self.assertEqual(elem.find('StartAfter').text, marker) self._validate_object_listing(resp['Contents'], expect_objects,
resp_objects = elem.findall('./Contents') expect_owner=False)
self.assertEqual(len(list(resp_objects)), len(expect_objects))
for i, o in enumerate(resp_objects):
self.assertEqual(o.find('Key').text, expect_objects[i])
self.assertTrue(o.find('LastModified').text is not None)
self.assertRegexpMatches(
o.find('LastModified').text,
r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
self.assertTrue(o.find('ETag').text is not None)
self.assertTrue(o.find('Size').text is not None)
self.assertEqual(o.find('StorageClass').text, 'STANDARD')
self.assertIsNone(o.find('Owner/ID'))
self.assertIsNone(o.find('Owner/DisplayName'))
def test_get_bucket_v2_with_fetch_owner(self): def test_get_bucket_v2_with_fetch_owner(self):
bucket = 'bucket' bucket = 'bucket'
@ -383,28 +306,12 @@ class TestS3ApiBucket(S3ApiBase):
'dir/subdir/object') 'dir/subdir/object')
self._prepare_test_get_bucket(bucket, put_objects) self._prepare_test_get_bucket(bucket, put_objects)
query = 'list-type=2&fetch-owner=true'
expect_objects = ('dir/subdir/object', 'object', 'object2', expect_objects = ('dir/subdir/object', 'object', 'object2',
'subdir/object', 'subdir2/object') 'subdir/object', 'subdir2/object')
status, headers, body = \ resp = self.conn.list_objects_v2(Bucket=bucket, FetchOwner=True)
self.conn.make_request('GET', bucket, query=query) self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(status, 200) self.assertEqual(resp['KeyCount'], 5)
elem = fromstring(body, 'ListBucketResult') self._validate_object_listing(resp['Contents'], expect_objects)
self.assertEqual(elem.find('KeyCount').text, '5')
resp_objects = elem.findall('./Contents')
self.assertEqual(len(list(resp_objects)), len(expect_objects))
for i, o in enumerate(resp_objects):
self.assertEqual(o.find('Key').text, expect_objects[i])
self.assertTrue(o.find('LastModified').text is not None)
self.assertRegexpMatches(
o.find('LastModified').text,
r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
self.assertTrue(o.find('ETag').text is not None)
self.assertTrue(o.find('Size').text is not None)
self.assertEqual(o.find('StorageClass').text, 'STANDARD')
self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
self.assertTrue(o.find('Owner/DisplayName').text,
self.conn.user_id)
def test_get_bucket_v2_with_continuation_token_and_delimiter(self): def test_get_bucket_v2_with_continuation_token_and_delimiter(self):
bucket = 'bucket' bucket = 'bucket'
@ -421,86 +328,116 @@ class TestS3ApiBucket(S3ApiBase):
'subdirs': []}] 'subdirs': []}]
continuation_token = '' continuation_token = ''
query = 'list-type=2&max-keys=3&delimiter=/&continuation-token=%s'
for i in range(len(expected)): for i in range(len(expected)):
status, headers, body = self.conn.make_request( resp = self.conn.list_objects_v2(
'GET', bucket, query=query % continuation_token) Bucket=bucket,
self.assertEqual(status, 200) MaxKeys=3,
elem = fromstring(body, 'ListBucketResult') Delimiter='/',
self.assertEqual(elem.find('MaxKeys').text, '3') ContinuationToken=continuation_token)
self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(resp['MaxKeys'], 3)
self.assertEqual( self.assertEqual(
elem.find('KeyCount').text, resp['KeyCount'],
str(len(expected[i]['objects']) + len(expected[i]['subdirs']))) len(expected[i]['objects']) + len(expected[i]['subdirs']))
expect_truncated = 'true' if i < len(expected) - 1 else 'false' expect_truncated = i < len(expected) - 1
self.assertEqual(elem.find('IsTruncated').text, expect_truncated) self.assertEqual(resp['IsTruncated'], expect_truncated)
next_cont_token_elem = elem.find('NextContinuationToken') if expect_truncated:
if expect_truncated == 'true': self.assertIsNotNone(resp['NextContinuationToken'])
self.assertIsNotNone(next_cont_token_elem) continuation_token = resp['NextContinuationToken']
continuation_token = next_cont_token_elem.text self._validate_object_listing(resp['Contents'],
resp_objects = elem.findall('./Contents') expected[i]['objects'],
expect_owner=False)
resp_subdirs = resp.get('CommonPrefixes', [])
self.assertEqual( self.assertEqual(
len(list(resp_objects)), len(expected[i]['objects'])) resp_subdirs,
for j, o in enumerate(resp_objects): [{'Prefix': p} for p in expected[i]['subdirs']])
self.assertEqual(o.find('Key').text,
expected[i]['objects'][j].encode('utf-8'))
self.assertTrue(o.find('LastModified').text is not None)
self.assertRegexpMatches(
o.find('LastModified').text,
r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
self.assertTrue(o.find('ETag').text is not None)
self.assertTrue(o.find('Size').text is not None)
self.assertEqual(o.find('StorageClass').text, 'STANDARD')
self.assertIsNone(o.find('Owner/ID'))
self.assertIsNone(o.find('Owner/DisplayName'))
resp_subdirs = elem.findall('./CommonPrefixes')
self.assertEqual(
len(list(resp_subdirs)), len(expected[i]['subdirs']))
for j, o in enumerate(resp_subdirs):
self.assertEqual(
o.find('Prefix').text,
expected[i]['subdirs'][j].encode('utf-8'))
def test_head_bucket_error(self): def test_head_bucket_error(self):
self.conn.make_request('PUT', 'bucket') event_system = self.conn.meta.events
event_system.unregister(
'before-parameter-build.s3',
botocore.handlers.validate_bucket_name)
status, headers, body = \ self.conn.create_bucket(Bucket='bucket')
self.conn.make_request('HEAD', 'bucket+invalid')
self.assertEqual(status, 400)
self.assertEqual(body, '') # sanity
auth_error_conn = Connection(aws_secret_key='invalid') with self.assertRaises(botocore.exceptions.ClientError) as ctx:
status, headers, body = \ self.conn.head_bucket(Bucket='bucket+invalid')
auth_error_conn.make_request('HEAD', 'bucket') self.assertEqual(
self.assertEqual(status, 403) ctx.exception.response['ResponseMetadata']['HTTPStatusCode'], 400)
self.assertEqual(body, '') # sanity self.assertEqual(ctx.exception.response['Error']['Code'], '400')
self.assertEqual(
ctx.exception.response[
'ResponseMetadata']['HTTPHeaders']['content-length'], '0')
status, headers, body = self.conn.make_request('HEAD', 'nothing') auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
self.assertEqual(status, 404) with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.assertEqual(body, '') # sanity auth_error_conn.head_bucket(Bucket='bucket')
self.assertEqual(
ctx.exception.response['ResponseMetadata']['HTTPStatusCode'], 403)
self.assertEqual(
ctx.exception.response['Error']['Code'], '403')
self.assertEqual(
ctx.exception.response[
'ResponseMetadata']['HTTPHeaders']['content-length'], '0')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.conn.head_bucket(Bucket='nothing')
self.assertEqual(
ctx.exception.response['ResponseMetadata']['HTTPStatusCode'], 404)
self.assertEqual(
ctx.exception.response['Error']['Code'], '404')
self.assertEqual(
ctx.exception.response[
'ResponseMetadata']['HTTPHeaders']['content-length'], '0')
def test_delete_bucket_error(self): def test_delete_bucket_error(self):
status, headers, body = \ event_system = self.conn.meta.events
self.conn.make_request('DELETE', 'bucket+invalid') event_system.unregister(
self.assertEqual(get_error_code(body), 'InvalidBucketName') 'before-parameter-build.s3',
botocore.handlers.validate_bucket_name)
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.conn.delete_bucket(Bucket='bucket+invalid')
self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
auth_error_conn = Connection(aws_secret_key='invalid') auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
status, headers, body = \ with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.make_request('DELETE', 'bucket') auth_error_conn.delete_bucket(Bucket='bucket')
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch') self.assertEqual(
ctx.exception.response['Error']['Code'], 'SignatureDoesNotMatch')
status, headers, body = self.conn.make_request('DELETE', 'bucket') with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.assertEqual(get_error_code(body), 'NoSuchBucket') self.conn.delete_bucket(Bucket='bucket')
self.assertEqual(
ctx.exception.response['Error']['Code'], 'NoSuchBucket')
def test_bucket_invalid_method_error(self): def test_bucket_invalid_method_error(self):
def _mangle_req_method(request, **kwargs):
request.method = 'GETPUT'
def _mangle_req_controller_method(request, **kwargs):
request.method = '_delete_segments_bucket'
event_system = self.conn.meta.events
event_system.register(
'request-created.s3.CreateBucket',
_mangle_req_method)
# non existed verb in the controller # non existed verb in the controller
status, headers, body = \ with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.conn.make_request('GETPUT', 'bucket') self.conn.create_bucket(Bucket='bucket')
self.assertEqual(get_error_code(body), 'MethodNotAllowed') self.assertEqual(
ctx.exception.response['Error']['Code'], 'MethodNotAllowed')
event_system.unregister('request-created.s3.CreateBucket',
_mangle_req_method)
event_system.register('request-created.s3.CreateBucket',
_mangle_req_controller_method)
# the method exists in the controller but deny as MethodNotAllowed # the method exists in the controller but deny as MethodNotAllowed
status, headers, body = \ with self.assertRaises(botocore.exceptions.ClientError) as ctx:
self.conn.make_request('_delete_segments_bucket', 'bucket') self.conn.create_bucket(Bucket='bucket')
self.assertEqual(get_error_code(body), 'MethodNotAllowed') self.assertEqual(
ctx.exception.response['Error']['Code'], 'MethodNotAllowed')
class TestS3ApiBucketSigV4(TestS3ApiBucket): class TestS3ApiBucketSigV4(TestS3ApiBucket):