Round s3api listing LastModified to integer resolution
s3api bucket listing elements currently have LastModified values with millisecond precision. This is inconsistent with the value of the Last-Modified header returned with an object GET or HEAD response, which has second precision. This patch reduces the precision to seconds in bucket listings and upload part listings, which also matches observed AWS listing responses.

The last modified values in the swift native listing are rounded *up* to the nearest second, to be consistent with the seconds-precision Last-Modified header that is returned with an object GET or HEAD. However, we continue to include millisecond digits set to 0 in the last-modified string, e.g.: '2014-06-10T22:47:32.000Z'.

Also, fix the last modified time returned in an object copy response to be consistent with the last modified time of the object that was created: previously it was rounded down, but it should be rounded up.

Change-Id: I8c98791a920eeedfc79e8a9d83e5032c07ae86d3
parent 52254bb5ca
commit 2f607cd319
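For reference, a minimal sketch of the resulting behaviour (not part of the diff below; values match the unit tests in this change, and assume a swift checkout on PYTHONPATH):

    from swift.common.middleware.s3api.utils import S3Timestamp

    ts = S3Timestamp.from_isoformat('2014-06-10T22:47:32.054580')
    # listings round *up* to whole seconds, keeping zeroed millisecond digits
    print(ts.s3xmlformat)  # '2014-06-10T22:47:33.000Z'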
@@ -34,7 +34,7 @@ from swift.common.middleware.s3api.s3response import \
     MalformedXML, InvalidLocationConstraint, NoSuchBucket, \
     BucketNotEmpty, VersionedBucketNotEmpty, InternalError, \
     ServiceUnavailable, NoSuchKey
-from swift.common.middleware.s3api.utils import MULTIUPLOAD_SUFFIX
+from swift.common.middleware.s3api.utils import MULTIUPLOAD_SUFFIX, S3Timestamp
 
 MAX_PUT_BUCKET_BODY_SIZE = 10240
 
@@ -291,7 +291,7 @@ class BucketController(Controller):
             contents = SubElement(elem, 'Contents')
             SubElement(contents, 'Key').text = name
             SubElement(contents, 'LastModified').text = \
-                o['last_modified'][:-3] + 'Z'
+                S3Timestamp.from_isoformat(o['last_modified']).s3xmlformat
             if contents.tag != 'DeleteMarker':
                 if 's3_etag' in o:
                     # New-enough MUs are already in the right format
@@ -397,7 +397,7 @@ class UploadsController(Controller):
             SubElement(owner_elem, 'DisplayName').text = req.user_id
             SubElement(upload_elem, 'StorageClass').text = 'STANDARD'
             SubElement(upload_elem, 'Initiated').text = \
-                u['last_modified'][:-3] + 'Z'
+                S3Timestamp.from_isoformat(u['last_modified']).s3xmlformat
 
         for p in prefixes:
             elem = SubElement(result_elem, 'CommonPrefixes')
@@ -582,7 +582,7 @@ class UploadController(Controller):
             part_elem = SubElement(result_elem, 'Part')
             SubElement(part_elem, 'PartNumber').text = i['name'].split('/')[-1]
             SubElement(part_elem, 'LastModified').text = \
-                i['last_modified'][:-3] + 'Z'
+                S3Timestamp.from_isoformat(i['last_modified']).s3xmlformat
             SubElement(part_elem, 'ETag').text = '"%s"' % i['hash']
             SubElement(part_elem, 'Size').text = str(i['bytes'])
@@ -15,6 +15,7 @@
 
 import base64
+import calendar
 import datetime
 import email.utils
 import re
 import six
@@ -108,9 +109,19 @@ def validate_bucket_name(name, dns_compliant_bucket_names):
 
 
 class S3Timestamp(utils.Timestamp):
+    S3_XML_FORMAT = "%Y-%m-%dT%H:%M:%S.000Z"
+
     @property
     def s3xmlformat(self):
-        return self.isoformat[:-7] + '.000Z'
+        dt = datetime.datetime.utcfromtimestamp(self.ceil())
+        return dt.strftime(self.S3_XML_FORMAT)
+
+    @classmethod
+    def from_s3xmlformat(cls, date_string):
+        dt = datetime.datetime.strptime(date_string, cls.S3_XML_FORMAT)
+        dt = dt.replace(tzinfo=utils.UTC)
+        seconds = calendar.timegm(dt.timetuple())
+        return cls(seconds)
 
     @property
     def amz_date_format(self):
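A quick round-trip of the two new methods above (illustration only; epoch values taken from the tests later in this change):

    from swift.common.middleware.s3api.utils import S3Timestamp

    ts = S3Timestamp(1402440452.05458)   # 2014-06-10T22:47:32.054580
    print(ts.s3xmlformat)                # '2014-06-10T22:47:33.000Z'
    ts2 = S3Timestamp.from_s3xmlformat(ts.s3xmlformat)
    print(float(ts2))                    # 1402440453.0, whole seconds only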
@@ -1324,6 +1324,15 @@ class Timestamp(object):
 
     @property
     def isoformat(self):
+        """
+        Get an isoformat string representation of the 'normal' part of the
+        Timestamp with microsecond precision and no trailing timezone, for
+        example:
+
+            1970-01-01T00:00:00.000000
+
+        :return: an isoformat string
+        """
         t = float(self.normal)
         if six.PY3:
             # On Python 3, round manually using ROUND_HALF_EVEN rounding
@@ -1350,6 +1359,21 @@ class Timestamp(object):
             isoformat += ".000000"
         return isoformat
 
+    @classmethod
+    def from_isoformat(cls, date_string):
+        """
+        Parse an isoformat string representation of time to a Timestamp object.
+
+        :param date_string: a string formatted as per a Timestamp.isoformat
+            property.
+        :return: an instance of this class.
+        """
+        start = datetime.datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S.%f")
+        delta = start - EPOCH
+        # This calculation is based on Python 2.7's Modules/datetimemodule.c,
+        # function delta_to_microseconds(), but written in Python.
+        return cls(delta.total_seconds())
+
     def ceil(self):
         """
         Return the 'normal' part of the timestamp rounded up to the nearest
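A short sketch of the plain-Timestamp helpers this builds on (illustration only; the epoch value is hand-computed for UTC):

    from swift.common.utils import Timestamp

    ts = Timestamp.from_isoformat('2014-02-28T23:22:36.698390')
    print(float(ts))   # 1393629756.69839
    print(ts.ceil())   # 1393629757.0, rounded up to the next whole second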
@@ -1506,13 +1530,7 @@ def last_modified_date_to_timestamp(last_modified_date_str):
     Convert a last modified date (like you'd get from a container listing,
     e.g. 2014-02-28T23:22:36.698390) to a float.
     """
-    start = datetime.datetime.strptime(last_modified_date_str,
-                                       '%Y-%m-%dT%H:%M:%S.%f')
-    delta = start - EPOCH
-
-    # This calculation is based on Python 2.7's Modules/datetimemodule.c,
-    # function delta_to_microseconds(), but written in Python.
-    return Timestamp(delta.total_seconds())
+    return Timestamp.from_isoformat(last_modified_date_str)
 
 
 def normalize_delete_at_timestamp(timestamp, high_precision=False):
@@ -29,7 +29,8 @@ from six.moves import urllib, zip, zip_longest
 import test.functional as tf
 from swift.common.middleware.s3api.etree import fromstring, tostring, \
     Element, SubElement
-from swift.common.middleware.s3api.utils import MULTIUPLOAD_SUFFIX, mktime
+from swift.common.middleware.s3api.utils import MULTIUPLOAD_SUFFIX, mktime, \
+    S3Timestamp
 from swift.common.utils import md5
 
 from test.functional.s3api import S3ApiBase
@@ -213,7 +214,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
         self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
         self.assertTrue('content-length' in headers)
         self.assertEqual(headers['content-length'], '0')
-        expected_parts_list = [(headers['etag'], mktime(headers['date']))]
+        expected_parts_list = [(headers['etag'],
+                                mktime(headers['last-modified']))]
 
         # Upload Part Copy
         key, upload_id = uploads[1]
@@ -242,8 +244,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
         self.assertTrue('etag' not in headers)
         elem = fromstring(body, 'CopyPartResult')
 
-        last_modified = elem.find('LastModified').text
-        self.assertTrue(last_modified is not None)
+        copy_resp_last_modified = elem.find('LastModified').text
+        self.assertIsNotNone(copy_resp_last_modified)
 
         self.assertEqual(resp_etag, etag)
@@ -256,15 +258,10 @@ class TestS3ApiMultiUpload(S3ApiBase):
         self.assertEqual(200, status)
         elem = fromstring(body, 'ListPartsResult')
 
-        # FIXME: COPY result drops milli/microseconds but GET doesn't
-        last_modified_gets = [p.find('LastModified').text
-                              for p in elem.iterfind('Part')]
-        self.assertEqual(
-            last_modified_gets[0].rsplit('.', 1)[0],
-            last_modified.rsplit('.', 1)[0],
-            '%r != %r' % (last_modified_gets[0], last_modified))
-        # There should be *exactly* two parts in the result
-        self.assertEqual(1, len(last_modified_gets))
+        listing_last_modified = [p.find('LastModified').text
+                                 for p in elem.iterfind('Part')]
+        # There should be *exactly* one part in the result
+        self.assertEqual(listing_last_modified, [copy_resp_last_modified])
 
         # List Parts
         key, upload_id = uploads[0]
@@ -299,15 +296,10 @@ class TestS3ApiMultiUpload(S3ApiBase):
         for (expected_etag, expected_date), p in \
                 zip(expected_parts_list, elem.findall('Part')):
             last_modified = p.find('LastModified').text
-            self.assertTrue(last_modified is not None)
-            # TODO: sanity check
-            # (kota_) How do we check the sanity?
-            # the last-modified header drops milli-seconds info
-            # by the constraint of the format.
-            # For now, we can do either the format check or round check
-            # last_modified_from_xml = mktime(last_modified)
-            # self.assertEqual(expected_date,
-            #                  last_modified_from_xml)
+            self.assertIsNotNone(last_modified)
+            last_modified_from_xml = S3Timestamp.from_s3xmlformat(
+                last_modified)
+            self.assertEqual(expected_date, float(last_modified_from_xml))
             self.assertEqual(expected_etag, p.find('ETag').text)
             self.assertEqual(self.min_segment_size, int(p.find('Size').text))
             etags.append(p.find('ETag').text)
@@ -496,7 +488,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
             self.assertIsNotNone(o.find('LastModified').text)
             self.assertRegex(
                 o.find('LastModified').text,
-                r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+                r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.000Z$')
             self.assertEqual(o.find('ETag').text, exp_etag)
             self.assertEqual(o.find('Size').text, str(exp_size))
             self.assertIsNotNone(o.find('StorageClass').text)
@@ -932,8 +924,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
         self.assertTrue('etag' not in headers)
         elem = fromstring(body, 'CopyPartResult')
 
-        last_modified = elem.find('LastModified').text
-        self.assertTrue(last_modified is not None)
+        copy_resp_last_modified = elem.find('LastModified').text
+        self.assertIsNotNone(copy_resp_last_modified)
 
         self.assertEqual(resp_etag, etag)
@@ -945,16 +937,10 @@ class TestS3ApiMultiUpload(S3ApiBase):
 
         elem = fromstring(body, 'ListPartsResult')
 
-        # FIXME: COPY result drops milli/microseconds but GET doesn't
-        last_modified_gets = [p.find('LastModified').text
-                              for p in elem.iterfind('Part')]
-        self.assertEqual(
-            last_modified_gets[0].rsplit('.', 1)[0],
-            last_modified.rsplit('.', 1)[0],
-            '%r != %r' % (last_modified_gets[0], last_modified))
-
-        self.assertEqual(1, len(last_modified_gets))
+        listing_last_modified = [p.find('LastModified').text
+                                 for p in elem.iterfind('Part')]
+        # There should be *exactly* one part in the result
+        self.assertEqual(listing_last_modified, [copy_resp_last_modified])
 
         # Abort Multipart Upload
         key, upload_id = uploads[0]
@@ -1044,8 +1030,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
         self.assertTrue('etag' not in headers)
         elem = fromstring(body, 'CopyPartResult')
 
-        last_modifieds = [elem.find('LastModified').text]
-        self.assertTrue(last_modifieds[0] is not None)
+        copy_resp_last_modifieds = [elem.find('LastModified').text]
+        self.assertTrue(copy_resp_last_modifieds[0] is not None)
 
         self.assertEqual(resp_etag, etags[0])
@@ -1062,8 +1048,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
         self.assertTrue('etag' not in headers)
         elem = fromstring(body, 'CopyPartResult')
 
-        last_modifieds.append(elem.find('LastModified').text)
-        self.assertTrue(last_modifieds[1] is not None)
+        copy_resp_last_modifieds.append(elem.find('LastModified').text)
+        self.assertTrue(copy_resp_last_modifieds[1] is not None)
 
         self.assertEqual(resp_etag, etags[1])
@@ -1075,15 +1061,9 @@ class TestS3ApiMultiUpload(S3ApiBase):
 
         elem = fromstring(body, 'ListPartsResult')
 
-        # FIXME: COPY result drops milli/microseconds but GET doesn't
-        last_modified_gets = [p.find('LastModified').text
-                              for p in elem.iterfind('Part')]
-        self.assertEqual(
-            [lm.rsplit('.', 1)[0] for lm in last_modified_gets],
-            [lm.rsplit('.', 1)[0] for lm in last_modifieds])
-
-        # There should be *exactly* two parts in the result
-        self.assertEqual(2, len(last_modified_gets))
+        listing_last_modified = [p.find('LastModified').text
+                                 for p in elem.iterfind('Part')]
+        self.assertEqual(listing_last_modified, copy_resp_last_modifieds)
 
         # Abort Multipart Upload
         key, upload_id = uploads[0]
@@ -22,6 +22,7 @@ import boto
 # pylint: disable-msg=E0611,F0401
 from distutils.version import StrictVersion
 
+import calendar
 import email.parser
 from email.utils import formatdate, parsedate
 from time import mktime
@@ -30,6 +31,7 @@ import six
 import test.functional as tf
 
 from swift.common.middleware.s3api.etree import fromstring
+from swift.common.middleware.s3api.utils import S3Timestamp
 from swift.common.utils import md5, quote
 
 from test.functional.s3api import S3ApiBase
@@ -98,21 +100,32 @@ class TestS3ApiObject(S3ApiBase):
 
         elem = fromstring(body, 'CopyObjectResult')
         self.assertTrue(elem.find('LastModified').text is not None)
-        last_modified_xml = elem.find('LastModified').text
+        copy_resp_last_modified_xml = elem.find('LastModified').text
         self.assertTrue(elem.find('ETag').text is not None)
         self.assertEqual(etag, elem.find('ETag').text.strip('"'))
         self._assertObjectEtag(dst_bucket, dst_obj, etag)
 
-        # Check timestamp on Copy:
+        # Check timestamp on Copy in listing:
         status, headers, body = \
             self.conn.make_request('GET', dst_bucket)
         self.assertEqual(status, 200)
         elem = fromstring(body, 'ListBucketResult')
 
-        # FIXME: COPY result drops milli/microseconds but GET doesn't
         self.assertEqual(
-            elem.find('Contents').find("LastModified").text.rsplit('.', 1)[0],
-            last_modified_xml.rsplit('.', 1)[0])
+            elem.find('Contents').find("LastModified").text,
+            copy_resp_last_modified_xml)
 
         # GET Object copy
         status, headers, body = \
             self.conn.make_request('GET', dst_bucket, dst_obj)
         self.assertEqual(status, 200)
 
         self.assertCommonResponseHeaders(headers, etag)
         self.assertTrue(headers['last-modified'] is not None)
+        self.assertEqual(
+            float(S3Timestamp.from_s3xmlformat(copy_resp_last_modified_xml)),
+            calendar.timegm(parsedate(headers['last-modified'])))
         self.assertTrue(headers['content-type'] is not None)
         self.assertEqual(headers['content-length'], str(len(content)))
 
         # GET Object
         status, headers, body = \
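The assertion added above compares the copy response's XML time against the seconds-precision Last-Modified header; the stdlib side of that comparison looks like this (standalone sketch with an illustrative header value):

    import calendar
    from email.utils import parsedate

    print(calendar.timegm(parsedate('Tue, 10 Jun 2014 22:47:32 GMT')))
    # 1402440452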
@@ -770,6 +783,26 @@ class TestS3ApiObject(S3ApiBase):
         self.assertEqual(status, 200)
         self.assertCommonResponseHeaders(headers)
 
+        # check we can use the last modified time from the listing...
+        status, headers, body = \
+            self.conn.make_request('GET', self.bucket)
+        elem = fromstring(body, 'ListBucketResult')
+        last_modified = elem.find('./Contents/LastModified').text
+        listing_datetime = S3Timestamp.from_s3xmlformat(last_modified)
+        headers = \
+            {'If-Unmodified-Since': formatdate(listing_datetime)}
+        status, headers, body = \
+            self.conn.make_request('GET', self.bucket, obj, headers=headers)
+        self.assertEqual(status, 200)
+        self.assertCommonResponseHeaders(headers)
+
+        headers = \
+            {'If-Modified-Since': formatdate(listing_datetime)}
+        status, headers, body = \
+            self.conn.make_request('GET', self.bucket, obj, headers=headers)
+        self.assertEqual(status, 304)
+        self.assertCommonResponseHeaders(headers)
+
     def test_get_object_if_match(self):
         obj = 'object'
         self.conn.make_request('PUT', self.bucket, obj)
@@ -92,7 +92,7 @@ class TestS3ApiPresignedUrls(S3ApiBase):
             self.assertIsNotNone(o.find('LastModified').text)
             self.assertRegex(
                 o.find('LastModified').text,
-                r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+                r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.000Z$')
             self.assertIsNotNone(o.find('ETag').text)
             self.assertEqual(o.find('Size').text, '0')
             self.assertIsNotNone(o.find('StorageClass').text is not None)
@@ -1042,9 +1042,9 @@ def make_timestamp_iter(offset=0):
 
 
 @contextmanager
-def mock_timestamp_now(now=None):
+def mock_timestamp_now(now=None, klass=Timestamp):
     if now is None:
-        now = Timestamp.now()
+        now = klass.now()
     with mocklib.patch('swift.common.utils.Timestamp.now',
                        classmethod(lambda c: now)):
         yield now
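The new keyword argument lets the s3api unit tests below pin "now" to an S3Timestamp (usage sketch; since Timestamp.now is patched, the inherited S3Timestamp.now is pinned too):

    from swift.common.middleware.s3api.utils import S3Timestamp
    from test.unit import mock_timestamp_now

    with mock_timestamp_now(klass=S3Timestamp) as now:
        # inside the block, Timestamp.now() always returns `now`
        expected = S3Timestamp(now.ceil()).s3xmlformat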
@@ -201,7 +201,7 @@ class TestS3ApiBucket(S3ApiTestCase):
         items = []
         for o in objects:
             items.append((o.find('./Key').text, o.find('./ETag').text))
-            self.assertEqual('2011-01-05T02:19:14.275Z',
+            self.assertEqual('2011-01-05T02:19:15.000Z',
                              o.find('./LastModified').text)
         expected = [
             (i[0].encode('utf-8') if six.PY2 else i[0],
@@ -211,6 +211,37 @@ class TestS3ApiBucket(S3ApiTestCase):
         ]
         self.assertEqual(items, expected)
 
+    def test_bucket_GET_last_modified_rounding(self):
+        objects_list = [
+            {'name': 'a', 'last_modified': '2011-01-05T02:19:59.275290',
+             'content_type': 'application/octet-stream',
+             'hash': 'ahash', 'bytes': '12345'},
+            {'name': 'b', 'last_modified': '2011-01-05T02:19:59.000000',
+             'content_type': 'application/octet-stream',
+             'hash': 'ahash', 'bytes': '12345'},
+        ]
+        self.swift.register(
+            'GET', '/v1/AUTH_test/junk',
+            swob.HTTPOk, {'Content-Type': 'application/json'},
+            json.dumps(objects_list))
+        req = Request.blank('/junk',
+                            environ={'REQUEST_METHOD': 'GET'},
+                            headers={'Authorization': 'AWS test:tester:hmac',
+                                     'Date': self.get_date_header()})
+        status, headers, body = self.call_s3api(req)
+        self.assertEqual(status.split()[0], '200')
+
+        elem = fromstring(body, 'ListBucketResult')
+        name = elem.find('./Name').text
+        self.assertEqual(name, 'junk')
+        objects = elem.iterchildren('Contents')
+        actual = [(obj.find('./Key').text, obj.find('./LastModified').text)
+                  for obj in objects]
+        self.assertEqual(
+            [('a', '2011-01-05T02:20:00.000Z'),
+             ('b', '2011-01-05T02:19:59.000Z')],
+            actual)
+
     def test_bucket_GET_url_encoded(self):
         bucket_name = 'junk'
         req = Request.blank('/%s?encoding-type=url' % bucket_name,
@@ -229,7 +260,7 @@ class TestS3ApiBucket(S3ApiTestCase):
         items = []
         for o in objects:
             items.append((o.find('./Key').text, o.find('./ETag').text))
-            self.assertEqual('2011-01-05T02:19:14.275Z',
+            self.assertEqual('2011-01-05T02:19:15.000Z',
                              o.find('./LastModified').text)
 
         self.assertEqual(items, [
@@ -673,9 +704,9 @@ class TestS3ApiBucket(S3ApiTestCase):
         self.assertEqual([v.find('./VersionId').text for v in versions],
                          ['null' for v in objects])
         # Last modified in self.objects is 2011-01-05T02:19:14.275290 but
-        # the returned value is 2011-01-05T02:19:14.275Z
+        # the returned value is rounded up to 2011-01-05T02:19:15Z
         self.assertEqual([v.find('./LastModified').text for v in versions],
-                         [v[1][:-3] + 'Z' for v in objects])
+                         ['2011-01-05T02:19:15.000Z'] * len(objects))
         self.assertEqual([v.find('./ETag').text for v in versions],
                          [PFS_ETAG if v[0] == 'pfs-obj' else
                           '"0-N"' if v[0] == 'slo' else '"0"'
@@ -51,29 +51,41 @@ XML = '<CompleteMultipartUpload>' \
     '</CompleteMultipartUpload>'
 
 OBJECTS_TEMPLATE = \
-    (('object/X/1', '2014-05-07T19:47:51.592270', '0123456789abcdef', 100),
-     ('object/X/2', '2014-05-07T19:47:52.592270', 'fedcba9876543210', 200))
+    (('object/X/1', '2014-05-07T19:47:51.592270', '0123456789abcdef', 100,
+      '2014-05-07T19:47:52.000Z'),
+     ('object/X/2', '2014-05-07T19:47:52.592270', 'fedcba9876543210', 200,
+      '2014-05-07T19:47:53.000Z'))
 
 MULTIPARTS_TEMPLATE = \
-    (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
-     ('object/X/1', '2014-05-07T19:47:51.592270', '0123456789abcdef', 11),
-     ('object/X/2', '2014-05-07T19:47:52.592270', 'fedcba9876543210', 21),
-     ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2),
-     ('object/Y/1', '2014-05-07T19:47:54.592270', '0123456789abcdef', 12),
-     ('object/Y/2', '2014-05-07T19:47:55.592270', 'fedcba9876543210', 22),
-     ('object/Z', '2014-05-07T19:47:56.592270', 'HASH', 3),
-     ('object/Z/1', '2014-05-07T19:47:57.592270', '0123456789abcdef', 13),
-     ('object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210', 23),
-     ('subdir/object/Z', '2014-05-07T19:47:58.592270', 'HASH', 4),
+    (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
+      '2014-05-07T19:47:51.000Z'),
+     ('object/X/1', '2014-05-07T19:47:51.592270', '0123456789abcdef', 11,
+      '2014-05-07T19:47:52.000Z'),
+     ('object/X/2', '2014-05-07T19:47:52.592270', 'fedcba9876543210', 21,
+      '2014-05-07T19:47:53.000Z'),
+     ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2,
+      '2014-05-07T19:47:54.000Z'),
+     ('object/Y/1', '2014-05-07T19:47:54.592270', '0123456789abcdef', 12,
+      '2014-05-07T19:47:55.000Z'),
+     ('object/Y/2', '2014-05-07T19:47:55.592270', 'fedcba9876543210', 22,
+      '2014-05-07T19:47:56.000Z'),
+     ('object/Z', '2014-05-07T19:47:56.592270', 'HASH', 3,
+      '2014-05-07T19:47:57.000Z'),
+     ('object/Z/1', '2014-05-07T19:47:57.592270', '0123456789abcdef', 13,
+      '2014-05-07T19:47:58.000Z'),
+     ('object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210', 23,
+      '2014-05-07T19:47:59.000Z'),
+     ('subdir/object/Z', '2014-05-07T19:47:58.592270', 'HASH', 4,
+      '2014-05-07T19:47:59.000Z'),
     ('subdir/object/Z/1', '2014-05-07T19:47:58.592270', '0123456789abcdef',
-     41),
+     41, '2014-05-07T19:47:59.000Z'),
    ('subdir/object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210',
-     41),
+     41, '2014-05-07T19:47:59.000Z'),
     # NB: wsgi strings
     ('subdir/object/completed\xe2\x98\x83/W/1', '2014-05-07T19:47:58.592270',
-     '0123456789abcdef', 41),
+     '0123456789abcdef', 41, '2014-05-07T19:47:59.000Z'),
     ('subdir/object/completed\xe2\x98\x83/W/2', '2014-05-07T19:47:58.592270',
-     'fedcba9876543210', 41),
+     'fedcba9876543210', 41, '2014-05-07T19:47:59.000Z'))
 
 S3_ETAG = '"%s-2"' % md5(binascii.a2b_hex(
     '0123456789abcdef0123456789abcdef'
@@ -285,7 +297,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         self.assertEqual(elem.find('MaxUploads').text, '1000')
         self.assertEqual(elem.find('IsTruncated').text, 'false')
         self.assertEqual(len(elem.findall('Upload')), len(uploads))
-        expected_uploads = [(upload[0], '2014-05-07T19:47:50.592Z')
+        expected_uploads = [(upload[0], '2014-05-07T19:47:51.000Z')
                             for upload in uploads]
         for u in elem.findall('Upload'):
             name = u.find('Key').text + '/' + u.find('UploadId').text
@@ -310,7 +322,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         self.assertEqual(elem.find('MaxUploads').text, '1000')
         self.assertEqual(elem.find('IsTruncated').text, 'false')
         self.assertEqual(len(elem.findall('Upload')), 4)
-        objects = [(o[0], o[1][:-3] + 'Z') for o in MULTIPARTS_TEMPLATE]
+        objects = [(o[0], o[4]) for o in MULTIPARTS_TEMPLATE]
         for u in elem.findall('Upload'):
             name = u.find('Key').text + '/' + u.find('UploadId').text
             initiated = u.find('Initiated').text
@@ -417,9 +429,12 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
     def test_bucket_multipart_uploads_GET_with_id_and_key_marker(self):
         query = 'upload-id-marker=Y&key-marker=object'
         multiparts = \
-            (('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2),
-             ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12),
-             ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22))
+            (('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2,
+              '2014-05-07T19:47:54.000Z'),
+             ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12,
+              '2014-05-07T19:47:55.000Z'),
+             ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22,
+              '2014-05-07T19:47:56.000Z'))
 
         status, headers, body = \
             self._test_bucket_multipart_uploads_GET(query, multiparts)
@@ -427,7 +442,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         self.assertEqual(elem.find('KeyMarker').text, 'object')
         self.assertEqual(elem.find('UploadIdMarker').text, 'Y')
         self.assertEqual(len(elem.findall('Upload')), 1)
-        objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts]
+        objects = [(o[0], o[4]) for o in multiparts]
         for u in elem.findall('Upload'):
             name = u.find('Key').text + '/' + u.find('UploadId').text
             initiated = u.find('Initiated').text
@@ -447,12 +462,18 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
     def test_bucket_multipart_uploads_GET_with_key_marker(self):
         query = 'key-marker=object'
         multiparts = \
-            (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
-             ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
-             ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21),
-             ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2),
-             ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12),
-             ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22))
+            (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
+              '2014-05-07T19:47:51.000Z'),
+             ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11,
+              '2014-05-07T19:47:52.000Z'),
+             ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21,
+              '2014-05-07T19:47:53.000Z'),
+             ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2,
+              '2014-05-07T19:47:54.000Z'),
+             ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12,
+              '2014-05-07T19:47:55.000Z'),
+             ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22,
+              '2014-05-07T19:47:56.000Z'))
         status, headers, body = \
             self._test_bucket_multipart_uploads_GET(query, multiparts)
         elem = fromstring(body, 'ListMultipartUploadsResult')
@@ -460,11 +481,11 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         self.assertEqual(elem.find('NextKeyMarker').text, 'object')
         self.assertEqual(elem.find('NextUploadIdMarker').text, 'Y')
         self.assertEqual(len(elem.findall('Upload')), 2)
-        objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts]
+        objects = [(o[0], o[4]) for o in multiparts]
         for u in elem.findall('Upload'):
             name = u.find('Key').text + '/' + u.find('UploadId').text
             initiated = u.find('Initiated').text
-            self.assertTrue((name, initiated) in objects)
+            self.assertIn((name, initiated), objects)
         self.assertEqual(status.split()[0], '200')
 
         _, path, _ = self.swift.calls_with_headers[-1]
|
||||
def test_bucket_multipart_uploads_GET_with_prefix(self):
|
||||
query = 'prefix=X'
|
||||
multiparts = \
|
||||
(('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
|
||||
('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
|
||||
('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21))
|
||||
(('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
|
||||
'2014-05-07T19:47:51.000Z'),
|
||||
('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11,
|
||||
'2014-05-07T19:47:52.000Z'),
|
||||
('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21,
|
||||
'2014-05-07T19:47:53.000Z'))
|
||||
status, headers, body = \
|
||||
self._test_bucket_multipart_uploads_GET(query, multiparts)
|
||||
elem = fromstring(body, 'ListMultipartUploadsResult')
|
||||
self.assertEqual(len(elem.findall('Upload')), 1)
|
||||
objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts]
|
||||
objects = [(o[0], o[4]) for o in multiparts]
|
||||
for u in elem.findall('Upload'):
|
||||
name = u.find('Key').text + '/' + u.find('UploadId').text
|
||||
initiated = u.find('Initiated').text
|
||||
@@ -507,38 +531,56 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
     def test_bucket_multipart_uploads_GET_with_delimiter(self):
         query = 'delimiter=/'
         multiparts = \
-            (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
-             ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
-             ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21),
-             ('object/Y', '2014-05-07T19:47:50.592270', 'HASH', 2),
-             ('object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 21),
-             ('object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 22),
-             ('object/Z', '2014-05-07T19:47:50.592270', 'HASH', 3),
-             ('object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 31),
-             ('object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 32),
-             ('subdir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 4),
-             ('subdir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 41),
-             ('subdir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 42),
-             ('subdir/object/Y', '2014-05-07T19:47:50.592270', 'HASH', 5),
-             ('subdir/object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 51),
-             ('subdir/object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 52),
-             ('subdir2/object/Z', '2014-05-07T19:47:50.592270', 'HASH', 6),
-             ('subdir2/object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 61),
-             ('subdir2/object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 62))
+            (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
+              '2014-05-07T19:47:51.000Z'),
+             ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11,
+              '2014-05-07T19:47:52.000Z'),
+             ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21,
+              '2014-05-07T19:47:53.000Z'),
+             ('object/Y', '2014-05-07T19:47:50.592270', 'HASH', 2,
+              '2014-05-07T19:47:51.000Z'),
+             ('object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 21,
+              '2014-05-07T19:47:52.000Z'),
+             ('object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 22,
+              '2014-05-07T19:47:53.000Z'),
+             ('object/Z', '2014-05-07T19:47:50.592270', 'HASH', 3,
+              '2014-05-07T19:47:51.000Z'),
+             ('object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 31,
+              '2014-05-07T19:47:52.000Z'),
+             ('object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 32,
+              '2014-05-07T19:47:53.000Z'),
+             ('subdir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 4,
+              '2014-05-07T19:47:51.000Z'),
+             ('subdir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 41,
+              '2014-05-07T19:47:52.000Z'),
+             ('subdir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 42,
+              '2014-05-07T19:47:53.000Z'),
+             ('subdir/object/Y', '2014-05-07T19:47:50.592270', 'HASH', 5,
+              '2014-05-07T19:47:51.000Z'),
+             ('subdir/object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 51,
+              '2014-05-07T19:47:52.000Z'),
+             ('subdir/object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 52,
+              '2014-05-07T19:47:53.000Z'),
+             ('subdir2/object/Z', '2014-05-07T19:47:50.592270', 'HASH', 6,
+              '2014-05-07T19:47:51.000Z'),
+             ('subdir2/object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 61,
+              '2014-05-07T19:47:52.000Z'),
+             ('subdir2/object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 62,
+              '2014-05-07T19:47:53.000Z'))
 
         status, headers, body = \
             self._test_bucket_multipart_uploads_GET(query, multiparts)
         elem = fromstring(body, 'ListMultipartUploadsResult')
         self.assertEqual(len(elem.findall('Upload')), 3)
         self.assertEqual(len(elem.findall('CommonPrefixes')), 2)
-        objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts
+        objects = [(o[0], o[4]) for o in multiparts
                    if o[0].startswith('o')]
         prefixes = set([o[0].split('/')[0] + '/' for o in multiparts
                         if o[0].startswith('s')])
         for u in elem.findall('Upload'):
             name = u.find('Key').text + '/' + u.find('UploadId').text
             initiated = u.find('Initiated').text
-            self.assertTrue((name, initiated) in objects)
+            self.assertIn((name, initiated), objects)
         for p in elem.findall('CommonPrefixes'):
             prefix = p.find('Prefix').text
             self.assertTrue(prefix in prefixes)
@@ -557,31 +599,43 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
     def test_bucket_multipart_uploads_GET_with_multi_chars_delimiter(self):
         query = 'delimiter=subdir'
         multiparts = \
-            (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
-             ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
-             ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21),
+            (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
+              '2014-05-07T19:47:51.000Z'),
+             ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11,
+              '2014-05-07T19:47:52.000Z'),
+             ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21,
+              '2014-05-07T19:47:53.000Z'),
              ('dir/subdir/object/X', '2014-05-07T19:47:50.592270',
-              'HASH', 3),
+              'HASH', 3, '2014-05-07T19:47:51.000Z'),
              ('dir/subdir/object/X/1', '2014-05-07T19:47:51.592270',
-              'HASH', 31),
+              'HASH', 31, '2014-05-07T19:47:52.000Z'),
              ('dir/subdir/object/X/2', '2014-05-07T19:47:52.592270',
-              'HASH', 32),
-             ('subdir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 4),
-             ('subdir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 41),
-             ('subdir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 42),
-             ('subdir/object/Y', '2014-05-07T19:47:50.592270', 'HASH', 5),
-             ('subdir/object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 51),
-             ('subdir/object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 52),
-             ('subdir2/object/Z', '2014-05-07T19:47:50.592270', 'HASH', 6),
-             ('subdir2/object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 61),
-             ('subdir2/object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 62))
+              'HASH', 32, '2014-05-07T19:47:53.000Z'),
+             ('subdir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 4,
+              '2014-05-07T19:47:51.000Z'),
+             ('subdir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 41,
+              '2014-05-07T19:47:52.000Z'),
+             ('subdir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 42,
+              '2014-05-07T19:47:53.000Z'),
+             ('subdir/object/Y', '2014-05-07T19:47:50.592270', 'HASH', 5,
+              '2014-05-07T19:47:51.000Z'),
+             ('subdir/object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 51,
+              '2014-05-07T19:47:52.000Z'),
+             ('subdir/object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 52,
+              '2014-05-07T19:47:53.000Z'),
+             ('subdir2/object/Z', '2014-05-07T19:47:50.592270', 'HASH', 6,
+              '2014-05-07T19:47:51.000Z'),
+             ('subdir2/object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 61,
+              '2014-05-07T19:47:52.000Z'),
+             ('subdir2/object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 62,
+              '2014-05-07T19:47:53.000Z'))
 
         status, headers, body = \
             self._test_bucket_multipart_uploads_GET(query, multiparts)
         elem = fromstring(body, 'ListMultipartUploadsResult')
         self.assertEqual(len(elem.findall('Upload')), 1)
         self.assertEqual(len(elem.findall('CommonPrefixes')), 2)
-        objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts
+        objects = [(o[0], o[4]) for o in multiparts
                    if o[0].startswith('object')]
         prefixes = ('dir/subdir', 'subdir')
         for u in elem.findall('Upload'):
@@ -607,27 +661,30 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         query = 'prefix=dir/&delimiter=/'
         multiparts = \
             (('dir/subdir/object/X', '2014-05-07T19:47:50.592270',
-              'HASH', 4),
+              'HASH', 4, '2014-05-07T19:47:51.000Z'),
              ('dir/subdir/object/X/1', '2014-05-07T19:47:51.592270',
-              'HASH', 41),
+              'HASH', 41, '2014-05-07T19:47:52.000Z'),
              ('dir/subdir/object/X/2', '2014-05-07T19:47:52.592270',
-              'HASH', 42),
-             ('dir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 5),
-             ('dir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 51),
-             ('dir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 52))
+              'HASH', 42, '2014-05-07T19:47:53.000Z'),
+             ('dir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 5,
+              '2014-05-07T19:47:51.000Z'),
+             ('dir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 51,
+              '2014-05-07T19:47:52.000Z'),
+             ('dir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 52,
+              '2014-05-07T19:47:53.000Z'))
 
         status, headers, body = \
             self._test_bucket_multipart_uploads_GET(query, multiparts)
         elem = fromstring(body, 'ListMultipartUploadsResult')
         self.assertEqual(len(elem.findall('Upload')), 1)
         self.assertEqual(len(elem.findall('CommonPrefixes')), 1)
-        objects = [(o[0], o[1][:-3] + 'Z') for o in multiparts
+        objects = [(o[0], o[4]) for o in multiparts
                    if o[0].startswith('dir/o')]
         prefixes = ['dir/subdir/']
        for u in elem.findall('Upload'):
             name = u.find('Key').text + '/' + u.find('UploadId').text
             initiated = u.find('Initiated').text
-            self.assertTrue((name, initiated) in objects)
+            self.assertIn((name, initiated), objects)
         for p in elem.findall('CommonPrefixes'):
             prefix = p.find('Prefix').text
             self.assertTrue(prefix in prefixes)
@@ -1838,6 +1895,9 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
                         'hash': hex(i),
                         'bytes': 100 * i}
                        for i in range(1, 2000)]
+        ceil_last_modified = ['2014-05-07T19:%02d:%02d.000Z'
+                              % (47 if (i + 1) % 60 else 48, (i + 1) % 60)
+                              for i in range(1, 2000)]
         swift_sorted = sorted(swift_parts, key=lambda part: part['name'])
         self.swift.register('GET',
                             "%s?delimiter=/&format=json&marker=&"
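The ceil_last_modified expression above encodes the minute rollover that rounding up can cause; a standalone check of the arithmetic:

    for i in (12, 59):
        print('2014-05-07T19:%02d:%02d.000Z'
              % (47 if (i + 1) % 60 else 48, (i + 1) % 60))
    # 2014-05-07T19:47:13.000Z  (stays within minute 47)
    # 2014-05-07T19:48:00.000Z  (second 60 rolls over to 19:48:00)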
@@ -1872,7 +1932,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
             s3_parts.append(partnum)
             self.assertEqual(
                 p.find('LastModified').text,
-                swift_parts[partnum - 1]['last_modified'][:-3] + 'Z')
+                ceil_last_modified[partnum - 1])
             self.assertEqual(p.find('ETag').text.strip(),
                              '"%s"' % swift_parts[partnum - 1]['hash'])
             self.assertEqual(p.find('Size').text,
@@ -1970,7 +2030,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         for p in elem.findall('Part'):
             partnum = int(p.find('PartNumber').text)
             self.assertEqual(p.find('LastModified').text,
-                             OBJECTS_TEMPLATE[partnum - 1][1][:-3] + 'Z')
+                             OBJECTS_TEMPLATE[partnum - 1][4])
             self.assertEqual(p.find('ETag').text,
                              '"%s"' % OBJECTS_TEMPLATE[partnum - 1][2])
             self.assertEqual(p.find('Size').text,
@@ -28,6 +28,7 @@ import json
 from swift.common import swob
 from swift.common.swob import Request
 from swift.common.middleware.proxy_logging import ProxyLoggingMiddleware
+from test.unit import mock_timestamp_now
 
 from test.unit.common.middleware.s3api import S3ApiTestCase
 from test.unit.common.middleware.s3api.test_s3_acl import s3acl
@@ -872,33 +873,29 @@ class TestS3ApiObj(S3ApiTestCase):
 
     @s3acl
     def test_object_PUT_copy_metadata_replace(self):
-        date_header = self.get_date_header()
-        timestamp = mktime(date_header)
-        allowed_last_modified = [S3Timestamp(timestamp).s3xmlformat]
-        status, headers, body = \
-            self._test_object_PUT_copy(
-                swob.HTTPOk,
-                {'X-Amz-Metadata-Directive': 'REPLACE',
-                 'X-Amz-Meta-Something': 'oh hai',
-                 'X-Amz-Meta-Unreadable-Prefix': '\x04w',
-                 'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
-                 'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
-                 'Cache-Control': 'hello',
-                 'content-disposition': 'how are you',
-                 'content-encoding': 'good and you',
-                 'content-language': 'great',
-                 'content-type': 'so',
-                 'expires': 'yeah',
-                 'x-robots-tag': 'bye'})
-        date_header = self.get_date_header()
-        timestamp = mktime(date_header)
-        allowed_last_modified.append(S3Timestamp(timestamp).s3xmlformat)
+        with mock_timestamp_now(klass=S3Timestamp) as now:
+            status, headers, body = \
+                self._test_object_PUT_copy(
+                    swob.HTTPOk,
+                    {'X-Amz-Metadata-Directive': 'REPLACE',
+                     'X-Amz-Meta-Something': 'oh hai',
+                     'X-Amz-Meta-Unreadable-Prefix': '\x04w',
+                     'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
+                     'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
+                     'Cache-Control': 'hello',
+                     'content-disposition': 'how are you',
+                     'content-encoding': 'good and you',
+                     'content-language': 'great',
+                     'content-type': 'so',
+                     'expires': 'yeah',
+                     'x-robots-tag': 'bye'})
 
         self.assertEqual(status.split()[0], '200')
         self.assertEqual(headers['Content-Type'], 'application/xml')
         self.assertIsNone(headers.get('etag'))
         elem = fromstring(body, 'CopyObjectResult')
-        self.assertIn(elem.find('LastModified').text, allowed_last_modified)
+        self.assertEqual(S3Timestamp(now.ceil()).s3xmlformat,
+                         elem.find('LastModified').text)
         self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
 
         _, _, headers = self.swift.calls_with_headers[-1]
@@ -926,34 +923,30 @@ class TestS3ApiObj(S3ApiTestCase):
 
     @s3acl
     def test_object_PUT_copy_metadata_copy(self):
-        date_header = self.get_date_header()
-        timestamp = mktime(date_header)
-        allowed_last_modified = [S3Timestamp(timestamp).s3xmlformat]
-        status, headers, body = \
-            self._test_object_PUT_copy(
-                swob.HTTPOk,
-                {'X-Amz-Metadata-Directive': 'COPY',
-                 'X-Amz-Meta-Something': 'oh hai',
-                 'X-Amz-Meta-Unreadable-Prefix': '\x04w',
-                 'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
-                 'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
-                 'Cache-Control': 'hello',
-                 'content-disposition': 'how are you',
-                 'content-encoding': 'good and you',
-                 'content-language': 'great',
-                 'content-type': 'so',
-                 'expires': 'yeah',
-                 'x-robots-tag': 'bye'})
-        date_header = self.get_date_header()
-        timestamp = mktime(date_header)
-        allowed_last_modified.append(S3Timestamp(timestamp).s3xmlformat)
+        with mock_timestamp_now(klass=S3Timestamp) as now:
+            status, headers, body = \
+                self._test_object_PUT_copy(
+                    swob.HTTPOk,
+                    {'X-Amz-Metadata-Directive': 'COPY',
+                     'X-Amz-Meta-Something': 'oh hai',
+                     'X-Amz-Meta-Unreadable-Prefix': '\x04w',
+                     'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
+                     'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
+                     'Cache-Control': 'hello',
+                     'content-disposition': 'how are you',
+                     'content-encoding': 'good and you',
+                     'content-language': 'great',
+                     'content-type': 'so',
+                     'expires': 'yeah',
+                     'x-robots-tag': 'bye'})
 
         self.assertEqual(status.split()[0], '200')
         self.assertEqual(headers['Content-Type'], 'application/xml')
         self.assertIsNone(headers.get('etag'))
 
         elem = fromstring(body, 'CopyObjectResult')
-        self.assertIn(elem.find('LastModified').text, allowed_last_modified)
+        self.assertEqual(S3Timestamp(now.ceil()).s3xmlformat,
+                         elem.find('LastModified').text)
         self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
 
         _, _, headers = self.swift.calls_with_headers[-1]
@@ -81,21 +81,6 @@ class TestS3ApiUtils(unittest.TestCase):
         self.assertFalse(utils.validate_bucket_name('bucket.', False))
         self.assertFalse(utils.validate_bucket_name('a' * 256, False))
 
-    def test_s3timestamp(self):
-        expected = '1970-01-01T00:00:01.000Z'
-        # integer
-        ts = utils.S3Timestamp(1)
-        self.assertEqual(expected, ts.s3xmlformat)
-        # milliseconds unit should be floored
-        ts = utils.S3Timestamp(1.1)
-        self.assertEqual(expected, ts.s3xmlformat)
-        # float (microseconds) should be floored too
-        ts = utils.S3Timestamp(1.000001)
-        self.assertEqual(expected, ts.s3xmlformat)
-        # Bigger float (milliseconds) should be floored too
-        ts = utils.S3Timestamp(1.9)
-        self.assertEqual(expected, ts.s3xmlformat)
-
     def test_mktime(self):
         date_headers = [
             'Thu, 01 Jan 1970 00:00:00 -0000',
@@ -130,6 +115,48 @@ class TestS3ApiUtils(unittest.TestCase):
             time.tzset()
 
 
+class TestS3Timestamp(unittest.TestCase):
+    def test_s3xmlformat(self):
+        expected = '1970-01-01T00:00:01.000Z'
+        # integer
+        ts = utils.S3Timestamp(1)
+        self.assertEqual(expected, ts.s3xmlformat)
+        # milliseconds unit should be rounded up
+        expected = '1970-01-01T00:00:02.000Z'
+        ts = utils.S3Timestamp(1.1)
+        self.assertEqual(expected, ts.s3xmlformat)
+        # float (microseconds) should be rounded up too
+        ts = utils.S3Timestamp(1.000001)
+        self.assertEqual(expected, ts.s3xmlformat)
+        # bigger float (milliseconds) should be rounded up too
+        ts = utils.S3Timestamp(1.9)
+        self.assertEqual(expected, ts.s3xmlformat)
+
+    def test_from_s3xmlformat(self):
+        ts = utils.S3Timestamp.from_s3xmlformat('2014-06-10T22:47:32.000Z')
+        self.assertIsInstance(ts, utils.S3Timestamp)
+        self.assertEqual(1402440452, float(ts))
+        self.assertEqual('2014-06-10T22:47:32.000000', ts.isoformat)
+
+        ts = utils.S3Timestamp.from_s3xmlformat('1970-01-01T00:00:00.000Z')
+        self.assertIsInstance(ts, utils.S3Timestamp)
+        self.assertEqual(0.0, float(ts))
+        self.assertEqual('1970-01-01T00:00:00.000000', ts.isoformat)
+
+        ts = utils.S3Timestamp(1402440452.0)
+        self.assertIsInstance(ts, utils.S3Timestamp)
+        ts1 = utils.S3Timestamp.from_s3xmlformat(ts.s3xmlformat)
+        self.assertIsInstance(ts1, utils.S3Timestamp)
+        self.assertEqual(ts, ts1)
+
+    def test_from_isoformat(self):
+        ts = utils.S3Timestamp.from_isoformat('2014-06-10T22:47:32.054580')
+        self.assertIsInstance(ts, utils.S3Timestamp)
+        self.assertEqual(1402440452.05458, float(ts))
+        self.assertEqual('2014-06-10T22:47:32.054580', ts.isoformat)
+        self.assertEqual('2014-06-10T22:47:33.000Z', ts.s3xmlformat)
+
+
 class TestConfig(unittest.TestCase):
 
     def _assert_defaults(self, conf):
@@ -305,6 +305,21 @@ class TestTimestamp(unittest.TestCase):
         for value in test_values:
             self.assertEqual(utils.Timestamp(value).isoformat, expected)
 
+    def test_from_isoformat(self):
+        ts = utils.Timestamp.from_isoformat('2014-06-10T22:47:32.054580')
+        self.assertIsInstance(ts, utils.Timestamp)
+        self.assertEqual(1402440452.05458, float(ts))
+        self.assertEqual('2014-06-10T22:47:32.054580', ts.isoformat)
+
+        ts = utils.Timestamp.from_isoformat('1970-01-01T00:00:00.000000')
+        self.assertIsInstance(ts, utils.Timestamp)
+        self.assertEqual(0.0, float(ts))
+        self.assertEqual('1970-01-01T00:00:00.000000', ts.isoformat)
+
+        ts = utils.Timestamp(1402440452.05458)
+        self.assertIsInstance(ts, utils.Timestamp)
+        self.assertEqual(ts, utils.Timestamp.from_isoformat(ts.isoformat))
+
     def test_ceil(self):
         self.assertEqual(0.0, utils.Timestamp(0).ceil())
         self.assertEqual(1.0, utils.Timestamp(0.00001).ceil())