2012-03-19 13:45:34 -05:00
|
|
|
# Copyright (c) 2010-2012 OpenStack, LLC.
|
2010-07-12 17:03:45 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
|
|
# implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
from __future__ import with_statement
|
|
|
|
import cPickle as pickle
|
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import unittest
|
2012-11-12 18:45:26 -08:00
|
|
|
import urlparse
|
2012-08-24 20:20:14 +03:00
|
|
|
import signal
|
2013-04-06 01:35:58 +00:00
|
|
|
from contextlib import contextmanager, nested
|
2010-07-12 17:03:45 -05:00
|
|
|
from gzip import GzipFile
|
|
|
|
from shutil import rmtree
|
2013-01-30 16:33:28 +11:00
|
|
|
import time
|
2013-03-20 19:26:45 -07:00
|
|
|
from urllib import quote
|
2010-10-11 17:33:11 -05:00
|
|
|
from hashlib import md5
|
2010-11-24 14:48:03 -08:00
|
|
|
from tempfile import mkdtemp
|
2013-02-07 22:07:18 -08:00
|
|
|
import random
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2013-04-06 01:35:58 +00:00
|
|
|
import mock
|
2013-03-26 20:42:26 +00:00
|
|
|
from eventlet import sleep, spawn, wsgi, listen
|
2010-07-12 17:03:45 -05:00
|
|
|
import simplejson
|
|
|
|
|
2013-03-20 19:26:45 -07:00
|
|
|
from test.unit import connect_tcp, readuntil2crlfs, FakeLogger, fake_http_connect
|
2010-07-12 17:03:45 -05:00
|
|
|
from swift.proxy import server as proxy_server
|
|
|
|
from swift.account import server as account_server
|
|
|
|
from swift.container import server as container_server
|
|
|
|
from swift.obj import server as object_server
|
|
|
|
from swift.common import ring
|
2012-08-23 12:38:09 -07:00
|
|
|
from swift.common.exceptions import ChunkReadTimeout
|
2010-07-12 17:03:45 -05:00
|
|
|
from swift.common.constraints import MAX_META_NAME_LENGTH, \
|
2012-09-05 20:49:50 -07:00
|
|
|
MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \
|
|
|
|
MAX_FILE_SIZE, MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH
|
2012-11-12 23:10:08 -08:00
|
|
|
from swift.common import utils
|
2010-07-12 17:03:45 -05:00
|
|
|
from swift.common.utils import mkdirs, normalize_timestamp, NullLogger
|
2011-01-19 03:56:13 +00:00
|
|
|
from swift.common.wsgi import monkey_patch_mimetools
|
2012-08-23 12:38:09 -07:00
|
|
|
from swift.proxy.controllers.obj import SegmentedIterable
|
|
|
|
from swift.proxy.controllers.base import get_container_memcache_key, \
|
2013-01-15 19:31:42 +00:00
|
|
|
get_account_memcache_key, cors_validation
|
2012-08-23 12:38:09 -07:00
|
|
|
import swift.proxy.controllers
|
2012-09-04 14:02:19 -07:00
|
|
|
from swift.common.swob import Request, Response, HTTPNotFound, \
|
|
|
|
HTTPUnauthorized
|
2010-07-12 17:03:45 -05:00
|
|
|
|
|
|
|
# mocks
# Echo root-logger output to stdout so server chatter from the in-process
# cluster shows up in test output when a test fails.
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
|
|
|
|
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2012-09-25 19:23:31 +00:00
|
|
|
# Running count of live swob.Request objects while the patched hooks below
# are installed; used to detect Request leaks across tests.
_request_instances = 0


def request_init(self, *args, **kwargs):
    """Replacement ``Request.__init__`` that counts instantiations.

    Delegates to the saved original initializer first, then bumps the
    module-level counter (so a failed init is never counted).
    """
    global _request_instances
    self._orig_init(*args, **kwargs)
    _request_instances = _request_instances + 1
|
|
|
|
|
|
|
|
|
|
|
|
def request_del(self):
    """Replacement ``Request.__del__`` that decrements the instance counter.

    Calls through to any pre-existing ``__del__`` first, then records that
    one fewer Request object is alive.
    """
    global _request_instances
    original_del = self._orig_del
    if original_del:
        original_del()
    _request_instances = _request_instances - 1
|
|
|
|
|
|
|
|
|
2011-01-19 03:56:13 +00:00
|
|
|
def setup():
    """Spin up an in-process Swift cluster for the module's tests.

    Starts one proxy, two account, two container and two object servers on
    ephemeral localhost ports via eventlet, writes matching two-device ring
    files into a fresh temp dir, then creates account 'a' and container 'c'
    so individual tests can assume they exist.  Fills in the module globals
    (_testdir, _test_servers, _test_sockets, _test_coros, ...) that
    teardown() later cleans up.
    """
    utils.HASH_PATH_SUFFIX = 'endcap'
    global _testdir, _test_servers, _test_sockets, \
        _orig_container_listing_limit, _test_coros
    # Install the instance-counting Request hooks (see request_init/del)
    # so tests can detect leaked Request objects.
    Request._orig_init = Request.__init__
    Request.__init__ = request_init
    Request._orig_del = getattr(Request, '__del__', None)
    Request.__del__ = request_del
    monkey_patch_mimetools()
    # Since we're starting up a lot here, we're going to test more than
    # just chunked puts; we're also going to test parts of
    # proxy_server.Application we couldn't get to easily otherwise.
    _testdir = \
        os.path.join(mkdtemp(), 'tmp_test_proxy_server_chunked')
    mkdirs(_testdir)
    rmtree(_testdir)
    mkdirs(os.path.join(_testdir, 'sda1'))
    mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
    mkdirs(os.path.join(_testdir, 'sdb1'))
    mkdirs(os.path.join(_testdir, 'sdb1', 'tmp'))
    # Remember the listing limit so teardown() can restore it after tests
    # that shrink it.
    _orig_container_listing_limit = \
        swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT
    conf = {'devices': _testdir, 'swift_dir': _testdir,
            'mount_check': 'false', 'allowed_headers':
            'content-encoding, x-object-manifest, content-disposition, foo',
            'allow_versions': 'True'}
    prolis = listen(('localhost', 0))
    acc1lis = listen(('localhost', 0))
    acc2lis = listen(('localhost', 0))
    con1lis = listen(('localhost', 0))
    con2lis = listen(('localhost', 0))
    obj1lis = listen(('localhost', 0))
    obj2lis = listen(('localhost', 0))
    _test_sockets = \
        (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis)
    # Two-replica rings (partition power 30): sda1/sdb1 split across the
    # two servers of each type.
    pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                  'port': acc1lis.getsockname()[1]},
                 {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                  'port': acc2lis.getsockname()[1]}], 30),
                GzipFile(os.path.join(_testdir, 'account.ring.gz'), 'wb'))
    pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                  'port': con1lis.getsockname()[1]},
                 {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                  'port': con2lis.getsockname()[1]}], 30),
                GzipFile(os.path.join(_testdir, 'container.ring.gz'), 'wb'))
    pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                  'port': obj1lis.getsockname()[1]},
                 {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                  'port': obj2lis.getsockname()[1]}], 30),
                GzipFile(os.path.join(_testdir, 'object.ring.gz'), 'wb'))
    prosrv = proxy_server.Application(conf, FakeMemcacheReturnsNone())
    acc1srv = account_server.AccountController(conf)
    acc2srv = account_server.AccountController(conf)
    con1srv = container_server.ContainerController(conf)
    con2srv = container_server.ContainerController(conf)
    obj1srv = object_server.ObjectController(conf)
    obj2srv = object_server.ObjectController(conf)
    _test_servers = \
        (prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv)
    nl = NullLogger()
    prospa = spawn(wsgi.server, prolis, prosrv, nl)
    acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl)
    acc2spa = spawn(wsgi.server, acc2lis, acc2srv, nl)
    con1spa = spawn(wsgi.server, con1lis, con1srv, nl)
    con2spa = spawn(wsgi.server, con2lis, con2srv, nl)
    obj1spa = spawn(wsgi.server, obj1lis, obj1srv, nl)
    obj2spa = spawn(wsgi.server, obj2lis, obj2srv, nl)
    _test_coros = \
        (prospa, acc1spa, acc2spa, con1spa, con2spa, obj1spa, obj2spa)
    # Create account
    ts = normalize_timestamp(time.time())
    partition, nodes = prosrv.account_ring.get_nodes('a')
    for node in nodes:
        # PUT the account on each replica directly so it exists everywhere
        # before any request goes through the proxy.
        conn = swift.proxy.controllers.obj.http_connect(node['ip'],
                                                        node['port'],
                                                        node['device'],
                                                        partition, 'PUT', '/a',
                                                        {'X-Timestamp': ts,
                                                         'x-trans-id': 'test'})
        resp = conn.getresponse()
        assert(resp.status == 201)
    # Create container
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/c HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Auth-Token: t\r\n'
             'Content-Length: 0\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    assert(headers[:len(exp)] == exp)
|
|
|
|
|
|
|
|
|
|
|
|
def teardown():
    """Undo setup(): kill the server coroutines, restore the patched
    container listing limit and Request hooks, and remove the temp dir."""
    for server in _test_coros:
        server.kill()
    # Some tests shrink the listing limit; put the original back.
    swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = \
        _orig_container_listing_limit
    rmtree(os.path.dirname(_testdir))
    # Restore the original Request methods saved in setup().
    Request.__init__ = Request._orig_init
    if Request._orig_del:
        Request.__del__ = Request._orig_del
|
2011-01-19 03:56:13 +00:00
|
|
|
|
|
|
|
|
2013-01-15 19:31:42 +00:00
|
|
|
def sortHeaderNames(headerNames):
    """Return the given string of header names sorted.

    headerNames: a comma-delimited list of header names
    """
    trimmed = [part.strip() for part in headerNames.split(',')]
    return ', '.join(sorted(name for name in trimmed if name))
|
|
|
|
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
class FakeRing(object):
    """Minimal in-memory stand-in for a Swift ring.

    Every name maps to partition 1; device dicts are fabricated on demand
    and cached in self.devs.  max_more_nodes controls how many handoff
    nodes get_more_nodes() will yield (0 by default, i.e. none).

    NOTE: uses range() rather than the Python-2-only xrange() — identical
    loop behavior under Python 2, and keeps the fake usable on Python 3.
    """

    def __init__(self, replicas=3):
        # 9 total nodes (6 more past the initial 3) is the cap, no matter if
        # this is set higher, or R^2 for R replicas
        self.replicas = replicas
        self.max_more_nodes = 0
        self.devs = {}

    def set_replicas(self, replicas):
        """Change the replica count and drop any fabricated devices."""
        self.replicas = replicas
        self.devs = {}

    @property
    def replica_count(self):
        return self.replicas

    def get_part(self, account, container=None, obj=None):
        # Everything lives in partition 1.
        return 1

    def get_nodes(self, account, container=None, obj=None):
        """Return (partition, nodes), fabricating and caching one device
        dict per replica (ip 10.0.0.x, port 1000+x, device sda/sdb/...)."""
        devs = []
        for x in range(self.replicas):
            devs.append(self.devs.get(x))
            if devs[x] is None:
                self.devs[x] = devs[x] = \
                    {'ip': '10.0.0.%s' % x,
                     'port': 1000 + x,
                     'device': 'sd' + (chr(ord('a') + x)),
                     'id': x}
        return 1, devs

    def get_part_nodes(self, part):
        return self.get_nodes('blah')[1]

    def get_more_nodes(self, nodes):
        # replicas^2 is the true cap
        for x in range(self.replicas, min(self.replicas + self.max_more_nodes,
                                          self.replicas * self.replicas)):
            yield {'ip': '10.0.0.%s' % x, 'port': 1000 + x, 'device': 'sda'}
|
|
|
|
|
|
|
|
|
|
|
|
class FakeMemcache(object):
    """Dict-backed stand-in for swift's memcache client."""

    def __init__(self):
        self.store = {}

    def get(self, key):
        """Return the cached value for key, or None on a miss."""
        return self.store.get(key)

    def keys(self):
        """Return the currently cached keys."""
        return self.store.keys()

    def set(self, key, value, time=0):
        """Cache value under key; the time argument is accepted but
        ignored.  Always reports success."""
        self.store[key] = value
        return True

    def incr(self, key, time=0):
        """Add one to the counter at key (starting from 0 when absent)
        and return the new value."""
        new_value = self.store.get(key, 0) + 1
        self.store[key] = new_value
        return new_value

    @contextmanager
    def soft_lock(self, key, timeout=0, retries=5):
        """No-op lock: immediately yields success."""
        yield True

    def delete(self, key):
        """Drop key from the store; deleting a missing key is not an
        error.  Always reports success."""
        try:
            del self.store[key]
        except Exception:
            pass
        return True
|
|
|
|
|
|
|
|
|
|
|
|
class FakeMemcacheReturnsNone(FakeMemcache):
    """FakeMemcache variant whose get() always misses."""

    def get(self, key):
        # Returns None as the timestamp of the container; assumes we're only
        # using the FakeMemcache for container existence checks.
        return None
|
|
|
|
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
@contextmanager
def save_globals():
    """Snapshot the proxy controllers' http_connect (and the Controller
    class's account_info) and restore them on exit, so tests can stub
    them freely via set_http_connect().

    Yields True; restoration happens in the finally block even when the
    test body raises.
    """
    orig_http_connect = getattr(swift.proxy.controllers.base, 'http_connect',
                                None)
    orig_account_info = getattr(swift.proxy.controllers.Controller,
                                'account_info', None)
    try:
        yield True
    finally:
        swift.proxy.controllers.Controller.account_info = orig_account_info
        # The same http_connect is patched into every controller module,
        # so one saved original restores them all.
        swift.proxy.controllers.base.http_connect = orig_http_connect
        swift.proxy.controllers.obj.http_connect = orig_http_connect
        swift.proxy.controllers.account.http_connect = orig_http_connect
        swift.proxy.controllers.container.http_connect = orig_http_connect
|
|
|
|
|
|
|
|
|
|
|
|
def set_http_connect(*args, **kwargs):
    """Install fake_http_connect(*args, **kwargs) as http_connect in every
    proxy controller module (use inside save_globals() so the originals
    are restored afterwards)."""
    new_connect = fake_http_connect(*args, **kwargs)
    for controller_module in (swift.proxy.controllers.base,
                              swift.proxy.controllers.obj,
                              swift.proxy.controllers.account,
                              swift.proxy.controllers.container):
        controller_module.http_connect = new_connect
|
|
|
|
|
|
|
|
|
2011-08-31 06:08:59 -07:00
|
|
|
# tests
|
2010-11-17 21:40:12 +00:00
|
|
|
class TestController(unittest.TestCase):
|
|
|
|
|
|
|
|
    def setUp(self):
        """Wire a proxy Application to fake rings and memcache, and keep a
        base Controller plus canned account/container names and ACL strings
        for the tests to use."""
        self.account_ring = FakeRing()
        self.container_ring = FakeRing()
        self.memcache = FakeMemcache()

        app = proxy_server.Application(None, self.memcache,
                                       account_ring=self.account_ring,
                                       container_ring=self.container_ring,
                                       object_ring=FakeRing())
        self.controller = swift.proxy.controllers.Controller(app)

        self.account = 'some_account'
        self.container = 'some_container'
        self.read_acl = 'read_acl'
        self.write_acl = 'write_acl'
|
|
|
|
|
2013-03-17 07:30:00 +08:00
|
|
|
def test_transfer_headers(self):
|
|
|
|
src_headers = {'x-remove-base-meta-owner': 'x',
|
|
|
|
'x-base-meta-size': '151M',
|
|
|
|
'new-owner': 'Kun'}
|
|
|
|
dst_headers = {'x-base-meta-owner': 'Gareth',
|
|
|
|
'x-base-meta-size': '150M'}
|
|
|
|
self.controller.transfer_headers(src_headers, dst_headers)
|
|
|
|
expected_headers = {'x-base-meta-owner': '',
|
|
|
|
'x-base-meta-size': '151M'}
|
|
|
|
self.assertEquals(dst_headers, expected_headers)
|
|
|
|
|
2010-11-30 22:40:44 +00:00
|
|
|
    def check_account_info_return(self, partition, nodes, is_none=False):
        """Assert that account_info's (partition, nodes) match the account
        ring's answer for self.account — or are both None when is_none."""
        if is_none:
            p, n = None, None
        else:
            p, n = self.account_ring.get_nodes(self.account)
        self.assertEqual(p, partition)
        self.assertEqual(n, nodes)
|
|
|
|
|
2013-03-12 19:15:35 +00:00
|
|
|
    def test_account_info_container_count(self):
        """account_info must return container_count as an int whether the
        backend or memcache supplied it as an int or a string."""
        # int count straight from the backend response
        with save_globals():
            set_http_connect(200, count=123)
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.assertEquals(count, 123)
        # string count from the backend
        with save_globals():
            set_http_connect(200, count='123')
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.assertEquals(count, 123)
        # int count served from memcache
        with save_globals():
            cache_key = get_account_memcache_key(self.account)
            account_info = {'status': 200, 'container_count': 1234}
            self.memcache.set(cache_key, account_info)
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.assertEquals(count, 1234)
        # string count served from memcache
        with save_globals():
            cache_key = get_account_memcache_key(self.account)
            account_info = {'status': 200, 'container_count': '1234'}
            self.memcache.set(cache_key, account_info)
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.assertEquals(count, 1234)
|
|
|
|
|
2011-04-11 16:26:50 -07:00
|
|
|
    def test_make_requests(self):
        """Exercise _make_request with a connection stub that raises a
        timeout; no assertion — the call just must not blow up."""
        with save_globals():
            set_http_connect(200)
            partition, nodes, count = \
                self.controller.account_info(self.account)
            # Queue a connection configured to raise a timeout exception.
            set_http_connect(201, raise_timeout_exc=True)
            self.controller._make_request(
                nodes, partition, 'POST', '/', '', '',
                self.controller.app.logger.thread_locals)
|
2011-04-11 16:26:50 -07:00
|
|
|
|
2010-11-30 22:40:44 +00:00
|
|
|
    # tests if 200 is cached and used
    def test_account_info_200(self):
        """A 200 account response is written to memcache and then served
        from it on the next call (no backend responses queued)."""
        with save_globals():
            set_http_connect(200)
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.check_account_info_return(partition, nodes)
            self.assertEquals(count, 12345)

            # The info should now be cached under the account's cache key.
            cache_key = get_account_memcache_key(self.account)
            container_info = {'status': 200,
                              'container_count': 12345,
                              'total_object_count': None,
                              'bytes': None,
                              'meta': {}}
            self.assertEquals(container_info,
                              self.memcache.get(cache_key))

            # No backend responses queued: this call must be a cache hit.
            set_http_connect()
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.check_account_info_return(partition, nodes)
            self.assertEquals(count, 12345)
|
2010-11-17 21:40:12 +00:00
|
|
|
|
2010-11-30 22:40:44 +00:00
|
|
|
    # tests if 404 is cached and used
    def test_account_info_404(self):
        """A 404 from all replicas is cached (count 0) and then served
        from memcache without hitting the backend again."""
        with save_globals():
            set_http_connect(404, 404, 404)
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.check_account_info_return(partition, nodes, True)
            self.assertEquals(count, None)

            cache_key = get_account_memcache_key(self.account)
            container_info = {'status': 404,
                              'container_count': 0,
                              'total_object_count': None,
                              'bytes': None,
                              'meta': {}}
            self.assertEquals(container_info,
                              self.memcache.get(cache_key))

            # No backend responses queued: must come from cache.
            set_http_connect()
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.check_account_info_return(partition, nodes, True)
            self.assertEquals(count, None)
|
2010-11-30 22:40:44 +00:00
|
|
|
|
|
|
|
    # tests if some http status codes are not cached
    def test_account_info_no_cache(self):
        """Mixed error statuses (503/507/404 combinations) must leave
        memcache empty and return no partition/nodes/count."""
        def test(*status_list):
            set_http_connect(*status_list)
            partition, nodes, count = \
                self.controller.account_info(self.account)
            self.assertEqual(len(self.memcache.keys()), 0)
            self.check_account_info_return(partition, nodes, True)
            self.assertEquals(count, None)

        with save_globals():
            test(503, 404, 404)
            test(404, 404, 503)
            test(404, 507, 503)
            test(503, 503, 503)
|
|
|
|
|
2011-06-10 15:55:25 +00:00
|
|
|
def test_account_info_account_autocreate(self):
|
|
|
|
with save_globals():
|
2011-06-10 16:56:53 +00:00
|
|
|
self.memcache.store = {}
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(404, 404, 404, 201, 201, 201)
|
2012-03-14 17:30:02 +00:00
|
|
|
partition, nodes, count = \
|
2011-06-10 15:55:25 +00:00
|
|
|
self.controller.account_info(self.account, autocreate=False)
|
|
|
|
self.check_account_info_return(partition, nodes, is_none=True)
|
2012-03-14 17:30:02 +00:00
|
|
|
self.assertEquals(count, None)
|
2011-06-10 15:55:25 +00:00
|
|
|
|
2011-06-10 16:56:53 +00:00
|
|
|
self.memcache.store = {}
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(404, 404, 404, 201, 201, 201)
|
2012-03-14 17:30:02 +00:00
|
|
|
partition, nodes, count = \
|
2011-06-10 15:55:25 +00:00
|
|
|
self.controller.account_info(self.account)
|
|
|
|
self.check_account_info_return(partition, nodes, is_none=True)
|
2012-03-14 17:30:02 +00:00
|
|
|
self.assertEquals(count, None)
|
2011-06-10 15:55:25 +00:00
|
|
|
|
2011-06-10 16:56:53 +00:00
|
|
|
self.memcache.store = {}
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(404, 404, 404, 201, 201, 201)
|
2012-03-14 17:30:02 +00:00
|
|
|
partition, nodes, count = \
|
2011-06-10 15:55:25 +00:00
|
|
|
self.controller.account_info(self.account, autocreate=True)
|
|
|
|
self.check_account_info_return(partition, nodes)
|
2012-03-14 17:30:02 +00:00
|
|
|
self.assertEquals(count, 0)
|
2011-06-10 15:55:25 +00:00
|
|
|
|
2011-06-10 16:56:53 +00:00
|
|
|
self.memcache.store = {}
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(404, 404, 404, 503, 201, 201)
|
2012-03-14 17:30:02 +00:00
|
|
|
partition, nodes, count = \
|
2011-06-10 16:56:53 +00:00
|
|
|
self.controller.account_info(self.account, autocreate=True)
|
|
|
|
self.check_account_info_return(partition, nodes)
|
2012-03-14 17:30:02 +00:00
|
|
|
self.assertEquals(count, 0)
|
2011-06-10 16:56:53 +00:00
|
|
|
|
|
|
|
self.memcache.store = {}
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(404, 404, 404, 503, 201, 503)
|
2011-06-10 16:56:53 +00:00
|
|
|
exc = None
|
2012-08-17 19:44:21 +08:00
|
|
|
partition, nodes, count = \
|
|
|
|
self.controller.account_info(self.account, autocreate=True)
|
|
|
|
self.check_account_info_return(partition, nodes, is_none=True)
|
|
|
|
self.assertEquals(None, count)
|
|
|
|
|
|
|
|
self.memcache.store = {}
|
2012-08-20 22:51:46 -07:00
|
|
|
set_http_connect(404, 404, 404, 403, 403, 403)
|
2012-08-17 19:44:21 +08:00
|
|
|
exc = None
|
|
|
|
partition, nodes, count = \
|
|
|
|
self.controller.account_info(self.account, autocreate=True)
|
|
|
|
self.check_account_info_return(partition, nodes, is_none=True)
|
|
|
|
self.assertEquals(None, count)
|
2012-10-07 14:28:41 +11:00
|
|
|
|
2012-08-17 19:44:21 +08:00
|
|
|
self.memcache.store = {}
|
2012-08-20 22:51:46 -07:00
|
|
|
set_http_connect(404, 404, 404, 409, 409, 409)
|
2012-08-17 19:44:21 +08:00
|
|
|
exc = None
|
|
|
|
partition, nodes, count = \
|
|
|
|
self.controller.account_info(self.account, autocreate=True)
|
|
|
|
self.check_account_info_return(partition, nodes, is_none=True)
|
|
|
|
self.assertEquals(None, count)
|
2011-06-10 16:56:53 +00:00
|
|
|
|
2010-11-30 22:40:44 +00:00
|
|
|
def check_container_info_return(self, ret, is_none=False):
|
|
|
|
if is_none:
|
|
|
|
partition, nodes, read_acl, write_acl = None, None, None, None
|
|
|
|
else:
|
|
|
|
partition, nodes = self.container_ring.get_nodes(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
read_acl, write_acl = self.read_acl, self.write_acl
|
2012-09-07 11:44:19 -07:00
|
|
|
self.assertEqual(partition, ret['partition'])
|
|
|
|
self.assertEqual(nodes, ret['nodes'])
|
|
|
|
self.assertEqual(read_acl, ret['read_acl'])
|
|
|
|
self.assertEqual(write_acl, ret['write_acl'])
|
2010-11-30 22:40:44 +00:00
|
|
|
|
|
|
|
def test_container_info_invalid_account(self):
|
2011-06-05 23:44:39 +00:00
|
|
|
def account_info(self, account, autocreate=False):
|
2010-11-30 22:40:44 +00:00
|
|
|
return None, None
|
2010-11-17 21:40:12 +00:00
|
|
|
|
2010-11-30 22:40:44 +00:00
|
|
|
with save_globals():
|
2013-01-30 16:33:28 +11:00
|
|
|
swift.proxy.controllers.Controller.account_info = account_info
|
2010-11-30 22:40:44 +00:00
|
|
|
ret = self.controller.container_info(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
self.check_container_info_return(ret, True)
|
|
|
|
|
|
|
|
# tests if 200 is cached and used
|
|
|
|
def test_container_info_200(self):
|
2011-06-05 23:44:39 +00:00
|
|
|
def account_info(self, account, autocreate=False):
|
2012-03-14 17:30:02 +00:00
|
|
|
return True, True, 0
|
2010-11-17 21:40:12 +00:00
|
|
|
|
|
|
|
with save_globals():
|
2010-11-30 22:40:44 +00:00
|
|
|
headers = {'x-container-read': self.read_acl,
|
2012-10-07 14:28:41 +11:00
|
|
|
'x-container-write': self.write_acl}
|
2013-01-30 16:33:28 +11:00
|
|
|
swift.proxy.controllers.Controller.account_info = account_info
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, headers=headers)
|
2010-11-30 22:40:44 +00:00
|
|
|
ret = self.controller.container_info(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
self.check_container_info_return(ret)
|
|
|
|
|
2012-08-23 12:38:09 -07:00
|
|
|
cache_key = get_container_memcache_key(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
cache_value = self.memcache.get(cache_key)
|
2012-01-04 14:43:16 +08:00
|
|
|
self.assertTrue(isinstance(cache_value, dict))
|
2010-11-30 22:40:44 +00:00
|
|
|
self.assertEquals(200, cache_value.get('status'))
|
|
|
|
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect()
|
2010-11-30 22:40:44 +00:00
|
|
|
ret = self.controller.container_info(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
self.check_container_info_return(ret)
|
|
|
|
|
|
|
|
# tests if 404 is cached and used
|
|
|
|
def test_container_info_404(self):
|
2011-06-05 23:44:39 +00:00
|
|
|
def account_info(self, account, autocreate=False):
|
2012-03-14 17:30:02 +00:00
|
|
|
return True, True, 0
|
2010-11-17 21:40:12 +00:00
|
|
|
|
2010-11-30 22:40:44 +00:00
|
|
|
with save_globals():
|
2013-01-30 16:33:28 +11:00
|
|
|
swift.proxy.controllers.Controller.account_info = account_info
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(404, 404, 404)
|
2010-11-30 22:40:44 +00:00
|
|
|
ret = self.controller.container_info(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
self.check_container_info_return(ret, True)
|
|
|
|
|
2012-08-23 12:38:09 -07:00
|
|
|
cache_key = get_container_memcache_key(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
cache_value = self.memcache.get(cache_key)
|
2012-01-04 14:43:16 +08:00
|
|
|
self.assertTrue(isinstance(cache_value, dict))
|
2010-11-30 22:40:44 +00:00
|
|
|
self.assertEquals(404, cache_value.get('status'))
|
|
|
|
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect()
|
2010-11-30 22:40:44 +00:00
|
|
|
ret = self.controller.container_info(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
self.check_container_info_return(ret, True)
|
|
|
|
|
|
|
|
# tests if some http status codes are not cached
|
|
|
|
def test_container_info_no_cache(self):
|
|
|
|
def test(*status_list):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*status_list)
|
2010-11-30 22:40:44 +00:00
|
|
|
ret = self.controller.container_info(self.account,
|
2012-10-07 14:28:41 +11:00
|
|
|
self.container)
|
2010-11-30 22:40:44 +00:00
|
|
|
self.assertEqual(len(self.memcache.keys()), 0)
|
|
|
|
self.check_container_info_return(ret, True)
|
|
|
|
|
|
|
|
with save_globals():
|
|
|
|
test(503, 404, 404)
|
|
|
|
test(404, 404, 503)
|
|
|
|
test(404, 507, 503)
|
|
|
|
test(503, 503, 503)
|
2010-07-19 11:54:11 +00:00
|
|
|
|
2011-02-10 14:59:52 -06:00
|
|
|
|
2010-07-19 11:54:11 +00:00
|
|
|
class TestProxyServer(unittest.TestCase):
    """Tests for proxy_server.Application request handling itself
    (exception handling, method validation, authorization hooks,
    content-length validation, host denial, and node timing)."""

    def test_unhandled_exception(self):
        # An exception escaping get_controller() must surface as a 500.

        class MyApp(proxy_server.Application):

            def get_controller(self, path):
                raise Exception('this shouldnt be caught')

        app = MyApp(None, FakeMemcache(), account_ring=FakeRing(),
                    container_ring=FakeRing(), object_ring=FakeRing())
        req = Request.blank('/account', environ={'REQUEST_METHOD': 'HEAD'})
        app.update_request(req)
        resp = app.handle_request(req)
        self.assertEquals(resp.status_int, 500)

    def test_internal_method_request(self):
        # A dunder name used as the request method must be rejected, not
        # dispatched to the controller's real __init__.
        baseapp = proxy_server.Application({},
                                           FakeMemcache(),
                                           container_ring=FakeRing(),
                                           object_ring=FakeRing(),
                                           account_ring=FakeRing())
        resp = baseapp.handle_request(
            Request.blank('/v1/a', environ={'REQUEST_METHOD': '__init__'}))
        self.assertEquals(resp.status, '405 Method Not Allowed')

    def test_inexistent_method_request(self):
        # A method name that matches no handler at all also yields a 405.
        baseapp = proxy_server.Application({},
                                           FakeMemcache(),
                                           container_ring=FakeRing(),
                                           account_ring=FakeRing(),
                                           object_ring=FakeRing())
        resp = baseapp.handle_request(
            Request.blank('/v1/a', environ={'REQUEST_METHOD': '!invalid'}))
        self.assertEquals(resp.status, '405 Method Not Allowed')

    def test_calls_authorize_allow(self):
        # swift.authorize callback must be invoked on an allowed request.
        called = [False]

        def authorize(req):
            called[0] = True
        with save_globals():
            set_http_connect(200)
            app = proxy_server.Application(None, FakeMemcache(),
                                           account_ring=FakeRing(),
                                           container_ring=FakeRing(),
                                           object_ring=FakeRing())
            req = Request.blank('/v1/a')
            req.environ['swift.authorize'] = authorize
            app.update_request(req)
            resp = app.handle_request(req)
        self.assert_(called[0])

    def test_calls_authorize_deny(self):
        # swift.authorize must also run when it denies the request.
        called = [False]

        def authorize(req):
            called[0] = True
            return HTTPUnauthorized(request=req)
        app = proxy_server.Application(None, FakeMemcache(),
                                       account_ring=FakeRing(),
                                       container_ring=FakeRing(),
                                       object_ring=FakeRing())
        req = Request.blank('/v1/a')
        req.environ['swift.authorize'] = authorize
        app.update_request(req)
        resp = app.handle_request(req)
        self.assert_(called[0])

    def test_negative_content_length(self):
        # Negative Content-Length values must be rejected with a 400.
        swift_dir = mkdtemp()
        try:
            baseapp = proxy_server.Application({'swift_dir': swift_dir},
                                               FakeMemcache(), FakeLogger(),
                                               FakeRing(), FakeRing(),
                                               FakeRing())
            resp = baseapp.handle_request(
                Request.blank('/', environ={'CONTENT_LENGTH': '-1'}))
            self.assertEquals(resp.status, '400 Bad Request')
            self.assertEquals(resp.body, 'Invalid Content-Length')
            resp = baseapp.handle_request(
                Request.blank('/', environ={'CONTENT_LENGTH': '-123'}))
            self.assertEquals(resp.status, '400 Bad Request')
            self.assertEquals(resp.body, 'Invalid Content-Length')
        finally:
            rmtree(swift_dir, ignore_errors=True)

    def test_denied_host_header(self):
        # A Host header listed in deny_host_headers must get a 403.
        swift_dir = mkdtemp()
        try:
            baseapp = proxy_server.Application({'swift_dir': swift_dir,
                                                'deny_host_headers':
                                                'invalid_host.com'},
                                               FakeMemcache(), FakeLogger(),
                                               FakeRing(), FakeRing(),
                                               FakeRing())
            resp = baseapp.handle_request(
                Request.blank('/v1/a/c/o',
                              environ={'HTTP_HOST': 'invalid_host.com'}))
            self.assertEquals(resp.status, '403 Forbidden')
        finally:
            rmtree(swift_dir, ignore_errors=True)

    def test_node_timing(self):
        # With sorting_method=timing the app records per-node response
        # times and sorts nodes fastest-first.
        baseapp = proxy_server.Application({'sorting_method': 'timing'},
                                           FakeMemcache(),
                                           container_ring=FakeRing(),
                                           object_ring=FakeRing(),
                                           account_ring=FakeRing())
        self.assertEquals(baseapp.node_timings, {})

        req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'})
        baseapp.update_request(req)
        resp = baseapp.handle_request(req)
        self.assertEquals(resp.status_int, 503)  # couldn't connect to anything
        exp_timings = {}
        self.assertEquals(baseapp.node_timings, exp_timings)

        # freeze proxy_server's clock so the recorded expiry is predictable;
        # the lambda is lazy, so binding `times` afterwards is fine
        proxy_server.time = lambda: times.pop(0)
        try:
            times = [time.time()]
            exp_timings = {'127.0.0.1': (0.1,
                           times[0] + baseapp.timing_expiry)}
            baseapp.set_node_timing({'ip': '127.0.0.1'}, 0.1)
            self.assertEquals(baseapp.node_timings, exp_timings)
        finally:
            proxy_server.time = time.time

        # disable shuffling so sort order depends only on recorded timings;
        # the node with a recorded (slower) timing sorts last
        proxy_server.shuffle = lambda l: l
        try:
            nodes = [{'ip': '127.0.0.1'}, {'ip': '127.0.0.2'},
                     {'ip': '127.0.0.3'}]
            res = baseapp.sort_nodes(nodes)
            exp_sorting = [{'ip': '127.0.0.2'}, {'ip': '127.0.0.3'},
                           {'ip': '127.0.0.1'}]
            self.assertEquals(res, exp_sorting)
        finally:
            proxy_server.shuffle = random.shuffle
|
|
|
|
|
2012-10-07 14:28:41 +11:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
class TestObjectController(unittest.TestCase):
|
|
|
|
|
|
|
|
def setUp(self):
|
2010-07-13 14:23:39 -07:00
|
|
|
self.app = proxy_server.Application(None, FakeMemcache(),
|
2012-10-07 14:28:41 +11:00
|
|
|
account_ring=FakeRing(),
|
|
|
|
container_ring=FakeRing(),
|
|
|
|
object_ring=FakeRing())
|
2011-01-26 14:31:33 -08:00
|
|
|
monkey_patch_mimetools()
|
2010-07-12 17:03:45 -05:00
|
|
|
|
Allow for multiple X-(Account|Container)-* headers.
When the number of account/container or container/object replicas are
different, Swift had a few misbehaviors. This commit fixes them.
* On an object PUT/POST/DELETE, if there were 3 object replicas and
only 2 container replicas, then only 2 requests would be made to
object servers. Now, 3 requests will be made, but the third won't
have any X-Container-* headers in it.
* On an object PUT/POST/DELETE, if there were 3 object replicas and 4
container replicas, then only 3/4 container servers would receive
immediate updates; the fourth would be ignored. Now one of the
object servers will receive multiple (comma-separated) values in the
X-Container-* headers and it will attempt to contact both of them.
One side effect is that multiple async_pendings may be written for
updates to the same object. They'll have differing timestamps,
though, so all but the newest will be deleted unread. To trigger
this behavior, you have to have more container replicas than object
replicas, 2 or more of the container servers must be down, and the
headers sent to one object server must reference 2 or more down
container servers; it's unlikely enough and the consequences are so
minor that it didn't seem worth fixing.
The situation with account/containers is analogous, only without the
async_pendings.
Change-Id: I98bc2de93fb6b2346d6de1d764213d7563653e8d
2012-12-12 17:47:04 -08:00
|
|
|
def tearDown(self):
|
|
|
|
self.app.account_ring.set_replicas(3)
|
|
|
|
self.app.container_ring.set_replicas(3)
|
|
|
|
self.app.object_ring.set_replicas(3)
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def assert_status_map(self, method, statuses, expected, raise_exc=False):
|
|
|
|
with save_globals():
|
|
|
|
kwargs = {}
|
|
|
|
if raise_exc:
|
|
|
|
kwargs['raise_exc'] = raise_exc
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses, **kwargs)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
2012-10-07 14:28:41 +11:00
|
|
|
req = Request.blank('/a/c/o',
|
|
|
|
headers={'Content-Length': '0',
|
|
|
|
'Content-Type': 'text/plain'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = method(req)
|
|
|
|
self.assertEquals(res.status_int, expected)
|
2010-10-11 17:33:11 -05:00
|
|
|
|
|
|
|
# repeat test
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses, **kwargs)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
2012-10-07 14:28:41 +11:00
|
|
|
req = Request.blank('/a/c/o',
|
|
|
|
headers={'Content-Length': '0',
|
|
|
|
'Content-Type': 'text/plain'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = method(req)
|
|
|
|
self.assertEquals(res.status_int, expected)
|
|
|
|
|
2012-08-24 20:20:14 +03:00
|
|
|
    def test_GET_newest_large_file(self):
        """GET with X-Newest on a 1 MiB object must return the full body
        without triggering any SIGPIPE (i.e. no abandoned backend
        connections writing to a closed pipe)."""
        # count SIGPIPEs delivered during the test
        calls = [0]

        def handler(_junk1, _junk2):
            calls[0] += 1

        # install the counting handler; restored in the finally block
        old_handler = signal.signal(signal.SIGPIPE, handler)
        try:
            # module-level fixtures: listening proxy socket and server
            prolis = _test_sockets[0]
            prosrv = _test_servers[0]
            sock = connect_tcp(('localhost', prolis.getsockname()[1]))
            fd = sock.makefile()
            obj = 'a' * (1024 * 1024)
            path = '/v1/a/c/o.large'
            # raw HTTP PUT of the large object through the real socket
            fd.write('PUT %s HTTP/1.1\r\n'
                     'Host: localhost\r\n'
                     'Connection: close\r\n'
                     'X-Storage-Token: t\r\n'
                     'Content-Length: %s\r\n'
                     'Content-Type: application/octet-stream\r\n'
                     '\r\n%s' % (path, str(len(obj)), obj))
            fd.flush()
            headers = readuntil2crlfs(fd)
            exp = 'HTTP/1.1 201'
            self.assertEqual(headers[:len(exp)], exp)
            # X-Newest forces the proxy to consult all replicas
            req = Request.blank(path,
                                environ={'REQUEST_METHOD': 'GET'},
                                headers={'Content-Type':
                                         'application/octet-stream',
                                         'X-Newest': 'true'})
            res = req.get_response(prosrv)
            self.assertEqual(res.status_int, 200)
            self.assertEqual(res.body, obj)
            # no SIGPIPE should have fired while draining the extra replicas
            self.assertEqual(calls[0], 0)
        finally:
            signal.signal(signal.SIGPIPE, old_handler)
|
2012-08-24 20:20:14 +03:00
|
|
|
|
2012-10-23 10:07:53 +02:00
|
|
|
def test_PUT_expect_header_zero_content_length(self):
|
|
|
|
test_errors = []
|
|
|
|
|
|
|
|
def test_connect(ipaddr, port, device, partition, method, path,
|
|
|
|
headers=None, query_string=None):
|
|
|
|
if path == '/a/c/o.jpg':
|
|
|
|
if 'expect' in headers or 'Expect' in headers:
|
|
|
|
test_errors.append('Expect was in headers for object '
|
|
|
|
'server!')
|
|
|
|
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
|
|
|
# The (201, -4) tuples in there have the effect of letting the
|
|
|
|
# initial connect succeed, after which getexpect() gets called and
|
|
|
|
# then the -4 makes the response of that actually be 201 instead of
|
|
|
|
# 100. Perfectly straightforward.
|
|
|
|
set_http_connect(200, 200, (201, -4), (201, -4), (201, -4),
|
|
|
|
give_connect=test_connect)
|
|
|
|
req = Request.blank('/a/c/o.jpg', {})
|
|
|
|
req.content_length = 0
|
|
|
|
self.app.update_request(req)
|
|
|
|
self.app.memcache.store = {}
|
|
|
|
res = controller.PUT(req)
|
|
|
|
self.assertEqual(test_errors, [])
|
|
|
|
self.assertTrue(res.status.startswith('201 '), res.status)
|
|
|
|
|
|
|
|
def test_PUT_expect_header_nonzero_content_length(self):
|
|
|
|
test_errors = []
|
|
|
|
|
|
|
|
def test_connect(ipaddr, port, device, partition, method, path,
|
|
|
|
headers=None, query_string=None):
|
|
|
|
if path == '/a/c/o.jpg':
|
|
|
|
if 'Expect' not in headers:
|
|
|
|
test_errors.append('Expect was not in headers for '
|
|
|
|
'non-zero byte PUT!')
|
|
|
|
|
|
|
|
with save_globals():
|
|
|
|
controller = \
|
|
|
|
proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg')
|
|
|
|
set_http_connect(200, 200, 201, 201, 201,
|
|
|
|
give_connect=test_connect)
|
|
|
|
req = Request.blank('/a/c/o.jpg', {})
|
|
|
|
req.content_length = 1
|
|
|
|
req.body = 'a'
|
|
|
|
self.app.update_request(req)
|
|
|
|
self.app.memcache.store = {}
|
|
|
|
res = controller.PUT(req)
|
|
|
|
self.assertTrue(res.status.startswith('201 '))
|
|
|
|
|
2012-11-01 00:05:42 -07:00
|
|
|
    def test_expirer_DELETE_on_versioned_object(self):
        """An expirer DELETE of a versioned object must not forward the
        X-If-Delete-At header to the backend DELETEs issued while
        restoring/removing versions."""
        test_errors = []

        def test_connect(ipaddr, port, device, partition, method, path,
                         headers=None, query_string=None):
            # fail the test if any backend DELETE carries the header
            if method == 'DELETE':
                if 'x-if-delete-at' in headers or 'X-If-Delete-At' in headers:
                    test_errors.append('X-If-Delete-At in headers')

        # one prior version in the versions container listing
        body = simplejson.dumps(
            [{"name": "001o/1",
              "hash": "x",
              "bytes": 0,
              "content_type": "text/plain",
              "last_modified": "1970-01-01T00:00:01.000000"}])
        # response bodies aligned with the 14 statuses below; only the
        # third request (the versions-container GET) returns a listing
        body_iter = ('', '', body, '', '', '', '', '', '', '', '', '', '', '')
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
            #                HEAD HEAD GET  GET  HEAD GET  GET  GET  PUT  PUT
            #                PUT  DEL  DEL  DEL
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 200, 201, 201,
                             201, 200, 200, 200,
                             give_connect=test_connect,
                             body_iter=body_iter,
                             headers={'x-versions-location': 'foo'})
            self.app.memcache.store = {}
            req = Request.blank('/a/c/o',
                                headers={'X-If-Delete-At': 1},
                                environ={'REQUEST_METHOD': 'DELETE'})
            self.app.update_request(req)
            res = controller.DELETE(req)
            self.assertEquals(test_errors, [])
|
|
|
|
|
2012-11-12 18:45:26 -08:00
|
|
|
def test_GET_manifest_no_segments(self):
|
2013-02-13 12:31:55 -08:00
|
|
|
for hdict in [{"X-Object-Manifest": "segments/seg"},
|
|
|
|
{"X-Static-Large-Object": "True"}]:
|
|
|
|
response_bodies = (
|
|
|
|
'', # HEAD /a
|
|
|
|
'', # HEAD /a/c
|
|
|
|
'', # GET manifest
|
|
|
|
simplejson.dumps([])) # GET empty listing
|
|
|
|
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(
|
|
|
|
self.app, 'a', 'c', 'manifest')
|
|
|
|
set_http_connect(
|
|
|
|
200, # HEAD /a
|
|
|
|
200, # HEAD /a/c
|
|
|
|
200, # GET manifest
|
|
|
|
200, # GET empty listing
|
|
|
|
headers=hdict,
|
|
|
|
body_iter=response_bodies)
|
2012-11-12 18:45:26 -08:00
|
|
|
|
2013-02-13 12:31:55 -08:00
|
|
|
req = Request.blank('/a/c/manifest')
|
|
|
|
resp = controller.GET(req)
|
|
|
|
self.assertEqual(resp.status_int, 200)
|
|
|
|
self.assertEqual(resp.body, '')
|
2012-11-12 18:45:26 -08:00
|
|
|
|
|
|
|
    def test_GET_manifest_limited_listing(self):
        """Dynamic-manifest GET that must page through segment listings.

        CONTAINER_LISTING_LIMIT is forced down to 2, so a five-segment
        manifest requires three listing GETs (plus a final empty one) with
         'marker' advancing each time.  Also covers a non-ASCII (snowman)
        segment name to check the marker is sent UTF-8 encoded.
        """
        listing1 = [{"hash": "454dfc73af632012ce3e6217dc464241",
                     "last_modified": "2012-11-08T04:05:37.866820",
                     "bytes": 2,
                     "name": "seg01",
                     "content_type": "application/octet-stream"},
                    {"hash": "474bab96c67528d42d5c0c52b35228eb",
                     "last_modified": "2012-11-08T04:05:37.846710",
                     "bytes": 2,
                     "name": "seg02",
                     "content_type": "application/octet-stream"}]

        listing2 = [{"hash": "116baa5508693d1d1ca36abdd9f9478b",
                     "last_modified": "2012-11-08T04:05:37.849510",
                     "bytes": 2,
                     "name": "seg03",
                     "content_type": "application/octet-stream"},
                    {"hash": "7bd6aaa1ef6013353f0420459574ac9d",
                     "last_modified": "2012-11-08T04:05:37.855180",
                     "bytes": 2,
                     "name": "seg04",
                     "content_type": "application/octet-stream"
                     }]

        listing3 = [{"hash": "6605f80e3cefaa24e9823544df4edbd6",
                     "last_modified": "2012-11-08T04:05:37.853710",
                     "bytes": 2,
                     "name": u'\N{SNOWMAN}seg05',
                     "content_type": "application/octet-stream"}]

        # Bodies are consumed in the same order as the statuses below.
        response_bodies = (
            '',                           # HEAD /a
            '',                           # HEAD /a/c
            '',                           # GET manifest
            simplejson.dumps(listing1),   # GET listing1
            'Aa',                         # GET seg01
            'Bb',                         # GET seg02
            simplejson.dumps(listing2),   # GET listing2
            'Cc',                         # GET seg03
            'Dd',                         # GET seg04
            simplejson.dumps(listing3),   # GET listing3
            'Ee',                         # GET seg05
            simplejson.dumps([]))         # GET final empty listing

        with save_globals():
            try:
                # Force pagination after every 2 listing entries.
                swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = 2
                controller = proxy_server.ObjectController(
                    self.app, 'a', 'c', 'manifest')

                requested = []

                def capture_requested_paths(ipaddr, port, device, partition,
                                            method, path, headers=None,
                                            query_string=None):
                    # Record (method, path, query dict) of every backend call.
                    qs_dict = dict(urlparse.parse_qsl(query_string or ''))
                    requested.append([method, path, qs_dict])

                set_http_connect(
                    200,    # HEAD /a
                    200,    # HEAD /a/c
                    200,    # GET manifest
                    200,    # GET listing1
                    200,    # GET seg01
                    200,    # GET seg02
                    200,    # GET listing2
                    200,    # GET seg03
                    200,    # GET seg04
                    200,    # GET listing3
                    200,    # GET seg05
                    200,    # GET final empty listing
                    headers={"X-Object-Manifest": "segments/seg"},
                    body_iter=response_bodies,
                    give_connect=capture_requested_paths)

                req = Request.blank('/a/c/manifest')
                resp = controller.GET(req)
                self.assertEqual(resp.status_int, 200)
                self.assertEqual(resp.body, 'AaBbCcDdEe')

                # Exact backend conversation, including 'marker' advancing
                # across the paginated listing requests.
                self.assertEqual(
                    requested,
                    [['HEAD', '/a', {}],
                     ['HEAD', '/a/c', {}],
                     ['GET', '/a/c/manifest', {}],
                     ['GET', '/a/segments',
                      {'format': 'json', 'prefix': 'seg'}],
                     ['GET', '/a/segments/seg01', {}],
                     ['GET', '/a/segments/seg02', {}],
                     ['GET', '/a/segments',
                      {'format': 'json', 'prefix': 'seg', 'marker': 'seg02'}],
                     ['GET', '/a/segments/seg03', {}],
                     ['GET', '/a/segments/seg04', {}],
                     ['GET', '/a/segments',
                      {'format': 'json', 'prefix': 'seg', 'marker': 'seg04'}],
                     ['GET', '/a/segments/\xe2\x98\x83seg05', {}],
                     ['GET', '/a/segments',
                      {'format': 'json', 'prefix': 'seg',
                       'marker': '\xe2\x98\x83seg05'}]])
            finally:
                # other tests in this file get very unhappy if this
                # isn't set back, which leads to time-wasting
                # debugging of other tests.
                # (_orig_container_listing_limit is captured at module
                # scope elsewhere in this file.)
                swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = \
                    _orig_container_listing_limit
|
|
|
|
2013-02-13 12:31:55 -08:00
|
|
|
    def test_GET_manifest_slo(self):
        """Happy-path GET of a static large object (SLO).

        The manifest GET carries X-Static-Large-Object and a content-type
        with a swift_bytes=4 parameter; the proxy must fetch each segment
        from its own container, concatenate the bodies, report the
        swift_bytes value as content-length, and strip the parameter from
        the returned content-type.
        """
        listing = [{"hash": "98568d540134639be4655198a36614a4",
                    "last_modified": "2012-11-08T04:05:37.866820",
                    "bytes": 2,
                    "name": "/d1/seg01",
                    "content_type": "application/octet-stream"},
                   {"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
                    "last_modified": "2012-11-08T04:05:37.846710",
                    "bytes": 2,
                    "name": "/d2/seg02",
                    "content_type": "application/octet-stream"}]

        response_bodies = (
            '',                           # HEAD /a
            '',                           # HEAD /a/c
            simplejson.dumps(listing),    # GET manifest
            'Aa',                         # GET seg01
            'Bb')                         # GET seg02
        with save_globals():
            controller = proxy_server.ObjectController(
                self.app, 'a', 'c', 'manifest')

            requested = []

            def capture_requested_paths(ipaddr, port, device, partition,
                                        method, path, headers=None,
                                        query_string=None):
                # Record (method, path, query dict) of every backend call.
                qs_dict = dict(urlparse.parse_qsl(query_string or ''))
                requested.append([method, path, qs_dict])

            set_http_connect(
                200,    # HEAD /a
                200,    # HEAD /a/c
                200,    # GET listing1
                200,    # GET seg01
                200,    # GET seg02
                # Only the manifest response advertises SLO-ness.
                headers=[{}, {}, {"X-Static-Large-Object": "True",
                         'content-type': 'text/html; swift_bytes=4'}, {}, {}],
                body_iter=response_bodies,
                give_connect=capture_requested_paths)

            req = Request.blank('/a/c/manifest')
            resp = controller.GET(req)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.body, 'AaBb')
            self.assertEqual(resp.content_length, 4)
            self.assertEqual(resp.content_type, 'text/html')

            self.assertEqual(
                requested,
                [['HEAD', '/a', {}],
                 ['HEAD', '/a/c', {}],
                 ['GET', '/a/c/manifest', {}],
                 ['GET', '/a/d1/seg01', {}],
                 ['GET', '/a/d2/seg02', {}]])
|
|
|
|
2013-04-01 13:16:46 -07:00
|
|
|
    def test_GET_slo_multipart_manifest(self):
        """GET with ?multipart-manifest=get returns the raw manifest.

        Instead of concatenating segments, the proxy must hand back the
        manifest's JSON listing itself (as application/json) and make no
        segment requests at all.
        """
        listing = [{"hash": "98568d540134639be4655198a36614a4",
                    "last_modified": "2012-11-08T04:05:37.866820",
                    "bytes": 2,
                    "name": "/d1/seg01",
                    "content_type": "application/octet-stream"},
                   {"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
                    "last_modified": "2012-11-08T04:05:37.846710",
                    "bytes": 2,
                    "name": "/d2/seg02",
                    "content_type": "application/octet-stream"}]
        json_listing = simplejson.dumps(listing)
        response_bodies = (
            '',              # HEAD /a
            '',              # HEAD /a/c
            json_listing)    # GET manifest
        with save_globals():
            controller = proxy_server.ObjectController(
                self.app, 'a', 'c', 'manifest')

            requested = []

            def capture_requested_paths(ipaddr, port, device, partition,
                                        method, path, headers=None,
                                        query_string=None):
                # Record (method, path, query dict) of every backend call.
                qs_dict = dict(urlparse.parse_qsl(query_string or ''))
                requested.append([method, path, qs_dict])

            set_http_connect(
                200,    # HEAD /a
                200,    # HEAD /a/c
                200,    # GET listing1
                headers={"X-Static-Large-Object": "True",
                         'content-type': 'text/html; swift_bytes=4'},
                body_iter=response_bodies,
                give_connect=capture_requested_paths)

            req = Request.blank('/a/c/manifest?multipart-manifest=get')
            resp = controller.GET(req)
            self.assertEqual(resp.status_int, 200)
            # Raw manifest body, served as JSON.
            self.assertEqual(resp.body, json_listing)
            self.assertEqual(resp.content_type, 'application/json')

            # No segment GETs — the query string is passed straight through.
            self.assertEqual(
                requested,
                [['HEAD', '/a', {}],
                 ['HEAD', '/a/c', {}],
                 ['GET', '/a/c/manifest', {'multipart-manifest': 'get'}]])
|
|
|
|
|
2013-04-03 14:15:26 -07:00
|
|
|
    def test_GET_slo_multipart_manifest_from_copy(self):
        """Raw-manifest GET issued as the source read of a COPY.

        Same backend setup as test_GET_slo_multipart_manifest, but the
        request carries x-copy-from.  In that case the manifest's original
        content-type (text/html, swift_bytes stripped) is preserved rather
        than being rewritten to application/json.
        """
        listing = [{"hash": "98568d540134639be4655198a36614a4",
                    "last_modified": "2012-11-08T04:05:37.866820",
                    "bytes": 2,
                    "name": "/d1/seg01",
                    "content_type": "application/octet-stream"},
                   {"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
                    "last_modified": "2012-11-08T04:05:37.846710",
                    "bytes": 2,
                    "name": "/d2/seg02",
                    "content_type": "application/octet-stream"}]
        json_listing = simplejson.dumps(listing)
        response_bodies = (
            '',              # HEAD /a
            '',              # HEAD /a/c
            json_listing)    # GET manifest
        with save_globals():
            controller = proxy_server.ObjectController(
                self.app, 'a', 'c', 'manifest')

            requested = []

            def capture_requested_paths(ipaddr, port, device, partition,
                                        method, path, headers=None,
                                        query_string=None):
                # Record (method, path, query dict) of every backend call.
                qs_dict = dict(urlparse.parse_qsl(query_string or ''))
                requested.append([method, path, qs_dict])

            set_http_connect(
                200,    # HEAD /a
                200,    # HEAD /a/c
                200,    # GET listing1
                headers={"X-Static-Large-Object": "True",
                         'content-type': 'text/html; swift_bytes=4'},
                body_iter=response_bodies,
                give_connect=capture_requested_paths)

            req = Request.blank('/a/c/manifest?multipart-manifest=get',
                                headers={'x-copy-from': '/a/c/manifest'})
            resp = controller.GET(req)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.body, json_listing)
            # Copy source keeps the stored content-type, not JSON.
            self.assertEqual(resp.content_type, 'text/html')

            self.assertEqual(
                requested,
                [['HEAD', '/a', {}],
                 ['HEAD', '/a/c', {}],
                 ['GET', '/a/c/manifest', {'multipart-manifest': 'get'}]])
|
|
|
|
|
2013-02-13 12:31:55 -08:00
|
|
|
    def test_GET_bad_etag_manifest_slo(self):
        """SLO GET where a segment's body doesn't match its manifest etag.

        Headers (200, content-length from swift_bytes) have already been
        sent when the mismatch is discovered on the second segment, so the
        proxy can only drop the connection: the client sees a truncated
        body against the advertised length.
        """
        listing = [{"hash": "98568d540134639be4655198a36614a4",
                    "last_modified": "2012-11-08T04:05:37.866820",
                    "bytes": 2,
                    "name": "/d1/seg01",
                    "content_type": "application/octet-stream"},
                   {"hash": "invalidhash",
                    "last_modified": "2012-11-08T04:05:37.846710",
                    "bytes": 2,
                    "name": "/d2/seg02",
                    "content_type": "application/octet-stream"}]

        response_bodies = (
            '',                           # HEAD /a
            '',                           # HEAD /a/c
            simplejson.dumps(listing),    # GET manifest
            'Aa',                         # GET seg01
            'Bb')                         # GET seg02
        with save_globals():
            controller = proxy_server.ObjectController(
                self.app, 'a', 'c', 'manifest')

            requested = []

            def capture_requested_paths(ipaddr, port, device, partition,
                                        method, path, headers=None,
                                        query_string=None):
                # Record (method, path, query dict) of every backend call.
                qs_dict = dict(urlparse.parse_qsl(query_string or ''))
                requested.append([method, path, qs_dict])

            set_http_connect(
                200,    # HEAD /a
                200,    # HEAD /a/c
                200,    # GET listing1
                200,    # GET seg01
                200,    # GET seg02
                headers=[{}, {}, {"X-Static-Large-Object": "True",
                         'content-type': 'text/html; swift_bytes=4'}, {}, {}],
                body_iter=response_bodies,
                give_connect=capture_requested_paths)
            req = Request.blank('/a/c/manifest')
            resp = controller.GET(req)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.body, 'Aa')  # dropped connection
            self.assertEqual(resp.content_length, 4)  # content incomplete
            self.assertEqual(resp.content_type, 'text/html')

            # The bad segment was still fetched; the failure happens while
            # streaming, after the request was made.
            self.assertEqual(
                requested,
                [['HEAD', '/a', {}],
                 ['HEAD', '/a/c', {}],
                 ['GET', '/a/c/manifest', {}],
                 ['GET', '/a/d1/seg01', {}],
                 ['GET', '/a/d2/seg02', {}]])
|
|
|
|
|
|
|
|
    def test_GET_nested_slo(self):
        """SLO GET where a segment is itself marked as an SLO manifest.

        The second segment's response carries X-Static-Large-Object, which
        is not allowed mid-stream (nested SLOs); headers are already out,
        so the proxy drops the connection and the body arrives truncated.
        """
        listing = [{"hash": "98568d540134639be4655198a36614a4",
                    "last_modified": "2012-11-08T04:05:37.866820",
                    "bytes": 2,
                    "name": "/d1/seg01",
                    "content_type": "application/octet-stream"},
                   {"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
                    "last_modified": "2012-11-08T04:05:37.846710",
                    "bytes": 2,
                    "name": "/d2/seg02",
                    "content_type": "application/octet-stream"}]

        response_bodies = (
            '',                           # HEAD /a
            '',                           # HEAD /a/c
            simplejson.dumps(listing),    # GET manifest
            'Aa',                         # GET seg01
            'Bb')                         # GET seg02
        with save_globals():
            controller = proxy_server.ObjectController(
                self.app, 'a', 'c', 'manifest')

            requested = []

            def capture_requested_paths(ipaddr, port, device, partition,
                                        method, path, headers=None,
                                        query_string=None):
                # Record (method, path, query dict) of every backend call.
                qs_dict = dict(urlparse.parse_qsl(query_string or ''))
                requested.append([method, path, qs_dict])

            # Both the manifest AND seg02 claim to be SLO manifests.
            slob_headers = {"X-Static-Large-Object": "True",
                            'content-type': 'text/html; swift_bytes=4'}
            set_http_connect(
                200,    # HEAD /a
                200,    # HEAD /a/c
                200,    # GET listing1
                200,    # GET seg01
                200,    # GET seg02
                headers=[{}, {}, slob_headers, {}, slob_headers],
                body_iter=response_bodies,
                give_connect=capture_requested_paths)
            req = Request.blank('/a/c/manifest')
            resp = controller.GET(req)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.body, 'Aa')  # dropped connection
            self.assertEqual(resp.content_length, 4)  # content incomplete
            self.assertEqual(resp.content_type, 'text/html')

            self.assertEqual(
                requested,
                [['HEAD', '/a', {}],
                 ['HEAD', '/a/c', {}],
                 ['GET', '/a/c/manifest', {}],
                 ['GET', '/a/d1/seg01', {}],
                 ['GET', '/a/d2/seg02', {}]])
|
|
|
|
|
|
|
|
    def test_GET_bad_404_manifest_slo(self):
        """SLO GET where a segment 404s partway through the stream.

        After seg01 streams fine, seg02 comes back 404.  The proxy retries
        the segment GET (three attempts visible in the captured paths)
        before giving up and dropping the connection; the third listing
        entry is never requested and the body is truncated against the
        swift_bytes=... content length.
        """
        listing = [{"hash": "98568d540134639be4655198a36614a4",
                    "last_modified": "2012-11-08T04:05:37.866820",
                    "bytes": 2,
                    "name": "/d1/seg01",
                    "content_type": "application/octet-stream"},
                   {"hash": "d526f1c8ef6c1e4e980e2b8471352d23",
                    "last_modified": "2012-11-08T04:05:37.846710",
                    "bytes": 2,
                    "name": "/d2/seg02",
                    "content_type": "application/octet-stream"},
                   {"hash": "invalidhash",
                    "last_modified": "2012-11-08T04:05:37.846710",
                    "bytes": 2,
                    "name": "/d2/seg03",
                    "content_type": "application/octet-stream"}]

        response_bodies = (
            '',                           # HEAD /a
            '',                           # HEAD /a/c
            simplejson.dumps(listing),    # GET manifest
            'Aa',                         # GET seg01
            '')                           # GET seg02
        with save_globals():
            controller = proxy_server.ObjectController(
                self.app, 'a', 'c', 'manifest')

            requested = []

            def capture_requested_paths(ipaddr, port, device, partition,
                                        method, path, headers=None,
                                        query_string=None):
                # Record (method, path, query dict) of every backend call.
                qs_dict = dict(urlparse.parse_qsl(query_string or ''))
                requested.append([method, path, qs_dict])

            set_http_connect(
                200,    # HEAD /a
                200,    # HEAD /a/c
                200,    # GET listing1
                200,    # GET seg01
                404,    # GET seg02
                headers=[{}, {}, {"X-Static-Large-Object": "True",
                         'content-type': 'text/html; swift_bytes=4'}, {}, {}],
                body_iter=response_bodies,
                give_connect=capture_requested_paths)
            req = Request.blank('/a/c/manifest')
            resp = controller.GET(req)
            self.assertEqual(resp.status_int, 200)
            self.assertEqual(resp.body, 'Aa')  # dropped connection
            self.assertEqual(resp.content_length, 6)  # content incomplete
            self.assertEqual(resp.content_type, 'text/html')

            self.assertEqual(
                requested,
                [['HEAD', '/a', {}],
                 ['HEAD', '/a/c', {}],
                 ['GET', '/a/c/manifest', {}],
                 ['GET', '/a/d1/seg01', {}],
                 ['GET', '/a/d2/seg02', {}],
                 ['GET', '/a/d2/seg02', {}],
                 ['GET', '/a/d2/seg02', {}]])  # 2nd segment not found
|
|
|
|
|
|
|
|
    def test_HEAD_manifest_slo(self):
        """HEAD of an SLO manifest.

        A HEAD alone doesn't yield the assembled object's size, so after
        the object-server HEAD reports X-Static-Large-Object the proxy
        issues a follow-up GET of the manifest to compute the response
        metadata.  The captured paths pin down that HEAD-then-GET pattern.
        """
        listing = [{"hash": "454dfc73af632012ce3e6217dc464241",
                    "last_modified": "2012-11-08T04:05:37.866820",
                    "bytes": 2,
                    "name": "/d1/seg01",
                    "content_type": "application/octet-stream"},
                   {"hash": "474bab96c67528d42d5c0c52b35228eb",
                    "last_modified": "2012-11-08T04:05:37.846710",
                    "bytes": 2,
                    "name": "/d2/seg02",
                    "content_type": "application/octet-stream"}]

        response_bodies = (
            '',                           # HEAD /a
            '',                           # HEAD /a/c
            '',                           # HEAD manifest
            simplejson.dumps(listing))    # GET manifest
        with save_globals():
            controller = proxy_server.ObjectController(
                self.app, 'a', 'c', 'manifest')

            requested = []

            def capture_requested_paths(ipaddr, port, device, partition,
                                        method, path, headers=None,
                                        query_string=None):
                # Record (method, path, query dict) of every backend call.
                qs_dict = dict(urlparse.parse_qsl(query_string or ''))
                requested.append([method, path, qs_dict])

            set_http_connect(
                200,    # HEAD /a
                200,    # HEAD /a/c
                200,    # HEAD listing1
                200,    # GET listing1
                headers={"X-Static-Large-Object": "True"},
                body_iter=response_bodies,
                give_connect=capture_requested_paths)

            req = Request.blank('/a/c/manifest',
                                environ={'REQUEST_METHOD': 'HEAD'})
            resp = controller.HEAD(req)
            self.assertEqual(resp.status_int, 200)

            self.assertEqual(
                requested,
                [['HEAD', '/a', {}],
                 ['HEAD', '/a/c', {}],
                 ['HEAD', '/a/c/manifest', {}],
                 ['GET', '/a/c/manifest', {}]])
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
    def test_PUT_auto_content_type(self):
        """PUT without a Content-Type guesses one from the file extension.

        give_content_type captures the content-type the proxy sends to
        each backend; the expected iterator yields one value per backend
        call ('' for the account/container HEADs, then the guessed type
        for each of the three object-server PUTs).
        """
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')

            def test_content_type(filename, expected):
                # The three responses here are for account_info() (HEAD to
                # account server), container_info() (HEAD to container server)
                # and three calls to _connect_put_node() (PUT to three object
                # servers)
                set_http_connect(201, 201, 201, 201, 201,
                                 give_content_type=lambda content_type:
                                 self.assertEquals(content_type,
                                                   expected.next()))
                # We need to include a transfer-encoding to get past
                # constraints.check_object_creation()
                req = Request.blank('/a/c/%s' % filename, {},
                                    headers={'transfer-encoding': 'chunked'})
                self.app.update_request(req)
                self.app.memcache.store = {}
                res = controller.PUT(req)
                # If we don't check the response here we could miss problems
                # in PUT()
                self.assertEquals(res.status_int, 201)

            test_content_type('test.jpg', iter(['', '', 'image/jpeg',
                                                'image/jpeg', 'image/jpeg']))
            test_content_type('test.html', iter(['', '', 'text/html',
                                                 'text/html', 'text/html']))
            test_content_type('test.css', iter(['', '', 'text/css',
                                                'text/css', 'text/css']))
|
2011-08-31 06:08:59 -07:00
|
|
|
|
2010-11-24 15:36:10 -06:00
|
|
|
def test_custom_mime_types_files(self):
|
2010-11-24 14:58:17 -08:00
|
|
|
swift_dir = mkdtemp()
|
2010-11-24 14:48:03 -08:00
|
|
|
try:
|
2010-11-24 14:58:17 -08:00
|
|
|
with open(os.path.join(swift_dir, 'mime.types'), 'w') as fp:
|
2010-11-24 14:48:03 -08:00
|
|
|
fp.write('foo/bar foo\n')
|
Updating proxy-server StatsD logging.
Removed many StatsD logging calls in proxy-server and added
swift-informant-style catch-all logging in the proxy-logger middleware.
Many errors previously rolled into the "proxy-server.<type>.errors"
counter will now appear broken down by response code and with timing
data at: "proxy-server.<type>.<verb>.<status>.timing". Also, bytes
transferred (sum of in + out) will be at:
"proxy-server.<type>.<verb>.<status>.xfer". The proxy-logging
middleware can get its StatsD config from standard vars in [DEFAULT] or
from access_log_statsd_* config vars in its config section.
Similarly to Swift Informant, request methods ("verbs") are filtered
using the new proxy-logging config var, "log_statsd_valid_http_methods"
which defaults to GET, HEAD, POST, PUT, DELETE, and COPY. Requests with
methods not in this list use "BAD_METHOD" for <verb> in the metric name.
To avoid user error, access_log_statsd_valid_http_methods is also
accepted.
Previously, proxy-server metrics used "Account", "Container", and
"Object" for the <type>, but these are now all lowercase.
Updated the admin guide's StatsD docs to reflect the above changes and
also include the "proxy-server.<type>.handoff_count" and
"proxy-server.<type>.handoff_all_count" metrics.
The proxy server now saves off the original req.method and proxy_logging
will use this if it can (both for request logging and as the "<verb>" in
the statsd timing metric). This fixes bug 1025433.
Removed some stale access_log_* related code in proxy/server.py. Also
removed the BaseApplication/Application distinction as it's no longer
necessary.
Fixed up the sample config files a bit (logging lines, mostly).
Fixed typo in SAIO development guide.
Got proxy_logging.py test coverage to 100%.
Fixed proxy_logging.py for PEP8 v1.3.2.
Enhanced test.unit.FakeLogger to track more calls to enable testing
StatsD metric calls.
Change-Id: I45d94cb76450be96d66fcfab56359bdfdc3a2576
2012-08-19 17:44:43 -07:00
|
|
|
ba = proxy_server.Application({'swift_dir': swift_dir},
|
2012-10-07 14:28:41 +11:00
|
|
|
FakeMemcache(), FakeLogger(),
|
|
|
|
FakeRing(), FakeRing(),
|
|
|
|
FakeRing())
|
2010-11-24 14:48:03 -08:00
|
|
|
self.assertEquals(proxy_server.mimetypes.guess_type('blah.foo')[0],
|
|
|
|
'foo/bar')
|
|
|
|
self.assertEquals(proxy_server.mimetypes.guess_type('blah.jpg')[0],
|
|
|
|
'image/jpeg')
|
|
|
|
finally:
|
2010-11-24 14:58:17 -08:00
|
|
|
rmtree(swift_dir, ignore_errors=True)
|
2010-07-12 17:03:45 -05:00
|
|
|
|
|
|
|
def test_PUT(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses)
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o.jpg', {})
|
|
|
|
req.content_length = 0
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
res = controller.PUT(req)
|
|
|
|
expected = str(expected)
|
|
|
|
self.assertEquals(res.status[:len(expected)], expected)
|
|
|
|
test_status_map((200, 200, 201, 201, 201), 201)
|
|
|
|
test_status_map((200, 200, 201, 201, 500), 201)
|
|
|
|
test_status_map((200, 200, 204, 404, 404), 404)
|
|
|
|
test_status_map((200, 200, 204, 500, 404), 503)
|
|
|
|
|
|
|
|
def test_PUT_connect_exceptions(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c/o.jpg', {})
|
|
|
|
req.content_length = 0
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.PUT(req)
|
|
|
|
expected = str(expected)
|
|
|
|
self.assertEquals(res.status[:len(expected)], expected)
|
|
|
|
test_status_map((200, 200, 201, 201, -1), 201)
|
2010-10-12 12:49:20 -05:00
|
|
|
test_status_map((200, 200, 201, 201, -2), 201) # expect timeout
|
|
|
|
test_status_map((200, 200, 201, 201, -3), 201) # error limited
|
2010-07-12 17:03:45 -05:00
|
|
|
test_status_map((200, 200, 201, -1, -1), 503)
|
|
|
|
test_status_map((200, 200, 503, 503, -1), 503)
|
|
|
|
|
|
|
|
def test_PUT_send_exceptions(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected):
|
|
|
|
self.app.memcache.store = {}
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses)
|
2011-03-10 08:52:03 -08:00
|
|
|
req = Request.blank('/a/c/o.jpg',
|
2012-10-07 14:28:41 +11:00
|
|
|
environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
body='some data')
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.PUT(req)
|
|
|
|
expected = str(expected)
|
|
|
|
self.assertEquals(res.status[:len(expected)], expected)
|
2010-10-11 17:33:11 -05:00
|
|
|
test_status_map((200, 200, 201, -1, 201), 201)
|
2010-07-12 17:03:45 -05:00
|
|
|
test_status_map((200, 200, 201, -1, -1), 503)
|
|
|
|
test_status_map((200, 200, 503, 503, -1), 503)
|
|
|
|
|
|
|
|
def test_PUT_max_size(self):
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(201, 201, 201)
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
|
|
|
'Content-Length': str(MAX_FILE_SIZE + 1),
|
|
|
|
'Content-Type': 'foo/bar'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.PUT(req)
|
|
|
|
self.assertEquals(res.status_int, 413)
|
|
|
|
|
2013-02-13 12:31:55 -08:00
|
|
|
def test_PUT_bad_content_type(self):
|
|
|
|
with save_globals():
|
|
|
|
set_http_connect(201, 201, 201)
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
|
|
|
'Content-Length': 0, 'Content-Type': 'foo/bar;swift_hey=45'})
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.PUT(req)
|
|
|
|
self.assertEquals(res.status_int, 400)
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_PUT_getresponse_exceptions(self):
|
2010-11-16 15:35:39 -08:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected):
|
|
|
|
self.app.memcache.store = {}
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses)
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o.jpg', {})
|
|
|
|
req.content_length = 0
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.PUT(req)
|
|
|
|
expected = str(expected)
|
|
|
|
self.assertEquals(res.status[:len(str(expected))],
|
|
|
|
str(expected))
|
|
|
|
test_status_map((200, 200, 201, 201, -1), 201)
|
|
|
|
test_status_map((200, 200, 201, -1, -1), 503)
|
|
|
|
test_status_map((200, 200, 503, 503, -1), 503)
|
|
|
|
|
|
|
|
def test_POST(self):
|
|
|
|
with save_globals():
|
2011-06-08 04:29:24 +00:00
|
|
|
self.app.object_post_as_copy = False
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
2012-10-07 14:28:41 +11:00
|
|
|
req = Request.blank('/a/c/o', {},
|
|
|
|
headers={'Content-Type': 'foo/bar'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.POST(req)
|
|
|
|
expected = str(expected)
|
|
|
|
self.assertEquals(res.status[:len(expected)], expected)
|
|
|
|
test_status_map((200, 200, 202, 202, 202), 202)
|
|
|
|
test_status_map((200, 200, 202, 202, 500), 202)
|
|
|
|
test_status_map((200, 200, 202, 500, 500), 503)
|
|
|
|
test_status_map((200, 200, 202, 404, 500), 503)
|
|
|
|
test_status_map((200, 200, 202, 404, 404), 404)
|
|
|
|
test_status_map((200, 200, 404, 500, 500), 503)
|
|
|
|
test_status_map((200, 200, 404, 404, 404), 404)
|
|
|
|
|
2011-06-08 04:19:34 +00:00
|
|
|
def test_POST_as_copy(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2011-06-08 04:19:34 +00:00
|
|
|
|
|
|
|
def test_status_map(statuses, expected):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses)
|
2011-06-08 04:19:34 +00:00
|
|
|
self.app.memcache.store = {}
|
2012-10-07 14:28:41 +11:00
|
|
|
req = Request.blank('/a/c/o', {},
|
|
|
|
headers={'Content-Type': 'foo/bar'})
|
2011-06-08 04:19:34 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
expected = str(expected)
|
|
|
|
self.assertEquals(res.status[:len(expected)], expected)
|
|
|
|
test_status_map((200, 200, 200, 200, 200, 202, 202, 202), 202)
|
|
|
|
test_status_map((200, 200, 200, 200, 200, 202, 202, 500), 202)
|
|
|
|
test_status_map((200, 200, 200, 200, 200, 202, 500, 500), 503)
|
|
|
|
test_status_map((200, 200, 200, 200, 200, 202, 404, 500), 503)
|
|
|
|
test_status_map((200, 200, 200, 200, 200, 202, 404, 404), 404)
|
|
|
|
test_status_map((200, 200, 200, 200, 200, 404, 500, 500), 503)
|
|
|
|
test_status_map((200, 200, 200, 200, 200, 404, 404, 404), 404)
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_DELETE(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c/o', {})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.DELETE(req)
|
|
|
|
self.assertEquals(res.status[:len(str(expected))],
|
|
|
|
str(expected))
|
|
|
|
test_status_map((200, 200, 204, 204, 204), 204)
|
|
|
|
test_status_map((200, 200, 204, 204, 500), 204)
|
|
|
|
test_status_map((200, 200, 204, 404, 404), 404)
|
|
|
|
test_status_map((200, 200, 204, 500, 404), 503)
|
|
|
|
test_status_map((200, 200, 404, 404, 404), 404)
|
|
|
|
test_status_map((200, 200, 404, 404, 500), 404)
|
|
|
|
|
|
|
|
def test_HEAD(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c/o', {})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.HEAD(req)
|
|
|
|
self.assertEquals(res.status[:len(str(expected))],
|
|
|
|
str(expected))
|
|
|
|
if expected < 400:
|
|
|
|
self.assert_('x-works' in res.headers)
|
|
|
|
self.assertEquals(res.headers['x-works'], 'yes')
|
2011-04-20 15:10:02 -07:00
|
|
|
self.assert_('accept-ranges' in res.headers)
|
|
|
|
self.assertEquals(res.headers['accept-ranges'], 'bytes')
|
|
|
|
|
2011-12-29 11:29:19 -06:00
|
|
|
test_status_map((200, 200, 200, 404, 404), 200)
|
|
|
|
test_status_map((200, 200, 200, 500, 404), 200)
|
|
|
|
test_status_map((200, 200, 304, 500, 404), 304)
|
|
|
|
test_status_map((200, 200, 404, 404, 404), 404)
|
|
|
|
test_status_map((200, 200, 404, 404, 500), 404)
|
|
|
|
test_status_map((200, 200, 500, 500, 500), 503)
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2011-06-07 23:19:48 +00:00
|
|
|
def test_HEAD_newest(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2011-06-07 23:19:48 +00:00
|
|
|
|
|
|
|
def test_status_map(statuses, expected, timestamps,
|
|
|
|
expected_timestamp):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses, timestamps=timestamps)
|
2011-06-07 23:19:48 +00:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c/o', {}, headers={'x-newest': 'true'})
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.HEAD(req)
|
|
|
|
self.assertEquals(res.status[:len(str(expected))],
|
|
|
|
str(expected))
|
|
|
|
self.assertEquals(res.headers.get('last-modified'),
|
|
|
|
expected_timestamp)
|
|
|
|
|
2011-12-29 11:29:19 -06:00
|
|
|
# acct cont obj obj obj
|
2012-10-07 14:28:41 +11:00
|
|
|
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
|
|
|
|
'2', '3'), '3')
|
|
|
|
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
|
|
|
|
'3', '2'), '3')
|
|
|
|
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
|
|
|
|
'3', '1'), '3')
|
|
|
|
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
|
|
|
|
'3', '1'), '3')
|
|
|
|
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
|
|
|
|
None, None), None)
|
|
|
|
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
|
|
|
|
None, '1'), '1')
|
2011-06-07 23:19:48 +00:00
|
|
|
|
2011-06-08 02:26:16 +00:00
|
|
|
    def test_GET_newest(self):
        # GET with 'X-Newest: true' must pick the replica with the newest
        # timestamp; the second block below verifies that a plain HEAD
        # (no X-Newest) just uses the first usable response instead.
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')

            def test_status_map(statuses, expected, timestamps,
                                expected_timestamp):
                set_http_connect(*statuses, timestamps=timestamps)
                self.app.memcache.store = {}
                req = Request.blank('/a/c/o', {}, headers={'x-newest': 'true'})
                self.app.update_request(req)
                res = controller.GET(req)
                self.assertEquals(res.status[:len(str(expected))],
                                  str(expected))
                # last-modified mirrors the timestamp of the chosen replica
                self.assertEquals(res.headers.get('last-modified'),
                                  expected_timestamp)

            # timestamps: acct, cont, then one per object server
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
                                                             '2', '3'), '3')
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
                                                             '3', '2'), '3')
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
                                                             '3', '1'), '3')
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
                                                             '3', '1'), '3')
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
                                                             None, None), None)
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
                                                             None, '1'), '1')

        # Without X-Newest, HEAD should settle for the first good replica
        # (the '1' timestamp) rather than hunting for the newest one.
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')

            def test_status_map(statuses, expected, timestamps,
                                expected_timestamp):
                set_http_connect(*statuses, timestamps=timestamps)
                self.app.memcache.store = {}
                req = Request.blank('/a/c/o', {})
                self.app.update_request(req)
                res = controller.HEAD(req)
                self.assertEquals(res.status[:len(str(expected))],
                                  str(expected))
                self.assertEquals(res.headers.get('last-modified'),
                                  expected_timestamp)

            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
                                                             '2', '3'), '1')
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
                                                             '3', '2'), '1')
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
                                                             '3', '1'), '1')
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
                                                             '3', '1'), '3')
            test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
                                                             '1', '2'), None)
2011-06-07 23:19:48 +00:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_POST_meta_val_len(self):
|
|
|
|
with save_globals():
|
2012-09-05 20:49:50 -07:00
|
|
|
limit = MAX_META_VALUE_LENGTH
|
2011-06-08 04:29:24 +00:00
|
|
|
self.app.object_post_as_copy = False
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 202, 202, 202)
|
|
|
|
# acct cont obj obj obj
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
2012-09-05 20:49:50 -07:00
|
|
|
'Content-Type': 'foo/bar',
|
|
|
|
'X-Object-Meta-Foo': 'x' * limit})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 202)
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(202, 202, 202)
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
2012-09-05 20:49:50 -07:00
|
|
|
'Content-Type': 'foo/bar',
|
|
|
|
'X-Object-Meta-Foo': 'x' * (limit + 1)})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 400)
|
|
|
|
|
2011-06-08 04:19:34 +00:00
|
|
|
def test_POST_as_copy_meta_val_len(self):
|
|
|
|
with save_globals():
|
2012-09-05 20:49:50 -07:00
|
|
|
limit = MAX_META_VALUE_LENGTH
|
2011-06-08 04:19:34 +00:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
|
|
|
|
# acct cont objc objc objc obj obj obj
|
2011-06-08 04:19:34 +00:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
2012-09-05 20:49:50 -07:00
|
|
|
'Content-Type': 'foo/bar',
|
|
|
|
'X-Object-Meta-Foo': 'x' * limit})
|
2011-06-08 04:19:34 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 202)
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(202, 202, 202)
|
2011-06-08 04:19:34 +00:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
2012-09-05 20:49:50 -07:00
|
|
|
'Content-Type': 'foo/bar',
|
|
|
|
'X-Object-Meta-Foo': 'x' * (limit + 1)})
|
2011-06-08 04:19:34 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 400)
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_POST_meta_key_len(self):
|
|
|
|
with save_globals():
|
2012-09-05 20:49:50 -07:00
|
|
|
limit = MAX_META_NAME_LENGTH
|
2011-06-08 04:29:24 +00:00
|
|
|
self.app.object_post_as_copy = False
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 202, 202, 202)
|
|
|
|
# acct cont obj obj obj
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
2010-10-11 17:33:11 -05:00
|
|
|
'Content-Type': 'foo/bar',
|
2012-09-05 20:49:50 -07:00
|
|
|
('X-Object-Meta-' + 'x' * limit): 'x'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 202)
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(202, 202, 202)
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
2010-10-11 17:33:11 -05:00
|
|
|
'Content-Type': 'foo/bar',
|
2012-09-05 20:49:50 -07:00
|
|
|
('X-Object-Meta-' + 'x' * (limit + 1)): 'x'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 400)
|
|
|
|
|
2011-06-08 04:19:34 +00:00
|
|
|
def test_POST_as_copy_meta_key_len(self):
|
|
|
|
with save_globals():
|
2012-09-05 20:49:50 -07:00
|
|
|
limit = MAX_META_NAME_LENGTH
|
2011-06-08 04:19:34 +00:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
|
|
|
|
# acct cont objc objc objc obj obj obj
|
2011-06-08 04:19:34 +00:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
|
|
|
'Content-Type': 'foo/bar',
|
2012-09-05 20:49:50 -07:00
|
|
|
('X-Object-Meta-' + 'x' * limit): 'x'})
|
2011-06-08 04:19:34 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 202)
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(202, 202, 202)
|
2011-06-08 04:19:34 +00:00
|
|
|
req = Request.blank('/a/c/o', {}, headers={
|
|
|
|
'Content-Type': 'foo/bar',
|
2012-09-05 20:49:50 -07:00
|
|
|
('X-Object-Meta-' + 'x' * (limit + 1)): 'x'})
|
2011-06-08 04:19:34 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 400)
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_POST_meta_count(self):
|
|
|
|
with save_globals():
|
2012-09-05 20:49:50 -07:00
|
|
|
limit = MAX_META_COUNT
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-10-11 17:33:11 -05:00
|
|
|
headers = dict(
|
2012-09-05 20:49:50 -07:00
|
|
|
(('X-Object-Meta-' + str(i), 'a') for i in xrange(limit + 1)))
|
2010-07-12 17:03:45 -05:00
|
|
|
headers.update({'Content-Type': 'foo/bar'})
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(202, 202, 202)
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', {}, headers=headers)
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 400)
|
|
|
|
|
|
|
|
def test_POST_meta_size(self):
|
|
|
|
with save_globals():
|
2012-09-05 20:49:50 -07:00
|
|
|
limit = MAX_META_OVERALL_SIZE
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2013-02-12 15:38:40 -08:00
|
|
|
count = limit / 256 # enough to cause the limit to be reached
|
2010-10-11 17:33:11 -05:00
|
|
|
headers = dict(
|
2012-09-05 20:49:50 -07:00
|
|
|
(('X-Object-Meta-' + str(i), 'a' * 256)
|
|
|
|
for i in xrange(count + 1)))
|
2010-07-12 17:03:45 -05:00
|
|
|
headers.update({'Content-Type': 'foo/bar'})
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(202, 202, 202)
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', {}, headers=headers)
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status_int, 400)
|
|
|
|
|
|
|
|
    def test_client_timeout(self):
        # A client body that trickles in slower than app.client_timeout
        # must make the proxy abort the PUT with 408 Request Timeout.
        with save_globals():
            self.app.account_ring.get_nodes('account')
            # Point every ring device at an unreachable local port so no
            # real connections are attempted.
            for dev in self.app.account_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            self.app.container_ring.get_nodes('account')
            for dev in self.app.container_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            self.app.object_ring.get_nodes('account')
            for dev in self.app.object_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1

            class SlowBody():
                # wsgi.input stub: yields one byte per read, pausing 0.1s
                # before each of the first four reads, then EOF.

                def __init__(self):
                    self.sent = 0

                def read(self, size=-1):
                    if self.sent < 4:
                        sleep(0.1)
                        self.sent += 1
                        return ' '
                    return ''

            req = Request.blank('/a/c/o',
                                environ={'REQUEST_METHOD': 'PUT',
                                         'wsgi.input': SlowBody()},
                                headers={'Content-Length': '4',
                                         'Content-Type': 'text/plain'})
            self.app.update_request(req)
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            set_http_connect(200, 200, 201, 201, 201)
            #                acct cont obj obj obj
            resp = controller.PUT(req)
            # with the default client_timeout the slow body still succeeds
            self.assertEquals(resp.status_int, 201)
            self.app.client_timeout = 0.1
            req = Request.blank('/a/c/o',
                                environ={'REQUEST_METHOD': 'PUT',
                                         'wsgi.input': SlowBody()},
                                headers={'Content-Length': '4',
                                         'Content-Type': 'text/plain'})
            self.app.update_request(req)
            set_http_connect(201, 201, 201)
            #                obj obj obj
            resp = controller.PUT(req)
            # now each 0.1s-delayed read overruns the 0.1s client timeout
            self.assertEquals(resp.status_int, 408)
|
|
|
|
|
|
|
    def test_client_disconnect(self):
        # If reading the client's request body raises mid-upload, the
        # proxy must answer 499 (client disconnect).
        with save_globals():
            self.app.account_ring.get_nodes('account')
            # Point every ring device at an unreachable local port so no
            # real connections are attempted.
            for dev in self.app.account_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            self.app.container_ring.get_nodes('account')
            for dev in self.app.container_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            self.app.object_ring.get_nodes('account')
            for dev in self.app.object_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1

            class SlowBody():
                # wsgi.input stub that blows up on the first read,
                # simulating a dropped client connection.

                def __init__(self):
                    self.sent = 0

                def read(self, size=-1):
                    raise Exception('Disconnected')

            req = Request.blank('/a/c/o',
                                environ={'REQUEST_METHOD': 'PUT',
                                         'wsgi.input': SlowBody()},
                                headers={'Content-Length': '4',
                                         'Content-Type': 'text/plain'})
            self.app.update_request(req)
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            set_http_connect(200, 200, 201, 201, 201)
            #                acct cont obj obj obj
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 499)
|
|
|
|
|
|
|
    def test_node_read_timeout(self):
        # When the backend dribbles the response body slower than
        # app.node_timeout, reading resp.body must raise ChunkReadTimeout.
        with save_globals():
            self.app.account_ring.get_nodes('account')
            # Point every ring device at an unreachable local port so no
            # real connections are attempted.
            for dev in self.app.account_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            self.app.container_ring.get_nodes('account')
            for dev in self.app.container_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            self.app.object_ring.get_nodes('account')
            for dev in self.app.object_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
            self.app.update_request(req)
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            set_http_connect(200, 200, 200, slow=True)
            req.sent_size = 0
            resp = controller.GET(req)
            got_exc = False
            try:
                resp.body
            except ChunkReadTimeout:
                got_exc = True
            # default node_timeout is generous enough for the slow body
            self.assert_(not got_exc)
            self.app.node_timeout = 0.1
            set_http_connect(200, 200, 200, slow=True)
            resp = controller.GET(req)
            got_exc = False
            try:
                resp.body
            except ChunkReadTimeout:
                got_exc = True
            # with a 0.1s node timeout the slow backend must time out
            self.assert_(got_exc)
|
|
|
|
|
|
|
    def test_node_write_timeout(self):
        # Object servers that accept the upload slower than
        # app.node_timeout must cause the PUT to fail with 503.
        with save_globals():
            self.app.account_ring.get_nodes('account')
            # Point every ring device at an unreachable local port so no
            # real connections are attempted.
            for dev in self.app.account_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            self.app.container_ring.get_nodes('account')
            for dev in self.app.container_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            self.app.object_ring.get_nodes('account')
            for dev in self.app.object_ring.devs.values():
                dev['ip'] = '127.0.0.1'
                dev['port'] = 1
            req = Request.blank('/a/c/o',
                                environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '4',
                                         'Content-Type': 'text/plain'},
                                body='    ')
            self.app.update_request(req)
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            set_http_connect(200, 200, 201, 201, 201, slow=True)
            resp = controller.PUT(req)
            # default node_timeout tolerates the slow backends
            self.assertEquals(resp.status_int, 201)
            self.app.node_timeout = 0.1
            set_http_connect(201, 201, 201, slow=True)
            req = Request.blank('/a/c/o',
                                environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '4',
                                         'Content-Type': 'text/plain'},
                                body='    ')
            self.app.update_request(req)
            resp = controller.PUT(req)
            # with a 0.1s node timeout the slow writes fail the PUT
            self.assertEquals(resp.status_int, 503)
|
|
|
|
|
|
|
    def test_iter_nodes(self):
        # iter_nodes should yield the primary nodes plus up to
        # max_more_nodes handoffs (bounded by request_node_count), and
        # log a warning per handoff only when log_handoffs is enabled.
        with save_globals():
            try:
                self.app.object_ring.max_more_nodes = 2
                controller = proxy_server.ObjectController(self.app, 'account',
                                                           'container',
                                                           'object')
                partition, nodes = self.app.object_ring.get_nodes('account',
                                                                  'container',
                                                                  'object')
                collected_nodes = []
                for node in controller.iter_nodes(self.app.object_ring,
                                                  partition):
                    collected_nodes.append(node)
                # primaries + the 2 allowed handoffs
                self.assertEquals(len(collected_nodes), 5)

                self.app.object_ring.max_more_nodes = 20
                self.app.request_node_count = lambda r: 20
                controller = proxy_server.ObjectController(self.app, 'account',
                                                           'container',
                                                           'object')
                partition, nodes = self.app.object_ring.get_nodes('account',
                                                                  'container',
                                                                  'object')
                collected_nodes = []
                for node in controller.iter_nodes(self.app.object_ring,
                                                  partition):
                    collected_nodes.append(node)
                # only 9 total nodes are available despite the higher caps
                # (NOTE(review): presumably limited by the test ring's
                # device count — confirm against the ring fixture)
                self.assertEquals(len(collected_nodes), 9)

                # with log_handoffs on, each handoff yields one warning
                self.app.log_handoffs = True
                self.app.logger = FakeLogger()
                self.app.object_ring.max_more_nodes = 2
                controller = proxy_server.ObjectController(self.app, 'account',
                                                           'container',
                                                           'object')
                partition, nodes = self.app.object_ring.get_nodes('account',
                                                                  'container',
                                                                  'object')
                collected_nodes = []
                for node in controller.iter_nodes(self.app.object_ring,
                                                  partition):
                    collected_nodes.append(node)
                self.assertEquals(len(collected_nodes), 5)
                self.assertEquals(
                    self.app.logger.log_dict['warning'],
                    [(('Handoff requested (1)',), {}),
                     (('Handoff requested (2)',), {})])

                # with log_handoffs off, no warnings at all
                self.app.log_handoffs = False
                self.app.logger = FakeLogger()
                self.app.object_ring.max_more_nodes = 2
                controller = proxy_server.ObjectController(self.app, 'account',
                                                           'container',
                                                           'object')
                partition, nodes = self.app.object_ring.get_nodes('account',
                                                                  'container',
                                                                  'object')
                collected_nodes = []
                for node in controller.iter_nodes(self.app.object_ring,
                                                  partition):
                    collected_nodes.append(node)
                self.assertEquals(len(collected_nodes), 5)
                self.assertEquals(self.app.logger.log_dict['warning'], [])
            finally:
                # restore the shared ring so later tests see no handoffs
                self.app.object_ring.max_more_nodes = 0
|
|
|
|
2013-04-06 01:35:58 +00:00
|
|
|
def test_iter_nodes_calls_sort_nodes(self):
|
|
|
|
with mock.patch.object(self.app, 'sort_nodes') as sort_nodes:
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
for node in controller.iter_nodes(self.app.object_ring, 0):
|
|
|
|
pass
|
|
|
|
sort_nodes.assert_called_once_with(
|
|
|
|
self.app.object_ring.get_part_nodes(0))
|
|
|
|
|
|
|
|
def test_iter_nodes_skips_error_limited(self):
|
|
|
|
with mock.patch.object(self.app, 'sort_nodes', lambda n: n):
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
first_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
|
|
|
|
second_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
|
|
|
|
self.assertTrue(first_nodes[0] in second_nodes)
|
|
|
|
|
|
|
|
controller.error_limit(first_nodes[0], 'test')
|
|
|
|
second_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
|
|
|
|
self.assertTrue(first_nodes[0] not in second_nodes)
|
|
|
|
|
|
|
|
    def test_iter_nodes_gives_extra_if_error_limited_inline(self):
        # If a node gets error-limited while iteration is already in
        # progress, iter_nodes compensates by yielding one extra node.
        with nested(
                mock.patch.object(self.app, 'sort_nodes', lambda n: n),
                mock.patch.object(self.app, 'request_node_count',
                                  lambda r: 6),
                mock.patch.object(self.app.object_ring, 'max_more_nodes', 99)):
            controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
            first_nodes = list(controller.iter_nodes(self.app.object_ring, 0))
            second_nodes = []
            for node in controller.iter_nodes(self.app.object_ring, 0):
                if not second_nodes:
                    # limit the very first node mid-iteration
                    controller.error_limit(node, 'test')
                second_nodes.append(node)
            # baseline run honors request_node_count (6); the run with an
            # inline error-limit yields one extra (7)
            self.assertEquals(len(first_nodes), 6)
            self.assertEquals(len(second_nodes), 7)
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_best_response_sets_etag(self):
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
|
|
|
|
resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3,
|
|
|
|
'Object')
|
|
|
|
self.assertEquals(resp.etag, None)
|
|
|
|
resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3,
|
2012-10-07 14:28:41 +11:00
|
|
|
'Object',
|
|
|
|
etag='68b329da9893e34099c7d8ad5cb9c940'
|
|
|
|
)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.assertEquals(resp.etag, '68b329da9893e34099c7d8ad5cb9c940')
|
2011-08-31 06:08:59 -07:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_proxy_passes_content_type(self):
|
|
|
|
with save_globals():
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200)
|
2010-07-12 17:03:45 -05:00
|
|
|
resp = controller.GET(req)
|
|
|
|
self.assertEquals(resp.status_int, 200)
|
|
|
|
self.assertEquals(resp.content_type, 'x-application/test')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200)
|
2010-07-12 17:03:45 -05:00
|
|
|
resp = controller.GET(req)
|
|
|
|
self.assertEquals(resp.status_int, 200)
|
|
|
|
self.assertEquals(resp.content_length, 0)
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, slow=True)
|
2010-07-12 17:03:45 -05:00
|
|
|
resp = controller.GET(req)
|
|
|
|
self.assertEquals(resp.status_int, 200)
|
|
|
|
self.assertEquals(resp.content_length, 4)
|
|
|
|
|
|
|
|
def test_proxy_passes_content_length_on_head(self):
|
|
|
|
with save_globals():
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'HEAD'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200)
|
2010-07-12 17:03:45 -05:00
|
|
|
resp = controller.HEAD(req)
|
|
|
|
self.assertEquals(resp.status_int, 200)
|
|
|
|
self.assertEquals(resp.content_length, 0)
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, slow=True)
|
2010-07-12 17:03:45 -05:00
|
|
|
resp = controller.HEAD(req)
|
|
|
|
self.assertEquals(resp.status_int, 200)
|
|
|
|
self.assertEquals(resp.content_length, 4)
|
|
|
|
|
|
|
|
    def test_error_limiting(self):
        # Repeated 503s from a device raise its error count until the
        # device is suppressed entirely; a negative suppression interval
        # expires the limit immediately.
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            # keep node order deterministic so device 0 is always hit first
            controller.app.sort_nodes = lambda l: l
            self.assert_status_map(controller.HEAD, (200, 200, 503, 200, 200),
                                   200)
            # a single 503 leaves an error count of 2 on device 0
            # (NOTE(review): presumably the error plus its retry — confirm)
            self.assertEquals(controller.app.object_ring.devs[0]['errors'], 2)
            self.assert_('last_error' in controller.app.object_ring.devs[0])
            for _junk in xrange(self.app.error_suppression_limit):
                self.assert_status_map(controller.HEAD, (200, 200, 503, 503,
                                                         503), 503)
            self.assertEquals(controller.app.object_ring.devs[0]['errors'],
                              self.app.error_suppression_limit + 1)
            # device 0 is now error-limited: even all-200 backends give 503
            self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200),
                                   503)
            self.assert_('last_error' in controller.app.object_ring.devs[0])
            self.assert_status_map(controller.PUT, (200, 200, 200, 201, 201,
                                                    201), 503)
            self.assert_status_map(controller.POST,
                                   (200, 200, 200, 200, 200, 200, 202, 202,
                                    202), 503)
            self.assert_status_map(controller.DELETE,
                                   (200, 200, 200, 204, 204, 204), 503)
            # a negative interval makes the suppression expire at once
            self.app.error_suppression_interval = -300
            self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200),
                                   200)
            # raise_exc=True should propagate the backend exception
            self.assertRaises(BaseException,
                              self.assert_status_map, controller.DELETE,
                              (200, 200, 200, 204, 204, 204), 503,
                              raise_exc=True)
2010-07-12 17:03:45 -05:00
|
|
|
|
|
|
|
    def test_acc_or_con_missing_returns_404(self):
        # A DELETE whose account or container lookups fail (404s, 503s,
        # or error-limited devices) must return 404 to the client.
        with save_globals():
            self.app.memcache = FakeMemcacheReturnsNone()
            # clear error-limiting state possibly left by other tests
            for dev in self.app.account_ring.devs.values():
                del dev['errors']
                del dev['last_error']
            for dev in self.app.container_ring.devs.values():
                del dev['errors']
                del dev['last_error']
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            set_http_connect(200, 200, 200, 200, 200, 200)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'})
            self.app.update_request(req)
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 200)

            set_http_connect(404, 404, 404)
            #                acct acct acct
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 404)

            set_http_connect(503, 404, 404)
            #                acct acct acct
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 404)

            set_http_connect(503, 503, 404)
            #                acct acct acct
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 404)

            set_http_connect(503, 503, 503)
            #                acct acct acct
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 404)

            set_http_connect(200, 200, 204, 204, 204)
            #                acct cont obj obj obj
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 204)

            set_http_connect(200, 404, 404, 404)
            #                acct cont cont cont
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 404)

            set_http_connect(200, 503, 503, 503)
            #                acct cont cont cont
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 404)

            # error-limit every account device: the account is treated as
            # unreachable and the request 404s without any backend call
            for dev in self.app.account_ring.devs.values():
                dev['errors'] = self.app.error_suppression_limit + 1
                dev['last_error'] = time.time()
            set_http_connect(200)
            #                acct [isn't actually called since everything
            #                      is error limited]
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 404)

            # same again but with only the container devices error-limited
            for dev in self.app.account_ring.devs.values():
                dev['errors'] = 0
            for dev in self.app.container_ring.devs.values():
                dev['errors'] = self.app.error_suppression_limit + 1
                dev['last_error'] = time.time()
            set_http_connect(200, 200)
            #                acct cont [isn't actually called since
            #                           everything is error limited]
            resp = getattr(controller, 'DELETE')(req)
            self.assertEquals(resp.status_int, 404)
|
|
|
|
|
|
|
def test_PUT_POST_requires_container_exist(self):
|
2011-06-08 04:19:34 +00:00
|
|
|
with save_globals():
|
2011-06-08 04:29:24 +00:00
|
|
|
self.app.object_post_as_copy = False
|
2011-06-08 04:19:34 +00:00
|
|
|
self.app.memcache = FakeMemcacheReturnsNone()
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
|
|
|
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 404, 404, 404, 200, 200, 200)
|
2011-06-08 04:19:34 +00:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'})
|
|
|
|
self.app.update_request(req)
|
|
|
|
resp = controller.PUT(req)
|
|
|
|
self.assertEquals(resp.status_int, 404)
|
|
|
|
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 404, 404, 404, 200, 200)
|
2011-06-08 04:19:34 +00:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
|
|
|
|
headers={'Content-Type': 'text/plain'})
|
|
|
|
self.app.update_request(req)
|
|
|
|
resp = controller.POST(req)
|
|
|
|
self.assertEquals(resp.status_int, 404)
|
|
|
|
|
|
|
|
def test_PUT_POST_as_copy_requires_container_exist(self):
|
2010-07-12 17:03:45 -05:00
|
|
|
with save_globals():
|
|
|
|
self.app.memcache = FakeMemcacheReturnsNone()
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 404, 404, 404, 200, 200, 200)
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
resp = controller.PUT(req)
|
|
|
|
self.assertEquals(resp.status_int, 404)
|
|
|
|
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 404, 404, 404, 200, 200, 200, 200, 200, 200)
|
2010-07-12 17:03:45 -05:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
|
|
|
|
headers={'Content-Type': 'text/plain'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
resp = controller.POST(req)
|
|
|
|
self.assertEquals(resp.status_int, 404)
|
|
|
|
|
|
|
|
    def test_bad_metadata(self):
        """Object PUTs must enforce the per-request metadata constraints.

        Exercises, in order: a plain PUT with no metadata; a metadata
        name at exactly MAX_META_NAME_LENGTH (ok) and one char over
        (400); a value at MAX_META_VALUE_LENGTH (ok) and one over (400);
        exactly MAX_META_COUNT headers (ok) and one more (400); and a
        header set filling MAX_META_OVERALL_SIZE to one byte under (ok)
        then exactly at the limit (400).
        """
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            set_http_connect(200, 200, 201, 201, 201)
            #                acct cont obj  obj  obj
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0'})
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)

            # metadata name exactly at the limit is accepted
            set_http_connect(201, 201, 201)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Object-Meta-' + ('a' *
                                             MAX_META_NAME_LENGTH): 'v'})
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            # one character over the name limit is rejected
            set_http_connect(201, 201, 201)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Object-Meta-' + ('a' *
                                             (MAX_META_NAME_LENGTH + 1)): 'v'})
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 400)

            # metadata value exactly at the limit is accepted
            set_http_connect(201, 201, 201)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Object-Meta-Too-Long': 'a' *
                                         MAX_META_VALUE_LENGTH})
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            # one character over the value limit is rejected
            set_http_connect(201, 201, 201)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Object-Meta-Too-Long': 'a' *
                                         (MAX_META_VALUE_LENGTH + 1)})
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 400)

            # exactly MAX_META_COUNT metadata headers are accepted
            set_http_connect(201, 201, 201)
            headers = {'Content-Length': '0'}
            for x in xrange(MAX_META_COUNT):
                headers['X-Object-Meta-%d' % x] = 'v'
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers=headers)
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            # one extra metadata header is rejected
            set_http_connect(201, 201, 201)
            headers = {'Content-Length': '0'}
            for x in xrange(MAX_META_COUNT + 1):
                headers['X-Object-Meta-%d' % x] = 'v'
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers=headers)
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 400)

            # fill metadata to one byte under MAX_META_OVERALL_SIZE:
            # each header contributes its value length plus 4 (the loop's
            # accounting for the name overhead), then a final header tops
            # the total up to limit - 1
            set_http_connect(201, 201, 201)
            headers = {'Content-Length': '0'}
            header_value = 'a' * MAX_META_VALUE_LENGTH
            size = 0
            x = 0
            while size < MAX_META_OVERALL_SIZE - 4 - \
                    MAX_META_VALUE_LENGTH:
                size += 4 + MAX_META_VALUE_LENGTH
                headers['X-Object-Meta-%04d' % x] = header_value
                x += 1
            if MAX_META_OVERALL_SIZE - size > 1:
                headers['X-Object-Meta-a'] = \
                    'a' * (MAX_META_OVERALL_SIZE - size - 1)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers=headers)
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            # adding one more byte pushes the total to the limit -> 400
            set_http_connect(201, 201, 201)
            headers['X-Object-Meta-a'] = \
                'a' * (MAX_META_OVERALL_SIZE - size)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers=headers)
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 400)
|
|
|
|
|
|
|
|
    def test_copy_from(self):
        """PUT with an X-Copy-From header performs a server-side copy.

        Each set_http_connect() call scripts the backend status codes the
        proxy will see for the following request, in connection order
        (the '# acct cont ...' comments label each slot).  Covers: the
        happy path with and without a leading '/', sources containing
        extra path segments and encoded spaces, and the error paths
        (non-zero Content-Length, malformed source, backend 5xx/404,
        partially-missing containers, metadata merging, and a source
        object larger than MAX_FILE_SIZE).
        """
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            # initial source object PUT
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0'})
            self.app.update_request(req)
            set_http_connect(200, 200, 201, 201, 201)
            #                acct cont obj  obj  obj
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)

            # basic copy
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': 'c/o'})
            self.app.update_request(req)
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o')

            # non-zero content length is invalid on a copy request
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '5',
                                         'X-Copy-From': 'c/o'})
            self.app.update_request(req)
            set_http_connect(200, 200, 200, 200, 200, 200, 200)
            #                acct cont acct cont objc objc objc
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 400)

            # extra source path parsing
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': 'c/o/o2'})
            req.account = 'a'
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')

            # space in source path
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': 'c/o%20o2'})
            req.account = 'a'
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o%20o2')

            # repeat tests with leading /
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': '/c/o'})
            self.app.update_request(req)
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o')

            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': '/c/o/o2'})
            req.account = 'a'
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')

            # negative tests

            # invalid x-copy-from path (no object component)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': '/c'})
            self.app.update_request(req)
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int // 100, 4)  # client error

            # server error reading the source object
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': '/c/o'})
            self.app.update_request(req)
            set_http_connect(200, 200, 503, 503, 503)
            #                acct cont objc objc objc
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 503)

            # not found
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': '/c/o'})
            self.app.update_request(req)
            set_http_connect(200, 200, 404, 404, 404)
            #                acct cont objc objc objc
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 404)

            # some missing containers: one good replica is enough
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': '/c/o'})
            self.app.update_request(req)
            set_http_connect(200, 200, 404, 404, 200, 201, 201, 201)
            #                acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)

            # test object meta data: source metadata is carried over and
            # merged with metadata supplied on the copy request
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': '/c/o',
                                         'X-Object-Meta-Ours': 'okay'})
            self.app.update_request(req)
            set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers.get('x-object-meta-test'),
                              'testing')
            self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay')

            # copy-from object is too large to fit in target object
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0',
                                         'X-Copy-From': '/c/o'})
            self.app.update_request(req)

            class LargeResponseBody(object):
                # fake body whose reported length exceeds MAX_FILE_SIZE
                # while yielding empty chunks, so no real memory is used

                def __len__(self):
                    return MAX_FILE_SIZE + 1

                def __getitem__(self, key):
                    return ''

            copy_from_obj_body = LargeResponseBody()
            set_http_connect(200, 200, 200, 200, 200, 201, 201, 201,
                             body=copy_from_obj_body)
            self.app.memcache.store = {}
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 413)
|
|
|
|
|
2010-09-24 09:51:00 -05:00
|
|
|
    def test_COPY(self):
        """The COPY verb duplicates an object to a Destination path.

        Mirrors test_copy_from but drives the dedicated COPY method:
        destinations with and without a leading '/', multi-segment
        object names, a malformed Destination (412), backend 5xx/404
        on the source read, partially-missing containers, metadata
        merging, and a source larger than MAX_FILE_SIZE (413).
        """
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
            # seed the source object
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'Content-Length': '0'})
            req.account = 'a'
            set_http_connect(200, 200, 201, 201, 201)
            #                acct cont obj  obj  obj
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 201)

            # basic COPY, destination without a leading '/'
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': 'c/o'})
            req.account = 'a'
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o')

            # multi-segment source object name
            req = Request.blank('/a/c/o/o2',
                                environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': 'c/o'})
            req.account = 'a'
            controller.object_name = 'o/o2'
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')

            # destination with a leading '/'
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': '/c/o'})
            req.account = 'a'
            controller.object_name = 'o'
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o')

            req = Request.blank('/a/c/o/o2',
                                environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': '/c/o'})
            req.account = 'a'
            controller.object_name = 'o/o2'
            set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2')

            # malformed Destination (no container/object split) -> 412
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': 'c_o'})
            req.account = 'a'
            controller.object_name = 'o'
            set_http_connect(200, 200)
            #                acct cont
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 412)

            # backend server errors reading the source
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': '/c/o'})
            req.account = 'a'
            controller.object_name = 'o'
            set_http_connect(200, 200, 503, 503, 503)
            #                acct cont objc objc objc
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 503)

            # source object not found
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': '/c/o'})
            req.account = 'a'
            controller.object_name = 'o'
            set_http_connect(200, 200, 404, 404, 404)
            #                acct cont objc objc objc
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 404)

            # some object-server replicas missing; one 200 suffices
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': '/c/o'})
            req.account = 'a'
            controller.object_name = 'o'
            set_http_connect(200, 200, 404, 404, 200, 201, 201, 201)
            #                acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 201)

            # source metadata is carried over and merged with request's
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': '/c/o',
                                         'X-Object-Meta-Ours': 'okay'})
            req.account = 'a'
            controller.object_name = 'o'
            set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
            #                acct cont objc objc objc obj  obj  obj
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 201)
            self.assertEquals(resp.headers.get('x-object-meta-test'),
                              'testing')
            self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay')

            # source object is too large for the target -> 413
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Destination': '/c/o'})
            self.app.update_request(req)

            class LargeResponseBody(object):
                # fake body reporting a length over MAX_FILE_SIZE while
                # yielding empty chunks

                def __len__(self):
                    return MAX_FILE_SIZE + 1

                def __getitem__(self, key):
                    return ''

            copy_from_obj_body = LargeResponseBody()
            set_http_connect(200, 200, 200, 200, 200, 201, 201, 201,
                             body=copy_from_obj_body)
            self.app.memcache.store = {}
            resp = controller.COPY(req)
            self.assertEquals(resp.status_int, 413)
|
|
|
|
|
2011-06-08 02:26:16 +00:00
|
|
|
def test_COPY_newest(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
|
|
|
|
headers={'Destination': '/c/o'})
|
|
|
|
req.account = 'a'
|
|
|
|
controller.object_name = 'o'
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201,
|
2012-10-07 14:28:41 +11:00
|
|
|
#act cont objc objc objc obj obj obj
|
|
|
|
timestamps=('1', '1', '1', '3', '2', '4', '4',
|
|
|
|
'4'))
|
2011-06-08 02:26:16 +00:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
resp = controller.COPY(req)
|
|
|
|
self.assertEquals(resp.status_int, 201)
|
|
|
|
self.assertEquals(resp.headers['x-copied-from-last-modified'],
|
|
|
|
'3')
|
|
|
|
|
2010-10-11 17:33:11 -05:00
|
|
|
    def test_chunked_put(self):
        """A Transfer-Encoding: chunked PUT succeeds when the body fits
        and is rejected with 413 once it exceeds MAX_FILE_SIZE."""

        class ChunkedFile():
            # Minimal file-like body: serves `bytes` worth of 'a's and
            # raises StopIteration when exhausted, mimicking how the
            # proxy iterates a chunked request body.

            def __init__(self, bytes):
                self.bytes = bytes
                self.read_bytes = 0

            @property
            def bytes_left(self):
                return self.bytes - self.read_bytes

            def read(self, amt=None):
                if self.read_bytes >= self.bytes:
                    raise StopIteration()
                if not amt:
                    amt = self.bytes_left
                data = 'a' * min(amt, self.bytes_left)
                self.read_bytes += len(data)
                return data

        with save_globals():
            set_http_connect(201, 201, 201, 201)
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            # NOTE(review): REQUEST_METHOD is 'COPY' here although
            # controller.PUT is what gets called — presumably only the
            # body handling matters to this test; confirm intent.
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Transfer-Encoding': 'chunked',
                                         'Content-Type': 'foo/bar'})

            req.body_file = ChunkedFile(10)
            self.app.memcache.store = {}
            self.app.update_request(req)
            res = controller.PUT(req)
            self.assertEquals(res.status_int // 100, 2)  # success

            # test 413 entity to large
            set_http_connect(201, 201, 201, 201)
            req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
                                headers={'Transfer-Encoding': 'chunked',
                                         'Content-Type': 'foo/bar'})
            req.body_file = ChunkedFile(11)
            self.app.memcache.store = {}
            self.app.update_request(req)
            try:
                # shrink the module-level limit below the body size, and
                # restore it even if the assertion fails
                swift.proxy.controllers.obj.MAX_FILE_SIZE = 10
                res = controller.PUT(req)
                self.assertEquals(res.status_int, 413)
            finally:
                swift.proxy.controllers.obj.MAX_FILE_SIZE = MAX_FILE_SIZE
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2011-01-19 03:56:13 +00:00
|
|
|
def test_chunked_put_bad_version(self):
|
|
|
|
# Check bad version
|
2012-10-07 14:28:41 +11:00
|
|
|
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
|
|
|
|
obj2lis) = _test_sockets
|
2011-01-19 03:56:13 +00:00
|
|
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
|
|
|
fd = sock.makefile()
|
|
|
|
fd.write('GET /v0 HTTP/1.1\r\nHost: localhost\r\n'
|
2012-10-07 14:28:41 +11:00
|
|
|
'Connection: close\r\nContent-Length: 0\r\n\r\n')
|
2011-01-19 03:56:13 +00:00
|
|
|
fd.flush()
|
|
|
|
headers = readuntil2crlfs(fd)
|
|
|
|
exp = 'HTTP/1.1 412'
|
|
|
|
self.assertEquals(headers[:len(exp)], exp)
|
|
|
|
|
|
|
|
def test_chunked_put_bad_path(self):
|
|
|
|
# Check bad path
|
2012-10-07 14:28:41 +11:00
|
|
|
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
|
|
|
|
obj2lis) = _test_sockets
|
2011-01-19 03:56:13 +00:00
|
|
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
|
|
|
fd = sock.makefile()
|
|
|
|
fd.write('GET invalid HTTP/1.1\r\nHost: localhost\r\n'
|
2012-10-07 14:28:41 +11:00
|
|
|
'Connection: close\r\nContent-Length: 0\r\n\r\n')
|
2011-01-19 03:56:13 +00:00
|
|
|
fd.flush()
|
|
|
|
headers = readuntil2crlfs(fd)
|
|
|
|
exp = 'HTTP/1.1 404'
|
|
|
|
self.assertEquals(headers[:len(exp)], exp)
|
|
|
|
|
|
|
|
def test_chunked_put_bad_utf8(self):
|
|
|
|
# Check invalid utf-8
|
2012-10-07 14:28:41 +11:00
|
|
|
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
|
|
|
|
obj2lis) = _test_sockets
|
2011-01-19 03:56:13 +00:00
|
|
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
|
|
|
fd = sock.makefile()
|
|
|
|
fd.write('GET /v1/a%80 HTTP/1.1\r\nHost: localhost\r\n'
|
2012-10-07 14:28:41 +11:00
|
|
|
'Connection: close\r\nX-Auth-Token: t\r\n'
|
|
|
|
'Content-Length: 0\r\n\r\n')
|
2011-01-19 03:56:13 +00:00
|
|
|
fd.flush()
|
|
|
|
headers = readuntil2crlfs(fd)
|
|
|
|
exp = 'HTTP/1.1 412'
|
|
|
|
self.assertEquals(headers[:len(exp)], exp)
|
|
|
|
|
|
|
|
def test_chunked_put_bad_path_no_controller(self):
|
|
|
|
# Check bad path, no controller
|
2012-10-07 14:28:41 +11:00
|
|
|
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
|
|
|
|
obj2lis) = _test_sockets
|
2011-01-19 03:56:13 +00:00
|
|
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
|
|
|
fd = sock.makefile()
|
|
|
|
fd.write('GET /v1 HTTP/1.1\r\nHost: localhost\r\n'
|
2012-10-07 14:28:41 +11:00
|
|
|
'Connection: close\r\nX-Auth-Token: t\r\n'
|
|
|
|
'Content-Length: 0\r\n\r\n')
|
2011-01-19 03:56:13 +00:00
|
|
|
fd.flush()
|
|
|
|
headers = readuntil2crlfs(fd)
|
|
|
|
exp = 'HTTP/1.1 412'
|
|
|
|
self.assertEquals(headers[:len(exp)], exp)
|
|
|
|
|
|
|
|
def test_chunked_put_bad_method(self):
|
|
|
|
# Check bad method
|
2012-10-07 14:28:41 +11:00
|
|
|
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
|
|
|
|
obj2lis) = _test_sockets
|
2011-01-19 03:56:13 +00:00
|
|
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
|
|
|
fd = sock.makefile()
|
|
|
|
fd.write('LICK /v1/a HTTP/1.1\r\nHost: localhost\r\n'
|
2012-10-07 14:28:41 +11:00
|
|
|
'Connection: close\r\nX-Auth-Token: t\r\n'
|
|
|
|
'Content-Length: 0\r\n\r\n')
|
2011-01-19 03:56:13 +00:00
|
|
|
fd.flush()
|
|
|
|
headers = readuntil2crlfs(fd)
|
|
|
|
exp = 'HTTP/1.1 405'
|
|
|
|
self.assertEquals(headers[:len(exp)], exp)
|
|
|
|
|
|
|
|
def test_chunked_put_unhandled_exception(self):
|
|
|
|
# Check unhandled exception
|
2012-10-07 14:28:41 +11:00
|
|
|
(prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv,
|
|
|
|
obj2srv) = _test_servers
|
|
|
|
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
|
|
|
|
obj2lis) = _test_sockets
|
2011-01-19 03:56:13 +00:00
|
|
|
orig_update_request = prosrv.update_request
|
|
|
|
|
2011-04-11 16:26:50 -07:00
|
|
|
def broken_update_request(*args, **kwargs):
|
|
|
|
raise Exception('fake: this should be printed')
|
2011-01-19 03:56:13 +00:00
|
|
|
|
|
|
|
prosrv.update_request = broken_update_request
|
|
|
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
|
|
|
fd = sock.makefile()
|
|
|
|
fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n'
|
2012-10-07 14:28:41 +11:00
|
|
|
'Connection: close\r\nX-Auth-Token: t\r\n'
|
|
|
|
'Content-Length: 0\r\n\r\n')
|
2011-01-19 03:56:13 +00:00
|
|
|
fd.flush()
|
|
|
|
headers = readuntil2crlfs(fd)
|
|
|
|
exp = 'HTTP/1.1 500'
|
|
|
|
self.assertEquals(headers[:len(exp)], exp)
|
|
|
|
prosrv.update_request = orig_update_request
|
|
|
|
|
|
|
|
def test_chunked_put_head_account(self):
|
|
|
|
# Head account, just a double check and really is here to test
|
|
|
|
# the part Application.log_request that 'enforces' a
|
|
|
|
# content_length on the response.
|
2012-10-07 14:28:41 +11:00
|
|
|
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
|
|
|
|
obj2lis) = _test_sockets
|
2011-01-19 03:56:13 +00:00
|
|
|
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
|
|
|
|
fd = sock.makefile()
|
|
|
|
fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n'
|
2012-10-07 14:28:41 +11:00
|
|
|
'Connection: close\r\nX-Auth-Token: t\r\n'
|
|
|
|
'Content-Length: 0\r\n\r\n')
|
2011-01-19 03:56:13 +00:00
|
|
|
fd.flush()
|
|
|
|
headers = readuntil2crlfs(fd)
|
|
|
|
exp = 'HTTP/1.1 204'
|
|
|
|
self.assertEquals(headers[:len(exp)], exp)
|
|
|
|
self.assert_('\r\nContent-Length: 0\r\n' in headers)
|
|
|
|
|
|
|
|
def test_chunked_put_utf8_all_the_way_down(self):
    """Exercise UTF-8 names end to end through the live test servers.

    Creates a container and an object whose names (and a piece of
    object metadata) are UTF-8 byte strings, then verifies they appear
    correctly in plain, JSON, and XML listings at both the account and
    container level, and in the object's response headers.
    """
    # Test UTF-8 Unicode all the way through the system
    # ustr is raw UTF-8 bytes (Greek text), not a unicode object.
    ustr = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xba \xe1\xbc\xb0\xce' \
           '\xbf\xe1\xbd\xbb\xce\x87 \xcf\x84\xe1\xbd\xb0 \xcf' \
           '\x80\xe1\xbd\xb1\xce\xbd\xcf\x84\xca\xbc \xe1\xbc' \
           '\x82\xce\xbd \xe1\xbc\x90\xce\xbe\xe1\xbd\xb5\xce' \
           '\xba\xce\xbf\xce\xb9 \xcf\x83\xce\xb1\xcf\x86\xe1' \
           '\xbf\x86.Test'
    # Shorter UTF-8 string used as the metadata header name suffix.
    ustr_short = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xbatest'
    # Create ustr container
    (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
     obj2lis) = _test_sockets
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'Content-Length: 0\r\n\r\n' % quote(ustr))
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # List account with ustr container (test plain)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'Content-Length: 0\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    # Plain listing is newline-separated raw names.
    containers = fd.read().split('\n')
    self.assert_(ustr in containers)
    # List account with ustr container (test json)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a?format=json HTTP/1.1\r\n'
             'Host: localhost\r\nConnection: close\r\n'
             'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    # JSON listing decodes names to unicode, so compare decoded.
    listing = simplejson.loads(fd.read())
    self.assert_(ustr.decode('utf8') in [l['name'] for l in listing])
    # List account with ustr container (test xml)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a?format=xml HTTP/1.1\r\n'
             'Host: localhost\r\nConnection: close\r\n'
             'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    self.assert_('<name>%s</name>' % ustr in fd.read())
    # Create ustr object with ustr metadata in ustr container
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'X-Object-Meta-%s: %s\r\nContent-Length: 0\r\n\r\n' %
             (quote(ustr), quote(ustr), quote(ustr_short),
              quote(ustr)))
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # List ustr container with ustr object (test plain)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'Content-Length: 0\r\n\r\n' % quote(ustr))
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    objects = fd.read().split('\n')
    self.assert_(ustr in objects)
    # List ustr container with ustr object (test json)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/%s?format=json HTTP/1.1\r\n'
             'Host: localhost\r\nConnection: close\r\n'
             'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' %
             quote(ustr))
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    listing = simplejson.loads(fd.read())
    self.assertEquals(listing[0]['name'], ustr.decode('utf8'))
    # List ustr container with ustr object (test xml)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/%s?format=xml HTTP/1.1\r\n'
             'Host: localhost\r\nConnection: close\r\n'
             'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' %
             quote(ustr))
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    self.assert_('<name>%s</name>' % ustr in fd.read())
    # Retrieve ustr object with ustr metadata
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'Content-Length: 0\r\n\r\n' %
             (quote(ustr), quote(ustr)))
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    # Header names come back lowercased, hence .lower() on the suffix.
    self.assert_('\r\nX-Object-Meta-%s: %s\r\n' %
                 (quote(ustr_short).lower(), quote(ustr)) in headers)
def test_chunked_put_chunked_put(self):
    """PUT an object with ``Transfer-Encoding: chunked`` over a raw
    socket, then GET it back and verify the reassembled body."""
    (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
     obj2lis) = _test_sockets
    proxy_addr = ('localhost', prolis.getsockname()[1])
    # Do chunked object put.  Also happens to assert that
    # x-storage-token is taken as a replacement for x-auth-token.
    put_request = ('PUT /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n'
                   'Connection: close\r\nX-Storage-Token: t\r\n'
                   'Transfer-Encoding: chunked\r\n\r\n'
                   '2\r\noh\r\n4\r\n hai\r\nf\r\n123456789abcdef\r\n'
                   '0\r\n\r\n')
    sock = connect_tcp(proxy_addr)
    fd = sock.makefile()
    fd.write(put_request)
    fd.flush()
    response = readuntil2crlfs(fd)
    expected = 'HTTP/1.1 201'
    self.assertEquals(response[:len(expected)], expected)
    # Ensure we get back exactly what we put.
    get_request = ('GET /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n'
                   'Connection: close\r\nX-Auth-Token: t\r\n\r\n')
    sock = connect_tcp(proxy_addr)
    fd = sock.makefile()
    fd.write(get_request)
    fd.flush()
    response = readuntil2crlfs(fd)
    expected = 'HTTP/1.1 200'
    self.assertEquals(response[:len(expected)], expected)
    self.assertEquals(fd.read(), 'oh hai123456789abcdef')
def test_version_manifest(self):
    """End-to-end exercise of object versioning (X-Versions-Location).

    Scenario, all over raw sockets against the live test servers:
    create a 'versions' container pointing at a 'vers' archive
    container, overwrite an object several times and verify each PUT
    archives the prior copy; check COPY and POST behavior; DELETE back
    down through the versions one at a time; verify that manifest
    (X-Object-Manifest) PUTs are not versioned, that deleting
    'obj' does not touch 'obj/sub' versions, and that a PUT into a
    container whose versions location doesn't exist fails with 412.
    """
    versions_to_create = 3
    # Create a container for our versioned object testing
    (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
     obj2lis) = _test_sockets
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/versions HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'Content-Length: 0\r\nX-Versions-Location: vers\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # check that the header was set
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/versions HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 2'  # 2xx series response
    self.assertEquals(headers[:len(exp)], exp)
    self.assert_('X-Versions-Location: vers' in headers)
    # make the container for the object versions
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/vers HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'Content-Length: 0\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # Create the versioned file
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n'
             'X-Object-Meta-Foo: barbaz\r\n\r\n00000\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # Create the object versions
    # Each PUT below should push the previous copy into 'vers'.
    for segment in xrange(1, versions_to_create):
        sleep(.01)  # guarantee that the timestamp changes
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
                 'localhost\r\nConnection: close\r\nX-Storage-Token: '
                 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish%s'
                 '\r\n\r\n%05d\r\n' % (segment, segment))
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
    # NOTE: 'segment' keeps its final loop value (versions_to_create-1)
    # and is relied on by the checks below.
    # Ensure retrieving the manifest file gets the latest version
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n'
             '\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    self.assert_('Content-Type: text/jibberish%s' % segment in headers)
    # Metadata from the very first PUT must not leak into later copies.
    self.assert_('X-Object-Meta-Foo: barbaz' not in headers)
    body = fd.read()
    self.assertEquals(body, '%05d' % segment)
    # Ensure we have the right number of versions saved
    # '004name/' is the versioned-object prefix: %03x of len('name')
    # followed by the object name and a slash.
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    body = fd.read()
    versions = [x for x in body.split('\n') if x]
    self.assertEquals(len(versions), versions_to_create - 1)
    # copy a version and make sure the version info is stripped
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('COPY /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: '
             't\r\nDestination: versions/copied_name\r\n'
             'Content-Length: 0\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 2'  # 2xx series response to the COPY
    self.assertEquals(headers[:len(exp)], exp)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/versions/copied_name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    body = fd.read()
    self.assertEquals(body, '%05d' % segment)
    # post and make sure it's updated
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('POST /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: '
             't\r\nContent-Type: foo/bar\r\nContent-Length: 0\r\n'
             'X-Object-Meta-Bar: foo\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 2'  # 2xx series response to the POST
    self.assertEquals(headers[:len(exp)], exp)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    self.assert_('Content-Type: foo/bar' in headers)
    self.assert_('X-Object-Meta-Bar: foo' in headers)
    body = fd.read()
    self.assertEquals(body, '%05d' % segment)
    # Delete the object versions
    # Each DELETE should restore the next-older copy from 'vers'.
    for segment in xrange(versions_to_create - 1, 0, -1):
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('DELETE /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r'
                 '\nConnection: close\r\nX-Storage-Token: t\r\n\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 2'  # 2xx series response
        self.assertEquals(headers[:len(exp)], exp)
        # Ensure retrieving the manifest file gets the latest version
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r\n'
                 'Connection: close\r\nX-Auth-Token: t\r\n\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        self.assert_('Content-Type: text/jibberish%s' % (segment - 1)
                     in headers)
        body = fd.read()
        self.assertEquals(body, '%05d' % (segment - 1))
        # Ensure we have the right number of versions saved
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: '
                 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r'
                 '\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 2'  # 2xx series response
        self.assertEquals(headers[:len(exp)], exp)
        body = fd.read()
        versions = [x for x in body.split('\n') if x]
        self.assertEquals(len(versions), segment - 1)
    # there is now one segment left (in the manifest)
    # Ensure we have no saved versions
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 204 No Content'
    self.assertEquals(headers[:len(exp)], exp)
    # delete the last remaining version
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('DELETE /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 2'  # 2xx series response
    self.assertEquals(headers[:len(exp)], exp)
    # Ensure it's all gone
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 404'
    self.assertEquals(headers[:len(exp)], exp)

    # make sure manifest files don't get versioned
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 0\r\nContent-Type: text/jibberish0\r\n'
             'Foo: barbaz\r\nX-Object-Manifest: vers/foo_\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # Ensure we have no saved versions
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 204 No Content'
    self.assertEquals(headers[:len(exp)], exp)

    # DELETE v1/a/c/obj shouldn't delete v1/a/c/obj/sub versions
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n'
             'Foo: barbaz\r\n\r\n00000\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n'
             'Foo: barbaz\r\n\r\n00001\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/versions/name/sub HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n'
             'Foo: barbaz\r\n\r\nsub1\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/versions/name/sub HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n'
             'Foo: barbaz\r\n\r\nsub2\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('DELETE /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 2'  # 2xx series response
    self.assertEquals(headers[:len(exp)], exp)
    # 'name/sub' versions (prefix 008name/sub/) must survive the
    # delete of 'name'.
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/vers?prefix=008name/sub/ HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 2'  # 2xx series response
    self.assertEquals(headers[:len(exp)], exp)
    body = fd.read()
    versions = [x for x in body.split('\n') if x]
    self.assertEquals(len(versions), 1)

    # Check for when the versions target container doesn't exist
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/whoops HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'Content-Length: 0\r\nX-Versions-Location: none\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # Create the versioned file
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/whoops/foo HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 5\r\n\r\n00000\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # Create another version
    # Overwrite fails with 412 because the versions container 'none'
    # does not exist, so the old copy cannot be archived.
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/whoops/foo HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 5\r\n\r\n00001\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 412'
    self.assertEquals(headers[:len(exp)], exp)
    # Delete the object
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('DELETE /v1/a/whoops/foo HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 2'  # 2xx response
    self.assertEquals(headers[:len(exp)], exp)
def test_chunked_put_lobjects_with_nonzero_size_manifest_file(self):
    """Large-object (X-Object-Manifest) behavior when the manifest
    object itself has a nonzero body.

    Uploads five 5-byte segments, PUTs a 5-byte object, then POSTs
    X-Object-Manifest onto it.  Verifies that GET of the manifest
    returns the concatenated segments (not the manifest's own body),
    that the Etag is the md5 of the concatenated segment etags, and
    that Range requests are served against the concatenated content.
    """
    # Create a container for our segmented/manifest object testing
    (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \
        _test_sockets
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/segmented_nonzero HTTP/1.1\r\nHost: localhost\r\n'
             'Connection: close\r\nX-Storage-Token: t\r\n'
             'Content-Length: 0\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)
    # Create the object segments
    segment_etags = []
    for segment in xrange(5):
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/segmented_nonzero/name/%s HTTP/1.1\r\nHost: '
                 'localhost\r\nConnection: close\r\nX-Storage-Token: '
                 't\r\nContent-Length: 5\r\n\r\n1234 ' % str(segment))
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        segment_etags.append(md5('1234 ').hexdigest())

    # Create the nonzero size manifest file
    # (a plain 5-byte object at the manifest path, pre-POST)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('PUT /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: '
             't\r\nContent-Length: 5\r\n\r\nabcd ')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 201'
    self.assertEquals(headers[:len(exp)], exp)

    # Create the object manifest file
    # (POST turns the existing object into a manifest)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('POST /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Storage-Token: t\r\n'
             'X-Object-Manifest: segmented_nonzero/name/\r\n'
             'Foo: barbaz\r\nContent-Type: text/jibberish\r\n'
             '\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 202'
    self.assertEquals(headers[:len(exp)], exp)

    # Ensure retrieving the manifest file gets the whole object
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: '
             't\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 200'
    self.assertEquals(headers[:len(exp)], exp)
    self.assert_('X-Object-Manifest: segmented_nonzero/name/' in headers)
    self.assert_('Content-Type: text/jibberish' in headers)
    self.assert_('Foo: barbaz' in headers)
    # Manifest Etag is the md5 of the concatenated segment etags,
    # served quoted.
    expected_etag = md5(''.join(segment_etags)).hexdigest()
    self.assert_('Etag: "%s"' % expected_etag in headers)
    body = fd.read()
    self.assertEquals(body, '1234 1234 1234 1234 1234 ')

    # Get lobjects with Range smaller than manifest file
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n'
             'Range: bytes=0-4\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 206'
    self.assertEquals(headers[:len(exp)], exp)
    self.assert_('X-Object-Manifest: segmented_nonzero/name/' in headers)
    self.assert_('Content-Type: text/jibberish' in headers)
    self.assert_('Foo: barbaz' in headers)
    expected_etag = md5(''.join(segment_etags)).hexdigest()
    body = fd.read()
    self.assertEquals(body, '1234 ')

    # Get lobjects with Range bigger than manifest file
    # (range spans past the manifest's own 5-byte size into later
    # segments of the concatenated content)
    sock = connect_tcp(('localhost', prolis.getsockname()[1]))
    fd = sock.makefile()
    fd.write('GET /v1/a/segmented_nonzero/name HTTP/1.1\r\nHost: '
             'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n'
             'Range: bytes=11-15\r\n\r\n')
    fd.flush()
    headers = readuntil2crlfs(fd)
    exp = 'HTTP/1.1 206'
    self.assertEquals(headers[:len(exp)], exp)
    self.assert_('X-Object-Manifest: segmented_nonzero/name/' in headers)
    self.assert_('Content-Type: text/jibberish' in headers)
    self.assert_('Foo: barbaz' in headers)
    expected_etag = md5(''.join(segment_etags)).hexdigest()
    body = fd.read()
    self.assertEquals(body, '234 1')
    def test_chunked_put_lobjects(self):
        """End-to-end test of X-Object-Manifest large objects over raw HTTP.

        Drives a full proxy/account/container/object server stack through
        plain sockets: uploads five 5-byte segments plus a manifest, then
        verifies segment listing, whole-object GET (including the
        concatenated-ETag header), chunked transfer when the container
        listing limit is exceeded, COPY behavior at and above the limit,
        an empty manifest, and Content-Type handling on COPY.
        """
        # Create a container for our segmented/manifest object testing
        (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
         obj2lis) = _test_sockets
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/segmented%20object HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Storage-Token: t\r\n'
                 'Content-Length: 0\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        # Create the object segments
        segment_etags = []
        for segment in xrange(5):
            sock = connect_tcp(('localhost', prolis.getsockname()[1]))
            fd = sock.makefile()
            # %% in the literal collapses to a single % after the
            # %-formatting below, yielding the url-encoded space.
            fd.write('PUT /v1/a/segmented%%20object/object%%20name/%s '
                     'HTTP/1.1\r\n'
                     'Host: localhost\r\n'
                     'Connection: close\r\n'
                     'X-Storage-Token: t\r\n'
                     'Content-Length: 5\r\n'
                     '\r\n'
                     '1234 ' % str(segment))
            fd.flush()
            headers = readuntil2crlfs(fd)
            exp = 'HTTP/1.1 201'
            self.assertEquals(headers[:len(exp)], exp)
            segment_etags.append(md5('1234 ').hexdigest())
        # Create the object manifest file
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/segmented%20object/object%20name HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Storage-Token: t\r\n'
                 'Content-Length: 0\r\n'
                 'X-Object-Manifest: segmented%20object/object%20name/\r\n'
                 'Content-Type: text/jibberish\r\n'
                 'Foo: barbaz\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        # Check retrieving the listing the manifest would retrieve
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/segmented%20object?prefix=object%20name/ '
                 'HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        body = fd.read()
        self.assertEquals(
            body,
            'object name/0\n'
            'object name/1\n'
            'object name/2\n'
            'object name/3\n'
            'object name/4\n')
        # Ensure retrieving the manifest file gets the whole object
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/segmented%20object/object%20name HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        self.assert_('X-Object-Manifest: segmented%20object/object%20name/' in
                     headers)
        self.assert_('Content-Type: text/jibberish' in headers)
        self.assert_('Foo: barbaz' in headers)
        # Large-object ETag is the md5 of the concatenated segment etags,
        # quoted in the response header.
        expected_etag = md5(''.join(segment_etags)).hexdigest()
        self.assert_('Etag: "%s"' % expected_etag in headers)
        body = fd.read()
        self.assertEquals(body, '1234 1234 1234 1234 1234 ')
        # Do it again but exceeding the container listing limit
        swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = 2
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/segmented%20object/object%20name HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        self.assert_('X-Object-Manifest: segmented%20object/object%20name/' in
                     headers)
        self.assert_('Content-Type: text/jibberish' in headers)
        body = fd.read()
        # A bit fragile of a test; as it makes the assumption that all
        # will be sent in a single chunk.
        self.assertEquals(
            body, '19\r\n1234 1234 1234 1234 1234 \r\n0\r\n\r\n')
        # Make a copy of the manifested object, which should
        # error since the number of segments exceeds
        # CONTAINER_LISTING_LIMIT.
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/segmented%20object/copy HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 'X-Copy-From: segmented%20object/object%20name\r\n'
                 'Content-Length: 0\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 413'
        self.assertEquals(headers[:len(exp)], exp)
        body = fd.read()
        # After adjusting the CONTAINER_LISTING_LIMIT, make a copy of
        # the manifested object which should consolidate the segments.
        # NOTE(review): assumes 10000 is the module's normal value so the
        # global is effectively restored here — confirm against the module.
        swift.proxy.controllers.obj.CONTAINER_LISTING_LIMIT = 10000
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/segmented%20object/copy HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 'X-Copy-From: segmented%20object/object%20name\r\n'
                 'Content-Length: 0\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        body = fd.read()
        # Retrieve and validate the copy.
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/segmented%20object/copy HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        # The copy is consolidated: no manifest header, real length.
        self.assert_('x-object-manifest:' not in headers.lower())
        self.assert_('Content-Length: 25\r' in headers)
        body = fd.read()
        self.assertEquals(body, '1234 1234 1234 1234 1234 ')
        # Create an object manifest file pointing to nothing
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/segmented%20object/empty HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Storage-Token: t\r\n'
                 'Content-Length: 0\r\n'
                 'X-Object-Manifest: segmented%20object/empty/\r\n'
                 'Content-Type: text/jibberish\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        # Ensure retrieving the manifest file gives a zero-byte file
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/segmented%20object/empty HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        self.assert_('X-Object-Manifest: segmented%20object/empty/' in headers)
        self.assert_('Content-Type: text/jibberish' in headers)
        body = fd.read()
        self.assertEquals(body, '')
        # Check copy content type
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/c/obj HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Storage-Token: t\r\n'
                 'Content-Length: 0\r\n'
                 'Content-Type: text/jibberish\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/c/obj2 HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Storage-Token: t\r\n'
                 'Content-Length: 0\r\n'
                 'X-Copy-From: c/obj\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        # Ensure getting the copied file gets original content-type
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/c/obj2 HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        self.assert_('Content-Type: text/jibberish' in headers)
        # Check set content type
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/c/obj3 HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Storage-Token: t\r\n'
                 'Content-Length: 0\r\n'
                 'Content-Type: foo/bar\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        # Ensure getting the copied file gets original content-type
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/c/obj3 HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        self.assert_('Content-Type: foo/bar' in
                     headers.split('\r\n'), repr(headers.split('\r\n')))
        # Check set content type with charset
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/c/obj4 HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Storage-Token: t\r\n'
                 'Content-Length: 0\r\n'
                 'Content-Type: foo/bar; charset=UTF-8\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEquals(headers[:len(exp)], exp)
        # Ensure getting the copied file gets original content-type
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/c/obj4 HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEquals(headers[:len(exp)], exp)
        self.assert_('Content-Type: foo/bar; charset=UTF-8' in
                     headers.split('\r\n'), repr(headers.split('\r\n')))
|
2010-07-12 17:03:45 -05:00
|
|
|
|
|
|
|
def test_mismatched_etags(self):
|
|
|
|
with save_globals():
|
2010-10-12 12:49:20 -05:00
|
|
|
# no etag supplied, object servers return success w/ diff values
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
headers={'Content-Length': '0'})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 201, 201, 201,
|
2012-10-07 14:28:41 +11:00
|
|
|
etags=[None,
|
|
|
|
'68b329da9893e34099c7d8ad5cb9c940',
|
|
|
|
'68b329da9893e34099c7d8ad5cb9c940',
|
|
|
|
'68b329da9893e34099c7d8ad5cb9c941'])
|
2010-07-12 17:03:45 -05:00
|
|
|
resp = controller.PUT(req)
|
2010-10-12 12:49:20 -05:00
|
|
|
self.assertEquals(resp.status_int // 100, 5) # server error
|
|
|
|
|
|
|
|
# req supplies etag, object servers return 422 - mismatch
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
headers={
|
|
|
|
'Content-Length': '0',
|
|
|
|
'ETag': '68b329da9893e34099c7d8ad5cb9c940',
|
|
|
|
})
|
|
|
|
self.app.update_request(req)
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 422, 422, 503,
|
2012-10-07 14:28:41 +11:00
|
|
|
etags=['68b329da9893e34099c7d8ad5cb9c940',
|
|
|
|
'68b329da9893e34099c7d8ad5cb9c941',
|
|
|
|
None,
|
|
|
|
None])
|
2010-10-12 12:49:20 -05:00
|
|
|
resp = controller.PUT(req)
|
|
|
|
self.assertEquals(resp.status_int // 100, 4) # client error
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2011-04-20 15:10:02 -07:00
|
|
|
def test_response_get_accept_ranges_header(self):
|
|
|
|
with save_globals():
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'})
|
|
|
|
self.app.update_request(req)
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200)
|
2011-04-20 15:10:02 -07:00
|
|
|
resp = controller.GET(req)
|
|
|
|
self.assert_('accept-ranges' in resp.headers)
|
|
|
|
self.assertEquals(resp.headers['accept-ranges'], 'bytes')
|
2010-08-09 13:10:05 -07:00
|
|
|
|
2011-04-20 15:10:02 -07:00
|
|
|
def test_response_head_accept_ranges_header(self):
|
|
|
|
with save_globals():
|
2011-04-21 15:09:04 -05:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'HEAD'})
|
2011-04-20 15:10:02 -07:00
|
|
|
self.app.update_request(req)
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200)
|
2011-04-20 15:10:02 -07:00
|
|
|
resp = controller.HEAD(req)
|
|
|
|
self.assert_('accept-ranges' in resp.headers)
|
|
|
|
self.assertEquals(resp.headers['accept-ranges'], 'bytes')
|
2011-08-31 06:08:59 -07:00
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def test_GET_calls_authorize(self):
|
|
|
|
called = [False]
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def authorize(req):
|
|
|
|
called[0] = True
|
|
|
|
return HTTPUnauthorized(request=req)
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 201, 201, 201)
|
2010-09-06 13:26:31 -07:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-09-06 13:26:31 -07:00
|
|
|
req = Request.blank('/a/c/o')
|
|
|
|
req.environ['swift.authorize'] = authorize
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.GET(req)
|
|
|
|
self.assert_(called[0])
|
|
|
|
|
|
|
|
def test_HEAD_calls_authorize(self):
|
|
|
|
called = [False]
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def authorize(req):
|
|
|
|
called[0] = True
|
|
|
|
return HTTPUnauthorized(request=req)
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 201, 201, 201)
|
2010-09-06 13:26:31 -07:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-09-06 13:26:31 -07:00
|
|
|
req = Request.blank('/a/c/o', {'REQUEST_METHOD': 'HEAD'})
|
|
|
|
req.environ['swift.authorize'] = authorize
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.HEAD(req)
|
|
|
|
self.assert_(called[0])
|
|
|
|
|
|
|
|
def test_POST_calls_authorize(self):
|
|
|
|
called = [False]
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def authorize(req):
|
|
|
|
called[0] = True
|
|
|
|
return HTTPUnauthorized(request=req)
|
|
|
|
with save_globals():
|
2011-06-08 04:29:24 +00:00
|
|
|
self.app.object_post_as_copy = False
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 201, 201, 201)
|
2010-09-06 13:26:31 -07:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-09-06 13:26:31 -07:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
|
|
|
|
headers={'Content-Length': '5'}, body='12345')
|
|
|
|
req.environ['swift.authorize'] = authorize
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
self.assert_(called[0])
|
|
|
|
|
2011-06-08 04:19:34 +00:00
|
|
|
def test_POST_as_copy_calls_authorize(self):
|
|
|
|
called = [False]
|
|
|
|
|
|
|
|
def authorize(req):
|
|
|
|
called[0] = True
|
|
|
|
return HTTPUnauthorized(request=req)
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
|
2011-06-08 04:19:34 +00:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2011-06-08 04:19:34 +00:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
|
|
|
|
headers={'Content-Length': '5'}, body='12345')
|
|
|
|
req.environ['swift.authorize'] = authorize
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
self.assert_(called[0])
|
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def test_PUT_calls_authorize(self):
|
|
|
|
called = [False]
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def authorize(req):
|
|
|
|
called[0] = True
|
|
|
|
return HTTPUnauthorized(request=req)
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 201, 201, 201)
|
2010-09-06 13:26:31 -07:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-09-06 13:26:31 -07:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
headers={'Content-Length': '5'}, body='12345')
|
|
|
|
req.environ['swift.authorize'] = authorize
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.PUT(req)
|
|
|
|
self.assert_(called[0])
|
|
|
|
|
2010-11-04 14:39:29 -05:00
|
|
|
def test_COPY_calls_authorize(self):
|
|
|
|
called = [False]
|
|
|
|
|
|
|
|
def authorize(req):
|
|
|
|
called[0] = True
|
|
|
|
return HTTPUnauthorized(request=req)
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
|
2010-11-04 14:39:29 -05:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2010-11-04 14:39:29 -05:00
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'COPY'},
|
|
|
|
headers={'Destination': 'c/o'})
|
|
|
|
req.environ['swift.authorize'] = authorize
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.COPY(req)
|
|
|
|
self.assert_(called[0])
|
2010-09-06 13:26:31 -07:00
|
|
|
|
2011-10-26 21:42:24 +00:00
|
|
|
    def test_POST_converts_delete_after_to_delete_at(self):
        """POST with X-Delete-After must gain X-Delete-At = now + delta.

        Exercised twice: once in the default post-as-copy mode and once
        with direct POSTs (object_post_as_copy = False).
        """
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
            self.app.memcache.store = {}
            # Freeze time.time so "now + 60" is deterministic; restored
            # in the finally block below.
            orig_time = time.time
            try:
                t = time.time()
                time.time = lambda: t
                req = Request.blank('/a/c/o', {},
                                    headers={'Content-Type': 'foo/bar',
                                             'X-Delete-After': '60'})
                self.app.update_request(req)
                res = controller.POST(req)
                self.assertEquals(res.status, '202 Fake')
                # The proxy rewrites the relative header into an absolute
                # x-delete-at timestamp on the (mutated) request.
                self.assertEquals(req.headers.get('x-delete-at'),
                                  str(int(t + 60)))

                # Same check with post-as-copy disabled (direct POSTs).
                self.app.object_post_as_copy = False
                controller = proxy_server.ObjectController(self.app, 'account',
                                                           'container',
                                                           'object')
                set_http_connect(200, 200, 202, 202, 202)
                self.app.memcache.store = {}
                req = Request.blank('/a/c/o', {},
                                    headers={'Content-Type': 'foo/bar',
                                             'X-Delete-After': '60'})
                self.app.update_request(req)
                res = controller.POST(req)
                self.assertEquals(res.status, '202 Fake')
                self.assertEquals(req.headers.get('x-delete-at'),
                                  str(int(t + 60)))
            finally:
                time.time = orig_time
|
2011-10-26 21:42:24 +00:00
|
|
|
|
|
|
|
def test_POST_non_int_delete_after(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
|
2011-10-26 21:42:24 +00:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c/o', {},
|
2012-10-07 14:28:41 +11:00
|
|
|
headers={'Content-Type': 'foo/bar',
|
|
|
|
'X-Delete-After': '60.1'})
|
2011-10-26 21:42:24 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status, '400 Bad Request')
|
|
|
|
self.assertTrue('Non-integer X-Delete-After' in res.body)
|
|
|
|
|
|
|
|
def test_POST_negative_delete_after(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
|
2011-10-26 21:42:24 +00:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c/o', {},
|
2012-10-07 14:28:41 +11:00
|
|
|
headers={'Content-Type': 'foo/bar',
|
|
|
|
'X-Delete-After': '-60'})
|
2011-10-26 21:42:24 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.POST(req)
|
|
|
|
self.assertEquals(res.status, '400 Bad Request')
|
|
|
|
self.assertTrue('X-Delete-At in past' in res.body)
|
|
|
|
|
|
|
|
    def test_POST_delete_at(self):
        """X-Delete-At validation and propagation on direct object POST."""
        with save_globals():
            given_headers = {}

            # Capture the headers the controller would fan out to the
            # object servers instead of performing real requests.
            def fake_make_requests(req, ring, part, method, path, headers,
                                   query_string=''):
                given_headers.update(headers[0])

            self.app.object_post_as_copy = False
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            controller.make_requests = fake_make_requests
            set_http_connect(200, 200)
            self.app.memcache.store = {}
            # Valid future timestamp: passed through, plus the expirer
            # routing headers (host/device/partition).
            t = str(int(time.time() + 100))
            req = Request.blank('/a/c/o', {},
                                headers={'Content-Type': 'foo/bar',
                                         'X-Delete-At': t})
            self.app.update_request(req)
            controller.POST(req)
            self.assertEquals(given_headers.get('X-Delete-At'), t)
            self.assertTrue('X-Delete-At-Host' in given_headers)
            self.assertTrue('X-Delete-At-Device' in given_headers)
            self.assertTrue('X-Delete-At-Partition' in given_headers)

            # Non-integer timestamp -> 400.
            t = str(int(time.time() + 100)) + '.1'
            req = Request.blank('/a/c/o', {},
                                headers={'Content-Type': 'foo/bar',
                                         'X-Delete-At': t})
            self.app.update_request(req)
            resp = controller.POST(req)
            self.assertEquals(resp.status_int, 400)
            self.assertTrue('Non-integer X-Delete-At' in resp.body)

            # Timestamp in the past -> 400.
            t = str(int(time.time() - 100))
            req = Request.blank('/a/c/o', {},
                                headers={'Content-Type': 'foo/bar',
                                         'X-Delete-At': t})
            self.app.update_request(req)
            resp = controller.POST(req)
            self.assertEquals(resp.status_int, 400)
            self.assertTrue('X-Delete-At in past' in resp.body)
|
|
|
|
|
|
|
|
    def test_PUT_converts_delete_after_to_delete_at(self):
        """PUT with X-Delete-After must gain X-Delete-At = now + delta."""
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            set_http_connect(200, 200, 201, 201, 201)
            self.app.memcache.store = {}
            # Freeze time.time so "now + 60" is deterministic; restored
            # in the finally block below.
            orig_time = time.time
            try:
                t = time.time()
                time.time = lambda: t
                req = Request.blank('/a/c/o', {},
                                    headers={'Content-Length': '0',
                                             'Content-Type': 'foo/bar',
                                             'X-Delete-After': '60'})
                self.app.update_request(req)
                res = controller.PUT(req)
                self.assertEquals(res.status, '201 Fake')
                # The proxy rewrites the relative header into an absolute
                # x-delete-at timestamp on the (mutated) request.
                self.assertEquals(req.headers.get('x-delete-at'),
                                  str(int(t + 60)))
            finally:
                time.time = orig_time
|
2011-10-26 21:42:24 +00:00
|
|
|
|
|
|
|
def test_PUT_non_int_delete_after(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 201, 201, 201)
|
2011-10-26 21:42:24 +00:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c/o', {},
|
2012-10-07 14:28:41 +11:00
|
|
|
headers={'Content-Length': '0',
|
|
|
|
'Content-Type': 'foo/bar',
|
|
|
|
'X-Delete-After': '60.1'})
|
2011-10-26 21:42:24 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.PUT(req)
|
|
|
|
self.assertEquals(res.status, '400 Bad Request')
|
|
|
|
self.assertTrue('Non-integer X-Delete-After' in res.body)
|
|
|
|
|
|
|
|
def test_PUT_negative_delete_after(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container', 'object')
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, 201, 201, 201)
|
2011-10-26 21:42:24 +00:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c/o', {},
|
2012-10-07 14:28:41 +11:00
|
|
|
headers={'Content-Length': '0',
|
|
|
|
'Content-Type': 'foo/bar',
|
|
|
|
'X-Delete-After': '-60'})
|
2011-10-26 21:42:24 +00:00
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.PUT(req)
|
|
|
|
self.assertEquals(res.status, '400 Bad Request')
|
|
|
|
self.assertTrue('X-Delete-At in past' in res.body)
|
|
|
|
|
|
|
|
    def test_PUT_delete_at(self):
        """X-Delete-At validation and propagation on object PUT."""
        with save_globals():
            given_headers = {}

            # Capture the headers the proxy would send to an object
            # server instead of opening a real connection.
            def fake_connect_put_node(nodes, part, path, headers,
                                      logger_thread_locals):
                given_headers.update(headers)

            controller = proxy_server.ObjectController(self.app, 'account',
                                                       'container', 'object')
            controller._connect_put_node = fake_connect_put_node
            set_http_connect(200, 200)
            self.app.memcache.store = {}
            # Valid future timestamp: passed through, plus the expirer
            # routing headers (host/device/partition).
            t = str(int(time.time() + 100))
            req = Request.blank('/a/c/o', {},
                                headers={'Content-Length': '0',
                                         'Content-Type': 'foo/bar',
                                         'X-Delete-At': t})
            self.app.update_request(req)
            controller.PUT(req)
            self.assertEquals(given_headers.get('X-Delete-At'), t)
            self.assertTrue('X-Delete-At-Host' in given_headers)
            self.assertTrue('X-Delete-At-Device' in given_headers)
            self.assertTrue('X-Delete-At-Partition' in given_headers)

            # Non-integer timestamp -> 400.
            t = str(int(time.time() + 100)) + '.1'
            req = Request.blank('/a/c/o', {},
                                headers={'Content-Length': '0',
                                         'Content-Type': 'foo/bar',
                                         'X-Delete-At': t})
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 400)
            self.assertTrue('Non-integer X-Delete-At' in resp.body)

            # Timestamp in the past -> 400.
            t = str(int(time.time() - 100))
            req = Request.blank('/a/c/o', {},
                                headers={'Content-Length': '0',
                                         'Content-Type': 'foo/bar',
                                         'X-Delete-At': t})
            self.app.update_request(req)
            resp = controller.PUT(req)
            self.assertEquals(resp.status_int, 400)
            self.assertTrue('X-Delete-At in past' in resp.body)
|
|
|
|
|
2012-09-25 19:23:31 +00:00
|
|
|
    def test_leak_1(self):
        """A client disconnecting mid-GET must not leak Request objects.

        Uploads a two-chunk object, records the module-level Request
        instance counter, starts a GET and closes the socket after one
        byte, then checks the counter returned to its previous value.
        NOTE(review): assumes _request_instances is maintained elsewhere
        in this module (e.g. by a patched Request) — confirm there.
        """
        global _request_instances
        prolis = _test_sockets[0]
        prosrv = _test_servers[0]
        # Two client chunks so the proxy still has data in flight when
        # the client disconnects.
        obj_len = prosrv.client_chunk_size * 2
        # PUT test file
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('PUT /v1/a/c/test_leak_1 HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 'Content-Length: %s\r\n'
                 'Content-Type: application/octet-stream\r\n'
                 '\r\n%s' % (obj_len, 'a' * obj_len))
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 201'
        self.assertEqual(headers[:len(exp)], exp)
        # Remember Request instance count
        before_request_instances = _request_instances
        # GET test file, but disconnect early
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile()
        fd.write('GET /v1/a/c/test_leak_1 HTTP/1.1\r\n'
                 'Host: localhost\r\n'
                 'Connection: close\r\n'
                 'X-Auth-Token: t\r\n'
                 '\r\n')
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = 'HTTP/1.1 200'
        self.assertEqual(headers[:len(exp)], exp)
        # Read one byte of the body, then drop the connection.
        fd.read(1)
        fd.close()
        sock.close()
        self.assertEquals(before_request_instances, _request_instances)
|
2010-11-16 15:35:39 -08:00
|
|
|
|
2012-10-11 16:52:26 -05:00
|
|
|
    def test_OPTIONS(self):
        # Exercises CORS preflight (OPTIONS) handling on the object
        # controller across a sequence of container CORS configurations.
        # container_info is stubbed out per scenario; save_globals() keeps
        # the patched connect state from leaking between tests.
        with save_globals():
            controller = proxy_server.ObjectController(self.app, 'a',
                                                       'c', 'o.jpg')

            # Scenario 1: container has no CORS metadata at all ->
            # preflight from any origin is refused with 401.
            def my_empty_container_info(*args):
                return {}
            controller.container_info = my_empty_container_info
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'http://foo.com',
                         'Access-Control-Request-Method': 'GET'})
            resp = controller.OPTIONS(req)
            self.assertEquals(401, resp.status_int)

            # Scenario 2: CORS metadata present but allow_origin is empty ->
            # still refused.
            def my_empty_origin_container_info(*args):
                return {'cors': {'allow_origin': None}}
            controller.container_info = my_empty_origin_container_info
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'http://foo.com',
                         'Access-Control-Request-Method': 'GET'})
            resp = controller.OPTIONS(req)
            self.assertEquals(401, resp.status_int)

            # Scenario 3: allow_origin lists specific origins ->
            # a matching origin gets a full 200 preflight response.
            def my_container_info(*args):
                return {
                    'cors': {
                        'allow_origin': 'http://foo.bar:8080 https://foo.bar',
                        'max_age': '999',
                    }
                }
            controller.container_info = my_container_info
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'https://foo.bar',
                         'Access-Control-Request-Method': 'GET'})
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)
            self.assertEquals(
                'https://foo.bar',
                resp.headers['access-control-allow-origin'])
            # All seven object verbs must be advertised, and nothing more.
            for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
                self.assertTrue(
                    verb in resp.headers['access-control-allow-methods'])
            self.assertEquals(
                len(resp.headers['access-control-allow-methods'].split(', ')),
                7)
            self.assertEquals('999', resp.headers['access-control-max-age'])
            # Origin without Access-Control-Request-Method is not a valid
            # preflight -> 401.
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'https://foo.bar'})
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(401, resp.status_int)
            # A plain (non-CORS) OPTIONS succeeds and reports Allow.
            req = Request.blank('/a/c/o.jpg', {'REQUEST_METHOD': 'OPTIONS'})
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)
            for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
                self.assertTrue(
                    verb in resp.headers['Allow'])
            self.assertEquals(len(resp.headers['Allow'].split(', ')), 7)
            # Unlisted origin -> refused.
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'http://foo.com'})
            resp = controller.OPTIONS(req)
            self.assertEquals(401, resp.status_int)
            # Proxy-level cors_allow_origin whitelist also grants access,
            # independent of the container metadata.
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'http://foo.bar',
                         'Access-Control-Request-Method': 'GET'})
            controller.app.cors_allow_origin = ['http://foo.bar', ]
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)

            # Scenario 4: wildcard allow_origin -> any origin is allowed and
            # the response echoes the requesting origin back.
            def my_container_info_wildcard(*args):
                return {
                    'cors': {
                        'allow_origin': '*',
                        'max_age': '999',
                    }
                }
            controller.container_info = my_container_info_wildcard
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'https://bar.baz',
                         'Access-Control-Request-Method': 'GET'})
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)
            self.assertEquals(
                'https://bar.baz',
                resp.headers['access-control-allow-origin'])
            for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
                self.assertTrue(
                    verb in resp.headers['access-control-allow-methods'])
            self.assertEquals(
                len(resp.headers['access-control-allow-methods'].split(', ')),
                7)
            self.assertEquals('999', resp.headers['access-control-max-age'])
|
2013-01-15 19:31:42 +00:00
|
|
|
|
|
|
|
def test_CORS_valid(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
|
|
|
|
def stubContainerInfo(*args):
|
|
|
|
return {
|
|
|
|
'cors': {
|
|
|
|
'allow_origin': 'http://foo.bar'
|
|
|
|
}
|
|
|
|
}
|
|
|
|
controller.container_info = stubContainerInfo
|
|
|
|
|
|
|
|
def objectGET(controller, req):
|
|
|
|
return Response(headers={
|
|
|
|
'X-Object-Meta-Color': 'red',
|
|
|
|
'X-Super-Secret': 'hush',
|
|
|
|
})
|
|
|
|
|
|
|
|
req = Request.blank(
|
|
|
|
'/a/c/o.jpg',
|
|
|
|
{'REQUEST_METHOD': 'GET'},
|
|
|
|
headers={'Origin': 'http://foo.bar'})
|
|
|
|
|
|
|
|
resp = cors_validation(objectGET)(controller, req)
|
|
|
|
|
|
|
|
self.assertEquals(200, resp.status_int)
|
|
|
|
self.assertEquals('http://foo.bar',
|
|
|
|
resp.headers['access-control-allow-origin'])
|
|
|
|
self.assertEquals('red', resp.headers['x-object-meta-color'])
|
|
|
|
# X-Super-Secret is in the response, but not "exposed"
|
|
|
|
self.assertEquals('hush', resp.headers['x-super-secret'])
|
|
|
|
self.assertTrue('access-control-expose-headers' in resp.headers)
|
|
|
|
exposed = set(
|
|
|
|
h.strip() for h in
|
|
|
|
resp.headers['access-control-expose-headers'].split(','))
|
|
|
|
expected_exposed = set(['cache-control', 'content-language',
|
|
|
|
'content-type', 'expires', 'last-modified',
|
|
|
|
'pragma', 'etag', 'x-timestamp',
|
|
|
|
'x-trans-id', 'x-object-meta-color'])
|
|
|
|
self.assertEquals(expected_exposed, exposed)
|
2012-11-02 16:46:38 -07:00
|
|
|
|
Allow for multiple X-(Account|Container)-* headers.
When the number of account/container or container/object replicas are
different, Swift had a few misbehaviors. This commit fixes them.
* On an object PUT/POST/DELETE, if there were 3 object replicas and
only 2 container replicas, then only 2 requests would be made to
object servers. Now, 3 requests will be made, but the third won't
have any X-Container-* headers in it.
* On an object PUT/POST/DELETE, if there were 3 object replicas and 4
container replicas, then only 3/4 container servers would receive
immediate updates; the fourth would be ignored. Now one of the
object servers will receive multiple (comma-separated) values in the
X-Container-* headers and it will attempt to contact both of them.
One side effect is that multiple async_pendings may be written for
updates to the same object. They'll have differing timestamps,
though, so all but the newest will be deleted unread. To trigger
this behavior, you have to have more container replicas than object
replicas, 2 or more of the container servers must be down, and the
headers sent to one object server must reference 2 or more down
container servers; it's unlikely enough and the consequences are so
minor that it didn't seem worth fixing.
The situation with account/containers is analogous, only without the
async_pendings.
Change-Id: I98bc2de93fb6b2346d6de1d764213d7563653e8d
2012-12-12 17:47:04 -08:00
|
|
|
    def _gather_x_container_headers(self, controller_call, req, *connect_args,
                                    **kwargs):
        # Helper: run `controller_call(req)` with stubbed backend responses
        # (`connect_args` are the per-node statuses) and return, for each
        # object-server request, the values of the X-Container-* (or other
        # `header_list`) headers the proxy sent along.
        #
        # Returns a list of dicts (one per object-server request), sorted by
        # the first header in `header_list` so assertions are order-stable.
        header_list = kwargs.pop('header_list', ['X-Container-Partition',
                                                 'X-Container-Host',
                                                 'X-Container-Device'])
        seen_headers = []

        def capture_headers(ipaddr, port, device, partition, method,
                            path, headers=None, query_string=None):
            # give_connect callback: record just the headers we care about.
            captured = {}
            for header in header_list:
                captured[header] = headers.get(header)
            seen_headers.append(captured)

        with save_globals():
            self.app.allow_account_management = True

            set_http_connect(*connect_args, give_connect=capture_headers,
                             **kwargs)
            resp = controller_call(req)
            self.assertEqual(2, resp.status_int // 100)  # sanity check

            # don't care about the account/container HEADs, so chuck
            # the first two requests
            return sorted(seen_headers[2:],
                          key=lambda d: d.get(header_list[0]) or 'Z')
|
|
|
|
|
|
|
|
def test_PUT_x_container_headers_with_equal_replicas(self):
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
headers={'Content-Length': '5'}, body='12345')
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
seen_headers = self._gather_x_container_headers(
|
|
|
|
controller.PUT, req,
|
|
|
|
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
|
|
|
|
self.assertEqual(seen_headers, [
|
|
|
|
{'X-Container-Host': '10.0.0.0:1000',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sda'},
|
|
|
|
{'X-Container-Host': '10.0.0.1:1001',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdb'},
|
|
|
|
{'X-Container-Host': '10.0.0.2:1002',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdc'}])
|
|
|
|
|
|
|
|
def test_PUT_x_container_headers_with_fewer_container_replicas(self):
|
|
|
|
self.app.container_ring.set_replicas(2)
|
|
|
|
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
headers={'Content-Length': '5'}, body='12345')
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
seen_headers = self._gather_x_container_headers(
|
|
|
|
controller.PUT, req,
|
|
|
|
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
|
|
|
|
|
|
|
|
self.assertEqual(seen_headers, [
|
|
|
|
{'X-Container-Host': '10.0.0.0:1000',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sda'},
|
|
|
|
{'X-Container-Host': '10.0.0.1:1001',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdb'},
|
|
|
|
{'X-Container-Host': None,
|
|
|
|
'X-Container-Partition': None,
|
|
|
|
'X-Container-Device': None}])
|
|
|
|
|
|
|
|
def test_PUT_x_container_headers_with_more_container_replicas(self):
|
|
|
|
self.app.container_ring.set_replicas(4)
|
|
|
|
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
headers={'Content-Length': '5'}, body='12345')
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
seen_headers = self._gather_x_container_headers(
|
|
|
|
controller.PUT, req,
|
|
|
|
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
|
|
|
|
|
|
|
|
self.assertEqual(seen_headers, [
|
|
|
|
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sda,sdd'},
|
|
|
|
{'X-Container-Host': '10.0.0.1:1001',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdb'},
|
|
|
|
{'X-Container-Host': '10.0.0.2:1002',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdc'}])
|
|
|
|
|
|
|
|
def test_POST_x_container_headers_with_more_container_replicas(self):
|
|
|
|
self.app.container_ring.set_replicas(4)
|
|
|
|
self.app.object_post_as_copy = False
|
|
|
|
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'POST'},
|
|
|
|
headers={'Content-Type': 'application/stuff'})
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
seen_headers = self._gather_x_container_headers(
|
|
|
|
controller.POST, req,
|
|
|
|
200, 200, 200, 200, 200) # HEAD HEAD POST POST POST
|
|
|
|
|
|
|
|
self.assertEqual(seen_headers, [
|
|
|
|
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sda,sdd'},
|
|
|
|
{'X-Container-Host': '10.0.0.1:1001',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdb'},
|
|
|
|
{'X-Container-Host': '10.0.0.2:1002',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdc'}])
|
|
|
|
|
|
|
|
def test_DELETE_x_container_headers_with_more_container_replicas(self):
|
|
|
|
self.app.container_ring.set_replicas(4)
|
|
|
|
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'DELETE'},
|
|
|
|
headers={'Content-Type': 'application/stuff'})
|
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
seen_headers = self._gather_x_container_headers(
|
|
|
|
controller.DELETE, req,
|
|
|
|
200, 200, 200, 200, 200) # HEAD HEAD DELETE DELETE DELETE
|
|
|
|
|
|
|
|
self.assertEqual(seen_headers, [
|
|
|
|
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sda,sdd'},
|
|
|
|
{'X-Container-Host': '10.0.0.1:1001',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdb'},
|
|
|
|
{'X-Container-Host': '10.0.0.2:1002',
|
|
|
|
'X-Container-Partition': 1,
|
|
|
|
'X-Container-Device': 'sdc'}])
|
|
|
|
|
|
|
|
def test_PUT_x_delete_at_with_fewer_container_replicas(self):
|
|
|
|
self.app.container_ring.set_replicas(2)
|
|
|
|
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
headers={'Content-Type': 'application/stuff',
|
|
|
|
'Content-Length': '0',
|
2013-01-30 16:33:28 +11:00
|
|
|
'X-Delete-At': int(time.time()) + 100000})
|
Allow for multiple X-(Account|Container)-* headers.
When the number of account/container or container/object replicas are
different, Swift had a few misbehaviors. This commit fixes them.
* On an object PUT/POST/DELETE, if there were 3 object replicas and
only 2 container replicas, then only 2 requests would be made to
object servers. Now, 3 requests will be made, but the third won't
have any X-Container-* headers in it.
* On an object PUT/POST/DELETE, if there were 3 object replicas and 4
container replicas, then only 3/4 container servers would receive
immediate updates; the fourth would be ignored. Now one of the
object servers will receive multiple (comma-separated) values in the
X-Container-* headers and it will attempt to contact both of them.
One side effect is that multiple async_pendings may be written for
updates to the same object. They'll have differing timestamps,
though, so all but the newest will be deleted unread. To trigger
this behavior, you have to have more container replicas than object
replicas, 2 or more of the container servers must be down, and the
headers sent to one object server must reference 2 or more down
container servers; it's unlikely enough and the consequences are so
minor that it didn't seem worth fixing.
The situation with account/containers is analogous, only without the
async_pendings.
Change-Id: I98bc2de93fb6b2346d6de1d764213d7563653e8d
2012-12-12 17:47:04 -08:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
seen_headers = self._gather_x_container_headers(
|
|
|
|
controller.PUT, req,
|
|
|
|
200, 200, 201, 201, 201, # HEAD HEAD PUT PUT PUT
|
|
|
|
header_list=('X-Delete-At-Host', 'X-Delete-At-Device',
|
|
|
|
'X-Delete-At-Partition'))
|
|
|
|
|
|
|
|
self.assertEqual(seen_headers, [
|
|
|
|
{'X-Delete-At-Host': '10.0.0.0:1000',
|
|
|
|
'X-Delete-At-Partition': 1,
|
|
|
|
'X-Delete-At-Device': 'sda'},
|
|
|
|
{'X-Delete-At-Host': '10.0.0.1:1001',
|
|
|
|
'X-Delete-At-Partition': 1,
|
|
|
|
'X-Delete-At-Device': 'sdb'},
|
|
|
|
{'X-Delete-At-Host': None,
|
|
|
|
'X-Delete-At-Partition': None,
|
|
|
|
'X-Delete-At-Device': None}])
|
|
|
|
|
|
|
|
def test_PUT_x_delete_at_with_more_container_replicas(self):
|
|
|
|
self.app.container_ring.set_replicas(4)
|
|
|
|
self.app.expiring_objects_account = 'expires'
|
|
|
|
self.app.expiring_objects_container_divisor = 60
|
|
|
|
|
|
|
|
req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
|
|
|
|
headers={'Content-Type': 'application/stuff',
|
|
|
|
'Content-Length': 0,
|
2013-01-30 16:33:28 +11:00
|
|
|
'X-Delete-At': int(time.time()) + 100000})
|
Allow for multiple X-(Account|Container)-* headers.
When the number of account/container or container/object replicas are
different, Swift had a few misbehaviors. This commit fixes them.
* On an object PUT/POST/DELETE, if there were 3 object replicas and
only 2 container replicas, then only 2 requests would be made to
object servers. Now, 3 requests will be made, but the third won't
have any X-Container-* headers in it.
* On an object PUT/POST/DELETE, if there were 3 object replicas and 4
container replicas, then only 3/4 container servers would receive
immediate updates; the fourth would be ignored. Now one of the
object servers will receive multiple (comma-separated) values in the
X-Container-* headers and it will attempt to contact both of them.
One side effect is that multiple async_pendings may be written for
updates to the same object. They'll have differing timestamps,
though, so all but the newest will be deleted unread. To trigger
this behavior, you have to have more container replicas than object
replicas, 2 or more of the container servers must be down, and the
headers sent to one object server must reference 2 or more down
container servers; it's unlikely enough and the consequences are so
minor that it didn't seem worth fixing.
The situation with account/containers is analogous, only without the
async_pendings.
Change-Id: I98bc2de93fb6b2346d6de1d764213d7563653e8d
2012-12-12 17:47:04 -08:00
|
|
|
controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o')
|
|
|
|
seen_headers = self._gather_x_container_headers(
|
|
|
|
controller.PUT, req,
|
|
|
|
200, 200, 201, 201, 201, # HEAD HEAD PUT PUT PUT
|
|
|
|
header_list=('X-Delete-At-Host', 'X-Delete-At-Device',
|
|
|
|
'X-Delete-At-Partition'))
|
|
|
|
self.assertEqual(seen_headers, [
|
|
|
|
{'X-Delete-At-Host': '10.0.0.0:1000,10.0.0.3:1003',
|
|
|
|
'X-Delete-At-Partition': 1,
|
|
|
|
'X-Delete-At-Device': 'sda,sdd'},
|
|
|
|
{'X-Delete-At-Host': '10.0.0.1:1001',
|
|
|
|
'X-Delete-At-Partition': 1,
|
|
|
|
'X-Delete-At-Device': 'sdb'},
|
|
|
|
{'X-Delete-At-Host': '10.0.0.2:1002',
|
|
|
|
'X-Delete-At-Partition': 1,
|
|
|
|
'X-Delete-At-Device': 'sdc'}])
|
|
|
|
|
2012-10-07 14:28:41 +11:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
class TestContainerController(unittest.TestCase):
    "Test swift.proxy_server.ContainerController"

    def setUp(self):
        # Fresh proxy Application per test, with fake memcache and fake
        # rings so no real backend servers are contacted.
        self.app = proxy_server.Application(None, FakeMemcache(),
                                            account_ring=FakeRing(),
                                            container_ring=FakeRing(),
                                            object_ring=FakeRing())
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2013-03-17 07:30:00 +08:00
|
|
|
def test_transfer_headers(self):
|
|
|
|
src_headers = {'x-remove-versions-location': 'x',
|
|
|
|
'x-container-read': '*:user'}
|
|
|
|
dst_headers = {'x-versions-location': 'backup'}
|
|
|
|
controller = swift.proxy.controllers.ContainerController(self.app,
|
|
|
|
'a', 'c')
|
|
|
|
controller.transfer_headers(src_headers, dst_headers)
|
|
|
|
expected_headers = {'x-versions-location': '',
|
|
|
|
'x-container-read': '*:user'}
|
|
|
|
self.assertEqual(dst_headers, expected_headers)
|
|
|
|
|
2010-10-11 17:33:11 -05:00
|
|
|
    def assert_status_map(self, method, statuses, expected,
                          raise_exc=False, missing_container=False):
        # Drive `method` (a controller verb) with stubbed backend statuses
        # and assert the resulting proxy status.  Runs twice: once against
        # '/a/c' and once against '/a/c/' to cover the trailing-slash path.
        with save_globals():
            kwargs = {}
            if raise_exc:
                kwargs['raise_exc'] = raise_exc
            kwargs['missing_container'] = missing_container
            set_http_connect(*statuses, **kwargs)
            # Clear memcache so cached container info can't short-circuit.
            self.app.memcache.store = {}
            req = Request.blank('/a/c', headers={'Content-Length': '0',
                                'Content-Type': 'text/plain'})
            self.app.update_request(req)
            res = method(req)
            self.assertEquals(res.status_int, expected)
            # Same statuses again for the trailing-slash variant.
            set_http_connect(*statuses, **kwargs)
            self.app.memcache.store = {}
            req = Request.blank('/a/c/', headers={'Content-Length': '0',
                                'Content-Type': 'text/plain'})
            self.app.update_request(req)
            res = method(req)
            self.assertEquals(res.status_int, expected)
|
|
|
|
|
|
|
|
def test_HEAD(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
2012-10-07 14:28:41 +11:00
|
|
|
'container')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected, **kwargs):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses, **kwargs)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c', {})
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.HEAD(req)
|
|
|
|
self.assertEquals(res.status[:len(str(expected))],
|
|
|
|
str(expected))
|
|
|
|
if expected < 400:
|
|
|
|
self.assert_('x-works' in res.headers)
|
|
|
|
self.assertEquals(res.headers['x-works'], 'yes')
|
|
|
|
test_status_map((200, 200, 404, 404), 200)
|
|
|
|
test_status_map((200, 200, 500, 404), 200)
|
|
|
|
test_status_map((200, 304, 500, 404), 304)
|
|
|
|
test_status_map((200, 404, 404, 404), 404)
|
|
|
|
test_status_map((200, 404, 404, 500), 404)
|
|
|
|
test_status_map((200, 500, 500, 500), 503)
|
|
|
|
|
|
|
|
def test_PUT(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_status_map(statuses, expected, **kwargs):
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(*statuses, **kwargs)
|
2010-07-12 17:03:45 -05:00
|
|
|
self.app.memcache.store = {}
|
|
|
|
req = Request.blank('/a/c', {})
|
|
|
|
req.content_length = 0
|
2010-09-02 21:50:16 -07:00
|
|
|
self.app.update_request(req)
|
2010-07-12 17:03:45 -05:00
|
|
|
res = controller.PUT(req)
|
|
|
|
expected = str(expected)
|
|
|
|
self.assertEquals(res.status[:len(expected)], expected)
|
|
|
|
test_status_map((200, 201, 201, 201), 201, missing_container=True)
|
|
|
|
test_status_map((200, 201, 201, 500), 201, missing_container=True)
|
|
|
|
test_status_map((200, 204, 404, 404), 404, missing_container=True)
|
|
|
|
test_status_map((200, 204, 500, 404), 503, missing_container=True)
|
|
|
|
|
2012-03-14 17:30:02 +00:00
|
|
|
def test_PUT_max_containers_per_account(self):
|
|
|
|
with save_globals():
|
|
|
|
self.app.max_containers_per_account = 12346
|
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
|
|
|
self.assert_status_map(controller.PUT,
|
|
|
|
(200, 200, 200, 201, 201, 201), 201,
|
|
|
|
missing_container=True)
|
|
|
|
|
|
|
|
self.app.max_containers_per_account = 12345
|
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
|
|
|
self.assert_status_map(controller.PUT, (201, 201, 201), 403,
|
|
|
|
missing_container=True)
|
|
|
|
|
|
|
|
self.app.max_containers_per_account = 12345
|
|
|
|
self.app.max_containers_whitelist = ['account']
|
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
|
|
|
self.assert_status_map(controller.PUT,
|
|
|
|
(200, 200, 200, 201, 201, 201), 201,
|
|
|
|
missing_container=True)
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_PUT_max_container_name_length(self):
|
|
|
|
with save_globals():
|
2012-09-05 20:49:50 -07:00
|
|
|
limit = MAX_CONTAINER_NAME_LENGTH
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
2012-09-05 20:49:50 -07:00
|
|
|
'1' * limit)
|
2010-10-11 17:33:11 -05:00
|
|
|
self.assert_status_map(controller.PUT,
|
|
|
|
(200, 200, 200, 201, 201, 201), 201,
|
|
|
|
missing_container=True)
|
2010-07-12 17:03:45 -05:00
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
2012-09-05 20:49:50 -07:00
|
|
|
'2' * (limit + 1))
|
2010-10-11 17:33:11 -05:00
|
|
|
self.assert_status_map(controller.PUT, (201, 201, 201), 400,
|
|
|
|
missing_container=True)
|
2010-07-12 17:03:45 -05:00
|
|
|
|
|
|
|
def test_PUT_connect_exceptions(self):
|
|
|
|
with save_globals():
|
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
2010-10-11 17:33:11 -05:00
|
|
|
self.assert_status_map(controller.PUT, (200, 201, 201, -1), 201,
|
|
|
|
missing_container=True)
|
|
|
|
self.assert_status_map(controller.PUT, (200, 201, -1, -1), 503,
|
|
|
|
missing_container=True)
|
|
|
|
self.assert_status_map(controller.PUT, (200, 503, 503, -1), 503,
|
|
|
|
missing_container=True)
|
2010-07-12 17:03:45 -05:00
|
|
|
|
|
|
|
    def test_acc_missing_returns_404(self):
        # A container DELETE/PUT against an account that cannot be found
        # (or is error-limited) must return 404 rather than act on the
        # container.
        for meth in ('DELETE', 'PUT'):
            with save_globals():
                self.app.memcache = FakeMemcacheReturnsNone()
                # Reset any error-limiting state left on the account devs.
                for dev in self.app.account_ring.devs.values():
                    del dev['errors']
                    del dev['last_error']
                controller = proxy_server.ContainerController(self.app,
                                                              'account',
                                                              'container')
                # Baseline: healthy account -> the verb succeeds (200).
                if meth == 'PUT':
                    set_http_connect(200, 200, 200, 200, 200, 200,
                                     missing_container=True)
                else:
                    set_http_connect(200, 200, 200, 200)
                self.app.memcache.store = {}
                req = Request.blank('/a/c', environ={'REQUEST_METHOD': meth})
                self.app.update_request(req)
                resp = getattr(controller, meth)(req)
                self.assertEquals(resp.status_int, 200)

                # All account nodes say 404 -> verb is refused with 404,
                # regardless of the (would-be) container statuses.
                set_http_connect(404, 404, 404, 200, 200, 200)
                resp = getattr(controller, meth)(req)
                self.assertEquals(resp.status_int, 404)

                # Mixed errors and 404s on the account still resolve to 404.
                set_http_connect(503, 404, 404)
                resp = getattr(controller, meth)(req)
                self.assertEquals(resp.status_int, 404)

                # Even when a node raises instead of responding.
                set_http_connect(503, 404, raise_exc=True)
                resp = getattr(controller, meth)(req)
                self.assertEquals(resp.status_int, 404)

                # Error-limited account devices: treated as account-missing
                # (404) even though the nodes would answer 200.
                for dev in self.app.account_ring.devs.values():
                    dev['errors'] = self.app.error_suppression_limit + 1
                    dev['last_error'] = time.time()
                set_http_connect(200, 200, 200, 200, 200, 200)
                resp = getattr(controller, meth)(req)
                self.assertEquals(resp.status_int, 404)
|
|
|
|
|
|
|
|
    def test_put_locking(self):
        # Container PUT acquires a memcache soft lock; verify the PUT goes
        # through when the lock can be taken.

        class MockMemcache(FakeMemcache):
            # FakeMemcache whose soft_lock can be forced to succeed or fail.

            def __init__(self, allow_lock=None):
                self.allow_lock = allow_lock
                super(MockMemcache, self).__init__()

            @contextmanager
            def soft_lock(self, key, timeout=0, retries=5):
                if self.allow_lock:
                    yield True
                else:
                    raise MemcacheLockError()

        with save_globals():
            controller = proxy_server.ContainerController(self.app, 'account',
                                                          'container')
            self.app.memcache = MockMemcache(allow_lock=True)
            set_http_connect(200, 200, 200, 201, 201, 201,
                             missing_container=True)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'PUT'})
            self.app.update_request(req)
            res = controller.PUT(req)
            # Lock was grantable, so the create succeeds.
            self.assertEquals(res.status_int, 201)
|
|
|
|
|
|
|
|
    def test_error_limiting(self):
        # Verify that repeated backend failures increment a device's error
        # counter, that exceeding error_suppression_limit blacklists the
        # device (503 even with healthy responses), and that a negative
        # error_suppression_interval immediately un-suppresses it.
        with save_globals():
            controller = proxy_server.ContainerController(self.app, 'account',
                                                          'container')
            # Disable node shuffling so the same device sees the failures.
            controller.app.sort_nodes = lambda l: l
            self.assert_status_map(controller.HEAD, (200, 503, 200, 200), 200,
                                   missing_container=False)
            self.assertEquals(
                controller.app.container_ring.devs[0]['errors'], 2)
            self.assert_('last_error' in controller.app.container_ring.devs[0])
            # Push the first device past the suppression limit.
            for _junk in xrange(self.app.error_suppression_limit):
                self.assert_status_map(controller.HEAD,
                                       (200, 503, 503, 503), 503)
            self.assertEquals(controller.app.container_ring.devs[0]['errors'],
                              self.app.error_suppression_limit + 1)
            # Suppressed device now fails requests even when backends are OK.
            self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 503)
            self.assert_('last_error' in controller.app.container_ring.devs[0])
            self.assert_status_map(controller.PUT, (200, 201, 201, 201), 503,
                                   missing_container=True)
            self.assert_status_map(controller.DELETE,
                                   (200, 204, 204, 204), 503)
            # A negative interval expires the suppression right away.
            self.app.error_suppression_interval = -300
            self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 200)
            self.assert_status_map(controller.DELETE, (200, 204, 204, 204),
                                   404, raise_exc=True)
|
|
    def test_DELETE(self):
        # Map backend status tuples (account check first, then the three
        # container replicas) to the expected proxy response for DELETE.
        with save_globals():
            controller = proxy_server.ContainerController(self.app, 'account',
                                                          'container')
            self.assert_status_map(controller.DELETE,
                                   (200, 204, 204, 204), 204)
            self.assert_status_map(controller.DELETE,
                                   (200, 204, 204, 503), 204)
            self.assert_status_map(controller.DELETE,
                                   (200, 204, 503, 503), 503)
            self.assert_status_map(controller.DELETE,
                                   (200, 204, 404, 404), 404)
            self.assert_status_map(controller.DELETE,
                                   (200, 404, 404, 404), 404)
            self.assert_status_map(controller.DELETE,
                                   (200, 204, 503, 404), 503)

            self.app.memcache = FakeMemcacheReturnsNone()
            # 200: Account check, 404x3: Container check
            self.assert_status_map(controller.DELETE,
                                   (200, 404, 404, 404), 404)
|
|
|
def test_response_get_accept_ranges_header(self):
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, body='{}')
|
2011-04-20 15:10:02 -07:00
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
|
|
|
req = Request.blank('/a/c?format=json')
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.GET(req)
|
|
|
|
self.assert_('accept-ranges' in res.headers)
|
|
|
|
self.assertEqual(res.headers['accept-ranges'], 'bytes')
|
|
|
|
|
|
|
|
def test_response_head_accept_ranges_header(self):
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 200, body='{}')
|
2011-04-20 15:10:02 -07:00
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
|
|
|
req = Request.blank('/a/c?format=json')
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.HEAD(req)
|
|
|
|
self.assert_('accept-ranges' in res.headers)
|
|
|
|
self.assertEqual(res.headers['accept-ranges'], 'bytes')
|
2011-08-31 06:08:59 -07:00
|
|
|
|
2010-08-16 15:30:27 -07:00
|
|
|
    def test_PUT_metadata(self):
        # Run the shared container-metadata checks for the PUT verb.
        self.metadata_helper('PUT')
|
    def test_POST_metadata(self):
        # Run the shared container-metadata checks for the POST verb.
        self.metadata_helper('POST')
|
    def metadata_helper(self, method):
        # For each metadata/ACL header, verify the proxy forwards it to the
        # container backends; X-Remove-* headers must be forwarded as the
        # corresponding real header with an empty value.
        for test_header, test_value in (
                ('X-Container-Meta-TestHeader', 'TestValue'),
                ('X-Container-Meta-TestHeader', ''),
                ('X-Remove-Container-Meta-TestHeader', 'anything'),
                ('X-Container-Read', '.r:*'),
                ('X-Remove-Container-Read', 'anything'),
                ('X-Container-Write', 'anyone'),
                ('X-Remove-Container-Write', 'anything')):
            test_errors = []

            def test_connect(ipaddr, port, device, partition, method, path,
                             headers=None, query_string=None):
                # Capture each backend connection and record a failure if the
                # expected header/value pair is missing from the request.
                if path == '/a/c':
                    find_header = test_header
                    find_value = test_value
                    if find_header.lower().startswith('x-remove-'):
                        # X-Remove-Foo must arrive as Foo with empty value.
                        find_header = \
                            find_header.lower().replace('-remove', '', 1)
                        find_value = ''
                    for k, v in headers.iteritems():
                        if k.lower() == find_header.lower() and \
                                v == find_value:
                            break
                    else:
                        # for/else: no header matched.
                        test_errors.append('%s: %s not in %s' %
                                           (find_header, find_value, headers))
            with save_globals():
                controller = \
                    proxy_server.ContainerController(self.app, 'a', 'c')
                set_http_connect(200, 201, 201, 201, give_connect=test_connect)
                req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                    headers={test_header: test_value})
                self.app.update_request(req)
                res = getattr(controller, method)(req)
                self.assertEquals(test_errors, [])
|
    def test_PUT_bad_metadata(self):
        # Run the shared metadata-limit checks for the PUT verb.
        self.bad_metadata_helper('PUT')
|
    def test_POST_bad_metadata(self):
        # Run the shared metadata-limit checks for the POST verb.
        self.bad_metadata_helper('POST')
|
    def bad_metadata_helper(self, method):
        # Verify the proxy enforces the metadata limits: name length, value
        # length, header count, and overall byte size. At-the-limit requests
        # get 201; one-past-the-limit requests get 400.
        with save_globals():
            controller = proxy_server.ContainerController(self.app, 'a', 'c')
            set_http_connect(200, 201, 201, 201)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method})
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 201)

            # Metadata name exactly at MAX_META_NAME_LENGTH: accepted.
            set_http_connect(201, 201, 201)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers={'X-Container-Meta-' +
                                         ('a' * MAX_META_NAME_LENGTH): 'v'})
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 201)
            # One character over: rejected.
            set_http_connect(201, 201, 201)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers={'X-Container-Meta-' +
                                         ('a' * (MAX_META_NAME_LENGTH + 1)): 'v'})
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 400)

            # Metadata value exactly at MAX_META_VALUE_LENGTH: accepted.
            set_http_connect(201, 201, 201)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers={'X-Container-Meta-Too-Long':
                                         'a' * MAX_META_VALUE_LENGTH})
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 201)
            # One byte over: rejected.
            set_http_connect(201, 201, 201)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers={'X-Container-Meta-Too-Long':
                                         'a' * (MAX_META_VALUE_LENGTH + 1)})
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 400)

            # Exactly MAX_META_COUNT headers: accepted.
            set_http_connect(201, 201, 201)
            headers = {}
            for x in xrange(MAX_META_COUNT):
                headers['X-Container-Meta-%d' % x] = 'v'
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers=headers)
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 201)
            # One header too many: rejected.
            set_http_connect(201, 201, 201)
            headers = {}
            for x in xrange(MAX_META_COUNT + 1):
                headers['X-Container-Meta-%d' % x] = 'v'
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers=headers)
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 400)

            # Build headers totalling exactly MAX_META_OVERALL_SIZE - 1 bytes
            # (the "4" accounts for the '%04d' name suffix): accepted.
            set_http_connect(201, 201, 201)
            headers = {}
            header_value = 'a' * MAX_META_VALUE_LENGTH
            size = 0
            x = 0
            while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH:
                size += 4 + MAX_META_VALUE_LENGTH
                headers['X-Container-Meta-%04d' % x] = header_value
                x += 1
            if MAX_META_OVERALL_SIZE - size > 1:
                headers['X-Container-Meta-a'] = \
                    'a' * (MAX_META_OVERALL_SIZE - size - 1)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers=headers)
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 201)
            # Grow the last header by one byte to hit the overall cap: rejected.
            set_http_connect(201, 201, 201)
            headers['X-Container-Meta-a'] = \
                'a' * (MAX_META_OVERALL_SIZE - size)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers=headers)
            self.app.update_request(req)
            resp = getattr(controller, method)(req)
            self.assertEquals(resp.status_int, 400)
2010-09-06 13:26:31 -07:00
|
|
|
    def test_POST_calls_clean_acl(self):
        # POSTing X-Container-Read / X-Container-Write must invoke the
        # swift.clean_acl callback from the request environment.
        called = [False]

        def clean_acl(header, value):
            called[0] = True
            raise ValueError('fake error')

        with save_globals():
            set_http_connect(200, 201, 201, 201)
            controller = proxy_server.ContainerController(self.app, 'account',
                                                          'container')
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'POST'},
                                headers={'X-Container-Read': '.r:*'})
            req.environ['swift.clean_acl'] = clean_acl
            self.app.update_request(req)
            res = controller.POST(req)
            self.assert_(called[0])
        called[0] = False
        with save_globals():
            set_http_connect(200, 201, 201, 201)
            controller = proxy_server.ContainerController(self.app, 'account',
                                                          'container')
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'POST'},
                                headers={'X-Container-Write': '.r:*'})
            req.environ['swift.clean_acl'] = clean_acl
            self.app.update_request(req)
            res = controller.POST(req)
            self.assert_(called[0])
|
    def test_PUT_calls_clean_acl(self):
        # PUTting X-Container-Read / X-Container-Write must invoke the
        # swift.clean_acl callback from the request environment.
        called = [False]

        def clean_acl(header, value):
            called[0] = True
            raise ValueError('fake error')

        with save_globals():
            set_http_connect(200, 201, 201, 201)
            controller = proxy_server.ContainerController(self.app, 'account',
                                                          'container')
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'X-Container-Read': '.r:*'})
            req.environ['swift.clean_acl'] = clean_acl
            self.app.update_request(req)
            res = controller.PUT(req)
            self.assert_(called[0])
        called[0] = False
        with save_globals():
            set_http_connect(200, 201, 201, 201)
            controller = proxy_server.ContainerController(self.app, 'account',
                                                          'container')
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': 'PUT'},
                                headers={'X-Container-Write': '.r:*'})
            req.environ['swift.clean_acl'] = clean_acl
            self.app.update_request(req)
            res = controller.PUT(req)
            self.assert_(called[0])
|
|
|
def test_GET_no_content(self):
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 204, 204, 204)
|
2012-05-31 22:58:56 +00:00
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
|
|
|
req = Request.blank('/a/c')
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.GET(req)
|
|
|
|
self.assertEquals(res.content_length, 0)
|
|
|
|
self.assertTrue('transfer-encoding' not in res.headers)
|
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def test_GET_calls_authorize(self):
|
|
|
|
called = [False]
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def authorize(req):
|
|
|
|
called[0] = True
|
|
|
|
return HTTPUnauthorized(request=req)
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 201, 201, 201)
|
2010-09-06 13:26:31 -07:00
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
|
|
|
req = Request.blank('/a/c')
|
|
|
|
req.environ['swift.authorize'] = authorize
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.GET(req)
|
|
|
|
self.assert_(called[0])
|
|
|
|
|
|
|
|
def test_HEAD_calls_authorize(self):
|
|
|
|
called = [False]
|
2010-10-11 17:33:11 -05:00
|
|
|
|
2010-09-06 13:26:31 -07:00
|
|
|
def authorize(req):
|
|
|
|
called[0] = True
|
|
|
|
return HTTPUnauthorized(request=req)
|
|
|
|
with save_globals():
|
2012-08-23 12:38:09 -07:00
|
|
|
set_http_connect(200, 201, 201, 201)
|
2010-09-06 13:26:31 -07:00
|
|
|
controller = proxy_server.ContainerController(self.app, 'account',
|
|
|
|
'container')
|
|
|
|
req = Request.blank('/a/c', {'REQUEST_METHOD': 'HEAD'})
|
|
|
|
req.environ['swift.authorize'] = authorize
|
|
|
|
self.app.update_request(req)
|
|
|
|
res = controller.HEAD(req)
|
|
|
|
self.assert_(called[0])
|
|
|
|
|
2012-10-11 16:52:26 -05:00
|
|
|
    def test_OPTIONS(self):
        # Exercise CORS preflight handling on the container controller for a
        # range of container_info configurations: no CORS metadata, empty
        # allow_origin, explicit origin list, proxy-level cors_allow_origin,
        # and wildcard origin.
        with save_globals():
            controller = proxy_server.ContainerController(self.app, 'a', 'c')

            def my_empty_container_info(*args):
                return {}
            controller.container_info = my_empty_container_info
            # No CORS metadata at all -> preflight is refused.
            req = Request.blank(
                '/a/c',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'http://foo.com',
                         'Access-Control-Request-Method': 'GET'})
            resp = controller.OPTIONS(req)
            self.assertEquals(401, resp.status_int)

            def my_empty_origin_container_info(*args):
                return {'cors': {'allow_origin': None}}
            controller.container_info = my_empty_origin_container_info
            # allow_origin present but None -> still refused.
            req = Request.blank(
                '/a/c',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'http://foo.com',
                         'Access-Control-Request-Method': 'GET'})
            resp = controller.OPTIONS(req)
            self.assertEquals(401, resp.status_int)

            def my_container_info(*args):
                return {
                    'cors': {
                        'allow_origin': 'http://foo.bar:8080 https://foo.bar',
                        'max_age': '999',
                    }
                }
            controller.container_info = my_container_info
            # Matching origin -> 200 plus the full set of CORS headers.
            req = Request.blank(
                '/a/c',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'https://foo.bar',
                         'Access-Control-Request-Method': 'GET'})
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)
            self.assertEquals(
                'https://foo.bar',
                resp.headers['access-control-allow-origin'])
            for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
                self.assertTrue(
                    verb in resp.headers['access-control-allow-methods'])
            self.assertEquals(
                len(resp.headers['access-control-allow-methods'].split(', ')),
                6)
            self.assertEquals('999', resp.headers['access-control-max-age'])
            # Origin without Access-Control-Request-Method -> refused.
            req = Request.blank(
                '/a/c',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'https://foo.bar'})
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(401, resp.status_int)
            # Plain (non-CORS) OPTIONS -> 200 with an Allow header.
            req = Request.blank('/a/c', {'REQUEST_METHOD': 'OPTIONS'})
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)
            for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
                self.assertTrue(
                    verb in resp.headers['Allow'])
            self.assertEquals(len(resp.headers['Allow'].split(', ')), 6)
            # Origin not in the container's allow_origin list -> refused...
            req = Request.blank(
                '/a/c',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'http://foo.bar',
                         'Access-Control-Request-Method': 'GET'})
            resp = controller.OPTIONS(req)
            self.assertEquals(401, resp.status_int)
            # ...unless the proxy-level cors_allow_origin permits it.
            req = Request.blank(
                '/a/c',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'http://foo.bar',
                         'Access-Control-Request-Method': 'GET'})
            controller.app.cors_allow_origin = ['http://foo.bar', ]
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)

            def my_container_info_wildcard(*args):
                return {
                    'cors': {
                        'allow_origin': '*',
                        'max_age': '999',
                    }
                }
            controller.container_info = my_container_info_wildcard
            # Wildcard allow_origin echoes the requesting origin back.
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'https://bar.baz',
                         'Access-Control-Request-Method': 'GET'})
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)
            self.assertEquals(
                'https://bar.baz',
                resp.headers['access-control-allow-origin'])
            for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
                self.assertTrue(
                    verb in resp.headers['access-control-allow-methods'])
            self.assertEquals(
                len(resp.headers['access-control-allow-methods'].split(', ')),
                6)
            self.assertEquals('999', resp.headers['access-control-max-age'])

            # Requested headers must be echoed back in allow-headers.
            req = Request.blank(
                '/a/c/o.jpg',
                {'REQUEST_METHOD': 'OPTIONS'},
                headers={'Origin': 'https://bar.baz',
                         'Access-Control-Request-Headers':
                         'x-foo, x-bar, x-auth-token',
                         'Access-Control-Request-Method': 'GET'}
            )
            req.content_length = 0
            resp = controller.OPTIONS(req)
            self.assertEquals(200, resp.status_int)
            self.assertEquals(
                sortHeaderNames('x-foo, x-bar, x-auth-token'),
                sortHeaderNames(resp.headers['access-control-allow-headers']))
|
|
    def test_CORS_valid(self):
        # An actual (non-preflight) CORS GET: the cors_validation decorator
        # must add allow-origin and expose-headers while passing through the
        # wrapped handler's own headers.
        with save_globals():
            controller = proxy_server.ContainerController(self.app, 'a', 'c')

            def stubContainerInfo(*args):
                return {
                    'cors': {
                        'allow_origin': 'http://foo.bar'
                    }
                }
            controller.container_info = stubContainerInfo

            def containerGET(controller, req):
                # Fake handler returning one exposable and one hidden header.
                return Response(headers={
                    'X-Container-Meta-Color': 'red',
                    'X-Super-Secret': 'hush',
                })

            req = Request.blank(
                '/a/c',
                {'REQUEST_METHOD': 'GET'},
                headers={'Origin': 'http://foo.bar'})

            resp = cors_validation(containerGET)(controller, req)

            self.assertEquals(200, resp.status_int)
            self.assertEquals('http://foo.bar',
                              resp.headers['access-control-allow-origin'])
            self.assertEquals('red', resp.headers['x-container-meta-color'])
            # X-Super-Secret is in the response, but not "exposed"
            self.assertEquals('hush', resp.headers['x-super-secret'])
            self.assertTrue('access-control-expose-headers' in resp.headers)
            exposed = set(
                h.strip() for h in
                resp.headers['access-control-expose-headers'].split(','))
            expected_exposed = set(['cache-control', 'content-language',
                                    'content-type', 'expires', 'last-modified',
                                    'pragma', 'etag', 'x-timestamp',
                                    'x-trans-id', 'x-container-meta-color'])
            self.assertEquals(expected_exposed, exposed)
|
|
Allow for multiple X-(Account|Container)-* headers.
When the number of account/container or container/object replicas are
different, Swift had a few misbehaviors. This commit fixes them.
* On an object PUT/POST/DELETE, if there were 3 object replicas and
only 2 container replicas, then only 2 requests would be made to
object servers. Now, 3 requests will be made, but the third won't
have any X-Container-* headers in it.
* On an object PUT/POST/DELETE, if there were 3 object replicas and 4
container replicas, then only 3/4 container servers would receive
immediate updates; the fourth would be ignored. Now one of the
object servers will receive multiple (comma-separated) values in the
X-Container-* headers and it will attempt to contact both of them.
One side effect is that multiple async_pendings may be written for
updates to the same object. They'll have differing timestamps,
though, so all but the newest will be deleted unread. To trigger
this behavior, you have to have more container replicas than object
replicas, 2 or more of the container servers must be down, and the
headers sent to one object server must reference 2 or more down
container servers; it's unlikely enough and the consequences are so
minor that it didn't seem worth fixing.
The situation with account/containers is analogous, only without the
async_pendings.
Change-Id: I98bc2de93fb6b2346d6de1d764213d7563653e8d
2012-12-12 17:47:04 -08:00
|
|
|
    def _gather_x_account_headers(self, controller_call, req, *connect_args,
                                  **kwargs):
        # Drive controller_call with the given fake backend statuses and
        # return the X-Account-* headers each backend connection received,
        # sorted by host (None hosts last) with the initial account-HEAD
        # connection discarded.
        seen_headers = []
        to_capture = ('X-Account-Partition', 'X-Account-Host',
                      'X-Account-Device')

        def capture_headers(ipaddr, port, device, partition, method,
                            path, headers=None, query_string=None):
            # Record only the headers of interest for each connection.
            captured = {}
            for header in to_capture:
                captured[header] = headers.get(header)
            seen_headers.append(captured)

        with save_globals():
            self.app.allow_account_management = True

            set_http_connect(*connect_args, give_connect=capture_headers,
                             **kwargs)
            resp = controller_call(req)
            self.assertEqual(2, resp.status_int // 100)  # sanity check

            # don't care about the account HEAD, so throw away the
            # first element
            return sorted(seen_headers[1:],
                          key=lambda d: d['X-Account-Host'] or 'Z')
|
|
|
    def test_PUT_x_account_headers_with_fewer_account_replicas(self):
        # With 2 account replicas and 3 container PUTs, the third PUT gets no
        # X-Account-* headers (all None).
        self.app.account_ring.set_replicas(2)
        req = Request.blank('/a/c', headers={'': ''})
        controller = proxy_server.ContainerController(self.app, 'a', 'c')

        seen_headers = self._gather_x_account_headers(
            controller.PUT, req,
            200, 201, 201, 201)    # HEAD PUT PUT PUT
        self.assertEqual(seen_headers, [
            {'X-Account-Host': '10.0.0.0:1000',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sda'},
            {'X-Account-Host': '10.0.0.1:1001',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sdb'},
            {'X-Account-Host': None,
             'X-Account-Partition': None,
             'X-Account-Device': None}])
|
|
|
    def test_PUT_x_account_headers_with_more_account_replicas(self):
        # With 4 account replicas and 3 container PUTs, one PUT carries two
        # comma-separated account hosts/devices.
        self.app.account_ring.set_replicas(4)
        req = Request.blank('/a/c', headers={'': ''})
        controller = proxy_server.ContainerController(self.app, 'a', 'c')

        seen_headers = self._gather_x_account_headers(
            controller.PUT, req,
            200, 201, 201, 201)    # HEAD PUT PUT PUT
        self.assertEqual(seen_headers, [
            {'X-Account-Host': '10.0.0.0:1000,10.0.0.3:1003',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sda,sdd'},
            {'X-Account-Host': '10.0.0.1:1001',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sdb'},
            {'X-Account-Host': '10.0.0.2:1002',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sdc'}])
|
|
|
    def test_DELETE_x_account_headers_with_fewer_account_replicas(self):
        # DELETE analogue of the fewer-replicas PUT test: the third backend
        # request carries no X-Account-* headers.
        self.app.account_ring.set_replicas(2)
        req = Request.blank('/a/c', headers={'': ''})
        controller = proxy_server.ContainerController(self.app, 'a', 'c')

        seen_headers = self._gather_x_account_headers(
            controller.DELETE, req,
            200, 204, 204, 204)    # HEAD DELETE DELETE DELETE
        self.assertEqual(seen_headers, [
            {'X-Account-Host': '10.0.0.0:1000',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sda'},
            {'X-Account-Host': '10.0.0.1:1001',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sdb'},
            {'X-Account-Host': None,
             'X-Account-Partition': None,
             'X-Account-Device': None}])
|
|
|
    def test_DELETE_x_account_headers_with_more_account_replicas(self):
        # DELETE analogue of the more-replicas PUT test: one backend request
        # carries two comma-separated account hosts/devices.
        self.app.account_ring.set_replicas(4)
        req = Request.blank('/a/c', headers={'': ''})
        controller = proxy_server.ContainerController(self.app, 'a', 'c')

        seen_headers = self._gather_x_account_headers(
            controller.DELETE, req,
            200, 204, 204, 204)    # HEAD DELETE DELETE DELETE
        self.assertEqual(seen_headers, [
            {'X-Account-Host': '10.0.0.0:1000,10.0.0.3:1003',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sda,sdd'},
            {'X-Account-Host': '10.0.0.1:1001',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sdb'},
            {'X-Account-Host': '10.0.0.2:1002',
             'X-Account-Partition': 1,
             'X-Account-Device': 'sdc'}])
2010-07-12 17:03:45 -05:00
|
|
|
|
|
|
|
class TestAccountController(unittest.TestCase):
|
|
|
|
|
|
|
|
def setUp(self):
|
2010-07-13 14:23:39 -07:00
|
|
|
self.app = proxy_server.Application(None, FakeMemcache(),
|
2012-10-07 14:28:41 +11:00
|
|
|
account_ring=FakeRing(),
|
|
|
|
container_ring=FakeRing(),
|
|
|
|
object_ring=FakeRing)
|
2010-07-12 17:03:45 -05:00
|
|
|
|
|
|
|
    def assert_status_map(self, method, statuses, expected):
        # Fake the backend responses with `statuses` and assert the proxy
        # returns `expected` for both '/a' and '/a/' request paths.
        with save_globals():
            set_http_connect(*statuses)
            req = Request.blank('/a', {})
            self.app.update_request(req)
            res = method(req)
            self.assertEquals(res.status_int, expected)
            set_http_connect(*statuses)
            req = Request.blank('/a/', {})
            self.app.update_request(req)
            res = method(req)
            self.assertEquals(res.status_int, expected)
|
|
2012-11-02 16:46:38 -07:00
|
|
|
def test_OPTIONS(self):
|
|
|
|
with save_globals():
|
|
|
|
self.app.allow_account_management = False
|
|
|
|
controller = proxy_server.AccountController(self.app, 'account')
|
|
|
|
req = Request.blank('/account', {'REQUEST_METHOD': 'OPTIONS'})
|
|
|
|
req.content_length = 0
|
|
|
|
resp = controller.OPTIONS(req)
|
|
|
|
self.assertEquals(200, resp.status_int)
|
|
|
|
for verb in 'OPTIONS GET POST HEAD'.split():
|
|
|
|
self.assertTrue(
|
|
|
|
verb in resp.headers['Allow'])
|
|
|
|
self.assertEquals(len(resp.headers['Allow'].split(', ')), 4)
|
2013-01-15 19:31:42 +00:00
|
|
|
|
|
|
|
# Test a CORS OPTIONS request (i.e. including Origin and
|
|
|
|
# Access-Control-Request-Method headers)
|
|
|
|
self.app.allow_account_management = False
|
|
|
|
controller = proxy_server.AccountController(self.app, 'account')
|
|
|
|
req = Request.blank('/account', {'REQUEST_METHOD': 'OPTIONS'},
|
2013-03-20 19:26:45 -07:00
|
|
|
headers={'Origin': 'http://foo.com',
|
2013-01-15 19:31:42 +00:00
|
|
|
'Access-Control-Request-Method': 'GET'})
|
|
|
|
req.content_length = 0
|
|
|
|
resp = controller.OPTIONS(req)
|
|
|
|
self.assertEquals(200, resp.status_int)
|
|
|
|
for verb in 'OPTIONS GET POST HEAD'.split():
|
|
|
|
self.assertTrue(
|
|
|
|
verb in resp.headers['Allow'])
|
|
|
|
self.assertEquals(len(resp.headers['Allow'].split(', ')), 4)
|
|
|
|
|
2012-11-02 16:46:38 -07:00
|
|
|
self.app.allow_account_management = True
|
|
|
|
controller = proxy_server.AccountController(self.app, 'account')
|
|
|
|
req = Request.blank('/account', {'REQUEST_METHOD': 'OPTIONS'})
|
|
|
|
req.content_length = 0
|
|
|
|
resp = controller.OPTIONS(req)
|
|
|
|
self.assertEquals(200, resp.status_int)
|
|
|
|
print resp.headers['Allow']
|
|
|
|
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
|
|
|
|
self.assertTrue(
|
|
|
|
verb in resp.headers['Allow'])
|
|
|
|
self.assertEquals(len(resp.headers['Allow'].split(', ')), 6)
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_GET(self):
    """Account GET fans out to replicas; check the aggregate status for a
    table of backend status combinations."""
    with save_globals():
        controller = proxy_server.AccountController(self.app, 'account')
        status_table = (
            ((200, 200, 200), 200),
            ((200, 200, 503), 200),
            ((200, 503, 503), 200),
            ((204, 204, 204), 204),
            ((204, 204, 503), 204),
            ((204, 503, 503), 204),
            ((204, 204, 200), 204),
            ((204, 200, 200), 204),
            ((404, 404, 404), 404),
            ((404, 404, 200), 200),
            ((404, 200, 200), 200),
            ((404, 404, 503), 404),
            ((404, 503, 503), 503),
            ((404, 204, 503), 204),
        )
        for statuses, expected in status_table:
            self.assert_status_map(controller.GET, statuses, expected)

        # With a cache that never stores anything, an all-404 account is
        # still a 404.
        self.app.memcache = FakeMemcacheReturnsNone()
        self.assert_status_map(controller.GET, (404, 404, 404), 404)
|
|
|
|
|
2011-06-11 04:57:04 +00:00
|
|
|
def test_GET_autocreate(self):
    """GET on a missing account 404s unless account_autocreate is on."""
    with save_globals():
        controller = proxy_server.AccountController(self.app, 'account')
        self.app.memcache = FakeMemcacheReturnsNone()
        # Autocreate off: the trailing creation statuses never matter.
        self.assert_status_map(controller.GET,
                               (404, 404, 404, 201, 201, 201, 204), 404)
        controller.app.account_autocreate = True
        self.assert_status_map(controller.GET,
                               (404, 404, 404, 201, 201, 201, 204), 204)
        # Creation failures propagate their status code.
        for status in (403, 409):
            self.assert_status_map(controller.GET,
                                   (404, 404, 404) + (status,) * 4, status)
|
2011-06-11 04:57:04 +00:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_HEAD(self):
    """Account HEAD aggregates replica statuses the same way GET does."""
    with save_globals():
        controller = proxy_server.AccountController(self.app, 'account')
        status_table = (
            ((200, 200, 200), 200),
            ((200, 200, 503), 200),
            ((200, 503, 503), 200),
            ((204, 204, 204), 204),
            ((204, 204, 503), 204),
            ((204, 503, 503), 204),
            ((204, 204, 200), 204),
            ((204, 200, 200), 204),
            ((404, 404, 404), 404),
            ((404, 404, 200), 200),
            ((404, 200, 200), 200),
            ((404, 404, 503), 404),
            ((404, 503, 503), 503),
            ((404, 204, 503), 204),
        )
        for statuses, expected in status_table:
            self.assert_status_map(controller.HEAD, statuses, expected)
|
|
|
|
|
2011-06-11 04:57:04 +00:00
|
|
|
def test_HEAD_autocreate(self):
    """HEAD on a missing account autocreates it only when enabled."""
    with save_globals():
        controller = proxy_server.AccountController(self.app, 'account')
        self.app.memcache = FakeMemcacheReturnsNone()
        # Autocreate off: the trailing creation statuses never matter.
        self.assert_status_map(controller.HEAD,
                               (404, 404, 404, 201, 201, 201, 204), 404)
        controller.app.account_autocreate = True
        self.assert_status_map(controller.HEAD,
                               (404, 404, 404, 201, 201, 201, 204), 204)
        # Creation failures propagate their status code.
        for status in (403, 409):
            self.assert_status_map(controller.HEAD,
                                   (404, 404, 404) + (status,) * 4, status)
|
2011-06-11 04:57:04 +00:00
|
|
|
|
|
|
|
def test_POST_autocreate(self):
    """POST to a missing account 404s unless account_autocreate is on."""
    with save_globals():
        controller = proxy_server.AccountController(self.app, 'account')
        self.app.memcache = FakeMemcacheReturnsNone()
        # Autocreate off: the would-be creation statuses are irrelevant.
        self.assert_status_map(controller.POST,
                               (404, 404, 404, 201, 201, 201), 404)
        controller.app.account_autocreate = True
        self.assert_status_map(controller.POST,
                               (404, 404, 404, 201, 201, 201), 201)
        # Creation failures propagate their status code.
        for status in (403, 409):
            self.assert_status_map(controller.POST,
                                   (404, 404, 404) + (status,) * 4, status)
|
2011-06-11 04:57:04 +00:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_connection_refused(self):
    """Every account node refusing the connection must yield a 503."""
    self.app.account_ring.get_nodes('account')
    for device in self.app.account_ring.devs.values():
        device['ip'] = '127.0.0.1'
        device['port'] = 1  # can't connect on this port
    controller = proxy_server.AccountController(self.app, 'account')
    request = Request.blank('/account', environ={'REQUEST_METHOD': 'HEAD'})
    self.app.update_request(request)
    response = controller.HEAD(request)
    self.assertEquals(response.status_int, 503)
|
|
|
|
|
|
|
|
def test_other_socket_error(self):
    """A non-refusal socket error on every node must also yield a 503."""
    self.app.account_ring.get_nodes('account')
    for device in self.app.account_ring.devs.values():
        device['ip'] = '127.0.0.1'
        device['port'] = -1  # invalid port number
    controller = proxy_server.AccountController(self.app, 'account')
    request = Request.blank('/account', environ={'REQUEST_METHOD': 'HEAD'})
    self.app.update_request(request)
    response = controller.HEAD(request)
    self.assertEquals(response.status_int, 503)
|
|
|
|
|
2011-04-20 15:10:02 -07:00
|
|
|
def test_response_get_accept_ranges_header(self):
    """Account GET responses must advertise byte-range support."""
    with save_globals():
        set_http_connect(200, 200, body='{}')
        controller = proxy_server.AccountController(self.app, 'account')
        request = Request.blank('/a?format=json')
        self.app.update_request(request)
        response = controller.GET(request)
        self.assert_('accept-ranges' in response.headers)
        self.assertEqual(response.headers['accept-ranges'], 'bytes')
|
2011-08-31 06:08:59 -07:00
|
|
|
|
2011-04-20 15:10:02 -07:00
|
|
|
def test_response_head_accept_ranges_header(self):
    """Account HEAD responses must advertise byte-range support."""
    with save_globals():
        set_http_connect(200, 200, body='{}')
        controller = proxy_server.AccountController(self.app, 'account')
        request = Request.blank('/a?format=json')
        self.app.update_request(request)
        response = controller.HEAD(request)
        response.body  # drain the response before inspecting headers
        self.assert_('accept-ranges' in response.headers)
        self.assertEqual(response.headers['accept-ranges'], 'bytes')
|
2011-08-31 06:08:59 -07:00
|
|
|
|
2010-09-11 17:03:09 -07:00
|
|
|
def test_PUT(self):
    """Account PUT is 405 until allow_account_management is enabled; then
    the replica quorum decides the status."""
    with save_globals():
        controller = proxy_server.AccountController(self.app, 'account')

        def check(statuses, expected, **kwargs):
            # Drive one PUT through the controller with canned backend
            # statuses and verify the resulting response code prefix.
            set_http_connect(*statuses, **kwargs)
            self.app.memcache.store = {}
            request = Request.blank('/a', {})
            request.content_length = 0
            self.app.update_request(request)
            result = controller.PUT(request)
            wanted = str(expected)
            self.assertEquals(result.status[:len(wanted)], wanted)

        check((201, 201, 201), 405)  # management disabled
        self.app.allow_account_management = True
        check((201, 201, 201), 201)
        check((201, 201, 500), 201)
        check((201, 500, 500), 503)
        check((204, 500, 404), 503)
|
|
|
|
|
|
|
|
def test_PUT_max_account_name_length(self):
    """Names longer than MAX_ACCOUNT_NAME_LENGTH are rejected with 400."""
    with save_globals():
        self.app.allow_account_management = True
        limit = MAX_ACCOUNT_NAME_LENGTH
        at_limit = proxy_server.AccountController(self.app, '1' * limit)
        self.assert_status_map(at_limit.PUT, (201, 201, 201), 201)
        over_limit = proxy_server.AccountController(
            self.app, '2' * (limit + 1))
        self.assert_status_map(over_limit.PUT, (201, 201, 201), 400)
|
|
|
|
|
|
|
|
def test_PUT_connect_exceptions(self):
    """A -1 status simulates a connect exception; a single failure is
    tolerated, two or more are not."""
    with save_globals():
        self.app.allow_account_management = True
        controller = proxy_server.AccountController(self.app, 'account')
        for statuses, expected in (((201, 201, -1), 201),
                                   ((201, -1, -1), 503),
                                   ((503, 503, -1), 503)):
            self.assert_status_map(controller.PUT, statuses, expected)
|
|
|
|
|
|
|
|
def test_PUT_metadata(self):
    """Exercise account-metadata pass-through for the PUT verb."""
    self.metadata_helper('PUT')
|
|
|
|
|
2010-08-16 15:30:27 -07:00
|
|
|
def test_POST_metadata(self):
    """Exercise account-metadata pass-through for the POST verb."""
    self.metadata_helper('POST')
|
|
|
|
|
|
|
|
def metadata_helper(self, method):
    """Verify that account metadata headers (set, blank-out, and
    X-Remove-*) are forwarded to the backend for the given verb.

    :param method: 'PUT' or 'POST' -- the controller method to drive.
    """
    for test_header, test_value in (
            ('X-Account-Meta-TestHeader', 'TestValue'),
            ('X-Account-Meta-TestHeader', ''),
            ('X-Remove-Account-Meta-TestHeader', 'anything')):
        # Collect mismatches here; asserting inside test_connect would be
        # swallowed by the fake connection machinery.
        test_errors = []

        def test_connect(ipaddr, port, device, partition, method, path,
                         headers=None, query_string=None):
            # Inspect only the account request itself, not sub-requests.
            if path == '/a':
                find_header = test_header
                find_value = test_value
                # An X-Remove-* header must arrive at the backend as the
                # plain header with an empty value.
                if find_header.lower().startswith('x-remove-'):
                    find_header = \
                        find_header.lower().replace('-remove', '', 1)
                    find_value = ''
                for k, v in headers.iteritems():
                    if k.lower() == find_header.lower() and \
                            v == find_value:
                        break
                else:
                    # for/else: no matching header was found.
                    test_errors.append('%s: %s not in %s' %
                                       (find_header, find_value, headers))
        with save_globals():
            self.app.allow_account_management = True
            controller = \
                proxy_server.AccountController(self.app, 'a')
            set_http_connect(201, 201, 201, give_connect=test_connect)
            req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                                headers={test_header: test_value})
            self.app.update_request(req)
            res = getattr(controller, method)(req)
            self.assertEquals(test_errors, [])
|
|
|
|
|
2010-09-11 17:03:09 -07:00
|
|
|
def test_PUT_bad_metadata(self):
    """Exercise metadata limit enforcement for the PUT verb."""
    self.bad_metadata_helper('PUT')
|
|
|
|
|
2010-08-16 15:30:27 -07:00
|
|
|
def test_POST_bad_metadata(self):
    """Exercise metadata limit enforcement for the POST verb."""
    self.bad_metadata_helper('POST')
|
|
|
|
|
|
|
|
def bad_metadata_helper(self, method):
    """Verify metadata limit enforcement (name length, value length,
    header count, and overall size) for the given verb: requests at each
    limit succeed (201), requests one past it fail (400).

    :param method: 'PUT' or 'POST' -- the controller method to drive.
    """
    with save_globals():
        self.app.allow_account_management = True
        controller = proxy_server.AccountController(self.app, 'a')
        # Baseline: a request with no metadata at all succeeds.
        set_http_connect(200, 201, 201, 201)
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method})
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 201)

        # Metadata name exactly at MAX_META_NAME_LENGTH is accepted...
        set_http_connect(201, 201, 201)
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                            headers={'X-Account-Meta-' +
                                     ('a' * MAX_META_NAME_LENGTH): 'v'})
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 201)
        # ...and one character longer is rejected.
        set_http_connect(201, 201, 201)
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                            headers={'X-Account-Meta-' +
                                     ('a' * (MAX_META_NAME_LENGTH + 1)): 'v'})
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 400)

        # Metadata value exactly at MAX_META_VALUE_LENGTH is accepted...
        set_http_connect(201, 201, 201)
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                            headers={'X-Account-Meta-Too-Long':
                                     'a' * MAX_META_VALUE_LENGTH})
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 201)
        # ...and one character longer is rejected.
        set_http_connect(201, 201, 201)
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                            headers={'X-Account-Meta-Too-Long':
                                     'a' * (MAX_META_VALUE_LENGTH + 1)})
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 400)

        # Exactly MAX_META_COUNT metadata headers are accepted...
        set_http_connect(201, 201, 201)
        headers = {}
        for x in xrange(MAX_META_COUNT):
            headers['X-Account-Meta-%d' % x] = 'v'
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                            headers=headers)
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 201)
        # ...and one extra header is rejected.
        set_http_connect(201, 201, 201)
        headers = {}
        for x in xrange(MAX_META_COUNT + 1):
            headers['X-Account-Meta-%d' % x] = 'v'
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                            headers=headers)
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 400)

        # Build a header set whose total size (names + values) is exactly
        # MAX_META_OVERALL_SIZE - 1; note each 'X-Account-Meta-%04d' name
        # contributes 4 counted characters ('%04d') plus the value.
        set_http_connect(201, 201, 201)
        headers = {}
        header_value = 'a' * MAX_META_VALUE_LENGTH
        size = 0
        x = 0
        while size < MAX_META_OVERALL_SIZE - 4 - MAX_META_VALUE_LENGTH:
            size += 4 + MAX_META_VALUE_LENGTH
            headers['X-Account-Meta-%04d' % x] = header_value
            x += 1
        if MAX_META_OVERALL_SIZE - size > 1:
            headers['X-Account-Meta-a'] = \
                'a' * (MAX_META_OVERALL_SIZE - size - 1)
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                            headers=headers)
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 201)
        # One more byte pushes the total to MAX_META_OVERALL_SIZE -> 400.
        set_http_connect(201, 201, 201)
        headers['X-Account-Meta-a'] = \
            'a' * (MAX_META_OVERALL_SIZE - size)
        req = Request.blank('/a/c', environ={'REQUEST_METHOD': method},
                            headers=headers)
        self.app.update_request(req)
        resp = getattr(controller, method)(req)
        self.assertEquals(resp.status_int, 400)
|
|
|
|
|
2010-11-29 15:19:29 -08:00
|
|
|
def test_DELETE(self):
    """Account DELETE is 405 until allow_account_management is enabled;
    then the replica quorum decides the status."""
    with save_globals():
        controller = proxy_server.AccountController(self.app, 'account')

        def check(statuses, expected, **kwargs):
            # Drive one DELETE through the controller with canned backend
            # statuses and verify the resulting response code prefix.
            set_http_connect(*statuses, **kwargs)
            self.app.memcache.store = {}
            request = Request.blank('/a', {'REQUEST_METHOD': 'DELETE'})
            request.content_length = 0
            self.app.update_request(request)
            result = controller.DELETE(request)
            wanted = str(expected)
            self.assertEquals(result.status[:len(wanted)], wanted)

        check((201, 201, 201), 405)  # management disabled
        self.app.allow_account_management = True
        check((201, 201, 201), 201)
        check((201, 201, 500), 201)
        check((201, 500, 500), 503)
        check((204, 500, 404), 503)
|
|
|
|
|
2013-04-19 09:43:31 +00:00
|
|
|
def test_DELETE_with_query_string(self):
    # Extra safety in case someone typos a query string for an
    # account-level DELETE request that was really meant to be caught by
    # some middleware.
    with save_globals():
        controller = proxy_server.AccountController(self.app, 'account')

        def check(statuses, expected, **kwargs):
            set_http_connect(*statuses, **kwargs)
            self.app.memcache.store = {}
            request = Request.blank('/a?whoops',
                                    {'REQUEST_METHOD': 'DELETE'})
            request.content_length = 0
            self.app.update_request(request)
            result = controller.DELETE(request)
            wanted = str(expected)
            self.assertEquals(result.status[:len(wanted)], wanted)

        # The query string makes every variant a 400, regardless of the
        # backend statuses or the management setting.
        check((201, 201, 201), 400)
        self.app.allow_account_management = True
        for statuses in ((201, 201, 201), (201, 201, 500),
                         (201, 500, 500), (204, 500, 404)):
            check(statuses, 400)
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2010-11-16 15:35:39 -08:00
|
|
|
class FakeObjectController(object):
    """Minimal stand-in for the proxy ObjectController, providing just
    enough state and behavior for the SegmentedIterable tests: a canned
    GETorHEAD_base, exception recording, and no-op node handling.
    """

    def __init__(self):
        # The tests treat this object as both the controller and its app
        # and logger, so point those attributes back at self.
        self.app = self
        self.logger = self
        self.account_name = 'a'
        self.container_name = 'c'
        self.object_name = 'o'
        self.trans_id = 'tx1'
        self.object_ring = FakeRing()
        self.node_timeout = 1
        # Rate-limiting knobs read by SegmentedIterable: start limiting
        # after 3 segments, at 2 segments per second (0.5s sleeps).
        self.rate_limit_after_segment = 3
        self.rate_limit_segments_per_sec = 2

    def exception(self, *args):
        # Record the logged exception so tests can assert on its message
        # and the active exception info.
        self.exception_args = args
        self.exception_info = sys.exc_info()

    def GETorHEAD_base(self, *args):
        # Fake a backend GET: the body is the path's last character
        # repeated that many times (e.g. '/a/lc/o2' -> '22'), honoring a
        # Range header on the request if present.  The args are also
        # recorded so tests can inspect the requested path (args[4]).
        self.GETorHEAD_base_args = args
        req = args[0]
        path = args[4]
        body = data = path[-1] * int(path[-1])
        if req.range:
            r = req.range.ranges_for_length(len(data))
            if r:
                (start, stop) = r[0]
                body = data[start:stop]
        resp = Response(app_iter=iter(body))
        return resp

    def iter_nodes(self, ring, partition):
        # Yield primary nodes first, then handoffs, mirroring the shape
        # of the real proxy's node iteration.
        for node in ring.get_part_nodes(partition):
            yield node
        for node in ring.get_more_nodes(partition):
            yield node

    def sort_nodes(self, nodes):
        # No-op sort keeps ring order deterministic for the tests.
        return nodes

    def set_node_timing(self, node, timing):
        # Node timing data is irrelevant here.
        return
|
|
|
|
|
2010-11-16 15:35:39 -08:00
|
|
|
|
|
|
|
class Stub(object):
    """Bare attribute bag, assigned where SegmentedIterable needs a
    response object to write attributes onto."""
    pass
|
|
|
|
|
|
|
|
|
|
|
|
class TestSegmentedIterable(unittest.TestCase):
|
|
|
|
|
|
|
|
def setUp(self):
    # Fresh fake proxy controller per test; it records GETorHEAD_base
    # calls and logged exceptions for the assertions below.
    self.controller = FakeObjectController()
|
|
|
|
|
|
|
|
def test_load_next_segment_unexpected_error(self):
    """A non-dict listing entry raises and logs via exception()."""
    # Iterator value isn't a dict
    broken = SegmentedIterable(self.controller, None, [None])
    self.assertRaises(Exception, broken._load_next_segment)
    self.assert_(self.controller.exception_args[0].startswith(
        'ERROR: While processing manifest'))
|
2010-11-16 15:35:39 -08:00
|
|
|
|
|
|
|
def test_load_next_segment_with_no_segments(self):
    """An empty listing simply exhausts the segment iterator."""
    empty = SegmentedIterable(self.controller, 'lc', [])
    self.assertRaises(StopIteration, empty._load_next_segment)
|
2010-11-16 15:35:39 -08:00
|
|
|
|
|
|
|
def test_load_next_segment_with_one_segment(self):
    """Loading the only segment fetches /a/lc/o1 and yields its body."""
    segit = SegmentedIterable(self.controller, 'lc', [{'name': 'o1'}])
    segit._load_next_segment()
    self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o1')
    self.assertEquals(''.join(segit.segment_iter), '1')
|
|
|
|
|
|
|
|
def test_load_next_segment_with_two_segments(self):
    """Segments load in listing order, one backend GET each."""
    segit = SegmentedIterable(self.controller, 'lc',
                              [{'name': 'o1'}, {'name': 'o2'}])
    for name, expected_body in (('o1', '1'), ('o2', '22')):
        segit._load_next_segment()
        self.assertEquals(self.controller.GETorHEAD_base_args[4],
                          '/a/lc/' + name)
        self.assertEquals(''.join(segit.segment_iter), expected_body)
|
|
|
|
|
2012-08-20 22:51:46 -07:00
|
|
|
def test_load_next_segment_rate_limiting(self):
    """Rate limiting kicks in only after rate_limit_after_segment (3)
    segments; each subsequent load sleeps ~1/rate_limit_segments_per_sec
    (0.5s)."""
    sleep_calls = []

    def _stub_sleep(sleepy_time):
        sleep_calls.append(sleepy_time)
    orig_sleep = swift.proxy.controllers.obj.sleep
    try:
        # Patch the module-level sleep so no real waiting happens.
        swift.proxy.controllers.obj.sleep = _stub_sleep
        segit = SegmentedIterable(
            self.controller, 'lc', [
                {'name': 'o1'}, {'name': 'o2'}, {'name': 'o3'},
                {'name': 'o4'}, {'name': 'o5'}])

        # rate_limit_after_segment == 3, so the first 3 segments should
        # invoke no sleeping.
        for _ in xrange(3):
            segit._load_next_segment()
        self.assertEquals([], sleep_calls)
        self.assertEquals(self.controller.GETorHEAD_base_args[4],
                          '/a/lc/o3')

        # Loading of next (4th) segment starts rate-limiting.
        segit._load_next_segment()
        self.assertAlmostEqual(0.5, sleep_calls[0], places=2)
        self.assertEquals(self.controller.GETorHEAD_base_args[4],
                          '/a/lc/o4')

        sleep_calls = []
        segit._load_next_segment()
        self.assertAlmostEqual(0.5, sleep_calls[0], places=2)
        self.assertEquals(self.controller.GETorHEAD_base_args[4],
                          '/a/lc/o5')
    finally:
        # Always restore the real sleep, even if an assertion fails.
        swift.proxy.controllers.obj.sleep = orig_sleep
|
|
|
|
|
2013-04-01 10:19:35 -07:00
|
|
|
def test_load_next_segment_range_req_rate_limiting(self):
    """Segments skipped by a Range request still count toward the
    rate_limit_after_segment threshold."""
    sleep_calls = []

    def _stub_sleep(sleepy_time):
        sleep_calls.append(sleepy_time)
    orig_sleep = swift.proxy.controllers.obj.sleep
    try:
        # Patch the module-level sleep so no real waiting happens.
        swift.proxy.controllers.obj.sleep = _stub_sleep
        segit = SegmentedIterable(
            self.controller, 'lc', [
                {'name': 'o0', 'bytes': 5}, {'name': 'o1', 'bytes': 5},
                {'name': 'o2', 'bytes': 1}, {'name': 'o3'}, {'name': 'o4'},
                {'name': 'o5'}, {'name': 'o6'}])

        # this tests for a range request which skips over the whole first
        # segment, after that 3 segments will be read in because the
        # rate_limit_after_segment == 3, then sleeping starts
        segit_iter = segit.app_iter_range(10, None)
        segit_iter.next()
        for _ in xrange(2):
            # this is set to 2 instead of 3 because o2 was loaded after
            # o0 and o1 were skipped.
            segit._load_next_segment()
        self.assertEquals([], sleep_calls)
        self.assertEquals(self.controller.GETorHEAD_base_args[4],
                          '/a/lc/o4')

        # Loading of next (5th) segment starts rate-limiting.
        segit._load_next_segment()
        self.assertAlmostEqual(0.5, sleep_calls[0], places=2)
        self.assertEquals(self.controller.GETorHEAD_base_args[4],
                          '/a/lc/o5')

        sleep_calls = []
        segit._load_next_segment()
        self.assertAlmostEqual(0.5, sleep_calls[0], places=2)
        self.assertEquals(self.controller.GETorHEAD_base_args[4],
                          '/a/lc/o6')
    finally:
        # Always restore the real sleep, even if an assertion fails.
        swift.proxy.controllers.obj.sleep = orig_sleep
|
|
|
|
|
2010-11-16 15:35:39 -08:00
|
|
|
def test_load_next_segment_with_two_segments_skip_first(self):
    """Advancing the listing before loading skips the first segment."""
    segit = SegmentedIterable(self.controller, 'lc',
                              [{'name': 'o1'}, {'name': 'o2'}])
    segit.ratelimit_index = 0
    segit.listing.next()  # consume o1 without fetching it
    segit._load_next_segment()
    self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o2')
    self.assertEquals(''.join(segit.segment_iter), '22')
|
|
|
|
|
|
|
|
def test_load_next_segment_with_seek(self):
    """A seek offset becomes a 'bytes=<seek>-' Range on the backend GET."""
    segit = SegmentedIterable(self.controller, 'lc',
                              [{'name': 'o1'}, {'name': 'o2'}])
    segit.ratelimit_index = 0
    segit.listing.next()  # skip o1
    segit.seek = 1
    segit._load_next_segment()
    self.assertEquals(self.controller.GETorHEAD_base_args[4], '/a/lc/o2')
    self.assertEquals(str(self.controller.GETorHEAD_base_args[0].range),
                      'bytes=1-')
    self.assertEquals(''.join(segit.segment_iter), '2')
|
|
|
|
|
|
|
|
def test_load_next_segment_with_get_error(self):
    """A backend error surfaces as an exception naming the segment."""

    def failing_GETorHEAD_base(*args):
        # Every backend fetch 404s.
        return HTTPNotFound()

    self.controller.GETorHEAD_base = failing_GETorHEAD_base
    segit = SegmentedIterable(self.controller, 'lc', [{'name': 'o1'}])
    self.assertRaises(Exception, segit._load_next_segment)
    self.assert_(self.controller.exception_args[0].startswith(
        'ERROR: While processing manifest'))
    self.assertEquals(str(self.controller.exception_info[1]),
                      'Could not load object segment /a/lc/o1: 404')
|
2010-11-16 15:35:39 -08:00
|
|
|
|
|
|
|
def test_iter_unexpected_error(self):
    """Joining an iterable built from a bad listing entry raises."""
    # Iterator value isn't a dict
    broken = SegmentedIterable(self.controller, None, [None])
    self.assertRaises(Exception, ''.join, broken)
    self.assert_(self.controller.exception_args[0].startswith(
        'ERROR: While processing manifest'))
|
2010-11-16 15:35:39 -08:00
|
|
|
|
|
|
|
def test_iter_with_no_segments(self):
    """An empty listing iterates to the empty string."""
    empty = SegmentedIterable(self.controller, 'lc', [])
    self.assertEquals(''.join(empty), '')
|
|
|
|
|
|
|
|
def test_iter_with_one_segment(self):
    """Iterating a one-segment listing yields that segment's body."""
    segit = SegmentedIterable(self.controller, 'lc', [{'name': 'o1'}])
    segit.response = Stub()
    self.assertEquals(''.join(segit), '1')
|
|
|
|
|
|
|
|
def test_iter_with_two_segments(self):
    """Iterating a two-segment listing concatenates the bodies in order."""
    segit = SegmentedIterable(self.controller, 'lc',
                              [{'name': 'o1'}, {'name': 'o2'}])
    segit.response = Stub()
    self.assertEquals(''.join(segit), '122')
|
|
|
|
|
|
|
|
def test_iter_with_get_error(self):
    """A backend error during iteration raises and names the segment."""

    def failing_GETorHEAD_base(*args):
        # Every backend fetch 404s.
        return HTTPNotFound()

    self.controller.GETorHEAD_base = failing_GETorHEAD_base
    broken = SegmentedIterable(self.controller, 'lc', [{'name': 'o1'}])
    self.assertRaises(Exception, ''.join, broken)
    self.assert_(self.controller.exception_args[0].startswith(
        'ERROR: While processing manifest'))
    self.assertEquals(str(self.controller.exception_info[1]),
                      'Could not load object segment /a/lc/o1: 404')
|
2010-11-16 15:35:39 -08:00
|
|
|
|
|
|
|
def test_app_iter_range_unexpected_error(self):
    """Ranged iteration over a bad listing entry raises on first next()."""
    # Iterator value isn't a dict
    ranged = SegmentedIterable(self.controller, None,
                               [None]).app_iter_range(None, None)
    self.assertRaises(Exception, ranged.next)
    self.assert_(self.controller.exception_args[0].startswith(
        'ERROR: While processing manifest'))
|
2010-11-16 15:35:39 -08:00
|
|
|
|
|
|
|
def test_app_iter_range_with_no_segments(self):
    """Every range over an empty listing yields the empty string."""
    for start, stop in ((None, None), (3, None), (3, 5), (None, 5)):
        segit = SegmentedIterable(self.controller, 'lc', [])
        self.assertEquals(''.join(segit.app_iter_range(start, stop)), '')
|
|
|
|
|
|
|
|
def test_app_iter_range_with_one_segment(self):
    """Ranges over a single one-byte segment."""
    listing = [{'name': 'o1', 'bytes': 1}]

    def ranged(start, stop, with_response=True):
        # Build a fresh iterable per range; only ranges that actually
        # yield data get a response object, matching real usage.
        it = SegmentedIterable(self.controller, 'lc', listing)
        if with_response:
            it.response = Stub()
        return ''.join(it.app_iter_range(start, stop))

    self.assertEquals(ranged(None, None), '1')
    self.assertEquals(ranged(3, None, with_response=False), '')
    self.assertEquals(ranged(3, 5, with_response=False), '')
    self.assertEquals(ranged(None, 5), '1')
|
|
|
|
|
|
|
|
def test_app_iter_range_with_two_segments(self):
    """Byte ranges spanning a 1-byte and a 2-byte segment ('1' + '22')."""
    listing = [{'name': 'o1', 'bytes': 1}, {'name': 'o2', 'bytes': 2}]

    # (start, stop, expected body) over the concatenated content '122'.
    for start, stop, expected in ((None, None, '122'),
                                  (1, None, '22'),
                                  (1, 5, '22'),
                                  (None, 2, '12')):
        segit = SegmentedIterable(self.controller, 'lc', listing)
        segit.response = Stub()
        self.assertEquals(''.join(segit.app_iter_range(start, stop)),
                          expected)
|
|
|
|
|
|
|
|
def test_app_iter_range_with_many_segments(self):
    """Byte ranges across five segments of increasing size.

    The concatenated content is '122333444455555' (segment oN
    contributes N copies of the digit N).
    """
    listing = [{'name': 'o1', 'bytes': 1}, {'name': 'o2', 'bytes': 2},
               {'name': 'o3', 'bytes': 3}, {'name': 'o4', 'bytes': 4},
               {'name': 'o5', 'bytes': 5}]

    # (start, stop, expected body) — covers full range, open-ended
    # starts, open-ended stops, and fully bounded mid-content ranges.
    cases = [
        (None, None, '122333444455555'),
        (3, None, '333444455555'),
        (5, None, '3444455555'),
        (None, 6, '122333'),
        (None, 7, '1223334'),
        (3, 7, '3334'),
        (5, 7, '34'),
    ]
    for start, stop, expected in cases:
        segit = SegmentedIterable(self.controller, 'lc', listing)
        segit.response = Stub()
        self.assertEquals(''.join(segit.app_iter_range(start, stop)),
                          expected)
|
|
|
|
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
# Script entry point: build the shared test fixtures, run the whole
# test module, and guarantee teardown() runs even if tests fail or
# unittest.main() raises SystemExit.
if __name__ == '__main__':
    setup()
    try:
        unittest.main()
    finally:
        teardown()