Merge "Follow-up for per-policy proxy configs"
commit 0d5b2a867d
@@ -167,11 +167,11 @@ use = egg:swift#proxy
 # Depth of the proxy put queue.
 # put_queue_depth = 10
 #
-# Storage nodes can be chosen at random (shuffle), by using timing
-# measurements (timing), or by using an explicit match (affinity).
-# Using timing measurements may allow for lower overall latency, while
-# using affinity allows for finer control. In both the timing and
-# affinity cases, equally-sorting nodes are still randomly chosen to
+# During GET and HEAD requests, storage nodes can be chosen at random
+# (shuffle), by using timing measurements (timing), or by using an explicit
+# region/zone match (affinity). Using timing measurements may allow for lower
+# overall latency, while using affinity allows for finer control. In both the
+# timing and affinity cases, equally-sorting nodes are still randomly chosen to
 # spread load.
 # The valid values for sorting_method are "affinity", "shuffle", or "timing".
 # This option may be overridden in a per-policy configuration section.
@@ -215,7 +215,7 @@ use = egg:swift#proxy
 # This option may be overridden in a per-policy configuration section.
 # read_affinity =
 #
-# Specifies which backend servers to prefer on writes. Format is a comma
+# Specifies which backend servers to prefer on object writes. Format is a comma
 # separated list of affinity descriptors of the form r<N> for region N or
 # r<N>z<M> for region N, zone M. If this is set, then when handling an object
 # PUT request, some number (see setting write_affinity_node_count) of local
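As a hedged illustration of the options these sample-config comments describe (the values below are made up; the per-policy section form matches the tests later in this change), a proxy-server.conf might combine proxy-wide defaults with a policy-specific override like this:

[app:proxy-server]
use = egg:swift#proxy
sorting_method = shuffle
write_affinity_node_count = 2 * replicas

[proxy-server:policy:0]
sorting_method = affinity
read_affinity = r1=100
write_affinity = r1z2
write_affinity_node_count = 1 * replicas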
@@ -115,7 +115,7 @@ class ConfigString(NamedConfigLoader):
         self.filename = "string"
         defaults = {
             'here': "string",
-            '__file__': StringIO(dedent(config_string)),
+            '__file__': self.contents,
         }
         self.parser = loadwsgi.NicerConfigParser("string", defaults=defaults)
         self.parser.optionxform = str  # Don't lower-case keys
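The change above swaps a locally-built StringIO for a shared self.contents attribute. A minimal Python 3 sketch (not Swift's ConfigString; the names and parser below are stand-ins) of why keeping the whole config around as a file-like object is useful, namely that later code can re-parse every section from memory:

from io import StringIO
from textwrap import dedent
from configparser import ConfigParser

conf_text = dedent("""
    [app:proxy-server]
    use = egg:swift#proxy

    [proxy-server:policy:0]
    sorting_method = affinity
    """)

contents = StringIO(conf_text)   # stands in for the new self.contents attribute
parser = ConfigParser()
parser.read_file(contents)       # the whole config can be re-read from memory
print(parser.sections())         # ['app:proxy-server', 'proxy-server:policy:0']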
@@ -24,7 +24,6 @@
 # These shenanigans are to ensure all related objects can be garbage
 # collected. We've seen objects hang around forever otherwise.
 
-import six
 from six.moves.urllib.parse import unquote
 
 import collections
@@ -143,26 +142,26 @@ class BaseObjectController(Controller):
         :param ring: ring to get nodes from
         :param partition: ring partition to yield nodes for
         """
-        policy_conf = self.app.get_policy_options(policy)
-        is_local = policy_conf.write_affinity_is_local_fn
+        policy_options = self.app.get_policy_options(policy)
+        is_local = policy_options.write_affinity_is_local_fn
         if is_local is None:
             return self.app.iter_nodes(ring, partition, policy=policy)
 
         primary_nodes = ring.get_part_nodes(partition)
-        num_locals = policy_conf.write_affinity_node_count(len(primary_nodes))
+        num_locals = policy_options.write_affinity_node_count_fn(
+            len(primary_nodes))
 
         all_nodes = itertools.chain(primary_nodes,
                                     ring.get_more_nodes(partition))
         first_n_local_nodes = list(itertools.islice(
-            six.moves.filter(is_local, all_nodes), num_locals))
+            (node for node in all_nodes if is_local(node)), num_locals))
 
         # refresh it; it moved when we computed first_n_local_nodes
         all_nodes = itertools.chain(primary_nodes,
                                     ring.get_more_nodes(partition))
         local_first_node_iter = itertools.chain(
             first_n_local_nodes,
-            six.moves.filter(lambda node: node not in first_n_local_nodes,
-                             all_nodes))
+            (node for node in all_nodes if node not in first_n_local_nodes))
 
         return self.app.iter_nodes(
             ring, partition, node_iter=local_first_node_iter, policy=policy)
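A standalone sketch of the local-first ordering used in this hunk, with toy node dicts and a toy is_local predicate (in Swift the predicate comes from the write_affinity setting; everything below is illustrative):

import itertools

nodes = [{'region': r, 'id': i} for i, r in enumerate([1, 2, 1, 2, 1, 2])]

def is_local(node):
    return node['region'] == 1

num_locals = 2
node_iter = iter(nodes)
# take the first num_locals nodes that satisfy the locality predicate
first_n_local = list(itertools.islice(
    (n for n in node_iter if is_local(n)), num_locals))

# start over so non-local nodes that were skipped are not lost
node_iter = iter(nodes)
local_first = itertools.chain(
    first_n_local,
    (n for n in node_iter if n not in first_n_local))

print([n['id'] for n in local_first])   # [0, 2, 1, 3, 4, 5]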
@@ -85,20 +85,20 @@ def _label_for_policy(policy):
     return '(default)'
 
 
-class OverrideConf(object):
+class ProxyOverrideOptions(object):
     """
-    Encapsulates proxy server properties that may be overridden e.g. for
+    Encapsulates proxy server options that may be overridden e.g. for
     policy specific configurations.
 
     :param conf: the proxy-server config dict.
     :param override_conf: a dict of overriding configuration options.
     """
     def __init__(self, base_conf, override_conf):
-        self.conf = base_conf
-        self.override_conf = override_conf
+        def get(key, default):
+            return override_conf.get(key, base_conf.get(key, default))
 
-        self.sorting_method = self._get('sorting_method', 'shuffle').lower()
-        self.read_affinity = self._get('read_affinity', '')
+        self.sorting_method = get('sorting_method', 'shuffle').lower()
+        self.read_affinity = get('read_affinity', '')
         try:
             self.read_affinity_sort_key = affinity_key_function(
                 self.read_affinity)
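A toy illustration (plain dicts, not the Swift class) of the override-then-default lookup the new get() closure performs:

base_conf = {'sorting_method': 'shuffle', 'read_affinity': ''}
override_conf = {'sorting_method': 'affinity'}

def get(key, default):
    # prefer the per-policy override, then the proxy-server value, then default
    return override_conf.get(key, base_conf.get(key, default))

print(get('sorting_method', 'shuffle'))   # 'affinity' (overridden)
print(get('read_affinity', ''))           # '' (from base_conf)
print(get('write_affinity', ''))          # '' (default)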
@@ -107,7 +107,7 @@ class OverrideConf(object):
             raise ValueError("Invalid read_affinity value: %r (%s)" %
                              (self.read_affinity, err.message))
 
-        self.write_affinity = self._get('write_affinity', '')
+        self.write_affinity = get('write_affinity', '')
         try:
             self.write_affinity_is_local_fn \
                 = affinity_locality_predicate(self.write_affinity)
@@ -115,15 +115,15 @@ class OverrideConf(object):
             # make the message a little more useful
             raise ValueError("Invalid write_affinity value: %r (%s)" %
                              (self.write_affinity, err.message))
-        self.write_affinity_node_value = self._get(
+        self.write_affinity_node_count = get(
             'write_affinity_node_count', '2 * replicas').lower()
-        value = self.write_affinity_node_value.split()
+        value = self.write_affinity_node_count.split()
         if len(value) == 1:
             wanc_value = int(value[0])
-            self.write_affinity_node_count = lambda replicas: wanc_value
+            self.write_affinity_node_count_fn = lambda replicas: wanc_value
         elif len(value) == 3 and value[1] == '*' and value[2] == 'replicas':
             wanc_value = int(value[0])
-            self.write_affinity_node_count = \
+            self.write_affinity_node_count_fn = \
                 lambda replicas: wanc_value * replicas
         else:
             raise ValueError(
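A sketch of how a value such as "2 * replicas" resolves to a callable, mirroring the branch logic above (illustrative only; the helper name is invented and error handling is trimmed):

def parse_node_count(value_str):
    value = value_str.lower().split()
    if len(value) == 1:
        count = int(value[0])
        return lambda replicas: count
    elif len(value) == 3 and value[1] == '*' and value[2] == 'replicas':
        factor = int(value[0])
        return lambda replicas: factor * replicas
    raise ValueError('Invalid write_affinity_node_count value: %r' % value_str)

fn = parse_node_count('2 * replicas')
print(fn(3))                        # 6
print(parse_node_count('4')(3))     # 4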
@@ -131,13 +131,21 @@ class OverrideConf(object):
                 (' '.join(value)))
 
     def __repr__(self):
-        return ('sorting_method: %s, read_affinity: %s, write_affinity: %s, '
-                'write_affinity_node_count: %s' %
-                (self.sorting_method, self.read_affinity, self.write_affinity,
-                 self.write_affinity_node_value))
+        return '%s({}, {%s})' % (self.__class__.__name__, ', '.join(
+            '%r: %r' % (k, getattr(self, k)) for k in (
+                'sorting_method',
+                'read_affinity',
+                'write_affinity',
+                'write_affinity_node_count')))
 
-    def _get(self, key, default):
-        return self.override_conf.get(key, self.conf.get(key, default))
+    def __eq__(self, other):
+        if not isinstance(other, ProxyOverrideOptions):
+            return False
+        return all(getattr(self, k) == getattr(other, k) for k in (
+            'sorting_method',
+            'read_affinity',
+            'write_affinity',
+            'write_affinity_node_count'))
 
 
 class Application(object):
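The new __repr__ is formatted so that, as the tests later in this change verify, eval'ing it reproduces an equal object. A reduced sketch of the same round-trip pattern with a made-up Options class:

class Options(object):
    def __init__(self, base, override):
        self.sorting_method = override.get(
            'sorting_method', base.get('sorting_method', 'shuffle'))

    def __repr__(self):
        return "%s({}, {%s})" % (
            self.__class__.__name__,
            "'sorting_method': %r" % self.sorting_method)

    def __eq__(self, other):
        return (isinstance(other, Options) and
                self.sorting_method == other.sorting_method)

opts = Options({}, {'sorting_method': 'affinity'})
print(repr(opts))   # Options({}, {'sorting_method': 'affinity'})
print(opts == eval(repr(opts), {'Options': Options}))   # True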
@@ -151,9 +159,9 @@ class Application(object):
             self.logger = get_logger(conf, log_route='proxy-server')
         else:
             self.logger = logger
-        self._override_confs = self._load_per_policy_config(conf)
+        self._override_options = self._load_per_policy_config(conf)
         self.sorts_by_timing = any(pc.sorting_method == 'timing'
-                                   for pc in self._override_confs.values())
+                                   for pc in self._override_options.values())
 
         self._error_limiting = {}
 
@@ -277,7 +285,7 @@ class Application(object):
     def _make_policy_override(self, policy, conf, override_conf):
         label_for_policy = _label_for_policy(policy)
         try:
-            override = OverrideConf(conf, override_conf)
+            override = ProxyOverrideOptions(conf, override_conf)
             self.logger.debug("Loaded override config for %s: %r" %
                               (label_for_policy, override))
             return override
@@ -290,15 +298,16 @@ class Application(object):
 
         :param conf: the proxy server local conf dict
         :return: a dict mapping :class:`BaseStoragePolicy` to an instance of
-            :class:`OverrideConf` that has policy specific config attributes
+            :class:`ProxyOverrideOptions` that has policy-specific config
+            attributes
         """
-        # the default conf will be used when looking up a policy that had no
-        # override conf
-        default_conf = self._make_policy_override(None, conf, {})
-        override_confs = defaultdict(lambda: default_conf)
+        # the default options will be used when looking up a policy that had no
+        # override options
+        default_options = self._make_policy_override(None, conf, {})
+        overrides = defaultdict(lambda: default_options)
         # force None key to be set in the defaultdict so that it is found when
         # iterating over items in check_config
-        override_confs[None] = default_conf
+        overrides[None] = default_options
         for index, override_conf in conf.get('policy_config', {}).items():
             try:
                 index = int(index)
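A sketch of the defaultdict pattern introduced here: every policy without an override section maps to the single shared default options object (the keys below are placeholder strings rather than real policy objects):

from collections import defaultdict

default_options = object()           # stands in for the default options object
overrides = defaultdict(lambda: default_options)
overrides[None] = default_options    # make the default visible when iterating
overrides['policy-0'] = object()     # a policy with its own override section

print(overrides['policy-1'] is default_options)   # True - falls back to default
print(overrides['policy-0'] is default_options)   # False - has its own options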
@@ -313,29 +322,29 @@ class Application(object):
                 raise ValueError(
                     "No policy found for override config, index: %s" % index)
             override = self._make_policy_override(policy, conf, override_conf)
-            override_confs[policy] = override
-        return override_confs
+            overrides[policy] = override
+        return overrides
 
     def get_policy_options(self, policy):
         """
         Return policy specific options.
 
         :param policy: an instance of :class:`BaseStoragePolicy`
-        :return: an instance of :class:`OverrideConf`
+        :return: an instance of :class:`ProxyOverrideOptions`
         """
-        return self._override_confs[policy]
+        return self._override_options[policy]
 
     def check_config(self):
         """
         Check the configuration for possible errors
         """
-        for policy, conf in self._override_confs.items():
-            if conf.read_affinity and conf.sorting_method != 'affinity':
+        for policy, options in self._override_options.items():
+            if options.read_affinity and options.sorting_method != 'affinity':
                 self.logger.warning(
                     _("sorting_method is set to '%(method)s', not 'affinity'; "
                       "%(label)s read_affinity setting will have no effect."),
                     {'label': _label_for_policy(policy),
-                     'method': conf.sorting_method})
+                     'method': options.sorting_method})
 
     def get_object_ring(self, policy_idx):
         """
@@ -531,16 +540,16 @@ class Application(object):
         # (ie within the rounding resolution) won't prefer one over another.
         # Python's sort is stable (http://wiki.python.org/moin/HowTo/Sorting/)
         shuffle(nodes)
-        policy_conf = self.get_policy_options(policy)
-        if policy_conf.sorting_method == 'timing':
+        policy_options = self.get_policy_options(policy)
+        if policy_options.sorting_method == 'timing':
             now = time()
 
             def key_func(node):
                 timing, expires = self.node_timings.get(node['ip'], (-1.0, 0))
                 return timing if expires > now else -1.0
             nodes.sort(key=key_func)
-        elif policy_conf.sorting_method == 'affinity':
-            nodes.sort(key=policy_conf.read_affinity_sort_key)
+        elif policy_options.sorting_method == 'affinity':
+            nodes.sort(key=policy_options.read_affinity_sort_key)
         return nodes
 
     def set_node_timing(self, node, timing):
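An illustrative timing sort with the same shape as key_func above: nodes with a fresh timing sample sort by that sample, while anything stale or unknown sorts first at -1.0 (toy data, not Swift's node_timings cache):

from time import time

node_timings = {'10.0.0.1': (0.25, time() + 300),   # (timing, expires)
                '10.0.0.2': (0.05, time() + 300),
                '10.0.0.3': (0.80, time() - 10)}     # expired sample

nodes = [{'ip': '10.0.0.1'}, {'ip': '10.0.0.2'}, {'ip': '10.0.0.3'}]
now = time()

def key_func(node):
    timing, expires = node_timings.get(node['ip'], (-1.0, 0))
    return timing if expires > now else -1.0

nodes.sort(key=key_func)
print([n['ip'] for n in nodes])   # ['10.0.0.3', '10.0.0.2', '10.0.0.1']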
@@ -683,14 +692,7 @@ def parse_per_policy_config(conf):
     :raises ValueError: if a policy config section has an invalid name
     """
     policy_config = {}
-    try:
-        all_conf = readconf(conf['__file__'])
-    except KeyError:
-        get_logger(conf).warning(
-            "Unable to load policy specific configuration options: "
-            "cannot access proxy server conf file")
-        return policy_config
-
+    all_conf = readconf(conf['__file__'])
     policy_section_prefix = conf['__name__'] + ':policy:'
     for section, options in all_conf.items():
         if not section.startswith(policy_section_prefix):
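A toy sketch of the section matching in this hunk: only sections named "<proxy app name>:policy:<index>" are collected, and the trailing piece must parse as an integer index (the tests below reject '', 'uno' and '0.0'); the dict here merely stands in for what reading the whole conf file would return:

all_conf = {'app:proxy-server': {'sorting_method': 'shuffle'},
            'proxy-server:policy:0': {'sorting_method': 'affinity'},
            'filter:catch_errors': {}}

policy_section_prefix = 'proxy-server' + ':policy:'
policy_config = {}
for section, options in all_conf.items():
    if not section.startswith(policy_section_prefix):
        continue
    index = section[len(policy_section_prefix):]
    try:
        policy_config[int(index)] = options
    except ValueError:
        raise ValueError(
            'Override config must refer to policy index: %r' % index)

print(policy_config)   # {0: {'sorting_method': 'affinity'}}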
@@ -218,7 +218,7 @@ class BaseObjectControllerMixin(object):
         policy_conf.write_affinity_is_local_fn = (
             lambda node: node['region'] == 1)
         # we'll write to one more than replica count local nodes
-        policy_conf.write_affinity_node_count = lambda r: r + 1
+        policy_conf.write_affinity_node_count_fn = lambda r: r + 1
 
         object_ring = self.policy.object_ring
         # make our fake ring have plenty of nodes, and not get limited
@@ -40,7 +40,6 @@ import re
 import random
 from collections import defaultdict
 import uuid
-from copy import deepcopy
 
 import mock
 from eventlet import sleep, spawn, wsgi, Timeout, debug
@@ -753,9 +752,8 @@ class TestProxyServer(unittest.TestCase):
                        node_timings=None):
         # Note with shuffling mocked out, sort_nodes will by default return
         # nodes in the order they are given
-        nodes = deepcopy(nodes)
-        conf = deepcopy(conf)
-        conf['policy_config'] = deepcopy(policy_conf)
+        nodes = list(nodes)
+        conf = dict(conf, policy_config=policy_conf)
         baseapp = proxy_server.Application(conf,
                                            FakeMemcache(),
                                            logger=FakeLogger(),
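The dict(conf, policy_config=policy_conf) form above builds a new shallow copy with one key added, leaving the caller's conf untouched, which is why the deepcopy calls could be dropped; illustrated here with throwaway data:

conf = {'sorting_method': 'timing'}
policy_conf = {'0': {'sorting_method': 'affinity'}}

test_conf = dict(conf, policy_config=policy_conf)

print(test_conf['policy_config'])   # {'0': {'sorting_method': 'affinity'}}
print('policy_config' in conf)      # False - original dict unchanged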
@@ -1298,17 +1296,17 @@ class TestProxyServerConfigLoading(unittest.TestCase):
             f.write(dedent(conf_body))
         return conf_path
 
-    def _write_conf_and_load_app(self, conf_sections):
+    def _write_conf_and_load_app(self, conf_sections, app_name='proxy-server'):
         # write proxy-server.conf file, load app
-        conf_body = """
+        conf_body = dedent("""
         [DEFAULT]
         swift_dir = %s
 
         [pipeline:main]
-        pipeline = proxy-server
+        pipeline = %s
 
         %s
-        """ % (self.tempdir, conf_sections)
+        """) % (self.tempdir, app_name, dedent(conf_sections))
 
         conf_path = self._write_conf(conf_body)
         with mock.patch('swift.proxy.server.get_logger',
@@ -1316,12 +1314,12 @@ class TestProxyServerConfigLoading(unittest.TestCase):
             app = loadapp(conf_path, allow_modify_pipeline=False)
         return app
 
-    def _check_policy_conf(self, app, exp_conf, exp_is_local):
+    def _check_policy_options(self, app, exp_options, exp_is_local):
         # verify expected config
-        for policy, options in exp_conf.items():
+        for policy, options in exp_options.items():
             for k, v in options.items():
                 actual = getattr(app.get_policy_options(policy), k)
-                if k == "write_affinity_node_count":
+                if k == "write_affinity_node_count_fn":
                     if policy:  # this check only applies when using a policy
                         actual = actual(policy.object_ring.replica_count)
                 self.assertEqual(v, actual)
@@ -1340,7 +1338,6 @@ class TestProxyServerConfigLoading(unittest.TestCase):
                     self.assertIs(expected_result, actual,
                                   "Expected %s but got %s for %s, policy %s" %
                                   (expected_result, actual, node, policy))
-        return app
 
     def test_per_policy_conf_none_configured(self):
         conf_sections = """
@@ -1349,14 +1346,14 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         """
         expected_default = {"read_affinity": "",
                             "sorting_method": "shuffle",
-                            "write_affinity_node_count": 6}
-        exp_conf = {None: expected_default,
+                            "write_affinity_node_count_fn": 6}
+        exp_options = {None: expected_default,
                        POLICIES[0]: expected_default,
                        POLICIES[1]: expected_default}
         exp_is_local = {POLICIES[0]: None,
                         POLICIES[1]: None}
         app = self._write_conf_and_load_app(conf_sections)
-        self._check_policy_conf(app, exp_conf, exp_is_local)
+        self._check_policy_options(app, exp_options, exp_is_local)
 
     def test_per_policy_conf_one_configured(self):
         conf_sections = """
@@ -1371,30 +1368,39 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         """
         expected_default = {"read_affinity": "",
                             "sorting_method": "shuffle",
-                            "write_affinity_node_count": 6}
-        exp_conf = {None: expected_default,
+                            "write_affinity_node_count_fn": 6}
+        exp_options = {None: expected_default,
                        POLICIES[0]: {"read_affinity": "r1=100",
                                      "sorting_method": "affinity",
-                                     "write_affinity_node_count": 3},
+                                     "write_affinity_node_count_fn": 3},
                        POLICIES[1]: expected_default}
         exp_is_local = {POLICIES[0]: [({'region': 1, 'zone': 2}, True),
                                       ({'region': 2, 'zone': 1}, False)],
                         POLICIES[1]: None}
         app = self._write_conf_and_load_app(conf_sections)
-        self._check_policy_conf(app, exp_conf, exp_is_local)
+        self._check_policy_options(app, exp_options, exp_is_local)
 
-        default_conf = app.get_policy_options(None)
+        default_options = app.get_policy_options(None)
         self.assertEqual(
-            ('sorting_method: shuffle, read_affinity: , write_affinity: , '
-             'write_affinity_node_count: 2 * replicas'),
-            repr(default_conf))
-        policy_0_conf = app.get_policy_options(POLICIES[0])
+            "ProxyOverrideOptions({}, {'sorting_method': 'shuffle', "
+            "'read_affinity': '', 'write_affinity': '', "
+            "'write_affinity_node_count': '2 * replicas'})",
+            repr(default_options))
+        self.assertEqual(default_options, eval(repr(default_options), {
+            'ProxyOverrideOptions': default_options.__class__}))
+
+        policy_0_options = app.get_policy_options(POLICIES[0])
         self.assertEqual(
-            ('sorting_method: affinity, read_affinity: r1=100, '
-             'write_affinity: r1, write_affinity_node_count: 1 * replicas'),
-            repr(policy_0_conf))
-        policy_1_conf = app.get_policy_options(POLICIES[1])
-        self.assertIs(default_conf, policy_1_conf)
+            "ProxyOverrideOptions({}, {'sorting_method': 'affinity', "
+            "'read_affinity': 'r1=100', 'write_affinity': 'r1', "
+            "'write_affinity_node_count': '1 * replicas'})",
+            repr(policy_0_options))
+        self.assertEqual(policy_0_options, eval(repr(policy_0_options), {
+            'ProxyOverrideOptions': policy_0_options.__class__}))
+        self.assertNotEqual(default_options, policy_0_options)
+
+        policy_1_options = app.get_policy_options(POLICIES[1])
+        self.assertIs(default_options, policy_1_options)
 
     def test_per_policy_conf_inherits_defaults(self):
         conf_sections = """
@@ -1409,17 +1415,17 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         """
         expected_default = {"read_affinity": "",
                             "sorting_method": "affinity",
-                            "write_affinity_node_count": 3}
-        exp_conf = {None: expected_default,
+                            "write_affinity_node_count_fn": 3}
+        exp_options = {None: expected_default,
                        POLICIES[0]: {"read_affinity": "r1=100",
                                      "sorting_method": "affinity",
-                                     "write_affinity_node_count": 3},
+                                     "write_affinity_node_count_fn": 3},
                        POLICIES[1]: expected_default}
         exp_is_local = {POLICIES[0]: [({'region': 1, 'zone': 2}, True),
                                       ({'region': 2, 'zone': 1}, False)],
                         POLICIES[1]: None}
         app = self._write_conf_and_load_app(conf_sections)
-        self._check_policy_conf(app, exp_conf, exp_is_local)
+        self._check_policy_options(app, exp_options, exp_is_local)
 
     def test_per_policy_conf_overrides_default_affinity(self):
         conf_sections = """
@@ -1440,22 +1446,22 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         write_affinity = r3
         write_affinity_node_count = 4
         """
-        exp_conf = {None: {"read_affinity": "r2=10",
+        exp_options = {None: {"read_affinity": "r2=10",
                            "sorting_method": "affinity",
-                           "write_affinity_node_count": 3},
+                           "write_affinity_node_count_fn": 3},
                        POLICIES[0]: {"read_affinity": "r1=100",
                                      "sorting_method": "affinity",
-                                     "write_affinity_node_count": 5},
+                                     "write_affinity_node_count_fn": 5},
                        POLICIES[1]: {"read_affinity": "r1=1",
                                      "sorting_method": "affinity",
-                                     "write_affinity_node_count": 4}}
+                                     "write_affinity_node_count_fn": 4}}
         exp_is_local = {POLICIES[0]: [({'region': 1, 'zone': 2}, True),
                                       ({'region': 2, 'zone': 1}, False)],
                         POLICIES[1]: [({'region': 3, 'zone': 2}, True),
                                       ({'region': 1, 'zone': 1}, False),
                                       ({'region': 2, 'zone': 1}, False)]}
         app = self._write_conf_and_load_app(conf_sections)
-        self._check_policy_conf(app, exp_conf, exp_is_local)
+        self._check_policy_options(app, exp_options, exp_is_local)
 
     def test_per_policy_conf_overrides_default_sorting_method(self):
         conf_sections = """
@@ -1471,14 +1477,14 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         sorting_method = affinity
         read_affinity = r1=1
         """
-        exp_conf = {None: {"read_affinity": "",
+        exp_options = {None: {"read_affinity": "",
                            "sorting_method": "timing"},
                        POLICIES[0]: {"read_affinity": "r1=100",
                                      "sorting_method": "affinity"},
                        POLICIES[1]: {"read_affinity": "r1=1",
                                      "sorting_method": "affinity"}}
         app = self._write_conf_and_load_app(conf_sections)
-        self._check_policy_conf(app, exp_conf, {})
+        self._check_policy_options(app, exp_options, {})
 
     def test_per_policy_conf_with_DEFAULT_options(self):
         conf_body = """
@@ -1507,25 +1513,27 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         sorting_method = affinity
         """ % self.tempdir
 
+        # Don't just use _write_conf_and_load_app, as we don't want to have
+        # duplicate DEFAULT sections
         conf_path = self._write_conf(conf_body)
         with mock.patch('swift.proxy.server.get_logger',
                         return_value=FakeLogger()):
             app = loadapp(conf_path, allow_modify_pipeline=False)
 
-        exp_conf = {
+        exp_options = {
             # default read_affinity is r1, set in proxy-server section
             None: {"read_affinity": "r1=100",
                    "sorting_method": "shuffle",
-                   "write_affinity_node_count": 6},
+                   "write_affinity_node_count_fn": 6},
             # policy 0 read affinity is r2, dictated by policy 0 section
             POLICIES[0]: {"read_affinity": "r2=100",
                           "sorting_method": "affinity",
-                          "write_affinity_node_count": 6},
+                          "write_affinity_node_count_fn": 6},
             # policy 1 read_affinity is r0, dictated by DEFAULT section,
             # overrides proxy server section
             POLICIES[1]: {"read_affinity": "r0=100",
                           "sorting_method": "affinity",
-                          "write_affinity_node_count": 6}}
+                          "write_affinity_node_count_fn": 6}}
         exp_is_local = {
             # default write_affinity is r0, dictated by DEFAULT section
             None: [({'region': 0, 'zone': 2}, True),
@@ -1536,7 +1544,7 @@ class TestProxyServerConfigLoading(unittest.TestCase):
             # policy 1 write_affinity is r0, inherited from default
             POLICIES[1]: [({'region': 0, 'zone': 2}, True),
                           ({'region': 1, 'zone': 1}, False)]}
-        self._check_policy_conf(app, exp_conf, exp_is_local)
+        self._check_policy_options(app, exp_options, exp_is_local)
 
     def test_per_policy_conf_warns_about_sorting_method_mismatch(self):
         # verify that policy specific warnings are emitted when read_affinity
@@ -1554,14 +1562,14 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         sorting_method = affinity
         read_affinity = r1=1
         """
-        exp_conf = {None: {"read_affinity": "r2=10",
+        exp_options = {None: {"read_affinity": "r2=10",
                            "sorting_method": "timing"},
                        POLICIES[0]: {"read_affinity": "r1=100",
                                      "sorting_method": "timing"},
                        POLICIES[1]: {"read_affinity": "r1=1",
                                      "sorting_method": "affinity"}}
         app = self._write_conf_and_load_app(conf_sections)
-        self._check_policy_conf(app, exp_conf, {})
+        self._check_policy_options(app, exp_options, {})
         lines = app.logger.get_lines_for_level('warning')
         scopes = {'default', 'policy 0 (nulo)'}
         for line in lines[:2]:
@@ -1575,6 +1583,25 @@ class TestProxyServerConfigLoading(unittest.TestCase):
                 self.fail("None of %s found in warning: %r" % (scopes, line))
         self.assertFalse(scopes)
 
+    def test_per_policy_conf_section_name_inherits_from_app_section_name(self):
+        conf_sections = """
+        [app:proxy-srv]
+        use = egg:swift#proxy
+        sorting_method = affinity
+
+        [proxy-server:policy:0]
+        sorting_method = timing
+        # ignored!
+
+        [proxy-srv:policy:1]
+        sorting_method = shuffle
+        """
+        exp_options = {None: {'sorting_method': 'affinity'},
+                       POLICIES[0]: {'sorting_method': 'affinity'},
+                       POLICIES[1]: {'sorting_method': 'shuffle'}}
+        app = self._write_conf_and_load_app(conf_sections, 'proxy-srv')
+        self._check_policy_options(app, exp_options, {})
+
     def test_per_policy_conf_with_unknown_policy(self):
         # verify that unknown policy section raises an error
         conf_sections = """
@@ -1603,14 +1630,14 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         [proxy-server:policy:1]
         read_affinity = r1=1
         """
-        exp_conf = {None: {"read_affinity": "",
+        exp_options = {None: {"read_affinity": "",
                            "sorting_method": "affinity"},
                        POLICIES[0]: {"read_affinity": "",
                                      "sorting_method": "timing"},
                        POLICIES[1]: {"read_affinity": "r1=1",
                                      "sorting_method": "affinity"}}
         app = self._write_conf_and_load_app(conf_sections)
-        self._check_policy_conf(app, exp_conf, {})
+        self._check_policy_options(app, exp_options, {})
 
     def test_per_policy_conf_invalid_read_affinity_value(self):
         def do_test(conf_sections, scope):
@@ -1706,28 +1733,22 @@ class TestProxyServerConfigLoading(unittest.TestCase):
         do_test(conf_sections, '(default)')
 
     def test_per_policy_conf_bad_section_name(self):
-        conf_sections = """
-        [app:proxy-server]
-        use = egg:swift#proxy
+        def do_test(policy):
+            conf_sections = """
+            [app:proxy-server]
+            use = egg:swift#proxy
 
-        [proxy-server:policy:]
-        """
-        with self.assertRaises(ValueError) as cm:
-            self._write_conf_and_load_app(conf_sections)
-        self.assertIn("Override config must refer to policy index: ''",
-                      cm.exception.message)
+            [proxy-server:policy:%s]
+            """ % policy
+            with self.assertRaises(ValueError) as cm:
+                self._write_conf_and_load_app(conf_sections)
+            self.assertEqual(
+                "Override config must refer to policy index: %r" % policy,
+                cm.exception.message)
 
-    def test_per_policy_conf_section_name_not_index(self):
-        conf_sections = """
-        [app:proxy-server]
-        use = egg:swift#proxy
-
-        [proxy-server:policy:uno]
-        """
-        with self.assertRaises(ValueError) as cm:
-            self._write_conf_and_load_app(conf_sections)
-        self.assertIn("Override config must refer to policy index: 'uno'",
-                      cm.exception.message)
+        do_test('')
+        do_test('uno')
+        do_test('0.0')
 
 
 class TestProxyServerConfigStringLoading(TestProxyServerConfigLoading):
@@ -2634,9 +2655,9 @@ class TestReplicatedObjectController(
 
         object_ring = self.app.get_object_ring(0)
         object_ring.max_more_nodes = 100
-        policy_conf = self.app.get_policy_options(POLICIES[0])
-        policy_conf.write_affinity_is_local_fn = is_r0
-        policy_conf.write_affinity_node_count = lambda r: 3
+        policy_options = self.app.get_policy_options(POLICIES[0])
+        policy_options.write_affinity_is_local_fn = is_r0
+        policy_options.write_affinity_node_count_fn = lambda r: 3
 
         controller = \
             ReplicatedObjectController(
@@ -2653,13 +2674,13 @@ class TestReplicatedObjectController(
             res = controller.PUT(req)
         self.assertTrue(res.status.startswith('201 '))
 
-        self.assertEqual(3, len(written_to))
         # this is kind of a hokey test, but in FakeRing, the port is even when
         # the region is 0, and odd when the region is 1, so this test asserts
        # that we wrote to 2 nodes in region 0, then went to 1 non-r0 node.
-        self.assertEqual(0, written_to[0][1] % 2)  # it's (ip, port, device)
-        self.assertEqual(0, written_to[1][1] % 2)
-        self.assertNotEqual(0, written_to[2][1] % 2)
+        def get_region(x):
+            return x[1] % 2  # it's (ip, port, device)
+
+        self.assertEqual([0, 0, 1], [get_region(x) for x in written_to])
 
     @unpatch_policies
     def test_PUT_no_etag_fallocate(self):