From 5677a04c8f5d46da80ac4cf8be135549b422772c Mon Sep 17 00:00:00 2001
From: Victor Stinner
Date: Thu, 23 Jun 2016 13:53:49 +0200
Subject: [PATCH] Python 3: Fix usage of dict methods

* Replace "c = dict(a.items() + b.items())" with
  "c = dict(a); c.update(b)". This works on both Python 2 and Python 3,
  and may be slightly more efficient on Python 2, since no temporary
  list of items is created.
* Replace "dict.values() + dict.values()" with
  "list(dict.values()) + list(dict.values())": on Python 3, dict.values()
  returns a view, which does not support the + operator.

Change-Id: Id5a65628fe2fb7a02c713b758fcaa81154db28a0
---
 .../unit/common/middleware/test_gatekeeper.py | 20 +++++++++----------
 test/unit/obj/test_ssync.py                   |  8 ++++----
 2 files changed, 13 insertions(+), 15 deletions(-)

diff --git a/test/unit/common/middleware/test_gatekeeper.py b/test/unit/common/middleware/test_gatekeeper.py
index 5f4e87b5a2..d07c4c007a 100644
--- a/test/unit/common/middleware/test_gatekeeper.py
+++ b/test/unit/common/middleware/test_gatekeeper.py
@@ -79,13 +79,11 @@ class TestGatekeeper(unittest.TestCase):
         'x-object-transient-sysmeta-foo': 'value'}
     x_timestamp_headers = {'X-Timestamp': '1455952805.719739'}
 
-    forbidden_headers_out = dict(sysmeta_headers.items() +
-                                 x_backend_headers.items() +
-                                 object_transient_sysmeta_headers.items())
-    forbidden_headers_in = dict(sysmeta_headers.items() +
-                                x_backend_headers.items() +
-                                object_transient_sysmeta_headers.items())
-    shunted_headers_in = dict(x_timestamp_headers.items())
+    forbidden_headers_out = dict(sysmeta_headers)
+    forbidden_headers_out.update(x_backend_headers)
+    forbidden_headers_out.update(object_transient_sysmeta_headers)
+    forbidden_headers_in = dict(forbidden_headers_out)
+    shunted_headers_in = dict(x_timestamp_headers)
 
     def _assertHeadersEqual(self, expected, actual):
         for key in expected:
@@ -126,8 +124,8 @@ class TestGatekeeper(unittest.TestCase):
         expected_headers.update({'X-Backend-Inbound-' + k: v
                                  for k, v in self.shunted_headers_in.items()})
         self._assertHeadersEqual(expected_headers, fake_app.req.headers)
-        unexpected_headers = dict(self.forbidden_headers_in.items() +
-                                  self.shunted_headers_in.items())
+        unexpected_headers = dict(self.forbidden_headers_in)
+        unexpected_headers.update(self.shunted_headers_in)
         self._assertHeadersAbsent(unexpected_headers, fake_app.req.headers)
 
     def test_reserved_header_removed_inbound(self):
@@ -163,8 +161,8 @@ class TestGatekeeper(unittest.TestCase):
         app = self.get_app(fake_app, {}, shunt_inbound_x_timestamp='false')
         resp = req.get_response(app)
         self.assertEqual('200 OK', resp.status)
-        expected_headers = dict(self.allowed_headers.items() +
-                                self.shunted_headers_in.items())
+        expected_headers = dict(self.allowed_headers)
+        expected_headers.update(self.shunted_headers_in)
         self._assertHeadersEqual(expected_headers, fake_app.req.headers)
 
     def test_reserved_header_shunt_bypassed_inbound(self):
diff --git a/test/unit/obj/test_ssync.py b/test/unit/obj/test_ssync.py
index e51a7c4455..21c09b59f5 100644
--- a/test/unit/obj/test_ssync.py
+++ b/test/unit/obj/test_ssync.py
@@ -346,7 +346,7 @@ class TestSsyncEC(TestBaseSsync):
         tx_tombstones['o5'][0].delete(t5)
 
         suffixes = set()
-        for diskfiles in (tx_objs.values() + tx_tombstones.values()):
+        for diskfiles in list(tx_objs.values()) + list(tx_tombstones.values()):
             for df in diskfiles:
                 suffixes.add(os.path.basename(os.path.dirname(df._datadir)))
 
@@ -536,7 +536,7 @@ class TestSsyncEC(TestBaseSsync):
         tx_tombstones['o5'][0].delete(t5)
 
         suffixes = set()
-        for diskfiles in (tx_objs.values() + tx_tombstones.values()):
+        for diskfiles in list(tx_objs.values()) + list(tx_tombstones.values()):
             for df in diskfiles:
                 suffixes.add(os.path.basename(os.path.dirname(df._datadir)))
 
@@ -706,7 +706,7 @@ class TestSsyncReplication(TestBaseSsync):
         tx_tombstones['o7'][0].delete(t7b)
 
         suffixes = set()
-        for diskfiles in (tx_objs.values() + tx_tombstones.values()):
+        for diskfiles in list(tx_objs.values()) + list(tx_tombstones.values()):
             for df in diskfiles:
                 suffixes.add(os.path.basename(os.path.dirname(df._datadir)))
 
@@ -879,7 +879,7 @@ class TestSsyncReplication(TestBaseSsync):
         rx_tombstones['o7'][0].delete(next(self.ts_iter))
 
         suffixes = set()
-        for diskfiles in (tx_objs.values() + tx_tombstones.values()):
+        for diskfiles in list(tx_objs.values()) + list(tx_tombstones.values()):
             for df in diskfiles:
                 suffixes.add(os.path.basename(os.path.dirname(df._datadir)))
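
Note (illustrative only, not part of the patch): a minimal standalone sketch of the
two idioms described in the commit message, using made-up dicts a, b, d1 and d2
rather than anything from the Swift test suite:

    # Hypothetical example data -- not taken from the Swift tests.
    a = {'x': 1, 'y': 2}
    b = {'y': 20, 'z': 3}

    # On Python 2, dict(a.items() + b.items()) concatenates two lists of pairs;
    # on Python 3, items() returns a view and "+" raises TypeError.
    # Portable replacement: copy the first dict, then update with the second.
    c = dict(a)
    c.update(b)
    assert c == {'x': 1, 'y': 20, 'z': 3}  # later keys win, same as before

    # Likewise, values() views cannot be concatenated with "+" on Python 3;
    # wrapping each view in list() works on both major versions.
    d1 = {'o1': ['df1']}
    d2 = {'o2': ['df2']}
    merged = []
    for diskfiles in list(d1.values()) + list(d2.values()):
        merged.extend(diskfiles)
    assert merged == ['df1', 'df2']

On Python 3.5 and later the merge could also be written as {**a, **b}, but the
copy-and-update form stays compatible with Python 2, which is what this patch needs.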