Merge "diskfile: Prevent get_hashes from creating missing partition dirs"

commit 0dd057ea5f
Authored by Zuul on 2021-03-30 11:11:01 +00:00; committed by Gerrit Code Review
4 changed files with 116 additions and 56 deletions
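In short: get_hashes() no longer creates a missing partition directory as a side effect of a REPLICATE request. After this change it returns {} when the partition directory does not exist (and None when skip_rehash is set), writes hashes.pkl only when the partition directory is already there, and validates both caller-supplied suffixes and hashes.pkl keys with a new valid_suffix() helper.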

swift/obj/diskfile.py

@@ -341,6 +341,12 @@ def quarantine_renamer(device_path, corrupted_file_path):
     return to_dir
 
+def valid_suffix(value):
+    if not isinstance(value, str) or len(value) != 3:
+        return False
+    return all(c in '0123456789abcdef' for c in value)
+
+
 def read_hashes(partition_dir):
     """
     Read the existing hashes.pkl
@@ -365,9 +371,9 @@ def read_hashes(partition_dir):
             pass
     # Check for corrupted data that could break os.listdir()
-    for suffix in hashes.keys():
-        if not suffix.isalnum():
-            return {'valid': False}
+    if not all(valid_suffix(key) or key in ('valid', 'updated')
+               for key in hashes):
+        return {'valid': False}
     # hashes.pkl w/o valid updated key is "valid" but "forever old"
     hashes.setdefault('valid', True)
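valid_suffix() and the stricter read_hashes() key check above are pure logic, so their effect is easy to demonstrate. A minimal standalone sketch follows; check_keys is a hypothetical helper standing in for the validation step inside read_hashes():

def valid_suffix(value):
    # as added above: exactly three lowercase hex characters
    if not isinstance(value, str) or len(value) != 3:
        return False
    return all(c in '0123456789abcdef' for c in value)

def check_keys(hashes):
    # hypothetical stand-in for the new read_hashes() check: any key that
    # is neither a suffix nor bookkeeping invalidates the whole pickle
    if not all(valid_suffix(key) or key in ('valid', 'updated')
               for key in hashes):
        return {'valid': False}
    return hashes

print(check_keys({'abc': 'fake', 'valid': True}))  # unchanged, keys are fine
print(check_keys({'stub': 'fake'}))  # {'valid': False}: 'stub' passed the old
                                     # isalnum() test but is not a 3-char suffix

This stricter check is also why the TestHashesHelpers fixtures further down replace the placeholder key 'stub' with genuine suffixes such as 'fff', '888' and '000'.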
@@ -1292,6 +1298,8 @@ class BaseDiskFileManager(object):
             self.logger.debug('Run listdir on %s', partition_path)
         hashes.update((suffix, None) for suffix in recalculate)
         for suffix, hash_ in list(hashes.items()):
+            if suffix in ('valid', 'updated'):
+                continue
             if not hash_:
                 suffix_dir = join(partition_path, suffix)
                 try:
@@ -1539,12 +1547,14 @@ class BaseDiskFileManager(object):
         if not dev_path:
            raise DiskFileDeviceUnavailable()
         partition_path = get_part_path(dev_path, policy, partition)
-        if not os.path.exists(partition_path):
-            mkdirs(partition_path)
+        suffixes = [suf for suf in suffixes or [] if valid_suffix(suf)]
         if skip_rehash:
-            for suffix in suffixes or []:
-                invalidate_hash(os.path.join(partition_path, suffix))
-            return None
+            for suffix in suffixes:
+                self.invalidate_hash(os.path.join(partition_path, suffix))
+            hashes = None
+        elif not os.path.exists(partition_path):
+            hashes = {}
         else:
             _junk, hashes = tpool.execute(
                 self._get_hashes, device, partition, policy,
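Putting the diskfile.py pieces together, here is a simplified standalone sketch of the new get_hashes() dispatch. The function shape and the invalidate/rehash callbacks are illustrative only; the real logic lives in BaseDiskFileManager.get_hashes and hands the rehash off to tpool.execute. It reuses valid_suffix() from the sketch above:

import os

def get_hashes_sketch(partition_path, suffixes, skip_rehash,
                      invalidate, rehash):
    # drop malformed suffixes before touching the filesystem
    suffixes = [suf for suf in suffixes or [] if valid_suffix(suf)]
    if skip_rehash:
        # only record invalidations; no rehash work
        for suffix in suffixes:
            invalidate(os.path.join(partition_path, suffix))
        return None
    elif not os.path.exists(partition_path):
        # the behavior this commit changes: report "no hashes" rather
        # than mkdirs()-ing the partition into existence
        return {}
    else:
        return rehash(partition_path, suffixes)

The elif branch is the heart of the fix: probing a partition that was never created, or that has since been cleaned up, no longer recreates an empty directory tree.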

test/unit/cli/test_relinker.py

@@ -453,6 +453,11 @@ class TestRelinker(unittest.TestCase):
         self.assertEqual('foo', hashes[self._hash[-3:]])
         self.assertFalse(os.path.exists(
             os.path.join(self.part_dir, 'hashes.invalid')))
+        # Check that only the dirty partition in upper half of next part power
+        # has been created and rehashed
+        other_next_part = self.next_part ^ 1
+        other_next_part_dir = os.path.join(self.objects, str(other_next_part))
+        self.assertFalse(os.path.exists(other_next_part_dir))
 
     def _do_link_test(self, command, old_file_specs, new_file_specs,
                       conflict_file_specs, exp_old_specs, exp_new_specs,
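For context on the ^ 1 above: when the part power is increased, each old partition P splits into next-power partitions 2*P and 2*P + 1, so XOR-ing the low bit of a next-power partition yields its sibling. A tiny worked example with an arbitrary partition number:

next_part = 6               # hypothetical partition under the new part power
sibling = next_part ^ 1
assert sibling == 7                    # 6 and 7 descend from old partition 3
assert sibling ^ 1 == next_part        # XOR with 1 is its own inverse
assert next_part // 2 == sibling // 2 == 3

The new assertion relies on this: relinking dirties only one of the two sibling partitions, and after this commit the clean sibling's directory must not spring into existence.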

test/unit/obj/test_diskfile.py

@@ -6743,6 +6743,15 @@ class TestSuffixHashes(unittest.TestCase):
             if suffix_dir != suffix_dir2:
                 return df2
 
+    def test_valid_suffix(self):
+        self.assertTrue(diskfile.valid_suffix('000'))
+        self.assertTrue(diskfile.valid_suffix('123'))
+        self.assertTrue(diskfile.valid_suffix('fff'))
+        self.assertFalse(diskfile.valid_suffix(list('123')))
+        self.assertFalse(diskfile.valid_suffix(123))
+        self.assertFalse(diskfile.valid_suffix(' 12'))
+        self.assertFalse(diskfile.valid_suffix('-00'))
+
     def check_cleanup_ondisk_files(self, policy, input_files, output_files):
         orig_unlink = os.unlink
         file_list = list(input_files)
@@ -7104,7 +7113,12 @@ class TestSuffixHashes(unittest.TestCase):
     def test_invalidate_hash_empty_file_exists(self):
         for policy in self.iter_policies():
             df_mgr = self.df_router[policy]
+            part_path = os.path.join(self.devices, 'sda1',
+                                     diskfile.get_data_dir(policy), '0')
+            mkdirs(part_path)
             hashes = df_mgr.get_hashes('sda1', '0', [], policy)
+            pkl_path = os.path.join(part_path, diskfile.HASH_FILE)
+            self.assertTrue(os.path.exists(pkl_path))
             self.assertEqual(hashes, {})
 
             # create something to hash
             df = df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
@@ -7127,6 +7141,7 @@ class TestSuffixHashes(unittest.TestCase):
             df_mgr = self.df_router[policy]
             part_path = os.path.join(self.devices, 'sda1',
                                      diskfile.get_data_dir(policy), '0')
+            mkdirs(part_path)
             inv_file = os.path.join(
                 part_path, diskfile.HASH_INVALIDATIONS_FILE)
             hash_file = os.path.join(
@@ -7162,9 +7177,14 @@ class TestSuffixHashes(unittest.TestCase):
         # get_hashes call
         for policy in self.iter_policies():
             df_mgr = self.df_router[policy]
+            part_path = os.path.join(self.devices, 'sda1',
+                                     diskfile.get_data_dir(policy), '0')
             if existing:
+                mkdirs(part_path)
                 # force hashes.pkl to exist
                 df_mgr.get_hashes('sda1', '0', [], policy)
+                self.assertTrue(os.path.exists(os.path.join(
+                    part_path, diskfile.HASH_FILE)))
             orig_listdir = os.listdir
             df = df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
                                      policy=policy)
@@ -7184,10 +7204,15 @@ class TestSuffixHashes(unittest.TestCase):
                     df2.delete(self.ts())
                 return result
 
+            if not existing:
+                self.assertFalse(os.path.exists(os.path.join(
+                    part_path, diskfile.HASH_FILE)))
             with mock.patch('swift.obj.diskfile.os.listdir',
                             mock_listdir):
-                # creates pkl file
+                # creates pkl file if not already there
                 hashes = df_mgr.get_hashes('sda1', '0', [], policy)
+            self.assertTrue(os.path.exists(os.path.join(
+                part_path, diskfile.HASH_FILE)))
 
             # second suffix added after directory listing, it's added later
             self.assertIn(suffix, hashes)
@@ -7216,7 +7241,12 @@ class TestSuffixHashes(unittest.TestCase):
             orig_hash_suffix = df_mgr._hash_suffix
             if existing:
                 # create hashes.pkl
+                part_path = os.path.join(self.devices, 'sda1',
+                                         diskfile.get_data_dir(policy), '0')
+                mkdirs(part_path)
                 df_mgr.get_hashes('sda1', '0', [], policy)
+                self.assertTrue(os.path.exists(os.path.join(
+                    part_path, diskfile.HASH_FILE)))
 
             df = df_mgr.get_diskfile('sda1', '0', 'a', 'c', 'o',
                                      policy=policy)
@@ -7257,7 +7287,7 @@ class TestSuffixHashes(unittest.TestCase):
                 return result
 
             with mock.patch.object(df_mgr, '_hash_suffix', mock_hash_suffix):
-                # creates pkl file and repeats listing when pkl modified
+                # repeats listing when pkl modified
                 hashes = df_mgr.get_hashes('sda1', '0', [], policy)
 
             # first get_hashes should complete with suffix1 state
@@ -7452,6 +7482,12 @@ class TestSuffixHashes(unittest.TestCase):
         # verify that if consolidate_hashes raises an exception then suffixes
         # are rehashed and a hashes.pkl is written
         for policy in self.iter_policies():
+            part_path = os.path.join(self.devices, 'sda1',
+                                     diskfile.get_data_dir(policy), '0')
+            hashes_file = os.path.join(part_path, diskfile.HASH_FILE)
+            invalidations_file = os.path.join(
+                part_path, diskfile.HASH_INVALIDATIONS_FILE)
             self.logger.clear()
             df_mgr = self.df_router[policy]
             # create something to hash
@@ -7461,10 +7497,14 @@ class TestSuffixHashes(unittest.TestCase):
             # avoid getting O_TMPFILE warning in logs
             if not utils.o_tmpfile_in_tmpdir_supported():
                 df.manager.use_linkat = False
+            self.assertFalse(os.path.exists(part_path))
             df.delete(self.ts())
+            self.assertTrue(os.path.exists(invalidations_file))
             suffix_dir = os.path.dirname(df._datadir)
             suffix = os.path.basename(suffix_dir)
+            # no pre-existing hashes.pkl
+            self.assertFalse(os.path.exists(hashes_file))
             with mock.patch.object(df_mgr, '_hash_suffix',
                                    return_value='fake hash'):
                 with mock.patch.object(df_mgr, 'consolidate_hashes',
@@ -7473,10 +7513,6 @@ class TestSuffixHashes(unittest.TestCase):
                 self.assertEqual({suffix: 'fake hash'}, hashes)
 
             # sanity check hashes file
-            part_path = os.path.join(self.devices, 'sda1',
-                                     diskfile.get_data_dir(policy), '0')
-            hashes_file = os.path.join(part_path, diskfile.HASH_FILE)
             with open(hashes_file, 'rb') as f:
                 found_hashes = pickle.load(f)
                 found_hashes.pop('updated')
@@ -7497,9 +7533,6 @@ class TestSuffixHashes(unittest.TestCase):
             self.assertEqual({suffix: 'new fake hash'}, hashes)
 
             # sanity check hashes file
-            part_path = os.path.join(self.devices, 'sda1',
-                                     diskfile.get_data_dir(policy), '0')
-            hashes_file = os.path.join(part_path, diskfile.HASH_FILE)
             with open(hashes_file, 'rb') as f:
                 found_hashes = pickle.load(f)
                 found_hashes.pop('updated')
@@ -8185,6 +8218,9 @@ class TestSuffixHashes(unittest.TestCase):
     def test_hash_suffix_listdir_enoent(self):
         for policy in self.iter_policies():
             df_mgr = self.df_router[policy]
+            part_path = os.path.join(self.devices, 'sda1',
+                                     diskfile.get_data_dir(policy), '0')
+            mkdirs(part_path)  # ensure we'll bother writing a pkl at all
             orig_listdir = os.listdir
             listdir_calls = []
@@ -8203,9 +8239,6 @@ class TestSuffixHashes(unittest.TestCase):
                 # does not exist!
                 df_mgr.get_hashes('sda1', '0', ['123'], policy)
 
-            part_path = os.path.join(self.devices, 'sda1',
-                                     diskfile.get_data_dir(policy), '0')
-
             self.assertEqual(listdir_calls, [
                 # part path gets created automatically
                 (part_path, True),
@@ -8340,7 +8373,7 @@ class TestSuffixHashes(unittest.TestCase):
 
     # get_hashes tests - behaviors
 
-    def test_get_hashes_creates_partition_and_pkl(self):
+    def test_get_hashes_does_not_create_partition(self):
         for policy in self.iter_policies():
             df_mgr = self.df_router[policy]
             hashes = df_mgr.get_hashes(self.existing_device, '0', [],
@@ -8348,6 +8381,18 @@ class TestSuffixHashes(unittest.TestCase):
             self.assertEqual(hashes, {})
             part_path = os.path.join(
                 self.devices, 'sda1', diskfile.get_data_dir(policy), '0')
+            self.assertFalse(os.path.exists(part_path))
+
+    def test_get_hashes_creates_pkl(self):
+        # like above, but -- if the partition already exists, make the pickle
+        for policy in self.iter_policies():
+            part_path = os.path.join(
+                self.devices, 'sda1', diskfile.get_data_dir(policy), '0')
+            mkdirs(part_path)
+            df_mgr = self.df_router[policy]
+            hashes = df_mgr.get_hashes(self.existing_device, '0', [],
+                                       policy)
+            self.assertEqual(hashes, {})
             self.assertTrue(os.path.exists(part_path))
             hashes_file = os.path.join(part_path,
                                        diskfile.HASH_FILE)
@@ -8438,6 +8483,7 @@ class TestSuffixHashes(unittest.TestCase):
             # create an empty stale pickle
             part_path = os.path.join(
                 self.devices, 'sda1', diskfile.get_data_dir(policy), '0')
+            mkdirs(part_path)
             hashes_file = os.path.join(part_path,
                                        diskfile.HASH_FILE)
             hashes = df_mgr.get_hashes(self.existing_device, '0', [], policy)
@@ -8641,6 +8687,7 @@ class TestSuffixHashes(unittest.TestCase):
             suffix2 = os.path.basename(os.path.dirname(df2._datadir))
             part_path = os.path.dirname(os.path.dirname(
                 os.path.join(df._datadir)))
+            mkdirs(part_path)
             hashfile_path = os.path.join(part_path, diskfile.HASH_FILE)
             # create hashes.pkl
             hashes = df_mgr.get_hashes(self.existing_device, '0', [],
@@ -8745,8 +8792,13 @@ class TestSuffixHashes(unittest.TestCase):
     def test_get_hashes_modified_recursive_retry(self):
         for policy in self.iter_policies():
             df_mgr = self.df_router[policy]
+            part_path = os.path.join(self.devices, self.existing_device,
+                                     diskfile.get_data_dir(policy), '0')
+            mkdirs(part_path)
             # first create an empty pickle
             df_mgr.get_hashes(self.existing_device, '0', [], policy)
+            self.assertTrue(os.path.exists(os.path.join(
+                part_path, diskfile.HASH_FILE)))
             non_local = {'suffix_count': 1}
             calls = []
@@ -8788,19 +8840,19 @@ class TestHashesHelpers(unittest.TestCase):
         rmtree(self.testdir, ignore_errors=1)
 
     def test_read_legacy_hashes(self):
-        hashes = {'stub': 'fake'}
+        hashes = {'fff': 'fake'}
         hashes_file = os.path.join(self.testdir, diskfile.HASH_FILE)
         with open(hashes_file, 'wb') as f:
             pickle.dump(hashes, f)
         expected = {
-            'stub': 'fake',
+            'fff': 'fake',
             'updated': -1,
             'valid': True,
         }
         self.assertEqual(expected, diskfile.read_hashes(self.testdir))
 
     def test_write_hashes_valid_updated(self):
-        hashes = {'stub': 'fake', 'valid': True}
+        hashes = {'888': 'fake', 'valid': True}
         now = time()
         with mock.patch('swift.obj.diskfile.time.time', return_value=now):
             diskfile.write_hashes(self.testdir, hashes)
@@ -8808,7 +8860,7 @@ class TestHashesHelpers(unittest.TestCase):
         with open(hashes_file, 'rb') as f:
             data = pickle.load(f)
         expected = {
-            'stub': 'fake',
+            '888': 'fake',
             'updated': now,
             'valid': True,
         }
@@ -8843,7 +8895,7 @@ class TestHashesHelpers(unittest.TestCase):
         self.assertEqual(expected, data)
 
     def test_read_write_valid_hashes_mutation_and_transative_equality(self):
-        hashes = {'stub': 'fake', 'valid': True}
+        hashes = {'000': 'fake', 'valid': True}
         diskfile.write_hashes(self.testdir, hashes)
         # write_hashes mutates the passed in hashes, it adds the updated key
         self.assertIn('updated', hashes)

test/unit/obj/test_server.py

@@ -7158,11 +7158,11 @@ class TestObjectController(unittest.TestCase):
         def my_tpool_execute(func, *args, **kwargs):
             return func(*args, **kwargs)
 
-        was_get_hashes = diskfile.DiskFileManager._get_hashes
-        was_tpool_exe = tpool.execute
-        try:
-            diskfile.DiskFileManager._get_hashes = fake_get_hashes
-            tpool.execute = my_tpool_execute
+        with mock.patch.object(diskfile.DiskFileManager, '_get_hashes',
+                               fake_get_hashes), \
+                mock.patch.object(tpool, 'execute', my_tpool_execute), \
+                mock.patch('swift.obj.diskfile.os.path.exists',
+                           return_value=True):
             req = Request.blank('/sda1/p/',
                                 environ={'REQUEST_METHOD': 'REPLICATE'},
                                 headers={})
@@ -7170,9 +7170,6 @@ class TestObjectController(unittest.TestCase):
             self.assertEqual(resp.status_int, 200)
             p_data = pickle.loads(resp.body)
             self.assertEqual(p_data, {1: 2})
-        finally:
-            tpool.execute = was_tpool_exe
-            diskfile.DiskFileManager._get_hashes = was_get_hashes
 
     def test_REPLICATE_pickle_protocol(self):
@@ -7182,25 +7179,22 @@ class TestObjectController(unittest.TestCase):
         def my_tpool_execute(func, *args, **kwargs):
             return func(*args, **kwargs)
 
-        was_get_hashes = diskfile.DiskFileManager._get_hashes
-        was_tpool_exe = tpool.execute
-        try:
-            diskfile.DiskFileManager._get_hashes = fake_get_hashes
-            tpool.execute = my_tpool_execute
+        with mock.patch.object(diskfile.DiskFileManager, '_get_hashes',
+                               fake_get_hashes), \
+                mock.patch.object(tpool, 'execute', my_tpool_execute), \
+                mock.patch('swift.obj.server.pickle.dumps') as fake_pickle, \
+                mock.patch('swift.obj.diskfile.os.path.exists',
+                           return_value=True):
             req = Request.blank('/sda1/p/',
                                 environ={'REQUEST_METHOD': 'REPLICATE'},
                                 headers={})
-            with mock.patch('swift.obj.server.pickle.dumps') as fake_pickle:
-                fake_pickle.return_value = b''
-                req.get_response(self.object_controller)
-                # This is the key assertion: starting in Python 3.0, the
-                # default protocol version is 3, but such pickles can't be read
-                # on Python 2. As long as we may need to talk to a Python 2
-                # process, we need to cap our protocol version.
-                fake_pickle.assert_called_once_with({1: 2}, protocol=2)
-        finally:
-            tpool.execute = was_tpool_exe
-            diskfile.DiskFileManager._get_hashes = was_get_hashes
+            fake_pickle.return_value = b''
+            req.get_response(self.object_controller)
+            # This is the key assertion: starting in Python 3.0, the
+            # default protocol version is 3, but such pickles can't be read
+            # on Python 2. As long as we may need to talk to a Python 2
+            # process, we need to cap our protocol version.
+            fake_pickle.assert_called_once_with({1: 2}, protocol=2)
 
     def test_REPLICATE_timeout(self):
@@ -7210,18 +7204,17 @@ class TestObjectController(unittest.TestCase):
         def my_tpool_execute(func, *args, **kwargs):
             return func(*args, **kwargs)
 
-        was_get_hashes = diskfile.DiskFileManager._get_hashes
-        was_tpool_exe = tpool.execute
-        try:
-            diskfile.DiskFileManager._get_hashes = fake_get_hashes
-            tpool.execute = my_tpool_execute
+        with mock.patch.object(diskfile.DiskFileManager, '_get_hashes',
+                               fake_get_hashes), \
+                mock.patch.object(tpool, 'execute', my_tpool_execute), \
+                mock.patch('swift.obj.diskfile.os.path.exists',
+                           return_value=True):
             req = Request.blank('/sda1/p/',
                                 environ={'REQUEST_METHOD': 'REPLICATE'},
                                 headers={})
             self.assertRaises(Timeout, self.object_controller.REPLICATE, req)
-        finally:
-            tpool.execute = was_tpool_exe
-            diskfile.DiskFileManager._get_hashes = was_get_hashes
 
     def test_REPLICATE_reclaims_tombstones(self):
         conf = {'devices': self.testdir, 'mount_check': False,
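The test_server.py hunks above swap the old save/monkey-patch/restore-in-finally dance for stacked mock.patch context managers, and additionally patch swift.obj.diskfile.os.path.exists to return True so the nonexistent test partition still looks present and get_hashes keeps dispatching to the mocked _get_hashes. A minimal self-contained sketch of the pattern; the patch targets here are arbitrary examples, not Swift's:

import os
from unittest import mock

def old_style():
    # manual save/patch/restore, as these tests used to do
    was_exists = os.path.exists
    try:
        os.path.exists = lambda p: True
        assert os.path.exists('/definitely/not/there')
    finally:
        os.path.exists = was_exists

def new_style():
    # stacked context managers: patches are active inside the block and
    # unwound automatically on exit, even if the body raises
    with mock.patch('os.path.exists', return_value=True), \
            mock.patch.object(os.path, 'getsize', return_value=0):
        assert os.path.exists('/definitely/not/there')
        assert os.path.getsize('/definitely/not/there') == 0
    assert not os.path.exists('/definitely/not/there')

old_style()
new_style()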