Merge "pep8 db.py"
commit 2ad23a25e8
@@ -32,7 +32,7 @@ from eventlet import sleep, Timeout
 import sqlite3
 
 from swift.common.utils import json, normalize_timestamp, renamer, \
-        mkdirs, lock_parent_directory, fallocate
+    mkdirs, lock_parent_directory, fallocate
 from swift.common.exceptions import LockTimeout
 
 
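Note: nearly every hunk in this change is the same class of fix -- pep8 continuation-line complaints (E127/E128) -- resolved either by aligning the continuation with the opening delimiter or by a plain four-space hanging indent. Both accepted styles, using lines taken from the hunks in this change:

    # Aligned with the opening delimiter:
    conn = sqlite3.connect(path, check_same_thread=False,
                           factory=GreenDBConnection, timeout=timeout)

    # Four-space hanging indent for a backslash continuation:
    from swift.common.utils import json, normalize_timestamp, renamer, \
        mkdirs, lock_parent_directory, fallocate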
@@ -60,7 +60,7 @@ class DatabaseConnectionError(sqlite3.DatabaseError):
 
     def __str__(self):
         return 'DB connection error (%s, %s):\n%s' % (
-                self.path, self.timeout, self.msg)
+            self.path, self.timeout, self.msg)
 
 
 class GreenDBConnection(sqlite3.Connection):
@@ -84,7 +84,7 @@ class GreenDBConnection(sqlite3.Connection):
 
     def execute(self, *args, **kwargs):
         return self._timeout(lambda: sqlite3.Connection.execute(
-                self, *args, **kwargs))
+            self, *args, **kwargs))
 
     def commit(self):
         return self._timeout(lambda: sqlite3.Connection.commit(self))
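Note: execute() and commit() both route through self._timeout so that a busy SQLite database yields to other eventlet greenthreads instead of blocking the hub. The wrapper itself is outside this diff; a minimal sketch of the pattern, assuming the connection stores its timeout and retries on "database is locked", using the sleep/Timeout imports from the top of db.py:

    def _timeout(self, call):
        # Sketch only -- the real method is not part of this diff.
        with Timeout(self.timeout):
            while True:
                try:
                    return call()
                except sqlite3.OperationalError as e:
                    if 'locked' not in str(e):
                        raise
                sleep(0.05)  # cooperative yield before retrying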
@@ -131,14 +131,14 @@ def get_db_connection(path, timeout=30, okay_to_create=False):
     try:
         connect_time = time.time()
         conn = sqlite3.connect(path, check_same_thread=False,
-                factory=GreenDBConnection, timeout=timeout)
+                               factory=GreenDBConnection, timeout=timeout)
         if path != ':memory:' and not okay_to_create:
             # attempt to detect and fail when connect creates the db file
             stat = os.stat(path)
             if stat.st_size == 0 and stat.st_ctime >= connect_time:
                 os.unlink(path)
                 raise DatabaseConnectionError(path,
-                        'DB file created by connect?')
+                                              'DB file created by connect?')
         conn.row_factory = sqlite3.Row
         conn.text_factory = str
         conn.execute('PRAGMA synchronous = NORMAL')
@@ -149,7 +149,7 @@ def get_db_connection(path, timeout=30, okay_to_create=False):
     except sqlite3.DatabaseError:
         import traceback
         raise DatabaseConnectionError(path, traceback.format_exc(),
-                timeout=timeout)
+                                      timeout=timeout)
     return conn
 
 
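Note: the two hunks above cover the whole error path of get_db_connection(). A minimal usage sketch; the in-memory path is the one special-cased above, and okay_to_create only matters for on-disk files:

    # ':memory:' skips the created-by-connect check entirely.
    conn = get_db_connection(':memory:', okay_to_create=True)
    conn.execute('CREATE TABLE example (id INTEGER PRIMARY KEY)')
    conn.commit()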
@@ -186,7 +186,7 @@ class DatabaseBroker(object):
             fd, tmp_db_file = mkstemp(suffix='.tmp', dir=self.db_dir)
             os.close(fd)
             conn = sqlite3.connect(tmp_db_file, check_same_thread=False,
-                        factory=GreenDBConnection, timeout=0)
+                                   factory=GreenDBConnection, timeout=0)
             # creating dbs implicitly does a lot of transactions, so we
             # pick fast, unsafe options here and do a big fsync at the end.
             conn.execute('PRAGMA synchronous = OFF')
@@ -243,8 +243,9 @@ class DatabaseBroker(object):
                 if os.path.exists(self.db_file):
                     # It's as if there was a "condition" where different parts
                     # of the system were "racing" each other.
-                    raise DatabaseConnectionError(self.db_file,
-                            'DB created by someone else while working?')
+                    raise DatabaseConnectionError(
+                        self.db_file,
+                        'DB created by someone else while working?')
                 renamer(tmp_db_file, self.db_file)
             self.conn = get_db_connection(self.db_file, self.timeout)
         else:
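Note: this hunk is the tail of the create-then-rename pattern started two hunks earlier: the schema is built in a mkstemp() temp file and renamer() moves it into place, so a database only ever appears at its final path fully formed; if that path already exists by then, another process won the race. A condensed, standalone sketch of the same idea (paths here are made up):

    import os
    from tempfile import mkstemp

    fd, tmp_path = mkstemp(suffix='.tmp', dir='/tmp')
    os.close(fd)
    # ... initialize the schema in tmp_path here ...
    final_path = '/tmp/example.db'
    if os.path.exists(final_path):
        os.unlink(tmp_path)
        raise RuntimeError('DB created by someone else while working?')
    os.rename(tmp_path, final_path)  # stand-in for swift's renamer()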
@@ -285,7 +286,8 @@ class DatabaseBroker(object):
         dbs_path = os.path.dirname(partition_path)
         device_path = os.path.dirname(dbs_path)
         quar_path = os.path.join(device_path, 'quarantined',
-                self.db_type + 's', os.path.basename(self.db_dir))
+                                 self.db_type + 's',
+                                 os.path.basename(self.db_dir))
         try:
             renamer(self.db_dir, quar_path)
         except OSError, e:
@@ -294,7 +296,7 @@ class DatabaseBroker(object):
                 quar_path = "%s-%s" % (quar_path, uuid4().hex)
                 renamer(self.db_dir, quar_path)
         detail = _('Quarantined %s to %s due to %s database') % \
-                (self.db_dir, quar_path, exc_hint)
+            (self.db_dir, quar_path, exc_hint)
         self.logger.error(detail)
         raise sqlite3.DatabaseError(detail)
 
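Note: the uuid4().hex suffix handles the case where a directory from an earlier quarantine already occupies quar_path. Condensed, the whole flow reads as below; the errno check is an assumption about the lines elided between the two hunks, with import errno assumed at module top:

    try:
        renamer(self.db_dir, quar_path)
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise
        # a previous quarantine left a same-named directory;
        # retry once under a unique name
        quar_path = "%s-%s" % (quar_path, uuid4().hex)
        renamer(self.db_dir, quar_path)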
@@ -498,13 +500,13 @@ class DatabaseBroker(object):
                     INSERT INTO %s_sync (sync_point, remote_id)
                     VALUES (?, ?)
                ''' % ('incoming' if incoming else 'outgoing'),
-                    (rec['sync_point'], rec['remote_id']))
+                             (rec['sync_point'], rec['remote_id']))
             except sqlite3.IntegrityError:
                 conn.execute('''
                     UPDATE %s_sync SET sync_point=max(?, sync_point)
                     WHERE remote_id=?
                ''' % ('incoming' if incoming else 'outgoing'),
-                    (rec['sync_point'], rec['remote_id']))
+                             (rec['sync_point'], rec['remote_id']))
             conn.commit()
 
     def _preallocate(self):
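Note: the INSERT-falling-back-to-UPDATE shape here is the classic sqlite upsert for versions without native conflict handling: try the insert, and when the unique remote_id raises IntegrityError, update instead, with max() ensuring the sync point never moves backwards. A standalone illustration of the same pattern (table and values are made up; the UNIQUE constraint on remote_id is implied by the IntegrityError fallback):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE incoming_sync '
                 '(remote_id TEXT UNIQUE, sync_point INTEGER)')

    def record_sync(conn, remote_id, sync_point):
        try:
            conn.execute('INSERT INTO incoming_sync (sync_point, remote_id) '
                         'VALUES (?, ?)', (sync_point, remote_id))
        except sqlite3.IntegrityError:
            conn.execute('UPDATE incoming_sync '
                         'SET sync_point = max(?, sync_point) '
                         'WHERE remote_id = ?', (sync_point, remote_id))
        conn.commit()

    record_sync(conn, 'replica-1', 5)
    record_sync(conn, 'replica-1', 3)  # stays at 5: max() never rewinds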
@@ -787,7 +789,7 @@ class ContainerBroker(DatabaseBroker):
             raise
         with self.get() as conn:
             row = conn.execute(
-                    'SELECT object_count from container_stat').fetchone()
+                'SELECT object_count from container_stat').fetchone()
         return (row[0] == 0)
 
     def _commit_puts(self, item_list=None):
@@ -808,10 +810,12 @@ class ContainerBroker(DatabaseBroker):
                     try:
                         (name, timestamp, size, content_type, etag,
                          deleted) = pickle.loads(entry.decode('base64'))
-                        item_list.append({'name': name, 'created_at':
-                            timestamp, 'size': size, 'content_type':
-                            content_type, 'etag': etag,
-                            'deleted': deleted})
+                        item_list.append({'name': name,
+                                          'created_at': timestamp,
+                                          'size': size,
+                                          'content_type': content_type,
+                                          'etag': etag,
+                                          'deleted': deleted})
                     except Exception:
                         self.logger.exception(
                             _('Invalid pending entry %(file)s: %(entry)s'),
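Note: each pending entry consumed here is one object row, pickled and base64-encoded. A minimal Python 2 round trip matching the tuple unpacked above (the field values are made up):

    import pickle

    record = ('obj1', '0000001365430098.12345', 17, 'text/plain',
              'd41d8cd98f00b204e9800998ecf8427e', 0)
    entry = pickle.dumps(record, protocol=2).encode('base64')
    (name, timestamp, size, content_type, etag,
     deleted) = pickle.loads(entry.decode('base64'))
    assert name == 'obj1' and deleted == 0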
@@ -1277,7 +1281,7 @@ class AccountBroker(DatabaseBroker):
             UPDATE account_stat SET account = ?, created_at = ?, id = ?,
                 put_timestamp = ?
         ''', (self.account, normalize_timestamp(time.time()), str(uuid4()),
-            put_timestamp))
+              put_timestamp))
 
     def get_db_version(self, conn):
         if self._db_version == -1:
@@ -1332,14 +1336,15 @@ class AccountBroker(DatabaseBroker):
                 if entry:
                     try:
                         (name, put_timestamp, delete_timestamp,
-                            object_count, bytes_used, deleted) = \
+                         object_count, bytes_used, deleted) = \
                             pickle.loads(entry.decode('base64'))
-                        item_list.append({'name': name,
-                            'put_timestamp': put_timestamp,
-                            'delete_timestamp': delete_timestamp,
-                            'object_count': object_count,
-                            'bytes_used': bytes_used,
-                            'deleted': deleted})
+                        item_list.append(
+                            {'name': name,
+                             'put_timestamp': put_timestamp,
+                             'delete_timestamp': delete_timestamp,
+                             'object_count': object_count,
+                             'bytes_used': bytes_used,
+                             'deleted': deleted})
                     except Exception:
                         self.logger.exception(
                             _('Invalid pending entry %(file)s: %(entry)s'),
@@ -1365,7 +1370,7 @@ class AccountBroker(DatabaseBroker):
             raise
         with self.get() as conn:
             row = conn.execute(
-                    'SELECT container_count from account_stat').fetchone()
+                'SELECT container_count from account_stat').fetchone()
         return (row[0] == 0)
 
     def reclaim(self, container_timestamp, sync_timestamp):
@@ -1419,7 +1424,7 @@ class AccountBroker(DatabaseBroker):
             ret = conn.execute('''
                 SELECT put_timestamp FROM container
                 WHERE name = ? AND deleted != 1''',
-                (container_name,)).fetchone()
+                               (container_name,)).fetchone()
             if ret:
                 ret = ret[0]
             return ret
@@ -1513,8 +1518,8 @@ class AccountBroker(DatabaseBroker):
                 SELECT put_timestamp, delete_timestamp, container_count, status
                 FROM account_stat''').fetchone()
             return row['status'] == 'DELETED' or (
-                    row['container_count'] in (None, '', 0, '0') and
-                    row['delete_timestamp'] > row['put_timestamp'])
+                row['container_count'] in (None, '', 0, '0') and
+                row['delete_timestamp'] > row['put_timestamp'])
 
     def is_status_deleted(self):
         """Only returns true if the status field is set to DELETED."""