Change (started|finished)_at column type

The DB columns started_at and finished_at are defined as Float, but a
float Unix timestamp does not fit into the default DB Float precision.
This change migrates both columns to the DateTime type.

Closes-Bug: 1658975
Change-Id: I13bbb3c9a3e6e8bc744473437a378453ebc81318
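
A rough sketch of the underlying problem, assuming the backend maps a plain
Float column to a single-precision 32-bit float (an assumption for
illustration of the "default Float size"): present-day Unix timestamps then
lose even whole-second accuracy.

```python
import struct
import time

ts = time.time()
# Round-trip through a 32-bit float to mimic an undersized FLOAT column.
lossy = struct.unpack('f', struct.pack('f', ts))[0]
print(ts, lossy, abs(ts - lossy))  # the error can reach tens of seconds
```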
Anton Arefiev 2016-11-15 11:34:44 +02:00 committed by Dmitry Tantsur
parent fcce530a3e
commit 71099c9248
10 changed files with 201 additions and 59 deletions


@ -22,7 +22,7 @@ from oslo_db import options as db_opts
from oslo_db.sqlalchemy import models
from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import types as db_types
from sqlalchemy import (Boolean, Column, DateTime, Enum, Float, ForeignKey,
from sqlalchemy import (Boolean, Column, DateTime, Enum, ForeignKey,
Integer, String, Text)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import orm
@ -56,8 +56,8 @@ class Node(Base):
state = Column(Enum(*istate.States.all()), nullable=False,
default=istate.States.finished,
server_default=istate.States.finished)
started_at = Column(Float, nullable=True)
finished_at = Column(Float, nullable=True)
started_at = Column(DateTime, nullable=True)
finished_at = Column(DateTime, nullable=True)
error = Column(Text, nullable=True)
# version_id is being tracked in the NodeInfo object


@ -140,11 +140,14 @@ def generate_introspection_status(node):
:param node: a NodeInfo instance
:return: dictionary
"""
started_at = node.started_at.isoformat()
finished_at = node.finished_at.isoformat() if node.finished_at else None
status = {}
status['uuid'] = node.uuid
status['finished'] = bool(node.finished_at)
status['started_at'] = utils.iso_timestamp(node.started_at)
status['finished_at'] = utils.iso_timestamp(node.finished_at)
status['started_at'] = started_at
status['finished_at'] = finished_at
status['error'] = node.error
status['links'] = create_link_object(
["v%s/introspection/%s" % (CURRENT_API_VERSION[0], node.uuid)])


@ -0,0 +1,68 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Change created|finished_at type to DateTime
Revision ID: d00d6e3f38c4
Revises: d2e48801c8ef
Create Date: 2016-12-15 17:18:10.728695
"""
# revision identifiers, used by Alembic.
revision = 'd00d6e3f38c4'
down_revision = 'd2e48801c8ef'
branch_labels = None
depends_on = None
import datetime
from alembic import op
import sqlalchemy as sa
def upgrade():
started_at = sa.Column('started_at', sa.types.Float, nullable=True)
finished_at = sa.Column('finished_at', sa.types.Float, nullable=True)
temp_started_at = sa.Column("temp_started_at", sa.types.DateTime,
nullable=True)
temp_finished_at = sa.Column("temp_finished_at", sa.types.DateTime,
nullable=True)
op.add_column("nodes", temp_started_at)
op.add_column("nodes", temp_finished_at)
    # The uuid column lets the data migration update each row individually.
    uuid = sa.Column('uuid', sa.String(36), primary_key=True)
    t = sa.table('nodes', started_at, finished_at,
                 temp_started_at, temp_finished_at, uuid)
    conn = op.get_bind()
    rows = conn.execute(sa.select([t.c.started_at, t.c.finished_at, t.c.uuid]))
for row in rows:
temp_started = datetime.datetime.utcfromtimestamp(row['started_at'])
temp_finished = row['finished_at']
# Note(milan) this is just a precaution; sa.null shouldn't happen here
if temp_finished is not None:
temp_finished = datetime.datetime.utcfromtimestamp(temp_finished)
        conn.execute(t.update().where(t.c.uuid == row['uuid']).values(
            temp_started_at=temp_started, temp_finished_at=temp_finished))
with op.batch_alter_table('nodes') as batch_op:
batch_op.drop_column('started_at')
batch_op.drop_column('finished_at')
batch_op.alter_column('temp_started_at',
existing_type=sa.types.DateTime,
nullable=True,
new_column_name='started_at')
batch_op.alter_column('temp_finished_at',
existing_type=sa.types.DateTime,
nullable=True,
new_column_name='finished_at')
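
A minimal sketch of the per-row conversion this data migration performs:
datetime.utcfromtimestamp() returns a naive datetime interpreted as UTC,
matching the time.time() values that were previously stored (the sample value
below is illustrative):

```python
import datetime

# 1479168000.0 is 2016-11-15 00:00:00 UTC as a Unix timestamp.
print(datetime.datetime.utcfromtimestamp(1479168000.0))
# -> 2016-11-15 00:00:00 (naive, interpreted as UTC)
```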


@ -15,9 +15,9 @@
import contextlib
import copy
import datetime
import json
import six
import time
from automaton import exceptions as automaton_errors
from ironicclient import exceptions
@ -26,6 +26,7 @@ from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import utils as db_utils
from oslo_utils import excutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
from sqlalchemy.orm import exc as orm_errors
from sqlalchemy import text
@ -278,7 +279,7 @@ class NodeInfo(object):
"""
self.release_lock()
self.finished_at = time.time()
self.finished_at = timeutils.utcnow()
self.error = error
with db.ensure_transaction() as session:
@ -670,7 +671,7 @@ def add_node(uuid, state, **attributes):
also ironic client instance may be passed under 'ironic'
:returns: NodeInfo
"""
started_at = time.time()
started_at = timeutils.utcnow()
with db.ensure_transaction() as session:
_delete_node(uuid)
db.Node(uuid=uuid, state=state, started_at=started_at).save(session)
@ -840,8 +841,8 @@ def clean_up():
:return: list of timed out node UUID's
"""
status_keep_threshold = (time.time() -
CONF.node_status_keep_time)
status_keep_threshold = (timeutils.utcnow() - datetime.timedelta(
seconds=CONF.node_status_keep_time))
with db.ensure_transaction() as session:
db.model_query(db.Node, session=session).filter(
@ -851,7 +852,7 @@ def clean_up():
timeout = CONF.timeout
if timeout <= 0:
return []
threshold = time.time() - timeout
threshold = timeutils.utcnow() - datetime.timedelta(seconds=timeout)
uuids = [row.uuid for row in
db.model_query(db.Node.uuid, session=session).filter(
db.Node.started_at < threshold,
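
The timeout check in clean_up() keeps the same semantics, only expressed with
datetime arithmetic. A small sketch, where the 3600-second timeout stands in
for CONF.timeout and is purely illustrative:

```python
import datetime

from oslo_utils import timeutils

timeout = 3600  # stands in for CONF.timeout, in seconds
threshold = timeutils.utcnow() - datetime.timedelta(seconds=timeout)
# Rows with started_at older than the threshold and no finished_at are timed
# out, just as with the old float comparison against time.time() - timeout.
print(threshold)
```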


@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import futurist
import mock
@ -159,7 +160,9 @@ class NodeTest(InventoryTest):
self.node.to_dict = mock_to_dict
self.ports = []
self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=0,
self.node_info = node_cache.NodeInfo(
uuid=self.uuid,
started_at=datetime.datetime(1, 1, 1),
node=self.node, ports=self.ports)
self.node_info.node = mock.Mock(return_value=self.node)


@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import ssl
import sys
@ -179,9 +180,10 @@ class GetStatusAPIBaseTest(BaseAPITest):
def setUp(self):
super(GetStatusAPIBaseTest, self).setUp()
self.uuid2 = uuidutils.generate_uuid()
self.finished_node = node_cache.NodeInfo(uuid=self.uuid,
started_at=42.0,
finished_at=100.1,
self.finished_node = node_cache.NodeInfo(
uuid=self.uuid,
started_at=datetime.datetime(1, 1, 1),
finished_at=datetime.datetime(1, 1, 2),
error='boom')
self.finished_node.links = [
{u'href': u'http://localhost/v1/introspection/%s' %
@ -190,25 +192,27 @@ class GetStatusAPIBaseTest(BaseAPITest):
]
self.finished_node.status = {
'finished': True,
'started_at': utils.iso_timestamp(self.finished_node.started_at),
'finished_at': utils.iso_timestamp(self.finished_node.finished_at),
'started_at': self.finished_node.started_at.isoformat(),
'finished_at': self.finished_node.finished_at.isoformat(),
'error': self.finished_node.error,
'uuid': self.finished_node.uuid,
'links': self.finished_node.links
}
self.unfinished_node = node_cache.NodeInfo(uuid=self.uuid2,
started_at=42.0)
self.unfinished_node = node_cache.NodeInfo(
uuid=self.uuid2,
started_at=datetime.datetime(1, 1, 1))
self.unfinished_node.links = [
{u'href': u'http://localhost/v1/introspection/%s' %
self.unfinished_node.uuid,
u'rel': u'self'}
]
finished_at = (self.unfinished_node.finished_at.isoformat()
if self.unfinished_node.finished_at else None)
self.unfinished_node.status = {
'finished': False,
'started_at': utils.iso_timestamp(self.unfinished_node.started_at),
'finished_at': utils.iso_timestamp(
self.unfinished_node.finished_at),
'started_at': self.unfinished_node.started_at.isoformat(),
'finished_at': finished_at,
'error': None,
'uuid': self.unfinished_node.uuid,
'links': self.unfinished_node.links


@ -24,6 +24,8 @@ The test will then use that db and u/p combo to run the tests.
import contextlib
import datetime
import time
import alembic
from alembic import script
@ -364,6 +366,35 @@ class MigrationCheckersMixin(object):
err_node = nodes.select(nodes.c.uuid == err_node_id).execute().first()
self.assertEqual(istate.States.error, err_node['state'])
def _pre_upgrade_d00d6e3f38c4(self, engine):
nodes = db_utils.get_table(engine, 'nodes')
for finished_at in (None, time.time()):
data = {'uuid': uuidutils.generate_uuid(),
'started_at': time.time(),
'finished_at': finished_at,
'error': None}
nodes.insert().values(data).execute()
return data
def _check_d00d6e3f38c4(self, engine, data):
nodes = db_utils.get_table(engine, 'nodes')
col_names = [column.name for column in nodes.c]
self.assertIn('started_at', col_names)
self.assertIn('finished_at', col_names)
self.assertIsInstance(nodes.c.started_at.type,
sqlalchemy.types.DateTime)
self.assertIsInstance(nodes.c.finished_at.type,
sqlalchemy.types.DateTime)
node = nodes.select(nodes.c.uuid == data['uuid']).execute().first()
self.assertEqual(
datetime.datetime.utcfromtimestamp(data['started_at']),
node['started_at'])
self.assertEqual(
datetime.datetime.utcfromtimestamp(data['finished_at']),
node['finished_at'])
def test_upgrade_and_version(self):
with patch_with_engine(self.engine):
self.migration_ext.upgrade('head')


@ -12,15 +12,16 @@
# limitations under the License.
import copy
import datetime
import json
import six
import time
import unittest
import automaton
import mock
from oslo_config import cfg
import oslo_db
from oslo_utils import timeutils
from oslo_utils import uuidutils
from ironic_inspector.common import ironic as ir_utils
@ -52,7 +53,10 @@ class TestNodeCache(test_base.NodeTest):
mac=self.macs, bmc_address='1.2.3.4',
foo=None)
self.assertEqual(self.uuid, node.uuid)
self.assertTrue(time.time() - 60 < node.started_at < time.time() + 60)
self.assertTrue(
(datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
< node.started_at <
datetime.datetime.utcnow() + datetime.timedelta(seconds=60)))
self.assertFalse(node._locked)
res = set(db.model_query(db.Node.uuid,
@ -205,13 +209,19 @@ class TestNodeCacheFind(test_base.NodeTest):
def test_bmc(self):
res = node_cache.find_node(bmc_address='1.2.3.4')
self.assertEqual(self.uuid, res.uuid)
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1)
self.assertTrue(
datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
< res.started_at <
datetime.datetime.utcnow() + datetime.timedelta(seconds=1))
self.assertTrue(res._locked)
def test_macs(self):
res = node_cache.find_node(mac=['11:22:33:33:33:33', self.macs[1]])
self.assertEqual(self.uuid, res.uuid)
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1)
self.assertTrue(
datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
< res.started_at <
datetime.datetime.utcnow() + datetime.timedelta(seconds=1))
self.assertTrue(res._locked)
def test_macs_not_found(self):
@ -230,7 +240,10 @@ class TestNodeCacheFind(test_base.NodeTest):
res = node_cache.find_node(bmc_address='1.2.3.4',
mac=self.macs)
self.assertEqual(self.uuid, res.uuid)
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1)
self.assertTrue(
datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
< res.started_at <
datetime.datetime.utcnow() + datetime.timedelta(seconds=1))
self.assertTrue(res._locked)
def test_inconsistency(self):
@ -245,7 +258,7 @@ class TestNodeCacheFind(test_base.NodeTest):
session = db.get_session()
with session.begin():
(db.model_query(db.Node).filter_by(uuid=self.uuid).
update({'finished_at': 42.0}))
update({'finished_at': datetime.datetime.utcnow()}))
self.assertRaises(utils.Error, node_cache.find_node,
bmc_address='1.2.3.4')
@ -253,7 +266,7 @@ class TestNodeCacheFind(test_base.NodeTest):
class TestNodeCacheCleanUp(test_base.NodeTest):
def setUp(self):
super(TestNodeCacheCleanUp, self).setUp()
self.started_at = 100.0
self.started_at = datetime.datetime.utcnow()
session = db.get_session()
with session.begin():
db.Node(uuid=self.uuid,
@ -280,9 +293,9 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
self.assertEqual(1, db.model_query(db.Option).count())
@mock.patch.object(node_cache, '_get_lock', autospec=True)
@mock.patch.object(time, 'time')
@mock.patch.object(timeutils, 'utcnow')
def test_ok(self, time_mock, get_lock_mock):
time_mock.return_value = 1000
time_mock.return_value = datetime.datetime.utcnow()
self.assertFalse(node_cache.clean_up())
@ -295,28 +308,30 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
self.assertFalse(get_lock_mock.called)
@mock.patch.object(node_cache, '_get_lock', autospec=True)
@mock.patch.object(time, 'time')
@mock.patch.object(timeutils, 'utcnow')
def test_timeout(self, time_mock, get_lock_mock):
# Add a finished node to confirm we don't try to timeout it
time_mock.return_value = self.started_at
session = db.get_session()
finished_at = self.started_at + datetime.timedelta(seconds=60)
with session.begin():
db.Node(uuid=self.uuid + '1',
db.Node(uuid=self.uuid + '1', started_at=self.started_at,
state=istate.States.waiting,
started_at=self.started_at,
finished_at=self.started_at + 60).save(session)
finished_at=finished_at).save(session)
CONF.set_override('timeout', 99)
time_mock.return_value = (self.started_at + 100)
time_mock.return_value = (self.started_at +
datetime.timedelta(seconds=100))
self.assertEqual([self.uuid], node_cache.clean_up())
res = [(row.state, row.finished_at, row.error) for row in
db.model_query(db.Node).all()]
self.assertEqual([(istate.States.error,
self.started_at + 100,
self.assertEqual(
[(istate.States.error,
self.started_at + datetime.timedelta(seconds=100),
'Introspection timeout'),
(istate.States.waiting,
self.started_at + 60, None)],
self.started_at + datetime.timedelta(seconds=60), None)],
res)
self.assertEqual([], db.model_query(db.Attribute).all())
self.assertEqual([], db.model_query(db.Option).all())
@ -328,7 +343,8 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
session = db.get_session()
with session.begin():
db.model_query(db.Node).update(
{'finished_at': time.time() - 100})
{'finished_at': (datetime.datetime.utcnow() -
datetime.timedelta(seconds=100))})
self.assertEqual([], node_cache.clean_up())
@ -337,7 +353,8 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
class TestNodeCacheGetNode(test_base.NodeTest):
def test_ok(self):
started_at = time.time() - 42
started_at = (datetime.datetime.utcnow() -
datetime.timedelta(seconds=42))
session = db.get_session()
with session.begin():
db.Node(uuid=self.uuid,
@ -352,7 +369,8 @@ class TestNodeCacheGetNode(test_base.NodeTest):
self.assertFalse(info._locked)
def test_locked(self):
started_at = time.time() - 42
started_at = (datetime.datetime.utcnow() -
datetime.timedelta(seconds=42))
session = db.get_session()
with session.begin():
db.Node(uuid=self.uuid,
@ -371,7 +389,8 @@ class TestNodeCacheGetNode(test_base.NodeTest):
uuidutils.generate_uuid())
def test_with_name(self):
started_at = time.time() - 42
started_at = (datetime.datetime.utcnow() -
datetime.timedelta(seconds=42))
session = db.get_session()
with session.begin():
db.Node(uuid=self.uuid,
@ -390,7 +409,7 @@ class TestNodeCacheGetNode(test_base.NodeTest):
ironic.node.get.assert_called_once_with('name')
@mock.patch.object(time, 'time', lambda: 42.0)
@mock.patch.object(timeutils, 'utcnow', lambda: datetime.datetime(1, 1, 1))
class TestNodeInfoFinished(test_base.NodeTest):
def setUp(self):
super(TestNodeInfoFinished, self).setUp()
@ -398,7 +417,8 @@ class TestNodeInfoFinished(test_base.NodeTest):
istate.States.processing,
bmc_address='1.2.3.4',
mac=self.macs)
self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=3.14)
self.node_info = node_cache.NodeInfo(
uuid=self.uuid, started_at=datetime.datetime(3, 1, 4))
session = db.get_session()
with session.begin():
db.Option(uuid=self.uuid, name='foo', value='bar').save(
@ -409,7 +429,7 @@ class TestNodeInfoFinished(test_base.NodeTest):
session = db.get_session()
with session.begin():
self.assertEqual((42.0, None),
self.assertEqual((datetime.datetime(1, 1, 1), None),
tuple(db.model_query(
db.Node.finished_at,
db.Node.error).first()))
@ -421,7 +441,7 @@ class TestNodeInfoFinished(test_base.NodeTest):
def test_error(self):
self.node_info.finished(error='boom')
self.assertEqual((42.0, 'boom'),
self.assertEqual((datetime.datetime(1, 1, 1), 'boom'),
tuple(db.model_query(db.Node.finished_at,
db.Node.error).first()))
self.assertEqual([], db.model_query(db.Attribute).all())
@ -772,9 +792,10 @@ class TestNodeCacheListNode(test_base.NodeTest):
self.uuid2 = uuidutils.generate_uuid()
session = db.get_session()
with session.begin():
db.Node(uuid=self.uuid, started_at=42.42).save(session)
db.Node(uuid=self.uuid2, started_at=42.24,
finished_at=100.0).save(session)
db.Node(uuid=self.uuid,
started_at=datetime.datetime(1, 1, 2)).save(session)
db.Node(uuid=self.uuid2, started_at=datetime.datetime(1, 1, 1),
finished_at=datetime.datetime(1, 1, 3)).save(session)
        # Note: node(self.uuid).started_at > node(self.uuid2).started_at, and
        # node_cache.get_node_list orders results strictly newest first.
@ -848,7 +869,7 @@ class TestNodeInfoState(test_base.NodeStateTest):
self.node_info._set_state, 'foo')
def test_commit(self):
current_time = time.time()
current_time = timeutils.utcnow()
self.node_info.started_at = self.node_info.finished_at = current_time
self.node_info.error = "Boo!"
self.node_info.commit()


@ -17,7 +17,6 @@ import json
import os
import shutil
import tempfile
import time
import eventlet
import fixtures
@ -25,6 +24,7 @@ from ironicclient import exceptions
import mock
from oslo_config import cfg
from oslo_serialization import base64
from oslo_utils import timeutils
from oslo_utils import uuidutils
from ironic_inspector.common import ironic as ir_utils
@ -44,7 +44,7 @@ CONF = cfg.CONF
class BaseTest(test_base.NodeTest):
def setUp(self):
super(BaseTest, self).setUp()
self.started_at = time.time()
self.started_at = timeutils.utcnow()
self.all_ports = [mock.Mock(uuid=uuidutils.generate_uuid(),
address=mac) for mac in self.macs]
self.ports = [self.all_ports[1]]
@ -120,7 +120,7 @@ class TestProcess(BaseProcessTest):
self.node_info.finished.assert_called_once_with(error=mock.ANY)
def test_already_finished(self):
self.node_info.finished_at = time.time()
self.node_info.finished_at = timeutils.utcnow()
self.assertRaisesRegex(utils.Error, 'already finished',
process.process, self.data)
self.assertFalse(self.process_mock.called)


@ -0,0 +1,11 @@
---
fixes:
- |
    The DB columns started_at and finished_at were defined as Float, and
    as a result timestamps did not fit into the default Float size. This
    change migrates both columns to the DateTime type.
upgrade:
- |
    The new migration from Float to DateTime includes a data migration,
    which could take some time depending on the number of introspection
    statuses in the DB.
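
Since the data migration walks every row of the nodes table, one way to gauge
how long the upgrade may take is to count the stored introspection statuses
beforehand. A sketch, assuming SQLAlchemy is available and using an
illustrative connection URL (substitute the real [database]/connection value):

```python
import sqlalchemy as sa

# Hypothetical connection URL; use the one from your inspector configuration.
engine = sa.create_engine('mysql+pymysql://inspector:secret@localhost/inspector')
with engine.connect() as conn:
    total = conn.execute(sa.text('SELECT COUNT(*) FROM nodes')).scalar()
print('%d introspection statuses to migrate' % total)
```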