Change (started|finished)_at column type

The DB columns started_at and finished_at are defined as Float, but a
float timestamp does not fit into the default DB Float precision. This
change migrates the column type to DateTime.

Closes-Bug: 1658975
Change-Id: I13bbb3c9a3e6e8bc744473437a378453ebc81318
This commit is contained in:
Anton Arefiev 2016-11-15 11:34:44 +02:00 committed by Dmitry Tantsur
parent fcce530a3e
commit 71099c9248
10 changed files with 201 additions and 59 deletions

View File

@ -22,7 +22,7 @@ from oslo_db import options as db_opts
from oslo_db.sqlalchemy import models from oslo_db.sqlalchemy import models
from oslo_db.sqlalchemy import session as db_session from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import types as db_types from oslo_db.sqlalchemy import types as db_types
from sqlalchemy import (Boolean, Column, DateTime, Enum, Float, ForeignKey, from sqlalchemy import (Boolean, Column, DateTime, Enum, ForeignKey,
Integer, String, Text) Integer, String, Text)
from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import orm from sqlalchemy import orm
@ -56,8 +56,8 @@ class Node(Base):
state = Column(Enum(*istate.States.all()), nullable=False, state = Column(Enum(*istate.States.all()), nullable=False,
default=istate.States.finished, default=istate.States.finished,
server_default=istate.States.finished) server_default=istate.States.finished)
started_at = Column(Float, nullable=True) started_at = Column(DateTime, nullable=True)
finished_at = Column(Float, nullable=True) finished_at = Column(DateTime, nullable=True)
error = Column(Text, nullable=True) error = Column(Text, nullable=True)
# version_id is being tracked in the NodeInfo object # version_id is being tracked in the NodeInfo object

View File

@ -140,11 +140,14 @@ def generate_introspection_status(node):
:param node: a NodeInfo instance :param node: a NodeInfo instance
:return: dictionary :return: dictionary
""" """
started_at = node.started_at.isoformat()
finished_at = node.finished_at.isoformat() if node.finished_at else None
status = {} status = {}
status['uuid'] = node.uuid status['uuid'] = node.uuid
status['finished'] = bool(node.finished_at) status['finished'] = bool(node.finished_at)
status['started_at'] = utils.iso_timestamp(node.started_at) status['started_at'] = started_at
status['finished_at'] = utils.iso_timestamp(node.finished_at) status['finished_at'] = finished_at
status['error'] = node.error status['error'] = node.error
status['links'] = create_link_object( status['links'] = create_link_object(
["v%s/introspection/%s" % (CURRENT_API_VERSION[0], node.uuid)]) ["v%s/introspection/%s" % (CURRENT_API_VERSION[0], node.uuid)])

View File

@ -0,0 +1,68 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Change started|finished_at type to DateTime

Revision ID: d00d6e3f38c4
Revises: d2e48801c8ef
Create Date: 2016-12-15 17:18:10.728695

"""

# revision identifiers, used by Alembic.
revision = 'd00d6e3f38c4'
down_revision = 'd2e48801c8ef'
branch_labels = None
depends_on = None

import datetime

from alembic import op
import sqlalchemy as sa


def upgrade():
    """Migrate nodes.started_at/finished_at from Float epoch to DateTime.

    Adds temporary DateTime columns, copies each row's epoch timestamps
    converted to naive UTC datetimes, then drops the Float columns and
    renames the temporaries back to the original names.
    """
    uuid = sa.Column('uuid', sa.String(36), primary_key=True)
    started_at = sa.Column('started_at', sa.types.Float, nullable=True)
    finished_at = sa.Column('finished_at', sa.types.Float, nullable=True)
    temp_started_at = sa.Column("temp_started_at", sa.types.DateTime,
                                nullable=True)
    temp_finished_at = sa.Column("temp_finished_at", sa.types.DateTime,
                                 nullable=True)

    op.add_column("nodes", temp_started_at)
    op.add_column("nodes", temp_finished_at)

    t = sa.table('nodes', uuid, started_at, finished_at,
                 temp_started_at, temp_finished_at)

    conn = op.get_bind()
    rows = conn.execute(sa.select([t.c.uuid, t.c.started_at,
                                   t.c.finished_at]))
    for row in rows:
        # Both columns are nullable, so guard every conversion:
        # utcfromtimestamp(None) would raise TypeError.
        temp_started = row['started_at']
        if temp_started is not None:
            temp_started = datetime.datetime.utcfromtimestamp(temp_started)
        temp_finished = row['finished_at']
        if temp_finished is not None:
            temp_finished = datetime.datetime.utcfromtimestamp(temp_finished)
        # Restrict the UPDATE to the row being processed; without the
        # WHERE clause every row would end up with the values of the
        # last row iterated.
        conn.execute(t.update().where(t.c.uuid == row['uuid']).values(
            temp_started_at=temp_started, temp_finished_at=temp_finished))

    # batch_alter_table is required for SQLite, which cannot ALTER
    # columns in place.
    with op.batch_alter_table('nodes') as batch_op:
        batch_op.drop_column('started_at')
        batch_op.drop_column('finished_at')
        batch_op.alter_column('temp_started_at',
                              existing_type=sa.types.DateTime,
                              nullable=True,
                              new_column_name='started_at')
        batch_op.alter_column('temp_finished_at',
                              existing_type=sa.types.DateTime,
                              nullable=True,
                              new_column_name='finished_at')

View File

@ -15,9 +15,9 @@
import contextlib import contextlib
import copy import copy
import datetime
import json import json
import six import six
import time
from automaton import exceptions as automaton_errors from automaton import exceptions as automaton_errors
from ironicclient import exceptions from ironicclient import exceptions
@ -26,6 +26,7 @@ from oslo_config import cfg
from oslo_db import exception as db_exc from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import utils as db_utils from oslo_db.sqlalchemy import utils as db_utils
from oslo_utils import excutils from oslo_utils import excutils
from oslo_utils import timeutils
from oslo_utils import uuidutils from oslo_utils import uuidutils
from sqlalchemy.orm import exc as orm_errors from sqlalchemy.orm import exc as orm_errors
from sqlalchemy import text from sqlalchemy import text
@ -278,7 +279,7 @@ class NodeInfo(object):
""" """
self.release_lock() self.release_lock()
self.finished_at = time.time() self.finished_at = timeutils.utcnow()
self.error = error self.error = error
with db.ensure_transaction() as session: with db.ensure_transaction() as session:
@ -670,7 +671,7 @@ def add_node(uuid, state, **attributes):
also ironic client instance may be passed under 'ironic' also ironic client instance may be passed under 'ironic'
:returns: NodeInfo :returns: NodeInfo
""" """
started_at = time.time() started_at = timeutils.utcnow()
with db.ensure_transaction() as session: with db.ensure_transaction() as session:
_delete_node(uuid) _delete_node(uuid)
db.Node(uuid=uuid, state=state, started_at=started_at).save(session) db.Node(uuid=uuid, state=state, started_at=started_at).save(session)
@ -840,8 +841,8 @@ def clean_up():
:return: list of timed out node UUID's :return: list of timed out node UUID's
""" """
status_keep_threshold = (time.time() - status_keep_threshold = (timeutils.utcnow() - datetime.timedelta(
CONF.node_status_keep_time) seconds=CONF.node_status_keep_time))
with db.ensure_transaction() as session: with db.ensure_transaction() as session:
db.model_query(db.Node, session=session).filter( db.model_query(db.Node, session=session).filter(
@ -851,7 +852,7 @@ def clean_up():
timeout = CONF.timeout timeout = CONF.timeout
if timeout <= 0: if timeout <= 0:
return [] return []
threshold = time.time() - timeout threshold = timeutils.utcnow() - datetime.timedelta(seconds=timeout)
uuids = [row.uuid for row in uuids = [row.uuid for row in
db.model_query(db.Node.uuid, session=session).filter( db.model_query(db.Node.uuid, session=session).filter(
db.Node.started_at < threshold, db.Node.started_at < threshold,

View File

@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import datetime
import futurist import futurist
import mock import mock
@ -159,8 +160,10 @@ class NodeTest(InventoryTest):
self.node.to_dict = mock_to_dict self.node.to_dict = mock_to_dict
self.ports = [] self.ports = []
self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=0, self.node_info = node_cache.NodeInfo(
node=self.node, ports=self.ports) uuid=self.uuid,
started_at=datetime.datetime(1, 1, 1),
node=self.node, ports=self.ports)
self.node_info.node = mock.Mock(return_value=self.node) self.node_info.node = mock.Mock(return_value=self.node)

View File

@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import datetime
import json import json
import ssl import ssl
import sys import sys
@ -179,10 +180,11 @@ class GetStatusAPIBaseTest(BaseAPITest):
def setUp(self): def setUp(self):
super(GetStatusAPIBaseTest, self).setUp() super(GetStatusAPIBaseTest, self).setUp()
self.uuid2 = uuidutils.generate_uuid() self.uuid2 = uuidutils.generate_uuid()
self.finished_node = node_cache.NodeInfo(uuid=self.uuid, self.finished_node = node_cache.NodeInfo(
started_at=42.0, uuid=self.uuid,
finished_at=100.1, started_at=datetime.datetime(1, 1, 1),
error='boom') finished_at=datetime.datetime(1, 1, 2),
error='boom')
self.finished_node.links = [ self.finished_node.links = [
{u'href': u'http://localhost/v1/introspection/%s' % {u'href': u'http://localhost/v1/introspection/%s' %
self.finished_node.uuid, self.finished_node.uuid,
@ -190,25 +192,27 @@ class GetStatusAPIBaseTest(BaseAPITest):
] ]
self.finished_node.status = { self.finished_node.status = {
'finished': True, 'finished': True,
'started_at': utils.iso_timestamp(self.finished_node.started_at), 'started_at': self.finished_node.started_at.isoformat(),
'finished_at': utils.iso_timestamp(self.finished_node.finished_at), 'finished_at': self.finished_node.finished_at.isoformat(),
'error': self.finished_node.error, 'error': self.finished_node.error,
'uuid': self.finished_node.uuid, 'uuid': self.finished_node.uuid,
'links': self.finished_node.links 'links': self.finished_node.links
} }
self.unfinished_node = node_cache.NodeInfo(uuid=self.uuid2, self.unfinished_node = node_cache.NodeInfo(
started_at=42.0) uuid=self.uuid2,
started_at=datetime.datetime(1, 1, 1))
self.unfinished_node.links = [ self.unfinished_node.links = [
{u'href': u'http://localhost/v1/introspection/%s' % {u'href': u'http://localhost/v1/introspection/%s' %
self.unfinished_node.uuid, self.unfinished_node.uuid,
u'rel': u'self'} u'rel': u'self'}
] ]
finished_at = (self.unfinished_node.finished_at.isoformat()
if self.unfinished_node.finished_at else None)
self.unfinished_node.status = { self.unfinished_node.status = {
'finished': False, 'finished': False,
'started_at': utils.iso_timestamp(self.unfinished_node.started_at), 'started_at': self.unfinished_node.started_at.isoformat(),
'finished_at': utils.iso_timestamp( 'finished_at': finished_at,
self.unfinished_node.finished_at),
'error': None, 'error': None,
'uuid': self.unfinished_node.uuid, 'uuid': self.unfinished_node.uuid,
'links': self.unfinished_node.links 'links': self.unfinished_node.links

View File

@ -24,6 +24,8 @@ The test will then use that db and u/p combo to run the tests.
import contextlib import contextlib
import datetime
import time
import alembic import alembic
from alembic import script from alembic import script
@ -364,6 +366,35 @@ class MigrationCheckersMixin(object):
err_node = nodes.select(nodes.c.uuid == err_node_id).execute().first() err_node = nodes.select(nodes.c.uuid == err_node_id).execute().first()
self.assertEqual(istate.States.error, err_node['state']) self.assertEqual(istate.States.error, err_node['state'])
def _pre_upgrade_d00d6e3f38c4(self, engine):
    """Seed the nodes table before the Float->DateTime migration runs.

    Inserts two rows with epoch-float timestamps: one still running
    (finished_at NULL) and one finished.

    :param engine: SQLAlchemy engine pointing at the pre-upgrade schema
    :returns: the data dict of the last inserted row (the finished one)
    """
    nodes = db_utils.get_table(engine, 'nodes')
    for finished_at in (None, time.time()):
        data = {'uuid': uuidutils.generate_uuid(),
                'started_at': time.time(),
                'finished_at': finished_at,
                'error': None}
        nodes.insert().values(data).execute()
    # NOTE(review): only the last row's data is returned, so the
    # matching _check_* method never verifies the finished_at=None row
    # survived the migration intact — consider returning both rows.
    return data
def _check_d00d6e3f38c4(self, engine, data):
    """Verify migration d00d6e3f38c4 converted timestamps to DateTime.

    :param engine: SQLAlchemy engine pointing at the post-upgrade schema
    :param data: row dict returned by _pre_upgrade_d00d6e3f38c4
    """
    nodes = db_utils.get_table(engine, 'nodes')
    col_names = [column.name for column in nodes.c]
    # Columns must keep their original names after the temp-column
    # rename dance performed by the migration...
    self.assertIn('started_at', col_names)
    self.assertIn('finished_at', col_names)
    # ...but must now be typed DateTime instead of Float.
    self.assertIsInstance(nodes.c.started_at.type,
                          sqlalchemy.types.DateTime)
    self.assertIsInstance(nodes.c.finished_at.type,
                          sqlalchemy.types.DateTime)
    # The seeded epoch floats must round-trip to equivalent naive UTC
    # datetimes for the surviving row.
    node = nodes.select(nodes.c.uuid == data['uuid']).execute().first()
    self.assertEqual(
        datetime.datetime.utcfromtimestamp(data['started_at']),
        node['started_at'])
    self.assertEqual(
        datetime.datetime.utcfromtimestamp(data['finished_at']),
        node['finished_at'])
def test_upgrade_and_version(self): def test_upgrade_and_version(self):
with patch_with_engine(self.engine): with patch_with_engine(self.engine):
self.migration_ext.upgrade('head') self.migration_ext.upgrade('head')

View File

@ -12,15 +12,16 @@
# limitations under the License. # limitations under the License.
import copy import copy
import datetime
import json import json
import six import six
import time
import unittest import unittest
import automaton import automaton
import mock import mock
from oslo_config import cfg from oslo_config import cfg
import oslo_db import oslo_db
from oslo_utils import timeutils
from oslo_utils import uuidutils from oslo_utils import uuidutils
from ironic_inspector.common import ironic as ir_utils from ironic_inspector.common import ironic as ir_utils
@ -52,7 +53,10 @@ class TestNodeCache(test_base.NodeTest):
mac=self.macs, bmc_address='1.2.3.4', mac=self.macs, bmc_address='1.2.3.4',
foo=None) foo=None)
self.assertEqual(self.uuid, node.uuid) self.assertEqual(self.uuid, node.uuid)
self.assertTrue(time.time() - 60 < node.started_at < time.time() + 60) self.assertTrue(
(datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
< node.started_at <
datetime.datetime.utcnow() + datetime.timedelta(seconds=60)))
self.assertFalse(node._locked) self.assertFalse(node._locked)
res = set(db.model_query(db.Node.uuid, res = set(db.model_query(db.Node.uuid,
@ -205,13 +209,19 @@ class TestNodeCacheFind(test_base.NodeTest):
def test_bmc(self): def test_bmc(self):
res = node_cache.find_node(bmc_address='1.2.3.4') res = node_cache.find_node(bmc_address='1.2.3.4')
self.assertEqual(self.uuid, res.uuid) self.assertEqual(self.uuid, res.uuid)
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1) self.assertTrue(
datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
< res.started_at <
datetime.datetime.utcnow() + datetime.timedelta(seconds=1))
self.assertTrue(res._locked) self.assertTrue(res._locked)
def test_macs(self): def test_macs(self):
res = node_cache.find_node(mac=['11:22:33:33:33:33', self.macs[1]]) res = node_cache.find_node(mac=['11:22:33:33:33:33', self.macs[1]])
self.assertEqual(self.uuid, res.uuid) self.assertEqual(self.uuid, res.uuid)
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1) self.assertTrue(
datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
< res.started_at <
datetime.datetime.utcnow() + datetime.timedelta(seconds=1))
self.assertTrue(res._locked) self.assertTrue(res._locked)
def test_macs_not_found(self): def test_macs_not_found(self):
@ -230,7 +240,10 @@ class TestNodeCacheFind(test_base.NodeTest):
res = node_cache.find_node(bmc_address='1.2.3.4', res = node_cache.find_node(bmc_address='1.2.3.4',
mac=self.macs) mac=self.macs)
self.assertEqual(self.uuid, res.uuid) self.assertEqual(self.uuid, res.uuid)
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1) self.assertTrue(
datetime.datetime.utcnow() - datetime.timedelta(seconds=60)
< res.started_at <
datetime.datetime.utcnow() + datetime.timedelta(seconds=1))
self.assertTrue(res._locked) self.assertTrue(res._locked)
def test_inconsistency(self): def test_inconsistency(self):
@ -245,7 +258,7 @@ class TestNodeCacheFind(test_base.NodeTest):
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
(db.model_query(db.Node).filter_by(uuid=self.uuid). (db.model_query(db.Node).filter_by(uuid=self.uuid).
update({'finished_at': 42.0})) update({'finished_at': datetime.datetime.utcnow()}))
self.assertRaises(utils.Error, node_cache.find_node, self.assertRaises(utils.Error, node_cache.find_node,
bmc_address='1.2.3.4') bmc_address='1.2.3.4')
@ -253,7 +266,7 @@ class TestNodeCacheFind(test_base.NodeTest):
class TestNodeCacheCleanUp(test_base.NodeTest): class TestNodeCacheCleanUp(test_base.NodeTest):
def setUp(self): def setUp(self):
super(TestNodeCacheCleanUp, self).setUp() super(TestNodeCacheCleanUp, self).setUp()
self.started_at = 100.0 self.started_at = datetime.datetime.utcnow()
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, db.Node(uuid=self.uuid,
@ -280,9 +293,9 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
self.assertEqual(1, db.model_query(db.Option).count()) self.assertEqual(1, db.model_query(db.Option).count())
@mock.patch.object(node_cache, '_get_lock', autospec=True) @mock.patch.object(node_cache, '_get_lock', autospec=True)
@mock.patch.object(time, 'time') @mock.patch.object(timeutils, 'utcnow')
def test_ok(self, time_mock, get_lock_mock): def test_ok(self, time_mock, get_lock_mock):
time_mock.return_value = 1000 time_mock.return_value = datetime.datetime.utcnow()
self.assertFalse(node_cache.clean_up()) self.assertFalse(node_cache.clean_up())
@ -295,29 +308,31 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
self.assertFalse(get_lock_mock.called) self.assertFalse(get_lock_mock.called)
@mock.patch.object(node_cache, '_get_lock', autospec=True) @mock.patch.object(node_cache, '_get_lock', autospec=True)
@mock.patch.object(time, 'time') @mock.patch.object(timeutils, 'utcnow')
def test_timeout(self, time_mock, get_lock_mock): def test_timeout(self, time_mock, get_lock_mock):
# Add a finished node to confirm we don't try to timeout it # Add a finished node to confirm we don't try to timeout it
time_mock.return_value = self.started_at time_mock.return_value = self.started_at
session = db.get_session() session = db.get_session()
finished_at = self.started_at + datetime.timedelta(seconds=60)
with session.begin(): with session.begin():
db.Node(uuid=self.uuid + '1', db.Node(uuid=self.uuid + '1', started_at=self.started_at,
state=istate.States.waiting, state=istate.States.waiting,
started_at=self.started_at, finished_at=finished_at).save(session)
finished_at=self.started_at + 60).save(session)
CONF.set_override('timeout', 99) CONF.set_override('timeout', 99)
time_mock.return_value = (self.started_at + 100) time_mock.return_value = (self.started_at +
datetime.timedelta(seconds=100))
self.assertEqual([self.uuid], node_cache.clean_up()) self.assertEqual([self.uuid], node_cache.clean_up())
res = [(row.state, row.finished_at, row.error) for row in res = [(row.state, row.finished_at, row.error) for row in
db.model_query(db.Node).all()] db.model_query(db.Node).all()]
self.assertEqual([(istate.States.error, self.assertEqual(
self.started_at + 100, [(istate.States.error,
'Introspection timeout'), self.started_at + datetime.timedelta(seconds=100),
(istate.States.waiting, 'Introspection timeout'),
self.started_at + 60, None)], (istate.States.waiting,
res) self.started_at + datetime.timedelta(seconds=60), None)],
res)
self.assertEqual([], db.model_query(db.Attribute).all()) self.assertEqual([], db.model_query(db.Attribute).all())
self.assertEqual([], db.model_query(db.Option).all()) self.assertEqual([], db.model_query(db.Option).all())
get_lock_mock.assert_called_once_with(self.uuid) get_lock_mock.assert_called_once_with(self.uuid)
@ -328,7 +343,8 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.model_query(db.Node).update( db.model_query(db.Node).update(
{'finished_at': time.time() - 100}) {'finished_at': (datetime.datetime.utcnow() -
datetime.timedelta(seconds=100))})
self.assertEqual([], node_cache.clean_up()) self.assertEqual([], node_cache.clean_up())
@ -337,7 +353,8 @@ class TestNodeCacheCleanUp(test_base.NodeTest):
class TestNodeCacheGetNode(test_base.NodeTest): class TestNodeCacheGetNode(test_base.NodeTest):
def test_ok(self): def test_ok(self):
started_at = time.time() - 42 started_at = (datetime.datetime.utcnow() -
datetime.timedelta(seconds=42))
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, db.Node(uuid=self.uuid,
@ -352,7 +369,8 @@ class TestNodeCacheGetNode(test_base.NodeTest):
self.assertFalse(info._locked) self.assertFalse(info._locked)
def test_locked(self): def test_locked(self):
started_at = time.time() - 42 started_at = (datetime.datetime.utcnow() -
datetime.timedelta(seconds=42))
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, db.Node(uuid=self.uuid,
@ -371,7 +389,8 @@ class TestNodeCacheGetNode(test_base.NodeTest):
uuidutils.generate_uuid()) uuidutils.generate_uuid())
def test_with_name(self): def test_with_name(self):
started_at = time.time() - 42 started_at = (datetime.datetime.utcnow() -
datetime.timedelta(seconds=42))
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, db.Node(uuid=self.uuid,
@ -390,7 +409,7 @@ class TestNodeCacheGetNode(test_base.NodeTest):
ironic.node.get.assert_called_once_with('name') ironic.node.get.assert_called_once_with('name')
@mock.patch.object(time, 'time', lambda: 42.0) @mock.patch.object(timeutils, 'utcnow', lambda: datetime.datetime(1, 1, 1))
class TestNodeInfoFinished(test_base.NodeTest): class TestNodeInfoFinished(test_base.NodeTest):
def setUp(self): def setUp(self):
super(TestNodeInfoFinished, self).setUp() super(TestNodeInfoFinished, self).setUp()
@ -398,7 +417,8 @@ class TestNodeInfoFinished(test_base.NodeTest):
istate.States.processing, istate.States.processing,
bmc_address='1.2.3.4', bmc_address='1.2.3.4',
mac=self.macs) mac=self.macs)
self.node_info = node_cache.NodeInfo(uuid=self.uuid, started_at=3.14) self.node_info = node_cache.NodeInfo(
uuid=self.uuid, started_at=datetime.datetime(3, 1, 4))
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Option(uuid=self.uuid, name='foo', value='bar').save( db.Option(uuid=self.uuid, name='foo', value='bar').save(
@ -409,7 +429,7 @@ class TestNodeInfoFinished(test_base.NodeTest):
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
self.assertEqual((42.0, None), self.assertEqual((datetime.datetime(1, 1, 1), None),
tuple(db.model_query( tuple(db.model_query(
db.Node.finished_at, db.Node.finished_at,
db.Node.error).first())) db.Node.error).first()))
@ -421,7 +441,7 @@ class TestNodeInfoFinished(test_base.NodeTest):
def test_error(self): def test_error(self):
self.node_info.finished(error='boom') self.node_info.finished(error='boom')
self.assertEqual((42.0, 'boom'), self.assertEqual((datetime.datetime(1, 1, 1), 'boom'),
tuple(db.model_query(db.Node.finished_at, tuple(db.model_query(db.Node.finished_at,
db.Node.error).first())) db.Node.error).first()))
self.assertEqual([], db.model_query(db.Attribute).all()) self.assertEqual([], db.model_query(db.Attribute).all())
@ -772,9 +792,10 @@ class TestNodeCacheListNode(test_base.NodeTest):
self.uuid2 = uuidutils.generate_uuid() self.uuid2 = uuidutils.generate_uuid()
session = db.get_session() session = db.get_session()
with session.begin(): with session.begin():
db.Node(uuid=self.uuid, started_at=42.42).save(session) db.Node(uuid=self.uuid,
db.Node(uuid=self.uuid2, started_at=42.24, started_at=datetime.datetime(1, 1, 2)).save(session)
finished_at=100.0).save(session) db.Node(uuid=self.uuid2, started_at=datetime.datetime(1, 1, 1),
finished_at=datetime.datetime(1, 1, 3)).save(session)
# mind please node(self.uuid).started_at > node(self.uuid2).started_at # mind please node(self.uuid).started_at > node(self.uuid2).started_at
# and the result ordering is strict in node_cache.get_node_list newer first # and the result ordering is strict in node_cache.get_node_list newer first
@ -848,7 +869,7 @@ class TestNodeInfoState(test_base.NodeStateTest):
self.node_info._set_state, 'foo') self.node_info._set_state, 'foo')
def test_commit(self): def test_commit(self):
current_time = time.time() current_time = timeutils.utcnow()
self.node_info.started_at = self.node_info.finished_at = current_time self.node_info.started_at = self.node_info.finished_at = current_time
self.node_info.error = "Boo!" self.node_info.error = "Boo!"
self.node_info.commit() self.node_info.commit()

View File

@ -17,7 +17,6 @@ import json
import os import os
import shutil import shutil
import tempfile import tempfile
import time
import eventlet import eventlet
import fixtures import fixtures
@ -25,6 +24,7 @@ from ironicclient import exceptions
import mock import mock
from oslo_config import cfg from oslo_config import cfg
from oslo_serialization import base64 from oslo_serialization import base64
from oslo_utils import timeutils
from oslo_utils import uuidutils from oslo_utils import uuidutils
from ironic_inspector.common import ironic as ir_utils from ironic_inspector.common import ironic as ir_utils
@ -44,7 +44,7 @@ CONF = cfg.CONF
class BaseTest(test_base.NodeTest): class BaseTest(test_base.NodeTest):
def setUp(self): def setUp(self):
super(BaseTest, self).setUp() super(BaseTest, self).setUp()
self.started_at = time.time() self.started_at = timeutils.utcnow()
self.all_ports = [mock.Mock(uuid=uuidutils.generate_uuid(), self.all_ports = [mock.Mock(uuid=uuidutils.generate_uuid(),
address=mac) for mac in self.macs] address=mac) for mac in self.macs]
self.ports = [self.all_ports[1]] self.ports = [self.all_ports[1]]
@ -120,7 +120,7 @@ class TestProcess(BaseProcessTest):
self.node_info.finished.assert_called_once_with(error=mock.ANY) self.node_info.finished.assert_called_once_with(error=mock.ANY)
def test_already_finished(self): def test_already_finished(self):
self.node_info.finished_at = time.time() self.node_info.finished_at = timeutils.utcnow()
self.assertRaisesRegex(utils.Error, 'already finished', self.assertRaisesRegex(utils.Error, 'already finished',
process.process, self.data) process.process, self.data)
self.assertFalse(self.process_mock.called) self.assertFalse(self.process_mock.called)

View File

@ -0,0 +1,11 @@
---
fixes:
- |
The DB columns started_at and finished_at were defined as Float;
as a result, a timestamp did not fit into the default Float
precision. This change migrates the column type to DateTime.
upgrade:
- |
The new migration from Float to DateTime includes a data migration,
which could take some time depending on the number of introspection
statuses in the DB.