Handle duplicate node inventory entries per node

When a node is inspected more than once and the database is configured
as a storage backend, a new entry is added to the database for each
inspection result (node inventory). This patch handles that behaviour
as follows:

* Previous inventory entries for the same node are deleted before a new
  entry is added to the database.
* The most recent node inventory is retrieved when the database is
  queried, which also covers databases that already contain duplicate
  entries from before this fix.

Change-Id: Ic3df86f395601742d2fea2bcde62f7547067d8e4
Author: Mahnoor Asghar
Date:   2023-06-05 01:33:32 -04:00
parent 964a82db18
commit fa2d6685f3
5 changed files with 72 additions and 14 deletions
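For readers skimming the diff, here is a rough standalone sketch of the first behaviour described in the commit message: delete any previous rows for the node, then insert the new inspection result. This is a minimal illustration assuming plain SQLAlchemy (1.4+); the `Inventory` model, `save_inventory` helper and in-memory SQLite engine are hypothetical stand-ins, not Ironic's actual `models.NodeInventory` / `create_node_inventory` code shown below.

# Minimal sketch, assuming plain SQLAlchemy 1.4+; all names here are illustrative.
from sqlalchemy import JSON, Column, DateTime, Integer, create_engine, func
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Inventory(Base):
    __tablename__ = 'node_inventory'
    id = Column(Integer, primary_key=True)
    node_id = Column(Integer, nullable=False)   # note: no unique constraint
    inventory_data = Column(JSON)
    created_at = Column(DateTime, server_default=func.now())


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)


def save_inventory(node_id, data):
    """Drop older inventory rows for the node, then store the new result."""
    with Session(engine) as session, session.begin():
        # Same pattern as the patched create_node_inventory below:
        # delete first, so at most one row per node remains.
        session.query(Inventory).filter(
            Inventory.node_id == node_id).delete()
        session.add(Inventory(node_id=node_id, inventory_data=data))


save_inventory(1, {'cpu': 'first pass'})
save_inventory(1, {'cpu': 'second pass'})   # only this row is kept for node 1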


@@ -2651,12 +2651,13 @@ class Connection(api.Connection):
         inventory = models.NodeInventory()
         inventory.update(values)
         with _session_for_write() as session:
-            try:
-                session.add(inventory)
-                session.flush()
-            except db_exc.DBDuplicateEntry:
-                raise exception.NodeInventoryAlreadyExists(
-                    id=values['id'])
+            session.query(
+                models.NodeInventory
+            ).filter(
+                models.NodeInventory.node_id == values['node_id']
+            ).delete()
+            session.add(inventory)
+            session.flush()
         return inventory
 
     @oslo_db_api.retry_on_deadlock
@@ -2670,12 +2671,26 @@ class Connection(api.Connection):
                 node=node_id)
 
     def get_node_inventory_by_node_id(self, node_id):
-        try:
-            with _session_for_read() as session:
-                query = session.query(models.NodeInventory).filter_by(
-                    node_id=node_id)
-                res = query.one()
-        except NoResultFound:
+        with _session_for_read() as session:
+            # Note(masghar): The most recent node inventory is extracted
+            # (as per the created_at field). This is because previously, it was
+            # possible to add more than one inventory per node into the
+            # database, due to there being no unique constraint on the node_id
+            # column in the node_inventory table. Now, all previous node
+            # inventories are deleted before a new one is added using
+            # create_node_inventory. However, some databases would already
+            # contain multiple node inventories due to the prior
+            # implementation. Hence the most recent one is being retrieved.
+            query = session.query(
+                models.NodeInventory
+            ).filter_by(
+                node_id=node_id
+            ).order_by(
+                models.NodeInventory.created_at.desc()
+            )
+            res = query.first()
+
+        if res is None:
             raise exception.NodeInventoryNotFound(node=node_id)
         return res
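The Note above is the heart of the read-side change. As a hedged continuation of the standalone sketch after the commit metadata (reusing that sketch's hypothetical `Inventory` model and `engine`; `latest_inventory` is likewise an illustrative name), the equivalent lookup orders by `created_at` and takes the first row. Unlike the old `query.one()`, `first()` returns `None` rather than raising `NoResultFound`, and it tolerates leftover duplicate rows that would have made `one()` raise `MultipleResultsFound`.

# Continuation of the earlier sketch: Inventory and engine are defined there.
from sqlalchemy.orm import Session


def latest_inventory(node_id):
    """Return the newest inventory row for a node, or raise if none exists."""
    with Session(engine) as session:
        row = session.query(Inventory).filter_by(
            node_id=node_id).order_by(
            Inventory.created_at.desc()).first()
    if row is None:
        raise LookupError('no inventory stored for node %s' % node_id)
    return row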


@@ -7976,7 +7976,9 @@ class TestNodeHistory(test_api_base.BaseApiTest):
 
 
 class TestNodeInventory(test_api_base.BaseApiTest):
-    fake_inventory_data = {"cpu": "amd"}
+    fake_inventory_data = {'cpu': {'count': 1,
+                                   'model_name': 'qemu64',
+                                   'architecture': 'x86_64'}}
     fake_plugin_data = {"disks": [{"name": "/dev/vda"}]}
 
     def setUp(self):


@@ -24,6 +24,7 @@ from sqlalchemy.orm import exc as sa_exc
 
 from ironic.common import exception
 from ironic.common import states
+from ironic.db.sqlalchemy.models import NodeInventory
 from ironic.tests.unit.db import base
 from ironic.tests.unit.db import utils
 
@@ -763,6 +764,31 @@ class DbNodeTestCase(base.DbTestCase):
         self.assertRaises(exception.NodeHistoryNotFound,
                           self.dbapi.get_node_history_by_id, history.id)
 
+    def test_inventory_updated_for_node(self):
+        node = utils.create_test_node()
+        first_timestamp = datetime.datetime(2000, 1, 1, 0, 0)
+        second_timestamp = first_timestamp + datetime.timedelta(minutes=8)
+        utils.create_test_inventory(node_id=node.id,
+                                    id=1,
+                                    created_at=first_timestamp)
+        utils.create_test_inventory(node_id=node.id,
+                                    id=2,
+                                    inventory={"inventory": "test2"},
+                                    created_at=second_timestamp)
+        node_inventory = self.dbapi.get_node_inventory_by_node_id(
+            node_id=node.id)
+        expected_inventory = NodeInventory(node_id=node.id,
+                                           id=2,
+                                           inventory_data={"inventory":
+                                                           "test2"},
+                                           created_at=second_timestamp,
+                                           plugin_data={"pdata":
+                                                        {"plugin": "data"}},
+                                           version='1.0')
+        self.assertJsonEqual(expected_inventory, node_inventory)
+
     def test_inventory_get_destroyed_after_destroying_a_node_by_uuid(self):
         node = utils.create_test_node()


@@ -743,7 +743,7 @@ def get_test_inventory(**kw):
 def create_test_inventory(**kw):
     """Create test inventory entry in DB and return NodeInventory DB object.
 
-    :param kw: kwargs with overriding values for port's attributes.
+    :param kw: kwargs with overriding values for inventory attributes.
     :returns: Test NodeInventory DB object.
     """
     inventory = get_test_inventory(**kw)


@@ -0,0 +1,15 @@
+---
+fixes:
+  - |
+    Fixes a bug that occurs when a node is inspected more than once and the
+    database is configured as a storage backend: a new node inventory entry
+    is added in the database for each inspection result, causing more than one
+    inventory to exist for the node in the node_inventory table.
+    This is handled by:
+
+    * Deleting any previous inventory entries for a node before adding a new
+      entry in the database.
+    * Retrieving the most recent node inventory from the database when the
+      database is queried. (To cater for databases that already contain
+      duplicate node inventories due to the bug.)
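The second bullet of the release note exists because databases written before this fix may still hold several inventory rows per node. As a rough illustration only (again reusing the hypothetical `Inventory` model and `engine` from the first sketch, not an Ironic utility), such leftovers could be counted like this:

# Count nodes that still carry more than one inventory row (illustrative only).
from sqlalchemy import func
from sqlalchemy.orm import Session

with Session(engine) as session:
    leftovers = (
        session.query(Inventory.node_id, func.count(Inventory.id))
        .group_by(Inventory.node_id)
        .having(func.count(Inventory.id) > 1)
        .all()
    )

for node_id, row_count in leftovers:
    print('node %s has %d inventory rows' % (node_id, row_count))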