commit
ca01a442e7
2
.gitignore
vendored
2
.gitignore
vendored
@ -126,3 +126,5 @@ dmypy.json
|
||||
# End of https://www.gitignore.io/api/python
|
||||
|
||||
.stestr/
|
||||
AUTHORS
|
||||
ChangeLog
|
||||
|
49
README.md
49
README.md
@ -14,23 +14,58 @@ $ python setup.py install
|
||||
```
|
||||
3- Verify that the driver is installed
|
||||
```
|
||||
$ python verify_installation.py
|
||||
$ pip install entry_point_inspector --user <user>
|
||||
$ epi group show oslo.messaging.notify.drivers
|
||||
|
||||
```
|
||||
Output in case of a successful installation:
|
||||
`prometheus_exporter driver found.`
|
||||
`prometheus_exporter` is listed in the `Name` column and the `Error` column should be empty.
|
||||
Output in case of an unsuccessful installation:
|
||||
`prometheus_exporter driver not found.`
|
||||
`Available drivers: ['log', 'messagingv2', 'noop', 'routing', 'test', 'messaging']`
|
||||
`prometheus_exporter` is listed in the `Name` column and the `Error` column will have more information.
|
||||
|
||||
|
||||
### Configuration ###
|
||||
|
||||
After install the driver you will need to update the :ironic.conf: and add
|
||||
:file_path: option (the file extension should be .json)
|
||||
After installing the driver, you will need to update the `ironic.conf` and add the information below.
|
||||
|
||||
```
|
||||
[conductor]
|
||||
send_sensor_data=true
|
||||
|
||||
[oslo_messaging_notifications]
|
||||
driver = prometheus_exporter
|
||||
transport_url = fake://
|
||||
file_path=/tmp/ironic_prometheus_exporter/metrics.json
|
||||
location=/tmp/ironic_prometheus_exporter
|
||||
```
|
||||
|
||||
|
||||
### Running exporter application ###
|
||||
|
||||
The Flask Application is responsible to merge all the metrics files present in the directory
|
||||
set in `[oslo_messaging_notifications]/location`.
|
||||
|
||||
**NOTE:** if you want to deploy in production please check the Flask [documentation](http://flask.pocoo.org/docs/dev/deploying/)
|
||||
|
||||
To run the Flask Application follow the steps listed below:
|
||||
1 - open the repository directory
|
||||
```
|
||||
$ cd ironic-prometheus-exporter/
|
||||
```
|
||||
2- set the `FLASK_*` environment variables and the location of the `ironic.conf` file.
|
||||
```
|
||||
$ export IRONIC_CONFIG=/etc/ironic/ironic.conf
|
||||
$ export FLASK_APP=ironic_prometheus_exporter/app/exporter.py
|
||||
$ export FLASK_RUN_HOST=$HOST_IP
|
||||
$ export FLASK_RUN_PORT=5000
|
||||
```
|
||||
3- run the Flask Application
|
||||
```
|
||||
$ python -m flask run &
|
||||
```
|
||||
|
||||
**Running under uWSGI**
|
||||
Reproduce the Steps 1 and 2 (You don't need to set `FLASK_APP` variable) and run the command below:
|
||||
```
|
||||
$ uwsgi --socket $FLASK_RUN_HOST:$FLASK_RUN_PORT --protocol=http -w ironic_prometheus_exporter.app.wsgi
|
||||
|
||||
```
|
||||
|
0
ironic_prometheus_exporter/app/__init__.py
Normal file
0
ironic_prometheus_exporter/app/__init__.py
Normal file
31
ironic_prometheus_exporter/app/exporter.py
Normal file
31
ironic_prometheus_exporter/app/exporter.py
Normal file
@ -0,0 +1,31 @@
|
||||
import configparser
|
||||
import logging
|
||||
import os
|
||||
|
||||
from flask import abort, Flask, Response
|
||||
application = Flask(__name__)
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@application.route('/metrics', methods=['GET'])
def prometheus_metrics():
    """Serve the merged contents of all node metrics files.

    Reads the metrics directory from the [oslo_messaging_notifications]
    section of the ironic configuration file named by the IRONIC_CONFIG
    environment variable, then streams the concatenation of every
    regular file found in that directory as text/plain.

    Aborts with HTTP 500 if the configuration cannot be loaded.
    """
    try:
        config_path = os.environ.get('IRONIC_CONFIG')
        if config_path is None:
            raise RuntimeError('IRONIC_CONFIG environment variable not set')
        config = configparser.ConfigParser()
        config.read(config_path)
        metrics_dir = config['oslo_messaging_notifications']['location']
    except Exception:
        # Log the real failure (missing env var, unreadable file, missing
        # option) instead of a generic message, so operators can diagnose it.
        LOG.exception('Failed to load metrics location from configuration')
        abort(500)

    all_files = [os.path.join(metrics_dir, name)
                 for name in os.listdir(metrics_dir)
                 if os.path.isfile(os.path.join(metrics_dir, name))]

    def merge_content():
        # Stream file contents lazily so large metric sets are not
        # buffered in memory all at once.
        for file_name in all_files:
            with open(file_name, 'r') as metrics_file:
                yield metrics_file.read()

    return Response(merge_content(), mimetype='text/plain')
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Allow running the exporter directly for local debugging
    # (production deployments should use a WSGI server instead).
    application.run()
|
4
ironic_prometheus_exporter/app/wsgi.py
Normal file
4
ironic_prometheus_exporter/app/wsgi.py
Normal file
@ -0,0 +1,4 @@
|
||||
from ironic_prometheus_exporter.app.exporter import application
|
||||
|
||||
if __name__ == '__main__':
    # Convenience entry point: run the imported Flask application
    # directly instead of going through uWSGI.
    application.run()
|
@ -1,12 +1,17 @@
|
||||
import logging
|
||||
import os
|
||||
import json
|
||||
|
||||
from ironic_prometheus_exporter.parsers import ipmi
|
||||
from oslo_config import cfg
|
||||
from oslo_messaging.notify import notifier
|
||||
from prometheus_client import write_to_textfile, CollectorRegistry
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
prometheus_opts = [
|
||||
cfg.StrOpt('file_path', required=True,
|
||||
help='Path for the json file where the metrics will be stored.')
|
||||
cfg.StrOpt('location', required=True,
|
||||
help='Directory where the files will be written.')
|
||||
]
|
||||
|
||||
|
||||
@ -18,13 +23,35 @@ class PrometheusFileDriver(notifier.Driver):
|
||||
"""Publish notifications into a File to be used by Prometheus"""
|
||||
|
||||
def __init__(self, conf, topics, transport):
|
||||
self.file_path = conf.oslo_messaging_notifications.file_path
|
||||
if not self.file_path.endswith('.json'):
|
||||
raise Exception('The file should end with .json')
|
||||
if not os.path.exists(os.path.dirname(self.file_path)):
|
||||
os.makedirs(os.path.dirname(self.file_path))
|
||||
self.location = conf.oslo_messaging_notifications.location
|
||||
if not os.path.exists(self.location):
|
||||
os.makedirs(self.location)
|
||||
super(PrometheusFileDriver, self).__init__(conf, topics, transport)
|
||||
|
||||
def notify(self, ctxt, message, priority, retry):
|
||||
with open(self.file_path, 'w') as prometheus_file:
|
||||
json.dump(message, prometheus_file)
|
||||
try:
|
||||
if message['event_type'] == 'hardware.ipmi.metrics':
|
||||
registry = CollectorRegistry()
|
||||
node_name = message['payload']['node_name']
|
||||
node_payload = message['payload']['payload']
|
||||
for category in node_payload:
|
||||
ipmi.category_registry(category.lower(),
|
||||
node_payload[category], node_name,
|
||||
registry)
|
||||
nodeFile = os.path.join(self.location, node_name)
|
||||
write_to_textfile(nodeFile, registry)
|
||||
except Exception as e:
|
||||
LOG.error(e)
|
||||
|
||||
|
||||
class SimpleFileDriver(notifier.Driver):
    """Debug driver that dumps each notification verbatim to one file."""

    def __init__(self, conf, topics, transport):
        """Ensure the output directory exists.

        :param conf: oslo.config object carrying
            [oslo_messaging_notifications]/location.
        """
        self.location = conf.oslo_messaging_notifications.location
        # BUG FIX: the original called
        # os.makedirs(os.path.dirname(self.location)), which creates the
        # *parent* of the target directory, so self.location itself could
        # still be missing and notify() would fail on open().
        # exist_ok also removes the exists()/makedirs() race.
        os.makedirs(self.location, exist_ok=True)
        super(SimpleFileDriver, self).__init__(conf, topics, transport)

    def notify(self, ctx, message, priority, retry):
        """Write the raw message to <location>/simplefile (overwriting)."""
        with open(os.path.join(self.location, 'simplefile'), 'w') as out:
            out.write(message)
|
||||
|
0
ironic_prometheus_exporter/parsers/__init__.py
Normal file
0
ironic_prometheus_exporter/parsers/__init__.py
Normal file
166
ironic_prometheus_exporter/parsers/ipmi.py
Normal file
166
ironic_prometheus_exporter/parsers/ipmi.py
Normal file
@ -0,0 +1,166 @@
|
||||
import logging
|
||||
import re
|
||||
|
||||
from prometheus_client import Gauge
|
||||
|
||||
# NOTE (iurygregory): most of the sensor readings come in the ipmi format
|
||||
# each type of sensor consider a different range of values that aren't integers
|
||||
# (eg: 0h, 2eh), 0h will be published as 0 and the other values as 1, this way
|
||||
# we will be able to create prometheus alerts.
|
||||
# Documentation: https://www.intel.com/content/www/us/en/servers/ipmi/
|
||||
# ipmi-second-gen-interface-spec-v2-rev1-1.html
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def metric_names(payload, prefix, sufix, **kwargs):
    """Group sensor entries by the Prometheus metric name they map to.

    :param payload: dict of sensor entries for one category, keyed by the
        ipmitool sensor name (e.g. 'Temp (0x1)').
    :param prefix: string prepended to every metric name.
    :param sufix: string appended to every metric name.  When
        extract_unit is set and a 'Sensor Reading' carries a unit, the
        sufix is replaced by '_<unit>' (and, as in the original code,
        that replacement persists for subsequent entries of the loop).
    :param kwargs: optional flags: extract_unit (bool) and special_label
        ('fan' or 'memory') tweak how the label part is derived.
    :returns: dict mapping metric name -> list of entries sharing it.
    """
    log = logging.getLogger(__name__)
    # Lazy %-style arguments: formatting cost is only paid when the
    # level is enabled.
    log.info('metric_names function called with payload=%s', payload)
    log.info('prefix=%s | sufix=%s | kwargs=%s', prefix, sufix, kwargs)

    metric_dic = {}
    extract_unit = kwargs.get('extract_unit')
    special_label = kwargs.get('special_label')
    for entry in payload:
        if special_label == 'fan':
            # Fans: drop everything from the first digit onwards
            # ('Fan4A (0x3b)' -> 'fan').
            e = re.sub(r'[\d].*$', '', entry.lower())
            e = re.sub(r'[\(\)]', '', e).split()
            label = '_'.join(e)
        else:
            # Default: strip digits, drop the trailing token (the hex id)
            # and join the rest ('Inlet Temp (0x5)' -> 'inlet_temp').
            e = re.sub(r"[\d]+", "", entry).lower().split()
            label = '_'.join(e[:-1]).replace('-', '_')

        if extract_unit:
            sensor_read = payload[entry]['Sensor Reading'].split()
            if len(sensor_read) > 1:
                sufix = '_' + sensor_read[-1].lower()

        if special_label == 'memory':
            if 'mem' not in label and 'memory' not in label:
                label = 'memory_' + label

        metric_name = re.sub(r'[\W]', '_', prefix + label + sufix)
        if metric_name[0].isdigit():
            # Prometheus metric names must not start with a digit.
            metric_name = metric_name.lstrip('0123456789')
        metric_dic.setdefault(metric_name, []).append(entry)
    return metric_dic
|
||||
|
||||
|
||||
def extract_labels(entries, payload, node_name):
    """Extract the Prometheus label set for each entry of a metric.

    If a metric maps to a single entry, only the default labels are used
    ('node_name' and 'Entity ID'); when several entries share one metric
    name the 'Sensor ID' is added so the samples stay distinct.

    e.g: for Temperature we have two entries for baremetal_temperature_celsius
    metric ('Temp (0x1)' and 'Temp (0x2)') and one entry for 'Inlet Temp (0x5)'
    and other for 'Exhaust Temp (0x6)', this will produce a dictionary where
    the keys are the entries and the values are the respective label to be
    used when writing the metrics in the Prometheus format.
    {'Inlet Temp (0x5)': '{node_name=...}',
     'Exhaust Temp (0x6)': '{node_name=...}',
     'Temp (0x1)': '{node_name=...,sensor=Temp1}',
     'Temp (0x2)': '{node_name=...,sensor=Temp2}'}

    :returns: a dictionary of dictionaries {<entry>: {label_name: label_value}}
    """
    log = logging.getLogger(__name__)
    log.info('extract_labels function called with: entries=%s | '
             'payload=%s | node_name=%s', entries, payload, node_name)
    if len(entries) == 1:
        labels = {'node_name': node_name,
                  'entity_id': payload[entries[0]]['Entity ID']}
        return {entries[0]: labels}
    entries_labels = {}
    for entry in entries:
        try:
            metric_label = {'node_name': node_name,
                            'entity_id': payload[entry]['Entity ID'],
                            'sensor_id': payload[entry]['Sensor ID']}
            entries_labels[entry] = metric_label
        except Exception as e:
            # Skip entries missing the expected keys but keep the rest.
            log.exception(e)
    return entries_labels
|
||||
|
||||
|
||||
def extract_values(entries, payload, use_ipmi_format=True):
    """Extract a value for each entry of a metric.

    'No Reading' / 'Disabled' sensors yield None.  When the reading has a
    unit (more than one token), the leading numeric token is used; plain
    IPMI discrete readings are collapsed to 0 ('0h') or 1 (anything
    else).  When use_ipmi_format is False, the first token must contain
    a number or the entry is skipped (exception logged).

    :returns: dict {<entry>: value-or-None}; entries that raised are
        omitted from the result.
    """
    log = logging.getLogger(__name__)
    log.info('extract_values function called with: entries=%s | payload=%s',
             entries, payload)
    values = {}
    # Hoisted loop invariant: readings that mean "no value available".
    no_values = ('No Reading', 'Disabled')
    for entry in entries:
        try:
            if payload[entry]['Sensor Reading'] in no_values:
                values[entry] = None
            else:
                sensor_values = payload[entry]['Sensor Reading'].split()
                if not use_ipmi_format:
                    if not re.search(r'(\d+(\.\d*)?|\.\d+)',
                                     sensor_values[0]):
                        raise Exception("No valid value in Sensor Reading")
                    values[entry] = sensor_values[0]
                if len(sensor_values) > 1:
                    values[entry] = sensor_values[0]
                elif sensor_values[0] == "0h":
                    values[entry] = 0
                else:
                    values[entry] = 1
        except Exception as e:
            log.exception(e)
    return values
|
||||
|
||||
|
||||
def prometheus_format(payload, node_name, ipmi_metric_registry,
                      available_metrics, use_ipmi_format):
    """Register one Gauge per metric name and set a sample per entry.

    Metrics whose entries all evaluate to None are skipped entirely;
    individual None entries of an otherwise valid metric are ignored.
    """
    for metric_name, metric_entries in available_metrics.items():
        entry_labels = extract_labels(metric_entries, payload, node_name)
        entry_values = extract_values(metric_entries, payload,
                                      use_ipmi_format=use_ipmi_format)
        if all(value is None for value in entry_values.values()):
            continue
        gauge = Gauge(metric_name, '',
                      labelnames=entry_labels.get(metric_entries[0]).keys(),
                      registry=ipmi_metric_registry)
        for entry in metric_entries:
            if entry_values[entry] is None:
                continue
            gauge.labels(**entry_labels[entry]).set(entry_values[entry])
|
||||
|
||||
|
||||
# Per-category parsing parameters: how the metric name is built (prefix /
# sufix), extra keyword arguments forwarded to metric_names(), and whether
# sensor values are interpreted in the raw IPMI discrete format.
CATEGORY_PARAMS = {
    'management': {'prefix': 'baremetal_', 'sufix': '',
                   'extra_params': {}, 'use_ipmi_format': True},
    'temperature': {'prefix': 'baremetal_', 'sufix': '_celsius',
                    'extra_params': {}, 'use_ipmi_format': False},
    'system': {'prefix': 'baremetal_system_', 'sufix': '',
               'extra_params': {}, 'use_ipmi_format': True},
    'current': {'prefix': 'baremetal_', 'sufix': '',
                'extra_params': {}, 'use_ipmi_format': False},
    'version': {'prefix': 'baremetal_', 'sufix': '',
                'extra_params': {}, 'use_ipmi_format': True},
    'memory': {'prefix': 'baremetal_', 'sufix': '',
               'extra_params': {'special_label': 'memory'},
               'use_ipmi_format': True},
    'power': {'prefix': 'baremetal_power_', 'sufix': '',
              'extra_params': {}, 'use_ipmi_format': True},
    'watchdog2': {'prefix': 'baremetal_', 'sufix': '',
                  'extra_params': {}, 'use_ipmi_format': True},
    'fan': {'prefix': 'baremetal_', 'sufix': '',
            'extra_params': {'extract_unit': True, 'special_label': 'fan'},
            'use_ipmi_format': True},
}
|
||||
|
||||
|
||||
def category_registry(category_name, payload, node_name, ipmi_metric_registry):
    """Parse one sensor category and register its metrics.

    Categories without an entry in CATEGORY_PARAMS are ignored silently.
    """
    if category_name not in CATEGORY_PARAMS:
        return
    params = CATEGORY_PARAMS[category_name]
    available_metrics = metric_names(payload, params['prefix'],
                                     params['sufix'],
                                     **params['extra_params'])
    prometheus_format(payload, node_name, ipmi_metric_registry,
                      available_metrics, params['use_ipmi_format'])
|
2348
ironic_prometheus_exporter/tests/data.json
Normal file
2348
ironic_prometheus_exporter/tests/data.json
Normal file
File diff suppressed because it is too large
Load Diff
2348
ironic_prometheus_exporter/tests/data2.json
Normal file
2348
ironic_prometheus_exporter/tests/data2.json
Normal file
File diff suppressed because it is too large
Load Diff
@ -1,5 +1,9 @@
|
||||
import fixtures
|
||||
import json
|
||||
import os
|
||||
import oslo_messaging
|
||||
|
||||
from ironic_prometheus_exporter.messaging import PrometheusFileDriver
|
||||
from oslo_messaging.tests import utils as test_utils
|
||||
|
||||
|
||||
@ -8,12 +12,66 @@ class TestPrometheusFileNotifier(test_utils.BaseTestCase):
|
||||
def setUp(self):
|
||||
super(TestPrometheusFileNotifier, self).setUp()
|
||||
|
||||
def test_notifier(self):
|
||||
self.config(file_path='/tmp/ironic_prometheus_exporter/test.json',
|
||||
def test_instanciate(self):
|
||||
temp_dir = self.useFixture(fixtures.TempDir()).path
|
||||
self.config(location=temp_dir,
|
||||
group='oslo_messaging_notifications')
|
||||
transport = oslo_messaging.get_notification_transport(self.conf)
|
||||
oslo_messaging.Notifier(transport, driver='prometheus_exporter',
|
||||
topics=['my_topics'])
|
||||
|
||||
self.assertEqual(self.conf.oslo_messaging_notifications.file_path,
|
||||
"/tmp/ironic_prometheus_exporter/test.json")
|
||||
self.assertEqual(self.conf.oslo_messaging_notifications.location,
|
||||
temp_dir)
|
||||
self.assertTrue(os.path.isdir(
|
||||
self.conf.oslo_messaging_notifications.location))
|
||||
|
||||
def test_messages_from_same_node(self):
|
||||
temp_dir = self.useFixture(fixtures.TempDir()).path
|
||||
self.config(location=temp_dir,
|
||||
group='oslo_messaging_notifications')
|
||||
transport = oslo_messaging.get_notification_transport(self.conf)
|
||||
driver = PrometheusFileDriver(self.conf, None, transport)
|
||||
|
||||
msg1 = json.load(open('./ironic_prometheus_exporter/tests/data.json'))
|
||||
node1 = msg1['payload']['node_name']
|
||||
msg2 = json.load(open('./ironic_prometheus_exporter/tests/data2.json'))
|
||||
# Override data2 node_name
|
||||
msg2['payload']['node_name'] = node1
|
||||
node2 = msg2['payload']['node_name']
|
||||
self.assertNotEqual(msg1['payload']['timestamp'],
|
||||
msg2['payload']['timestamp'])
|
||||
|
||||
driver.notify(None, msg1, 'info', 0)
|
||||
driver.notify(None, msg2, 'info', 0)
|
||||
|
||||
DIR = self.conf.oslo_messaging_notifications.location
|
||||
all_files = [name for name in os.listdir(DIR)
|
||||
if os.path.isfile(os.path.join(DIR, name))]
|
||||
self.assertEqual(node1, node2)
|
||||
self.assertEqual(len(all_files), 1)
|
||||
self.assertIn(node1, all_files)
|
||||
self.assertIn(node2, all_files)
|
||||
|
||||
def test_messages_from_different_nodes(self):
|
||||
temp_dir = self.useFixture(fixtures.TempDir()).path
|
||||
self.config(location=temp_dir,
|
||||
group='oslo_messaging_notifications')
|
||||
transport = oslo_messaging.get_notification_transport(self.conf)
|
||||
driver = PrometheusFileDriver(self.conf, None, transport)
|
||||
|
||||
msg1 = json.load(open('./ironic_prometheus_exporter/tests/data.json'))
|
||||
node1 = msg1['payload']['node_name']
|
||||
msg2 = json.load(open('./ironic_prometheus_exporter/tests/data2.json'))
|
||||
node2 = msg2['payload']['node_name']
|
||||
self.assertNotEqual(msg1['payload']['timestamp'],
|
||||
msg2['payload']['timestamp'])
|
||||
|
||||
driver.notify(None, msg1, 'info', 0)
|
||||
driver.notify(None, msg2, 'info', 0)
|
||||
|
||||
DIR = self.conf.oslo_messaging_notifications.location
|
||||
all_files = [name for name in os.listdir(DIR)
|
||||
if os.path.isfile(os.path.join(DIR, name))]
|
||||
self.assertEqual(len(all_files), 2)
|
||||
self.assertIn(node1, all_files)
|
||||
self.assertIn(node2, all_files)
|
||||
|
355
ironic_prometheus_exporter/tests/test_ipmi_parser.py
Normal file
355
ironic_prometheus_exporter/tests/test_ipmi_parser.py
Normal file
@ -0,0 +1,355 @@
|
||||
import json
|
||||
import unittest
|
||||
|
||||
from ironic_prometheus_exporter.parsers import ipmi
|
||||
from prometheus_client import CollectorRegistry
|
||||
|
||||
|
||||
# Shared sample ironic notification; loaded once for every test.
# Context manager so the file handle is not leaked at import time.
with open('./ironic_prometheus_exporter/tests/data.json') as _data_file:
    DATA = json.load(_data_file)
|
||||
|
||||
|
||||
class TestPayloadsParser(unittest.TestCase):
    """End-to-end checks of the ipmi parser against the sample payload.

    Each test runs one sensor category through metric_names() and
    prometheus_format() using the canonical CATEGORY_PARAMS, then checks
    the registered sample values.  The per-category boilerplate lives in
    small private helpers; a stray debug print() was removed.
    """

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']
        self.metric_registry = CollectorRegistry()

    def _metric_names(self, category, section):
        # Build the metric-name -> entries mapping for one category.
        params = ipmi.CATEGORY_PARAMS[category]
        return ipmi.metric_names(self.payload[section], params['prefix'],
                                 params['sufix'], **params['extra_params'])

    def _format(self, section, names, category):
        # Register the section's metrics into the test registry.
        use_ipmi = ipmi.CATEGORY_PARAMS[category]['use_ipmi_format']
        ipmi.prometheus_format(self.payload[section], self.node_name,
                               self.metric_registry, names, use_ipmi)

    def _sample(self, metric, labels):
        return self.metric_registry.get_sample_value(metric, labels)

    def test_management_parser(self):
        names = self._metric_names('management', 'Management')
        self.assertEqual(len(names), 1)
        self.assertIn('baremetal_front_led_panel', names)

        self._format('Management', names, 'management')
        self.assertEqual(0.0, self._sample(
            'baremetal_front_led_panel',
            {'node_name': 'knilab-master-u9',
             'entity_id': '7.1 (System Board)'}))

    def test_temperature_parser(self):
        names = self._metric_names('temperature', 'Temperature')
        self.assertEqual(len(names), 3)
        self.assertIn('baremetal_temp_celsius', names)
        self.assertIn('baremetal_exhaust_temp_celsius', names)
        self.assertIn('baremetal_inlet_temp_celsius', names)

        self._format('Temperature', names, 'temperature')
        self.assertEqual(21.0, self._sample(
            'baremetal_inlet_temp_celsius',
            {'node_name': self.node_name,
             'entity_id': '7.1 (System Board)'}))
        self.assertEqual(36.0, self._sample(
            'baremetal_exhaust_temp_celsius',
            {'node_name': self.node_name,
             'entity_id': '7.1 (System Board)'}))
        self.assertEqual(44.0, self._sample(
            'baremetal_temp_celsius',
            {'node_name': self.node_name, 'sensor_id': 'Temp (0x1)',
             'entity_id': '3.1 (Processor)'}))
        self.assertEqual(43.0, self._sample(
            'baremetal_temp_celsius',
            {'node_name': self.node_name, 'sensor_id': 'Temp (0x2)',
             'entity_id': '3.2 (Processor)'}))

    def test_system_parser(self):
        names = self._metric_names('system', 'System')
        self.assertEqual(len(names), 2)
        self.assertIn('baremetal_system_unknown', names)
        self.assertIn('baremetal_system_post_err', names)

        self._format('System', names, 'system')
        self.assertEqual(0.0, self._sample(
            'baremetal_system_unknown',
            {'node_name': self.node_name, 'entity_id': '34.1 (BIOS)'}))
        self.assertEqual(None, self._sample(
            'baremetal_system_post_err',
            {'node_name': self.node_name, 'entity_id': '34.1 (BIOS)'}))

    def test_current_parser(self):
        names = self._metric_names('current', 'Current')
        self.assertEqual(len(names), 2)
        self.assertIn('baremetal_current', names)
        self.assertIn('baremetal_pwr_consumption', names)

        self._format('Current', names, 'current')
        self.assertEqual(264.0, self._sample(
            'baremetal_pwr_consumption',
            {'node_name': self.node_name,
             'entity_id': '7.1 (System Board)'}))
        self.assertEqual(0.600, self._sample(
            'baremetal_current',
            {'node_name': self.node_name,
             'entity_id': '10.1 (Power Supply)',
             'sensor_id': 'Current 1 (0x6b)'}))
        self.assertEqual(0.600, self._sample(
            'baremetal_current',
            {'node_name': self.node_name,
             'entity_id': '10.2 (Power Supply)',
             'sensor_id': 'Current 2 (0x6c)'}))

    def test_version_parser(self):
        names = self._metric_names('version', 'Version')
        self.assertEqual(len(names), 3)
        self.assertIn('baremetal_tpm_presence', names)
        self.assertIn('baremetal_hdwr_version_err', names)
        self.assertIn('baremetal_chassis_mismatch', names)

        self._format('Version', names, 'version')
        self.assertEqual(1.0, self._sample(
            'baremetal_tpm_presence',
            {'node_name': self.node_name, 'entity_id': '34.1 (BIOS)'}))
        self.assertEqual(None, self._sample(
            'baremetal_hdwr_version_err',
            {'node_name': self.node_name, 'entity_id': '34.1 (BIOS)'}))
        self.assertEqual(0.0, self._sample(
            'baremetal_chassis_mismatch',
            {'node_name': self.node_name, 'entity_id': '34.1 (BIOS)'}))

    def test_memory_parser(self):
        names = self._metric_names('memory', 'Memory')
        self.assertEqual(len(names), 10)
        for expected in ('baremetal_memory_ecc_corr_err',
                         'baremetal_idpt_mem_fail',
                         'baremetal_memory_ecc_uncorr_err',
                         'baremetal_memory_mirrored',
                         'baremetal_mem_ecc_warning',
                         'baremetal_memory_b',
                         'baremetal_memory_a',
                         'baremetal_memory_usb_over_current',
                         'baremetal_memory_post_pkg_repair',
                         'baremetal_memory_spared'):
            self.assertIn(expected, names)

        self._format('Memory', names, 'memory')
        bios = {'node_name': self.node_name, 'entity_id': '34.1 (BIOS)'}
        mem_dev = {'node_name': self.node_name,
                   'entity_id': '32.1 (Memory Device)'}
        self.assertEqual(None, self._sample(
            'baremetal_mem_ecc_warning', bios))
        self.assertEqual(1.0, self._sample(
            'baremetal_memory_post_pkg_repair', bios))
        self.assertEqual(0.0, self._sample(
            'baremetal_idpt_mem_fail', bios))
        self.assertEqual(0.0, self._sample(
            'baremetal_memory_spared', bios))
        self.assertEqual(0.0, self._sample(
            'baremetal_memory_mirrored', bios))
        self.assertEqual(None, self._sample(
            'baremetal_memory_usb_over_current', bios))
        self.assertEqual(1.0, self._sample(
            'baremetal_memory_ecc_uncorr_err', bios))
        self.assertEqual(0.0, self._sample(
            'baremetal_memory_b', mem_dev))
        self.assertEqual(0.0, self._sample(
            'baremetal_memory_a', mem_dev))
        self.assertEqual(1.0, self._sample(
            'baremetal_memory_ecc_corr_err', bios))

    def test_power_parser(self):
        names = self._metric_names('power', 'Power')
        self.assertEqual(len(names), 2)
        self.assertIn('baremetal_power_ps_redundancy', names)
        self.assertIn('baremetal_power_status', names)

        self._format('Power', names, 'power')
        self.assertEqual(None, self._sample(
            'baremetal_power_ps_redundancy',
            {'node_name': self.node_name,
             'entity_id': '7.1 (System Board)'}))
        self.assertEqual(0.0, self._sample(
            'baremetal_power_status',
            {'node_name': self.node_name, 'sensor_id': 'Status (0x86)',
             'entity_id': '10.2 (Power Supply)'}))
        self.assertEqual(0.0, self._sample(
            'baremetal_power_status',
            {'node_name': self.node_name, 'sensor_id': 'Status (0x85)',
             'entity_id': '10.1 (Power Supply)'}))

    def test_watchdog2_parser(self):
        # NOTE: removed a leftover debug print('WATCHDOG2').
        names = self._metric_names('watchdog2', 'Watchdog2')
        self.assertEqual(len(names), 2)
        self.assertIn('baremetal_os_watchdog_time', names)
        self.assertIn('baremetal_os_watchdog', names)

        self._format('Watchdog2', names, 'watchdog2')
        self.assertEqual(0.0, self._sample(
            'baremetal_os_watchdog_time',
            {'node_name': self.node_name, 'entity_id': '34.1 (BIOS)'}))
        self.assertEqual(0.0, self._sample(
            'baremetal_os_watchdog',
            {'node_name': self.node_name,
             'entity_id': '7.1 (System Board)'}))

    def test_fan_parser(self):
        names = self._metric_names('fan', 'Fan')
        self.assertEqual(len(names), 2)
        self.assertIn('baremetal_fan_redundancy_rpm', names)
        self.assertIn('baremetal_fan_rpm', names)

        self._format('Fan', names, 'fan')
        self.assertEqual(0.0, self._sample(
            'baremetal_fan_redundancy_rpm',
            {'node_name': self.node_name,
             'entity_id': '7.1 (System Board)'}))
        expected_rpm = {
            'Fan4A (0x3b)': 9960.0,
            'Fan1B (0x40)': 5520.0,
            'Fan8B (0x47)': 5520.0,
            'Fan3A (0x3a)': 9360.0,
            'Fan2A (0x39)': 9360.0,
            'Fan6B (0x45)': 5520.0,
            'Fan5A (0x3c)': 9720.0,
            'Fan3B (0x42)': 5520.0,
            'Fan7A (0x3e)': 9360.0,
            'Fan7B (0x46)': 5520.0,
            'Fan4B (0x43)': 5880.0,
            'Fan1A (0x38)': 9360.0,
            'Fan6A (0x3d)': 9360.0,
            'Fan2B (0x41)': 5520.0,
            'Fan5B (0x44)': 5640.0,
            'Fan8A (0x3f)': 9240.0,
        }
        for sensor_id, rpm in expected_rpm.items():
            self.assertEqual(rpm, self._sample(
                'baremetal_fan_rpm',
                {'node_name': self.node_name,
                 'sensor_id': sensor_id,
                 'entity_id': '7.1 (System Board)'}))
|
@ -1,5 +1,6 @@
|
||||
pbr!=2.1.0,>=2.0.0 # Apache-2.0
|
||||
flake8
|
||||
stevedore>=1.20.0 # Apache-2.0
|
||||
oslo.messaging!=9.0.0 # Apache-2.0
|
||||
stestr>=2.0.0 # Apache-2.0
|
||||
oslo.messaging>=9.4.0 # Apache-2.0
|
||||
uWSGI # Apache-2.0
|
||||
Flask>=0.11,!=0.12.3
|
||||
prometheus_client # Apache-2.0
|
||||
|
@ -24,3 +24,4 @@ packages =
|
||||
[entry_points]
|
||||
oslo.messaging.notify.drivers =
|
||||
prometheus_exporter = ironic_prometheus_exporter.messaging:PrometheusFileDriver
|
||||
file_exporter = ironic_prometheus_exporter.messaging:SimpleFileDriver
|
||||
|
3
test-requirements.txt
Normal file
3
test-requirements.txt
Normal file
@ -0,0 +1,3 @@
|
||||
flake8
|
||||
stestr>=2.0.0 # Apache-2.0
|
||||
oslotest>=3.2.0 # Apache-2.0
|
4
tox.ini
4
tox.ini
@ -9,7 +9,9 @@ install_command = pip install {opts} {packages}
|
||||
setenv =
|
||||
VIRTUAL_ENV={envdir}
|
||||
PYTHONWARNINGS=default::DeprecationWarning
|
||||
deps = -r{toxinidir}/requirements.txt
|
||||
deps =
|
||||
-r{toxinidir}/test-requirements.txt
|
||||
-r{toxinidir}/requirements.txt
|
||||
commands = stestr run {posargs}
|
||||
|
||||
[testenv:pep8]
|
||||
|
Loading…
Reference in New Issue
Block a user