Updates for Prometheus Exporter
- Split requirements into requirements and test-requirements
- Update .gitignore
- Add an initial parser and tests for the collected IPMI metrics (the remaining sensor categories will be added in another PR: Cable, Drive, Processor, Battery, Module, Entity, Add-in, Critical, Voltage, OS, Event, Physical)
- Update the Prometheus driver to generate files with the metrics in Prometheus format
- Create a simple driver to be used when we want to collect a sample of the data
This commit is contained in:
parent
9c305eca7d
commit
8167352772
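
A sketch of the flow these changes introduce, using only names that appear in the diff below (run from the repository root, the same way the new tests load their sample data):

import json

from ironic_prometheus_exporter.parsers import manager

# Same sample notification the new tests use; any message with this structure works.
message = json.load(open('./ironic_prometheus_exporter/tests/data.json'))
node_metrics = manager.ParserManager(message).merge_information()
# PrometheusFileDriver.notify() writes this text to <files_dir>/<node_name>.
print(node_metrics)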
.gitignore (2 changes)
@@ -126,3 +126,5 @@ dmypy.json
# End of https://www.gitignore.io/api/python

.stestr/
AUTHORS
ChangeLog
@@ -1,12 +1,13 @@
import os
import json

from ironic_prometheus_exporter.parsers import manager
from oslo_config import cfg
from oslo_messaging.notify import notifier


prometheus_opts = [
    cfg.StrOpt('file_path', required=True,
               help='Path for the json file where the metrics will be stored.')
    cfg.StrOpt('files_dir', required=True,
               help='Directory where the files will be written.')
]

@@ -18,13 +19,32 @@ class PrometheusFileDriver(notifier.Driver):
    """Publish notifications into a File to be used by Prometheus"""

    def __init__(self, conf, topics, transport):
        self.file_path = conf.oslo_messaging_notifications.file_path
        if not self.file_path.endswith('.json'):
            raise Exception('The file should end with .json')
        if not os.path.exists(os.path.dirname(self.file_path)):
            os.makedirs(os.path.dirname(self.file_path))
        self.files_dir = conf.oslo_messaging_notifications.files_dir
        if not os.path.exists(self.files_dir):
            os.makedirs(os.path.dirname(self.files_dir))
        super(PrometheusFileDriver, self).__init__(conf, topics, transport)

    def notify(self, ctxt, message, priority, retry):
        with open(self.file_path, 'w') as prometheus_file:
            json.dump(message, prometheus_file)
        try:
            node_parser_manager = manager.ParserManager(message)
            node_metrics = node_parser_manager.merge_information()
            node_name = message['payload']['node_name']
            node_file = open(os.path.join(self.files_dir, node_name), 'w')
            node_file.write(node_metrics)
            node_file.close()
        except Exception as e:
            print(e)


class SimpleFileDriver(notifier.Driver):

    def __init__(self, conf, topics, transport):
        self.files_dir = conf.oslo_messaging_notifications.files_dir
        if not os.path.exists(self.files_dir):
            os.makedirs(os.path.dirname(self.files_dir))
        super(SimpleFileDriver, self).__init__(conf, topics, transport)

    def notify(self, ctx, message, priority, retry):
        file = open(os.path.join(self.files_dir, 'simplefile'), 'w')
        file.write(message)
        file.close()
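
For context on how this driver gets loaded: oslo.messaging resolves driver = prometheus_exporter through the oslo.messaging.notify.drivers entry point added in setup.cfg further down. A minimal wiring sketch, assuming files_dir has already been set in the [oslo_messaging_notifications] group of the loaded configuration (the tests below do this with self.config(...)):

import oslo_messaging
from oslo_config import cfg

# Assumes the loaded configuration provides, e.g.:
#   [oslo_messaging_notifications]
#   driver = prometheus_exporter
#   files_dir = /tmp/ironic_prometheus_exporter
conf = cfg.CONF
transport = oslo_messaging.get_notification_transport(conf)
notifier = oslo_messaging.Notifier(transport, driver='prometheus_exporter',
                                   topics=['my_topics'])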
ironic_prometheus_exporter/parsers/__init__.py (new file, empty)

ironic_prometheus_exporter/parsers/ipmi.py (new file, 616 lines)
@@ -0,0 +1,616 @@
import re

# NOTE (iurygregory): most of the sensor readings come in the ipmi format
# each type of sensor consider a different range of values that aren't integers
# (eg: 0h, 2eh), 0h will be published as 0 and the other values as 1, this way
# we will be able to create prometheus alerts.
# Documentation: https://www.intel.com/content/www/us/en/servers/ipmi/
# ipmi-second-gen-interface-spec-v2-rev1-1.html


def add_prometheus_type(name, metric_type):
    return '# TYPE %s %s' % (name, metric_type)


class Management(object):

    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_'
        metric_dic = {}
        for entry in self.payload:
            e = entry.lower().split()
            label = '_'.join(e[:-1])
            metric_name = prefix + label
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                if self.payload[entry]['Sensor Reading'] == "0h":
                    entries_values[entry] = 0
                else:
                    entries_values[entry] = 1
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)


class Temperature(object):

    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_'
        sufix = 'temp_celcius'
        metric_dic = {}
        for entry in self.payload:
            e = entry.split()[0]
            label = e.lower()
            metric_name = prefix + sufix
            if label not in sufix:
                metric_name = prefix + label + "_" + sufix
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                value = self.payload[entry]['Sensor Reading'].split()
                if not re.search(r'(\d+(\.\d*)?|\.\d+)', value[0]):
                    raise Exception("No valid value in Sensor Reading")
                entries_values[entry] = value[0]
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)


class System(object):

    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_system_'
        metric_dic = {}
        for entry in self.payload:
            e = entry.lower().split()
            label = '_'.join(e[:-1])
            metric_name = prefix + label
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                if self.payload[entry]['Sensor Reading'] == 'No Reading':
                    entries_values[entry] = None
                else:
                    if self.payload[entry]['Sensor Reading'] == "0h":
                        entries_values[entry] = 0
                    else:
                        entries_values[entry] = 1
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                if values[e] is None:
                    continue
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)


class Current(object):

    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_'
        metric_dic = {}
        for entry in self.payload:
            e = re.sub(r'[\d]', '', entry.lower()).split()
            label = '_'.join(e[:-1])
            sufix = '_' + self.payload[entry]['Sensor Reading'].split()[-1]
            metric_name = prefix + label + sufix.lower()
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                value = self.payload[entry]['Sensor Reading'].split()
                if not re.search(r'(\d+(\.\d*)?|\.\d+)', value[0]):
                    raise Exception("No valid value in Sensor Reading")
                entries_values[entry] = value[0]
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)


class Version(object):

    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_'
        metric_dic = {}
        for entry in self.payload:
            e = entry.lower().split()
            label = '_'.join(e[:-1])
            metric_name = prefix + label
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                if self.payload[entry]['Sensor Reading'] == 'No Reading':
                    entries_values[entry] = None
                else:
                    if self.payload[entry]['Sensor Reading'] == "0h":
                        entries_values[entry] = 0
                    else:
                        entries_values[entry] = 1
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                if values[e] is None:
                    continue
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)


class Memory(object):

    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_'
        metric_dic = {}
        for entry in self.payload:
            e = entry.lower().split()
            label = '_'.join(e[:-1])
            if 'memory' not in label:
                label = 'memory_' + label
            metric_name = prefix + label.replace('-', '_')
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                if self.payload[entry]['Sensor Reading'] == 'No Reading':
                    entries_values[entry] = None
                else:
                    if self.payload[entry]['Sensor Reading'] == "0h":
                        entries_values[entry] = 0
                    else:
                        entries_values[entry] = 1
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                if values[e] is None:
                    continue
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)


class Power(object):

    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_power_'
        metric_dic = {}
        for entry in self.payload:
            e = entry.lower().split()
            label = '_'.join(e[:-1])
            metric_name = prefix + label.replace('-', '_')
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                no_values = ['No Reading', 'Disabled']
                if self.payload[entry]['Sensor Reading'] in no_values:
                    entries_values[entry] = None
                else:
                    if self.payload[entry]['Sensor Reading'] == "0h":
                        entries_values[entry] = 0
                    else:
                        entries_values[entry] = 1
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                if values[e] is None:
                    continue
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)


class Watchdog2(object):

    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_'
        metric_dic = {}
        for entry in self.payload:
            e = entry.lower().split()
            label = '_'.join(e[:-1])
            metric_name = prefix + label.replace('-', '_')
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                no_values = ['No Reading', 'Disabled']
                if self.payload[entry]['Sensor Reading'] in no_values:
                    entries_values[entry] = None
                else:
                    if self.payload[entry]['Sensor Reading'] == "0h":
                        entries_values[entry] = 0
                    else:
                        entries_values[entry] = 1
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                if values[e] is None:
                    continue
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)


class Fan(object):
    def __init__(self, payload, node_name):
        self.payload = payload
        self.node_name = node_name

    def _metric_names(self):
        prefix = 'baremetal_'
        metric_dic = {}
        for entry in self.payload:
            sufix = ''
            e = re.sub(r'[\d].*$', '', entry.lower())
            e = re.sub(r'[\(\)]', '', e).split()
            label = '_'.join(e)
            label_unit = self.payload[entry]['Sensor Reading'].split()
            if len(label_unit) > 1:
                sufix = '_' + label_unit[-1].lower()
            metric_name = prefix + label.replace('-', '_') + sufix
            if metric_name in metric_dic:
                metric_dic[metric_name].append(entry)
            else:
                metric_dic[metric_name] = [entry]
        return metric_dic

    def _extract_labels(self, entries):
        deafult_label = 'node_name="%s"' % self.node_name
        if len(entries) == 1:
            return {entries[0]: '{%s}' % deafult_label}
        entries_labels = {}
        for entry in entries:
            try:
                sensor = self.payload[entry]['Sensor ID'].split()
                sensor_id = str(int(re.sub(r'[\(\)]', '', sensor[-1]), 0))
                metric_label = [deafult_label,
                                'sensor="%s"' % (sensor[0] + sensor_id)]
                entries_labels[entry] = '{%s}' % ','.join(metric_label)
            except Exception as e:
                print(e)
        return entries_labels

    def _extract_values(self, entries):
        entries_values = {}
        for entry in entries:
            try:
                no_values = ['No Reading', 'Disabled']
                if self.payload[entry]['Sensor Reading'] in no_values:
                    entries_values[entry] = None
                else:
                    values = self.payload[entry]['Sensor Reading'].split()
                    if len(values) > 1:
                        entries_values[entry] = values[0]
                    elif values[0] == "0h":
                        entries_values[entry] = 0
                    else:
                        entries_values[entry] = 1
            except Exception as e:
                print(e)
        return entries_values

    def prometheus_format(self):
        prometheus_info = []
        available_metrics = self._metric_names()

        for metric in available_metrics:
            prometheus_info.append(add_prometheus_type(metric, 'gauge'))
            entries = available_metrics[metric]
            labels = self._extract_labels(entries)
            values = self._extract_values(entries)
            for e in entries:
                if values[e] is None:
                    continue
                prometheus_info.append("%s%s %s" % (metric, labels[e],
                                                    values[e]))
        return '\n'.join(prometheus_info)
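
To make the emitted format concrete, here is a small self-contained example that feeds the Fan parser above a hand-written reading. The sensor names and values are made up, but the 'Sensor ID' / 'Sensor Reading' fields are the ones the code reads, and the 0h-to-0 mapping is the one described in the NOTE at the top of the file:

from ironic_prometheus_exporter.parsers import ipmi

# Hypothetical IPMI readings for a node called 'node-1'.
fan_payload = {
    'Fan1A (0x30)': {'Sensor ID': 'Fan1A (0x30)',
                     'Sensor Reading': '4560 (+/- 120) RPM'},
    'Fan Redundancy (0x78)': {'Sensor ID': 'Fan Redundancy (0x78)',
                              'Sensor Reading': '0h'},
}
print(ipmi.Fan(fan_payload, 'node-1').prometheus_format())
# Expected shape of the output (one '# TYPE' line per metric name, then samples):
#   # TYPE baremetal_fan_rpm gauge
#   baremetal_fan_rpm{node_name="node-1"} 4560
#   # TYPE baremetal_fan_redundancy gauge
#   baremetal_fan_redundancy{node_name="node-1"} 0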
ironic_prometheus_exporter/parsers/manager.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from ironic_prometheus_exporter.parsers import ipmi


class ParserManager(object):

    def __init__(self, data):

        node_name = data['payload']['node_name']
        payload = data['payload']['payload']
        self.ipmi_objects = [
            ipmi.Management(payload['Management'], node_name),
            ipmi.Temperature(payload['Temperature'], node_name),
            ipmi.System(payload['System'], node_name),
            ipmi.Current(payload['Current'], node_name),
            ipmi.Version(payload['Version'], node_name),
            ipmi.Memory(payload['Memory'], node_name),
            ipmi.Power(payload['Power'], node_name),
            ipmi.Watchdog2(payload['Watchdog2'], node_name),
            ipmi.Fan(payload['Fan'], node_name)
        ]

    def merge_information(self):
        info = ''
        for obj in self.ipmi_objects:
            info += obj.prometheus_format() + '\n'
        return info.rstrip('\n')
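
The structure ParserManager expects from a notification, with the keys taken from the lookups above and from tests/data.json; the concrete values below are placeholders:

from ironic_prometheus_exporter.parsers import manager

message = {
    'payload': {
        'node_name': 'node-1',   # also used by the driver as the output file name
        'payload': {
            'Management': {}, 'Temperature': {}, 'System': {},
            'Current': {}, 'Version': {}, 'Memory': {},
            'Power': {}, 'Watchdog2': {}, 'Fan': {},
        },
    },
}
metrics_text = manager.ParserManager(message).merge_information()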
ironic_prometheus_exporter/tests/data.json (new file, 2348 lines; file diff suppressed because it is too large)

ironic_prometheus_exporter/tests/data2.json (new file, 2348 lines; file diff suppressed because it is too large)
@@ -1,5 +1,8 @@
import json
import os
import oslo_messaging

from ironic_prometheus_exporter.messaging import PrometheusFileDriver
from oslo_messaging.tests import utils as test_utils

@@ -8,12 +11,65 @@ class TestPrometheusFileNotifier(test_utils.BaseTestCase):
    def setUp(self):
        super(TestPrometheusFileNotifier, self).setUp()

    def test_notifier(self):
        self.config(file_path='/tmp/ironic_prometheus_exporter/test.json',
    def test_instanciate(self):
        self.config(files_dir='/tmp/ironic_prometheus_exporter',
                    group='oslo_messaging_notifications')
        transport = oslo_messaging.get_notification_transport(self.conf)
        oslo_messaging.Notifier(transport, driver='prometheus_exporter',
                                topics=['my_topics'])

        self.assertEqual(self.conf.oslo_messaging_notifications.file_path,
                         "/tmp/ironic_prometheus_exporter/test.json")
        self.assertEqual(self.conf.oslo_messaging_notifications.files_dir,
                         "/tmp/ironic_prometheus_exporter")
        self.assertTrue(os.path.isdir(
            self.conf.oslo_messaging_notifications.files_dir))

    def test_messages_from_same_node(self):
        self.config(files_dir='/tmp/ironic_prometheus_exporter',
                    group='oslo_messaging_notifications')
        transport = oslo_messaging.get_notification_transport(self.conf)
        driver = PrometheusFileDriver(self.conf, None, transport)

        msg1 = json.load(open('./ironic_prometheus_exporter/tests/data.json'))
        node1 = msg1['payload']['node_name']
        msg2 = json.load(open('./ironic_prometheus_exporter/tests/data2.json'))
        # Override data2 node_name
        msg2['payload']['node_name'] = node1
        node2 = msg2['payload']['node_name']
        self.assertNotEqual(msg1['payload']['timestamp'],
                            msg2['payload']['timestamp'])

        driver.notify(None, msg1, 'info', 0)
        driver.notify(None, msg2, 'info', 0)

        DIR = self.conf.oslo_messaging_notifications.files_dir
        all_files = [name for name in os.listdir(DIR)
                     if os.path.isfile(os.path.join(DIR, name))]
        self.assertEqual(node1, node2)
        self.assertEqual(len(all_files), 1)
        self.assertTrue(node1 and node2 in all_files)
        for f in all_files:
            os.remove(os.path.join(DIR, f))

    def test_messages_from_different_nodes(self):
        self.config(files_dir='/tmp/ironic_prometheus_exporter',
                    group='oslo_messaging_notifications')
        transport = oslo_messaging.get_notification_transport(self.conf)
        driver = PrometheusFileDriver(self.conf, None, transport)

        msg1 = json.load(open('./ironic_prometheus_exporter/tests/data.json'))
        node1 = msg1['payload']['node_name']
        msg2 = json.load(open('./ironic_prometheus_exporter/tests/data2.json'))
        node2 = msg2['payload']['node_name']
        self.assertNotEqual(msg1['payload']['timestamp'],
                            msg2['payload']['timestamp'])

        driver.notify(None, msg1, 'info', 0)
        driver.notify(None, msg2, 'info', 0)

        DIR = self.conf.oslo_messaging_notifications.files_dir
        all_files = [name for name in os.listdir(DIR)
                     if os.path.isfile(os.path.join(DIR, name))]
        self.assertEqual(len(all_files), 2)
        self.assertTrue(node1 and node2 in all_files)
        for f in all_files:
            os.remove(os.path.join(DIR, f))
ironic_prometheus_exporter/tests/test_ipmi_parser.py
Normal file
124
ironic_prometheus_exporter/tests/test_ipmi_parser.py
Normal file
@ -0,0 +1,124 @@
import json
import unittest

from ironic_prometheus_exporter.parsers import ipmi, manager


DATA = json.load(open('./ironic_prometheus_exporter/tests/data.json'))


class TestPayloadManagementParser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['Management']

    def test_parser(self):
        management_parser = ipmi.Management(self.payload, self.node_name)
        metrics = management_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 2)


class TestPayloadTemperatureParser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['Temperature']

    def test_parser(self):
        temperature_parser = ipmi.Temperature(self.payload, self.node_name)
        metrics = temperature_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 7)


class TestPayloadSystemParser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['System']

    def test_parser(self):
        system_parser = ipmi.System(self.payload, self.node_name)
        metrics = system_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 3)


class TestPayloadCurrentParser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['Current']

    def test_parser(self):
        current_parser = ipmi.Current(self.payload, self.node_name)
        metrics = current_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 5)


class TestPayloadVersionParser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['Version']

    def test_parser(self):
        version_parser = ipmi.Version(self.payload, self.node_name)
        metrics = version_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 5)


class TestPayloadMemoryParser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['Memory']

    def test_parser(self):
        memory_parser = ipmi.Memory(self.payload, self.node_name)
        metrics = memory_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 18)


class TestPayloadPowerParser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['Power']

    def test_parser(self):
        power_parser = ipmi.Power(self.payload, self.node_name)
        metrics = power_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 4)


class TestPayloadWatchdog2Parser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['Watchdog2']

    def test_parser(self):
        watchdog_parser = ipmi.Watchdog2(self.payload, self.node_name)
        metrics = watchdog_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 4)


class TestPayloadFanParser(unittest.TestCase):

    def setUp(self):
        self.node_name = DATA['payload']['node_name']
        self.payload = DATA['payload']['payload']['Fan']

    def test_parser(self):
        fan_parser = ipmi.Fan(self.payload, self.node_name)
        metrics = fan_parser.prometheus_format()
        self.assertEqual(len(metrics.split('\n')), 19)


class TestIpmiManager(unittest.TestCase):

    def test_manager(self):
        node_manager = manager.ParserManager(DATA)
        node_metrics = node_manager.merge_information()
        print(node_metrics.split('\n'))
        self.assertEqual(len(node_metrics.split('\n')), 67)
@@ -1,5 +1,3 @@
pbr!=2.1.0,>=2.0.0 # Apache-2.0
flake8
stevedore>=1.20.0 # Apache-2.0
oslo.messaging!=9.0.0 # Apache-2.0
stestr>=2.0.0 # Apache-2.0
oslo.messaging==9.4.0 # Apache-2.0
@@ -24,3 +24,4 @@ packages =
[entry_points]
oslo.messaging.notify.drivers =
    prometheus_exporter = ironic_prometheus_exporter.messaging:PrometheusFileDriver
    file_exporter = ironic_prometheus_exporter.messaging:SimpleFileDriver
test-requirements.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
flake8
stestr>=2.0.0 # Apache-2.0
oslotest>=3.2.0 # Apache-2.0
tox.ini (4 changes)
@@ -9,7 +9,9 @@ install_command = pip install {opts} {packages}
setenv =
   VIRTUAL_ENV={envdir}
   PYTHONWARNINGS=default::DeprecationWarning
deps = -r{toxinidir}/requirements.txt
deps =
  -r{toxinidir}/test-requirements.txt
  -r{toxinidir}/requirements.txt
commands = stestr run {posargs}

[testenv:pep8]