Extracting data from the Capacity Planning graph

Fabricio
fmaced1

(uses CentOS)

Posted on 01/05/2018 - 18:31h

Good evening everyone,

I've been studying and testing Nagios and found the capacity planning report quite interesting, though a bit confusing (it can help explain why investing in more resources is worthwhile). Is there any way to extract the data from this report so I can customize the visualization?

I tried the CSV export option, but the result does not match the table shown with the graph.

I found the Python script that Nagios uses to read the ".rrd" performance data and compute the matrix, but since I don't know much Python I'm struggling a bit. I believe the key is in this script, though.

/usr/local/nagiosxi/html/includes/components/capacityplanning/backend/capacityplanning.py

Could someone help me change this Python script so it extracts the Observed and Predicted data in "Timestamp,value" format?

e.g.: capacityplanning.py -H "localhost" -S "PING" -T "rta" -M "Holt-Winters" -P "30" --json-indent=3
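For what it's worth, the sample JSON at the end of this post shows that this command already emits the Observed and Predicted series as Highcharts arrays, with pointStart and pointInterval expressed in milliseconds, so one option is to redirect the JSON to a file and convert it afterwards rather than patching the script itself; a converter sketch follows the sample output below.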


#!/usr/bin/env python

import os
import optparse
import warnings
import logging
import rrddatastore
import forecast
import re
import datetime
import numpy as np

warnings.filterwarnings('ignore')

try:
    import json
except ImportError:
    import simplejson as json



logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(levelname)-8s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S',
                    filename='/usr/local/nagiosxi/var/components/capacityplanning.log',
                    filemode='a')




def parse_period(period):
    try:
        period = int(period)
    except TypeError:
        period = 1
    except ValueError:
        WEEKS_IN_YEAR = 52
        WEEKS_IN_MONTH = 4

        period = str(period)
        result = re.search(r'.*([0-9]+)(.*)', period)
        if result:
            prefix = int(result.group(1))
            unit = result.group(2).strip().lower()
            if 'year' in unit:
                period = prefix * WEEKS_IN_YEAR
            elif 'month' in unit:
                period = prefix * WEEKS_IN_MONTH
            else:
                period = prefix
        else:
            period = 1
    return period


def parse_values(dtype, parser, values, message):
    if values is not None:
        try:
            return [dtype(x) for x in values.split(',')]
        except Exception, e:
            parser.error(message)
    else:
        return None




def parse_args():
    """Parse the input args of the script. """

    parser = optparse.OptionParser()

    datagroup = optparse.OptionGroup(parser, "Data Selection Options",
        "Use these to specify the host/service/track to report on.")

    datagroup.add_option("-D", "--dir", default="/usr/local/nagios/share/perfdata",
        help="Base directory to search for data files (default: %default).")
    datagroup.add_option("-H", "--host",
        help="Host name to run report for. *Required*")
    datagroup.add_option("-S", "--service",
        help="Service name to run report for. *Required*")
    datagroup.add_option("-T", "--track",
        help="Track name to run report for.")

    parser.add_option_group(datagroup)

    #capacityplanning.py -H "localhost" -S "PING" -T "rta" -M "Holt-Winters" -P "2" --json-indent=INDENT

    manipulation = optparse.OptionGroup(parser, "Extrapolation Options",
        "Use these to specify how the data will be extrapolated.")

    manipulation.add_option("-M", "--method", default="Holt-Winters",
        choices=['Holt-Winters', 'Linear Fit', 'Quadratic Fit', 'Cubic Fit'],
        help="Extrapolation method: Holt-Winters, Linear Fit, Quadratic Fit or Cubic Fit (default: %default).")
    manipulation.add_option("-E", "--end", default="now",
        help="End time of observed data to extrapolate from (default: %default).")
    manipulation.add_option("-P", "--period", default=1,
        help="Period of data to use as a basis for future data (default: %default).")
    manipulation.add_option("-s", "--steps", type="int", default=1,
        help="Number of periods to extrapolate out (default: %default).")

    parser.add_option_group(manipulation)

    reporting = optparse.OptionGroup(parser, "Reporting Options",
        "Use these to specify information to be reported.")

    reporting.add_option("--no-highcharts",
        dest="highcharts", action="store_false", default=True,
        help="Suppress output of Highcharts JSON data.")

    reporting.add_option("-j", "--json-indent", type="int", metavar="INDENT",
        help="Specify a JSON indent level for human-readable output.")
    reporting.add_option("-k", "--json-sort", action="store_true", default=False,
        help="Sort JSON keys for human-readable output.")
    reporting.add_option("-p", "--json-prettify", type="int", metavar="INDENT",
        help="Prettify JSON output, sorting the keys and using the specified indent level. This is equivalent to specifying: '--json-indent=INDENT --json-sort'.")

    reporting.add_option("-d", "--dates",
        help="Comma-delimited list of unix timestamps to get extrapolated values for. Output a list of date-string / value pairs.")
    reporting.add_option("-t", "--times",
        help="Comma-delimited list of unix timestamps to get extrapolated values for. Output a list of timestamp / value pairs.")

    reporting.add_option("--gt",
        help="Comma-delimited list of values to return times the extrapolation becomes greater than.")
    reporting.add_option("--ge",
        help="Comma-delimited list of values to return times the extrapolation becomes greater than or equal to.")
    reporting.add_option("--lt",
        help="Comma-delimited list of values to return times the extrapolation becomes less than.")
    reporting.add_option("--le",
        help="Comma-delimited list of values to return times the extrapolation becomes less than or equal to.")
    reporting.add_option("--eq",
        help="Comma-delimited list of values to return times the extrapolation becomes equal to.")

    reporting.add_option("-w", "--warn", action="store_true", default=False,
        help="Return the first time the extrapolation exceeds its track's warning value.")
    reporting.add_option("-c", "--crit", action="store_true", default=False,
        help="Return the first time the extrapolation exceeds its track's critical value.")

    parser.add_option_group(reporting)

    # Process the argv/argc command line.
    options, args = parser.parse_args()

    if not os.path.isdir(options.dir + '/.'):
        parser.error("'%s' is not an accessible directory." % options.dir)
    if not options.host:
        parser.error('Host is a required argument.')
    if not options.service:
        parser.error('Service is a required argument.')
    if not options.track:
        parser.error('Track is a required argument.')

    # Extract a sane period value.
    options.period = parse_period(options.period)

    logging.info("dir: %s, host: %s, service: %s, track: %s, method: %s, steps: %d, period: %d." % (options.dir, options.host, options.service, options.track, options.method, options.steps, options.period))

    if options.json_prettify is not None:
        options.json_indent = options.json_prettify
        options.json_sort = True

    # Convert comma-separated timestamps to lists.
    options.dates = parse_values(int, parser, options.dates,
        "Dates values must be unix timestamps.")
    options.times = parse_values(int, parser, options.times,
        "Times values must be unix timestamps.")

    # Convert comma-separated real (floating point) values to lists.
    options.gt = parse_values(float, parser, options.gt,
        "'Greater than' values must be numbers.")
    options.ge = parse_values(float, parser, options.ge,
        "'Greater than or equal' values must be numbers.")
    options.lt = parse_values(float, parser, options.lt,
        "'Less than' values must be numbers.")
    options.le = parse_values(float, parser, options.le,
        "'Less than or equal' values must be numbers.")
    options.eq = parse_values(float, parser, options.eq,
        "'Equal to' values must be numbers.")

    #logging.info(options)
    return options




def main():
    o = parse_args()

    datastore = rrddatastore.Datastore(o.dir, o.host, o.service, o.track)
    datastore.parse_rrd_file(o.period * 2, o.end)
    datastore.parse_xml_file()
    observed = datastore.dataset
    observed.set_period(o.period)

    if o.method == 'Holt-Winters':
        extrapolated, f, residues = forecast.holt_winters(observed, o.steps)
    elif o.method == 'Linear Fit':
        extrapolated, f, residues = forecast.linear_fit(observed, o.steps)
    elif o.method == 'Quadratic Fit':
        extrapolated, f, residues = forecast.quadratic_fit(observed, o.steps)
    elif o.method == 'Cubic Fit':
        extrapolated, f, residues = forecast.cubic_fit(observed, o.steps)

    # Our output information starts with the forecast and some statistics.
    info = forecast.make_projections(observed, extrapolated, f, residues)

    info['dmean'] = observed.get_mean()
    info['dmax'] = observed.get_max()
    info['emean'] = extrapolated.get_mean()
    info['emax'] = extrapolated.get_max()
    info['unit'] = observed.unit
    info['integrity'] = observed.get_integrity()

    info['t_start'] = observed.start
    info['t_step'] = observed.step
    info['t_stop'] = extrapolated.end

    # Add Highcharts data to the output if requested.
    if o.highcharts:
        info['highcharts'] = [observed.get_highcharts_dataset('Observed'),
                              extrapolated.get_highcharts_dataset('Predicted')]

        fit_params = (observed, observed.start, extrapolated.end, observed.step)

        fit = forecast.get_fitted_dataset(*(fit_params + (f,)))
        fit.label = 'Fit'
        fit_chart = fit.get_highcharts_dataset(fit.label, 'line')
        fit_chart['color'] = '#005656'
        fit_chart['zIndex'] = 1,

        info['highcharts'] += [fit_chart]

    # Process and add dates and times values to the output if present.
    if o.dates is not None:
        info['dates'] = [{'date': datetime.datetime.fromtimestamp(x).strftime('%c'),
                          'value': extrapolated.get_value_on_date(x)
                          } for x in o.dates]
    if o.times is not None:
        info['times'] = [{'time': x,
                          'value': extrapolated.get_value_on_date(x)
                          } for x in o.times]

    # Finally process any requested comparisons against the extrapolation.
    # We'll use a helper function to build a list of points that match values
    # passed to the list options.
    def process_comparison(method, values, key):
        if values is not None:
            info[key] = [time_value_dict(method, x) for x in values]

    # Another helper function that calls a method with a value to get a point
    # p, and then zips that point into a {'time':p[0], 'value':p[1]} dict.
    def time_value_dict(method, value):
        return dict(zip(('time', 'value'), method(value)))

    process_comparison(extrapolated.get_point_gt, o.gt, 'gt')
    process_comparison(extrapolated.get_point_ge, o.ge, 'ge')
    process_comparison(extrapolated.get_point_lt, o.lt, 'lt')
    process_comparison(extrapolated.get_point_le, o.le, 'le')
    process_comparison(extrapolated.get_point_eq, o.eq, 'eq')

    # Our warning and critical values are scalar and not output as arrays.
    if o.warn:
        info['warn'] = time_value_dict(extrapolated.get_point_ge, extrapolated.warn)
    if o.crit:
        info['crit'] = time_value_dict(extrapolated.get_point_ge, extrapolated.crit)

    logging.info("warn: %s, crit: %s" % (extrapolated.warn, extrapolated.crit))

    try:
        info['warn_level'] = float(extrapolated.warn)
        logging.info("warn: %s" % info['warn_level'])
    except Exception:
        pass

    try:
        info['crit_level'] = float(extrapolated.crit)
        logging.info("crit: %s" % info['crit_level'])
    except Exception:
        pass

    # ....ahhhnnnd we're finally done.
    print json.dumps(info, allow_nan=False, sort_keys=o.json_sort, indent=o.json_indent)
    #print json.dumps(info, allow_nan=False)


if __name__ == '__main__':
    try:
        main()
    except Exception, e:
        logging.exception(e)
        info = {
            'emax': None,
            'emean': None,
            'dmax': None,
            'dmean': None,
            'integrity': 0,
            'highcharts': [],
            'error': str(e),
        }
        print json.dumps(info, sort_keys=True)

---------------------------------------------------------------------------------------------

Output:
{
    "emax": 0.086753530266276441,
    "eslope": 3.9106800338371787e-15,
    "evalue_max": 0.079700628533427936,
    "t_start": 1488693600,
    "warn_level": 100.0,
    "dmean": 0.079239679388081879,
    "t_stop": 1543147200,
    "dmax": 0.094486518518518553,
    "integrity": 0.0029744199881023281,
    "unit": "ms",
    "residue": 0.00052850205957779379,
    "emean": 0.079162432183103912,
    "edate": 1543125600,
    "evalue": 0.079139750151537083,
    "nd": 1680.0,
    "ne": 840.0,
    "evalue_min": 0.07857887176964623,
    "adjusted_sigma": 0.18856731199170268,
    "t_step": 21600,
    "crit_level": 500.0,
    "f_of_x_on_date": 0.079239821298840801,
    "sigma": 0.00056087838189084827,
    "highcharts": [
        {
            "pointStart": 1488693600000,
            "pointInterval": 21600000,
            "type": "area",
            "name": "Observed",
            "data": [
                0.079239679388083739,
                0.079239679388083739,
                ...
        {
            "pointStart": 1525003200000,
            "pointInterval": 21600000,
            "type": "area",
            "name": "Predicted",
            "data": [
                0.079185020141343626,
                ...
        {
            "name": "Fit",
            "pointStart": 1488693600000,
            "data": [
                0.079239608432705194,
                0.079239608517175888,
                ...
            "pointInterval": 21600000,
            "zIndex": [
                1
            ],
            "color": "#005656",
            "type": "line"
        }

  





