add Prometheus for accelerated / meerfarbig / datafabrik / werkhaus stats

This commit is contained in:
Steffen Vogel 2022-08-20 00:01:38 +02:00
parent 71eb82932c
commit b9f5d3deb0
10 changed files with 510 additions and 0 deletions

View file

@ -0,0 +1,8 @@
FROM python:3.8-slim

# COPY is preferred over ADD for plain local files (no implicit archive
# unpacking or URL semantics) and creates /app itself, so the explicit
# mkdir is no longer needed.
COPY . /app

# --no-cache-dir keeps pip's download cache out of the image layer.
RUN pip3 install --no-cache-dir /app

ENTRYPOINT [ "/usr/local/bin/accelerated_exporter" ]

View file

@ -0,0 +1,12 @@
# Accelerated Server Stats Exporter for Prometheus
## Examples
```bash
export KDNUMMER=10642
export PASSWORD=$(pass providers/interface.datafabrik.de | head -n1)
# python3 accelerated_stats $* usage
python3 accelerated_stats $* usage_sum
# python3 accelerated_stats $* -f raw -F usage_avg stats
```

View file

@ -0,0 +1,44 @@
from flask import Flask, Response
from accelerated_stats.tenant import Tenant
from accelerated_stats.utils import parse_arguments
# Parse CLI arguments; the exporter has no subcommands.
args = parse_arguments(with_subcommands=False)
args_dict = vars(args)

# Log in to the Accelerated customer interface once at startup.
acc = Tenant(**args_dict)

app = Flask(__name__)
app.config['DEBUG'] = args.debug

# Restrict the export to a single server when --server is given.
# BUG FIX: the original stored the raw --server id (an int) in the list,
# but metrics() calls .get_stats()/.as_dict() on each entry and therefore
# needs Server objects; filter the scraped list by id instead (matching
# the behavior of the stats CLI).
if args.server:
    servers = [s for s in acc.get_servers() if s.id == args.server]
else:
    servers = acc.get_servers()
@app.route('/healthz')
def healthz():
    """Liveness probe endpoint; always answers with a plain-text 'OK'."""
    body = 'OK'
    return Response(body, mimetype='text/plain')
@app.route('/metrics')
def metrics():
    """Render the current stats of all selected servers in the Prometheus
    text exposition format (one '<prefix><stat>{labels} <value>' line per
    statistic)."""
    lines = []
    for server in servers:
        # The label set identifies the server and is identical for every
        # one of its samples, so build it once per server instead of once
        # per statistic (the original rebuilt it inside the inner loop).
        labels = ','.join(f'{p}="{q}"' for p, q in server.as_dict().items())

        stats = server.get_stats()
        for k, v in stats.items():
            if k == 'port':
                # 'port' is a non-numeric identifier, not a valid sample.
                continue
            lines.append(f'{args.prefix}{k}{{{labels}}} {v}')

    resp = '\n'.join(lines)
    return Response(resp, mimetype='text/plain')
def main():
    """Console-script entry point: serve via Flask's builtin dev server."""
    bind_host = '::'  # IPv6 any-address; dual-stack on most platforms
    app.run(host=bind_host)

View file

@ -0,0 +1,74 @@
#!/bin/env python3
import os
import sys
import json
from datetime import datetime
from accelerated_stats import utils
from accelerated_stats.tenant import Tenant
def converter(o):
    """json.dump 'default' hook: serialize datetimes as 'YYYY-MM-DD'.

    BUG FIX: the original implicitly returned None for any other type,
    which json.dump would emit as null, silently hiding unserializable
    values.  The documented contract of 'default' is to raise TypeError
    for objects it cannot handle.
    """
    if isinstance(o, datetime):
        return o.strftime('%Y-%m-%d')
    raise TypeError(f'Object of type {type(o).__name__} is not JSON serializable')
def main():
    """CLI entry point: log in, gather the requested data and print it.

    The positional CMD selects what is fetched (servers, stats, usage,
    usage_sum); --format selects the output:
      * json -- pretty-printed JSON on stdout (dates as 'YYYY-MM-DD')
      * raw  -- CSV-style lines for a list, 'key: value' lines for a dict
    """
    args = utils.parse_arguments()
    args.coerce = not args.no_coerce
    args_dict = vars(args)

    acc = Tenant(**args_dict)
    servers = acc.get_servers()

    # Restrict the output to a single server when --server is given.
    if args.server:
        servers = [s for s in servers if s.id == args.server]

    if args.cmd == 'servers':
        out = [s.as_dict() for s in servers]
    else:
        out = []
        for server in servers:
            if args.cmd == 'stats':
                sout = server.get_stats()
            elif args.cmd == 'usage':
                sout = {
                    'usage': server.get_usage(args.date)
                }
            elif args.cmd == 'usage_sum':
                # Sum the per-day 'in'/'out' volumes over the whole month.
                usage = server.get_usage(args.date)
                usage_sum = {
                    k: sum(d[k] for d in usage) for k in ['in', 'out']
                }
                usage_sum['total'] = usage_sum['in'] + usage_sum['out']
                sout = {
                    'usage_sum': usage_sum
                }

            out.append({
                **server.as_dict(),
                **sout
            })

    if args.format == 'json':
        json.dump(out, sys.stdout, indent=4, default=converter)
        sys.stdout.write('\n')
    elif args.format == 'raw':
        # BUG FIX: the original tested 'out is list' / 'out is dict'
        # (identity against the type object -- always False) and called
        # non-existent methods (list.keys(), dict.enumerate()), discarding
        # the join() results, so raw mode never printed anything.
        if isinstance(out, list):
            if out:
                # Header row from the keys of the first record.
                print(','.join(out[0].keys()))
            for row in out:
                print(','.join(str(v) for v in row.values()))
        elif isinstance(out, dict):
            if args.field:
                print(out[args.field])
            else:
                for k, v in out.items():
                    print(f'{k}: {v}')

View file

@ -0,0 +1,228 @@
import requests
from datetime import datetime
from lxml import etree
from pint import UnitRegistry
import re
import io
# Shared unit registry used to coerce the interface's human-readable
# values (traffic volumes, speeds, uptimes) into numbers.
ureg = UnitRegistry()

# The customer interface uses non-standard unit spellings; register
# aliases so pint can parse them.
# NOTE(review): 'MB'/'KB'/'GB' are defined as decimal (SI) bytes here,
# while Tenant._get_stats() later rewrites those suffixes to MiB/KiB/GiB
# before parsing -- confirm which convention the interface actually uses.
ureg.define('MBit = Mbit')
ureg.define('KBit = kbit')
ureg.define('MB = megabyte')
ureg.define('KB = kilobyte')
ureg.define('GB = gigabyte')
class Server:
    """A single dedicated server as listed in the customer interface.

    Thin wrapper around a Tenant: the tenant performs the actual
    scraping, the server merely carries its identifying attributes.
    """

    def __init__(self, tenant, id, switch=None, port=None, name=None, hostname=None):
        self.tenant = tenant
        self.id = id
        self.switch = switch
        self.port = port
        self.name = name
        self.hostname = hostname

    def get_usage(self, date):
        """Fetch this server's per-day traffic usage for *date*'s month."""
        return self.tenant._get_usage(self.id, date)

    def get_stats(self):
        """Fetch this server's current port statistics."""
        return self.tenant._get_stats(self.id)

    def as_dict(self):
        """Serialize the identifying attributes, omitting unset ones."""
        result = {
            'id': self.id,
            'tenant': int(self.tenant.kdnummer)
        }
        for attr in ('switch', 'port', 'name', 'hostname'):
            value = getattr(self, attr)
            if value:
                result[attr] = value
        return result
class Tenant:
    """Client for the Accelerated customer interface (interface.datafabrik.de).

    The interface offers no machine-readable API, so every accessor
    scrapes the HTML pages with XPath expressions and regexes.  The
    XPaths are therefore tightly coupled to the site's exact markup.
    """

    # 1-based row index of each statistic within the stats table on the
    # '?action=stats' page; used to build XPATH_FIELDS below.
    STATS_FIELDS = {
        # 'port': 1,
        'max_speed': 2,
        'switch_uptime': 3,
        'incoming': 4,
        'outgoing': 5,
        'sum': 6,
        'usage_95perc': 7,
        'usage_avg': 8,
        'current_in': 10,
        'current_out': 11
    }

    # Main content container present on every page.
    XPATH_CONTENT = '//*[@id="accelerated-layout-container-content"]'
    XPATH_USAGE_TABLE = XPATH_CONTENT + '/table'
    XPATH_SERVER_TABLE = XPATH_USAGE_TABLE
    XPATH_STATS_TABLE = XPATH_CONTENT + '/table[3]/tr[1]/td/table'
    # Server rows: skip the leading header rows and the trailing row;
    # each server appears to occupy two table rows ('mod 2' keeps the
    # odd-positioned one) -- NOTE(review): derived from the XPath, verify
    # against the live markup.
    XPATH_SERVER_ROWS = XPATH_SERVER_TABLE + '/tr[position() > 2 and position() < last() and position() mod 2]'
    # Relative XPath (within the stats table) of each field's value cell.
    XPATH_FIELDS = { k: f'tr[{i}]/td[2]' for k, i in STATS_FIELDS.items() }

    def __init__(self, **kwargs):
        """Create an HTTP session and immediately log in.

        Recognized keyword arguments: url, kdnummer (customer number),
        password, coerce (convert scraped strings to numbers, default
        True) and unit_volume / unit_speed / unit_time (target units for
        coercion).  Extra keywords -- e.g. from vars(args) -- are ignored.
        """
        self.sess = requests.Session()

        self.coerce = kwargs.get('coerce', True)
        self.url = kwargs.get('url')
        self.kdnummer = kwargs.get('kdnummer')
        self.password = kwargs.get('password')
        self.unit_volume = kwargs.get('unit_volume', 'TiB')
        self.unit_speed = kwargs.get('unit_speed', 'MBit/s')
        self.unit_time = kwargs.get('unit_time', 's')

        # Authenticate right away so subsequent page fetches succeed.
        self.do_login()

    def as_dict(self):
        """Serialize the tenant's identifying attributes."""
        return {
            'kdnummer': self.kdnummer
        }

    @property
    def login_url(self):
        # Login form target.
        return f'{self.url}/verify.php'

    @property
    def server_url(self):
        # Base URL of the server-management controller.
        return f'{self.url}/CServer.php'

    def usage_url(self, server, date):
        """URL of the per-day usage page of *server* for *date*'s month."""
        d = date.strftime('%Y.%m')
        return f'{self.server_url}?action=detailUsage&id={server}&date={d}'

    def stats_url(self, server):
        """URL of the port-statistics page of *server*."""
        return f'{self.server_url}?action=stats&id={server}'

    def do_login(self):
        """Authenticate the session with customer number and password."""
        payload = {
            'kdnummer': self.kdnummer,
            'passwort': self.password,
            'Login': 'Login',
            'url': ''
        }
        # NOTE(review): the response is never checked -- a failed login
        # only surfaces later when scraping finds no data.
        r = self.sess.post(self.login_url, data=payload)

    def get_servers(self):
        """Scrape the server list, including switch/port assignments.

        Returns a list of Server instances.
        """
        r = self.sess.get(self.server_url + '?switchPort=show')

        parser = etree.HTMLParser()
        root = etree.parse(io.StringIO(r.text), parser)

        # NOTE(review): 'table' is never used below; rows are selected
        # directly via XPATH_SERVER_ROWS.
        table = root.xpath(self.XPATH_USAGE_TABLE)[0]
        rows = root.xpath(self.XPATH_SERVER_ROWS)

        servers = []
        for row in rows:
            # The third cell links to the stats page; the id embedded in
            # its href identifies the server.
            anchor = row.xpath('td[3]/a')[0]
            href = anchor.get('href')
            match = re.match('CServer.php\?action=stats&id=([0-9]+)', href)
            if match:
                server_id = int(match.group(1))
                server = {
                    'id': server_id
                }

                name = row.xpath('td[2]')
                if name:
                    # Collapse runs of whitespace and pipe separators.
                    server['name'] = re.sub(r'\s+|\|', ' ', name[0].text).strip()

                hostname = row.xpath('td[2]/u/font')
                if len(hostname) > 0:
                    server['hostname'] = hostname[0].text.strip()

                # The sibling row carries a 'switch -> port' cell.
                swport_row = row.getnext()
                if swport_row is not None:
                    swport = swport_row.xpath('td[2]/table/tr/td[2]/font')
                    if len(swport) > 0:
                        switch, port = swport[0].text.strip().split(' -> ')
                        server['port'] = port
                        server['switch'] = switch

                servers.append(Server(self, **server))

        return servers

    def _get_usage(self, server_id, date):
        """Scrape per-day in/out traffic of *server_id* for *date*'s month.

        Returns a list of dicts with keys 'date' (datetime), 'in' and
        'out' (strings as scraped, or magnitudes in self.unit_volume when
        coercion is enabled).
        """
        r = self.sess.get(self.usage_url(server_id, date))

        parser = etree.HTMLParser()
        root = etree.parse(io.StringIO(r.text), parser)

        table = root.xpath(self.XPATH_USAGE_TABLE)[0]
        rows = table.xpath('tr')

        data = []
        # Skip the header row.
        for row in rows[1:]:
            columns = row.xpath('td')
            data_row = {
                'date': datetime.strptime(columns[2].xpath('b')[0].text.strip(), '%d.%m.%Y'),
                'in': columns[3].text.strip(),
                'out': columns[4].text.strip()
            }
            if self.coerce:
                # Parse the scraped value with pint and convert it to the
                # configured volume unit; keep only the magnitude.
                target_unit = ureg.parse_expression(self.unit_volume)
                for f in [ 'in', 'out' ]:
                    d = data_row[f]
                    d = ureg.parse_expression(d)
                    d = d.to(target_unit).magnitude
                    data_row[f] = d

            data.append(data_row)

        return data

    def _get_stats(self, server_id):
        """Scrape the current port statistics of *server_id*.

        Returns a dict keyed by STATS_FIELDS; values are strings as
        scraped, or magnitudes in the configured units when coercion is
        enabled.
        """
        r = self.sess.get(self.stats_url(server_id))

        parser = etree.HTMLParser()
        root = etree.parse(io.StringIO(r.text), parser)

        table = root.xpath(self.XPATH_STATS_TABLE)[0]
        data = { k: table.xpath(p)[0].text for k, p in self.XPATH_FIELDS.items() }

        if self.coerce:
            # Target unit for each field, grouped by kind (time, volume,
            # speed).
            target_units = {
                'switch_uptime': ureg.parse_expression(self.unit_time),
                'incoming': ureg.parse_expression(self.unit_volume),
                'outgoing': ureg.parse_expression(self.unit_volume),
                'sum': ureg.parse_expression(self.unit_volume),
                'max_speed': ureg.parse_expression(self.unit_speed),
                'usage_95perc': ureg.parse_expression(self.unit_speed),
                'usage_avg': ureg.parse_expression(self.unit_speed),
                'current_in': ureg.parse_expression(self.unit_speed),
                'current_out': ureg.parse_expression(self.unit_speed)
            }

            # Rewrite 'KB'/'MB'/'GB'/'TB'/'B' suffixes to their binary
            # counterparts before parsing.  NOTE(review): this assumes the
            # interface's decimal-suffixed volumes are actually binary
            # units -- confirm against the site.
            for f in [ 'incoming', 'outgoing', 'sum' ]:
                data[f] = re.sub(r"(K|M|G|T|)B$", r"\1iB", data[f])

            # Turn 'N days, HH:MM:SS.cc' into an expression pint can sum.
            data['switch_uptime'] = re.sub(r"(\d+) days, (\d+):(\d+):(\d+).(\d+)", r"\1 days + \2 hours + \3 minutes + \4 seconds + \5 centiseconds", data['switch_uptime'])

            coerced_data = { k: ureg.parse_expression(v) for k, v in data.items() if k != 'port' }
            converted_data = { k: coerced_data[k].to(target_units[k]).magnitude for k, v in coerced_data.items() }

            # Merge so un-coerced keys (if any) keep their raw values.
            data = { **data, **converted_data }

        return data

View file

@ -0,0 +1,32 @@
import argparse
from datetime import datetime
import os
def parse_arguments(with_subcommands=True):
    """Parse the command line shared by the stats CLI and the exporter.

    With with_subcommands=True a positional CMD (stats, usage, usage_sum,
    servers) is required; the exporter passes False since it always
    serves metrics.  Credentials default to the KDNUMMER / PASSWORD
    environment variables for container use.
    """

    def valid_date(s):
        # Accept month-granular dates such as '2022-08'.
        try:
            return datetime.strptime(s, "%Y-%m")
        except ValueError:
            msg = "Not a valid date: '{0}'.".format(s)
            raise argparse.ArgumentTypeError(msg)

    parser = argparse.ArgumentParser('accelerated_stats',
        description='Get port stats from Accelerated Customer Interface (e.g. interface.datafabrik.de)')

    # BUG FIX: the original used type=bool, which treats ANY non-empty
    # argument (even '--debug False') as True; a store_true flag is what
    # was intended and keeps args.debug a real bool defaulting to False.
    parser.add_argument('--debug', '-d', action='store_true', default=False)
    parser.add_argument('--format', '-f', choices=['raw', 'json'], default='json')
    parser.add_argument('--field', '-F', type=str)
    parser.add_argument('--no-coerce', '-c', action='store_true', default=False)
    parser.add_argument('--unit-volume', type=str, default='B')
    parser.add_argument('--unit-speed', type=str, default='bit/s')
    parser.add_argument('--unit-time', type=str, default='s')
    parser.add_argument('--kdnummer', '-u', type=str, default=os.environ.get('KDNUMMER'))
    parser.add_argument('--password', '-p', type=str, default=os.environ.get('PASSWORD'))
    parser.add_argument('--url', '-U', type=str, default='https://interface.datafabrik.de/')
    parser.add_argument('--server', '-s', type=int)
    parser.add_argument('--date', '-D', type=valid_date, default=datetime.now())
    parser.add_argument('--prefix', '-P', type=str, default='accelerated_')

    if with_subcommands:
        parser.add_argument('cmd', metavar='CMD', choices=['stats', 'usage', 'usage_sum', 'servers'])

    return parser.parse_args()

View file

@ -0,0 +1,4 @@
# Gunicorn configuration for serving the Prometheus exporter.
bind = "[::]:5000"  # listen on the IPv6 any-address, port 5000
workers = 4         # number of worker processes
threads = 4         # threads per worker
timeout = 120       # seconds before an unresponsive worker is killed

View file

@ -0,0 +1,72 @@
---
apiVersion: v1
kind: Namespace
metadata:
  name: accelerated-stats
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: accelerated-stats
  namespace: accelerated-stats
  labels:
    app: accelerated-stats
spec:
  selector:
    matchLabels:
      app: accelerated-stats
  template:
    metadata:
      labels:
        app: accelerated-stats
    spec:
      containers:
      - name: accelerated-stats
        image: stv0g/accelerated-stats
        imagePullPolicy: Always
        ports:
        - name: http-metrics
          containerPort: 5000
        envFrom:
        - secretRef:
            name: accelerated-stats
        # BUG FIX: the original contained a bare 'resource:' key, which is
        # not a valid container field ('resources' is) and would be
        # rejected by API-server validation.  Left empty; fill in requests
        # and limits as appropriate.
        resources: {}
---
apiVersion: v1
kind: Service
metadata:
  name: accelerated-stats
  namespace: accelerated-stats
  labels:
    app: accelerated-stats
spec:
  selector:
    app: accelerated-stats
  ports:
  - protocol: TCP
    port: 80
    name: http-metrics
    targetPort: http-metrics
---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
  name: accelerated-stats
  namespace: accelerated-stats
  labels:
    app: accelerated-stats
spec:
  selector:
    matchLabels:
      app: accelerated-stats
  namespaceSelector:
    matchNames:
    - accelerated-stats
  endpoints:
  - port: http-metrics
    interval: 1m

# Create secret:
# kubectl --namespace=accelerated-stats create secret generic accelerated-stats \
#   --from-literal=KDNUMMER=$(pass providers/interface.datafabrik.de | sed -nE 's/^User: (.*)/\1/p') \
#   --from-literal=PASSWORD=$(pass providers/interface.datafabrik.de | sed -n 1p)

View file

@ -0,0 +1,36 @@
import os
from setuptools import setup, find_packages
def read(fname):
    """Return the content of *fname*, resolved relative to this file's
    directory.

    BUG FIX: the original left the file handle open (no close, no context
    manager) and relied on the platform default encoding.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path, encoding='utf-8') as f:
        return f.read()
# Package metadata for the accelerated_stats CLI and Prometheus exporter.
setup(
    name = 'accelerated_stats',
    version = '0.1.0',
    author = 'Steffen Vogel',
    author_email = 'post@steffenvogel.de',
    description = ('Fetch and export status and bandwidth '
                   'for servers hosted by Accelerated'),
    license = 'GPL-3.0',
    keywords = 'accelerated prometheus exporter',  # BUG FIX: 'promtheus' typo
    # TODO(review): placeholder URL left over from the setuptools example
    # project; replace with the real homepage/repository.
    url = 'http://packages.python.org/an_example_pypi_project',
    packages=find_packages(),
    # NOTE(review): assumes a file literally named 'README' next to
    # setup.py -- confirm the actual filename (e.g. README.md).
    long_description=read('README'),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: Utilities',
        # BUG FIX: the classifier claimed 'BSD License' although the
        # license field above says GPL-3.0; make them agree.
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
    ],
    install_requires=[
        'pint',
        'flask',
        'requests',
        'lxml'
    ],
    entry_points={
        'console_scripts': [
            'accelerated_stats = accelerated_stats.stats:main',
            'accelerated_exporter = accelerated_stats.exporter:main',
        ]
    }
)