Compare commits

...

24 commits

Author SHA1 Message Date
df9cc04a82 push-status: Fix return code 2024-03-22 00:46:10 +01:00
3189c0d9ee push-status: Change host/unit separator 2024-03-22 00:41:01 +01:00
b95b85d50c push-status: Add uptime-kuma-api overrides 2024-03-22 00:23:45 +01:00
2403d89a30 push-status: Fix flake package attribute name 2024-03-21 23:31:32 +01:00
1890062e34 push-status: Relaxy python requirement to 3.6 2024-03-13 16:26:13 +01:00
6a3cfadaa7 push-status: More fixes 2024-03-13 16:19:54 +01:00
8bd80a8a34 push-status: More fixes 2024-03-13 16:17:05 +01:00
5421e3bd99 push-status: Fix typo 2024-03-13 13:15:40 +01:00
dcf82aad23 push-status: Lower required Python version 2024-03-13 12:09:24 +01:00
daea893309 push-status: First version 2024-03-13 12:07:12 +01:00
43c0fa3d29 fix indention 2022-08-20 00:06:34 +02:00
5b005ac95d more cleanups in bash scripts 2022-08-20 00:06:22 +02:00
c323cfb52b fix indention 2022-08-20 00:06:01 +02:00
ba5aca0716 remove some outdated stuff 2022-08-20 00:05:52 +02:00
a61f67823e update skeleton 2022-08-20 00:04:55 +02:00
6a828f4a52 add script to convert wiki tables to csv 2022-08-20 00:04:31 +02:00
7f231c2e5b add script to upload scans to Paperless 2022-08-20 00:04:14 +02:00
309159fc91 add script to fetch all DNS records 2022-08-20 00:04:00 +02:00
9806c8f4a4 add script to do recursive AXFR DNS requests 2022-08-20 00:03:37 +02:00
b9f5d3deb0 add Prometheus vor accelerated / meerfarbig / datafabrik / werkhaus stats 2022-08-20 00:03:03 +02:00
71eb82932c misc cleanups 2022-08-19 23:59:53 +02:00
94cd7efbed update skeletons 2020-12-31 12:36:37 +01:00
e235b1824f added new Bash scripts 2020-12-31 12:35:38 +01:00
2b33de5b6a added script to fetch / filter Hetzners SB offers 2019-08-14 09:09:27 +02:00
91 changed files with 2802 additions and 659 deletions

4
.gitignore vendored
View file

@ -5,3 +5,7 @@
gc_spider/cookie.txt
php/campus/cocal.db
# Python
*.pyc
*.egg-info/

View file

@ -9,28 +9,15 @@
# It uses the 'btrfs send' and 'btrfs receive' commands.
# It's not intended for simple snapshots in a single filesystem environment.
#
# @copyright 2013 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
# TODO: delete old snapshots in destination fs
# TODO: print statistics of send | receive pipe (speed & size)
# TODO: delete old snapshots in source and destination fs
set -e
function usage {
echo "Usage: $(basename $0) SOURCE [DEST]"
@ -41,53 +28,55 @@ function usage {
exit 1
}
set -e
if [ $# -lt 1 ]; then
echo -e "missing source"
echo
usage
fi
SRC=$(readlink -f "$1")
SRC=$(readlink -e "$1")
if [ $SRC == "/" ]; then
SRC=""
fi
if [ -h "$SRC/.backup/destination" ]; then
DEST=$(readlink -f "$SRC/.backup/destination")
DEST=$(readlink -e "$SRC/.backup/destination")
elif [ $# -ne 2 ] ; then
echo -e "missing destination"
echo
usage
else
DEST=$(readlink -f $2)
mkdir -p "$SRC/.backup/"
mkdir -p "$DEST"
ln -sf "$DEST" "$SRC/.backup/destination"
ln -sf "$SRC" "$DEST/source"
DEST=$(readlink -e $2)
fi
# create directories if not existing
mkdir -p "$SRC/.backup/"
mkdir -p "$DEST/"
# create symbolic links if not existing
ln -snf "$DEST/" "$SRC/.backup/destination"
ln -snf "$SRC/" "$DEST/source"
# name for the new snapshot
SNAPSHOT=$(date +%F_%H-%M-%S)
LATEST="$SRC/.backup/$SNAPSHOT"
# snapshot the current state
btrfs subvolume snapshot -r "$SRC" "$LATEST"
btrfs subvolume snapshot -r "$SRC/" "$LATEST/"
# send changes
if [ -h "$DEST/latest-source" ]; then
PREVIOUS=$(readlink -f "$DEST/latest-source")
btrfs send -p "$PREVIOUS" "$LATEST" | btrfs receive "$DEST"
PREVIOUS=$(readlink -e "$DEST/latest-source")
btrfs send -p "$PREVIOUS/" "$LATEST/" | pv | btrfs receive "$DEST/"
else
btrfs send "$LATEST" | btrfs receive "$DEST"
btrfs send "$LATEST/" | pv | btrfs receive "$DEST/"
fi
# delete old snapshot in source fs
if [ -n "$PREVIOUS" ]; then
btrfs subvolume delete "$PREVIOUS"
btrfs subvolume delete "$PREVIOUS/"
fi
# update links to last backup
ln -rsfT "$DEST/$SNAPSHOT" "$DEST/latest"
ln -sfT "$LATEST" "$DEST/latest-source"

View file

@ -2,24 +2,10 @@
##
# Backup mySQL databases in separate sql dumps
#
# @copyright 2013 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
set -e
@ -78,7 +64,7 @@ shift $((OPTIND-1))
# parsing backup directory
if [ -n "$1" ]; then
DIR=$(readlink -f $1)
DIR=$(readlink -f $1)
else
DIR=$(pwd)
fi

53
bash/backup-remote-restic.sh Executable file
View file

@ -0,0 +1,53 @@
#!/bin/bash
##
# Backup remote machines via restic (pull)
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
# Abort on the first failing command.
set -e
# Exactly two arguments are required: the SSH destination to back up and
# the restic/S3 repository (bucket) name on the MinIO server.
if [ $# -ne 2 ]; then
echo "Usage: $(basename $0) SOURCE REPO"
exit 1
fi
# SRC: SSH destination (e.g. user@host); REPO: repository/bucket name.
SRC=$1
REPO=$2
# Path where restic is (or will be) installed on the REMOTE machine.
RESTIC="/usr/local/bin/restic"
# Install Restic
# NOTE(review): the heredoc below is UNQUOTED, so ${REPO}/${RESTIC} and the
# embedded credentials are expanded locally and sent to the remote shell in
# plain text over the SSH channel. These hard-coded secrets are checked into
# the repository and should be rotated / moved to a secrets store.
ssh ${SRC} <<ENDSSH
export RESTIC_REPOSITORY="s3:http://moon.int.0l.de:9001/${REPO}"
if [ "${REPO}" == "mail.0l.de" ]; then
export RESTIC_PASSWORD="Ca8vut7Y5hksuc1IkZfsrBf7ZKnHZwMYofLCWlmCPpJAMgqciwTZ5yxQUlUrii7h"
else
export RESTIC_PASSWORD="NtogK'D~>)r%2g'{-gm#rWak<EKu1W5mri)E8/dWD|5.\NP}wC*(Q#{>*M_SiJ\i"
fi
export AWS_ACCESS_KEY_ID="restic"
export AWS_SECRET_ACCESS_KEY="akuuphieyaizieGaneocheituGhe9oreagohzie6go4Euzai8ail2do7pohRai0e"
# Install or update restic
if ! [ -x ${RESTIC} ]; then
curl -qL https://github.com/restic/restic/releases/download/v0.9.5/restic_0.9.5_linux_amd64.bz2 | bunzip2 > ${RESTIC}
chmod +x ${RESTIC}
else
${RESTIC} self-update
fi
${RESTIC} version
# Check if repo exists
${RESTIC} snapshots || ${RESTIC} init
# Start backup
${RESTIC} -vv backup --one-file-system --exclude=/var/log/lastlog /
ENDSSH

View file

@ -17,32 +17,18 @@
# sudo ssh-keygen
# sudo cat /root/.ssh/id_dsa.pub | ssh user@remote 'cat >> /root/.ssh/authorized_keys'
#
# @copyright 2013 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
function usage {
echo "Usage: $(basename $0) SOURCE DEST"
echo
echo " SOURCE a path to the subvolume to backup"
echo " DEST a path to the backup destination"
exit 1
echo "Usage: $(basename $0) SOURCE DEST"
echo
echo " SOURCE a path to the subvolume to backup"
echo " DEST a path to the backup destination"
exit 1
}
set -e
@ -50,7 +36,7 @@ set -e
if [ $# -ne 2 ]; then
echo -e "invalid args!"
echo
usage
usage
fi
DATE=$(date +%F_%H-%M-%S)
@ -89,4 +75,3 @@ btrfs subvolume snapshot -r $DEST/.current $DEST/$DATE
# create symlink to latest snapshot
ln -rsfT $DEST/$DATE $DEST/latest

View file

@ -2,24 +2,10 @@
##
# System Backupscript
#
# @copyright 2012 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
#=============================================================
@ -34,7 +20,7 @@ BACKUPDIR="/backup"
MYSQL_BACKUPDIR="/backup/mysql"
DATA_BACKUPDIR="/backup/data"
# Wochentag für wöchentliche Backups (1-7; 1 steht für Montag)
# Wochentag für wöchentliche Backups (1-7; 1 steht für Montag)
DOWEEKLY=5
# Kompressionsmethode (gzip oder bzip2)
@ -68,7 +54,7 @@ LOGERR=$BACKUPDIR/ERRORS_$HOST-`date +%N`.log
# - quiet : sendet nur Error Logs per Mail
MAIL_CONTENT="stdout"
# Maximale Größe des Mail Anhangs
# Maximale Größe des Mail Anhangs
MAIL_MAXATTSIZE="4000"
# Mail Adresse
@ -81,7 +67,7 @@ MAIL_ADDR="admin@localhost"
# FTP Benutzer
FTP_USERNAME=yourftpusername
# FTP Passwort für $FTP_USERNAME
# FTP Passwort für $FTP_USERNAME
FTP_PASSWORD=yourftppassword
# Hostname oder IP Adresse des FTP Servers
@ -91,10 +77,10 @@ FTP_HOST=yourftpserver
# Daten Einstellungen
#=============================================================
# Liste der täglichen Backupverzeichnisse (durch " " getrennt)
# Liste der täglichen Backupverzeichnisse (durch " " getrennt)
DATA_DIRNAMES="/home /opt/mails /etc"
# Liste der wöchentlichen Backupverzeichnisse (durch " " getrennt)
# Liste der wöchentlichen Backupverzeichnisse (durch " " getrennt)
DATA_WDIRNAMES="/var/www $DATA_DIRNAMES"
# Liste der monatlichen Backupverzeichnisse (durch " " getrennt)
@ -114,16 +100,16 @@ TARFLAGS="--create --preserve-permissions --dereference --ignore-failed-read --e
# mySQL Benutzer
MYSQL_USERNAME=yourmysqlusername
# mySQL Passwort für $MYSQL_USERNAME
# mySQL Passwort für $MYSQL_USERNAME
MYSQL_PASSWORD=yourmysqlpassword
# Hostname oder IP Adresse des mySQL Servers
MYSQL_HOST=$HOST
# Liste der täglichen Backupdatenbanken (durch " " getrennt; "all" für alle Datenbanken)
# Liste der täglichen Backupdatenbanken (durch " " getrennt; "all" für alle Datenbanken)
MYSQL_DBNAMES="all"
# Liste der wöchentlichen Backupdatenbanken (durch " " getrennt)
# Liste der wöchentlichen Backupdatenbanken (durch " " getrennt)
MYSQL_WDBNAMES=$MYSQL_DBNAMES
# Liste der monatlichen Backupdatenbanken (durch " " getrennt)
@ -132,13 +118,13 @@ MYSQL_MDBNAMES="$MYSQL_WDBNAMES"
# Datenbanken zum Excluden
MYSQL_DBEXCLUDE=""
# CREATE DATABASE zu den mySQL Dumps hinzufügen?
# CREATE DATABASE zu den mySQL Dumps hinzufügen?
MYSQL_CREATE_DATABASE=yes
# Komprimierte Verbindung zum mySQL Server
MYSQL_COMMCOMP=no
# Maximale Größe des Verbindungspuffer zum mySQL Server (Maximum 1GB)
# Maximale Größe des Verbindungspuffer zum mySQL Server (Maximum 1GB)
MYSQL_MAX_ALLOWED_PACKET=
# Socketadresse des mySQL Server bei localhost Verbindungen
@ -174,13 +160,13 @@ if [ "$MYSQL_COMMCOMP" = "yes" ];
MYSQL_OPT="$OPT --compress"
fi
# Maximale Größe des Verbindungspuffer zum mySQL Server (Maximum 1GB)
# Maximale Größe des Verbindungspuffer zum mySQL Server (Maximum 1GB)
if [ "$MYSQL_MAX_ALLOWED_PACKET" ];
then
MYSQL_OPT="$MYSQL_OPT --max_allowed_packet=$MYSQL_MAX_ALLOWED_PACKET"
fi
# Benötigte Verzeichnisse erstellen
# Benötigte Verzeichnisse erstellen
if [ ! -e "$BACKUPDIR" ]
then
mkdir -p "$BACKUPDIR"
@ -236,7 +222,7 @@ display () {
end)
echo Backup Ende `date`
echo ======================================================================
echo Benötigter Speicherplatz für Backups:
echo Benötigter Speicherplatz für Backups:
echo Data : `du -hs "$DATA_BACKUPDIR"`
echo mySQL: `du -hs "$MYSQL_BACKUPDIR"`
echo All : `du -hs "$BACKUPDIR"`
@ -299,33 +285,33 @@ archive () {
gzip -l "$1.gz"
SUFFIX=".gz"
elif [ "$COMP" = "bzip2" ]; then
echo Komprimierungs Informationen für "$1.bz2"
echo Komprimierungs Informationen für "$1.bz2"
bzip2 -f -v $1 2>&1
SUFFIX=".bz2"
else
echo "Keine Kompressionsmethode gewählt!"
echo "Keine Kompressionsmethode gew<EFBFBD>hlt!"
fi
return 0
}
# Soll CREATE_DATABASE hinzugefügt werden?
# Soll CREATE_DATABASE hinzugefügt werden?
if [ "$MYSQL_CREATE_DATABASE" = "no" ]; then
MYSQL_OPT="$MYSQL_OPT --no-create-db"
else
MYSQL_OPT="$MYSQL_OPT --databases"
fi
# Wähle alle Datenbanken aus
# Wähle alle Datenbanken aus
if [ "$MYSQL_DBNAMES" = "all" ]; then
MYSQL_DBNAMES="`mysql --user=$MYSQL_USERNAME --password=$MYSQL_PASSWORD --host=$MYSQL_HOST --batch --skip-column-names -e "show databases"| sed 's/ /%/g'`"
MYSQL_DBNAMES="`mysql --user=$MYSQL_USERNAME --password=$MYSQL_PASSWORD --host=$MYSQL_HOST --batch --skip-column-names -e "show databases"| sed 's/ /%/g'`"
# Schließe Datenbanken aus
# Schließe Datenbanken aus
for exclude in $MYSQL_DBEXCLUDE
do
MYSQL_DBNAMES=`echo $MYSQL_DBNAMES | sed "s/\b$exclude\b//g"`
done
MYSQL_MDBNAMES=$MYSQL_DBNAMES
MYSQL_MDBNAMES=$MYSQL_DBNAMES
fi
display start # Zeige Start Informationen
@ -340,7 +326,7 @@ fi
#================================================
if [ $DOM = "01" ]; then
# Erstellen benötigte Verzeichnisse
# Erstellen benötigte Verzeichnisse
if [ ! -e "$MYSQL_BACKUPDIR/monthly/$M" ]
then
mkdir -p "$MYSQL_BACKUPDIR/monthly/$M"
@ -374,7 +360,7 @@ display datastart
for DATA_MDIR in $DATA_MDIRNAMES
do
# Bereite $DATA_MDIR für den Dateinamen vor
# Bereite $DATA_MDIR für den Dateinamen vor
DATA_MDIR_DISP="`echo $DATA_MDIR | cut -b 2- | sed 's/\//_/g' | sed 's/ //g'"
echo Monthly Backup of $DATA_MDIR...
@ -391,11 +377,11 @@ fi
#================================================
# Wöchentliches Backup
# Wöchentliches Backup
#================================================
if [ $DNOW = $DOWEEKLY ]; then
# Erstellen benötigte Verzeichnisse
# Erstellen benötigte Verzeichnisse
if [ ! -e "$MYSQL_BACKUPDIR/weekly/week_$W" ]
then
mkdir -p "$MYSQL_BACKUPDIR/weekly/week_$W"
@ -406,7 +392,7 @@ if [ ! -e "$DATA_BACKUPDIR/weekly/week_$W" ]
mkdir -p "$DATA_BACKUPDIR/weekly/week_$W"
fi
# Lösche alte Backups
# Lösche alte Backups
echo Rotating 5 weeks Backups...
display dl
if [ "$W" -le 05 ];then
@ -442,7 +428,7 @@ display datastart
for DATA_WDIR in $DATA_WDIRNAMES
do
# Bereite $DATA_WDIR für den Dateinamen vor
# Bereite $DATA_WDIR für den Dateinamen vor
DATA_DIR_DISP="`echo $DATA_WDIR | cut -b 2- | sed 's/\//_/g' | sed 's/ //g'"
echo Weekly Backup of $DATA_WDIR...
@ -458,9 +444,9 @@ display dataend
fi
#================================================
# Tägliches Backup
# Tägliches Backup
#================================================
# Erstellen benötigte Verzeichnisse
# Erstellen benötigte Verzeichnisse
if [ ! -e "$MYSQL_BACKUPDIR/daily/$DOW" ]
then
mkdir -p "$MYSQL_BACKUPDIR/daily/$DOW"
@ -471,7 +457,7 @@ if [ ! -e "$DATA_BACKUPDIR/daily/$DOW" ]
mkdir -p "$DATA_BACKUPDIR/daily/$DOW"
fi
# Lösche alte Backups
# Lösche alte Backups
echo Rotating last weeks Backup...
display l
eval rm -fv "$MYSQL_BACKUPDIR/daily/$DOW/*"
@ -497,7 +483,7 @@ display datastart
for DATA_DIR in $DATA_DIRNAMES
do
# Bereite $DATA_DIR für den Dateinamen vor
# Bereite $DATA_DIR für den Dateinamen vor
DATA_DIR_DISP="`echo $DATA_DIR | cut -b 2- | sed 's/\//_/g' | sed 's/ //g'"
echo Daily Backup of $DATA_DIR...
@ -519,39 +505,39 @@ if [ "$POSTBACKUP" ]; then
fi
#Clean up IO redirection
exec 1>&6 6>&- # Stelle Standartausgabe wieder her und schließe Datei #6
exec 1>&7 7>&- # Stelle Standartausgabe wieder her und schließe Datei #7
exec 1>&6 6>&- # Stelle Standardausgabe wieder her und schließe Datei #6
exec 1>&7 7>&- # Stelle Standardausgabe wieder her und schließe Datei #7
if [ "$MAIL_CONTENT" = "files" ]
then
if [ -s "$LOGERR" ]
then
# Füge bei Fehlern Error Log hinzu
# Füge bei Fehlern Error Log hinzu
MYSQL_BACKUPFILES="$MYSQL_BACKUPFILES $LOGERR"
ERRORNOTE="ACHTUNG Backup Fehler: "
fi
# Ermittel SQL Dump Größe
# Ermittel SQL Dump Größe
MAIL_ATTSIZE=`du -c $MYSQL_BACKUPFILES | grep "[[:digit:][:space:]]total$" |sed s/\s*total//`
if [ $MAIL_MAXATTSIZE -ge $MAIL_ATTSIZE ]
then
BACKUPFILES=`echo "$BACKUPFILES" | sed -e "s# # -a #g"` # enable multiple attachments
mutt -s "$ERRORNOTE Backup Log and SQL Dump für $HOST - $DATE" $BACKUPFILES $MAIL_ADDR < $LOGFILE #senden via mutt
mutt -s "$ERRORNOTE Backup Log and SQL Dump f<EFBFBD>r $HOST - $DATE" $BACKUPFILES $MAIL_ADDR < $LOGFILE #senden via mutt
else
cat "$LOGFILE" | mail -s "ACHTUNG! - SQL Dump ist zu groß um gemailt zu werden auf $HOST - $DATE" $MAIL_ADDR
cat "$LOGFILE" | mail -s "ACHTUNG! - SQL Dump ist zu gro<EFBFBD> um gemailt zu werden auf $HOST - $DATE" $MAIL_ADDR
fi
elif [ "$MAIL_CONTENT" = "log" ]
then
cat "$LOGFILE" | mail -s "Backup Log für $HOST - $DATE" $MAIL_ADDR
cat "$LOGFILE" | mail -s "Backup Log f<EFBFBD>r $HOST - $DATE" $MAIL_ADDR
if [ -s "$LOGERR" ]
then
cat "$LOGERR" | mail -s "$ERRORNOTE Error Log für: $HOST - $DATE" $MAIL_ADDR
cat "$LOGERR" | mail -s "$ERRORNOTE Error Log f<EFBFBD>r: $HOST - $DATE" $MAIL_ADDR
fi
elif [ "$MAIL_CONTENT" = "quiet" ]
then
if [ -s "$LOGERR" ]
then
cat "$LOGERR" | mail -s "$ERRORNOTE Error Log für $HOST - $DATE" $MAIL_ADDR
cat "$LOGFILE" | mail -s "Log für $HOST - $DATE" $MAIL_ADDR
cat "$LOGERR" | mail -s "$ERRORNOTE Error Log f<EFBFBD>r $HOST - $DATE" $MAIL_ADDR
cat "$LOGFILE" | mail -s "Log f<EFBFBD>r $HOST - $DATE" $MAIL_ADDR
fi
else
if [ -s "$LOGERR" ]
@ -574,7 +560,7 @@ if [ -s "$LOGERR" ]
STATUS=0
fi
# Löschen der Logfiles
# Löschen der Logfiles
eval rm -f "$LOGFILE"
eval rm -f "$LOGERR"

View file

@ -1,16 +1,18 @@
#!/bin/bash
#
# Import your sport activities from tapiriik.com to cartoco.com.
#
# Prerequisistes:
# - rclone
# - curl
# - jq
# - xsqlproc
#
# Author: Steffen Vogel <post@steffenvogel.de>
# Copyright: 2016, Steffen Vogel
# License: GPLv3
##
# Import your sport activities from tapiriik.com to cartoco.com.
#
# Prerequisistes:
# - rclone
# - curl
# - jq
# - xsqlproc
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
CARTODB_API_KEY=$(pass apis/cartodb)
CARTODB_USER=stv0g
@ -89,4 +91,4 @@ echo "${FILES_NEW}" | while read FILE; do
fi
done
rm ${STYLESHEET} ${JQFILTER}
rm ${STYLESHEET} ${JQFILTER}

View file

@ -1,4 +1,12 @@
#!/bin/bash
##
# Convert TCX files to GPX
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
SRC=${1:-${DROPBOX}/Apps/tapiriik}
DEST=${2:-${DROPBOX}/Apps/cartodb}
@ -6,8 +14,7 @@ DEST=${2:-${DROPBOX}/Apps/cartodb}
SPORTS=""
# Convert all TXC into GPX files
for FILE in ${SRC}/*.tcx
do
for FILE in ${SRC}/*.tcx; do
BASE=$(basename "${FILE// /_}" .tcx)
INPUT="${FILE}"
OUTPUT="${BASE}.gpx"
@ -27,8 +34,7 @@ done
SPORTS=$(echo $SPORTS | tr ' ' '\n' | sort -u | tr '\n' ' ')
# Merge all activities per sport
for SPORT in ${SPORTS}
do
for SPORT in ${SPORTS}; do
FILES=""
for FILE in ${DEST}/${SPORT}/*.gpx; do
@ -38,4 +44,4 @@ do
echo "Merging into $SPORT.gpx"
${BABEL} -t -r -w -i gpx ${FILES} -o gpx -F ${DEST}/${SPORT}.gpx
done
done

View file

@ -2,24 +2,10 @@
##
# Changestation script for lastfmproxy
#
# @copyright 2012 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
wget http://87.230.33.74:2944/$1

47
bash/cronic.sh Executable file
View file

@ -0,0 +1,47 @@
#!/bin/bash
# Cronic v3 - cron job report wrapper
# Copyright 2007-2016 Chuck Houpt. No rights reserved, whatsoever.
# Public Domain CC0: http://creativecommons.org/publicdomain/zero/1.0/
# Runs the wrapped command silently and only emits a report (which cron
# then mails) when the command exits non-zero or writes real error output.
# Treat unset variables and failing commands as fatal for the wrapper itself.
set -eu
# Per-run scratch directory for the captured output streams.
TMP=$(mktemp -d)
OUT=$TMP/cronic.out
ERR=$TMP/cronic.err
TRACE=$TMP/cronic.trace
# Temporarily suspend -e so a non-zero exit of the wrapped command is
# captured in RESULT instead of aborting the wrapper.
set +e
"$@" >$OUT 2>$TRACE
RESULT=$?
set -e
# Lines produced by 'set -x' tracing begin with $PS4 (default '+ ').
# If the stderr capture contains such trace lines, everything that does NOT
# match the pattern is treated as real error output; otherwise all of stderr is.
PATTERN="^${PS4:0:1}\\+${PS4:1}"
if grep -aq "$PATTERN" $TRACE
then
# Leading '!' keeps 'set -e' from aborting when grep selects no lines.
! grep -av "$PATTERN" $TRACE > $ERR
else
ERR=$TRACE
fi
# Report only on failure exit code or non-empty (non-trace) error output.
if [ $RESULT -ne 0 -o -s "$ERR" ]
then
echo "Cronic detected failure or error output for the command:"
echo "$@"
echo
echo "RESULT CODE: $RESULT"
echo
echo "ERROR OUTPUT:"
cat "$ERR"
echo
echo "STANDARD OUTPUT:"
cat "$OUT"
# When tracing was detected, also include the full raw trace for context.
if [ $TRACE != $ERR ]
then
echo
echo "TRACE-ERROR OUTPUT:"
cat "$TRACE"
fi
fi
# Clean up the scratch directory.
rm -rf "$TMP"

17
bash/crop.sh Executable file
View file

@ -0,0 +1,17 @@
#!/bin/bash
##
# Crop a centered, 8 cm wide receipt ("Bon") strip out of scanned documents.
#
# Usage: crop.sh FILE [FILE...]
# For each input a "<name>_crop.pdf" is written next to it.
# Requires ImageMagick (identify/convert) and bc.
##

# Scan resolution used both for measuring the page and for re-rendering.
DPI=600

# FIX: iterate over "$@" and quote all expansions so filenames containing
# spaces are handled correctly (the original used unquoted $@/$INPUT).
for INPUT in "$@"; do
	OUTPUT="${INPUT%.*}_crop.pdf"

	# Page dimensions in pixels at the chosen density.
	WIDTH_PTS=$(identify -density "${DPI}" -format "%w" "${INPUT}")
	HEIGHT_PTS=$(identify -density "${DPI}" -format "%h" "${INPUT}")

	# Receipt width: 8 cm converted to inches, then to pixels.
	BON_WIDTH_INCH=$(bc <<< "scale=2; 8/2.54") # inch
	BON_WIDTH_PTS=$(bc <<< "${BON_WIDTH_INCH} * ${DPI}")

	# Horizontal offset that centers the crop window on the page.
	OFFSET_X_PTS=$(bc <<< "${WIDTH_PTS} / 2 - ${BON_WIDTH_PTS} / 2")

	convert -density "${DPI}" -crop "${BON_WIDTH_PTS}x${HEIGHT_PTS}+${OFFSET_X_PTS}+0" +repage -compress JPEG "${INPUT}" "${OUTPUT}"
done

View file

@ -2,25 +2,11 @@
##
# Deviant Background Changer
#
# @copyright 2012 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de/2009/11/28/deviantart-wallpapers/
# @version 1.1
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de/2009/11/28/deviantart-wallpapers/
# @version 1.1
##
# Path to save downloaded images

35
bash/dump.sh Executable file
View file

@ -0,0 +1,35 @@
#!/bin/bash
#===================================================================================
#
# FILE: dump.sh
# USAGE: dump.sh [-i interface] [tcpdump-parameters]
# DESCRIPTION: tcpdump on any interface and add the prefix [Interface:xy] in front of the dump data.
# OPTIONS: same as tcpdump
# REQUIREMENTS: tcpdump, sed, ifconfig, kill, awk, grep, posix regex matching
# BUGS: ---
# FIXED: - In 1.0 The parameter -w would not work without -i parameter as multiple tcpdumps are started.
# - In 1.1 VLAN's would not be shown if a single interface was dumped.
# - In 1.3 Some fixes for virtual interfaces have been provided by Reiner Keller. (Thanks!)
# NOTES: ---
# - 1.2 git initial
# AUTHOR: Sebastian Haas
# VERSION: 1.2
# CREATED: 16.09.2014
# REVISION: 22.09.2014
#
# FIX: shebang changed from /bin/sh to /bin/bash — the script relies on the
# bash-only [[ =~ ]] regex test and the BASH_REMATCH array below, which fail
# under a POSIX /bin/sh such as dash.
#===================================================================================

# When this exits, exit all background processes:
trap 'kill $(jobs -p) &> /dev/null && sleep 0.2 && echo ' EXIT

# Create one tcpdump output per interface and add an identifier to the beginning of each line:
if [[ $@ =~ -i[[:space:]]?[^[:space:]]+ ]]; then
	# A specific interface was requested: run a single tcpdump on it.
	# BASH_REMATCH[0] is "-iNAME" / "-i NAME"; ':2' strips the "-i" prefix.
	tcpdump -l "$@" | sed 's/^/[Interface:'"${BASH_REMATCH[0]:2}"'] /' &
else
	# No -i given: start one tcpdump per interface, skipping alias
	# interfaces such as eth0:1. (A stray 'i' after the awk program,
	# which was a no-op pattern, has been removed.)
	for interface in $(ifconfig | grep '^[a-z0-9]' | awk '{print $1}' | sed "/:[0-9]/d")
	do
		tcpdump -l -i $interface -nn "$@" | sed 's/^/[Interface:'"$interface"'] /' 2>/dev/null &
	done
fi

# wait .. until CTRL+C
wait

124
bash/dyndns-update.sh Executable file
View file

@ -0,0 +1,124 @@
#!/bin/bash
##
# dyndns-update update script
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##

# default options
VER=4
# NOTE(review): hard-coded default secret checked into the repository —
# consider dropping this default and always passing -s / prompting.
SECRET=bx8qNQAnGic9OnFuqQu9XjG2NS9ed1fOaDds53R2jbq59m1WKWH3Rd1S3nijZ87u
ZONE=dyn.0l.de
HOST=$(hostname)

# Print usage information.
function usage {
	cat <<-EOF
	Usage: $0 [-4,-6] [-s SECRET] [-z ZONE] [-d] [-D] [HOST]
	Options:
	-s is the secret from the webservice otherwise prompted
	-z nameserver zone
	-4 update A record (default)
	-6 update AAAA record
	-D live monitor interface for changing addresses
	-d enable verbose output
	-h show this help
	HOST is the hostname which you want to update
	defaults to the local hostname
	Example: $0 -6 -z dyn.0l.de sea
	written by Steffen Vogel <post@steffenvogel.de>
	EOF
}

# Check that every given command is available in PATH.
# Returns the number of missing dependencies (0 == all present).
function deps() {
	FAILED=0
	for DEP in $*; do
		if ! which ${DEP} &>/dev/null; then
			echo -e "This script requires ${DEP} to run but it is not installed."
			((FAILED++))
		fi
	done
	return ${FAILED}
}

# Send the update request for the given record data (an IP address).
# Retries with exponential backoff while curl cannot reach the service
# (code 0 / 000), gives up on server errors (>= 500), succeeds otherwise.
function update() {
	RDATA=$1
	WAIT=1

	URL="https://dyndns.k8s.0l.de/update?secret=${SECRET}&domain=${HOST}&addr=${RDATA}"

	while true; do
		if (( $DEBUG )); then echo "Updating record: ${URL}"; fi
		CODE=$(curl -w %{http_code} -s -o /dev/stderr "${URL}") 2>&1
		if [ ${CODE} -eq 0 ]; then
			if (( ${DEBUG} )); then echo "Sleeping for ${WAIT} secs..."; fi
			sleep ${WAIT} # wait until interface is ready
			WAIT=$((${WAIT}*2))
		elif [ ${CODE} -ge 500 ]; then
			if (( ${DEBUG} )); then echo "Request failed. Aborting.."; fi
			return 1
		else
			return 0
		fi
	done
}

# Determine the current public IPv4/IPv6 address of this machine.
function get() {
	curl -${VER} -s http://ident.me
}

# check dependencies
if ! deps dig curl ip; then
	echo -e "Unmet dependencies: Aborting!"
	exit 1
fi

# parse arguments
# FIX: 's:' was missing from the optstring, so the documented '-s SECRET'
# option was never parsed; the unused 'p:u:t:i:' letters are kept for
# backwards compatibility.
while getopts "s:z:p:u:t:i:Dhd46" OPT; do
	case ${OPT} in
		s) SECRET=${OPTARG} ;;
		4) VER=4 ;;
		6) VER=6 ;;
		D) DAEMON=1 ;;
		z) ZONE=${OPTARG} ;;
		d) DEBUG=${OPTARG:-5} ;;
		h)
			usage
			exit 0 ;;
		*)
			usage
			exit 1
	esac
done

# clear all options and reset the command line
shift $((OPTIND-1))

# parsing host
# FIX: fall back to the local hostname (as documented in usage) instead of
# aborting when no HOST argument is given.
if [ -n "$1" ]; then
	HOST=$1
fi

# prompting for secret
if [ -z "${SECRET}" ]; then
	read -s -p "secret: " SECRET
	echo
fi

IP=$(get)
if [ -n "${IP}" ]; then
	# FIX: update() takes a single argument; the previous extra "${TYPE}"
	# argument referenced a variable that was never set.
	update "${IP}" || exit
else
	echo -e "failed to get ip from net"
	exit 1
fi

View file

@ -6,24 +6,10 @@
# This script creates a temporary symlink and redirects the supplied filename to
# the temporary one.
#
# @copyright 2013 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de/
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
ARGS=$@

70
bash/fan-ctl.sh Executable file
View file

@ -0,0 +1,70 @@
#!/usr/bin/env bash
# You'll need to enable IPMI over lan in idrac first
# iDRAC Settings -> Network -> IPMI Settings
# Channel Privilege Level Limit needs to be Administrator
# You may want to create a dedicated username/pass with IPMI permission in iDRAC Settings -> User Authentication
# See also: https://www.spxlabs.com/blog/2019/3/16/silence-your-dell-poweredge-server

IPMIHOST=169.254.0.1
IPMIUSER=root
IPMIPW=XXXXX # Please change
IPMIEK=XXXXX # Please change
FANSPEEDHEX=${1:-0x08} # See https://i.imgur.com/u1HMyqI.png
MAXTEMP=60
HYSTERESIS=5
# State file remembering whether fan control is currently automatic (1) or manual (0).
FANFILE=/var/run/autofan

# Wrapper around ipmitool with the configured connection parameters.
function ipmi() {
	ipmitool -I lanplus -H "$IPMIHOST" -U "$IPMIUSER" -P "$IPMIPW" -y "$IPMIEK" "$@"
}

# For R710, which doesn't have cpu temps, try this line instead:
# if ! TEMPS=$(ipmi sdr type temperature | grep -i inlet | grep -Po '\d{2,3}' 2> /dev/null);
# thanks @bumbaclot
if ! TEMPS=$(ipmi sdr type temperature | grep -vi inlet | grep -vi exhaust | grep -Po '\d{2,3}' 2> /dev/null); then
	echo "FAILED TO READ TEMPERATURE SENSOR!" >&2
	logger -t "fanctl" -p user.err -i "Error: Could not read temperature sensor"
	# FIX: abort instead of falling through with an empty reading, which
	# would otherwise force the fans to a fixed low manual speed.
	exit 1
fi

HIGHTEMP=0
LOWTEMP=1
echo "Temps: ${TEMPS}"
for TEMP in $TEMPS; do
	# FIX: use arithmetic -gt; '>' inside [[ ]] compares strings
	# lexicographically (e.g. "9" > "60" would be true).
	if [[ $TEMP -gt $MAXTEMP ]]; then
		HIGHTEMP=1
	fi
	if [[ $TEMP -gt $(($MAXTEMP - $HYSTERESIS)) ]]; then
		LOWTEMP=0
	fi
done

# Previous mode from the state file; default to "automatic" when unknown.
if [[ -r "$FANFILE" ]]; then
	AUTO=$(< "$FANFILE")
else
	AUTO=1
fi

echo "Low: ${LOWTEMP}"
echo "High: ${HIGHTEMP}"

if [[ $HIGHTEMP == 1 ]]; then
	# Automatic fan control
	# FIX: braces group the failure handling; previously the bare ';' made
	# 'exit 1' run unconditionally, so the state file and log entry below
	# were never reached.
	ipmi raw 0x30 0x30 0x01 0x01 >& /dev/null || { echo "FAILED TO SET FAN CONTROL MODE" >&2; exit 1; }
	echo "1" > "$FANFILE"
	if [[ $AUTO == 0 ]]; then
		logger -t "fanctl" -p user.info -i "Setting fan control to automatic"
	fi
elif [[ $LOWTEMP == 1 ]]; then
	# Manual fan control
	ipmi raw 0x30 0x30 0x01 0x00 >& /dev/null || echo "FAILED TO SET FAN CONTROL SPEED" >&2
	ipmi raw 0x30 0x30 0x02 0xff "$FANSPEEDHEX" >& /dev/null || echo "FAILED TO SET FAN SPEED" >&2
	echo "0" > "$FANFILE"
	if [[ $AUTO == 1 ]]; then
		logger -t "fanctl" -p user.info -i "Setting fan control to manual"
	fi
fi

View file

@ -1,2 +0,0 @@
#!/bin/bash
ffmpeg -i "$1" -f mp4 -vcodec mpeg4 -maxrate 1000 -b 700 -qmin 3 -qmax 5 -bufsize 4096 -g 300 -acodec aac -ar 44100 -ab 192 -s 320x240 -aspect 4:3 "$1.mp4"

View file

@ -1,25 +1,11 @@
#!/bin/bash
##
# reconnect avm fritzbox router
# Reconnect AVM Fritzbox router
#
# @copyright 2012 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
curl 'http://fritz.box:49000/upnp/control/WANIPConn1' \

View file

@ -1,2 +0,0 @@
#!/bin/bash
gpsbabel -i geo -f $1 -o garmin -F /dev/ttyS0

27
bash/get-panoramas.sh Executable file
View file

@ -0,0 +1,27 @@
#!/bin/bash
##
# Find images taken with little time diff (panoramas)
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##

MIN_DIFF=5 # Max seconds between two shots to count as part of a panorama
LAST_TS=0

# -p: do not fail if the directory already exists (e.g. on a re-run)
mkdir -p panorama

for i in *.JPG; do
	# Guard against the literal, unexpanded glob when no JPGs exist
	[ -e "$i" ] || continue

	# Compare each file's mtime against the previous one; filenames are
	# quoted so names containing spaces survive.
	TS=$(stat -c %Y "$i")
	DIFF=$((TS - LAST_TS))

	if [ "$DIFF" -lt "$MIN_DIFF" ]; then
		echo "$i"
		cp "$i" "panorama/$i"
	fi

	LAST_TS=$TS
done

View file

@ -1,41 +0,0 @@
#!/bin/bash
##
# Find images taken with little time diff (panoramas)
#
# @copyright 2012 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de/
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
##
MIN_DIFF=5
LAST_TS=0
mkdir panorama
for i in *.JPG; do
TS=`stat -c %Y $i`
let DIFF=$TS-$LAST_TS
if [ "$DIFF" -lt "$MIN_DIFF" ]; then
echo $i
cp $i panorama/$i
fi
LAST_TS=$TS
done

View file

@ -2,10 +2,9 @@
# Must be called with two command-line args.
# Example: git-svn-relocate.sh http://old.server https://new.server
if [ $# -ne 2 ]
then
echo "Please invoke this script with two command-line arguments (old and new SVN URLs)."
exit $E_NO_ARGS
if [ $# -ne 2 ]; then
echo "Please invoke this script with two command-line arguments (old and new SVN URLs)."
exit $E_NO_ARGS
fi
# Prepare URLs for regex search and replace.

View file

@ -7,50 +7,46 @@
# Use at your own risk. For the love of cthulhu, back
# your repo up before letting this loose on it.
if [ $# -ne 1 ]
then
echo "Usage: `basename $0` {new subversion url}"
exit -1
if [ $# -ne 1 ]; then
echo "Usage: `basename $0` {new subversion url}"
exit -1
fi
if [[ $1 = "--help" || $1 = "-h" ]]
then
echo
echo "Usage: `basename $0` {new subversion url}"
echo
echo " Changes the url of the subversion repository a git-svn repo is connected to."
echo " Analogous to svn switch. Potentially a weapon of mass destruction. Use with care."
echo " Run this from within your git repo. You only need one argument: the new url of the svn repo."
echo " git-svn-switch will attempt to verify that the url is at least a svn repo before starting the switch"
echo " but don't depend on it to stop you from doing summat daft."
echo
exit 1
if [[ $1 = "--help" || $1 = "-h" ]]; then
echo
echo "Usage: `basename $0` {new subversion url}"
echo
echo " Changes the url of the subversion repository a git-svn repo is connected to."
echo " Analogous to svn switch. Potentially a weapon of mass destruction. Use with care."
echo " Run this from within your git repo. You only need one argument: the new url of the svn repo."
echo " git-svn-switch will attempt to verify that the url is at least a svn repo before starting the switch"
echo " but don't depend on it to stop you from doing summat daft."
echo
exit 1
fi
# get the current subversion url
SRC=`git svn info --url`
if [ -n "$SRC" ]
then
FROM=`echo $SRC | sed "s|/trunk||"`
REPO=`svn info $1`
echo "Checking $REPO is actually a subversion repository..."
if [ -n "$REPO" ]
then
echo "The new URL looks valid."
echo "Rewriting the git history with the new url..."
SED_FILTER="sed 's;git-svn-id: "$FROM";git-svn-id: "$1";g'"
git gc
git filter-branch --msg-filter "$SED_FILTER" $(cat .git/packed-refs | awk '// {print $2}' | grep -v 'pack-refs')
if [ -n "$SRC" ]; then
FROM=`echo $SRC | sed "s|/trunk||"`
REPO=`svn info $1`
echo "Checking $REPO is actually a subversion repository..."
if [ -n "$REPO" ]; then
echo "The new URL looks valid."
echo "Rewriting the git history with the new url..."
SED_FILTER="sed 's;git-svn-id: "$FROM";git-svn-id: "$1";g'"
git gc
git filter-branch --msg-filter "$SED_FILTER" $(cat .git/packed-refs | awk '// {print $2}' | grep -v 'pack-refs')
#Couple of pointless checkouts - on some repos the log changes seem to need flushing by an operation like this
git checkout trunk
git checkout master
echo "Rebuild git-svn internals and updating the repo"
rm -rf .git/svn
sed -i~ 's|'$FROM'|'$1'|g' .git/config
git svn rebase
else
echo "Error: $1 Does not appear to be a subversion repository."
fi
git checkout trunk
git checkout master
echo "Rebuild git-svn internals and updating the repo"
rm -rf .git/svn
sed -i~ 's|'$FROM'|'$1'|g' .git/config
git svn rebase
else
echo "Error: $1 Does not appear to be a subversion repository."
fi
else
echo "Error: This doesn't appear to be a git working directory, or it's a git repo that hasn't been created using a git-svn bridge"
echo "Error: This doesn't appear to be a git working directory, or it's a git repo that hasn't been created using a git-svn bridge"
fi

View file

@ -1,2 +0,0 @@
#!/bin/bash
gpsbabel -t -i garmin -f /dev/ttyS0 -o gpx -F /home/steffen/Desktop/track.gpx

View file

@ -1,2 +0,0 @@
#!/bin/bash
gpsbabel -w -t -r -i garmin -f /dev/ttyS0 -o kml -F /home/steffen/.googleearth/gps2pc.kml

29
bash/hetzer-sb-notify.sh Executable file
View file

@ -0,0 +1,29 @@
#!/bin/bash
##
# Scrape Hetzners Serverbörse for good deals
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##

# The jq filter is written to a temp file; remove it again on exit
# (the original leaked one temp file per run).
FILTER=$(mktemp)
trap 'rm -f "${FILTER}"' EXIT

cat > "${FILTER}" <<EOF
.server | map(
select(
.hdd_size >= 3000 and
.ram >= 32 and
.bandwith >= 1000 and
.traffic == "unlimited" and
.cpu_benchmark >= 9000 and
(.setup_price | tonumber) == 0 and
(.price | tonumber) <= 50 and
(.specials | map(ascii_downcase ) | index("ssd"))
)
) |
sort_by(.price | tonumber) | reverse
EOF

# -s: no progress meter, -f: fail on HTTP errors, -S: still show error messages
curl -sfS https://www.hetzner.de/a_hz_serverboerse/live_data.json | jq -f "${FILTER}"

31
bash/ip-rule-restore.sh Executable file
View file

@ -0,0 +1,31 @@
#!/bin/bash
##
# Setup policy routing
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
# Rebuilds the ip-rule policy database and the contents of the "default"
# routing table for both IPv4 and IPv6.
# NOTE(review): flushing the rules briefly removes ALL policy routing on
# this host — run only when a short disruption is acceptable.
# Interface carrying the upstream gateway
GW_IF=bond0
for V in -4 -6; do
IPR="ip $V rule"
# Start from a clean slate: drop all rules and empty the "default" table,
# then re-populate them below.
$IPR flush
ip $V route flush table default
# Per-family on-link prefix and default gateway for the "default" table
if [ $V == -4 ]; then
ip $V route add 141.98.136.128/29 dev ${GW_IF} table default
ip $V route add default via 141.98.136.129 table default
else
ip $V route add 2a09:11c0:f0:bbf0::/64 dev ${GW_IF} table default
ip $V route add default via 2a09:11c0:f0:bbf0::1 dev ${GW_IF} src 2a09:11c0:f0:bbf0::3 table default
fi
# Rule preferences (lower value = matched first):
#  200: traffic NOT marked 0x1000 -> main table
#  240: traffic NOT marked 0x1001 -> dn42 table
#  250: remaining traffic         -> ebgp table
#  300: fallback                  -> "default" table populated above
$IPR add pref 200 not fwmark 0x1000 lookup main
$IPR add pref 240 not fwmark 0x1001 lookup dn42
$IPR add pref 250 lookup ebgp
$IPR add pref 300 lookup default
done

View file

@ -1,2 +0,0 @@
#!/bin/sh
ffmpeg -i "$1" -f mp4 -vcodec mpeg4 -maxrate 1000 -b 700 -qmin 3 -qmax 5 -bufsize 4096 -g 300 -acodec aac -ar 44100 -ab 192 -s 320x240 -aspect 4:3 $2

32
bash/luks-open.sh Executable file
View file

@ -0,0 +1,32 @@
#!/bin/bash
##
# Opens all LUKS volumes
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
# Iterates over LVM volumes following the "/dev/vg*/<name>-luks" naming
# convention and opens any that are not yet mapped, prompting for the
# passphrase via cryptsetup.
# Set IFS to a newline:
IFS="
"
for VOLUME in $(ls -1 /dev/vg*/*-luks); do
# Skip volumes that merely match the naming convention but have no LUKS header
if ! cryptsetup isLuks ${VOLUME}; then
echo "${VOLUME} is not a luks device"
continue
fi
# An opened LUKS volume shows up in /dev/disk/by-id as a dm-uuid-* symlink
# containing its LUKS UUID (with the dashes stripped); if such a block
# device exists, the volume is already mapped.
if [ -b /dev/disk/by-id/dm-uuid-*$(cryptsetup luksUUID ${VOLUME} | tr -d -)* ]; then
echo "${VOLUME} is opened"
else
# Map it under its base name without the "-luks" suffix
NAME=$(basename -s '-luks' ${VOLUME})
cryptsetup luksOpen --allow-discards ${VOLUME} ${NAME}
# systemd-ask-password --id="zfs:$dataset" \
# "Enter passphrase for '$dataset':" | \
# zfs load-key "$dataset"
fi
done

View file

@ -1,5 +0,0 @@
#!/bin/bash
for i in *.m4a; do
echo "Converting: ${i%.m4a}.mp3"
faad -o - "$i" | lame - "${i%.m4a}.mp3"
done

View file

@ -2,24 +2,10 @@
##
# Mount MS Sharepoint folders of the RWTH L²P System in gvfs
#
# @copyright 2012 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de/
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
function usage {

View file

@ -26,10 +26,10 @@
# followed by:
# mount /home
#
# @copyright 2013 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link http://www.steffenvogel.de
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
if [ "$(basename $0)" == "mount.luks" ]; then
@ -39,9 +39,9 @@ if [ "$(basename $0)" == "mount.luks" ]; then
shift 2
OPTS=$@
UUID=$(cryptsetup luksUUID $DEV)
if [ $? -ne 0 ]; then
echo -e "$DEV is not a LUKS device"
UUID=$(cryptsetup luksUUID $DEV)
if [ $? -ne 0 ]; then
echo -e "$DEV is not a LUKS device"
exit 1
fi
@ -61,9 +61,9 @@ elif [ "$(basename $0)" == "umount.luks" ]; then
shift
OPTS=$@
umount -i $OPTS $DEV
# NOTE: The umount option '-i' is essentially required. It skips this
# helper script which would cause otherwise an endless self recursion
umount -i $OPTS $DEV
# NOTE: The umount option '-i' is essentially required. It skips this
# helper script which would cause otherwise an endless self recursion
cryptsetup luksClose $UUID
fi

View file

@ -1,6 +0,0 @@
#!/bin/sh
xinput set-int-prop "Logitech USB Trackball" "Wheel Emulation" 8 1
xinput set-int-prop "Logitech USB Trackball" "Wheel Emulation Button" 8 8
xinput set-int-prop "Logitech USB Trackball" "Wheel Emulation Timeout" 16 200
xinput set-int-prop "Logitech USB Trackball" "Wheel Emulation X Axis" 8 6 7
xinput set-int-prop "Logitech USB Trackball" "Wheel Emulation Y Axis" 8 4 5

View file

@ -1,5 +0,0 @@
#!/bin/bash
sudo /etc/init.d/apache2 restart
sudo /etc/init.d/mysql restart
firefox -new-tab http://localhost/workspace/ &
cd ~/workspace/

View file

@ -2,25 +2,14 @@
##
# dhclient wrapper to update your dns
#
# @copyright 2013 Steffen Vogel
# @license http://www.apache.org/licenses/LICENSE-2.0 Apache License 2.0
# @author Steffen Vogel <post@steffenvogel.de>
# @link http://www.steffenvogel.de
# @copyright 2021, Steffen Vogel
# @license http://www.apache.org/licenses/LICENSE-2.0 Apache License 2.0
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
#
# Add this file to /etc/dhcp/dhclient-exit-hooks.d/nsupdate
# to update your DNS automatically when you get a new DHCP/IP lease from your ISP
##
##
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
##
NS=/usr/local/bin/nsupdate.sh
key=/etc/bind/dhcp.key
@ -29,8 +18,8 @@ host=wg.0l.de
server=127.0.0.1
case $reason in
BOUND|RENEW|REBIND|TIMEOUT)
$NS update -d $new_ip_address -k $key -z $zone -n $server -i $interface $host ;;
RELEASE)
$NS delete -d $old_ip_address -k $key -z $zone -n $server $host ;;
BOUND|RENEW|REBIND|TIMEOUT)
$NS update -d $new_ip_address -k $key -z $zone -n $server -i $interface $host ;;
RELEASE)
$NS delete -d $old_ip_address -k $key -z $zone -n $server $host ;;
esac

View file

@ -2,23 +2,12 @@
##
# Bind9 nsupdate wrapper
#
# @copyright 2013 Andrew Leonard
# @license http://www.apache.org/licenses/LICENSE-2.0 Apache License 2.0
# @author Andrew Leonard <sysadmin@andyleonard.com>
# @author Steffen Vogel <post@steffenvogel.de>
# @link http://www.steffenvogel.de
# @copyright 2013, Andrew Leonard
# @license http://www.apache.org/licenses/LICENSE-2.0 Apache License 2.0
# @author Andrew Leonard <sysadmin@andyleonard.com>
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
##
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
##
function usage {
@ -81,7 +70,7 @@ shift $((OPTIND-1))
# parsing host
if [ -n "$1" ]; then
HOST=$1
HOST=$1
else
echo -e "missing host"
echo

View file

@ -1,19 +0,0 @@
#!/bin/bash
if [ `gconftool --get /apps/panel/toplevels/bottom_panel_screen0/monitor` == 1 ]; then
gconftool --type int --set /apps/panel/toplevels/bottom_panel_screen0/monitor 0
else
gconftool --type int --set /apps/panel/toplevels/bottom_panel_screen0/monitor 1
fi
if [ `gconftool --get /apps/panel/toplevels/top_panel_screen0/monitor` == 1 ]; then
gconftool --type int --set /apps/panel/toplevels/top_panel_screen0/monitor 0
else
gconftool --type int --set /apps/panel/toplevels/top_panel_screen0/monitor 1
fi
if [ `gconftool --get /apps/panel/toplevels/panel_0/monitor` == 1 ]; then
gconftool --type int --set /apps/panel/toplevels/panel_0/monitor 0
else
gconftool --type int --set /apps/panel/toplevels/panel_0/monitor 1
fi

View file

@ -1,2 +0,0 @@
#!/bin/bash
gpsbabel -w -t -r -i kml -f /home/steffen/.googleearth/pc2gps.kml -o garmin -F /dev/ttyS0

View file

@ -1,5 +0,0 @@
#/bin/bash
gksudo modprobe uinput
wminput -c /etc/cwiid/wminput/presentation &
openoffice.org -show /home/steffen/Schule/Informatik/Künstliche\ Intelligenz/Künstliche\ Intelligenz.odp

46
bash/restic-btrfs-snapshots.sh Executable file
View file

@ -0,0 +1,46 @@
#!/bin/bash
##
# Convert BTRFS snapshots to Restic Snapshots
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
# Usage: restic-btrfs-snapshots.sh <host> <after-date>
# Run from the directory containing snapshot dirs named "<date>_<time>",
# e.g. "2021-01-03_04-05-06".

PARENT=""                # id of the previously imported restic snapshot (--parent)
HOST=$1                  # hostname to record in the restic snapshots
AFTER=$(date -d"$2" +%s) # only import snapshots taken after this date

for SNAP in $(ls -1); do
	# Split "<date>_<time>" and turn "HH-MM-SS" back into "HH:MM:SS"
	D=$(echo $SNAP | cut -d_ -f1)
	T=$(echo $SNAP | cut -d_ -f2 | tr - :)
	W=$(date -d "$D $T" +%u) # ISO weekday (7 = Sunday)

	if [ -z "$D" ] || [ -z "$T" ] || [ -z "$W" ]; then
		echo "Failed to parse: $SNAP"
		break
	fi

	if [ -n "$PARENT" ]; then
		RESTIC_OPTS="--parent $PARENT"
	else
		RESTIC_OPTS=""
	fi

	# Only import weekly (Sunday) snapshots
	if [ "$W" != "7" ]; then continue; fi

	echo $SNAP

	# Skip snapshots older than the requested start date.
	# NOTE: the original had a stray debug 'continue' here which made
	# everything below unreachable, so no backups were ever created.
	UNIX=$(date -d"$D $T" +%s)
	if (( $UNIX < $AFTER )); then continue; fi

	# Back the snapshot's contents up with its original timestamp;
	# --ignore-inode avoids rescanning due to differing inode numbers
	# between btrfs snapshots.
	pushd $SNAP
	restic backup $RESTIC_OPTS --tag old_btrfs_snapshot --host $HOST --time "$D $T" --ignore-inode .
	popd

	# Remember the id of the snapshot just created for the next --parent
	PARENT=$(restic snapshots --tag old_btrfs_snapshot --host $HOST --last --json | jq -r .[0].id)
done

View file

@ -2,26 +2,10 @@
##
# SDDNS update script
#
# @copyright 2013 Steffen Vogel
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This file is part of sddns
#
# sddns is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# sddns is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with sddns. If not, see <http://www.gnu.org/licenses/>.
# @link https://www.steffenvogel.de
##
# default options

8
bash/sep.sh Executable file
View file

@ -0,0 +1,8 @@
#!/bin/bash
# Split every *.PDF in the current directory into per-page files
# "<name>-1.pdf", "<name>-2.pdf", ... using pdfseparate (poppler-utils).
for file in *.PDF; do
	# Guard against the literal, unexpanded glob when no PDFs exist
	[ -e "$file" ] || continue
	# Quote all expansions so filenames containing spaces work
	name=$(basename -s .PDF "$file")
	pdfseparate "$file" "$name-%d.pdf"
done

118
bash/smart-read.sh Executable file
View file

@ -0,0 +1,118 @@
#!/bin/bash
# Copyright (c) 2020 Manuel Pitz
#
# Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
# http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
# http://opensource.org/licenses/MIT>, at your option. This file may not be
# copied, modified, or distributed except according to those terms.
# Prints a one-line SMART health summary (temperature, seek/read errors,
# power-on hours, overall status) for every drive found by 'smartctl --scan'.
DEBUG=0
# Extract field 10 (the raw value column) of the SMART attribute-table line
# matching $2 from smartctl output passed in $3. Result goes to stdout AND
# into the global 'temp'.
# $1: vendor (unused), $2: attribute name, $3: smartctl -a output
handle_Type () {
local vendor=$1
local attrName=$2
local data=$3[@]
temp=`grep "$attrName" <<< "$data" | sed "s/^[ \t]*//" | tr -s ' ' | cut -d" " -f10 | sed "s/^[ \t]*//"`
echo $temp
}
# Extract the value after the ':' of a "Key: value" line matching $2.
# $1: vendor (unused), $2: line prefix to match, $3: smartctl -a output
handle_singleCol () {
local vendor=$1
local attrName=$2
local data=$3[@]
temp=`grep "$attrName" <<< "$data" | sed "s/^[ \t]*//" | tr -s ' ' | cut -d":" -f2 | sed "s/^[ \t]*//"`
echo $temp
}
# Print the summary row for an ATA/SATA drive.
# Relies on the globals $path, $driveModel, $driveSerial set by the main loop.
handle_SATA_HDD () {
local vendor=$1
local driveData=$2[@]
temp=$(handle_Type $vendor "Temperature_Celsius" "$driveData")
seek_err=$(handle_Type $vendor "Seek_Error_Rate" "$driveData")
read_err=$(handle_Type $vendor "Raw_Read_Error_Rate" "$driveData")
power_on=$(handle_Type $vendor "Power_On_Hours" "$driveData")
status=$(handle_singleCol $vendor "SMART overall-health self-assessment test result:" "$driveData")
printf "%10s %10s %20s %20s %10s %10s %10s %10s %10s\n" $path "$vendor" "$driveModel" "$driveSerial" "$temp" "$seek_err" "$read_err" "$power_on" "$status"
}
# Print the summary row for a SAS drive. SAS smartctl output uses a different
# layout (ECC counter table), so read/write corrected+uncorrected counters are
# summed into a single error figure.
# Relies on the globals $path, $driveModel, $driveSerial set by the main loop.
handle_SAS_HDD () {
local vendor=$1
local driveData=$2[@]
if [ $DEBUG == 1 ]; then
echo "SAS handle"
fi
temp=`grep "Drive Temperature:" <<< "$driveData" | tr -s ' ' | cut -d" " -f4 | sed "s/^[ \t]*//"`
readCorrected=`grep "read:" <<< "$driveData" | tr -s ' ' | cut -d" " -f5 | sed "s/^[ \t]*//"`
readunCorrected=`grep "read:" <<< "$driveData" | tr -s ' ' | cut -d" " -f8 | sed "s/^[ \t]*//"`
writeCorrected=`grep "write:" <<< "$driveData" | tr -s ' ' | cut -d" " -f5 | sed "s/^[ \t]*//"`
writeunCorrected=`grep "write:" <<< "$driveData" | tr -s ' ' | cut -d" " -f8 | sed "s/^[ \t]*//"`
seek_err=$(handle_Type $vendor "Seek_Error_Rate" "$driveData")
read_err=$(($readCorrected + $readunCorrected + $writeCorrected + $writeunCorrected))
power_on=$(handle_Type $vendor "Power_On_Hours" "$driveData")
status=$(handle_singleCol $vendor "Status:" "$driveData")
printf "%10s %10s %20s %20s %10s %10s %10s %10s %10s\n" $path "$vendor" "$driveModel" "$driveSerial" "$temp" "$seek_err" "$read_err" "$power_on" "$status"
}
echo "readSmartData"
# One line per detected drive, e.g. "/dev/sda -d scsi # /dev/sda, SCSI device"
mapfile -t DRIVES < <(smartctl --scan)
printf "%10s %10s %20s %20s %10s %10s %10s %10s %10s\n" "Path" "Vendor" "Model" "Serial" "Temp" "Seek_err" "Read_err" "Power_on" "Status"
for drive in "${DRIVES[@]}"
do
path=`cut -d" " -f1 <<< "$drive"`
devType=`cut -d" " -f6 <<< "$drive"`
if [ $path == "/dev/bus/0" ]; then continue; fi
driveData=`smartctl -a $path`
# Identification fields; ATA and SCSI output use different labels, so
# fall back to the alternate spellings when the first grep comes up empty.
driveFamily=`grep "Model Family:" <<< "$driveData" | tr -s ' ' | cut -d":" -f2 | sed "s/^[ \t]*//"`
driveVendor=`grep "Vendor:" <<< "$driveData" | tr -s ' ' | cut -d":" -f2 | sed "s/^[ \t]*//"`
driveModel=`grep "Device Model:" <<< "$driveData" | tr -s ' ' | cut -d":" -f2 | sed "s/^[ \t]*//"`
driveSerial=`grep "Serial Number:" <<< "$driveData" | tr -s ' ' | cut -d":" -f2 | sed "s/^[ \t]*//"`
if [ -z "$driveSerial" ]; then
driveSerial=`grep "Serial number:" <<< "$driveData" | tr -s ' ' | cut -d":" -f2 | sed "s/^[ \t]*//"`
fi
if [ -z "$driveModel" ]; then
driveModel=`grep "Product:" <<< "$driveData" | tr -s ' ' | cut -d":" -f2 | sed "s/^[ \t]*//"`
fi
#echo $driveName
# Derive the vendor: explicit "Vendor:" field, else first word of the
# model family, else first word of the model string.
if [ -n "$driveVendor" ]; then
vendor=$driveVendor
elif [ -z "$driveFamily" ]; then
vendor=`cut -d" " -f1 <<< "$driveModel"`
else
vendor=`cut -d" " -f1 <<< "$driveFamily"`
fi
tmpModel=`cut -d" " -f2 <<< "$driveModel"`
if [ -n "$tmpModel" ]; then
driveModel=$tmpModel
fi
# Seagate reports attributes 1 and 7 as packed values; re-read with the
# 54-bit raw interpretation so the error counters are meaningful.
if [[ $vendor == *"Seagate"* ]]; then
#echo "rerun smartctl for Seagate drives"
driveData=`smartctl -a -v 7,raw48:54 -v 1,raw48:54 $path`
fi
# Dispatch on the transport protocol reported by smartctl
sasFlag=`grep "Transport protocol:" <<< "$driveData" | tr -s ' ' | cut -d":" -f2 | sed "s/^[ \t]*//"`
if [[ $sasFlag == *"SAS"* ]]; then
handle_SAS_HDD $vendor "$driveData"
else
handle_SATA_HDD $vendor "$driveData"
fi
done

View file

@ -4,24 +4,10 @@
#
# for automated synchronisation of my home directory
#
# @copyright 2012 Steffen Vogel
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
# Hostname or IP address of remote box

16
bash/update-roa.sh Executable file
View file

@ -0,0 +1,16 @@
#!/bin/bash
##
# Update ROA tables for DN42
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##

# Abort before reconfiguring bird if a download fails.
# (The original 'set +x' was a no-op: xtrace is already off by default.)
set -e

# -s/-S: quiet but report errors, -f: fail on HTTP errors, -L: follow redirects,
# -R: keep the server's mtime, -z <file>: only download if newer than local copy
curl -sfSLR {-o,-z}/var/lib/bird/bird_roa_dn42_v4.conf https://dn42.burble.com/roa/dn42_roa_bird2_4.conf
curl -sfSLR {-o,-z}/var/lib/bird/bird_roa_dn42_v6.conf https://dn42.burble.com/roa/dn42_roa_bird2_6.conf

birdc configure

15
bash/update-xmltv.sh Executable file
View file

@ -0,0 +1,15 @@
#!/bin/bash
##
# Update XMLTV data between Emby and TVHeadEnd
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##

# Grab EPG data into XMLTV files consumed by Emby
tv_grab_eu_epgdata --output /srv/Data/Emby/epgdata.xml
tv_grab_eu_xmltvse --output /srv/Data/Emby/xmltvse.xml

# Feed the same files into TVHeadEnd's XMLTV grabber socket
# (input redirection instead of the redundant 'cat file | socat')
socat - UNIX-CONNECT:/var/lib/tvheadend/config/epggrab/xmltv.sock < /srv/Data/Emby/epgdata.xml
socat - UNIX-CONNECT:/var/lib/tvheadend/config/epggrab/xmltv.sock < /srv/Data/Emby/xmltvse.xml

View file

@ -1,11 +0,0 @@
#!/bin/bash
### BEGIN INIT INFO
# Provides: uptime
# Required-Start: $remote_fs
# Required-Stop: $remote_fs
# Default-Stop: 0 1 6
# Short-Description: Log uptime of server before shutdown
### END INIT INFO
echo $(date +%s) $(cat /proc/uptime) >> /var/log/uptime.log

15
bash/virsh-all.sh Executable file
View file

@ -0,0 +1,15 @@
#!/bin/bash
##
# Run a libvirt action (default: start) against every inactive VM
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##

# Action may be overridden via the environment, e.g. ACTION=shutdown
: "${ACTION:=start}"

# Iterate over the names of all currently inactive domains
virsh list --inactive --name | while read -r DOMAIN; do
	# virsh emits a trailing empty line; skip it
	[ -n "$DOMAIN" ] || continue
	virsh "${ACTION}" "${DOMAIN}"
done

View file

@ -4,24 +4,10 @@
#
# includes MAC lookup via DNS and ARP
#
# @copyright 2012 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de/
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de/
##
IP_REGEX="[[:digit:]]{1,3}\.[[:digit:]]{1,3}\.[[:digit:]]{1,3}\.[[:digit:]]{1,3}"

25
bash/zfs-load-keys.sh Executable file
View file

@ -0,0 +1,25 @@
#!/bin/bash
##
# Load ZFS encryption keys
#
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
# Prompts for and loads the encryption key of every ZFS encryption root
# whose key is set to "prompt" and not yet loaded.
# Set IFS to a newline:
IFS="
"
# A dataset is its own encryption root exactly when name == encryptionroot;
# those are the only datasets whose keys can be loaded directly.
for dataset in $(zfs list -H -p -o name,encryptionroot | \
awk -F "\t" '{if ($1 == $2) { print $1 }}')
do
# Only ask for datasets that use an interactive passphrase and whose
# key has not been loaded yet.
if [ "$(zfs get -H -p -o value keylocation "$dataset")" = "prompt" ] &&
[ "$(zfs get -H -p -o value keystatus "$dataset")" = "unavailable" ]
then
# systemd-ask-password integrates with the boot password agent;
# the passphrase is piped straight into 'zfs load-key'.
systemd-ask-password --id="zfs:$dataset" \
"Enter passphrase for '$dataset':" | \
zfs load-key "$dataset"
fi
done

View file

@ -1,35 +1,21 @@
#!/bin/bash
##
# reconnect zyxel prestige router
# Reconnect Zyxel Prestige Router
#
# @copyright 2012 Steffen Vogel
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <info@steffenvogel.de>
# @link http://www.steffenvogel.de
##
##
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this script. If not, see <http://www.gnu.org/licenses/>.
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
##
IP=192.168.1.1
USER=admin
# change password here
#PW=
PW=XXXXX # Change me
OLD_IP=`wget http://checkip.dyndns.org/ -O /dev/stdout 2&gt;/dev/null | sed "s/.*Current IP Address: \([0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\).*/\1/"`
echo "Alte IP: $OLD_IP"
curl http://$USER:$PW@$IP/Forms/DiagADSL_1 -d "LineInfoDisplay=&amp;DiagDSLDisconnect=PPPoE+Trennung"
NEW_IP=`wget http://checkip.dyndns.org/ -O /dev/stdout 2&gt;/dev/null | sed "s/.*Current IP Address: \([0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\).*/\1/"`
echo "Neue IP: $NEW_IP"

View file

@ -50,11 +50,11 @@ for ($i = 3; $i < 14; $i++) {
echo '</select></form>
<div id="copy">&copy; Steffen Vogel<br />
<a href="mailto:info@steffenvogel.de">info@steffenvogel.de</a><br />
<a href="http://www.steffenvogel.de">http://www.steffenvogel.de</a><br />
<a href="mailto:post@steffenvogel.de">post@steffenvogel.de</a><br />
<a href="https://www.steffenvogel.de">https://www.steffenvogel.de</a><br />
Based on Micha\'s Javascript & CSS frontend</div>
</div>
</body>
</html>';
?>
?>

View file

@ -1,9 +1,9 @@
## Proof-of-concept to show different methods to load executables in the Linux kernel
#
# @copyright 2016 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link http://www.steffenvogel.de
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
#########################################################################################
TARGETS = demo-interpreter demo-binfmt_misc proxy proxy-static

View file

@ -3,7 +3,7 @@
* @copyright 2016 Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link http://www.steffenvogel.de
* @link https://www.steffenvogel.de
*/
#include <stdio.h>

View file

@ -1,9 +1,9 @@
/** Proof-of-concept to show different methods to load executables in the Linux kernel
*
* @copyright 2016 Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link http://www.steffenvogel.de
* @copyright 2021, Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link https://www.steffenvogel.de
*/
#include <linux/elf.h>
@ -98,8 +98,8 @@ static int load_binary(struct linux_binprm *bprm)
}
static struct linux_binfmt script_format = {
.module = THIS_MODULE,
.load_binary = load_binary,
.module = THIS_MODULE,
.load_binary = load_binary,
};
static int init(void)

View file

@ -1,9 +1,9 @@
/** Proof-of-concept to show different methods to load executables in the Linux kernel
*
* @copyright 2016 Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link http://www.steffenvogel.de
* @copyright 2021, Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link https://www.steffenvogel.de
*/
#include <unistd.h>

View file

@ -358,7 +358,7 @@ else {
</p>
<footer>
<p>by <a href="http://www.steffenvogel.de">Steffen Vogel</a> - <a href="http://dev.0l.de/tools/campus">help</a></p>
<p>by <a href="https://www.steffenvogel.de">Steffen Vogel</a> - <a href="http://dev.0l.de/tools/campus">help</a></p>
</footer>
</div>
</body>

View file

@ -2,7 +2,7 @@
/**
* Keygen for CKFinder
* tested successfully with version 1.4.1.1
* written by Steffen Vogel (info@steffenvogel.de)
* written by Steffen Vogel (post@steffenvogel.de)
* reverse engenering by Micha Schwab & Steffen Vogel
*/
?>
@ -15,8 +15,8 @@
<title>Keygen for CKFinder</title>
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<script src="scripts.js" type="text/javascript"></script>
<link rel="stylesheet" type="text/css" href="style.css">
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<link rel="stylesheet" type="text/css" href="style.css">
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
</head>
<body>

View file

@ -1,26 +1,11 @@
<?php
/**
* github migration script
* Github migration script
*
* @author Steffen Vogel <info@steffenvogel.de>
* @copyright Copyright (c) 2011, Steffen Vogel
* @author Steffen Vogel <post@steffenvogel.de>
* @copyright 2021, Steffen Vogel
* @license http://opensource.org/licenses/gpl-license.php GNU Public License
*/
/*
*
* This script is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* This script is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this script. If not, see <http://www.gnu.org/licenses/>.
*/
// Configuration
$rootDir = getcwd();

View file

@ -28,8 +28,8 @@
<option value="5">5</option>
<option value="6">6</option>
</select></div>
<div style="margin-top: 25px;" >Code &amp; Erl&auml;uterungen auf <a href="http://www.steffenvogel.de/">http://www.steffenvogel.de</a></div>
<div style="float: right; font-size: 90%;" >&copy; Steffen Vogel<br /><a href="mailto:info@steffenvogel.de">info@steffenvogel.de</a><br /><a href="http://www.steffenvogel.de">http://www.steffenvogel.de</a><br /><br />Slider by Erik Arvidsson @ <a href="http://webfx.eae.net/dhtml/slider/slider.html" >WebFx</a></div>
<div style="margin-top: 25px;" >Code &amp; Erl&auml;uterungen auf <a href="https://www.steffenvogel.de/">https://www.steffenvogel.de</a></div>
<div style="float: right; font-size: 90%;" >&copy; Steffen Vogel<br /><a href="mailto:post@steffenvogel.de">post@steffenvogel.de</a><br /><a href="https://www.steffenvogel.de">https://www.steffenvogel.de</a><br /><br />Slider by Erik Arvidsson @ <a href="http://webfx.eae.net/dhtml/slider/slider.html" >WebFx</a></div>
<script type="text/javascript">
var int;

View file

@ -5,22 +5,9 @@ Plugin URI: http://0l.de/projects/wordpress/plugins/linkpreview
Description: Place an overview of your post embedded links below your post
Version: 0.1
Author: Steffen Vogel
Author URI: http://www.steffemvogel.de
Copyright 2010 Steffen Vogel (info@steffenvogel.de)
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License, version 2, as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Author URI: https://www.steffenvogel.de
Author Mail: post@steffenvogel.de
Copyright: 2021, Steffen Vogel
*/
/* Use the admin_menu action to define the custom boxes */

View file

@ -0,0 +1,8 @@
FROM python:3.8-slim

RUN mkdir /app

# COPY is preferred over ADD when no archive extraction or URL fetching
# is needed (Docker best practice).
COPY . /app

# --no-cache-dir avoids baking pip's download cache into the image layer.
RUN pip3 install --no-cache-dir /app

ENTRYPOINT [ "/usr/local/bin/accelerated_exporter" ]

View file

@ -0,0 +1,12 @@
# Accelerated Server Stats Exporter for Prometheus
## Examples
```bash
export KDNUMMER=10642
export PASSWORD=$(pass providers/interface.datafabrik.de | head -n1)
# python3 accelerated_stats $* usage
python3 accelerated_stats $* usage_sum
# python3 accelerated_stats $* -f raw -F usage_avg stats
```

View file

@ -0,0 +1,44 @@
from flask import Flask, Response

from accelerated_stats.tenant import Tenant
from accelerated_stats.utils import parse_arguments

# Parse CLI/env configuration once at import time. The exporter takes no
# sub-command, hence with_subcommands=False.
args = parse_arguments(with_subcommands=False)
args_dict = vars(args)

# Log into the Accelerated customer interface.
acc = Tenant(**args_dict)

app = Flask(__name__)
app.config['DEBUG'] = args.debug

# Export either a single server (--server) or all servers of the tenant.
# Fixed: the previous code put the raw integer id into `servers`, so the
# later server.get_stats() call in /metrics would have raised
# AttributeError. We now select the matching Server object instead.
if args.server:
    servers = [s for s in acc.get_servers() if s.id == args.server]
else:
    servers = acc.get_servers()


@app.route('/healthz')
def healthz():
    # Liveness endpoint (e.g. for Kubernetes probes).
    return Response('OK', mimetype='text/plain')


@app.route('/metrics')
def metrics():
    """Render all server statistics in Prometheus text exposition format."""
    metrics = []
    for server in servers:
        stats = server.get_stats()
        for k, v in stats.items():
            if k == 'port':
                continue

            # Build the label set from the server's attributes, producing
            # lines like: accelerated_incoming{id="1",tenant="2"} 3.4
            t = [f'{p}="{q}"' for p, q in server.as_dict().items()]
            t = ','.join(t)
            metrics.append(f'{args.prefix}{k}{{{t}}} {v}')

    resp = '\n'.join(metrics)
    return Response(resp, mimetype='text/plain')


def main():
    # Development entry point; production uses gunicorn (see gunicorn.conf.py).
    app.run(host='::')

View file

@ -0,0 +1,74 @@
#!/bin/env python3
import os
import sys
import json
from datetime import datetime
from accelerated_stats import utils
from accelerated_stats.tenant import Tenant
def converter(o):
    # json.dump(default=...) hook: render datetimes as ISO dates
    # (YYYY-MM-DD); any other unsupported object falls through as None,
    # which json serializes as null.
    return o.strftime('%Y-%m-%d') if isinstance(o, datetime) else None
def main():
    """CLI entry point: log into the Accelerated customer interface and
    print server information, statistics or traffic usage.

    Output format is selected with --format (json or raw); --server limits
    the output to a single server id; --field selects one key in raw mode.
    """
    args = utils.parse_arguments()
    args.coerce = not args.no_coerce
    args_dict = vars(args)

    acc = Tenant(**args_dict)

    servers = acc.get_servers()
    if args.server:
        servers = filter(lambda s: s.id == args.server, servers)

    if args.cmd == 'servers':
        out = [s.as_dict() for s in servers]
    else:
        out = []
        for server in servers:
            if args.cmd == 'stats':
                sout = server.get_stats()
            elif args.cmd == 'usage':
                sout = {
                    'usage': server.get_usage(args.date)
                }
            elif args.cmd == 'usage_sum':
                # Sum the per-day rows of the month into a single record.
                usage = server.get_usage(args.date)
                usage_sum = {
                    k: sum(d[k] for d in usage) for k in ['in', 'out']
                }
                usage_sum['total'] = usage_sum['in'] + usage_sum['out']
                sout = {
                    'usage_sum': usage_sum
                }

            out.append({
                **server.as_dict(),
                **sout
            })

    if args.format == 'json':
        json.dump(out, sys.stdout, indent=4, default=converter)
        sys.stdout.write('\n')
    elif args.format == 'raw':
        # Fixed: the previous implementation compared `out is list` /
        # `out is dict` (always False for instances) and called the
        # non-existent methods list.join() / dict.enumerate(), so raw
        # output never produced anything. `out` is always a list of
        # per-server dicts at this point.
        for entry in out:
            if args.field:
                print(entry[args.field])
            else:
                for k, v in entry.items():
                    print(f'{k}: {v}')

View file

@ -0,0 +1,228 @@
import requests
from datetime import datetime
from lxml import etree
from pint import UnitRegistry
import re
import io
ureg = UnitRegistry()
ureg.define('MBit = Mbit')
ureg.define('KBit = kbit')
ureg.define('MB = megabyte')
ureg.define('KB = kilobyte')
ureg.define('GB = gigabyte')
class Server:
    """A single dedicated server as listed in the Accelerated customer
    interface. Acts as a thin handle that delegates data retrieval to its
    owning Tenant."""

    def __init__(self, tenant, id, switch=None, port=None, name=None, hostname=None):
        self.tenant = tenant
        self.id = id
        self.switch = switch
        self.port = port
        self.name = name
        self.hostname = hostname

    def get_usage(self, date):
        """Fetch this server's per-day traffic usage for the month of `date`."""
        return self.tenant._get_usage(self.id, date)

    def get_stats(self):
        """Fetch this server's current port statistics."""
        return self.tenant._get_stats(self.id)

    def as_dict(self):
        """Return a plain-dict representation; optional attributes that are
        unset (None/empty) are omitted."""
        srv = {
            'id': self.id,
            'tenant': int(self.tenant.kdnummer)
        }
        for attr in ('switch', 'port', 'name', 'hostname'):
            value = getattr(self, attr)
            if value:
                srv[attr] = value
        return srv
class Tenant:
    """Scraper for the Accelerated customer interface
    (e.g. interface.datafabrik.de).

    Logs in with customer number (kdnummer) and password on construction
    and exposes the servers of the account together with their traffic
    statistics. Values are parsed out of the interface's HTML via XPath;
    when ``coerce`` is enabled, quantities are converted to the configured
    units using pint.
    """

    # Row indices of the interesting cells in the per-server stats table.
    STATS_FIELDS = {
        # 'port': 1,
        'max_speed': 2,
        'switch_uptime': 3,
        'incoming': 4,
        'outgoing': 5,
        'sum': 6,
        'usage_95perc': 7,
        'usage_avg': 8,
        'current_in': 10,
        'current_out': 11
    }

    # XPath expressions into the interface's HTML layout.
    XPATH_CONTENT = '//*[@id="accelerated-layout-container-content"]'
    XPATH_USAGE_TABLE = XPATH_CONTENT + '/table'
    XPATH_SERVER_TABLE = XPATH_USAGE_TABLE
    XPATH_STATS_TABLE = XPATH_CONTENT + '/table[3]/tr[1]/td/table'
    # Every second row (server rows alternate with switch-port rows),
    # skipping two header rows and the trailing row.
    XPATH_SERVER_ROWS = XPATH_SERVER_TABLE + '/tr[position() > 2 and position() < last() and position() mod 2]'
    XPATH_FIELDS = { k: f'tr[{i}]/td[2]' for k, i in STATS_FIELDS.items() }

    def __init__(self, **kwargs):
        # Accepted kwargs: url, kdnummer, password, coerce, unit_volume,
        # unit_speed, unit_time. Extra kwargs (e.g. a whole argparse
        # namespace passed via vars()) are silently ignored.
        self.sess = requests.Session()

        self.coerce = kwargs.get('coerce', True)
        self.url = kwargs.get('url')
        self.kdnummer = kwargs.get('kdnummer')
        self.password = kwargs.get('password')
        self.unit_volume = kwargs.get('unit_volume', 'TiB')
        self.unit_speed = kwargs.get('unit_speed', 'MBit/s')
        self.unit_time = kwargs.get('unit_time', 's')

        # Establish the authenticated session immediately.
        self.do_login()

    def as_dict(self):
        return {
            'kdnummer': self.kdnummer
        }

    @property
    def login_url(self):
        return f'{self.url}/verify.php'

    @property
    def server_url(self):
        return f'{self.url}/CServer.php'

    def usage_url(self, server, date):
        # The interface expects the month formatted as YYYY.MM.
        d = date.strftime('%Y.%m')
        return f'{self.server_url}?action=detailUsage&id={server}&date={d}'

    def stats_url(self, server):
        return f'{self.server_url}?action=stats&id={server}'

    def do_login(self):
        """Authenticate the session by posting the interface's login form.

        NOTE(review): the response is not checked here — a failed login
        only surfaces later as empty XPath results; confirm error handling
        is acceptable for your use case.
        """
        payload = {
            'kdnummer': self.kdnummer,
            'passwort': self.password,
            'Login': 'Login',
            'url': ''
        }

        r = self.sess.post(self.login_url, data=payload)

    def get_servers(self):
        """Scrape the server list page and return a list of Server objects."""
        r = self.sess.get(self.server_url + '?switchPort=show')

        parser = etree.HTMLParser()
        root = etree.parse(io.StringIO(r.text), parser)

        table = root.xpath(self.XPATH_USAGE_TABLE)[0]
        rows = root.xpath(self.XPATH_SERVER_ROWS)

        servers = []
        for row in rows:
            # The numeric server id is only available from the stats link.
            anchor = row.xpath('td[3]/a')[0]
            href = anchor.get('href')
            match = re.match('CServer.php\?action=stats&id=([0-9]+)', href)
            if match:
                server_id = int(match.group(1))

                server = {
                    'id': server_id
                }

                name = row.xpath('td[2]')
                if name:
                    # Collapse whitespace and '|' separators in the name.
                    server['name'] = re.sub(r'\s+|\|', ' ', name[0].text).strip()

                hostname = row.xpath('td[2]/u/font')
                if len(hostname) > 0:
                    server['hostname'] = hostname[0].text.strip()

                # The following sibling row holds "switch -> port".
                swport_row = row.getnext()
                if swport_row is not None:
                    swport = swport_row.xpath('td[2]/table/tr/td[2]/font')
                    if len(swport) > 0:
                        switch, port = swport[0].text.strip().split(' -> ')

                        server['port'] = port
                        server['switch'] = switch

                servers.append(Server(self, **server))

        return servers

    def _get_usage(self, server_id, date):
        """Return per-day traffic rows ({'date', 'in', 'out'}) for the month
        of `date`; 'in'/'out' are pint magnitudes in unit_volume when
        coercion is enabled, otherwise raw strings from the page."""
        r = self.sess.get(self.usage_url(server_id, date))

        parser = etree.HTMLParser()
        root = etree.parse(io.StringIO(r.text), parser)

        table = root.xpath(self.XPATH_USAGE_TABLE)[0]
        rows = table.xpath('tr')

        data = []
        for row in rows[1:]:  # skip the header row
            columns = row.xpath('td')
            data_row = {
                'date': datetime.strptime(columns[2].xpath('b')[0].text.strip(), '%d.%m.%Y'),
                'in': columns[3].text.strip(),
                'out': columns[4].text.strip()
            }

            if self.coerce:
                # Convert the human-readable volume strings to unit_volume.
                target_unit = ureg.parse_expression(self.unit_volume)

                for f in [ 'in', 'out' ]:
                    d = data_row[f]
                    d = ureg.parse_expression(d)
                    d = d.to(target_unit).magnitude

                    data_row[f] = d

            data.append(data_row)

        return data

    def _get_stats(self, server_id):
        """Return the current port statistics of one server as a dict keyed
        by STATS_FIELDS; values are converted to the configured units when
        coercion is enabled."""
        r = self.sess.get(self.stats_url(server_id))

        parser = etree.HTMLParser()
        root = etree.parse(io.StringIO(r.text), parser)

        table = root.xpath(self.XPATH_STATS_TABLE)[0]

        data = { k: table.xpath(p)[0].text for k, p in self.XPATH_FIELDS.items() }

        if self.coerce:
            target_units = {
                'switch_uptime': ureg.parse_expression(self.unit_time),
                'incoming': ureg.parse_expression(self.unit_volume),
                'outgoing': ureg.parse_expression(self.unit_volume),
                'sum': ureg.parse_expression(self.unit_volume),
                'max_speed': ureg.parse_expression(self.unit_speed),
                'usage_95perc': ureg.parse_expression(self.unit_speed),
                'usage_avg': ureg.parse_expression(self.unit_speed),
                'current_in': ureg.parse_expression(self.unit_speed),
                'current_out': ureg.parse_expression(self.unit_speed)
            }

            # Rewrite the page's KB/MB/GB/TB suffixes into binary units
            # (KiB/MiB/...) before handing them to pint.
            for f in [ 'incoming', 'outgoing', 'sum' ]:
                data[f] = re.sub(r"(K|M|G|T|)B$", r"\1iB", data[f])

            # Rewrite "N days, HH:MM:SS.cc" into a pint-parsable sum.
            data['switch_uptime'] = re.sub(r"(\d+) days, (\d+):(\d+):(\d+).(\d+)", r"\1 days + \2 hours + \3 minutes + \4 seconds + \5 centiseconds", data['switch_uptime'])

            coerced_data = { k: ureg.parse_expression(v) for k, v in data.items() if k != 'port' }
            converted_data = { k: coerced_data[k].to(target_units[k]).magnitude for k, v in coerced_data.items() }

            data = { **data, **converted_data }

        return data

View file

@ -0,0 +1,32 @@
import argparse
from datetime import datetime
import os
def parse_arguments(with_subcommands=True):
def valid_date(s):
try:
return datetime.strptime(s, "%Y-%m")
except ValueError:
msg = "Not a valid date: '{0}'.".format(s)
raise argparse.ArgumentTypeError(msg)
parser = argparse.ArgumentParser('accelerated_stats',
description='Get port stats from Accelerated Customer Interface (e.g. interface.datafabrik.de)')
parser.add_argument('--debug', '-d', type=bool, default=False)
parser.add_argument('--format', '-f', choices=['raw', 'json'], default='json')
parser.add_argument('--field', '-F', type=str)
parser.add_argument('--no-coerce', '-c', action='store_true', default=False)
parser.add_argument('--unit-volume', type=str, default='B')
parser.add_argument('--unit-speed', type=str, default='bit/s')
parser.add_argument('--unit-time', type=str, default='s')
parser.add_argument('--kdnummer', '-u', type=str, default=os.environ.get('KDNUMMER'))
parser.add_argument('--password', '-p', type=str, default=os.environ.get('PASSWORD'))
parser.add_argument('--url', '-U', type=str, default='https://interface.datafabrik.de/')
parser.add_argument('--server', '-s', type=int)
parser.add_argument('--date', '-D', type=valid_date, default=datetime.now())
parser.add_argument('--prefix', '-P', type=str, default='accelerated_')
if with_subcommands:
parser.add_argument('cmd', metavar='CMD', choices=['stats', 'usage', 'usage_sum', 'servers'])
return parser.parse_args()

View file

@ -0,0 +1,4 @@
# Gunicorn configuration for the accelerated-stats exporter.
bind = "[::]:5000"  # listen on all IPv6 (and v4-mapped) addresses, port 5000
workers = 4
threads = 4
timeout = 120  # worker timeout in seconds

View file

@ -0,0 +1,72 @@
---
apiVersion: v1
kind: Namespace
metadata:
  name: accelerated-stats
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: accelerated-stats
  namespace: accelerated-stats
  labels:
    app: accelerated-stats
spec:
  selector:
    matchLabels:
      app: accelerated-stats
  template:
    metadata:
      labels:
        app: accelerated-stats
    spec:
      containers:
        - name: accelerated-stats
          image: stv0g/accelerated-stats
          imagePullPolicy: Always
          ports:
            - name: http-metrics
              containerPort: 5000
          envFrom:
            - secretRef:
                name: accelerated-stats
          # Fixed: a dangling "resource:" key is not a valid container
          # field and is rejected by the API server's validation.
---
apiVersion: v1
kind: Service
metadata:
  name: accelerated-stats
  namespace: accelerated-stats
  labels:
    app: accelerated-stats
spec:
  selector:
    app: accelerated-stats
  ports:
    - protocol: TCP
      port: 80
      name: http-metrics
      targetPort: http-metrics
---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
  name: accelerated-stats
  namespace: accelerated-stats
  labels:
    app: accelerated-stats
spec:
  selector:
    matchLabels:
      app: accelerated-stats
  namespaceSelector:
    matchNames:
      - accelerated-stats
  endpoints:
    - port: http-metrics
      interval: 1m

# Create secret:
#   kubectl --namespace=accelerated-stats create secret generic accelerated-stats \
#     --from-literal=KDNUMMER=$(pass providers/interface.datafabrik.de | sed -nE 's/^User: (.*)/\1/p') \
#     --from-literal=PASSWORD=$(pass providers/interface.datafabrik.de | sed -n 1p)

View file

@ -0,0 +1,36 @@
import os

from setuptools import setup, find_packages


def read(fname):
    """Return the contents of a file next to this setup.py (long description)."""
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(
    name='accelerated_stats',
    version='0.1.0',
    author='Steffen Vogel',
    author_email='post@steffenvogel.de',
    description=('Fetch and export status and bandwidth '
                 'for servers hosted by Accelerated'),
    license='GPL-3.0',
    keywords='accelerated prometheus exporter',  # fixed typo: "promtheus"
    # TODO: placeholder URL left over from the setuptools example project —
    # point this at the real repository.
    url='http://packages.python.org/an_example_pypi_project',
    packages=find_packages(),
    long_description=read('README'),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: Utilities',
        # Fixed: was "BSD License", contradicting license='GPL-3.0' above.
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
    ],
    install_requires=[
        'pint',
        'flask',
        'requests',
        'lxml'
    ],
    entry_points={
        'console_scripts': [
            'accelerated_stats = accelerated_stats.stats:main',
            'accelerated_exporter = accelerated_stats.exporter:main',
        ]
    }
)

View file

@ -0,0 +1,158 @@
from glob import glob
import os
import logging
import shutil
import time
import sys
from PyPDF2 import PdfWriter, PdfReader
from urllib.parse import urljoin
from typing import List
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import tempfile
import requests
from requests.auth import HTTPBasicAuth
# 1pt == 1/72th inch
# 1inch == 2.54cm
PTCM = 1 / 72 * 2.54  # centimeters per PDF point; divide a cm value by this to get points

# You authenticate via BasicAuth or with a session id.
# We use BasicAuth here
username = os.environ.get("PAPERLESS_USERNAME")
password = os.environ.get("PAPERLESS_PASSWORD")

# Where you have Paperless installed and listening
url = os.environ.get("PAPERLESS_URL")

# Tags applied to every uploaded document (comma-separated env override).
default_tags = set(os.environ.get("PAPERLESS_DEFAULT_TAGS", "Scan").split(","))

# Documents carrying one of these tags are receipt scans and get cropped.
# The set members must match the title-cased watch-directory names produced
# by os.path.basename(dir).title() in upload_file().
receipe_tags = {"Receipes", "Receipes-Small"}
receipe_trim_y = 0  # 0.5 / PTCM
# Crop widths in PDF points, keyed by the (lowercase) directory name.
receipe_width_map = {"receipes": 8.5 / PTCM, "receipes-small": 6 / PTCM}

# Paperless numeric ids for tag names and document types.
tag_id_map = {"Scan": 38, "Steffen": 89, "Britta": 88, "Bus": 52, "Wohnen": 40}
# Fixed: the key "Receipes-small" could never match, because tags are built
# with str.title(), which yields "Receipes-Small".
tag_type_map = {"Receipes": "Quittung", "Receipes-Small": "Quittung"}
document_type_id_map = {"Quittung": 9}
def append_suffix(filename, suffix):
    # Insert a suffix before the file extension: "a.pdf" -> "a_cropped.pdf".
    pieces = filename.rsplit('.', 1)
    return f"{pieces[0]}_{suffix}.{pieces[1]}"
def crop_width(in_path, new_width):
    """Crop every page of a PDF horizontally to `new_width` points, centered.

    Writes the result next to the input as "<name>_cropped.pdf" and returns
    that path. Vertically, `receipe_trim_y` points are trimmed top and bottom.

    Fixed: the original mixed the removed PyPDF2 1.x API (getNumPages,
    getPage, mediaBox.getUpperRight_x, addPage) with the new PdfReader/
    PdfWriter classes, which raises on PyPDF2 >= 3 / pypdf. This uses the
    current API only; .right/.top match the old getUpperRight_x/_y values.
    """
    out_path = append_suffix(in_path, 'cropped')

    with open(in_path, "rb") as in_f, open(out_path, 'wb+') as out_f:
        reader = PdfReader(in_f)
        writer = PdfWriter()

        for page in reader.pages:
            width = float(page.mediabox.right)
            height = float(page.mediabox.top)
            center = width / 2

            # Shrink the trim box to the requested width around the center.
            page.trimbox.lower_left = (center - new_width / 2, 0 + receipe_trim_y)
            page.trimbox.upper_right = (center + new_width / 2, height - receipe_trim_y)

            page.cropbox = page.trimbox
            page.mediabox = page.trimbox

            writer.add_page(page)

        writer.write(out_f)

    return out_path
def upload_file(path):
    """Upload a single PDF to the Paperless REST API via BasicAuth.

    The name of the directory containing the file is title-cased and used
    as a tag (plus the configured default tags); receipt directories get
    their pages cropped to a fixed width first. On success (HTTP 200/202)
    the local file is deleted.
    """
    logging.info("Uploading: %s", path)

    dir = os.path.dirname(path)
    tag = os.path.basename(dir)

    # Directory-derived tag plus the configured defaults.
    tags = {tag.title()} | default_tags
    types = {tag_type_map[tag] for tag in tags if tag in tag_type_map}

    logging.info("Tags: %s", ", ".join(tags))
    logging.info("Document types: %s", ", ".join(types))

    # Paperless expects numeric ids; names without a mapping are dropped.
    tag_ids = {tag_id_map[tag] for tag in tags if tag in tag_id_map}
    type_ids = {document_type_id_map[typ] for typ in types if typ in document_type_id_map}

    if len(tags & receipe_tags) > 0:
        # Receipt scans: crop to the width configured for this directory
        # and replace the original file with the cropped one.
        old_path = path
        logging.info("Cropping receipe...")
        path = crop_width(path, receipe_width_map[tag])
        os.remove(old_path)

    with open(path, "rb") as f:
        title = os.path.splitext(os.path.basename(path))[0]
        response = requests.post(
            url=urljoin(url, "api/documents/post_document/"),
            data=[("tags", tag_id) for tag_id in tag_ids] +
                 [("document_type", type_id) for type_id in type_ids]+
                 [("title", title)],
            files={"document": (title, f, "application/pdf")},
            auth=HTTPBasicAuth(username, password),
            allow_redirects=False,
        )

    if response.status_code in [200, 202]:
        logging.info("Successful")
        os.remove(path)  # uploaded successfully, drop the local copy
    else:
        logging.error("Failed: %d (%s)", response.status_code, response.text)
class Handler(PatternMatchingEventHandler):
    # Watchdog handler: upload each matching PDF once the writer closes it.

    def on_closed(self, event):
        # Skip directories and our own "*_cropped.pdf" intermediates so the
        # files produced by crop_width() are not uploaded a second time.
        if not event.is_directory and not event.src_path.endswith("_cropped.pdf"):
            upload_file(event.src_path)
def main():
    """Watch a directory tree for new PDFs and upload them to Paperless.

    Usage: script [WATCH_DIR]; defaults to the current directory. PDFs
    already present in the tree are uploaded once at startup, then the
    tree is watched recursively until interrupted.
    """
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    path = sys.argv[1] if len(sys.argv) > 1 else "."
    event_handler = Handler(["*.pdf"])

    # Initial sweep: upload everything already present before watching.
    files = glob(f"{path}/**/*.pdf", recursive=True)
    logging.info("Initial upload of: %s", files)
    for file in files:
        upload_file(file)

    observer = Observer()
    observer.schedule(event_handler, path, recursive=True)
    observer.start()
    try:
        # Keep the main thread alive; the observer works on its own thread.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()

View file

@ -0,0 +1,5 @@
# Push Status
## License
Push Status is licensed under the Apache-2.0 license

175
python/push-status/flake.lock generated Normal file
View file

@ -0,0 +1,175 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1705309234,
"narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nix-github-actions": {
"inputs": {
"nixpkgs": [
"poetry2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1703863825,
"narHash": "sha256-rXwqjtwiGKJheXB43ybM8NwWB8rO2dSRrEqes0S7F5Y=",
"owner": "nix-community",
"repo": "nix-github-actions",
"rev": "5163432afc817cf8bd1f031418d1869e4c9d5547",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "nix-github-actions",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1710272261,
"narHash": "sha256-g0bDwXFmTE7uGDOs9HcJsfLFhH7fOsASbAuOzDC+fhQ=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "0ad13a6833440b8e238947e47bea7f11071dc2b2",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"poetry2nix": {
"inputs": {
"flake-utils": "flake-utils_2",
"nix-github-actions": "nix-github-actions",
"nixpkgs": [
"nixpkgs"
],
"systems": "systems_3",
"treefmt-nix": "treefmt-nix"
},
"locked": {
"lastModified": 1708589824,
"narHash": "sha256-2GOiFTkvs5MtVF65sC78KNVxQSmsxtk0WmV1wJ9V2ck=",
"owner": "nix-community",
"repo": "poetry2nix",
"rev": "3c92540611f42d3fb2d0d084a6c694cd6544b609",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "poetry2nix",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"poetry2nix": "poetry2nix"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_2": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"id": "systems",
"type": "indirect"
}
},
"treefmt-nix": {
"inputs": {
"nixpkgs": [
"poetry2nix",
"nixpkgs"
]
},
"locked": {
"lastModified": 1708335038,
"narHash": "sha256-ETLZNFBVCabo7lJrpjD6cAbnE11eDOjaQnznmg/6hAE=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "e504621290a1fd896631ddbc5e9c16f4366c9f65",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "treefmt-nix",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View file

@ -0,0 +1,47 @@
{
  description = "Application packaged using poetry2nix";

  inputs = {
    flake-utils.url = "github:numtide/flake-utils";
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    poetry2nix = {
      url = "github:nix-community/poetry2nix";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs = inputs @ {
    self,
    nixpkgs,
    flake-utils,
    poetry2nix,
  }:
    flake-utils.lib.eachDefaultSystem (system: let
      pkgs = nixpkgs.legacyPackages.${system};
      # Shadow the flake input with the instantiated poetry2nix library.
      poetry2nix = inputs.poetry2nix.lib.mkPoetry2Nix {inherit pkgs;};
      inherit (poetry2nix) mkPoetryApplication defaultPoetryOverrides;
    in {
      packages = {
        push-status = mkPoetryApplication {
          projectDir = self;
          # Override: give uptime-kuma-api setuptools as a build input so
          # its sdist builds under poetry2nix.
          overrides =
            defaultPoetryOverrides.extend
            (self: super: {
              uptime-kuma-api =
                super.uptime-kuma-api.overridePythonAttrs
                (
                  old: {
                    buildInputs = (old.buildInputs or []) ++ [super.setuptools];
                  }
                );
            });
        };
        default = self.packages.${system}.push-status;
      };

      # Development shell with the app's dependencies plus poetry itself.
      devShells.default = pkgs.mkShell {
        inputsFrom = [self.packages.${system}.push-status];
        packages = [pkgs.poetry];
      };
    });
}

348
python/push-status/poetry.lock generated Normal file
View file

@ -0,0 +1,348 @@
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
[[package]]
name = "bidict"
version = "0.23.1"
description = "The bidirectional mapping library for Python."
optional = false
python-versions = ">=3.8"
files = [
{file = "bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5"},
{file = "bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71"},
]
[[package]]
name = "certifi"
version = "2024.2.2"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
{file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
]
[[package]]
name = "charset-normalizer"
version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
{file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
{file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
{file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
{file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
{file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
{file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
[[package]]
name = "decorator"
version = "5.1.1"
description = "Decorators for Humans"
optional = false
python-versions = ">=3.5"
files = [
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "diskcache"
version = "5.6.3"
description = "Disk Cache -- Disk and file backed persistent cache."
optional = false
python-versions = ">=3"
files = [
{file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"},
{file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"},
]
[[package]]
name = "h11"
version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
[[package]]
name = "idna"
version = "3.6"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
]
[[package]]
name = "packaging"
version = "24.0"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
{file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
]
[[package]]
name = "py"
version = "1.11.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
[[package]]
name = "python-engineio"
version = "4.9.0"
description = "Engine.IO server and client for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "python-engineio-4.9.0.tar.gz", hash = "sha256:e87459c15638e567711fd156e6f9c4a402668871bed79523f0ecfec744729ec7"},
{file = "python_engineio-4.9.0-py3-none-any.whl", hash = "sha256:979859bff770725b75e60353d7ae53b397e8b517d05ba76733b404a3dcca3e4c"},
]
[package.dependencies]
simple-websocket = ">=0.10.0"
[package.extras]
asyncio-client = ["aiohttp (>=3.4)"]
client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"]
docs = ["sphinx"]
[[package]]
name = "python-socketio"
version = "5.11.1"
description = "Socket.IO server and client for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "python-socketio-5.11.1.tar.gz", hash = "sha256:bbcbd758ed8c183775cb2853ba001361e2fa018babf5cbe11a5b77e91c2ec2a2"},
{file = "python_socketio-5.11.1-py3-none-any.whl", hash = "sha256:f1a0228b8b1fbdbd93fbbedd821ebce0ef54b2b5bf6e98fcf710deaa7c574259"},
]
[package.dependencies]
bidict = ">=0.21.0"
python-engineio = ">=4.8.0"
requests = {version = ">=2.21.0", optional = true, markers = "extra == \"client\""}
websocket-client = {version = ">=0.54.0", optional = true, markers = "extra == \"client\""}
[package.extras]
asyncio-client = ["aiohttp (>=3.4)"]
client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"]
docs = ["sphinx"]
[[package]]
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "retry"
version = "0.9.2"
description = "Easy to use retry decorator."
optional = false
python-versions = "*"
files = [
{file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"},
{file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"},
]
[package.dependencies]
decorator = ">=3.4.2"
py = ">=1.4.26,<2.0.0"
[[package]]
name = "simple-websocket"
version = "1.0.0"
description = "Simple WebSocket server and client for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "simple-websocket-1.0.0.tar.gz", hash = "sha256:17d2c72f4a2bd85174a97e3e4c88b01c40c3f81b7b648b0cc3ce1305968928c8"},
{file = "simple_websocket-1.0.0-py3-none-any.whl", hash = "sha256:1d5bf585e415eaa2083e2bcf02a3ecf91f9712e7b3e6b9fa0b461ad04e0837bc"},
]
[package.dependencies]
wsproto = "*"
[package.extras]
docs = ["sphinx"]
[[package]]
name = "uptime-kuma-api"
version = "1.2.1"
description = "A python wrapper for the Uptime Kuma WebSocket API"
optional = false
python-versions = ">=3.7, <4"
files = [
{file = "uptime_kuma_api-1.2.1.tar.gz", hash = "sha256:b59e659f7b32e96e512dcc23ccb6e1a1b08d2db1d72215c8ceb70e5421be67e1"},
]
[package.dependencies]
packaging = "*"
python-socketio = {version = ">=5.0.0", extras = ["client"]}
[[package]]
name = "urllib3"
version = "2.2.1"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
{file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
{file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "websocket-client"
version = "1.7.0"
description = "WebSocket client for Python with low level API options"
optional = false
python-versions = ">=3.8"
files = [
{file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"},
{file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"},
]
[package.extras]
docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"]
optional = ["python-socks", "wsaccel"]
test = ["websockets"]
[[package]]
name = "wsproto"
version = "1.2.0"
description = "WebSockets state-machine based protocol implementation"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
{file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
]
[package.dependencies]
h11 = ">=0.9.0,<1"
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "39c8f260404e5a0bc72b76cff8fb48904ff008f07b388475370b9e798bad9f8c"

View file

@ -0,0 +1,57 @@
import os
import logging
from typing import Optional
from pathlib import Path
from retry import retry
from diskcache import Cache
from uptime_kuma_api import UptimeKumaApi, MonitorType
from uptime_kuma_api.exceptions import UptimeKumaException
# Resolve the cache directory per the XDG base-directory convention,
# falling back to ~/.cache when XDG_CACHE_HOME is unset.
_xdg_cache_home = os.environ.get("XDG_CACHE_HOME")
if _xdg_cache_home is None:
    cache_dir = Path.home() / ".cache"
else:
    cache_dir = Path(_xdg_cache_home)

# Persistent on-disk cache; used below to memoize monitor lookups.
cache = Cache(directory=cache_dir / "uptime_kuma")
# Memoize the monitor list on disk for 24 h. Positional argument index 2
# (the password) is excluded from the cache key via `ignore=(2,)` so it
# is never written to the cache.
@cache.memoize('monitors', expire=60*60*24, ignore=(2,))
# Retry up to 6 times on UptimeKumaException (API calls can fail
# transiently; exact failure modes not visible here).
@retry(tries=6, exceptions=UptimeKumaException)
def get(url: str, username: str, password: str):
    """Log in to the Uptime-Kuma instance at `url` and return all monitors.

    Returns the list of monitor dicts as provided by
    UptimeKumaApi.get_monitors(); results are cached (see decorator).
    """
    logging.debug("Fetching monitors from: %s with user %s...", url, username)
    with UptimeKumaApi(url) as api:
        api.login(username, password)
        monitors = api.get_monitors()
        logging.debug("Found %d monitors", len(monitors))
        return monitors
def get_systemd(url, username, password, host, unit) -> Optional[dict]:
    """Find the active push monitor tagged with the given host and unit.

    Scans all monitors fetched from the instance and returns the first
    active PUSH-type monitor whose 'host' and 'systemd-unit' tag values
    match `host` and `unit`; returns None when nothing matches.
    """
    for monitor in get(url, username, password):
        if monitor.get('type') != MonitorType.PUSH:
            continue
        if not monitor.get('active'):
            continue

        # Collect the monitor's tag values by name; tags without a value
        # are skipped, later duplicates overwrite earlier ones.
        tags = {
            tag.get('name'): tag.get('value')
            for tag in monitor.get('tags', [])
            if tag.get('value')
        }

        if tags.get('systemd-unit') == unit and tags.get('host') == host:
            return monitor

    return None

View file

@ -0,0 +1,91 @@
#!/usr/bin/env python3
import argparse
import os
import socket
import sys
import logging
import subprocess
from urllib.parse import urlencode
from push_status import monitors, systemd
def push(url: str, token: str, status: str = 'up', msg: str = 'OK', ping: str = ''):
    """Send a push heartbeat to an Uptime-Kuma push monitor.

    Args:
        url: Base URL of the Uptime-Kuma instance.
        token: The monitor's push token.
        status: 'up' or 'down'.
        msg: Human-readable status message.
        ping: Latency/duration value shown by Uptime-Kuma (ms).
    """
    # Local import keeps the module's top-level imports untouched.
    from urllib.request import urlopen

    args = {
        'status': status,
        'msg': msg,
        'ping': ping
    }
    push_url = f'{url}/api/push/{token}?' + urlencode(args)
    logging.debug('Push status: %s', push_url)

    # Previously this shelled out to `curl -s`, which required an external
    # binary, had no timeout and ignored HTTP errors. Use the stdlib
    # instead; urlopen raises on HTTP error statuses.
    with urlopen(push_url, timeout=30) as resp:
        body = resp.read()
    logging.info("Response: %s", body.decode('utf-8'))
def main():
    """Report the last run of a systemd unit to its Uptime-Kuma monitor.

    Reads the unit's state via `systemctl show`, finds the matching push
    monitor (tagged with the host and unit name) and pushes up/down plus
    the run duration. Returns 0 on success; exits with status 1 when no
    matching monitor exists.
    """
    logging.basicConfig(level=logging.DEBUG)

    default_unit = os.environ.get('MONITOR_UNIT', '')
    default_host = socket.getfqdn()

    parser = argparse.ArgumentParser()
    parser.add_argument('--clear-cache', '-c', action='store_true', help='Clear cache')
    parser.add_argument('--username', '-u', type=str, required=True)
    password = parser.add_mutually_exclusive_group(required=True)
    password.add_argument('--password', '-p', type=str)
    password.add_argument('--password-file', type=str)
    parser.add_argument('--url', '-U', type=str, default='https://status.0l.de')
    parser.add_argument('host_unit', type=str, default=f'{default_host}/{default_unit}')
    args = parser.parse_args()

    if args.clear_cache:
        monitors.cache.clear()

    if args.password_file is not None:
        with open(args.password_file) as f:
            password = f.readline().strip()
    else:
        password = args.password

    # "host/unit"; a bare unit name falls back to the local FQDN. Was a
    # bare `except:` before, which also swallowed SystemExit and
    # KeyboardInterrupt.
    try:
        host, unit = args.host_unit.split('/', 1)
    except ValueError:
        host = default_host
        unit = args.host_unit

    state = systemd.get_unit_state(unit)
    desc = state.get('Description', 'Unknown unit')
    start_ts = state.get('ActiveEnterTimestamp')
    start = int(state.get('ActiveEnterTimestampMonotonic', 0))
    stop = int(state.get('ActiveExitTimestampMonotonic', 0))
    duration = (stop - start) * 1e-6  # monotonic timestamps are in microseconds
    rc = int(state.get('ExecMainStatus', '-1'))
    state = 'up' if rc == 0 else 'down'
    msg = f'Execution of {desc} started at {start_ts} finished after {duration} s with exit code {rc}'
    logging.debug("State: state=%s, rc=%d, duration=%f", state, rc, duration)

    monitor = monitors.get_systemd(args.url, args.username, password, host, unit)
    if monitor is None:
        logging.error("No monitor found for: %s/%s", host, unit)
        sys.exit(1)

    logging.info("Monitor: %s", monitor.get('name'))
    push(args.url, monitor.get('pushToken'), state, msg, duration * 1e3)

    return 0


if __name__ == '__main__':
    # Propagate main()'s return value as the process exit code; it was
    # previously discarded, so the script always exited 0.
    sys.exit(main())

View file

@ -0,0 +1,18 @@
import subprocess
import logging
def get_unit_state(unit: str) -> dict:
    """Return the properties of a systemd unit as a key/value dict.

    Runs `systemctl show <unit>` and parses its KEY=VALUE output; empty
    lines are skipped. Raises subprocess.CalledProcessError when
    systemctl exits non-zero.
    """
    logging.debug("Get state of systemd unit: %s", unit)

    output = subprocess.check_output(['systemctl', 'show', unit]).decode('utf-8')

    properties = {}
    for line in output.split('\n'):
        if len(line) == 0:
            continue
        # Split only on the first '=' — values may contain '=' themselves.
        key, value = line.split('=', 1)
        properties[key] = value

    return properties

View file

@ -0,0 +1,21 @@
[tool.poetry]
name = "push-status"
version = "0.1.3"
description = "Push status of systemd units to Uptime-Kuma"
authors = ["Steffen Vogel <post@steffenvogel.de>"]
license = "Apache-2.0"
readme = "README.md"

[tool.poetry.dependencies]
# uptime-kuma-api declares `python = ">=3.7, <4"`, so the previous
# "^3.6" constraint could never be solved by Poetry. 3.7 is the lowest
# satisfiable bound.
# NOTE(review): poetry.lock was generated against "^3.11"; run
# `poetry lock` after changing this constraint.
python = "^3.7"
diskcache = "^5.6.3"
retry = "^0.9.2"
uptime-kuma-api = "^1.2.1"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.poetry.scripts]
push-status = 'push_status.push_status:main'

View file

@ -0,0 +1,52 @@
import dns.resolver
import dns.zone
import dns.query
import dns.rdatatype
import dns.reversename
import dns.name
import sys
# Zones to enumerate (public zones plus the dn42 zone).
ZONES = ['0l.de', 'steffenvogel.de', 'dn42.org', 'vogel.cc', 'noteblok.net', 'chaos.family', '0l.dn42']

# Resolve the master nameserver once; all zone transfers are requested
# from this address. NOTE: performs a live DNS query at import time.
master_name = 'ipa-0.edgy.vms.0l.de'
master_answer = dns.resolver.resolve(master_name, 'A')
NS = master_answer[0].address
def get_names(zone, rdtypes=('A', 'AAAA', 'CNAME', 'NS')):
    """Recursively collect record names of a zone via AXFR from NS.

    Args:
        zone: Zone name (text) to transfer.
        rdtypes: Record types to collect. Delegations (NS records below
            the apex) are followed recursively using the default types.

    Returns:
        Set of absolute names without the trailing dot; wildcard names
        are excluded. A refused/failed transfer is reported on stderr
        and contributes nothing.
    """
    # `dns.xfr` was referenced below but never imported at module level;
    # import it here so TransferError is always resolvable.
    import dns.xfr

    names = set()
    zone = dns.name.from_text(zone)
    try:
        x = dns.query.xfr(NS, zone)
        z = dns.zone.from_xfr(x)
        for rdtype in rdtypes:
            rdtype = dns.rdatatype.from_text(rdtype)
            for name, ttl, rdata in z.iterate_rdatas(rdtype):
                fqdn = name.derelativize(zone).to_text(True)
                if rdtype == dns.rdatatype.NS and len(name) > 0:
                    # Sub-zone delegation: recurse into it.
                    names |= get_names(fqdn)
                elif not name.is_wild():
                    names.add(fqdn)
    except dns.xfr.TransferError as e:
        print(f'{e}: {zone}', file=sys.stderr)
    return names
def main():
    """Collect the names from every configured zone and print them sorted."""
    collected = set()
    for z in ZONES:
        collected |= get_names(z)
    print('\n'.join(sorted(collected)))


if __name__ == '__main__':
    main()

View file

@ -0,0 +1 @@
dnspython

View file

@ -0,0 +1,4 @@
#dnspython
git+https://github.com/rthalley/dnspython
python-freeipa
pynetbox

133
python/sync-dns/run.py Normal file
View file

@ -0,0 +1,133 @@
import dns.resolver
import dns.zone
import dns.query
import dns.rdatatype
import dns.reversename
import dns.name
import ipaddress
import itertools
import pynetbox
# Our own IPv4 prefixes (dn42 + home LAN); addresses outside these are
# not expected to have PTR records we control.
MYNETS_V4 = [
    '172.23.156.0/23',
    '192.168.178.0/24'
]

# Our own IPv6 prefix.
MYNETS_V6 = [
    '2a09:11c0:200::/44'
]

# Forward zones to check.
ZONES = [
    '0l.de.'
]

# Reverse zones for the IPv4 prefixes above.
RZONES_V4 = [
    '156.23.172.in-addr.arpa.',
    '157.23.172.in-addr.arpa.',
    '178.168.192.in-addr.arpa.'
]

# Reverse zone for the IPv6 prefix above.
RZONES_V6 = [
    '0.2.0.0.c.1.1.9.0.a.2.ip6.arpa.'
]

# Resolve the master nameserver once; all zone transfers are requested
# from this address. NOTE: performs a live DNS query at import time.
master_name = 'ipa-0.edgy.vms.0l.de'
master_answer = dns.resolver.resolve(master_name, 'A')
NS = master_answer[0].address
def is_mine_v4(ip):
    """Return True when the IPv4 address lies within one of MYNETS_V4."""
    addr = ipaddress.IPv4Address(ip)
    return any(addr in ipaddress.IPv4Network(net) for net in MYNETS_V4)
def is_mine_v6(ip):
    """Return True when the IPv6 address lies within one of MYNETS_V6."""
    addr = ipaddress.IPv6Address(ip)
    return any(addr in ipaddress.IPv6Network(net) for net in MYNETS_V6)
def get_ips(zones, rdtype='A'):
    """AXFR each zone from NS and map record name (FQDN) -> address.

    Args:
        zones: Iterable of zone names (text).
        rdtype: Record type as text, e.g. 'A' or 'AAAA'.

    NOTE(review): when a name has several records of the given type,
    later ones overwrite earlier ones — only one address per name is
    kept.
    """
    rdtype = dns.rdatatype.from_text(rdtype)
    ips = {}
    for zone in zones:
        zone = dns.name.from_text(zone)
        # Full zone transfer from the master resolved at import time.
        x = dns.query.xfr(NS, zone)
        z = dns.zone.from_xfr(x)
        for name, ttl, rdata in z.iterate_rdatas(rdtype):
            name = name.derelativize(zone).to_text()
            addr = rdata.address
            ips[name] = addr
    return ips
def get_ptrs(rzones):
    """AXFR each reverse zone from NS and map address -> PTR target.

    Args:
        rzones: Iterable of reverse zone names (in-addr.arpa / ip6.arpa).

    Returns a dict keyed by the textual IP address (reconstructed from
    the reverse name) with the PTR target name as value.
    """
    ptrs = {}
    for rzone in rzones:
        rzone = dns.name.from_text(rzone)
        # Full zone transfer from the master resolved at import time.
        x = dns.query.xfr(NS, rzone)
        z = dns.zone.from_xfr(x)
        for name, ttl, rdata in z.iterate_rdatas(dns.rdatatype.PTR):
            name = name.derelativize(rzone)
            # Convert e.g. 1.156.23.172.in-addr.arpa. back to 172.23.156.1.
            addr = dns.reversename.to_address(name)
            fname = rdata.target
            ptrs[addr] = fname.to_text()
    return ptrs
def get_netbox_ips():
    """Fetch all IP addresses from Netbox, mapped address -> dns_name.

    SECURITY: the Netbox API token was previously hard-coded in this
    file (a committed credential — it should be revoked). It must now be
    supplied via the NETBOX_TOKEN environment variable.
    """
    import os

    token = os.environ.get('NETBOX_TOKEN')
    if not token:
        raise RuntimeError('NETBOX_TOKEN environment variable is not set')

    nb = pynetbox.core.api.Api('https://netbox.0l.de', token)
    ips = nb.ipam.ip_addresses.all()
    return { ip.address: ip.dns_name for ip in ips }
# --- top-level consistency checks (run on import/execution) -----------------

nb_ips = get_netbox_ips()

# Report IPs that are registered in Netbox but carry no DNS name.
for ip in dict(filter(lambda ip: ip[1] == '', nb_ips.items())):
    print('Missing DNS name in Netbox: ' + ip)

# Per address family: (forward records, reverse records, ownership test).
afs = [
    (get_ips(ZONES, 'A'), get_ptrs(RZONES_V4), is_mine_v4),
    (get_ips(ZONES, 'AAAA'), get_ptrs(RZONES_V6), is_mine_v6),
]

for ips, ptrs, is_mine in afs:
    for fqdn, ip in ips.items():
        # Every forward record in our own address space should have a
        # PTR pointing back at the same FQDN.
        try:
            if ptrs[ip] != fqdn:
                print(f'PTR Mismatch: PTR of {fqdn} is {ptrs[ip]} IP: {ip}')
        except KeyError:
            if is_mine(ip):
                print(f'Missing PTR for {fqdn} IP: {ip}')

        # The DNS name must agree with Netbox's record of the IP.
        try:
            if nb_ips[ip] != fqdn:
                print(f'Name mismatch in Netbox: {nb_ips[ip]} != {fqdn}')
        except KeyError:
            print(f'Missing Netbox IP: {ip}')

211
python/wiki2csv.py Normal file
View file

@ -0,0 +1,211 @@
#!/usr/bin/python3
# -*- tab-width: 2; indent-tabs-mode: t; -*-
# Copyright 2012 Jan Kanis
# License: GPL-3.0
# wiki2csv
#
# An explanation of this program is given in the accompanying README file.
# This program is maintained at http://www.bitbucket.org/JanKanis/wiki2csv/
# If you find any bugs, you can report them there.
# For command line options, see the help output of "wiki2csv.py --help".
# See http://en.wikipedia.org/wiki/Help:Wikitable for the wikitable syntax.
from collections import namedtuple
import sys, re, os.path, argparse, csv
# A lexer token: 'type' is one of the marker classes below, 'data' is the cell
# payload with the syntax marker stripped, 'raw' is the original source text.
Lexeme = namedtuple('Lexeme', 'type data raw')
# the different lexeme types in the wiki table syntax
class PreTable(object):
    """All text before the table starts gets this type."""

class TableStart(object):
    """A line opening a table ('{|')."""

class TableCaption(object):
    """A table caption line ('|+')."""

class TableRow(object):
    """A row separator line ('|-')."""

class TableHeader(object):
    """A header cell ('!')."""

class TableHeaderSinglerow(TableHeader):
    """First header cell of a line that holds several '!!'-separated cells."""

class TableHeaderContinued(TableHeader):
    """A subsequent header cell on the same '!!'-separated line."""

class TableData(object):
    """A data cell ('|')."""

class TableDataSinglerow(TableData):
    """First data cell of a line that holds several '||'-separated cells."""

class TableDataContinued(TableData):
    """A subsequent data cell on the same '||'-separated line."""

class TableEnd(object):
    """A line closing a table ('|}')."""
# what should happen for each type
# what should happen for each lexeme type when writing CSV output
actions = dict(
    # Store the item on a row of its own
    singlerow=(TableStart, TableCaption, TableEnd),
    # Store the data without the syntax marker
    data=(TableData,),
    # Store the full raw text
    raw=(TableHeader,)
)
# associations between wiki syntax and types
# associations between wiki syntax and types
# NOTE: order matters -- the lexer takes the first marker that matches, so the
# bare '|' (TableData) must come after '|+', '|-' and '|}', for which it is a
# prefix.
wikitypes = [
    ('{|', TableStart),
    ('|+', TableCaption),
    ('|-', TableRow),
    ('|}', TableEnd),
    ('!', TableHeader),
    ('|', TableData),
]
# a generator that returns Lexemes. Input is a single string with a wikitable.
def wikitableparse(table):
    """Lex a wikitable given as a single string into a stream of Lexemes.

    Yields one Lexeme per table element. A line starting with a known marker
    begins a new lexeme; any other line is appended to the previous lexeme's
    data (multi-line cell). Lines holding several '||'/'!!'-separated cells
    are split into Singlerow + Continued lexemes.
    """
    lines = table.split('\n')
    # drop the empty trailing element produced by a final newline
    if not lines[-1]:
        del lines[-1]
    current = dict(type=PreTable, data='', raw='')
    for row in lines:
        srow = row.lstrip()
        for marker, lextype in wikitypes:
            if srow.startswith(marker):
                # a new marker finishes the pending lexeme
                if current['type'] != PreTable:
                    yield Lexeme(**current)
                current = dict(type=lextype, data=srow[len(marker):], raw=row)
                # process multiple data cells on one line
                if current['type'] == TableData and '||' in current['data']:
                    cells = current['raw'].split('||')
                    yield Lexeme(type=TableDataSinglerow, data=cells[0].lstrip()[2:], raw=cells[0])
                    for cell in cells[1:-1]:
                        yield Lexeme(type=TableDataContinued, data=cell, raw='||' + cell)
                    # BUG FIX: keep the *last* cell as the pending lexeme. The
                    # old code reused the loop variable after the loop, which
                    # dropped the last cell and raised UnboundLocalError for
                    # lines with exactly two cells (empty [1:-1] slice).
                    current = dict(type=TableDataContinued, data=cells[-1], raw='||' + cells[-1])
                # same for multiple header cells on one line
                if current['type'] == TableHeader and '!!' in current['data']:
                    cells = current['raw'].split('!!')
                    yield Lexeme(type=TableHeaderSinglerow, data=cells[0].lstrip()[2:], raw=cells[0])
                    for cell in cells[1:-1]:
                        yield Lexeme(type=TableHeaderContinued, data=cell, raw='!!' + cell)
                    current = dict(type=TableHeaderContinued, data=cells[-1], raw='!!' + cells[-1])
                # Don't try to match again if we already have a match
                break
        else:
            # no marker matched: continuation of previous lexeme on next line
            current['data'] += '\n' + row
    yield Lexeme(**current)
def wiki2csv(wikifile, csvfile):
    """Convert the wikitable read from wikifile into CSV written to csvfile."""
    writer = csv.writer(csvfile)
    pending = []
    for lex in wikitableparse(wikifile.read()):
        if lex.type == TableRow:
            # row separator: flush the collected cells, start a new row
            if pending:
                writer.writerow(pending)
            pending = []
        elif lex.type in actions['singlerow']:
            # table start/caption/end get a CSV row of their own
            if pending:
                writer.writerow(pending)
            writer.writerow([lex.raw])
            pending = []
        elif lex.type in actions['data']:
            # data cells are stored without the syntax marker
            pending.append(lex.data)
        elif lex.type in actions['raw']:
            # header cells keep their raw text so markup survives round-trips
            pending.append(lex.raw)
    # flush the final row, which has no trailing separator
    if pending:
        writer.writerow(pending)
# Precompiled alternations of the markers whose cells are stored raw
# (headers) resp. on a row of their own (table start/caption/end); used by
# parsecsv to recognize cells that must be passed through unchanged.
rawtypes = re.compile('|'.join((re.escape(marker) for marker, type in wikitypes
                                if type in actions['raw'])))
singlerowtypes = re.compile('|'.join((re.escape(marker) for marker, type in wikitypes
                                      if type in actions['singlerow'])))
def parsecsv(csvfile):
    """Translate CSV rows (as produced by wiki2csv) back into wikitable lines.

    Yields one wikitable source line per cell, followed by a '|-' row
    separator after every ordinary row.
    """
    reader = csv.reader(csvfile)
    newrow = False  # NOTE(review): never read again -- appears to be a leftover
    for line in reader:
        for cell in line:
            if singlerowtypes.match(cell):
                # table start/caption/end line: pass through verbatim and
                # ignore any remaining cells on this CSV row
                yield cell
                break
            elif rawtypes.match(cell):
                # header cell was stored raw: pass through verbatim
                yield cell
            elif len(cell) and cell[0] in '-+}':
                # Avoid a cornercase where a normal data cell has e.g. '-1' as content,
                # which would result in a new row marker
                yield '| '+cell
            else:
                yield '|'+cell
        # 'cell' here is the last cell handled above; no separator is emitted
        # after a table start/caption/end line.
        # NOTE(review): an empty CSV row would leave 'cell' unbound/stale --
        # confirm inputs never contain empty rows.
        if not singlerowtypes.match(cell):
            yield '|-'
def csv2wiki(csvfile, wikifile):
    """Write each wikitable line produced by parsecsv to wikifile."""
    for chunk in parsecsv(csvfile):
        wikifile.write('%s\n' % chunk)
def main():
    """Command-line entry point: pick a conversion direction and run it.

    The default direction follows the program name (csv2wiki -> towiki,
    anything else -> tocsv); explicit --tocsv/--towiki flags override it.
    """
    progname = os.path.basename(sys.argv[0])
    progname_cooked = os.path.splitext(progname)[0]

    # to show the correct help text
    towikidefault = tocsvdefault = ''
    if progname_cooked == 'csv2wiki':
        towikidefault = '(default for {}) '.format(progname)
        # BUG FIX: was "containing a table CSV format" (missing "in")
        description = "Convert SOURCE containing a table in CSV format to Mediawikis wikitable syntax in DEST. Do the reverse if --tocsv is given."
    else:
        tocsvdefault = '(default for {}) '.format(progname)
        description = "Convert SOURCE containing a table in Mediawikis wikitable syntax to Excel-readable CSV in DEST. Do the reverse if --towiki is given."

    # parse arguments
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-v', '--verbose', action='store_true', help="be more verbose")
    direction = parser.add_mutually_exclusive_group()
    direction.add_argument('--tocsv', '-c', action='store_true',
                           help=tocsvdefault+"Convert SOURCE from wikitable format to CSV in DEST")
    direction.add_argument('--towiki', '-w', action='store_true',
                           help=towikidefault+"Convert SOURCE from CSV format back to wikitable format in DEST")
    parser.add_argument('source', metavar='SOURCE', type=argparse.FileType('r'), nargs='?', default=sys.stdin,
                        help="The input file to read from. Omit or use '-' to read from stdin")
    parser.add_argument('dest', metavar='DEST', type=argparse.FileType('w'), nargs='?', default=sys.stdout,
                        help="The file to write output to. Omit or use '-' to write to stdout")
    args = parser.parse_args()

    # explicit flags win; otherwise the direction follows the program name
    if args.towiki:
        direction = 'towiki'
    elif args.tocsv:
        direction = 'tocsv'
    elif progname_cooked == 'csv2wiki':
        direction = 'towiki'
    else:
        direction = 'tocsv'

    if args.verbose:
        # BUG FIX: the original used Python 2 print-statement syntax
        # ("print >>sys.stderr, ..."), which is a SyntaxError under the
        # python3 shebang. end='' matches the old trailing comma.
        print('direction=%s\n' % direction, 'source=%s\n' % args.source,
              'dest=%s\n' % args.dest, end='', file=sys.stderr)

    if direction == 'towiki':
        csv2wiki(args.source, args.dest)
    else:
        wiki2csv(args.source, args.dest)

    if args.verbose:
        print('Conversion completed', file=sys.stderr)

if __name__ == '__main__':
    main()

View file

@ -3,30 +3,14 @@
*
* Long description
*
* @copyright 2016 Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link http://www.steffenvogel.de
* @copyright 2021, Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link https://www.steffenvogel.de
* @package
* @category
* @since
*/
/*
* This file is part of [...]
*
* [...] is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* [...] is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with [...]. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
#include <stdio.h>

View file

@ -4,30 +4,13 @@
*
* Long description
*
* @copyright 2016 Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link http://www.steffenvogel.de
* @copyright 2021, Steffen Vogel
* @license http://www.gnu.org/licenses/gpl.txt GNU Public License
* @author Steffen Vogel <post@steffenvogel.de>
* @link https://www.steffenvogel.de
* @package
* @category
* @since
*/
/*
* This file is part of [...]
*
* [...] is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* [...] is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with [...]. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
?>

View file

@ -4,30 +4,12 @@
#
# Long description
#
# @copyright 2016 Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link http://www.steffenvogel.de
# @copyright 2021, Steffen Vogel
# @license http://www.gnu.org/licenses/gpl.txt GNU Public License
# @author Steffen Vogel <post@steffenvogel.de>
# @link https://www.steffenvogel.de
# @package
# @category
# @since
#
################################################################################
#
# This file is part of [...]
#
# [...] is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# [...] is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with [...]. If not, see <http://www.gnu.org/licenses/>.
################################################################################

View file

@ -2,31 +2,15 @@
--
-- Long description
--
-- @copyright 2016 Steffen Vogel
-- @license http://www.gnu.org/licenses/gpl.txt GNU Public License
-- @author Steffen Vogel <post@steffenvogel.de>
-- @link http://www.steffenvogel.de
-- @copyright 2021, Steffen Vogel
-- @license http://www.gnu.org/licenses/gpl.txt GNU Public License
-- @author Steffen Vogel <post@steffenvogel.de>
-- @link https://www.steffenvogel.de
-- @package
-- @category
-- @since
--
--------------------------------------------------------------------------------
--
-- This file is part of [...]
--
-- [...] is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- any later version.
--
-- [...] is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with [...]. If not, see <http://www.gnu.org/licenses/>.
--------------------------------------------------------------------------------
library ieee;
use ieee.std_logic_1164.all;
@ -54,4 +38,4 @@ end entity;
architecture rtl of name is
begin
end architecture;
end architecture;

View file

@ -89,7 +89,7 @@ function check() {
}
alert('Alright! Welcome on board!');
window.location.href = 'http://www.steffenvogel.de';
window.location.href = 'https://www.steffenvogel.de';
}
function intro(step) {