Compare commits

00da3fdeb6 ... 95dca96cab

10 commits:

* 95dca96cab
* 199a1d0562
* caed3f9696
* b7a705351e
* e5f2ee0e30
* 2ea4238a84
* fff862e212
* 9d2eeb3ed3
* 8d2d94db1f
* 03590e23b1
6 changed files with 319 additions and 62 deletions
Dockerfile (new file, 23 additions)

@@ -0,0 +1,23 @@
+FROM debian:12
+
+RUN apt-get update && \
+    apt-get install -y dateutils binutils borgbackup openssh-client && \
+    apt-get clean
+
+COPY borg_exporter.rc borg_exporter.sh /
+
+# Authorize SSH Host
+RUN mkdir -p /root/.ssh && \
+    chmod 0700 /root/.ssh
+
+# See: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/githubs-ssh-key-fingerprints
+COPY known_hosts /root/.ssh/known_hosts
+
+# Add the keys
+COPY id_rsa id_rsa.pub /root/.ssh/
+
+# Set permissions
+RUN chmod 600 /root/.ssh/id_rsa && \
+    chmod 600 /root/.ssh/id_rsa.pub
+
+CMD ["/borg_exporter.sh"]
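For context, this is roughly how the image could be built and started by hand; the image tag and host path below are illustrative assumptions, not part of this change (the docker-compose.yml added further down is the route the repo itself provides):

```
# Build from the directory that contains this Dockerfile plus the files it COPYs
# (borg_exporter.rc, known_hosts, id_rsa, id_rsa.pub)
docker build -t borg-exporter .

# Run it, mounting a textfile-collector directory for node_exporter to scrape
# (host path is an assumption; match it to NODE_EXPORTER_DIR in borg_exporter.rc)
docker run -d --name borg-exporter \
  -v /var/lib/node_exporter/textfile_collector:/node_exporter/textfile_collector \
  borg-exporter
```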
README.md (126 changed lines)

@@ -1,47 +1,129 @@
 # Borg exporter
 
-Export borg information to prometheus.
+Export borg information to prometheus. Extended to export information about a list of borg repositories (discovered via `find`), and also to export details about today's backups.
 
 ## Dependencies
 
-* [Dateutils](http://www.fresse.org/dateutils/)
-* Prometheus (obviously)
+* [Dateutils](http://www.fresse.org/dateutils/) `sudo apt-get install dateutils`
 * Node Exporter with textfile collector
 * [Borg](https://github.com/borgbackup/borg)
+* binutils (sed, grep, wc, etc.)
 
 ## Install
 
 ### Manually
 Copy `borg_exporter.sh` to `/usr/local/bin`.
 
-Copy `borg.env` to `/etc/borg` and replace your repokey and repository in it.
+Copy `borg_exporter.rc` to `/etc/` and configure it (see the configuration section below).
 
-Copy the systemd unit to `/etc/systemd/system` and run
+Copy the systemd unit and timer to `/etc/systemd/system`:
+
+```
+sudo cp prometheus-borg-exporter.* /etc/systemd/system
+```
+
+and run
 
 ```
-systemctl enable prometheus-borg-exporter.timer
-systemctl start prometheus-borg-exporter.timer
+sudo systemctl enable prometheus-borg-exporter.timer
+sudo systemctl start prometheus-borg-exporter.timer
 ```
 
-Alternative: Use `ExecStartPost` in your borg backupt timer itself to write our the metrics.
+Alternative: Use `ExecStartPost` in your borg backup service itself to write out the metrics.
 
-## Exported metrics
+### Config file
+
+The config file has a few options:
+
+```
+BORG_PASSPHRASE="mysecret"
+REPOSITORY="/path/to/repository"
+PUSHGATEWAY_URL=http://pushgateway.clems4ever.com
+BASEREPODIR="/backup"
+NODE_EXPORTER_DIR="/path/to/node/exporter/textfile/collector/dir"
+```
+
+* If you leave `BORG_PASSPHRASE=""` empty, no passphrase will be used to access your backups.
+* `REPOSITORY` should either point to a valid repository (if you're running this on each server you are backing up) or be left empty if you set `BASEREPODIR`.
+* `PUSHGATEWAY_URL` should be a valid URL for the Pushgateway. If you're not using it, leave it blank (`PUSHGATEWAY_URL=""`) and data will be exported via the node_exporter textfile collector.
+* `BASEREPODIR` should point to the directory on disk under which to search for all the repos. This makes sense when you run this exporter on the backup server, so you can access all the backups in one place. It's only taken into consideration when `REPOSITORY=""`.
+* `NODE_EXPORTER_DIR` should point to your node_exporter textfile collector directory (where it writes .prom files). It's used only if `PUSHGATEWAY_URL=""`.
+
+### Caveats
+
+* The repository names shouldn't contain spaces.
+* The archive names shouldn't contain spaces.
+* The hostnames of the machines that do the export shouldn't contain spaces.
+
+### Troubleshooting
+
+You can manually run the script with `bash -x` to get the output of intermediary commands.
+
+## Exported metrics example
 
 ```
-borg_extract_exit_code
-borg_hours_from_last_backup
-borg_archives_count
-borg_files_count
-borg_chunks_unique
-borg_chunks_total
-borg_last_size
-borg_last_size_compressed
-borg_last_size_dedup
-borg_total_size
-borg_total_size_compressed
-borg_total_size_dedup
+# HELP borg_archives_count The total number of archives in the repo
+# TYPE borg_archives_count gauge
+borg_archives_count{backupserver="my_backup_server",host="server1",repo="/backup/server1/server1"} 3
+borg_archives_count{backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 29
+# HELP borg_archives_count_today The total number of archives created today in the repo
+# TYPE borg_archives_count_today gauge
+borg_archives_count_today{backupserver="my_backup_server",host="server1",repo="/backup/server1/server1"} 0
+borg_archives_count_today{backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 4
+# HELP borg_chunks_total The total number of chunks in the archive (today)
+# TYPE borg_chunks_total gauge
+borg_chunks_total{archive="_etc",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 11829
+borg_chunks_total{archive="_home_user_scripts",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 11829
+borg_chunks_total{archive="_usr_share_cacti",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 11829
+borg_chunks_total{archive="mysqldump",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 11829
+# HELP borg_chunks_unique The number of unique chunks in the archive (today)
+# TYPE borg_chunks_unique gauge
+borg_chunks_unique{archive="_etc",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 2076
+borg_chunks_unique{archive="_home_user_scripts",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 2076
+borg_chunks_unique{archive="_usr_share_cacti",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 2076
+borg_chunks_unique{archive="_var_spool_cron",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 2076
+borg_chunks_unique{archive="mysqldump",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 2076
+# HELP borg_files_count The number of files contained in the archive (today)
+# TYPE borg_files_count gauge
+borg_files_count{archive="_etc",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 1030
+borg_files_count{archive="_home_user_scripts",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 36
+borg_files_count{archive="_usr_share_cacti",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 593
+borg_files_count{archive="_var_spool_cron",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 1
+borg_files_count{archive="mysqldump",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 1
+# HELP borg_hours_from_last_archive How many hours have passed since the last archive was added to the repo (counted by borg_exporter.sh)
+# TYPE borg_hours_from_last_archive gauge
+borg_hours_from_last_archive{backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 10
+# HELP borg_last_archive_timestamp The timestamp of the last archive (unixtimestamp)
+# TYPE borg_last_archive_timestamp gauge
+borg_last_archive_timestamp{backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 1.622421272e+09
+# HELP borg_last_size The size of the archive (today)
+# TYPE borg_last_size gauge
+borg_last_size{archive="_etc",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 2.43479e+07
+borg_last_size{archive="_home_user_scripts",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 146749
+borg_last_size{archive="_usr_share_cacti",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 1.09157e+07
+borg_last_size{archive="_var_spool_cron",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 1177.6
+borg_last_size{archive="mysqldump",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 8.32286e+08
+# HELP borg_last_size_compressed The compressed size of the archive (today)
+# TYPE borg_last_size_compressed gauge
+borg_last_size_compressed{archive="_etc",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 8.40958e+06
+borg_last_size_compressed{archive="_home_user_scripts",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 44769.3
+borg_last_size_compressed{archive="_usr_share_cacti",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 5.05414e+06
+borg_last_size_compressed{archive="_var_spool_cron",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 445
+borg_last_size_compressed{archive="mysqldump",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 5.55326e+07
+# HELP borg_last_size_dedup The deduplicated size of the archive (today), (size on disk)
+# TYPE borg_last_size_dedup gauge
+borg_last_size_dedup{archive="_etc",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 608
+borg_last_size_dedup{archive="_home_user_scripts",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 558
+borg_last_size_dedup{archive="_usr_share_cacti",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 536
+borg_last_size_dedup{archive="_var_spool_cron",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 548
+borg_last_size_dedup{archive="mysqldump",backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 9.81467e+06
+# HELP borg_total_size The total size of all archives in the repo
+# TYPE borg_total_size gauge
+borg_total_size{backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 5.35797e+09
+# HELP borg_total_size_compressed The total compressed size of all archives in the repo
+# TYPE borg_total_size_compressed gauge
+borg_total_size_compressed{backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 4.17637e+08
+# HELP borg_total_size_dedup The total deduplicated size of all archives in the repo (size on disk)
+# TYPE borg_total_size_dedup gauge
+borg_total_size_dedup{backupserver="my_backup_server",host="server2",repo="/backup/server2/server2"} 1.14284e+08
 ```
 
 ### Grafana dashboard
 
-See [here](https://grafana.com/dashboards/7856) for a sample grafana dashboard.
+See [here](https://grafana.com/dashboards/14516) for a sample grafana dashboard.
+The original dashboard code is also available as `borg_backup_status_dashboard.json` in the repo.
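To illustrate the troubleshooting note in the README, a manual debug run might look like the following; the install path and collector directory are assumptions, and since the new script loops forever you stop it with Ctrl-C after one pass:

```
# Trace every command the exporter runs (the script loops; Ctrl-C after the first pass)
# Note: the script sources /borg_exporter.rc, so the config must be readable at that path
sudo bash -x /usr/local/bin/borg_exporter.sh

# Inspect what was written for the node_exporter textfile collector
cat /var/lib/node_exporter/textfile_collector/borg_exporter.prom
```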
VERSION (2 changed lines)

@@ -1 +1 @@
-0.1.1+git
+0.2
borg_exporter.sh (207 changed lines)

@@ -1,34 +1,25 @@
 #!/bin/bash
 
-set -eu
+while true
+do
 
-TMP_FILE=$(mktemp)
+source /borg_exporter.rc
+
+#sleep 30
+
+TMP_FILE=$(mktemp /tmp/prometheus-borg-XXXXX)
+DATEDIFF=`which datediff`
+if [ -z "$DATEDIFF" ]; then
+#ubuntu packages have a different executable name
+DATEDIFF=`which dateutils.ddiff`
+fi
 
 [ -e $TMP_FILE ] && rm -f $TMP_FILE
 
+#prevent "Attempting to access a previously unknown unencrypted repository" prompt
+export BORG_RELOCATED_REPO_ACCESS_IS_OK=yes
+export BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK=yes
 HOSTNAME=$(hostname)
-ARCHIVES="$(BORG_PASSPHRASE=$BORG_PASSPHRASE borg list $REPOSITORY)"
-COUNTER=0
-
-COUNTER=$(echo "$ARCHIVES" | wc -l)
-LAST_ARCHIVE=$(BORG_PASSPHRASE=$BORG_PASSPHRASE borg list --last 1 $REPOSITORY)
-LAST_ARCHIVE_NAME=$(echo $LAST_ARCHIVE | awk '{print $1}')
-LAST_ARCHIVE_DATE=$(echo $LAST_ARCHIVE | awk '{print $3" "$4}')
-LAST_ARCHIVE_TIMESTAMP=$(date -d "$LAST_ARCHIVE_DATE" +"%s")
-CURRENT_DATE="$(date '+%Y-%m-%d %H:%M:%S')"
-NB_HOUR_FROM_LAST_BCK=$(datediff "$LAST_ARCHIVE_DATE" "$CURRENT_DATE" -f '%H')
-
-# BORG_EXTRACT_EXIT_CODE=$(BORG_PASSPHRASE="$BORG_PASSPHRASE" borg extract --dry-run "$REPOSITORY::$LAST_ARCHIVE_NAME" > /dev/null 2>&1; echo $?)
-BORG_INFO=$(BORG_PASSPHRASE="$BORG_PASSPHRASE" borg info "$REPOSITORY::$LAST_ARCHIVE_NAME")
-
-echo "borg_last_archive_timestamp $LAST_ARCHIVE_TIMESTAMP" >> $TMP_FILE
-# echo "borg_extract_exit_code $BORG_EXTRACT_EXIT_CODE" >> $TMP_FILE
-echo "borg_hours_from_last_archive $NB_HOUR_FROM_LAST_BCK" >> $TMP_FILE
-echo "borg_archives_count $COUNTER" >> $TMP_FILE
-echo "borg_files_count $(echo "$BORG_INFO" | grep "Number of files" | awk '{print $4}')" >> $TMP_FILE
-echo "borg_chunks_unique $(echo "$BORG_INFO" | grep "Chunk index" | awk '{print $3}')" >> $TMP_FILE
-echo "borg_chunks_total $(echo "$BORG_INFO" | grep "Chunk index" | awk '{print $4}')" >> $TMP_FILE
 
 function calc_bytes {
 NUM=$1

@@ -53,20 +44,168 @@ function calc_bytes {
 esac
 }
 
-# byte size
-LAST_SIZE=$(calc_bytes $(echo "$BORG_INFO" |grep "This archive" |awk '{print $3}') $(echo "$BORG_INFO" |grep "This archive" |awk '{print $4}'))
-LAST_SIZE_COMPRESSED=$(calc_bytes $(echo "$BORG_INFO" |grep "This archive" |awk '{print $5}') $(echo "$BORG_INFO" |grep "This archive" |awk '{print $6}'))
-LAST_SIZE_DEDUP=$(calc_bytes $(echo "$BORG_INFO" |grep "This archive" |awk '{print $7}') $(echo "$BORG_INFO" |grep "This archive" |awk '{print $8}'))
+function getBorgDataForRepository {
+REPOSITORY=$1 #repository we're looking into
+host=$2 #the host for which the backups are made
+
+ARCHIVES="$(BORG_PASSPHRASE=$BORG_PASSPHRASE borg list $REPOSITORY)"
+COUNTER=0
+BACKUPS_TODAY_COUNT=0
+
+COUNTER=$(echo "$ARCHIVES" | wc -l)
+TODAY=$(date +%Y-%m-%d)
+BACKUPS_TODAY=$(echo "$ARCHIVES" | grep ", $TODAY ")
+BACKUPS_TODAY_COUNT=$(echo -n "$BACKUPS_TODAY" | wc -l)
+
+#extract data for last archive
+LAST_ARCHIVE=$(BORG_PASSPHRASE=$BORG_PASSPHRASE borg list --last 1 $REPOSITORY)
+#we need at least one valid backup to list anything meaningful
+if [ -n "${LAST_ARCHIVE}" ]
+then
+LAST_ARCHIVE_NAME=$(echo $LAST_ARCHIVE | awk '{print $1}')
+LAST_ARCHIVE_DATE=$(echo $LAST_ARCHIVE | awk '{print $3" "$4}')
+LAST_ARCHIVE_TIMESTAMP=$(date -d "$LAST_ARCHIVE_DATE" +"%s")
+CURRENT_DATE="$(date '+%Y-%m-%d %H:%M:%S')"
+NB_HOUR_FROM_LAST_BCK=$($DATEDIFF "$LAST_ARCHIVE_DATE" "$CURRENT_DATE" -f '%H')
+
+# in case the date parsing from BORG didn't work (e.g. archive with space in it), datediff will output
+# a usage message on stdout and will break prometheus formatting. We need to
+# check for that here
+DATEDIFF_LINES=$(echo "$NB_HOUR_FROM_LAST_BCK" | wc -l)
+if [ "${DATEDIFF_LINES}" -eq 1 ]
+then
+
+echo "borg_hours_from_last_archive{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\"} $NB_HOUR_FROM_LAST_BCK" >> $TMP_FILE
+
+BORG_INFO=$(BORG_PASSPHRASE="$BORG_PASSPHRASE" borg info "$REPOSITORY::$LAST_ARCHIVE_NAME")
+echo "borg_last_archive_timestamp{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\"} $LAST_ARCHIVE_TIMESTAMP" >> $TMP_FILE
+
 TOTAL_SIZE=$(calc_bytes $(echo "$BORG_INFO" |grep "All archives" |awk '{print $3}') $(echo "$BORG_INFO" |grep "All archives" |awk '{print $4}'))
 TOTAL_SIZE_COMPRESSED=$(calc_bytes $(echo "$BORG_INFO" |grep "All archives" |awk '{print $5}') $(echo "$BORG_INFO" |grep "All archives" |awk '{print $6}'))
 TOTAL_SIZE_DEDUP=$(calc_bytes $(echo "$BORG_INFO" |grep "All archives" |awk '{print $7}') $(echo "$BORG_INFO" |grep "All archives" |awk '{print $8}'))
 
-echo "borg_last_size $LAST_SIZE" >> $TMP_FILE
-echo "borg_last_size_compressed $LAST_SIZE_COMPRESSED" >> $TMP_FILE
-echo "borg_last_size_dedup $LAST_SIZE_DEDUP" >> $TMP_FILE
-echo "borg_total_size $TOTAL_SIZE" >> $TMP_FILE
-echo "borg_total_size_compressed $TOTAL_SIZE_COMPRESSED" >> $TMP_FILE
-echo "borg_total_size_dedup $TOTAL_SIZE_DEDUP" >> $TMP_FILE
+echo "borg_total_size{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\"} $TOTAL_SIZE" >> $TMP_FILE
+echo "borg_total_size_compressed{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\"} $TOTAL_SIZE_COMPRESSED" >> $TMP_FILE
+echo "borg_total_size_dedup{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\"} $TOTAL_SIZE_DEDUP" >> $TMP_FILE
+
+fi
+
+echo "borg_archives_count{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\"} $COUNTER" >> $TMP_FILE
+echo "borg_archives_count_today{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\"} $BACKUPS_TODAY_COUNT" >> $TMP_FILE
+
+#go through the day's archives and count the files/chunks/etc.
+
+TODAY_ARCHIVES=$(echo -n "$BACKUPS_TODAY" | awk '{print $1}' | xargs echo )
+#echo $TODAY_ARCHIVES
+if [ -n "${TODAY_ARCHIVES}" ]
+then
+for archive in $TODAY_ARCHIVES
+do
+echo "Looking at $REPOSITORY::$archive"
+#ask for info on it
+CURRENT_INFO=$(BORG_PASSPHRASE="$BORG_PASSPHRASE" borg info "$REPOSITORY::$archive")
+#cut out something that looks like a timestamp when reporting: 20210528-1315
+readable_archive=$(echo $archive | sed -r "s/-[0-9]{8}-[0-9]{4,6}//")
+
+echo "borg_files_count{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\", archive=\"$readable_archive\"} $(echo "$CURRENT_INFO" | grep "Number of files" | awk '{print $4}')" >> $TMP_FILE
+echo "borg_chunks_unique{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\", archive=\"$readable_archive\"} $(echo "$CURRENT_INFO" | grep "Chunk index" | awk '{print $3}')" >> $TMP_FILE
+echo "borg_chunks_total{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\", archive=\"$readable_archive\"} $(echo "$CURRENT_INFO" | grep "Chunk index" | awk '{print $4}')" >> $TMP_FILE
+
+# byte size
+LAST_SIZE=$(calc_bytes $(echo "$CURRENT_INFO" |grep "This archive" |awk '{print $3}') $(echo "$CURRENT_INFO" |grep "This archive" |awk '{print $4}'))
+LAST_SIZE_COMPRESSED=$(calc_bytes $(echo "$CURRENT_INFO" |grep "This archive" |awk '{print $5}') $(echo "$CURRENT_INFO" |grep "This archive" |awk '{print $6}'))
+LAST_SIZE_DEDUP=$(calc_bytes $(echo "$CURRENT_INFO" |grep "This archive" |awk '{print $7}') $(echo "$CURRENT_INFO" |grep "This archive" |awk '{print $8}'))
+
+echo "borg_last_size{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\", archive=\"$readable_archive\"} $LAST_SIZE" >> $TMP_FILE
+echo "borg_last_size_compressed{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\", archive=\"$readable_archive\"} $LAST_SIZE_COMPRESSED" >> $TMP_FILE
+echo "borg_last_size_dedup{host=\"$host\", backupserver=\"$HOSTNAME\", repo=\"$REPOSITORY\", archive=\"$readable_archive\"} $LAST_SIZE_DEDUP" >> $TMP_FILE
+done
+else
+echo "Unable to find any archives for today in $REPOSITORY."
+fi
+else
+echo "Unable to find any archives in $REPOSITORY. Processing skipped for it"
+fi
+}
+
+#print the definition of the metrics
+echo "# HELP borg_hours_from_last_archive How many hours have passed since the last archive was added to the repo (counted by borg_exporter.sh)" >> $TMP_FILE
+echo "# TYPE borg_hours_from_last_archive gauge" >> $TMP_FILE
+echo "# HELP borg_last_archive_timestamp The timestamp of the last archive (unixtimestamp)" >> $TMP_FILE
+echo "# TYPE borg_last_archive_timestamp gauge" >> $TMP_FILE
+echo "# HELP borg_total_size The total size of all archives in the repo" >> $TMP_FILE
+echo "# TYPE borg_total_size gauge" >> $TMP_FILE
+echo "# HELP borg_total_size_compressed The total compressed size of all archives in the repo" >> $TMP_FILE
+echo "# TYPE borg_total_size_compressed gauge" >> $TMP_FILE
+echo "# HELP borg_total_size_dedup The total deduplicated size of all archives in the repo (size on disk)" >> $TMP_FILE
+echo "# TYPE borg_total_size_dedup gauge" >> $TMP_FILE
+echo "# HELP borg_archives_count The total number of archives in the repo" >> $TMP_FILE
+echo "# TYPE borg_archives_count gauge" >> $TMP_FILE
+echo "# HELP borg_archives_count_today The total number of archives created today in the repo" >> $TMP_FILE
+echo "# TYPE borg_archives_count_today gauge" >> $TMP_FILE
+echo "# HELP borg_files_count The number of files contained in the archive (today)" >> $TMP_FILE
+echo "# TYPE borg_files_count gauge" >> $TMP_FILE
+echo "# HELP borg_chunks_unique The number of unique chunks in the archive (today)" >> $TMP_FILE
+echo "# TYPE borg_chunks_unique gauge" >> $TMP_FILE
+echo "# HELP borg_chunks_total The total number of chunks in the archive (today)" >> $TMP_FILE
+echo "# TYPE borg_chunks_total gauge" >> $TMP_FILE
+echo "# HELP borg_last_size The size of the archive (today)" >> $TMP_FILE
+echo "# TYPE borg_last_size gauge" >> $TMP_FILE
+echo "# HELP borg_last_size_compressed The compressed size of the archive (today)" >> $TMP_FILE
+echo "# TYPE borg_last_size_compressed gauge" >> $TMP_FILE
+echo "# HELP borg_last_size_dedup The deduplicated size of the archive (today), (size on disk)" >> $TMP_FILE
+echo "# TYPE borg_last_size_dedup gauge" >> $TMP_FILE
+
+if [ -n "${REPOSITORY}" ]
+then
+for i in $(echo $REPOSITORY | tr ";" "\n")
+do
+echo "Use Repository: $i"
+getBorgDataForRepository "${i}" "${HOSTNAME}"
+done
+else
+#discover (recursively) borg repositories starting from a path and extract info for each
+#(e.g. when running on the backup server directly)
+if [ -d "${BASEREPODIR}" ]
+then
+REPOS=`find "$BASEREPODIR" -type f -name "README" | grep -v ".cache/borg"`
+# e.g. /backup/servers/server_name/README
+for REPO in $REPOS
+do
+#cut out the /README from the name
+REPO=$(echo "$REPO" | sed -r "s/\/README//")
+#assume the name convention for the repo contains the hostname as the repo name
+# e.g. /backup/servers/server_name
+host=$(basename "$REPO")
+getBorgDataForRepository $REPO $host
+done
+else
+echo "Error: Either set REPOSITORY or BASEREPODIR in /borg_exporter.rc"
+fi
+
+fi
+
+if [ -n "${PUSHGATEWAY_URL}" ]
+then
+#send data via pushgateway
 cat $TMP_FILE | curl --data-binary @- ${PUSHGATEWAY_URL}/metrics/job/borg-exporter/host/$HOSTNAME/repository/$REPOSITORY
+else
+#send data via node_exporter
+if [ -d "${NODE_EXPORTER_DIR}" ]
+then
+cp $TMP_FILE ${NODE_EXPORTER_DIR}/borg_exporter.prom
+else
+echo "Please configure either PUSHGATEWAY_URL or NODE_EXPORTER_DIR in /etc/borg_exporter.rc"
+fi
+fi
+
+#cleanup
+rm -f $TMP_FILE
+
+# Wait 100 minutes
+echo "sleep 100 minutes"
+sleep 6000
+
+done
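As a sketch of what the BASEREPODIR branch of the script does, the same discovery can be reproduced interactively; /backup and the server2 paths are hypothetical examples echoing the README's sample metrics:

```
# Find repositories the same way the script does: every borg repo contains a README file
find /backup -type f -name "README" | grep -v ".cache/borg"
# -> e.g. /backup/server2/server2/README

# Strip /README to get the repo path, derive the "host" label, and list its archives
REPO=/backup/server2/server2
host=$(basename "$REPO")   # -> server2
borg list "$REPO"
```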
docker-compose.yml (new file, 13 additions)

@@ -0,0 +1,13 @@
+version: "3.8"
+
+services:
+
+  borg-exporter:
+    build: builds/prometheus-borg-exporter/.
+    container_name: borg-exporter
+    volumes:
+      - ./node_exporter/textfile_collector/:/node_exporter/textfile_collector
+    restart: always
+    networks:
+      default:
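A possible way to bring this service up and watch it, assuming the compose file sits next to a builds/prometheus-borg-exporter/ checkout as the build path suggests:

```
docker compose up -d --build borg-exporter
docker compose logs -f borg-exporter
```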
prometheus-borg-exporter.timer (2 changed lines)

@@ -2,7 +2,7 @@
 Description=Prometheus Borg Exporter Timer
 
 [Timer]
-OnCalendar=daily
+OnCalendar=*-*-* 08:00:00
 
 [Install]
 WantedBy=timers.target
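Once the units are installed and enabled as described in the README, the new daily 08:00 schedule can be checked with standard systemd tooling; the unit names below follow the README and are assumptions about your install:

```
# Show the last and next activation of the exporter timer
systemctl list-timers prometheus-borg-exporter.timer

# Review the exporter's output from its most recent run
journalctl -u prometheus-borg-exporter.service -n 50
```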