2020-09-29 11:10:15 +02:00
|
|
|
import time
|
|
|
|
import os
|
2020-09-29 11:45:56 +02:00
|
|
|
import sys
|
2020-09-29 11:10:15 +02:00
|
|
|
import signal
|
|
|
|
import faulthandler
|
2023-03-15 00:01:48 +01:00
|
|
|
|
2023-03-14 18:52:58 +01:00
|
|
|
import requests
|
2020-09-29 11:10:15 +02:00
|
|
|
from prometheus_client import start_http_server
|
|
|
|
from prometheus_client.core import GaugeMetricFamily, CounterMetricFamily, REGISTRY
|
|
|
|
import logging
|
|
|
|
from pythonjsonlogger import jsonlogger
|
|
|
|
|
|
|
|
# Enable dumps on stderr in case of segfault
faulthandler.enable()

# Module-level logger (the root logger); handlers and level are configured in main()
logger = logging.getLogger()
|
2020-09-29 11:10:15 +02:00
|
|
|
|
|
|
|
|
2023-03-15 10:23:48 +01:00
|
|
|
class ImmichMetricsCollector:
    """Custom Prometheus collector that scrapes an Immich server's REST API.

    Registered with the prometheus_client REGISTRY; on every scrape the
    registry calls collect(), which turns the dicts produced by the
    get_immich_* methods into Gauge/Counter metric families.
    """

    def __init__(self, config):
        # config keys used here: immich_host, immich_port, token, metrics_prefix
        self.config = config

    def collect(self):
        """Yield one metric family per dict returned by get_immich_metrics().

        Each dict has: name, value, and optionally help, labels (dict) and
        type ("gauge" default, or "counter").
        """
        for metric in self.get_immich_metrics():
            name = metric["name"]
            value = metric["value"]
            help_text = metric.get("help", "")
            labels = metric.get("labels", {})
            metric_type = metric.get("type", "gauge")

            if metric_type == "counter":
                prom_metric = CounterMetricFamily(name, help_text, labels=list(labels.keys()))
            else:
                prom_metric = GaugeMetricFamily(name, help_text, labels=list(labels.keys()))
            prom_metric.add_metric(value=value, labels=list(labels.values()))
            yield prom_metric

    def get_immich_metrics(self):
        """Collect all metric dicts; a failed API call only drops its own metrics."""
        metrics = []
        metrics.extend(self.get_immich_server_version_number())
        metrics.extend(self.get_immich_server_info())

        # Fetch the user statistics once and share them between both
        # stats methods (previously done via a module global that was read
        # before it was ever assigned, crashing the first scrape).
        user_stats = self._fetch_user_stats()
        metrics.extend(self.get_immich_users_stat(user_stats))
        metrics.extend(self.get_immich_users_stat_growth(user_stats))
        return metrics

    def _fetch_user_stats(self):
        """GET /api/server-info/stats; return the parsed JSON dict, or None on failure."""
        try:
            response = requests.request(
                "GET",
                self.combine_url("/api/server-info/stats"),
                headers={'Accept': 'application/json',
                         "x-api-key": self.config["token"]}
            )
            return response.json()
        except requests.exceptions.RequestException as e:
            logger.error(f"Couldn't get user statistics: {e}")
            return None

    def get_immich_users_stat_growth(self, user_stats=None):
        """Server-wide totals: user count plus photo/video/disk-usage growth.

        user_stats is the parsed /api/server-info/stats payload; fetched on
        demand when not supplied.
        """
        if user_stats is None:
            user_stats = self._fetch_user_stats()
        if user_stats is None:
            # API unreachable this scrape; skip these metrics instead of crashing.
            return []

        user_data = user_stats["usageByUser"]
        photos_growth_total = sum(user["photos"] for user in user_data)
        videos_growth_total = sum(user["videos"] for user in user_data)
        usage_growth_total = sum(user["usage"] for user in user_data)

        prefix = self.config['metrics_prefix']
        return [
            {
                "name": f"{prefix}_server_stats_user_count",
                "value": len(user_data),
                "help": "number of users on the immich server"
            },
            {
                "name": f"{prefix}_server_stats_photos_growth",
                "value": photos_growth_total,
                "help": "photos counter that is added or removed"
            },
            {
                "name": f"{prefix}_server_stats_videos_growth",
                "value": videos_growth_total,
                "help": "videos counter that is added or removed"
            },
            {
                "name": f"{prefix}_server_stats_usage_growth",
                "value": usage_growth_total,
                # was a copy-paste of the videos help text
                "help": "usage counter that is added or removed"
            }
        ]

    def get_immich_users_stat(self, user_stats=None):
        """Per-user photo/video/usage metrics, labelled by first name.

        Was a @property reading a module global assigned by another method;
        now a regular method taking the stats payload (fetched when omitted).
        """
        if user_stats is None:
            user_stats = self._fetch_user_stats()
        if user_stats is None:
            return []

        user_data = user_stats["usageByUser"]
        prefix = self.config['metrics_prefix']
        metrics = []
        for user in user_data:
            first_name = user["userFirstName"]
            metrics.append(
                {
                    "name": f"{prefix}_server_stats_photos_by_users",
                    "value": user['photos'],
                    "labels": {"firstName": first_name},
                    "help": f"Number of photos by user {first_name} "
                }
            )
            metrics.append(
                {
                    "name": f"{prefix}_server_stats_videos_by_users",
                    "value": user['videos'],
                    "labels": {"firstName": first_name},
                    # was a copy-paste of the photos help text
                    "help": f"Number of videos by user {first_name} "
                }
            )
            metrics.append(
                {
                    "name": f"{prefix}_server_stats_usage_by_users",
                    "value": user['usage'],
                    "labels": {"firstName": first_name},
                    # was a copy-paste of the photos help text
                    "help": f"Disk usage by user {first_name} "
                }
            )
        return metrics

    def get_immich_server_info(self):
        """Disk-related gauges from /api/server-info."""
        try:
            response_server_info = requests.request(
                "GET",
                self.combine_url("/api/server-info"),
                headers={'Accept': 'application/json'}
            )
            info = response_server_info.json()
        except requests.exceptions.RequestException as e:
            # was logged as "Couldn't get server version" and then crashed on
            # the undefined response; bail out for this scrape instead
            logger.error(f"Couldn't get server info: {e}")
            return []

        prefix = self.config['metrics_prefix']
        return [
            {
                "name": f"{prefix}_server_info_diskAvailable",
                "value": info["diskAvailableRaw"],
                "help": "Available space on disk",
            },
            {
                "name": f"{prefix}_server_info_totalDiskSize",
                "value": info["diskSizeRaw"],
                "help": "total disk size",
            },
            {
                "name": f"{prefix}_server_info_diskUse",
                "value": info["diskUseRaw"],
                "help": "disk space in use",
            },
            {
                "name": f"{prefix}_server_info_diskUsagePercentage",
                "value": info["diskUsagePercentage"],
                "help": "disk usage in percent",
            }
        ]

    def get_immich_server_version_number(self):
        """Info-style metric: constant 1 with the server version as a label."""
        try:
            response_server_version = requests.request(
                "GET",
                self.combine_url("/api/server-info/version"),
                headers={'Accept': 'application/json'}
            )
            version = response_server_version.json()
        except requests.exceptions.RequestException as e:
            # previously fell through and called .json() on an empty string
            logger.error(f"Couldn't get server version: {e}")
            return []

        server_version_number = f"{version['major']}.{version['minor']}.{version['patch']}"
        return [
            {
                "name": f"{self.config['metrics_prefix']}_server_info_version_number",
                # constant 1 (the original's bool(...) was always True); the
                # actual version string travels in the label
                "value": 1,
                "help": "server version number",
                "labels": {"version": server_version_number}
            }
        ]

    def combine_url(self, api_endpoint):
        """Build http://<host>:<port><endpoint> from the configured host/port."""
        host = self.config["immich_host"]
        # str() so an integer port from a caller-supplied config also works
        port = str(self.config["immich_port"])
        return "http://" + host + ":" + port + api_endpoint
|
|
|
|
|
2020-09-29 11:10:15 +02:00
|
|
|
|
|
|
|
class SignalHandler():
    """Installs SIGINT/SIGTERM handlers and exposes graceful-shutdown state.

    The first signal requests a graceful shutdown (the main loop polls
    is_shutting_down()); a third signal forcibly exits the process.
    """

    def __init__(self):
        # Number of termination signals received so far.
        self.shutdownCount = 0

        # Register signal handler
        signal.signal(signal.SIGINT, self._on_signal_received)
        signal.signal(signal.SIGTERM, self._on_signal_received)

    def is_shutting_down(self):
        """Return True once at least one termination signal has arrived."""
        return self.shutdownCount > 0

    def _on_signal_received(self, signum, frame):
        """Signal callback; escalates to a hard exit after repeated signals.

        The first parameter was named `signal`, shadowing the signal module
        inside the callback; renamed to `signum` (handlers are invoked
        positionally, so this is safe for callers).
        """
        if self.shutdownCount > 1:
            logger.warning("Forcibly killing exporter")
            sys.exit(1)
        logger.info("Exporter is shutting down")
        self.shutdownCount += 1
|
2020-09-29 11:10:15 +02:00
|
|
|
|
2023-03-14 18:52:58 +01:00
|
|
|
|
2021-07-17 00:06:36 +02:00
|
|
|
def get_config_value(key, default=""):
    """Resolve a configuration value.

    Checks for a FILE__<key> environment variable pointing at a secret file
    first (docker-secrets style); falls back to the plain <key> environment
    variable and finally to *default*.
    """
    secret_path = os.environ.get("FILE__" + key, None)
    if secret_path is None:
        # No secret file configured; use the plain environment variable.
        return os.environ.get(key, default)

    try:
        with open(secret_path, "r") as secret_file:
            return secret_file.read().strip()
    except IOError as e:
        logger.error(f"Unable to read value for {key} from {secret_path}: {str(e)}")

    # Unreadable secret file: fall back to the environment variable.
    return os.environ.get(key, default)
|
|
|
|
|
2020-09-29 11:10:15 +02:00
|
|
|
|
|
|
|
def main():
    """Entry point: configure logging, validate config, start the exporter."""
    # Init logger so it can be used
    logHandler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter(
        "%(asctime) %(levelname) %(message)",
        datefmt="%Y-%m-%d %H:%M:%S"
    )
    logHandler.setFormatter(formatter)
    logger.addHandler(logHandler)
    logger.setLevel("INFO")  # default until config is loaded

    config = {
        "immich_host": get_config_value("IMMICH_HOST", ""),
        "immich_port": get_config_value("IMMICH_PORT", ""),
        "token": get_config_value("IMMICH_API_TOKEN", ""),
        "exporter_port": int(get_config_value("EXPORTER_PORT", "8000")),
        "log_level": get_config_value("EXPORTER_LOG_LEVEL", "INFO"),
        "metrics_prefix": get_config_value("METRICS_PREFIX", "immich"),
    }
    # set level once config has been loaded
    logger.setLevel(config["log_level"])

    # Register signal handler
    signal_handler = SignalHandler()

    # Fail fast on missing mandatory configuration.
    if not config["immich_host"]:
        logger.error("No host specified, please set IMMICH_HOST environment variable")
        sys.exit(1)
    if not config["immich_port"]:
        # was a copy-paste of the host message ("No host specified")
        logger.error("No port specified, please set IMMICH_PORT environment variable")
        sys.exit(1)
    if not config["token"]:
        logger.error("No token specified, please set IMMICH_API_TOKEN environment variable")
        sys.exit(1)

    # Register our custom collector
    logger.info("Exporter is starting up")
    REGISTRY.register(ImmichMetricsCollector(config))

    # Start server
    start_http_server(config["exporter_port"])
    logger.info(
        f"Exporter listening on port {config['exporter_port']}"
    )

    # Block until a termination signal flips the shutdown flag.
    while not signal_handler.is_shutting_down():
        time.sleep(1)

    logger.info("Exporter has shutdown")
|