Compare commits

66cfd838d4...227264e6fd

10 commits:
227264e6fd
b4c94a7990
84524fc4f1
9144e64be6
abaa555a7a
e99b46b510
7edef5a6fb
b3dcebf20a
2ad3a67640
5cf5a65ebd
5 changed files with 29 additions and 22 deletions

@@ -1,4 +1,4 @@
-FROM lsiobase/alpine:3.12
+FROM lsiobase/alpine:3.15
 LABEL maintainer="GilbN"
 
 WORKDIR /geoip2influx

@@ -100,6 +100,7 @@ services:
 ## Grafana dashboard:
 ### [Grafana Dashboard Link](https://grafana.com/grafana/dashboards/12268/)
 
+Needs the [grafana-worldmap-panel](https://grafana.com/grafana/plugins/grafana-worldmap-panel/?tab=installation)
 ***
 
 ## Sending Nginx log metrics

@@ -36,6 +36,10 @@ g2i_log_path = env.get('GEOIP2INFLUX_LOG_PATH','/config/log/geoip2influx/geoip2i
 # Logging
 logging.basicConfig(level=log_level,format='GEOIP2INFLUX %(asctime)s :: %(levelname)s :: %(message)s',datefmt='%d/%b/%Y %H:%M:%S',handlers=[logging.StreamHandler(),logging.FileHandler(g2i_log_path)])
 
+# global variables
+monitored_ip_types = ['PUBLIC', 'ALLOCATED APNIC', 'ALLOCATED ARIN', 'ALLOCATED RIPE NCC', 'ALLOCATED LACNIC', 'ALLOCATED AFRINIC']
+
+
 def regex_tester(log_path, N):
     time_out = time() + 60
     re_ipv4 = compile(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})')
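
The new `monitored_ip_types` list is needed because IPy's `iptype()` only reports 'PUBLIC' for IPv4 addresses; global IPv6 addresses come back labelled with the allocating registry instead, so a check for 'PUBLIC' alone drops IPv6 clients. A minimal sketch of the classification, assuming the IPy package pinned in requirements.txt (addresses and exact labels are illustrative):

```python
# Illustrative only: how IPy classifies client addresses.
from IPy import IP

for addr in ('8.8.8.8', '192.168.1.10', '2a00:1450:4009:81f::200e'):
    print(addr, '->', IP(addr).iptype())
# Expected output (exact labels depend on IPy's registry tables):
#   8.8.8.8 -> PUBLIC
#   192.168.1.10 -> PRIVATE
#   2a00:1450:4009:81f::200e -> ALLOCATED RIPE NCC
```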

@@ -196,9 +200,10 @@ def logparse(
         f'Line: {line}'
         )
     continue
-if ipadd(ip).iptype() == 'PUBLIC' and ip:
+ip_type = ipadd(ip).iptype()
+if ip_type in monitored_ip_types and ip:
     info = gi.city(ip)
-    if info is not None:
+    if info:
         geohash = encode(info.location.latitude, info.location.longitude)
         geohash_fields['count'] = 1
         geohash_tags['geohash'] = geohash
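
For reference, `encode()` here turns the GeoIP2 latitude/longitude pair into the geohash string that the Grafana worldmap panel groups points by. A small sketch, assuming `encode` comes from the geohash2 package pinned in requirements.txt (the coordinates are made up):

```python
# Sketch only: building the geohash tag from a GeoIP2 coordinate pair.
from geohash2 import encode

latitude, longitude = 59.9139, 10.7522   # hypothetical city lookup result
geohash = encode(latitude, longitude)    # base-32 geohash string
geohash_tags = {'geohash': geohash}
geohash_fields = {'count': 1}
print(geohash_tags, geohash_fields)
```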

@@ -206,12 +211,12 @@ def logparse(
 geohash_tags['host'] = hostname
 geohash_tags['country_code'] = info.country.iso_code
 geohash_tags['country_name'] = info.country.name
-geohash_tags['state'] = info.subdivisions.most_specific.name
-geohash_tags['state_code'] = info.subdivisions.most_specific.iso_code
-geohash_tags['city'] = info.city.name
-geohash_tags['postal_code'] = info.postal.code
-geohash_tags['latitude'] = info.location.latitude
-geohash_tags['longitude'] = info.location.longitude
+geohash_tags['state'] = info.subdivisions.most_specific.name if info.subdivisions.most_specific.name else "-"
+geohash_tags['state_code'] = info.subdivisions.most_specific.iso_code if info.subdivisions.most_specific.iso_code else "-"
+geohash_tags['city'] = info.city.name if info.city.name else "-"
+geohash_tags['postal_code'] = info.postal.code if info.postal.code else "-"
+geohash_tags['latitude'] = info.location.latitude if info.location.latitude else "-"
+geohash_tags['longitude'] = info.location.longitude if info.location.longitude else "-"
 ips['tags'] = geohash_tags
 ips['fields'] = geohash_fields
 ips['measurement'] = geo_measurement
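
The replaced assignments all apply the same pattern: GeoIP2 returns `None` for fields it cannot resolve, and after this change every optional field falls back to a "-" placeholder instead of ending up as an empty or `None` tag value. Expressed as a standalone sketch (the helper name is hypothetical, not part of the project):

```python
# Illustration of the fallback pattern used for the geohash tags above.
def tag_or_dash(value):
    """Return the GeoIP2 value if present, otherwise a '-' placeholder."""
    return value if value else "-"

print(tag_or_dash("Oslo"))   # 'Oslo'
print(tag_or_dash(None))     # '-'
print(tag_or_dash(""))       # '-'
```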

@@ -223,20 +228,21 @@ def logparse(
     logging.error('Error writing data to InfluxDB! Check your database!\n'
                   f'Error: {e}'
                   )
-
+else:
+    logging.debug(f"Incorrect IP type: {ip_type}")
 if send_logs:
     data = search(log, line)
-    if ipadd(ip).iptype() == 'PUBLIC' and ip:
+    if ip_type in monitored_ip_types and ip:
         info = gi.city(ip)
-        if info is not None:
+        if info:
             datadict = data.groupdict()
             log_data_fields['count'] = 1
             log_data_fields['bytes_sent'] = int(datadict['bytes_sent'])
             log_data_fields['request_time'] = float(datadict['request_time'])
-            if datadict['connect_time'] == '-':
-                log_data_fields['connect_time'] = 0.0
-            else:
-                log_data_fields['connect_time'] = float(datadict['connect_time'])
+            try:
+                log_data_fields['connect_time'] = float(datadict['connect_time']) if datadict['connect_time'] != '-' else 0.0
+            except ValueError:
+                log_data_fields['connect_time'] = str(datadict['connect_time'])
             log_data_tags['ip'] = datadict['ipaddress']
             log_data_tags['datetime'] = datetime.strptime(datadict['dateandtime'], '%d/%b/%Y:%H:%M:%S %z')
             log_data_tags['remote_user'] = datadict['remote_user']
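
The old branch only handled the literal "-" that nginx logs when no upstream connect time exists; the new try/except keeps that behaviour and additionally falls back to the raw string if the value is otherwise unparsable, instead of crashing the parser. The same logic as a self-contained sketch (`parse_connect_time` and the sample values are hypothetical):

```python
# Sketch of the new connect_time handling from the hunk above.
def parse_connect_time(raw: str):
    try:
        # '-' means nginx recorded no upstream connect time
        return float(raw) if raw != '-' else 0.0
    except ValueError:
        # keep anything unparsable as-is rather than raising
        return str(raw)

for raw in ('0.004', '-', 'timeout'):
    print(raw, '->', parse_connect_time(raw))
```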

@@ -1,4 +1,4 @@
 geoip2==3.0.0
 geohash2==1.1
 influxdb==5.3.0
-IPy==1.0
+IPy==1.01

@@ -31,11 +31,11 @@ fi
 ln -s /config/geoip2db /var/lib/libmaxminddb
 # check GeoIP2 database
 if [ -n "$MAXMINDDB_LICENSE_KEY" ]; then
-    sed -i "s|.*MAXMINDDB_LICENSE_KEY.*|MAXMINDDB_LICENSE_KEY=\"${MAXMINDDB_LICENSE_KEY}\"|g" /etc/conf.d/libmaxminddb
-    if [ ! -f /var/lib/libmaxminddb/GeoLite2-City.mmdb ]; then
-        echo "Downloading GeoIP2 City database."
-        /etc/periodic/weekly/libmaxminddb
-    fi
+    sed -i "s|.*MAXMINDDB_LICENSE_KEY.*|MAXMINDDB_LICENSE_KEY=\"${MAXMINDDB_LICENSE_KEY}\"|g" /etc/libmaxminddb.cron.conf
+    if [ ! -f /var/lib/libmaxminddb/GeoLite2-City.mmdb ]; then
+        echo "Downloading GeoIP2 City database."
+        /etc/periodic/weekly/libmaxminddb
+    fi
 fi
 
 # permissions
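
The only functional change in this hunk is the target file for the sed substitution (/etc/conf.d/libmaxminddb to /etc/libmaxminddb.cron.conf). For readers unfamiliar with sed's `|` delimiter syntax, the substitution is roughly equivalent to the following Python sketch (the file path and sample content are assumptions for illustration):

```python
# Illustration only: what the sed line does - replace whatever
# MAXMINDDB_LICENSE_KEY line exists in the cron config with the key
# taken from the container environment.
import os
import re

sample_conf = 'MAXMINDDB_LICENSE_KEY=""\n'  # pretend cron config content
key = os.environ.get('MAXMINDDB_LICENSE_KEY', 'example-key')

updated = re.sub(r'.*MAXMINDDB_LICENSE_KEY.*',
                 f'MAXMINDDB_LICENSE_KEY="{key}"',
                 sample_conf)
print(updated)  # MAXMINDDB_LICENSE_KEY="example-key"
```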