Compare commits
No commits in common. "227264e6fd30f1237f56b5b9cfdea4ce4f83b4ad" and "66cfd838d49aa075a4905d2526c079d95138aab7" have entirely different histories.
227264e6fd ... 66cfd838d4
5 changed files with 22 additions and 29 deletions
@@ -1,4 +1,4 @@
-FROM lsiobase/alpine:3.15
+FROM lsiobase/alpine:3.12
 LABEL maintainer="GilbN"
 
 WORKDIR /geoip2influx

@@ -100,7 +100,6 @@ services:
 ## Grafana dashboard:
 ### [Grafana Dashboard Link](https://grafana.com/grafana/dashboards/12268/)
-
 Needs the [grafana-worldmap-panel](https://grafana.com/grafana/plugins/grafana-worldmap-panel/?tab=installation)
 ***
 
 ## Sending Nginx log metrics

@@ -36,10 +36,6 @@ g2i_log_path = env.get('GEOIP2INFLUX_LOG_PATH','/config/log/geoip2influx/geoip2i
 # Logging
 logging.basicConfig(level=log_level,format='GEOIP2INFLUX %(asctime)s :: %(levelname)s :: %(message)s',datefmt='%d/%b/%Y %H:%M:%S',handlers=[logging.StreamHandler(),logging.FileHandler(g2i_log_path)])
 
-# global variables
-monitored_ip_types = ['PUBLIC', 'ALLOCATED APNIC', 'ALLOCATED ARIN', 'ALLOCATED RIPE NCC', 'ALLOCATED LACNIC', 'ALLOCATED AFRINIC']
-
-
 def regex_tester(log_path, N):
     time_out = time() + 60
     re_ipv4 = compile(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})')

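For reference, the basicConfig call above (unchanged in this hunk) sends every record both to stdout and to the file named by GEOIP2INFLUX_LOG_PATH. A minimal sketch of the same handler setup, with a throwaway log path standing in for the env-derived one:

    import logging

    # Throwaway path for illustration; the script derives it from GEOIP2INFLUX_LOG_PATH.
    logging.basicConfig(
        level=logging.INFO,
        format='GEOIP2INFLUX %(asctime)s :: %(levelname)s :: %(message)s',
        datefmt='%d/%b/%Y %H:%M:%S',
        handlers=[logging.StreamHandler(), logging.FileHandler('/tmp/geoip2influx.log')],
    )
    logging.info('parser started')  # printed to the console and appended to the file
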
@@ -200,10 +196,9 @@ def logparse(
                                 f'Line: {line}'
                                 )
                continue
-            ip_type = ipadd(ip).iptype()
-            if ip_type in monitored_ip_types and ip:
+            if ipadd(ip).iptype() == 'PUBLIC' and ip:
                info = gi.city(ip)
-                if info:
+                if info is not None:
                    geohash = encode(info.location.latitude, info.location.longitude)
                    geohash_fields['count'] = 1
                    geohash_tags['geohash'] = geohash

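The dropped monitored_ip_types check matters mostly for IPv6 clients: IPy's iptype() returns 'PUBLIC' for routable IPv4 addresses, while global IPv6 addresses usually come back as registry strings such as 'ALLOCATED ARIN', which only the list-based filter accepts. A small sketch of the difference, using arbitrary example addresses:

    from IPy import IP

    monitored_ip_types = ['PUBLIC', 'ALLOCATED APNIC', 'ALLOCATED ARIN',
                          'ALLOCATED RIPE NCC', 'ALLOCATED LACNIC', 'ALLOCATED AFRINIC']

    for addr in ('8.8.8.8', '10.0.0.1', '2606:4700:4700::1111'):
        ip_type = IP(addr).iptype()
        in_list = ip_type in monitored_ip_types   # the '-' side: membership in the list
        is_public = ip_type == 'PUBLIC'            # the '+' side: strict PUBLIC check
        print(f'{addr}: {ip_type} -> list filter: {in_list}, PUBLIC-only filter: {is_public}')
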
@@ -211,12 +206,12 @@ def logparse(
                    geohash_tags['host'] = hostname
                    geohash_tags['country_code'] = info.country.iso_code
                    geohash_tags['country_name'] = info.country.name
-                    geohash_tags['state'] = info.subdivisions.most_specific.name if info.subdivisions.most_specific.name else "-"
-                    geohash_tags['state_code'] = info.subdivisions.most_specific.iso_code if info.subdivisions.most_specific.iso_code else "-"
-                    geohash_tags['city'] = info.city.name if info.city.name else "-"
-                    geohash_tags['postal_code'] = info.postal.code if info.postal.code else "-"
-                    geohash_tags['latitude'] = info.location.latitude if info.location.latitude else "-"
-                    geohash_tags['longitude'] = info.location.longitude if info.location.longitude else "-"
+                    geohash_tags['state'] = info.subdivisions.most_specific.name
+                    geohash_tags['state_code'] = info.subdivisions.most_specific.iso_code
+                    geohash_tags['city'] = info.city.name
+                    geohash_tags['postal_code'] = info.postal.code
+                    geohash_tags['latitude'] = info.location.latitude
+                    geohash_tags['longitude'] = info.location.longitude
                    ips['tags'] = geohash_tags
                    ips['fields'] = geohash_fields
                    ips['measurement'] = geo_measurement

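The ternaries on the '-' side guard against fields that GeoLite2 simply does not have for a given IP: attributes such as info.city.name or info.postal.code can be None, and the fallback keeps the InfluxDB tags populated with "-" instead. A tiny sketch of the same pattern with stand-in values:

    # Stand-ins for info.city.name / info.postal.code, which geoip2 returns as None
    # when the database has no value for the looked-up address.
    city_name = None
    postal_code = '90210'

    geohash_tags = {}
    geohash_tags['city'] = city_name if city_name else '-'              # -> '-'
    geohash_tags['postal_code'] = postal_code if postal_code else '-'   # -> '90210'
    print(geohash_tags)
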
@@ -228,21 +223,20 @@ def logparse(
                        logging.error('Error writing data to InfluxDB! Check your database!\n'
                                      f'Error: {e}'
                                      )
-            else:
-                logging.debug(f"Incorrect IP type: {ip_type}")
+
            if send_logs:
                data = search(log, line)
-                if ip_type in monitored_ip_types and ip:
+                if ipadd(ip).iptype() == 'PUBLIC' and ip:
                    info = gi.city(ip)
-                    if info:
+                    if info is not None:
                        datadict = data.groupdict()
                        log_data_fields['count'] = 1
                        log_data_fields['bytes_sent'] = int(datadict['bytes_sent'])
                        log_data_fields['request_time'] = float(datadict['request_time'])
-                        try:
-                            log_data_fields['connect_time'] = float(datadict['connect_time']) if datadict['connect_time'] != '-' else 0.0
-                        except ValueError:
-                            log_data_fields['connect_time'] = str(datadict['connect_time'])
+                        if datadict['connect_time'] == '-':
+                            log_data_fields['connect_time'] = 0.0
+                        else:
+                            log_data_fields['connect_time'] = float(datadict['connect_time'])
                        log_data_tags['ip'] = datadict['ipaddress']
                        log_data_tags['datetime'] = datetime.strptime(datadict['dateandtime'], '%d/%b/%Y:%H:%M:%S %z')
                        log_data_tags['remote_user'] = datadict['remote_user']

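The connect_time handling differs in what it tolerates: the '-' side converts anything numeric, maps the literal '-' to 0.0, and falls back to the raw string on any other value, while the '+' side only special-cases '-'. A sketch of both behaviours; the comma-separated sample mimics nginx's $upstream_connect_time when a request touches several upstreams (an assumption about the log format, not something this diff shows):

    def connect_time_removed(raw):
        # behaviour of the removed try/except version
        try:
            return float(raw) if raw != '-' else 0.0
        except ValueError:
            return str(raw)

    def connect_time_added(raw):
        # behaviour of the added if/else version
        if raw == '-':
            return 0.0
        return float(raw)

    for raw in ('0.004', '-', '0.120, 0.004'):
        print(raw, connect_time_removed(raw))
        # connect_time_added(raw) raises ValueError on the last sample
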
@@ -1,4 +1,4 @@
 geoip2==3.0.0
 geohash2==1.1
 influxdb==5.3.0
-IPy==1.01
+IPy==1.0

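Only the IPy pin changes here. A quick way to confirm which release actually ends up installed (a sketch; importlib.metadata is standard library from Python 3.8):

    from importlib.metadata import version

    print(version('IPy'))  # expected to report 1.0 with this requirements file
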
@@ -31,11 +31,11 @@ fi
 ln -s /config/geoip2db /var/lib/libmaxminddb
 # check GeoIP2 database
 if [ -n "$MAXMINDDB_LICENSE_KEY" ]; then
-    sed -i "s|.*MAXMINDDB_LICENSE_KEY.*|MAXMINDDB_LICENSE_KEY=\"${MAXMINDDB_LICENSE_KEY}\"|g" /etc/libmaxminddb.cron.conf
-    if [ ! -f /var/lib/libmaxminddb/GeoLite2-City.mmdb ]; then
-        echo "Downloading GeoIP2 City database."
-        /etc/periodic/weekly/libmaxminddb
-    fi
+    sed -i "s|.*MAXMINDDB_LICENSE_KEY.*|MAXMINDDB_LICENSE_KEY=\"${MAXMINDDB_LICENSE_KEY}\"|g" /etc/conf.d/libmaxminddb
+    if [ ! -f /var/lib/libmaxminddb/GeoLite2-City.mmdb ]; then
+        echo "Downloading GeoIP2 City database."
+        /etc/periodic/weekly/libmaxminddb
+    fi
 fi
 
 # permissions