2018-12-17 18:19:41 -08:00
|
|
|
from hashlib import md5
|
2018-12-17 21:47:29 -08:00
|
|
|
from datetime import date
|
2018-12-17 18:19:41 -08:00
|
|
|
from logging import getLogger
|
2018-12-17 21:47:29 -08:00
|
|
|
from calendar import monthcalendar
|
2018-12-17 18:19:41 -08:00
|
|
|
from geoip2.database import Reader
|
2018-12-18 20:08:21 -08:00
|
|
|
from tarfile import open as taropen
|
2018-12-17 18:19:41 -08:00
|
|
|
from urllib3 import disable_warnings
|
|
|
|
from os import stat, remove, makedirs
|
|
|
|
from urllib.request import urlretrieve
|
2018-12-03 20:56:12 -08:00
|
|
|
from json.decoder import JSONDecodeError
|
2018-12-17 20:08:33 -08:00
|
|
|
from os.path import abspath, join, basename, isdir
|
2018-12-17 18:19:41 -08:00
|
|
|
from urllib3.exceptions import InsecureRequestWarning
|
2018-12-11 11:41:00 -08:00
|
|
|
from requests.exceptions import InvalidSchema, SSLError, ConnectionError
|
2019-01-01 20:41:23 -08:00
|
|
|
from ipaddress import IPv4Address
|
2018-11-28 12:32:39 -08:00
|
|
|
|
2018-12-17 18:19:41 -08:00
|
|
|
# Module-level root logger used by the free functions below (connection_handler, clean_sid_check).
logger = getLogger()
|
2018-12-04 19:17:33 -08:00
|
|
|
|
2018-11-28 12:32:39 -08:00
|
|
|
|
2018-12-17 21:47:29 -08:00
|
|
|
class GeoIPHandler(object):
    """Maintain a local copy of the MaxMind GeoLite2-City database.

    Downloads the .mmdb on first use, refreshes it after the first
    Wednesday of the month (MaxMind's release cadence — assumed from the
    update logic below), and keeps a persistent geoip2 ``Reader`` open
    for city lookups.
    """

    def __init__(self, data_folder):
        """Ensure the DB exists/is current in *data_folder* and open a Reader."""
        self.data_folder = data_folder
        self.dbfile = abspath(join(self.data_folder, 'GeoLite2-City.mmdb'))
        self.logger = getLogger()
        self.update()

        self.logger.info('Opening persistent connection to GeoLite2 DB...')
        self.reader = Reader(self.dbfile)

    def lookup(self, ipaddress):
        """Return the geoip2 City record for *ipaddress* (dotted-quad string)."""
        ip = ipaddress
        # Only the last octet is logged, keeping full client IPs out of the log
        self.logger.debug('Getting lat/long for Tautulli stream using ip with last octet ending in %s',
                          ip.split('.')[-1:][0])
        return self.reader.city(ip)

    def update(self):
        """Download the DB if missing, or replace it when a newer release is due.

        A refresh is attempted only when the on-disk file predates the first
        Wednesday of the current month and that Wednesday has already passed.
        """
        today = date.today()

        try:
            # NOTE(review): st_ctime is inode-change time on Linux, not creation
            # time — adequate here since the file is only ever (re)created.
            dbdate = date.fromtimestamp(stat(self.dbfile).st_ctime)
        except FileNotFoundError:
            self.logger.error("Could not find GeoLite2 DB as: %s", self.dbfile)
            self.download()
            dbdate = date.fromtimestamp(stat(self.dbfile).st_ctime)

        # First Wednesday of this month: weekday column 2 of monthcalendar();
        # leading weeks outside the month hold 0 in that slot, so skip them.
        first_wednesday_day = [week[2] for week in monthcalendar(today.year, today.month) if week[2] != 0][0]
        first_wednesday_date = date(today.year, today.month, first_wednesday_day)

        if dbdate < first_wednesday_date < today:
            self.logger.info("Newer GeoLite2 DB available, Updating...")
            remove(self.dbfile)
            self.download()
        else:
            td = first_wednesday_date - today
            if td.days < 0:
                self.logger.debug('Geolite2 DB is only %s days old. Keeping current copy', abs(td.days))
            else:
                self.logger.debug('Geolite2 DB will update in %s days', abs(td.days))

    def download(self):
        """Fetch the GeoLite2-City tarball and extract the .mmdb into data_folder."""
        tar_dbfile = abspath(join(self.data_folder, 'GeoLite2-City.tar.gz'))
        url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'

        self.logger.info('Downloading GeoLite2 from %s', url)
        urlretrieve(url, tar_dbfile)

        self.logger.debug('Opening GeoLite2 tar file : %s', tar_dbfile)

        # Context manager guarantees the tar handle is closed even if
        # extraction raises (the original leaked it on error).
        with taropen(tar_dbfile, 'r:gz') as tar:
            for files in tar.getmembers():
                if 'GeoLite2-City.mmdb' in files.name:
                    self.logger.debug('"GeoLite2-City.mmdb" FOUND in tar file')
                    # Flatten the dated directory inside the archive so the db
                    # always lands at data_folder/GeoLite2-City.mmdb
                    files.name = basename(files.name)
                    tar.extract(files, self.data_folder)
                    self.logger.debug('%s has been extracted to %s', files, self.data_folder)
        remove(tar_dbfile)
|
2018-12-02 19:59:46 -08:00
|
|
|
|
|
|
|
|
|
|
|
def hashit(string):
    """Return the hexadecimal MD5 digest of *string*."""
    return md5(string.encode()).hexdigest()
|
2018-12-03 20:56:12 -08:00
|
|
|
|
|
|
|
|
2019-01-01 20:41:23 -08:00
|
|
|
def rfc1918_ip_check(ip):
    """Return True when *ip* is a private IPv4 address.

    Note: ``IPv4Address.is_private`` is slightly broader than strict
    RFC 1918 (it also flags loopback and link-local ranges).
    """
    return IPv4Address(ip).is_private
|
|
|
|
|
|
|
|
|
2018-12-03 20:56:12 -08:00
|
|
|
def connection_handler(session, request, verify):
    """Send a prepared request and translate the response for callers.

    Parameters:
        session: requests.Session used to send the request.
        request: a prepared request (requests.PreparedRequest).
        verify: TLS certificate verification flag passed to ``Session.send``.

    Returns:
        Parsed JSON body on HTTP 200, the ``X-Auth-Token`` header value on
        HTTP 204 (ASA only), otherwise False. Schema/TLS/connection errors
        are logged and result in False.
    """
    s = session
    r = request
    v = verify
    return_json = False

    disable_warnings(InsecureRequestWarning)

    try:
        get = s.send(r, verify=v)
        if get.status_code == 401:
            logger.info('Your api key is incorrect for %s', r.url)
        elif get.status_code == 404:
            logger.info('This url doesnt even resolve: %s', r.url)
        elif get.status_code == 200:
            try:
                return_json = get.json()
            except JSONDecodeError:
                logger.error('No JSON response. Response is: %s', get.text)
        # 204 No Content is for ASA only
        elif get.status_code == 204:
            # .get() avoids the KeyError the old subscript raised when the
            # header was missing; a missing/empty token falls through to False.
            token = get.headers.get('X-Auth-Token')
            if token:
                return token

    except InvalidSchema:
        logger.error("You added http(s):// in the config file. Don't do that.")

    except SSLError as e:
        logger.error('Either your host is unreachable or you have an SSL issue. : %s', e)

    except ConnectionError as e:
        logger.error('Cannot resolve the url/ip/port. Check connectivity. Error: %s', e)

    return return_json
|
2018-12-04 21:42:43 -08:00
|
|
|
|
|
|
|
|
|
|
|
def mkdir_p(path):
    """Create *path* (including missing parents) if it does not already exist.

    Failures are logged via the 'temp' logger rather than raised.
    """
    templogger = getLogger('temp')
    try:
        # Guard clause: nothing to do when the folder is already there
        if isdir(path):
            return
        templogger.info('Creating folder %s ', path)
        makedirs(path, exist_ok=True)
    except Exception as e:
        templogger.error('Could not create folder %s : %s ', path, e)
|
2018-12-11 09:45:43 -08:00
|
|
|
|
|
|
|
|
|
|
|
def clean_sid_check(server_id_list, server_type=None):
    """Parse a comma-separated string of server ids into a list of ints.

    Parameters:
        server_id_list: string such as ``"1, 2,3"``; all spaces are stripped.
        server_type: optional label used only in log messages.

    Returns:
        List of valid integer ids, or False when none were valid.
    """
    t = server_type
    sid_list = server_id_list
    cleaned_list = sid_list.replace(' ', '').split(',')
    valid_sids = []
    for sid in cleaned_list:
        try:
            valid_sids.append(int(sid))
        except ValueError:
            logger.error("%s is not a valid server id number", sid)
    # server_type defaults to None; calling None.upper() used to raise
    # AttributeError, so fall back to a generic label.
    label = t.upper() if t else 'SERVER'
    if valid_sids:
        logger.info('%s : %s', label, valid_sids)
        return valid_sids
    else:
        logger.error('No valid %s', label)
        return False
|