Varken/varken/helpers.py

221 lines
8 KiB
Python
Raw Permalink Normal View History

from hashlib import md5
2019-03-11 11:08:10 -07:00
from datetime import date, timedelta
from time import sleep
from logging import getLogger
2019-01-08 15:43:32 -08:00
from ipaddress import IPv4Address
from urllib.error import HTTPError, URLError
from geoip2.database import Reader
2018-12-18 20:08:21 -08:00
from tarfile import open as taropen
from urllib3 import disable_warnings
from os import stat, remove, makedirs
from urllib.request import urlretrieve
from json.decoder import JSONDecodeError
from os.path import abspath, join, basename, isdir
from urllib3.exceptions import InsecureRequestWarning
2019-03-11 17:42:53 -07:00
from requests.exceptions import InvalidSchema, SSLError, ConnectionError, ChunkedEncodingError
2018-11-28 12:32:39 -08:00
logger = getLogger()
2018-11-28 12:32:39 -08:00
2018-12-17 21:47:29 -08:00
class GeoIPHandler(object):
    """Manage a persistent MaxMind GeoLite2-City reader.

    Keeps the ``.mmdb`` file in ``data_folder`` fresh (30-day update cycle)
    and serves city lookups for dotted-quad IPv4 addresses.
    """

    def __init__(self, data_folder, maxmind_license_key):
        self.data_folder = data_folder
        self.maxmind_license_key = maxmind_license_key
        self.dbfile = abspath(join(self.data_folder, 'GeoLite2-City.mmdb'))
        self.logger = getLogger()
        self.reader = None
        self.reader_manager(action='open')

        self.logger.info('Opening persistent connection to the MaxMind DB...')

    def reader_manager(self, action=None):
        """Open (``action='open'``) or close (any other value) the DB reader.

        On open, a missing database file triggers a download; a failed
        download aborts the process, since nothing downstream can work
        without the database.
        """
        if action == 'open':
            try:
                self.reader = Reader(self.dbfile)
            except FileNotFoundError:
                self.logger.error("Could not find MaxMind DB! Downloading!")
                result_status = self.download()
                if result_status:
                    self.logger.error("Could not download MaxMind DB! You may need to manually install it.")
                    exit(1)
                else:
                    self.reader = Reader(self.dbfile)
        else:
            self.reader.close()

    def lookup(self, ipaddress):
        """Return the geoip2 City record for *ipaddress* (dotted-quad string)."""
        ip = ipaddress
        # Only the last octet is logged, to keep full client IPs out of the log.
        self.logger.debug('Getting lat/long for Tautulli stream using ip with last octet ending in %s',
                          ip.split('.')[-1])
        return self.reader.city(ip)

    def update(self):
        """Refresh the on-disk DB when it is older than 30 days.

        Downloads the DB first when it is missing entirely. When replacing
        an existing DB, the reader is closed around the swap and reopened.
        """
        today = date.today()

        try:
            dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
        except FileNotFoundError:
            self.logger.error("Could not find MaxMind DB as: %s", self.dbfile)
            self.download()
            dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
        db_next_update = dbdate + timedelta(days=30)

        if db_next_update < today:
            self.logger.info("Newer MaxMind DB available, Updating...")
            self.logger.debug("MaxMind DB date %s, DB updates after: %s, Today: %s",
                              dbdate, db_next_update, today)
            # Close before replacing the file so the old handle is released.
            self.reader_manager(action='close')
            self.download()
            self.reader_manager(action='open')
        else:
            db_days_update = db_next_update - today
            self.logger.debug("MaxMind DB will update in %s days", abs(db_days_update.days))
            self.logger.debug("MaxMind DB date %s, DB updates after: %s, Today: %s",
                              dbdate, db_next_update, today)

    def download(self):
        """Download and unpack the GeoLite2-City tarball into ``data_folder``.

        Returns 1 on failure (bad license key, or a download error after
        3 retries) and ``None`` on success -- callers treat any truthy
        return as failure.
        """
        tar_dbfile = abspath(join(self.data_folder, 'GeoLite2-City.tar.gz'))
        maxmind_url = ('https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City'
                       f'&suffix=tar.gz&license_key={self.maxmind_license_key}')
        downloaded = False
        retry_counter = 0

        while not downloaded:
            self.logger.info('Downloading GeoLite2 DB from MaxMind...')
            try:
                urlretrieve(maxmind_url, tar_dbfile)
                downloaded = True
            # HTTPError subclasses URLError, so it MUST be caught first --
            # otherwise the 401 (bad key) branch and the retry loop are
            # unreachable.
            except HTTPError as e:
                if e.code == 401:
                    self.logger.error("Your MaxMind license key is incorrect! Check your config: %s", e)
                    return 1
                self.logger.error("Problem downloading new MaxMind DB... Trying again: %s", e)
                sleep(2)
                retry_counter += 1
                if retry_counter >= 3:
                    self.logger.error("Retried downloading the new MaxMind DB 3 times and failed... Aborting!")
                    return 1
            except URLError as e:
                self.logger.error("Problem downloading new MaxMind DB: %s", e)
                return 1

        try:
            remove(self.dbfile)
        except FileNotFoundError:
            self.logger.warning("Cannot remove MaxMind DB as it does not exist!")

        self.logger.debug("Opening MaxMind tar file : %s", tar_dbfile)
        # Extract only the .mmdb member, flattening its archive path so it
        # lands directly in data_folder.
        with taropen(tar_dbfile, 'r:gz') as tar:
            for member in tar.getmembers():
                if 'GeoLite2-City.mmdb' in member.name:
                    self.logger.debug('"GeoLite2-City.mmdb" FOUND in tar file')
                    member.name = basename(member.name)
                    tar.extract(member, self.data_folder)
                    self.logger.debug('%s has been extracted to %s', member, self.data_folder)

        try:
            remove(tar_dbfile)
            self.logger.debug('Removed the MaxMind DB tar file.')
        except FileNotFoundError:
            self.logger.warning("Cannot remove MaxMind DB TAR file as it does not exist!")
2018-12-02 19:59:46 -08:00
def hashit(string):
    """Return the hex MD5 digest of *string* (stable, non-cryptographic id)."""
    return md5(string.encode()).hexdigest()
2019-01-01 20:41:23 -08:00
def rfc1918_ip_check(ip):
    """Return True when *ip* is a private IPv4 address.

    NOTE(review): ``IPv4Address.is_private`` matches more than the three
    RFC1918 blocks (e.g. loopback and link-local) -- callers appear to only
    care about "not publicly routable", so that is the contract here.
    """
    return IPv4Address(ip).is_private
2019-01-04 13:30:27 -08:00
def connection_handler(session, request, verify, as_is_reply=False):
    """Send a prepared request and normalize the outcome.

    Returns the decoded JSON body on a 200 response, the raw response
    object when *as_is_reply* is truthy (regardless of status), or False
    on any failure. All errors are logged rather than raised.
    """
    reply = False
    disable_warnings(InsecureRequestWarning)
    try:
        response = session.send(request, verify=verify)
        status = response.status_code
        if status == 401:
            body = str(response.content)
            # 401 bodies from Unifi/Plex-style backends hint at the real cause.
            if 'NoSiteContext' in body:
                logger.info('Your Site is incorrect for %s', request.url)
            elif 'LoginRequired' in body:
                logger.info('Your login credentials are incorrect for %s', request.url)
            else:
                logger.info('Your api key is incorrect for %s', request.url)
        elif status == 404:
            logger.info('This url doesnt even resolve: %s', request.url)
        elif status == 200:
            try:
                reply = response.json()
            except JSONDecodeError:
                logger.error('No JSON response. Response is: %s', response.text)

        # Raw-reply callers get the response object no matter the status.
        if as_is_reply:
            return response
    except InvalidSchema:
        logger.error("You added http(s):// in the config file. Don't do that.")
    except SSLError as e:
        logger.error('Either your host is unreachable or you have an SSL issue. : %s', e)
    except ConnectionError as e:
        logger.error('Cannot resolve the url/ip/port. Check connectivity. Error: %s', e)
    except ChunkedEncodingError as e:
        logger.error('Broken connection during request... oops? Error: %s', e)
    return reply
def mkdir_p(path):
    """Create *path* and any missing parents, like ``mkdir -p``.

    An already-existing directory is not an error. Failures are logged,
    not raised. Uses a throwaway logger because this runs before the main
    logger is configured.
    """
    templogger = getLogger('temp')
    try:
        if not isdir(path):
            templogger.info('Creating folder %s ', path)
        # Called unconditionally with exist_ok=True: avoids the race between
        # the isdir() check above and creation (the check only gates the log).
        makedirs(path, exist_ok=True)
    except Exception as e:
        templogger.error('Could not create folder %s : %s ', path, e)
2018-12-11 09:45:43 -08:00
def clean_sid_check(server_id_list, server_type=None):
    """Parse a comma-separated string of server ids into a list of ints.

    Non-numeric entries are logged and dropped. Returns the list of valid
    ids, or False when none survive. *server_type* labels the log lines;
    it now falls back to a generic label so the default of None no longer
    crashes on ``.upper()`` (AttributeError in the original).
    """
    label = (server_type or 'server').upper()
    valid_sids = []
    for sid in server_id_list.replace(' ', '').split(','):
        try:
            valid_sids.append(int(sid))
        except ValueError:
            logger.error("%s is not a valid server id number", sid)

    if valid_sids:
        logger.info('%s : %s', label, valid_sids)
        return valid_sids
    logger.error('No valid %s', label)
    return False
def boolcheck(var):
    """Interpret a config string as a boolean: 'true'/'yes' (any case) -> True."""
    return var.lower() in ('true', 'yes')
2019-10-08 08:15:25 -07:00
def itemgetter_with_default(**defaults):
    """Build a getter like operator.itemgetter over the keys of *defaults*,
    where a key missing from the mapping yields its default value instead."""
    def getter(mapping):
        return tuple(mapping.get(key, fallback) for key, fallback in defaults.items())
    return getter