Nicholas St. Germain 2018-12-17 23:47:29 -06:00
parent 7f04419d4c
commit 556109e8c5
5 changed files with 62 additions and 48 deletions

Varken.py

@@ -17,10 +17,12 @@ from varken.sonarr import SonarrAPI
 from varken.radarr import RadarrAPI
 from varken.iniparser import INIParser
 from varken.dbmanager import DBManager
+from varken.helpers import GeoIPHandler
 from varken.tautulli import TautulliAPI
 from varken.sickchill import SickChillAPI
 from varken.varkenlogger import VarkenLogger

 PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
@@ -95,8 +97,10 @@ if __name__ == "__main__":
                 schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future)

     if CONFIG.tautulli_enabled:
+        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
+        schedule.every(12).to(24).hours.do(threaded, GEOIPHANDLER.update)
         for server in CONFIG.tautulli_servers:
-            TAUTULLI = TautulliAPI(server, DBMANAGER, DATA_FOLDER)
+            TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
             if server.get_activity:
                 schedule.every(server.get_activity_run_seconds).seconds.do(threaded, TAUTULLI.get_activity)
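
Note on the scheduling above: with the `schedule` library, every(12).to(24).hours picks a random interval between 12 and 24 hours for each run, so the GeoLite2 update check is jittered rather than fixed, and the single GEOIPHANDLER is shared by all Tautulli servers. A standalone sketch of the same idea (the data path and the bare run loop are illustrative; Varken itself wraps jobs in its threaded() helper):

# Standalone sketch of the randomized GeoLite2 update scheduling (illustrative only).
from time import sleep

import schedule

from varken.helpers import GeoIPHandler

DATA_FOLDER = '/opt/Varken/data'  # hypothetical location of Varken's data folder
GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)

# every(12).to(24) makes `schedule` pick a random interval between 12 and 24 hours
schedule.every(12).to(24).hours.do(GEOIPHANDLER.update)

while True:
    schedule.run_pending()
    sleep(1)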

varken/__init__.py

@@ -1,2 +1,2 @@
 VERSION = 1.3
-BRANCH = 'pre-nightly'
+BRANCH = 'nightly'

varken/helpers.py

@@ -1,7 +1,8 @@
-from time import time
 from hashlib import md5
-from tarfile import open
+from tarfile import open as taropen
+from datetime import date
 from logging import getLogger
+from calendar import monthcalendar
 from geoip2.database import Reader
 from urllib3 import disable_warnings
 from os import stat, remove, makedirs
@@ -14,52 +15,63 @@ from requests.exceptions import InvalidSchema, SSLError, ConnectionError
 logger = getLogger()


-def geoip_download(data_folder):
-    datafolder = data_folder
-
-    tar_dbfile = abspath(join(datafolder, 'GeoLite2-City.tar.gz'))
-
-    url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'
-    logger.info('Downloading GeoLite2 from %s', url)
-    urlretrieve(url, tar_dbfile)
-
-    tar = open(tar_dbfile, 'r:gz')
-    logger.debug('Opening GeoLite2 tar file : %s', tar_dbfile)
-
-    for files in tar.getmembers():
-        if 'GeoLite2-City.mmdb' in files.name:
-            logger.debug('"GeoLite2-City.mmdb" FOUND in tar file')
-            files.name = basename(files.name)
-            tar.extract(files, datafolder)
-            logger.debug('%s has been extracted to %s', files, datafolder)
-
-    remove(tar_dbfile)
-
-
-def geo_lookup(ipaddress, data_folder):
-    datafolder = data_folder
-    logger.debug('Reading GeoLite2 DB from %s', datafolder)
-
-    dbfile = abspath(join(datafolder, 'GeoLite2-City.mmdb'))
-    now = time()
-
-    try:
-        dbinfo = stat(dbfile)
-        db_age = now - dbinfo.st_ctime
-        if db_age > (35 * 86400):
-            logger.info('GeoLite2 DB is older than 35 days. Attempting to re-download...')
-            remove(dbfile)
-            geoip_download(datafolder)
-    except FileNotFoundError:
-        logger.error('GeoLite2 DB not found. Attempting to download...')
-        geoip_download(datafolder)
-
-    reader = Reader(dbfile)
-
-    return reader.city(ipaddress)
+class GeoIPHandler(object):
+    def __init__(self, data_folder):
+        self.data_folder = data_folder
+        self.dbfile = abspath(join(self.data_folder, 'GeoLite2-City.mmdb'))
+        self.logger = getLogger()
+        self.update()
+
+        self.logger.info('Opening persistent connection to GeoLite2 DB...')
+        self.reader = Reader(self.dbfile)
+
+    def lookup(self, ipaddress):
+        ip = ipaddress
+        self.logger.debug('Getting lat/long for Tautulli stream')
+        return self.reader.city(ip)
+
+    def update(self):
+        today = date.today()
+        dbdate = None
+        try:
+            dbdate = date.fromtimestamp(stat(self.dbfile).st_ctime)
+        except FileNotFoundError:
+            self.logger.error("Could not find GeoLite2 DB as: %s", self.dbfile)
+            self.download()
+        first_wednesday_day = [week[2:3][0] for week in monthcalendar(today.year, today.month) if week[2:3][0] != 0][0]
+        first_wednesday_date = date(today.year, today.month, first_wednesday_day)
+
+        if dbdate < first_wednesday_date < today:
+            self.logger.info("Newer GeoLite2 DB available, Updating...")
+            remove(self.dbfile)
+            self.download()
+        else:
+            td = first_wednesday_date - today
+            if td.days < 0:
+                self.logger.debug('Geolite2 DB is only %s days old. Keeping current copy', abs(td.days))
+            else:
+                self.logger.debug('Geolite2 DB will update in %s days', abs(td.days))
+
+    def download(self):
+        tar_dbfile = abspath(join(self.data_folder, 'GeoLite2-City.tar.gz'))
+        url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'
+        self.logger.info('Downloading GeoLite2 from %s', url)
+        urlretrieve(url, tar_dbfile)
+        self.logger.debug('Opening GeoLite2 tar file : %s', tar_dbfile)
+
+        tar = taropen(tar_dbfile, 'r:gz')
+        for files in tar.getmembers():
+            if 'GeoLite2-City.mmdb' in files.name:
+                self.logger.debug('"GeoLite2-City.mmdb" FOUND in tar file')
+                files.name = basename(files.name)
+                tar.extract(files, self.data_folder)
+                self.logger.debug('%s has been extracted to %s', files, self.data_folder)
+        tar.close()
+        remove(tar_dbfile)


 def hashit(string):
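
For orientation, the refactor above replaces the module-level geoip_download()/geo_lookup() pair with a single GeoIPHandler that keeps one geoip2 Reader open and only re-downloads the database when the existing file predates the first Wednesday of the current month, roughly tracking MaxMind's monthly GeoLite2 release cycle. A rough usage sketch, not part of this commit (the data path is hypothetical; geoip2 must be installed):

# Rough usage sketch of the new GeoIPHandler (illustrative only).
from varken.helpers import GeoIPHandler

geoip = GeoIPHandler('/opt/Varken/data')  # hypothetical path; __init__ runs update() and opens the Reader

record = geoip.lookup('8.8.8.8')          # returns a geoip2 City model
print(record.location.latitude, record.location.longitude)

geoip.update()                            # only re-downloads when a newer monthly DB should exist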

varken/iniparser.py

@@ -75,8 +75,6 @@ class INIParser(object):
         regex = compile('{}'.format(search), IGNORECASE)

-        print(match(regex, url_check))
-
         valid = match(regex, url_check) is not None
         if not valid:
             if inc_port:

varken/tautulli.py

@@ -4,18 +4,18 @@ from datetime import datetime, timezone
 from geoip2.errors import AddressNotFoundError
 from varken.structures import TautulliStream
-from varken.helpers import geo_lookup, hashit, connection_handler
+from varken.helpers import hashit, connection_handler


 class TautulliAPI(object):
-    def __init__(self, server, dbmanager, data_folder):
+    def __init__(self, server, dbmanager, geoiphandler):
         self.dbmanager = dbmanager
         self.server = server
+        self.geoiphandler = geoiphandler
         self.session = Session()
         self.session.params = {'apikey': self.server.api_key, 'cmd': 'get_activity'}
         self.endpoint = '/api/v2'
         self.logger = getLogger()
-        self.data_folder = data_folder

     def __repr__(self):
         return f"<tautulli-{self.server.id}>"
@@ -47,13 +47,13 @@ class TautulliAPI(object):
             exit(1)

         try:
-            geodata = geo_lookup(session.ip_address_public, self.data_folder)
+            geodata = self.geoiphandler.lookup(session.ip_address_public)
         except (ValueError, AddressNotFoundError):
             if self.server.fallback_ip:
-                geodata = geo_lookup(self.server.fallback_ip, self.data_folder)
+                geodata = self.geoiphandler.lookup(self.server.fallback_ip)
             else:
                 my_ip = self.session.get('http://ip.42.pl/raw').text
-                geodata = geo_lookup(my_ip, self.data_folder)
+                geodata = self.geoiphandler.lookup(my_ip)

         if not all([geodata.location.latitude, geodata.location.longitude]):
             latitude = 37.234332396
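
The lookup chain above now goes through the shared handler: try the stream's public IP first, fall back to the configured fallback_ip, and as a last resort geolocate the host's own public IP fetched from ip.42.pl. A condensed sketch of that chain (the helper name and signature are hypothetical; the attributes mirror the diff):

# Condensed sketch of the geodata fallback chain (hypothetical helper, not part of this commit).
from geoip2.errors import AddressNotFoundError

def resolve_geodata(geoiphandler, server, session, public_ip):
    try:
        return geoiphandler.lookup(public_ip)
    except (ValueError, AddressNotFoundError):
        if server.fallback_ip:
            # Admin-configured fallback for private or unresolvable addresses
            return geoiphandler.lookup(server.fallback_ip)
        # Last resort: ask an external service for this host's public IP
        my_ip = session.get('http://ip.42.pl/raw').text
        return geoiphandler.lookup(my_ip)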