v1.7.6
samwiseg0 2020-01-01 19:30:41 -05:00 committed by GitHub
commit b3b1876b82
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 61 additions and 26 deletions


@@ -1,5 +1,19 @@
 # Change Log
+## [v1.7.6](https://github.com/Boerderij/Varken/tree/v1.7.6) (2020-01-01)
+[Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.5...v1.7.6)
+
+**Merged pull requests:**
+
+- v1.7.6 Merge [\#163](https://github.com/Boerderij/Varken/pull/163) ([samwiseg0](https://github.com/samwiseg0))
+
+**Fixed bugs:**
+
+- \[BUG\] Geolite database download failing [\#164](https://github.com/Boerderij/Varken/issues/164)
+
+**Notes:**
+
+- A MaxMind license key will be required in order to download the GeoLite2 DB. Please see the [wiki](https://wiki.cajun.pro/link/5#bkmrk-maxmind) for more details.
 ## [v1.7.5](https://github.com/Boerderij/Varken/tree/v1.7.5) (2019-12-11)
 [Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.4...v1.7.5)


@@ -107,7 +107,7 @@ if __name__ == "__main__":
                 at_time.do(thread, SONARR.get_calendar, query="Future").tag("sonarr-{}-get_future".format(server.id))

     if CONFIG.tautulli_enabled:
-        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
+        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key)
         schedule.every(12).to(24).hours.do(thread, GEOIPHANDLER.update)
         for server in CONFIG.tautulli_servers:
             TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
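For readers unfamiliar with the call above: `schedule.every(12).to(24).hours` runs the job at a random interval between 12 and 24 hours. A minimal standalone sketch of that pattern, assuming the third-party `schedule` package; the job body and names below are placeholders, not Varken code:

```python
import time

import schedule  # third-party: pip install schedule


def update_geoip_db():
    # stand-in for GEOIPHANDLER.update in Varken
    print("checking MaxMind for a newer GeoLite2 DB...")


# run the update once every 12 to 24 hours, at a randomized interval
schedule.every(12).to(24).hours.do(update_geoip_db)

while True:
    schedule.run_pending()
    time.sleep(60)
```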


@@ -6,6 +6,7 @@ tautulli_server_ids = 1
 ombi_server_ids = 1
 sickchill_server_ids = false
 unifi_server_ids = false
+maxmind_license_key = xxxxxxxxxxxxxxxx

 [influxdb]
 url = influxdb.domain.tld


@@ -29,6 +29,7 @@ services:
       - VRKN_GLOBAL_OMBI_SERVER_IDS=1
       - VRKN_GLOBAL_SICKCHILL_SERVER_IDS=false
       - VRKN_GLOBAL_UNIFI_SERVER_IDS=false
+      - VRKN_GLOBAL_MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx
       - VRKN_INFLUXDB_URL=influxdb.domain.tld
       - VRKN_INFLUXDB_PORT=8086
       - VRKN_INFLUXDB_SSL=false


@@ -1,2 +1,2 @@
-VERSION = "1.7.5"
+VERSION = "1.7.6"
 BRANCH = 'master'


@@ -3,7 +3,7 @@ from datetime import date, timedelta
 from time import sleep
 from logging import getLogger
 from ipaddress import IPv4Address
-from urllib.error import HTTPError
+from urllib.error import HTTPError, URLError
 from geoip2.database import Reader
 from tarfile import open as taropen
 from urllib3 import disable_warnings
@@ -18,24 +18,25 @@ logger = getLogger()


 class GeoIPHandler(object):
-    def __init__(self, data_folder):
+    def __init__(self, data_folder, maxmind_license_key):
         self.data_folder = data_folder
+        self.maxmind_license_key = maxmind_license_key
         self.dbfile = abspath(join(self.data_folder, 'GeoLite2-City.mmdb'))
         self.logger = getLogger()
         self.reader = None
         self.reader_manager(action='open')
-        self.logger.info('Opening persistent connection to GeoLite2 DB...')
+        self.logger.info('Opening persistent connection to the MaxMind DB...')

     def reader_manager(self, action=None):
         if action == 'open':
             try:
                 self.reader = Reader(self.dbfile)
             except FileNotFoundError:
-                self.logger.error("Could not find GeoLite2 DB! Downloading!")
+                self.logger.error("Could not find MaxMind DB! Downloading!")
                 result_status = self.download()
                 if result_status:
-                    self.logger.error("Could not download GeoLite2 DB!!!, You may need to manually install it.")
+                    self.logger.error("Could not download MaxMind DB! You may need to manually install it.")
                     exit(1)
                 else:
                     self.reader = Reader(self.dbfile)
@@ -53,54 +54,64 @@ class GeoIPHandler(object):
         try:
             dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
-            db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=60)
+            db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=30)
         except FileNotFoundError:
-            self.logger.error("Could not find GeoLite2 DB as: %s", self.dbfile)
+            self.logger.error("Could not find MaxMind DB as: %s", self.dbfile)
             self.download()
             dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
-            db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=60)
+            db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=30)

         if db_next_update < today:
-            self.logger.info("Newer GeoLite2 DB available, Updating...")
-            self.logger.debug("GeoLite2 DB date %s, DB updates after: %s, Today: %s",
+            self.logger.info("Newer MaxMind DB available, Updating...")
+            self.logger.debug("MaxMind DB date %s, DB updates after: %s, Today: %s",
                               dbdate, db_next_update, today)
             self.reader_manager(action='close')
             self.download()
             self.reader_manager(action='open')
         else:
             db_days_update = db_next_update - today
-            self.logger.debug("Geolite2 DB will update in %s days", abs(db_days_update.days))
-            self.logger.debug("GeoLite2 DB date %s, DB updates after: %s, Today: %s",
+            self.logger.debug("MaxMind DB will update in %s days", abs(db_days_update.days))
+            self.logger.debug("MaxMind DB date %s, DB updates after: %s, Today: %s",
                               dbdate, db_next_update, today)

     def download(self):
         tar_dbfile = abspath(join(self.data_folder, 'GeoLite2-City.tar.gz'))
-        url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'
+        maxmind_url = ('https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City'
+                       f'&suffix=tar.gz&license_key={self.maxmind_license_key}')
         downloaded = False
         retry_counter = 0

         while not downloaded:
-            self.logger.info('Downloading GeoLite2 from %s', url)
+            self.logger.info('Downloading GeoLite2 DB from MaxMind...')
             try:
-                urlretrieve(url, tar_dbfile)
+                urlretrieve(maxmind_url, tar_dbfile)
                 downloaded = True
+            except URLError as e:
+                self.logger.error("Problem downloading new MaxMind DB: %s", e)
+                result_status = 1
+                return result_status
             except HTTPError as e:
-                self.logger.error("Problem downloading new GeoLite2 DB... Trying again. Error: %s", e)
+                if e.code == 401:
+                    self.logger.error("Your MaxMind license key is incorrect! Check your config: %s", e)
+                    result_status = 1
+                    return result_status
+                else:
+                    self.logger.error("Problem downloading new MaxMind DB... Trying again: %s", e)
                 sleep(2)
                 retry_counter = (retry_counter + 1)

                 if retry_counter >= 3:
-                    self.logger.error("Retried downloading the new GeoLite2 DB 3 times and failed... Aborting!")
+                    self.logger.error("Retried downloading the new MaxMind DB 3 times and failed... Aborting!")
                     result_status = 1
                     return result_status
         try:
             remove(self.dbfile)
         except FileNotFoundError:
-            self.logger.warning("Cannot remove GeoLite2 DB as it does not exist!")
+            self.logger.warning("Cannot remove MaxMind DB as it does not exist!")

-        self.logger.debug("Opening GeoLite2 tar file : %s", tar_dbfile)
+        self.logger.debug("Opening MaxMind tar file : %s", tar_dbfile)

         tar = taropen(tar_dbfile, 'r:gz')
@@ -113,9 +124,9 @@ class GeoIPHandler(object):
         tar.close()
         try:
             remove(tar_dbfile)
-            self.logger.debug('Removed the GeoLite2 DB TAR file.')
+            self.logger.debug('Removed the MaxMind DB tar file.')
         except FileNotFoundError:
-            self.logger.warning("Cannot remove GeoLite2 DB TAR file as it does not exist!")
+            self.logger.warning("Cannot remove MaxMind DB TAR file as it does not exist!")


 def hashit(string):
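The heart of this file's change is the move from MaxMind's old public download URL to the authenticated geoip_download endpoint. Below is a rough standalone sketch of that download-and-extract flow, not the Varken method itself; the license key value and file paths are placeholders. Since HTTPError is a subclass of URLError, the sketch catches HTTPError first so the 401 (bad license key) branch is reachable.

```python
from urllib.error import HTTPError, URLError
from urllib.request import urlretrieve
from tarfile import open as taropen

# Placeholder values; Varken reads the real key from the [global] maxmind_license_key
# option or the VRKN_GLOBAL_MAXMIND_LICENSE_KEY environment variable.
LICENSE_KEY = "xxxxxxxxxxxxxxxx"
TARBALL = "GeoLite2-City.tar.gz"

url = ('https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City'
       f'&suffix=tar.gz&license_key={LICENSE_KEY}')

try:
    urlretrieve(url, TARBALL)
except HTTPError as e:
    if e.code == 401:
        print(f"license key rejected by MaxMind: {e}")
    else:
        print(f"download failed: {e}")
except URLError as e:
    print(f"could not reach MaxMind: {e}")
else:
    # pull the .mmdb member out of the tarball, flattening its path
    with taropen(TARBALL, 'r:gz') as tar:
        for member in tar.getmembers():
            if member.name.endswith('GeoLite2-City.mmdb'):
                member.name = member.name.rsplit('/', 1)[-1]
                tar.extract(member)
```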


@@ -207,6 +207,7 @@ class INIParser(object):
                     missing_days_run_seconds = int(env.get(
                         f'VRKN_{envsection}_MISSING_DAYS_RUN_SECONDS',
                         self.config.getint(section, 'missing_days_run_seconds')))
+
                     future_days_run_seconds = int(env.get(
                         f'VRKN_{envsection}_FUTURE_DAYS_RUN_SECONDS',
                         self.config.getint(section, 'future_days_run_seconds')))
@@ -234,12 +235,14 @@ class INIParser(object):
                     get_stats = boolcheck(env.get(f'VRKN_{envsection}_GET_STATS',
                                                   self.config.get(section, 'get_stats')))
+
                     get_activity = boolcheck(env.get(f'VRKN_{envsection}_GET_ACTIVITY',
                                                      self.config.get(section, 'get_activity')))
+
                     get_activity_run_seconds = int(env.get(
                         f'VRKN_{envsection}_GET_ACTIVITY_RUN_SECONDS',
                         self.config.getint(section, 'get_activity_run_seconds')))

                     get_stats_run_seconds = int(env.get(
                         f'VRKN_{envsection}_GET_STATS_RUN_SECONDS',
                         self.config.getint(section, 'get_stats_run_seconds')))
@@ -251,11 +254,15 @@ class INIParser(object):
                                               server_id)
                         exit(1)

+                    maxmind_license_key = env.get(f'VRKN_GLOBAL_MAXMIND_LICENSE_KEY',
+                                                  self.config.get('global', 'maxmind_license_key'))
+
                     server = TautulliServer(id=server_id, url=scheme + url, api_key=apikey,
                                             verify_ssl=verify_ssl, get_activity=get_activity,
                                             fallback_ip=fallback_ip, get_stats=get_stats,
                                             get_activity_run_seconds=get_activity_run_seconds,
-                                            get_stats_run_seconds=get_stats_run_seconds)
+                                            get_stats_run_seconds=get_stats_run_seconds,
+                                            maxmind_license_key=maxmind_license_key)

                 if service == 'ombi':
                     issue_status_counts = boolcheck(env.get(
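The new option follows the same precedence as the other VRKN_ environment overrides: the environment variable wins, otherwise the value from the [global] section of the ini file is used. A tiny standalone illustration of that lookup order, with an inline config string standing in for the real ini file:

```python
from os import environ as env
from configparser import ConfigParser

config = ConfigParser()
# inline stand-in for Varken's ini file; only the relevant option is shown
config.read_string("[global]\nmaxmind_license_key = key_from_ini\n")

# VRKN_GLOBAL_MAXMIND_LICENSE_KEY overrides the ini value when it is set
maxmind_license_key = env.get('VRKN_GLOBAL_MAXMIND_LICENSE_KEY',
                              config.get('global', 'maxmind_license_key'))
print(maxmind_license_key)
```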


@@ -67,6 +67,7 @@ class TautulliServer(NamedTuple):
     id: int = None
     url: str = None
     verify_ssl: bool = None
+    maxmind_license_key: str = None


 class SickChillServer(NamedTuple):
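Since TautulliServer is a NamedTuple with per-field defaults, the new attribute simply defaults to None when it is not supplied. A trimmed-down illustration, using only a hypothetical subset of the real fields:

```python
from typing import NamedTuple


class TautulliServer(NamedTuple):
    # trimmed to a few fields for illustration; the real class defines more
    id: int = None
    url: str = None
    verify_ssl: bool = None
    maxmind_license_key: str = None


server = TautulliServer(id=1, url='https://tautulli.domain.tld')
print(server.maxmind_license_key)  # -> None when not configured
```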