commit b3b1876b82
8 changed files with 61 additions and 26 deletions

CHANGELOG.md
@@ -1,5 +1,19 @@
 # Change Log
 
+## [v1.7.6](https://github.com/Boerderij/Varken/tree/v1.7.6) (2020-01-01)
+[Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.5...v1.7.6)
+
+**Merged pull requests:**
+
+- v1.7.6 Merge [\#163](https://github.com/Boerderij/Varken/pull/163) ([samwiseg0](https://github.com/samwiseg0))
+
+**Fixed bugs:**
+
+- \[BUG\] Geolite database download failing [\#164](https://github.com/Boerderij/Varken/issues/164)
+
+**Notes:**
+- A MaxMind license key will be required in order to download the GeoLite2 DB. Please see the [wiki](https://wiki.cajun.pro/link/5#bkmrk-maxmind) for more details.
+
 ## [v1.7.5](https://github.com/Boerderij/Varken/tree/v1.7.5) (2019-12-11)
 [Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.4...v1.7.5)
@@ -107,7 +107,7 @@ if __name__ == "__main__":
             at_time.do(thread, SONARR.get_calendar, query="Future").tag("sonarr-{}-get_future".format(server.id))
 
     if CONFIG.tautulli_enabled:
-        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
+        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key)
         schedule.every(12).to(24).hours.do(thread, GEOIPHANDLER.update)
        for server in CONFIG.tautulli_servers:
             TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
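For context, the handler is now constructed with the license key of the first configured Tautulli server. A minimal standalone sketch of the new call, assuming the varken package is importable, the data folder path is illustrative, and a valid key is exported as `VRKN_GLOBAL_MAXMIND_LICENSE_KEY` (the same variable name used in the docker example further down):

```python
from os import environ

from varken.helpers import GeoIPHandler

# Varken itself passes CONFIG.tautulli_servers[0].maxmind_license_key;
# here the key is pulled straight from the environment for illustration.
license_key = environ['VRKN_GLOBAL_MAXMIND_LICENSE_KEY']

geoip = GeoIPHandler('/opt/varken/data', license_key)  # downloads GeoLite2-City.mmdb if missing
geoip.update()  # re-downloads once the on-disk copy is older than 30 days
```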
@@ -6,6 +6,7 @@ tautulli_server_ids = 1
 ombi_server_ids = 1
 sickchill_server_ids = false
 unifi_server_ids = false
+maxmind_license_key = xxxxxxxxxxxxxxxx
 
 [influxdb]
 url = influxdb.domain.tld
@@ -29,6 +29,7 @@ services:
       - VRKN_GLOBAL_OMBI_SERVER_IDS=1
       - VRKN_GLOBAL_SICKCHILL_SERVER_IDS=false
       - VRKN_GLOBAL_UNIFI_SERVER_IDS=false
+      - VRKN_GLOBAL_MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx
       - VRKN_INFLUXDB_URL=influxdb.domain.tld
       - VRKN_INFLUXDB_PORT=8086
       - VRKN_INFLUXDB_SSL=false
@@ -1,2 +1,2 @@
-VERSION = "1.7.5"
+VERSION = "1.7.6"
 BRANCH = 'master'
@@ -3,7 +3,7 @@ from datetime import date, timedelta
 from time import sleep
 from logging import getLogger
 from ipaddress import IPv4Address
-from urllib.error import HTTPError
+from urllib.error import HTTPError, URLError
 from geoip2.database import Reader
 from tarfile import open as taropen
 from urllib3 import disable_warnings
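The new `URLError` import matters for the exception handling further down: in `urllib.error`, `HTTPError` is a subclass of `URLError`, so the order of `except` clauses decides which handler gets to see an HTTP status such as 401. A small standard-library sketch of the distinction:

```python
from urllib.error import HTTPError, URLError
from urllib.request import urlopen

def probe(url):
    """Return an HTTP status, or describe why the request failed."""
    try:
        with urlopen(url) as response:
            return response.status
    except HTTPError as e:   # listed first so status codes (e.g. 401) are visible
        return f'HTTP error {e.code}'
    except URLError as e:    # DNS failures, refused connections, timeouts, ...
        return f'network error: {e.reason}'
```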
@@ -18,24 +18,25 @@ logger = getLogger()
 
 
 class GeoIPHandler(object):
-    def __init__(self, data_folder):
+    def __init__(self, data_folder, maxmind_license_key):
         self.data_folder = data_folder
+        self.maxmind_license_key = maxmind_license_key
         self.dbfile = abspath(join(self.data_folder, 'GeoLite2-City.mmdb'))
         self.logger = getLogger()
         self.reader = None
         self.reader_manager(action='open')
 
-        self.logger.info('Opening persistent connection to GeoLite2 DB...')
+        self.logger.info('Opening persistent connection to the MaxMind DB...')
 
     def reader_manager(self, action=None):
         if action == 'open':
             try:
                 self.reader = Reader(self.dbfile)
             except FileNotFoundError:
-                self.logger.error("Could not find GeoLite2 DB! Downloading!")
+                self.logger.error("Could not find MaxMind DB! Downloading!")
                 result_status = self.download()
                 if result_status:
-                    self.logger.error("Could not download GeoLite2 DB!!!, You may need to manually install it.")
+                    self.logger.error("Could not download MaxMind DB! You may need to manually install it.")
                     exit(1)
                 else:
                     self.reader = Reader(self.dbfile)
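For context on the persistent reader the handler opens, a minimal `geoip2` lookup sketch; the database path is illustrative and the `.mmdb` file must already exist at that location:

```python
from geoip2.database import Reader

reader = Reader('/opt/varken/data/GeoLite2-City.mmdb')
response = reader.city('8.8.8.8')   # raises geoip2.errors.AddressNotFoundError for unknown IPs
print(response.country.iso_code, response.city.name, response.location.latitude)
reader.close()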
@@ -53,54 +54,64 @@ class GeoIPHandler(object):
 
         try:
             dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
-            db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=60)
+            db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=30)
 
         except FileNotFoundError:
-            self.logger.error("Could not find GeoLite2 DB as: %s", self.dbfile)
+            self.logger.error("Could not find MaxMind DB as: %s", self.dbfile)
             self.download()
             dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
-            db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=60)
+            db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=30)
 
         if db_next_update < today:
-            self.logger.info("Newer GeoLite2 DB available, Updating...")
-            self.logger.debug("GeoLite2 DB date %s, DB updates after: %s, Today: %s",
+            self.logger.info("Newer MaxMind DB available, Updating...")
+            self.logger.debug("MaxMind DB date %s, DB updates after: %s, Today: %s",
                               dbdate, db_next_update, today)
             self.reader_manager(action='close')
             self.download()
             self.reader_manager(action='open')
         else:
             db_days_update = db_next_update - today
-            self.logger.debug("Geolite2 DB will update in %s days", abs(db_days_update.days))
-            self.logger.debug("GeoLite2 DB date %s, DB updates after: %s, Today: %s",
+            self.logger.debug("MaxMind DB will update in %s days", abs(db_days_update.days))
+            self.logger.debug("MaxMind DB date %s, DB updates after: %s, Today: %s",
                               dbdate, db_next_update, today)
 
     def download(self):
         tar_dbfile = abspath(join(self.data_folder, 'GeoLite2-City.tar.gz'))
-        url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'
+        maxmind_url = ('https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City'
+                       f'&suffix=tar.gz&license_key={self.maxmind_license_key}')
         downloaded = False
 
         retry_counter = 0
 
         while not downloaded:
-            self.logger.info('Downloading GeoLite2 from %s', url)
+            self.logger.info('Downloading GeoLite2 DB from MaxMind...')
             try:
-                urlretrieve(url, tar_dbfile)
+                urlretrieve(maxmind_url, tar_dbfile)
                 downloaded = True
+            except URLError as e:
+                self.logger.error("Problem downloading new MaxMind DB: %s", e)
+                result_status = 1
+                return result_status
             except HTTPError as e:
-                self.logger.error("Problem downloading new GeoLite2 DB... Trying again. Error: %s", e)
-                sleep(2)
-                retry_counter = (retry_counter + 1)
+                if e.code == 401:
+                    self.logger.error("Your MaxMind license key is incorrect! Check your config: %s", e)
+                    result_status = 1
+                    return result_status
+                else:
+                    self.logger.error("Problem downloading new MaxMind DB... Trying again: %s", e)
+                    sleep(2)
+                    retry_counter = (retry_counter + 1)
 
                 if retry_counter >= 3:
-                    self.logger.error("Retried downloading the new GeoLite2 DB 3 times and failed... Aborting!")
+                    self.logger.error("Retried downloading the new MaxMind DB 3 times and failed... Aborting!")
                     result_status = 1
                     return result_status
         try:
             remove(self.dbfile)
         except FileNotFoundError:
-            self.logger.warning("Cannot remove GeoLite2 DB as it does not exist!")
+            self.logger.warning("Cannot remove MaxMind DB as it does not exist!")
 
-        self.logger.debug("Opening GeoLite2 tar file : %s", tar_dbfile)
+        self.logger.debug("Opening MaxMind tar file : %s", tar_dbfile)
 
         tar = taropen(tar_dbfile, 'r:gz')
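Since the GeoLite2 permalink now carries the license key as a query parameter, a key can be sanity-checked outside Varken using the same URL format and error handling the diff uses. A rough sketch; the destination path and the key value are placeholders:

```python
from urllib.error import HTTPError, URLError
from urllib.request import urlretrieve

def check_maxmind_key(license_key, dest='/tmp/GeoLite2-City.tar.gz'):
    url = ('https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City'
           f'&suffix=tar.gz&license_key={license_key}')
    try:
        urlretrieve(url, dest)
        return True
    except HTTPError as e:
        # MaxMind answers 401 when the key is missing or wrong (see issue #164)
        print(f'Download failed with HTTP {e.code}')
    except URLError as e:
        print(f'Network problem: {e.reason}')
    return False

# check_maxmind_key('xxxxxxxxxxxxxxxx')
```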
@@ -113,9 +124,9 @@ class GeoIPHandler(object):
         tar.close()
         try:
             remove(tar_dbfile)
-            self.logger.debug('Removed the GeoLite2 DB TAR file.')
+            self.logger.debug('Removed the MaxMind DB tar file.')
         except FileNotFoundError:
-            self.logger.warning("Cannot remove GeoLite2 DB TAR file as it does not exist!")
+            self.logger.warning("Cannot remove MaxMind DB TAR file as it does not exist!")
 
 
 def hashit(string):
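The member-extraction code that sits between the previous two hunks is elided from this diff. A plausible sketch of how the `.mmdb` could be pulled out of the downloaded tarball with `tarfile`; names and paths are illustrative, not the repository's exact code:

```python
from os.path import abspath, join
from tarfile import open as taropen

def extract_mmdb(tar_dbfile, data_folder):
    # GeoLite2 tarballs wrap the DB in a dated folder,
    # e.g. GeoLite2-City_20200101/GeoLite2-City.mmdb
    with taropen(tar_dbfile, 'r:gz') as tar:
        for member in tar.getmembers():
            if member.name.endswith('GeoLite2-City.mmdb'):
                member.name = 'GeoLite2-City.mmdb'  # drop the dated folder prefix
                tar.extract(member, path=data_folder)
    return abspath(join(data_folder, 'GeoLite2-City.mmdb'))
```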
@@ -207,6 +207,7 @@ class INIParser(object):
                 missing_days_run_seconds = int(env.get(
                     f'VRKN_{envsection}_MISSING_DAYS_RUN_SECONDS',
                     self.config.getint(section, 'missing_days_run_seconds')))
+
                 future_days_run_seconds = int(env.get(
                     f'VRKN_{envsection}_FUTURE_DAYS_RUN_SECONDS',
                     self.config.getint(section, 'future_days_run_seconds')))
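These parser blocks all follow the same precedence rule: a `VRKN_*` environment variable wins, otherwise the value is read from `varken.ini`. A condensed sketch of the pattern; the section and variable names are examples only:

```python
from configparser import ConfigParser
from os import environ as env

config = ConfigParser()
config.read('varken.ini')

# Environment variable overrides the ini value; the int() cast normalises
# both the string from the environment and the int from getint().
missing_days_run_seconds = int(env.get('VRKN_SONARR_1_MISSING_DAYS_RUN_SECONDS',
                                       config.getint('sonarr-1', 'missing_days_run_seconds')))
```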
@@ -234,12 +235,14 @@ class INIParser(object):
 
                 get_stats = boolcheck(env.get(f'VRKN_{envsection}_GET_STATS',
                                               self.config.get(section, 'get_stats')))
+
                 get_activity = boolcheck(env.get(f'VRKN_{envsection}_GET_ACTIVITY',
                                                  self.config.get(section, 'get_activity')))
+
                 get_activity_run_seconds = int(env.get(
                     f'VRKN_{envsection}_GET_ACTIVITY_RUN_SECONDS',
                     self.config.getint(section, 'get_activity_run_seconds')))
 
                 get_stats_run_seconds = int(env.get(
                     f'VRKN_{envsection}_GET_STATS_RUN_SECONDS',
                     self.config.getint(section, 'get_stats_run_seconds')))
@@ -251,11 +254,15 @@ class INIParser(object):
                                       server_id)
                     exit(1)
 
+                maxmind_license_key = env.get(f'VRKN_GLOBAL_MAXMIND_LICENSE_KEY',
+                                              self.config.get('global', 'maxmind_license_key'))
+
                 server = TautulliServer(id=server_id, url=scheme + url, api_key=apikey,
                                         verify_ssl=verify_ssl, get_activity=get_activity,
                                         fallback_ip=fallback_ip, get_stats=get_stats,
                                         get_activity_run_seconds=get_activity_run_seconds,
-                                        get_stats_run_seconds=get_stats_run_seconds)
+                                        get_stats_run_seconds=get_stats_run_seconds,
+                                        maxmind_license_key=maxmind_license_key)
 
                 if service == 'ombi':
                     issue_status_counts = boolcheck(env.get(
@@ -67,6 +67,7 @@ class TautulliServer(NamedTuple):
     id: int = None
     url: str = None
     verify_ssl: bool = None
+    maxmind_license_key: str = None
 
 
 class SickChillServer(NamedTuple):
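Because the new field carries a default of `None`, existing `TautulliServer(...)` call sites that do not pass a license key keep working. A tiny stand-in tuple illustrating the idea; this is not the real `varken.structures` class:

```python
from typing import NamedTuple

class ServerDemo(NamedTuple):            # stand-in for the real TautulliServer
    id: int = None
    url: str = None
    maxmind_license_key: str = None      # new optional field

old_style = ServerDemo(id=1, url='https://tautulli.domain.tld')
print(old_style.maxmind_license_key)     # -> None
```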