import platform
import schedule

from time import sleep
from sys import version
from threading import Thread
from os import access, R_OK, getenv
from distro import linux_distribution
from os.path import isdir, abspath, dirname, join
from argparse import ArgumentParser, RawTextHelpFormatter
from logging import getLogger, StreamHandler, Formatter, DEBUG

from varken.ombi import OmbiAPI
from varken.cisco import CiscoAPI
from varken import VERSION, BRANCH
from varken.sonarr import SonarrAPI
from varken.radarr import RadarrAPI
from varken.iniparser import INIParser
from varken.dbmanager import DBManager
from varken.tautulli import TautulliAPI
from varken.sickchill import SickChillAPI
from varken.varkenlogger import VarkenLogger

PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
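
# Every scheduled job below is wrapped in threaded() so that one slow or
# unresponsive service API does not hold up the other collectors or the schedule loop.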
def threaded(job):
    thread = Thread(target=job)
    thread.start()


if __name__ == "__main__":
    parser = ArgumentParser(prog='varken',
                            description='Command-line utility to aggregate data from the Plex ecosystem into InfluxDB',
                            formatter_class=RawTextHelpFormatter)
    parser.add_argument("-d", "--data-folder", help='Define an alternate data folder location')
    parser.add_argument("-D", "--debug", action='store_true', help='Use to enable DEBUG logging')

    opts = parser.parse_args()
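
    # Default data folder is the "data" directory next to this script;
    # -d/--data-folder can point Varken at a different location.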
    DATA_FOLDER = abspath(join(dirname(__file__), 'data'))
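
    # Temporary console logger for errors raised before VarkenLogger is configured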
    templogger = getLogger('temp')
    templogger.setLevel(DEBUG)
    tempch = StreamHandler()
    tempformatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
    tempch.setFormatter(tempformatter)
    templogger.addHandler(tempch)
    if opts.data_folder:
        ARG_FOLDER = opts.data_folder

        if isdir(ARG_FOLDER):
            DATA_FOLDER = ARG_FOLDER
            if not access(DATA_FOLDER, R_OK):
                templogger.error("Read permission error for %s", DATA_FOLDER)
                exit(1)
        else:
            templogger.error("%s does not exist", ARG_FOLDER)
            exit(1)
    # Enable debug logging if a debug/Debug/DEBUG environment variable is set to True/true/yes
    enable_opts = ['True', 'true', 'yes']
    debug_opts = ['debug', 'Debug', 'DEBUG']

    if not opts.debug:
        opts.debug = any(getenv(env_var, False) in enable_opts for env_var in debug_opts)
    # Initiate the logger
    vl = VarkenLogger(data_folder=DATA_FOLDER, debug=opts.debug)
    vl.logger.info('Starting Varken...')

    vl.logger.info('Data folder is "%s"', DATA_FOLDER)

    vl.logger.info(u"%s %s (%s%s)", platform.system(), platform.release(), platform.version(),
                   f' - {PLATFORM_LINUX_DISTRO}' if PLATFORM_LINUX_DISTRO else '')

    vl.logger.info(u"Python %s", version)

    vl.logger.info("Varken v%s-%s", VERSION, BRANCH)
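
    # Read the varken.ini config from the data folder and open the shared InfluxDB connection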
    CONFIG = INIParser(DATA_FOLDER)
    DBMANAGER = DBManager(CONFIG.influx_server)
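
    # For every enabled service, build its API wrapper and register its polling
    # jobs with schedule at the intervals defined in the config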
    if CONFIG.sonarr_enabled:
        for server in CONFIG.sonarr_servers:
            SONARR = SonarrAPI(server, DBMANAGER)
            if server.queue:
                schedule.every(server.queue_run_seconds).seconds.do(threaded, SONARR.get_queue)
            if server.missing_days > 0:
                schedule.every(server.missing_days_run_seconds).seconds.do(threaded, SONARR.get_missing)
            if server.future_days > 0:
                schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future)

    if CONFIG.tautulli_enabled:
        for server in CONFIG.tautulli_servers:
            TAUTULLI = TautulliAPI(server, DBMANAGER, DATA_FOLDER)
            if server.get_activity:
                schedule.every(server.get_activity_run_seconds).seconds.do(threaded, TAUTULLI.get_activity)

    if CONFIG.radarr_enabled:
        for server in CONFIG.radarr_servers:
            RADARR = RadarrAPI(server, DBMANAGER)
            if server.get_missing:
                schedule.every(server.get_missing_run_seconds).seconds.do(threaded, RADARR.get_missing)
            if server.queue:
                schedule.every(server.queue_run_seconds).seconds.do(threaded, RADARR.get_queue)

    if CONFIG.ombi_enabled:
        for server in CONFIG.ombi_servers:
            OMBI = OmbiAPI(server, DBMANAGER)
            if server.request_type_counts:
                schedule.every(server.request_type_run_seconds).seconds.do(threaded, OMBI.get_request_counts)
            if server.request_total_counts:
                schedule.every(server.request_total_run_seconds).seconds.do(threaded, OMBI.get_all_requests)

    if CONFIG.sickchill_enabled:
        for server in CONFIG.sickchill_servers:
            SICKCHILL = SickChillAPI(server, DBMANAGER)
            if server.get_missing:
                schedule.every(server.get_missing_run_seconds).seconds.do(threaded, SICKCHILL.get_missing)

    if CONFIG.ciscoasa_enabled:
        for firewall in CONFIG.ciscoasa_servers:
            ASA = CiscoAPI(firewall, DBMANAGER)
            schedule.every(firewall.get_bandwidth_run_seconds).seconds.do(threaded, ASA.get_bandwidth)
    # Bail out if no service is enabled at all
    SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled,
                        CONFIG.sonarr_enabled, CONFIG.ciscoasa_enabled, CONFIG.sickchill_enabled]
    if not any(SERVICES_ENABLED):
        vl.logger.error("All services disabled. Exiting")
        exit(1)

    # Run all jobs once on startup
    schedule.run_all()
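
    # Main loop: check once a second for jobs that are due and fire them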
    while True:
        schedule.run_pending()
        sleep(1)