# Varken/Varken.py
import platform
import schedule
from time import sleep
from queue import Queue
from sys import version
from threading import Thread
from os import access, R_OK, getenv
from distro import linux_distribution
from os.path import isdir, abspath, dirname, join
from argparse import ArgumentParser, RawTextHelpFormatter
from logging import getLogger, StreamHandler, Formatter, DEBUG
from varken import structures # Needed to check version of python
from varken.ombi import OmbiAPI
from varken.unifi import UniFiAPI
from varken import VERSION, BRANCH
from varken.sonarr import SonarrAPI
from varken.radarr import RadarrAPI
from varken.iniparser import INIParser
from varken.dbmanager import DBManager
from varken.helpers import GeoIPHandler
from varken.tautulli import TautulliAPI
from varken.sickchill import SickChillAPI
from varken.varkenlogger import VarkenLogger
# Human-readable distro string for startup logging; empty on non-Linux hosts,
# where linux_distribution() yields only falsy parts.
PLATFORM_LINUX_DISTRO = ' '.join(filter(None, linux_distribution()))
def thread():
    """Worker loop: pull scheduled jobs off the queue and execute them.

    Runs until every job has been removed from the schedule. A job may
    return a value (a schedule tag); in that case all jobs carrying that
    tag are cleared so a failing service stops being polled.
    """
    while schedule.jobs:
        job = QUEUE.get()
        result = job()
        # A non-None result is a schedule tag identifying jobs that should
        # no longer run; unschedule them. (The leftover debug print of the
        # raw result was removed — output belongs to the logger.)
        if result is not None:
            schedule.clear(result)
        QUEUE.task_done()
if __name__ == "__main__":
    parser = ArgumentParser(prog='varken',
                            description='Command-line utility to aggregate data from the plex ecosystem into InfluxDB',
                            formatter_class=RawTextHelpFormatter)
    parser.add_argument("-d", "--data-folder", help='Define an alternate data folder location')
    parser.add_argument("-D", "--debug", action='store_true', help='Use to enable DEBUG logging')
    opts = parser.parse_args()

    DATA_FOLDER = abspath(join(dirname(__file__), 'data'))

    # Temporary logger so folder/permission errors are visible before the
    # real VarkenLogger (which needs a readable data folder) exists.
    templogger = getLogger('temp')
    templogger.setLevel(DEBUG)
    tempch = StreamHandler()
    tempformatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
    tempch.setFormatter(tempformatter)
    templogger.addHandler(tempch)

    if opts.data_folder:
        ARG_FOLDER = opts.data_folder
        if isdir(ARG_FOLDER):
            DATA_FOLDER = ARG_FOLDER
            if not access(DATA_FOLDER, R_OK):
                templogger.error("Read permission error for %s", DATA_FOLDER)
                exit(1)
        else:
            templogger.error("%s does not exist", ARG_FOLDER)
            exit(1)

    # Set Debug to True if a DEBUG-style env var is set to a truthy value
    # (getenv returns None for unset vars, which is never in enable_opts).
    enable_opts = ['True', 'true', 'yes']
    debug_opts = ['debug', 'Debug', 'DEBUG']
    if not opts.debug:
        opts.debug = any(getenv(env_name) in enable_opts for env_name in debug_opts)

    # Initiate the logger
    vl = VarkenLogger(data_folder=DATA_FOLDER, debug=opts.debug)
    vl.logger.info('Starting Varken...')
    vl.logger.info('Data folder is "%s"', DATA_FOLDER)
    vl.logger.info(u"%s %s (%s%s)", platform.system(), platform.release(), platform.version(),
                   ' - ' + PLATFORM_LINUX_DISTRO if PLATFORM_LINUX_DISTRO else '')
    vl.logger.info(u"Python %s", version)
    vl.logger.info("Varken v%s-%s", VERSION, BRANCH)

    CONFIG = INIParser(DATA_FOLDER)
    DBMANAGER = DBManager(CONFIG.influx_server)
    QUEUE = Queue()

    def schedule_job(run_seconds, func, tag):
        # Enqueue `func` for the worker thread every `run_seconds` seconds,
        # tagged so it can be cleared if its service becomes unreachable.
        schedule.every(run_seconds).seconds.do(QUEUE.put, func).tag(tag)

    if CONFIG.sonarr_enabled:
        for server in CONFIG.sonarr_servers:
            SONARR = SonarrAPI(server, DBMANAGER)
            if server.queue:
                schedule_job(server.queue_run_seconds, SONARR.get_queue,
                             "sonarr-{}-get_queue".format(server.id))
            if server.missing_days > 0:
                schedule_job(server.missing_days_run_seconds, SONARR.get_missing,
                             "sonarr-{}-get_missing".format(server.id))
            if server.future_days > 0:
                schedule_job(server.future_days_run_seconds, SONARR.get_future,
                             "sonarr-{}-get_future".format(server.id))

    if CONFIG.tautulli_enabled:
        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
        # GeoIP database refresh at a randomized 12-24h interval.
        schedule.every(12).to(24).hours.do(QUEUE.put, GEOIPHANDLER.update)
        for server in CONFIG.tautulli_servers:
            TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
            if server.get_activity:
                schedule_job(server.get_activity_run_seconds, TAUTULLI.get_activity,
                             "tautulli-{}-get_activity".format(server.id))
            if server.get_stats:
                schedule_job(server.get_stats_run_seconds, TAUTULLI.get_stats,
                             "tautulli-{}-get_stats".format(server.id))

    if CONFIG.radarr_enabled:
        for server in CONFIG.radarr_servers:
            RADARR = RadarrAPI(server, DBMANAGER)
            if server.get_missing:
                schedule_job(server.get_missing_run_seconds, RADARR.get_missing,
                             "radarr-{}-get_missing".format(server.id))
            if server.queue:
                schedule_job(server.queue_run_seconds, RADARR.get_queue,
                             "radarr-{}-get_queue".format(server.id))

    if CONFIG.ombi_enabled:
        for server in CONFIG.ombi_servers:
            OMBI = OmbiAPI(server, DBMANAGER)
            if server.request_type_counts:
                schedule_job(server.request_type_run_seconds, OMBI.get_request_counts,
                             "ombi-{}-get_request_counts".format(server.id))
            if server.request_total_counts:
                schedule_job(server.request_total_run_seconds, OMBI.get_all_requests,
                             "ombi-{}-get_all_requests".format(server.id))
            if server.issue_status_counts:
                schedule_job(server.issue_status_run_seconds, OMBI.get_issue_counts,
                             "ombi-{}-get_issue_counts".format(server.id))

    if CONFIG.sickchill_enabled:
        for server in CONFIG.sickchill_servers:
            SICKCHILL = SickChillAPI(server, DBMANAGER)
            if server.get_missing:
                schedule_job(server.get_missing_run_seconds, SICKCHILL.get_missing,
                             "sickchill-{}-get_missing".format(server.id))

    if CONFIG.unifi_enabled:
        for server in CONFIG.unifi_servers:
            UNIFI = UniFiAPI(server, DBMANAGER)
            schedule_job(server.get_usg_stats_run_seconds, UNIFI.get_usg_stats,
                         "unifi-{}-get_usg_stats".format(server.id))

    SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled,
                        CONFIG.unifi_enabled, CONFIG.sonarr_enabled, CONFIG.sickchill_enabled]
    if not any(SERVICES_ENABLED):
        vl.logger.error("All services disabled. Exiting")
        exit(1)

    WORKER = Thread(target=thread)
    WORKER.start()

    # Run all on startup, then poll the schedule until no jobs remain
    # (jobs are cleared by the worker when their service fails).
    schedule.run_all()
    while schedule.jobs:
        schedule.run_pending()
        sleep(1)