# Varken/Varken.py
import platform
import schedule

from time import sleep
from queue import Queue
from sys import version
from threading import Thread
from os import environ as env
from os import access, R_OK, getenv
from distro import linux_distribution
from os.path import isdir, abspath, dirname, join
from argparse import ArgumentParser, RawTextHelpFormatter
from logging import getLogger, StreamHandler, Formatter, DEBUG

# Needed to check version of python
from varken import structures # noqa

from varken.ombi import OmbiAPI
from varken.unifi import UniFiAPI
from varken import VERSION, BRANCH
from varken.sonarr import SonarrAPI
from varken.radarr import RadarrAPI
from varken.lidarr import LidarrAPI
from varken.iniparser import INIParser
from varken.dbmanager import DBManager
from varken.helpers import GeoIPHandler
from varken.tautulli import TautulliAPI
from varken.sickchill import SickChillAPI
from varken.varkenlogger import VarkenLogger

# Human-readable distro string, e.g. "Ubuntu 18.04 bionic"; empty parts of
# linux_distribution() are filtered out (empty string on non-Linux hosts).
PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
def thread(job, **kwargs):
    """Run *job* in a fire-and-forget worker thread.

    Used as the callable registered with ``schedule`` so that slow API
    polls never block the scheduler loop.

    :param job: callable to execute in the worker thread
    :param kwargs: keyword arguments forwarded to ``job``
    """
    # Pass kwargs straight through; the original dict(**kwargs) made a
    # redundant copy of an already-fresh dict.
    worker = Thread(target=job, kwargs=kwargs)
    worker.start()
if __name__ == "__main__":
    # Entry point: parse CLI args, validate the data folder, configure
    # logging, build API clients from varken.ini, register recurring
    # scheduler jobs per enabled service, then poll the scheduler forever.

    # --- Command-line arguments ------------------------------------------
    parser = ArgumentParser(prog='varken',
                            description='Command-line utility to aggregate data from the plex ecosystem into InfluxDB',
                            formatter_class=RawTextHelpFormatter)
    parser.add_argument("-d", "--data-folder", help='Define an alternate data folder location')
    parser.add_argument("-D", "--debug", action='store_true', help='Use to enable DEBUG logging. (Depreciated)')
    parser.add_argument("-ND", "--no_debug", action='store_true', help='Use to disable DEBUG logging')
    opts = parser.parse_args()

    # Temporary stderr logger used only until VarkenLogger is up, so that
    # data-folder problems can be reported before real logging exists.
    templogger = getLogger('temp')
    templogger.setLevel(DEBUG)
    tempch = StreamHandler()
    tempformatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
    tempch.setFormatter(tempformatter)
    templogger.addHandler(tempch)

    # Resolution order: DATA_FOLDER env var > --data-folder flag > ./data
    DATA_FOLDER = env.get('DATA_FOLDER', vars(opts).get('data_folder') or abspath(join(dirname(__file__), 'data')))

    # The data folder must exist and be readable; otherwise bail out early.
    if isdir(DATA_FOLDER):
        if not access(DATA_FOLDER, R_OK):
            templogger.error("Read permission error for %s", DATA_FOLDER)
            exit(1)
    else:
        templogger.error("%s does not exist", DATA_FOLDER)
        exit(1)

    # Set Debug to True if DEBUG env is set
    enable_opts = ['True', 'true', 'yes']
    debug_opts = ['debug', 'Debug', 'DEBUG']
    # Debug logging is ON by default (the -D flag is "Depreciated" per its
    # help text); it is disabled only via --no_debug or a DEBUG env check.
    opts.debug = True

    if getenv('DEBUG') is not None:
        # True when any env var named debug/Debug/DEBUG equals one of the
        # truthy strings in enable_opts; otherwise forced False.
        # NOTE(review): this scans env vars named like "debug", not the
        # value of DEBUG itself -- convoluted; confirm intent before changing.
        opts.debug = True if any([getenv(string, False) for true in enable_opts
                                  for string in debug_opts if getenv(string, False) == true]) else False

    elif opts.no_debug:
        opts.debug = False

    # Initiate the logger
    vl = VarkenLogger(data_folder=DATA_FOLDER, debug=opts.debug)
    vl.logger.info('Starting Varken...')
    vl.logger.info('Data folder is "%s"', DATA_FOLDER)
    # Platform banner: OS, release, version, plus distro detail on Linux.
    vl.logger.info(u"%s %s (%s%s)", platform.system(), platform.release(), platform.version(),
                   ' - ' + PLATFORM_LINUX_DISTRO if PLATFORM_LINUX_DISTRO else '')
    vl.logger.info(u"Python %s", version)
    vl.logger.info("Varken v%s-%s", VERSION, BRANCH)

    # Parse varken.ini and open the shared InfluxDB connection.
    CONFIG = INIParser(DATA_FOLDER)
    DBMANAGER = DBManager(CONFIG.influx_server)
    # NOTE(review): QUEUE is never read anywhere in this file -- looks like
    # a leftover; confirm against the rest of the project before removing.
    QUEUE = Queue()

    # --- Job registration -------------------------------------------------
    # For every enabled service/server, register `schedule` jobs that run
    # the API poll through thread(); tags name jobs "<service>-<id>-<task>".
    if CONFIG.sonarr_enabled:
        for server in CONFIG.sonarr_servers:
            SONARR = SonarrAPI(server, DBMANAGER)
            if server.queue:
                at_time = schedule.every(server.queue_run_seconds).seconds
                at_time.do(thread, SONARR.get_queue).tag("sonarr-{}-get_queue".format(server.id))
            if server.missing_days > 0:
                at_time = schedule.every(server.missing_days_run_seconds).seconds
                at_time.do(thread, SONARR.get_calendar, query="Missing").tag("sonarr-{}-get_missing".format(server.id))
            if server.future_days > 0:
                at_time = schedule.every(server.future_days_run_seconds).seconds
                at_time.do(thread, SONARR.get_calendar, query="Future").tag("sonarr-{}-get_future".format(server.id))

    if CONFIG.tautulli_enabled:
        # One GeoIP database shared by all Tautulli servers (keyed off the
        # first server's license key); refreshed every 12-24 hours.
        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key)
        schedule.every(12).to(24).hours.do(thread, GEOIPHANDLER.update)
        for server in CONFIG.tautulli_servers:
            TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
            if server.get_activity:
                at_time = schedule.every(server.get_activity_run_seconds).seconds
                at_time.do(thread, TAUTULLI.get_activity).tag("tautulli-{}-get_activity".format(server.id))
            if server.get_stats:
                at_time = schedule.every(server.get_stats_run_seconds).seconds
                at_time.do(thread, TAUTULLI.get_stats).tag("tautulli-{}-get_stats".format(server.id))

    if CONFIG.radarr_enabled:
        for server in CONFIG.radarr_servers:
            RADARR = RadarrAPI(server, DBMANAGER)
            if server.get_missing:
                at_time = schedule.every(server.get_missing_run_seconds).seconds
                at_time.do(thread, RADARR.get_missing).tag("radarr-{}-get_missing".format(server.id))
            if server.queue:
                at_time = schedule.every(server.queue_run_seconds).seconds
                at_time.do(thread, RADARR.get_queue).tag("radarr-{}-get_queue".format(server.id))

    if CONFIG.lidarr_enabled:
        for server in CONFIG.lidarr_servers:
            LIDARR = LidarrAPI(server, DBMANAGER)
            if server.queue:
                at_time = schedule.every(server.queue_run_seconds).seconds
                at_time.do(thread, LIDARR.get_queue).tag("lidarr-{}-get_queue".format(server.id))
            if server.missing_days > 0:
                at_time = schedule.every(server.missing_days_run_seconds).seconds
                at_time.do(thread, LIDARR.get_calendar, query="Missing").tag(
                    "lidarr-{}-get_missing".format(server.id))
            if server.future_days > 0:
                at_time = schedule.every(server.future_days_run_seconds).seconds
                at_time.do(thread, LIDARR.get_calendar, query="Future").tag("lidarr-{}-get_future".format(
                    server.id))

    if CONFIG.ombi_enabled:
        for server in CONFIG.ombi_servers:
            OMBI = OmbiAPI(server, DBMANAGER)
            if server.request_type_counts:
                at_time = schedule.every(server.request_type_run_seconds).seconds
                at_time.do(thread, OMBI.get_request_counts).tag("ombi-{}-get_request_counts".format(server.id))
            if server.request_total_counts:
                at_time = schedule.every(server.request_total_run_seconds).seconds
                at_time.do(thread, OMBI.get_all_requests).tag("ombi-{}-get_all_requests".format(server.id))
            if server.issue_status_counts:
                at_time = schedule.every(server.issue_status_run_seconds).seconds
                at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id))

    if CONFIG.sickchill_enabled:
        for server in CONFIG.sickchill_servers:
            SICKCHILL = SickChillAPI(server, DBMANAGER)
            if server.get_missing:
                at_time = schedule.every(server.get_missing_run_seconds).seconds
                at_time.do(thread, SICKCHILL.get_missing).tag("sickchill-{}-get_missing".format(server.id))

    if CONFIG.unifi_enabled:
        # UniFi polling is unconditional per server (no per-task toggles).
        for server in CONFIG.unifi_servers:
            UNIFI = UniFiAPI(server, DBMANAGER)
            at_time = schedule.every(server.get_usg_stats_run_seconds).seconds
            at_time.do(thread, UNIFI.get_usg_stats).tag("unifi-{}-get_usg_stats".format(server.id))

    # Run all on startup
    SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled,
                        CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled]
    # Exit when nothing is enabled -- otherwise the loop below would spin
    # with no jobs.
    if not [enabled for enabled in SERVICES_ENABLED if enabled]:
        vl.logger.error("All services disabled. Exiting")
        exit(1)

    # Fire every registered job once immediately, then poll the scheduler
    # once per second for as long as jobs exist.
    schedule.run_all()
    while schedule.jobs:
        schedule.run_pending()
        sleep(1)