bunch of tweaks. Scheduling is working

This commit is contained in:
Nicholas St. Germain 2018-11-28 22:57:10 -06:00
parent 7910efc641
commit b7605ebbcc
4 changed files with 42 additions and 58 deletions

View file

@@ -44,8 +44,12 @@ class INIParser(object):
        try:
            if not self.config.getboolean('global', 'sonarr_server_ids'):
                sys.exit('sonarr_server_ids must be either false, or a comma-separated list of server ids')
            elif self.config.getint('global', 'sonarr_server_ids'):
                self.sonarr_enabled = True
        except ValueError:
            self.sonarr_enabled = True

        if self.sonarr_enabled:
            sids = self.config.get('global', 'sonarr_server_ids').strip(' ').split(',')

            for server_id in sids:
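The try/except above works because configparser's getboolean() raises ValueError for any value that is not a recognised boolean, which is how a comma-separated id list is told apart from false. A minimal standalone sketch of that pattern, not part of the diff (the read_server_ids helper and the inline config string are illustrative only):

import configparser

def read_server_ids(cfg):
    # Illustrative helper, not the project's code; mirrors the pattern above.
    try:
        if not cfg.getboolean('global', 'sonarr_server_ids'):
            return []                                        # explicitly disabled with "false"
        return [cfg.getint('global', 'sonarr_server_ids')]   # a single id such as "1"
    except ValueError:
        # Not a plain boolean, so treat the raw value as a comma-separated id list.
        raw = cfg.get('global', 'sonarr_server_ids')
        return [int(server_id) for server_id in raw.strip(' ').split(',')]

cfg = configparser.ConfigParser()
cfg.read_string('[global]\nsonarr_server_ids = 1,2\n')
print(read_server_ids(cfg))                                  # -> [1, 2]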

View file

@@ -1 +1,11 @@
import functools
def logging(function):
    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        print('LOG: Running job "%s"' % function.__name__)
        result = function(*args, **kwargs)
        print('LOG: Job "%s" completed' % function.__name__)
        return result
    return wrapper
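For reference, the new decorator wraps any callable and prints a line before and after the job runs; a short usage sketch, not part of the repo (sample_job is a hypothetical function):

@logging
def sample_job():
    return 42

sample_job()
# LOG: Running job "sample_job"
# LOG: Job "sample_job" completed

Because functools.wraps is applied, the wrapper keeps the decorated function's __name__, which is what the log lines report.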

View file

@@ -1,11 +1,10 @@
#!/usr/bin/env python3
# Do not edit this script. Edit configuration.py
import sys
import requests
import argparse
from influxdb import InfluxDBClient
from datetime import datetime, timezone, date, timedelta
from Varken.logger import logging
from Varken.helpers import TVShow, Queue
@@ -16,16 +15,17 @@ class SonarrAPI(object):
        self.today = str(date.today())
        self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username,
                                     influx_server.password, 'plex')
        self.influx_payload = []
        self.servers = sonarr_servers
        # Create session to reduce server web thread load, and globally define pageSize for all requests
        self.session = requests.Session()
        self.session.params = {'pageSize': 1000}

    @logging
    def get_missing(self, days_past):
        endpoint = '/api/calendar'
        last_days = str(date.today() + timedelta(days=-days_past))
        params = {'start': last_days, 'end': self.today}
        influx_payload = []

        for server in self.servers:
            missing = []
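get_missing (and get_future further down) query Sonarr's calendar endpoint through the shared session, so the pageSize parameter set above rides along with every request. A standalone sketch of that request, not part of the diff; the base URL and API key are placeholders for the ini-defined server values:

import requests
from datetime import date, timedelta

session = requests.Session()
session.params = {'pageSize': 1000}          # merged into every request made with this session

url = 'http://localhost:8989'                # placeholder Sonarr base URL
headers = {'X-Api-Key': 'placeholder-key'}   # placeholder API key
params = {'start': str(date.today() + timedelta(days=-7)), 'end': str(date.today())}

shows = session.get(url + '/api/calendar', params=params, headers=headers).json()
print(len(shows), 'episodes in the last 7 days')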
@@ -42,7 +42,7 @@ class SonarrAPI(object):
                missing.append((show.series['title'], sxe, show.airDate, show.title, show.id))
            for series_title, sxe, air_date, episode_title, sonarr_id in missing:
                self.influx_payload.append(
                influx_payload.append(
                    {
                        "measurement": "Sonarr",
                        "tags": {
@@ -60,12 +60,17 @@ class SonarrAPI(object):
                    }
                )
        self.influx_push(influx_payload)

    @logging
    def get_future(self, future_days):
        endpoint = '/api/calendar/'
        future = str(date.today() + timedelta(days=future_days))
        influx_payload = []

        for server in self.servers:
            air_days = []
            headers = {'X-Api-Key': server.api_key}
            params = {'start': self.today, 'end': future}
@@ -77,7 +82,7 @@ class SonarrAPI(object):
                air_days.append((show.series['title'], show.hasFile, sxe, show.title, show.airDate, show.id))
            for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days:
                self.influx_payload.append(
                influx_payload.append(
                    {
                        "measurement": "Sonarr",
                        "tags": {
@@ -96,7 +101,11 @@ class SonarrAPI(object):
                    }
                )
        self.influx_push(influx_payload)

    @logging
    def get_queue(self):
        influx_payload = []
        endpoint = '/api/queue'

        for server in self.servers:
@@ -117,7 +126,7 @@ class SonarrAPI(object):
                                  protocol_id, sxe, show.id))
            for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
                self.influx_payload.append(
                influx_payload.append(
                    {
                        "measurement": "Sonarr",
                        "tags": {
@@ -137,43 +146,8 @@ class SonarrAPI(object):
                    }
                )

    def influx_push(self):
        self.influx_push(influx_payload)

    def influx_push(self, payload):
        # TODO: error handling for failed connection
        self.influx.write_points(self.influx_payload)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(prog='Sonarr stats operations',
                                     description='Script to aid in data gathering from Sonarr',
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("--missing", metavar='$days', type=int,
                        help='Get missing TV shows in past X days'
                             '\ni.e. --missing 7 is in the last week')
    parser.add_argument("--missing_days", metavar='$days', type=int,
                        help='legacy command. Deprecated in favor of --missing'
                             '\nfunctions identically to --missing'
                             '\nNote: Will be removed in a future release')
    parser.add_argument("--future", metavar='$days', type=int,
                        help='Get TV shows on X days into the future. Includes today.'
                             '\ni.e. --future 2 is Today and Tomorrow')
    parser.add_argument("--queue", action='store_true', help='Get TV shows in queue')
    opts = parser.parse_args()

    sonarr = SonarrAPI()

    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    if any([opts.missing, opts.missing_days]):
        days = opts.missing if opts.missing else opts.missing_days
        sonarr.get_missing(days)
    if opts.upcoming:
        sonarr.get_upcoming()
    if opts.future:
        sonarr.get_future(opts.future)
    if opts.queue:
        sonarr.get_queue()
    sonarr.influx_push()

        self.influx.write_points(payload)
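The refactored influx_push() now takes the payload built by each method and hands it straight to InfluxDBClient.write_points(). A minimal sketch of that call, not part of the diff; the connection values and the single example point are illustrative only:

from influxdb import InfluxDBClient

# Illustrative connection values; the real client is built from the ini-defined InfluxDB server.
influx = InfluxDBClient('localhost', 8086, 'root', 'root', 'plex')

payload = [{
    'measurement': 'Sonarr',
    'tags': {'type': 'Missing', 'server': 1},        # example tag set, not taken from the diff
    'time': '2018-11-28T00:00:00Z',
    'fields': {'name': 'Example Show - S01E01'},
}]

influx.write_points(payload)   # the same call the new influx_push(payload) makes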

View file

@@ -1,23 +1,13 @@
import schedule
import threading
import functools
from time import sleep
from Varken.iniparser import INIParser
from Varken.sonarr import SonarrAPI


def logging(function):
    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        print('LOG: Running job "%s"' % function.__name__)
        result = function(*args, **kwargs)
        print('LOG: Job "%s" completed' % function.__name__)
        return result
    return wrapper


@logging
def threaded(job):
    print('test')
    thread = threading.Thread(target=job)
    thread.start()
@@ -26,9 +16,15 @@ if __name__ == "__main__":
    if CONFIG.sonarr_enabled:
        SONARR = SonarrAPI(CONFIG.sonarr_servers, CONFIG.influx_server)

        for server in CONFIG.sonarr_servers:
            if server.queue:
                schedule.every().minute.do(threaded, SONARR.get_queue)
                schedule.every(1).minutes.do(threaded, SONARR.get_queue)
            if server.missing_days > 0:
                schedule.every(30).minutes.do(threaded, SONARR.get_missing, server.missing_days)
            if server.future_days > 0:
                schedule.every(30).minutes.do(threaded, SONARR.get_future, server.future_days)

    while True:
        schedule.run_pending()
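The hunk ends at run_pending(); with the schedule library the polling loop is normally closed out with a short sleep so it does not spin. A self-contained sketch of the threaded-job pattern used above, not part of the diff (the sample job and the one-second polling interval are assumptions):

import threading
from time import sleep

import schedule

def threaded(job):
    # Run each scheduled job in its own thread so a slow job does not block the loop.
    threading.Thread(target=job).start()

def sample_job():
    print('job ran')

schedule.every(1).minutes.do(threaded, sample_job)

while True:
    schedule.run_pending()
    sleep(1)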