Varken/Varken/sonarr.py

159 lines
6.1 KiB
Python
Raw Normal View History

2018-11-28 14:32:39 -06:00
#!/usr/bin/env python3
# Do not edit this script. Edit configuration.py
2018-02-10 00:40:32 -06:00
import requests
2018-11-28 18:08:59 -06:00
from influxdb import InfluxDBClient
from datetime import datetime, timezone, date, timedelta
2018-11-28 22:57:10 -06:00
from Varken.logger import logging
2018-11-28 22:20:42 -06:00
from Varken.helpers import TVShow, Queue
2018-11-28 14:32:39 -06:00
class SonarrAPI(object):
    """Pull calendar and queue data from one or more Sonarr servers and push
    the results to InfluxDB as "Sonarr" measurement points."""

    def __init__(self, servers, influx_server):
        """
        servers: iterable of Sonarr server configs (url, api_key, id, verify_ssl).
        influx_server: InfluxDB connection config (url, port, username, password).
        """
        # Set time of initialization; refreshed at the start of every collection run.
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.today = str(date.today())
        # NOTE(review): database name 'plex' is hard-coded even though this is
        # Sonarr data — confirm the shared database is intentional.
        self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username,
                                     influx_server.password, 'plex')
        self.servers = servers
        # Create session to reduce server web thread load, and globally define
        # pageSize for all requests
        self.session = requests.Session()
        self.session.params = {'pageSize': 1000}

    def _refresh_clock(self):
        """Re-read wall-clock values before a collection run.

        Bug fix: previously only self.now was refreshed per call while
        self.today stayed frozen at its __init__ value, so in a long-running
        process the calendar window start/end drifted apart after midnight.
        """
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.today = str(date.today())

    @logging
    def get_missing(self, days_past):
        """Report episodes that aired in the last `days_past` days but have
        no downloaded file, one InfluxDB point per missing episode."""
        endpoint = '/api/calendar'
        self._refresh_clock()
        last_days = str(date.today() + timedelta(days=-days_past))
        params = {'start': last_days, 'end': self.today}
        influx_payload = []
        for server in self.servers:
            missing = []
            headers = {'X-Api-Key': server.api_key}
            get = self.session.get(server.url + endpoint, params=params, headers=headers,
                                   verify=server.verify_ssl).json()
            # Iteratively create a list of TVShow Objects from response json
            tv_shows = [TVShow(**show) for show in get]
            # Add show to missing list if file does not exist
            for show in tv_shows:
                if not show.hasFile:
                    sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
                    missing.append((show.series['title'], sxe, show.airDate, show.title, show.id))
            for series_title, sxe, air_date, episode_title, sonarr_id in missing:
                influx_payload.append(
                    {
                        "measurement": "Sonarr",
                        "tags": {
                            "type": "Missing",
                            "sonarrId": sonarr_id,
                            "server": server.id
                        },
                        "time": self.now,
                        "fields": {
                            "name": series_title,
                            "epname": episode_title,
                            "sxe": sxe,
                            "airs": air_date
                        }
                    }
                )
        self.influx_push(influx_payload)

    @logging
    def get_future(self, future_days):
        """Report episodes airing between today and `future_days` days out,
        including whether each already has a file, as InfluxDB points."""
        endpoint = '/api/calendar/'
        self._refresh_clock()
        future = str(date.today() + timedelta(days=future_days))
        influx_payload = []
        for server in self.servers:
            air_days = []
            headers = {'X-Api-Key': server.api_key}
            params = {'start': self.today, 'end': future}
            get = self.session.get(server.url + endpoint, params=params, headers=headers,
                                   verify=server.verify_ssl).json()
            tv_shows = [TVShow(**show) for show in get]
            for show in tv_shows:
                sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
                air_days.append((show.series['title'], show.hasFile, sxe, show.title, show.airDate, show.id))
            for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days:
                influx_payload.append(
                    {
                        "measurement": "Sonarr",
                        "tags": {
                            "type": "Future",
                            "sonarrId": sonarr_id,
                            "server": server.id
                        },
                        "time": self.now,
                        "fields": {
                            "name": series_title,
                            "epname": episode_title,
                            "sxe": sxe,
                            "airs": air_date,
                            "downloaded": dl_status
                        }
                    }
                )
        self.influx_push(influx_payload)

    @logging
    def get_queue(self, notimplemented):
        """Report each server's current download queue as InfluxDB points.

        `notimplemented` is unused; kept so the signature stays compatible
        with callers that invoke all collectors uniformly.
        """
        influx_payload = []
        endpoint = '/api/queue'
        self._refresh_clock()
        for server in self.servers:
            queue = []
            headers = {'X-Api-Key': server.api_key}
            get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json()
            download_queue = [Queue(**show) for show in get]
            for show in download_queue:
                sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber'])
                # Hoisted: protocol.upper() was previously computed twice per item.
                protocol = show.protocol.upper()
                protocol_id = 1 if protocol == 'USENET' else 0
                queue.append((show.series['title'], show.episode['title'], protocol,
                              protocol_id, sxe, show.id))
            for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
                influx_payload.append(
                    {
                        "measurement": "Sonarr",
                        "tags": {
                            "type": "Queue",
                            "sonarrId": sonarr_id,
                            "server": server.id
                        },
                        "time": self.now,
                        "fields": {
                            "name": series_title,
                            "epname": episode_title,
                            "sxe": sxe,
                            "protocol": protocol,
                            "protocol_id": protocol_id
                        }
                    }
                )
        self.influx_push(influx_payload)

    def influx_push(self, payload):
        """Write a list of measurement-point dicts to InfluxDB."""
        # TODO: error handling for failed connection
        self.influx.write_points(payload)