Varken/varken/sonarr.py

from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone, date, timedelta
from varken.structures import Queue, TVShow
from varken.helpers import hashit, connection_handler


class SonarrAPI(object):
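    """Collect missing, upcoming, and queued episodes from a Sonarr server
    and write them to InfluxDB via the supplied dbmanager."""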
def __init__(self, server, dbmanager):
self.dbmanager = dbmanager
self.server = server
# Create session to reduce server web thread load, and globally define pageSize for all requests
self.session = Session()
self.session.headers = {'X-Api-Key': self.server.api_key}
self.session.params = {'pageSize': 1000}
        self.logger = getLogger()

    def __repr__(self):
return f"<sonarr-{self.server.id}>"
2018-12-04 08:45:18 -08:00
def get_missing(self):
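        """Look back `missing_days` days on the Sonarr calendar and record every aired episode without a file."""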
endpoint = '/api/calendar'
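        # Calendar window: from `missing_days` ago up to today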
today = str(date.today())
last_days = str(date.today() + timedelta(days=-self.server.missing_days))
now = datetime.now(timezone.utc).astimezone().isoformat()
params = {'start': last_days, 'end': today}
influx_payload = []
missing = []
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
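        # Bail out early if the request failed or returned nothing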
if not get:
return
        # Build a list of TVShow objects from the response JSON
try:
tv_shows = [TVShow(**show) for show in get]
except TypeError as e:
            self.logger.error('TypeError has occurred: %s while creating TVShow structure', e)
return
# Add show to missing list if file does not exist
for show in tv_shows:
if not show.hasFile:
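                # Zero-padded SxxEyy label, e.g. S02E05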
sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
                missing.append((show.series['title'], sxe, show.airDateUtc, show.title, show.id))

        for series_title, sxe, air_date_utc, episode_title, sonarr_id in missing:
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
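            # Tags identify the episode; the hashed server/series/episode string is the only field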
influx_payload.append(
{
"measurement": "Sonarr",
"tags": {
"type": "Missing",
"sonarrId": sonarr_id,
"server": self.server.id,
"name": series_title,
"epname": episode_title,
"sxe": sxe,
"airsUTC": air_date_utc
},
"time": now,
"fields": {
"hash": hash_id
}
}
)
        self.dbmanager.write_points(influx_payload)

    def get_future(self):
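        """Look ahead `future_days` days on the Sonarr calendar and record each episode with its download status."""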
endpoint = '/api/calendar/'
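        # Calendar window: from today up to `future_days` ahead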
today = str(date.today())
now = datetime.now(timezone.utc).astimezone().isoformat()
future = str(date.today() + timedelta(days=self.server.future_days))
influx_payload = []
air_days = []
params = {'start': today, 'end': future}
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
try:
tv_shows = [TVShow(**show) for show in get]
except TypeError as e:
            self.logger.error('TypeError has occurred: %s while creating TVShow structure', e)
return
for show in tv_shows:
sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
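            # Numeric download status for the tag: 1 = file on disk, 0 = still missing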
if show.hasFile:
downloaded = 1
else:
downloaded = 0
            air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))

        for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in air_days:
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
influx_payload.append(
{
"measurement": "Sonarr",
"tags": {
"type": "Future",
"sonarrId": sonarr_id,
"server": self.server.id,
"name": series_title,
"epname": episode_title,
"sxe": sxe,
"airsUTC": air_date_utc,
"downloaded": dl_status
},
"time": now,
"fields": {
"hash": hash_id
}
}
)
        self.dbmanager.write_points(influx_payload)

    def get_queue(self):
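        """Fetch Sonarr's current download queue and record each queued episode with its transfer protocol."""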
influx_payload = []
endpoint = '/api/queue'
now = datetime.now(timezone.utc).astimezone().isoformat()
queue = []
req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
try:
download_queue = [Queue(**show) for show in get]
except TypeError as e:
            self.logger.error('TypeError has occurred: %s while creating Queue structure', e)
return
for show in download_queue:
sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}"
if show.protocol.upper() == 'USENET':
protocol_id = 1
else:
protocol_id = 0
queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
                          protocol_id, sxe, show.id))

        for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
influx_payload.append(
{
"measurement": "Sonarr",
"tags": {
"type": "Queue",
"sonarrId": sonarr_id,
"server": self.server.id,
"name": series_title,
"epname": episode_title,
"sxe": sxe,
"protocol": protocol,
"protocol_id": protocol_id
},
"time": now,
"fields": {
"hash": hash_id
}
}
)
self.dbmanager.write_points(influx_payload)
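
# Usage sketch (illustrative only, not part of this module): the caller is expected to build a
# server config exposing url, api_key, verify_ssl, id, missing_days and future_days, plus a
# dbmanager with a write_points() method, and then drive the collectors on a schedule, e.g.:
#
#     sonarr = SonarrAPI(server, dbmanager)
#     sonarr.get_missing()
#     sonarr.get_future()
#     sonarr.get_queue()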