Varken/varken/sonarr.py

182 lines
7.1 KiB
Python
Raw Normal View History

from logging import getLogger
2018-12-04 08:45:18 -08:00
from requests import Session, Request
from datetime import datetime, timezone, date, timedelta
2022-01-19 13:53:28 -08:00
from varken.structures import SonarrEpisode, SonarrTVShow, SonarrQueue, QueuePages
from varken.helpers import hashit, connection_handler
2018-12-04 08:45:18 -08:00
2018-12-04 08:45:18 -08:00
class SonarrAPI(object):
    """Thin client for one Sonarr v3 instance.

    Pulls calendar and queue data over the Sonarr HTTP API and hands the
    resulting points to the dbmanager for an influx write.
    """

    def __init__(self, server, dbmanager):
        self.server = server
        self.dbmanager = dbmanager
        self.logger = getLogger()
        # One shared session reduces server web-thread load (keep-alive) and
        # attaches the API key to every request; pageSize is a global default
        # for all list endpoints.
        session = Session()
        session.headers = {'X-Api-Key': self.server.api_key}
        session.params = {'pageSize': 1000}
        self.session = session

    def __repr__(self):
        return f"<sonarr-{self.server.id}>"
2022-01-14 18:49:17 -08:00
def get_episode(self, id):
endpoint = '/api/v3/episode'
params = {'episodeIds': id}
2022-01-14 18:49:17 -08:00
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
return SonarrEpisode(**get[0])
2019-04-24 16:51:32 -07:00
def get_calendar(self, query="Missing"):
endpoint = '/api/v3/calendar/'
today = str(date.today())
2019-04-24 16:51:32 -07:00
last_days = str(date.today() - timedelta(days=self.server.missing_days))
2018-12-04 08:45:18 -08:00
future = str(date.today() + timedelta(days=self.server.future_days))
2019-04-24 16:51:32 -07:00
now = datetime.now(timezone.utc).astimezone().isoformat()
if query == "Missing":
params = {'start': last_days, 'end': today, 'includeSeries': True}
2019-04-24 16:51:32 -07:00
else:
params = {'start': today, 'end': future, 'includeSeries': True}
2018-12-04 08:45:18 -08:00
influx_payload = []
air_days = []
2019-04-24 16:51:32 -07:00
missing = []
2018-12-04 08:45:18 -08:00
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
tv_shows = []
for show in get:
try:
tv_shows.append(SonarrEpisode(**show))
except TypeError as e:
self.logger.error('TypeError has occurred : %s while creating SonarrEpisode structure for show. Data '
2019-04-19 10:48:04 -07:00
'attempted is: %s', e, show)
2018-12-04 08:45:18 -08:00
for episode in tv_shows:
tvShow = episode.series
sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}'
if episode.hasFile:
2018-12-04 08:45:18 -08:00
downloaded = 1
else:
downloaded = 0
2019-04-24 16:51:32 -07:00
if query == "Missing":
if episode.monitored and not downloaded:
2022-01-17 08:10:18 -08:00
missing.append((tvShow['title'], downloaded, sxe, episode.title,
episode.airDateUtc, episode.seriesId))
2019-04-24 16:51:32 -07:00
else:
2022-01-17 08:22:55 -08:00
air_days.append((tvShow['title'], downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
2018-12-04 08:45:18 -08:00
2019-04-24 16:51:32 -07:00
for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing):
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
2018-12-04 08:45:18 -08:00
influx_payload.append(
{
"measurement": "Sonarr",
"tags": {
2019-04-24 16:51:32 -07:00
"type": query,
2018-12-04 08:45:18 -08:00
"sonarrId": sonarr_id,
"server": self.server.id,
"name": series_title,
"epname": episode_title,
"sxe": sxe,
2018-12-14 19:47:27 -08:00
"airsUTC": air_date_utc,
2018-12-04 08:45:18 -08:00
"downloaded": dl_status
},
"time": now,
2018-12-04 08:45:18 -08:00
"fields": {
"hash": hash_id
}
}
)
self.dbmanager.write_points(influx_payload)
def get_queue(self):
influx_payload = []
endpoint = '/api/v3/queue'
now = datetime.now(timezone.utc).astimezone().isoformat()
pageSize = 250
2022-01-20 20:09:12 -08:00
params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True}
queueResponse = []
2018-12-04 08:45:18 -08:00
queue = []
2022-01-14 18:49:17 -08:00
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
2018-12-04 08:45:18 -08:00
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
2022-01-14 18:49:17 -08:00
response = QueuePages(**get)
queueResponse.extend(response.records)
while response.totalRecords > response.page * response.pageSize:
page = response.page + 1
params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True}
2022-01-14 18:49:17 -08:00
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
response = QueuePages(**get)
queueResponse.extend(response.records)
2018-12-04 08:45:18 -08:00
download_queue = []
for queueItem in queueResponse:
try:
download_queue.append(SonarrQueue(**queueItem))
except TypeError as e:
2019-04-19 10:48:04 -07:00
self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: '
'%s', e, queueItem)
if not download_queue:
return
2018-12-04 08:45:18 -08:00
for queueItem in download_queue:
2022-01-19 13:53:28 -08:00
tvShow = SonarrTVShow(**queueItem.series)
episode = SonarrEpisode(**queueItem.episode)
try:
sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
except TypeError as e:
self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \
Remove invalid queue entry. Data attempted is: %s', e, queueItem)
continue
if queueItem.protocol.upper() == 'USENET':
2018-12-04 08:45:18 -08:00
protocol_id = 1
else:
protocol_id = 0
2022-01-19 13:53:28 -08:00
queue.append((tvShow.title, episode.title, queueItem.protocol.upper(),
protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name']))
2018-12-04 08:45:18 -08:00
for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue:
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
2018-12-04 08:45:18 -08:00
influx_payload.append(
{
"measurement": "Sonarr",
"tags": {
"type": "Queue",
"sonarrId": sonarr_id,
"server": self.server.id,
"name": series_title,
"epname": episode_title,
"sxe": sxe,
"protocol": protocol,
"protocol_id": protocol_id,
"quality": quality
2018-12-04 08:45:18 -08:00
},
"time": now,
2018-12-04 08:45:18 -08:00
"fields": {
"hash": hash_id
}
}
)
2019-04-19 10:54:16 -07:00
if influx_payload:
self.dbmanager.write_points(influx_payload)
else:
2022-01-14 18:51:43 -08:00
self.logger.debug("No data to send to influx for sonarr instance, discarding.")