add lidarr functionality

Nicholas St. Germain 2019-04-24 15:27:30 -05:00
parent a7d2416485
commit ec4b069ec6
3 changed files with 191 additions and 0 deletions

Varken.py

@@ -17,6 +17,7 @@ from varken.unifi import UniFiAPI
from varken import VERSION, BRANCH
from varken.sonarr import SonarrAPI
from varken.radarr import RadarrAPI
from varken.lidarr import LidarrAPI
from varken.iniparser import INIParser
from varken.dbmanager import DBManager
from varken.helpers import GeoIPHandler
@@ -134,6 +135,20 @@ if __name__ == "__main__":
                at_time = schedule.every(server.queue_run_seconds).seconds
                at_time.do(QUEUE.put, RADARR.get_queue).tag("radarr-{}-get_queue".format(server.id))
    if CONFIG.lidarr_enabled:
        for server in CONFIG.lidarr_servers:
            LIDARR = LidarrAPI(server, DBMANAGER)
            if server.queue:
                at_time = schedule.every(server.queue_run_seconds).seconds
                at_time.do(QUEUE.put, LIDARR.get_queue).tag("lidarr-{}-get_queue".format(server.id))
            if server.missing_days > 0:
                at_time = schedule.every(server.missing_days_run_seconds).seconds
                at_time.do(QUEUE.put, LIDARR.get_calendar, query="Missing").tag(
                    "lidarr-{}-get_missing".format(server.id))
            if server.future_days > 0:
                at_time = schedule.every(server.future_days_run_seconds).seconds
                at_time.do(QUEUE.put, LIDARR.get_calendar, query="Future").tag("lidarr-{}-get_future".format(server.id))
    if CONFIG.ombi_enabled:
        for server in CONFIG.ombi_servers:
            OMBI = OmbiAPI(server, DBMANAGER)
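
The hunk above only schedules jobs that push bound methods onto QUEUE; the queue itself and the thread that drains it are defined earlier in Varken.py and are not part of this diff. For context, a minimal, self-contained sketch of that schedule-plus-queue pattern, with a hypothetical collect() standing in for a method such as LIDARR.get_queue:

# Minimal sketch of the schedule + queue pattern used by the block above.
# `collect` is a stand-in for a bound method like LIDARR.get_queue; Varken's
# real worker/thread setup lives elsewhere in Varken.py.
from queue import Queue
from threading import Thread
from time import sleep

import schedule

QUEUE = Queue()


def collect():
    print("collecting lidarr data...")


def worker():
    # Drain the queue forever, calling whatever callable was enqueued.
    while True:
        job = QUEUE.get()
        job()
        QUEUE.task_done()


Thread(target=worker, daemon=True).start()

# Same shape as the lidarr block: enqueue the callable every N seconds.
schedule.every(30).seconds.do(QUEUE.put, collect).tag("lidarr-1-get_queue")

while True:
    schedule.run_pending()
    sleep(1)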

varken/lidarr.py (new file, 130 lines added)

@@ -0,0 +1,130 @@
from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone, date, timedelta

from varken.structures import LidarrQueue, LidarrAlbum
from varken.helpers import hashit, connection_handler


class LidarrAPI(object):
    def __init__(self, server, dbmanager):
        self.dbmanager = dbmanager
        self.server = server
        # Create session to reduce server web thread load
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
        self.logger = getLogger()

    def __repr__(self):
        return f"<lidarr-{self.server.id}>"
    def get_calendar(self, query="Missing"):
        endpoint = '/api/v1/calendar'
        today = str(date.today())
        last_days = str(date.today() - timedelta(days=self.server.missing_days))
        future = str(date.today() + timedelta(days=self.server.future_days))
        now = datetime.now(timezone.utc).astimezone().isoformat()
        if query == "Missing":
            params = {'start': last_days, 'end': today}
        else:
            params = {'start': today, 'end': future}
        influx_payload = []
        influx_albums = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        # Iteratively create a list of LidarrAlbum objects from the response json
        albums = []
        for album in get:
            try:
                albums.append(LidarrAlbum(**album))
            except TypeError as e:
                self.logger.error('TypeError has occurred : %s while creating LidarrAlbum structure for album. Data '
                                  'attempted is: %s', e, album)

        # Add album to missing list if it is not complete
        for album in albums:
            if album.statistics['percentOfTracks'] != 100:
                influx_albums.append((album.title, album.releaseDate, album.artist['artistName'], album.id,
                                      album.statistics['percentOfTracks'],
                                      f"{album.statistics['trackFileCount']}/{album.statistics['trackCount']}"))

        for title, release_date, artist_name, album_id, percent_complete, complete_count in influx_albums:
            hash_id = hashit(f'{self.server.id}{title}{album_id}')
            influx_payload.append(
                {
                    "measurement": "Lidarr",
                    "tags": {
                        "type": query,
                        "sonarrId": album_id,
                        "server": self.server.id,
                        "albumName": title,
                        "artistName": artist_name,
                        "percentComplete": percent_complete,
                        "completeCount": complete_count,
                        "releaseDate": release_date
                    },
                    "time": now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
    def get_queue(self):
        endpoint = '/api/v1/queue'
        now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        params = {'pageSize': 1000}

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        queue = []
        for song in get['records']:
            try:
                queue.append(LidarrQueue(**song))
            except TypeError as e:
                self.logger.error('TypeError has occurred : %s while creating LidarrQueue structure for song. Data '
                                  'attempted is: %s', e, song)

        if not queue:
            return

        for song in queue:
            if song.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            hash_id = hashit(f'{self.server.id}{song.title}{song.artistId}')
            influx_payload.append(
                {
                    "measurement": "Lidarr",
                    "tags": {
                        "type": "Queue",
                        "id": song.id,
                        "server": self.server.id,
                        "title": song.title,
                        "quality": song.quality['quality']['name'],
                        "protocol": song.protocol,
                        "protocol_id": protocol_id,
                        "indexer": song.indexer
                    },
                    "time": now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
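
For reference, a usage sketch of the new class outside the scheduler. LidarrServer below is a hypothetical stand-in for the namedtuple Varken's iniparser builds from a [lidarr-1] config section; only the attributes read by lidarr.py above are included, and the values are placeholders.

# Hypothetical, self-contained usage sketch; LidarrServer and its values are
# stand-ins, not Varken's actual server object.
from collections import namedtuple

from varken.dbmanager import DBManager
from varken.lidarr import LidarrAPI

LidarrServer = namedtuple(
    'LidarrServer',
    ['id', 'url', 'api_key', 'verify_ssl', 'queue', 'queue_run_seconds',
     'missing_days', 'missing_days_run_seconds',
     'future_days', 'future_days_run_seconds']
)

server = LidarrServer(
    id=1, url='https://lidarr.local:8686', api_key='xxxxxxxx', verify_ssl=False,
    queue=True, queue_run_seconds=300,
    missing_days=30, missing_days_run_seconds=300,
    future_days=30, future_days_run_seconds=300
)

# DBManager needs the InfluxDB server object from the [influxdb] section,
# which is outside this diff, so the calls are shown commented out.
# dbmanager = DBManager(CONFIG.influx_server)
# lidarr = LidarrAPI(server, dbmanager)
# lidarr.get_calendar(query="Missing")  # albums released in the last missing_days
# lidarr.get_calendar(query="Future")   # albums due in the next future_days
# lidarr.get_queue()                    # current download queue, pageSize=1000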

varken/structures.py

@@ -457,3 +457,49 @@ class TautulliStream(NamedTuple):
    year: str = None
    secure: str = None
    relayed: int = None


# Lidarr
class LidarrQueue(NamedTuple):
    artistId: int = None
    albumId: int = None
    language: dict = None
    quality: dict = None
    size: float = None
    title: str = None
    timeleft: str = None
    sizeleft: float = None
    status: str = None
    trackedDownloadStatus: str = None
    statusMessages: list = None
    downloadId: str = None
    protocol: str = None
    downloadClient: str = None
    indexer: str = None
    downloadForced: bool = None
    id: int = None


class LidarrAlbum(NamedTuple):
    title: str = None
    disambiguation: str = None
    overview: str = None
    artistId: int = None
    foreignAlbumId: str = None
    monitored: bool = None
    anyReleaseOk: bool = None
    profileId: int = None
    duration: int = None
    albumType: str = None
    secondaryTypes: list = None
    mediumCount: int = None
    ratings: dict = None
    releaseDate: str = None
    releases: list = None
    genres: list = None
    media: list = None
    artist: dict = None
    images: list = None
    links: list = None
    statistics: dict = None
    id: int = None
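
These NamedTuples are filled by **-expanding the JSON objects Lidarr returns (LidarrQueue(**song) and LidarrAlbum(**album) in lidarr.py above). Every field defaults to None so missing keys are tolerated, while an unknown key raises TypeError, which the collectors log and skip. A small illustration with a hypothetical queue record:

# Hypothetical queue record showing how the NamedTuples above are populated
# and why lidarr.py wraps construction in try/except TypeError.
from varken.structures import LidarrQueue

record = {
    "artistId": 12,
    "albumId": 34,
    "title": "Some Album",
    "size": 512.0,
    "sizeleft": 0.0,
    "protocol": "usenet",
    "quality": {"quality": {"name": "FLAC"}},
    "indexer": "NZBgeek",
    "id": 99,
}

song = LidarrQueue(**record)      # keys not supplied fall back to the None defaults
print(song.title, song.protocol)  # -> Some Album usenet

# If Lidarr ever adds a field the NamedTuple does not declare, **-expansion
# raises TypeError ("unexpected keyword argument"), which get_queue() logs
# and skips instead of crashing the collector.
record["brandNewField"] = True
try:
    LidarrQueue(**record)
except TypeError as e:
    print(f"skipped record: {e}")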