from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone

from varken.helpers import connection_handler, hashit
from varken.structures import OverseerrRequestCounts


class OverseerrAPI(object):
    def __init__(self, server, dbmanager):
        self.dbmanager = dbmanager
        self.server = server
        # Create a session to reduce server web thread load, and globally attach the API key header to all requests
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
        self.logger = getLogger()

    def __repr__(self):
        return f"<overseerr-{self.server.id}>"
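    # For reference, /api/v1/request/count is expected to return a flat JSON object that unpacks
    # directly into OverseerrRequestCounts, roughly like this (illustrative values only):
    #   {"pending": 2, "approved": 14, "processing": 3, "available": 11,
    #    "total": 30, "movie": 18, "tv": 12, "declined": 1}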
    def get_request_counts(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/request/count'

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get_req = connection_handler(self.session, req, self.server.verify_ssl)

        if not get_req:
            return

        requests = OverseerrRequestCounts(**get_req)
        influx_payload = [
            {
                "measurement": "Overseerr",
                "tags": {
                    "type": "Request_Counts"
                },
                "time": now,
                "fields": {
                    "pending": requests.pending,
                    "approved": requests.approved,
                    "processing": requests.processing,
                    "available": requests.available,
                    "total": requests.total,
                    "movies": requests.movie,
                    "tv": requests.tv,
                    "declined": requests.declined
                }
            }
        ]

        if influx_payload:
            self.dbmanager.write_points(influx_payload)
        else:
            self.logger.warning("No data to send to influx for overseerr-request-counts instance, discarding.")
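    # Each request handled below becomes one "Overseerr" measurement point: the tags carry the
    # request metadata (request type, status, title, requesting user, request date) and the single
    # field is a hash of the media id and title.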
    def get_latest_requests(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/request?take=' + str(self.server.num_latest_requests_to_fetch) + '&filter=all&sort=added'
        movie_endpoint = '/api/v1/movie/'
        tv_endpoint = '/api/v1/tv/'

        # GET THE LATEST n REQUESTS
        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get_latest_req = connection_handler(self.session, req, self.server.verify_ssl)

        # RETURN NOTHING IF NO RESULTS
        if not get_latest_req:
            self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.")
            return

        influx_payload = []

        # Request Type: Movie = 1, TV Show = 0
        for result in get_latest_req['results']:
            if result['type'] == 'tv':
                req = self.session.prepare_request(Request('GET',
                                                           self.server.url +
                                                           tv_endpoint +
                                                           str(result['media']['tmdbId'])))
                get_tv_req = connection_handler(self.session, req, self.server.verify_ssl)
                # Skip this result if the detail lookup failed, so one bad item does not abort the run
                if not get_tv_req:
                    continue

                hash_id = hashit(f'{get_tv_req["id"]}{get_tv_req["name"]}')

                influx_payload.append(
                    {
                        "measurement": "Overseerr",
                        "tags": {
                            "type": "Requests",
                            "server": self.server.id,
                            "request_type": 0,
                            "status": get_tv_req['mediaInfo']['status'],
                            "title": get_tv_req['name'],
                            "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['displayName'],
                            "requested_date": get_tv_req['mediaInfo']['requests'][0]['createdAt']
                        },
                        "time": now,
                        "fields": {
                            "hash": hash_id
                        }
                    }
                )

            if result['type'] == 'movie':
                req = self.session.prepare_request(Request('GET',
                                                           self.server.url +
                                                           movie_endpoint +
                                                           str(result['media']['tmdbId'])))
                get_movie_req = connection_handler(self.session, req, self.server.verify_ssl)
                # Skip this result if the detail lookup failed, so one bad item does not abort the run
                if not get_movie_req:
                    continue

                hash_id = hashit(f'{get_movie_req["id"]}{get_movie_req["title"]}')

                influx_payload.append(
                    {
                        "measurement": "Overseerr",
                        "tags": {
                            "type": "Requests",
                            "server": self.server.id,
                            "request_type": 1,
                            "status": get_movie_req['mediaInfo']['status'],
                            "title": get_movie_req['title'],
                            "requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['displayName'],
                            "requested_date": get_movie_req['mediaInfo']['requests'][0]['createdAt']
                        },
                        "time": now,
                        "fields": {
                            "hash": hash_id
                        }
                    }
                )

        if influx_payload:
            self.dbmanager.write_points(influx_payload)
        else:
            self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.")
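
# Usage sketch (illustrative only, not part of this module): Varken's main loop is assumed to build
# a server config object exposing the attributes read above (id, url, api_key, verify_ssl,
# num_latest_requests_to_fetch) and a DBManager-style object with a write_points() method, then
# poll both collectors on a schedule. Hypothetical wiring:
#
#     from varken.dbmanager import DBManager
#     from varken.iniparser import INIParser
#     from varken.overseerr import OverseerrAPI
#
#     config = INIParser('data')
#     dbmanager = DBManager(config.influx_server)
#     for server in config.overseerr_servers:
#         overseerr = OverseerrAPI(server, dbmanager)
#         overseerr.get_request_counts()
#         overseerr.get_latest_requests()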