Compare commits

...

66 commits

Author SHA1 Message Date
Daniel
e34e58aabc Bypass validity check to troubleshoot 2023-06-22 21:59:15 -07:00
Daniel
996ad353fa Allow configured influx2 address as URL (no port) 2023-06-22 21:59:15 -07:00
Daniel
5e68f6a459 Log exception to troubleshoot errors 2023-06-22 21:59:14 -07:00
Gabe Revells
51483f33a5 Use bucket given in ini file 2023-06-22 21:59:14 -07:00
Gabe Revells
a30450f224 Wrap create bucket in try/catch 2023-06-22 21:59:14 -07:00
Gabe Revells
a4d54f912c Clean up linting errors 2023-06-22 21:59:14 -07:00
Gabe Revells
cf1133e637 Update InfluxDB type on README 2023-06-22 21:59:14 -07:00
Gabe Revells
8119f908ba Create influx bucket if it doesn't exist 2023-06-22 21:59:14 -07:00
Gabe Revells
415f933143 Add influxdb2 to the example varken config file 2023-06-22 21:59:14 -07:00
Gabe Revells
2b90a45b42 Use the correct db manager for varken 2023-06-22 21:59:14 -07:00
Gabe Revells
82e6fd09a0 Add influxdb2 manager class
This stores the data needed for InfluxDB2 and has a single `write_points` function that takes an array of points to add to the database
2023-06-22 21:59:14 -07:00
Gabe Revells
b8df2671c3 Parse influxdb 2 config data 2023-06-22 21:59:14 -07:00
Gabe Revells
f027e14447 Add structure for influxdb 2 params
This contains all the data needed for connecting and writing to an InfluxDB2 server
2023-06-22 21:59:14 -07:00
Gabe Revells
453b6dad15 Add influxdb 2 client 2023-06-22 21:59:14 -07:00
Nicholas St. Germain
f57a25ac4e update pipeline badge 2023-06-22 21:11:33 -07:00
Nicholas St. Germain
4768870bfb add branch to build inputs 2023-06-22 21:11:33 -07:00
Nathan Adams
b077508a3e fix(sonarr): ensure invalid sonarr queue items are just skipped over - fixes #239 (#243) 2022-07-18 11:10:29 -04:00
Samwiseg0
a197cb63f8 fix(dep): update requests and urllib3 2022-07-17 17:04:24 -04:00
Samwiseg0
0a9a20007e fix(build): bump docker python version 2022-07-17 16:44:45 -04:00
Samwiseg0
a6c8ffce25 fix(build): bump schedule version to 1.1 2022-07-17 16:43:54 -04:00
MDHMatt
cfc5c6998a fix(logging): remove deprecation warning. Var for debug mode (#240) 2022-07-17 13:06:48 -04:00
Samwiseg0
2cc26c1813 feat(docker): remove envs from example 2022-07-17 13:00:17 -04:00
MDHMatt
817c7a5b53 fix(ombi): Update structures.py (#238) 2022-07-12 13:36:24 -04:00
Samwiseg0
0a927d0733 Update radarr structures to include popularity 2022-04-29 15:11:57 -04:00
Samwiseg0
7b4b44568a Update radarr structures to include addOptions 2022-04-21 14:55:27 -04:00
Samwiseg0
f3960d2921 Update radarr structures to include originalLanguage 2022-04-21 12:52:40 -04:00
Samwiseg0
5f8af8e029 Rename example url for overseerr in docker yml 2022-03-31 23:03:51 -04:00
Samwiseg0
2c81fe5f9f Fix typo in docker yml 2022-03-31 23:02:52 -04:00
Cameron Stephen
731808544d Update Sonarr & Lidarr Structs to match latest API changes (#231)
* Add support for estimatedCompletionTime in LidarrQueue

* Add support for tvdbId in SonarrEpisode struct
2022-03-07 18:14:14 -05:00
samwiseg0
2f7f01edbb Added: Logging to empty/no data returns 2022-01-21 00:35:06 -05:00
samwiseg0
a60e41e6e4 Added: Overseerr ENVs to docker compose. 2022-01-21 00:31:20 -05:00
samwiseg0
752073d590 Fixed: Proper warnings for missing data in sonarr and radarr 2022-01-21 00:14:15 -05:00
samwiseg0
518ea6c384 Fixed: Sonarr params ordering 2022-01-20 23:40:45 -05:00
Stewart Thomson
45f9a2092b Update historical tautulli import (#226) 2022-01-20 23:38:02 -05:00
samwiseg0
62749f20f9 Fixed: Multipage queue fetching 2022-01-20 23:04:19 -05:00
samwiseg0
06c4777a34 Change sonarr queue structures to str 2022-01-19 17:01:12 -05:00
Robin Dadswell
707c4a28fe Fixed: Sonarr/Lidarr Queues (#227) 2022-01-19 16:53:28 -05:00
samwiseg0
3c70ecbd0a Fix SERVICES_ENABLED in varken.py to accommodate overseerr 2022-01-19 15:18:56 -05:00
samwiseg0
885359986c Cleanup overseerr data collection 2022-01-19 15:17:19 -05:00
samwiseg0
e2920029ed Update intparser to accommodate changes to config structure 2022-01-19 14:20:43 -05:00
samwiseg0
442b518ced Fix overseerr structures 2022-01-19 14:05:24 -05:00
samwiseg0
d1b47e0bd9 Fix overseerr config references 2022-01-19 13:55:15 -05:00
samwiseg0
e4b9926f68 Fix requested_date for Overseerr tv and movie 2022-01-19 13:36:07 -05:00
samwiseg0
908cfb15a0 Fix requested_date syntax. 2022-01-19 13:31:22 -05:00
samwiseg0
756b89bc03 Cleanup blank space 2022-01-19 13:29:12 -05:00
samwiseg0
08c49698a7 Clean up request totals. Upstream change sct/overseerr#2426 2022-01-19 13:28:12 -05:00
Stewart Thomson
5e8c8eb5ce Update lidarr structure (#225)
Added missing arguments to Lidarr structure

Fixes #223
2022-01-17 22:10:54 -05:00
samwiseg0
7a4cf59e7c Fix Sonarr calendar 2022-01-17 11:22:55 -05:00
Robin Dadswell
463f37e286 Fixed: Sonarr Data pull issues (#222) 2022-01-17 11:10:18 -05:00
Robin Dadswell
870c6cdee2 Fixed: Streamlined API calls to Radarr and Sonarr (#221) 2022-01-16 20:40:12 -05:00
samwiseg0
9508c3c3f7 add missing ip address in tautulli 2022-01-15 12:43:32 -05:00
samwiseg0
436a682360 Add IP data to tautulli #202 2022-01-15 12:23:29 -05:00
samwiseg0
5a76459654 update changelog to reflect v1.7.7 changes 2022-01-15 12:11:47 -05:00
samwiseg0
d4c8037f56 Remove duplicate structures 2022-01-14 23:20:33 -05:00
samwiseg0
73b2686ba0 Add Overseerr Support (#210) 2022-01-14 23:16:44 -05:00
samwiseg0
9498a83bc8 Update Sonarr structures 2022-01-14 22:05:40 -05:00
samwiseg0
63746dd7be More lint fixes 2022-01-14 21:51:43 -05:00
samwiseg0
533ec1058f Fix lint issues 2022-01-14 21:49:17 -05:00
tigattack
2607584eba Fix Sonarr & Radarr V3 API /queue endpoint (#220) 2022-01-14 21:21:54 -05:00
samwiseg0
7a8c5a3ee4 bump requirements for requests 2022-01-14 21:19:08 -05:00
samwiseg0
e3f2ee967c Merge pull request #214 from RobinDadswell/RadarrAndSonarrV3Api 2021-09-20 15:24:29 -04:00
Robin
57028cd26c updated to support Radarr and Sonarr V3 Api 2021-09-17 22:55:02 +00:00
samwiseg0
c6b8dde29a Update requirements to use urllib3==1.26.5 2021-06-03 13:40:57 -04:00
samwiseg0
c47b7c2c3b Update docker compose to specify influxdb:1.8.4 2021-06-03 13:35:53 -04:00
samwiseg0
01b7d2c1e1 Merge pull request #205 from mal5305/#203 2021-05-21 17:32:29 -04:00
mal5305
2b1f6e0699 #203 2021-05-21 17:29:00 -04:00
16 changed files with 605 additions and 197 deletions

CHANGELOG.md

@ -1,5 +1,29 @@
# Change Log
## [v1.7.7](https://github.com/Boerderij/Varken/tree/v1.7.7) (2020-12-21)
[Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.6...v1.7.7)
**Implemented enhancements:**
- \[Enhancement\] Ombi 4.0 compatibility [\#186](https://github.com/Boerderij/Varken/issues/186)
([samwiseg0](https://github.com/samwiseg0))
**Merged pull requests:**
- v1.7.7 Merge [\#191](https://github.com/Boerderij/Varken/pull/191)
([DirtyCajunRice](https://github.com/DirtyCajunRice))
- Type Error fix [\#177](https://github.com/Boerderij/Varken/pull/177)
([derek-miller](https://github.com/derek-miller))
**Fixed bugs:**
- \[BUG\] Influxdb exit code [\#174](https://github.com/Boerderij/Varken/issues/174)
([samwiseg0](https://github.com/samwiseg0))
**Notes:**
- Now built via github actions
- Available on ghcr, quay.io, and dockerhub
- Nightly builds done to accommodate dependabot MRs
## [v1.7.6](https://github.com/Boerderij/Varken/tree/v1.7.6) (2020-01-01)
[Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.5...v1.7.6)
@ -350,4 +374,4 @@
- Create crontabs [\#6](https://github.com/Boerderij/Varken/pull/6) ([ghost](https://github.com/ghost))
- update plex\_dashboard.json [\#5](https://github.com/Boerderij/Varken/pull/5) ([ghost](https://github.com/ghost))
- Update README.md [\#4](https://github.com/Boerderij/Varken/pull/4) ([ghost](https://github.com/ghost))
- added sickrage portion [\#3](https://github.com/Boerderij/Varken/pull/3) ([ghost](https://github.com/ghost))
- added sickrage portion [\#3](https://github.com/Boerderij/Varken/pull/3) ([ghost](https://github.com/ghost))

Dockerfile

@ -1,4 +1,4 @@
FROM python:3.9.1-alpine
FROM python:3.10.5-alpine
ENV DEBUG="True" \
DATA_FOLDER="/config" \

README.md

@ -2,7 +2,7 @@
<img width="800" src="https://raw.githubusercontent.com/Boerderij/Varken/master/assets/varken_full_banner.jpg" alt="Logo Banner">
</p>
[![pipeline status](https://gitlab.com/boerderij/Varken/badges/master/pipeline.svg)](https://gitlab.com/boerderij/Varken/commits/master)
[![pipeline status](https://img.shields.io/github/workflow/status/Boerderij/Varken/varken?style=flat-square)](https://github.com/Boerderij/Varken/actions?query=workflow%3Avarken)
[![Discord](https://img.shields.io/discord/518970285773422592.svg?colorB=7289DA&label=Discord&logo=Discord&logoColor=7289DA&style=flat-square)](https://discord.gg/VjZ6qSM)
[![ko-fi](https://img.shields.io/badge/Buy%20Us%20A%20Coffee-Donate-ff813f.svg?logo=CoffeeScript&style=flat-square)](https://ko-fi.com/varken)
[![Docker-Layers](https://images.microbadger.com/badges/image/boerderij/varken.svg)](https://microbadger.com/images/boerderij/varken)
@ -17,7 +17,7 @@ ecosystem into InfluxDB using Grafana for a frontend
Requirements:
* [Python 3.6.7+](https://www.python.org/downloads/release/python-367/)
* [Python3-pip](https://pip.pypa.io/en/stable/installing/)
* [InfluxDB 1.8.x](https://www.influxdata.com/)
* [InfluxDB 1.8.x or 2.0.x](https://www.influxdata.com/)
* [Grafana](https://grafana.com/)
<p align="center">
@ -58,4 +58,4 @@ do not include database creation, please ensure you create an influx database
named `varken`
### Grafana
[Grafana Installation/Dashboard Documentation](https://wiki.cajun.pro/books/varken/page/grafana)
[Grafana Installation/Dashboard Documentation](https://wiki.cajun.pro/books/varken/page/grafana)

Varken.py

@ -1,19 +1,21 @@
import platform
import schedule
import distro
from time import sleep
from queue import Queue
from sys import version
from threading import Thread
from os import environ as env
from os import access, R_OK, getenv
from distro import linux_distribution
from os.path import isdir, abspath, dirname, join
from argparse import ArgumentParser, RawTextHelpFormatter
from logging import getLogger, StreamHandler, Formatter, DEBUG
# Needed to check version of python
from varken import structures # noqa
from varken.ombi import OmbiAPI
from varken.overseerr import OverseerrAPI
from varken.unifi import UniFiAPI
from varken import VERSION, BRANCH, BUILD_DATE
from varken.sonarr import SonarrAPI
@ -21,13 +23,14 @@ from varken.radarr import RadarrAPI
from varken.lidarr import LidarrAPI
from varken.iniparser import INIParser
from varken.dbmanager import DBManager
from varken.influxdb2manager import InfluxDB2Manager
from varken.helpers import GeoIPHandler
from varken.tautulli import TautulliAPI
from varken.sickchill import SickChillAPI
from varken.varkenlogger import VarkenLogger
PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
PLATFORM_LINUX_DISTRO = ' '.join(distro.id() + distro.version() + distro.name())
def thread(job, **kwargs):
@ -90,7 +93,15 @@ if __name__ == "__main__":
vl.logger.info("Varken v%s-%s %s", VERSION, BRANCH, BUILD_DATE)
CONFIG = INIParser(DATA_FOLDER)
DBMANAGER = DBManager(CONFIG.influx_server)
if CONFIG.influx2_enabled:
# Use INFLUX version 2
vl.logger.info('Using INFLUXDBv2')
DBMANAGER = InfluxDB2Manager(CONFIG.influx_server)
else:
vl.logger.info('Using INFLUXDB')
DBMANAGER = DBManager(CONFIG.influx_server)
QUEUE = Queue()
if CONFIG.sonarr_enabled:
@ -156,6 +167,18 @@ if __name__ == "__main__":
at_time = schedule.every(server.issue_status_run_seconds).seconds
at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id))
if CONFIG.overseerr_enabled:
for server in CONFIG.overseerr_servers:
OVERSEER = OverseerrAPI(server, DBMANAGER)
if server.get_request_total_counts:
at_time = schedule.every(server.request_total_run_seconds).seconds
at_time.do(thread, OVERSEER.get_request_counts).tag("overseerr-{}-get_request_counts"
.format(server.id))
if server.num_latest_requests_to_fetch > 0:
at_time = schedule.every(server.num_latest_requests_seconds).seconds
at_time.do(thread, OVERSEER.get_latest_requests).tag("overseerr-{}-get_latest_requests"
.format(server.id))
if CONFIG.sickchill_enabled:
for server in CONFIG.sickchill_servers:
SICKCHILL = SickChillAPI(server, DBMANAGER)
@ -171,7 +194,8 @@ if __name__ == "__main__":
# Run all on startup
SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled,
CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled]
CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled,
CONFIG.overseerr_enabled]
if not [enabled for enabled in SERVICES_ENABLED if enabled]:
vl.logger.error("All services disabled. Exiting")
exit(1)
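For context, a minimal sketch of the scheduling pattern Varken.py uses above: each enabled service registers a tagged `schedule` job whose action is the `thread()` helper, so collection work runs off the scheduler loop. The `demo_job` name and 30-second interval here are hypothetical; `schedule` is the library pinned in requirements.txt.

    from threading import Thread
    from time import sleep

    import schedule  # pinned as schedule==1.1.0 in requirements.txt

    def thread(job, **kwargs):
        # Mirrors the helper in Varken.py: run each job in its own thread
        # so one slow API call never blocks the scheduler loop.
        Thread(target=job, kwargs=dict(**kwargs)).start()

    def demo_job():  # hypothetical stand-in for e.g. OVERSEER.get_request_counts
        print('collecting...')

    schedule.every(30).seconds.do(thread, demo_job).tag('demo-1-get_stats')

    while True:  # Varken.py loops like this until interrupted
        schedule.run_pending()
        sleep(1)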

varken.example.ini

@ -3,10 +3,12 @@ sonarr_server_ids = 1,2
radarr_server_ids = 1,2
lidarr_server_ids = false
tautulli_server_ids = 1
ombi_server_ids = 1
ombi_server_ids = false
overseerr_server_ids = 1
sickchill_server_ids = false
unifi_server_ids = false
maxmind_license_key = xxxxxxxxxxxxxxxx
influx2_enabled = false
[influxdb]
url = influxdb.domain.tld
@ -16,6 +18,15 @@ verify_ssl = false
username = root
password = root
[influx2]
url = influxdb2.domain.tld
org = ORG
token = TOKEN
timeout = 10000
ssl = false
verify_ssl = false
bucket = varken
[tautulli-1]
url = tautulli.domain.tld:8181
fallback_ip = 1.1.1.1
@ -95,6 +106,17 @@ request_total_run_seconds = 300
get_issue_status_counts = true
issue_status_run_seconds = 300
[overseerr-1]
url = overseerr.domain.tld
apikey = xxxxxxxxxxxxxxxx
ssl = false
verify_ssl = false
get_request_total_counts = true
request_total_run_seconds = 30
get_latest_requests = true
num_latest_requests_to_fetch = 10
num_latest_requests_seconds = 30
[sickchill-1]
url = sickchill.domain.tld:8081
apikey = xxxxxxxxxxxxxxxx

docker-compose.yml

@ -6,7 +6,7 @@ services:
influxdb:
hostname: influxdb
container_name: influxdb
image: influxdb
image: influxdb:1.8
networks:
- internal
volumes:
@ -22,91 +22,6 @@ services:
- /path/to/docker-varken/config-folder:/config
environment:
- TZ=America/Chicago
- VRKN_GLOBAL_SONARR_SERVER_IDS=1,2
- VRKN_GLOBAL_RADARR_SERVER_IDS=1,2
- VRKN_GLOBAL_LIDARR_SERVER_IDS=false
- VRKN_GLOBAL_TAUTULLI_SERVER_IDS=1
- VRKN_GLOBAL_OMBI_SERVER_IDS=1
- VRKN_GLOBAL_SICKCHILL_SERVER_IDS=false
- VRKN_GLOBAL_UNIFI_SERVER_IDS=false
- VRKN_GLOBAL_MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx
- VRKN_INFLUXDB_URL=influxdb.domain.tld
- VRKN_INFLUXDB_PORT=8086
- VRKN_INFLUXDB_SSL=false
- VRKN_INFLUXDB_VERIFY_SSL=false
- VRKN_INFLUXDB_USERNAME=root
- VRKN_INFLUXDB_PASSWORD=root
- VRKN_TAUTULLI_1_URL=tautulli.domain.tld:8181
- VRKN_TAUTULLI_1_FALLBACK_IP=1.1.1.1
- VRKN_TAUTULLI_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_TAUTULLI_1_SSL=false
- VRKN_TAUTULLI_1_VERIFY_SSL=false
- VRKN_TAUTULLI_1_GET_ACTIVITY=true
- VRKN_TAUTULLI_1_GET_ACTIVITY_RUN_SECONDS=30
- VRKN_TAUTULLI_1_GET_STATS=true
- VRKN_TAUTULLI_1_GET_STATS_RUN_SECONDS=3600
- VRKN_SONARR_1_URL=sonarr1.domain.tld:8989
- VRKN_SONARR_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_SONARR_1_SSL=false
- VRKN_SONARR_1_VERIFY_SSL=false
- VRKN_SONARR_1_MISSING_DAYS=7
- VRKN_SONARR_1_MISSING_DAYS_RUN_SECONDS=300
- VRKN_SONARR_1_FUTURE_DAYS=1
- VRKN_SONARR_1_FUTURE_DAYS_RUN_SECONDS=300
- VRKN_SONARR_1_QUEUE=true
- VRKN_SONARR_1_QUEUE_RUN_SECONDS=300
- VRKN_SONARR_2_URL=sonarr2.domain.tld:8989
- VRKN_SONARR_2_APIKEY=yyyyyyyyyyyyyyyy
- VRKN_SONARR_2_SSL=false
- VRKN_SONARR_2_VERIFY_SSL=false
- VRKN_SONARR_2_MISSING_DAYS=7
- VRKN_SONARR_2_MISSING_DAYS_RUN_SECONDS=300
- VRKN_SONARR_2_FUTURE_DAYS=1
- VRKN_SONARR_2_FUTURE_DAYS_RUN_SECONDS=300
- VRKN_SONARR_2_QUEUE=true
- VRKN_SONARR_2_QUEUE_RUN_SECONDS=300
- VRKN_RADARR_1_URL=radarr1.domain.tld
- VRKN_RADARR_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_RADARR_1_SSL=false
- VRKN_RADARR_1_VERIFY_SSL=false
- VRKN_RADARR_1_QUEUE=true
- VRKN_RADARR_1_QUEUE_RUN_SECONDS=300
- VRKN_RADARR_1_GET_MISSING=true
- VRKN_RADARR_1_GET_MISSING_RUN_SECONDS=300
- VRKN_RADARR_2_URL=radarr2.domain.tld
- VRKN_RADARR_2_APIKEY=yyyyyyyyyyyyyyyy
- VRKN_RADARR_2_SSL=false
- VRKN_RADARR_2_VERIFY_SSL=false
- VRKN_RADARR_2_QUEUE=true
- VRKN_RADARR_2_QUEUE_RUN_SECONDS=300
- VRKN_RADARR_2_GET_MISSING=true
- VRKN_RADARR_2_GET_MISSING_RUN_SECONDS=300
- VRKN_LIDARR_1_URL=lidarr1.domain.tld:8686
- VRKN_LIDARR_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_LIDARR_1_SSL=false
- VRKN_LIDARR_1_VERIFY_SSL=false
- VRKN_LIDARR_1_MISSING_DAYS=30
- VRKN_LIDARR_1_MISSING_DAYS_RUN_SECONDS=300
- VRKN_LIDARR_1_FUTURE_DAYS=30
- VRKN_LIDARR_1_FUTURE_DAYS_RUN_SECONDS=300
- VRKN_LIDARR_1_QUEUE=true
- VRKN_LIDARR_1_QUEUE_RUN_SECONDS=300
- VRKN_OMBI_1_URL=ombi.domain.tld
- VRKN_OMBI_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_OMBI_1_SSL=false
- VRKN_OMBI_1_VERIFY_SSL=false
- VRKN_OMBI_1_GET_REQUEST_TYPE_COUNTS=true
- VRKN_OMBI_1_REQUEST_TYPE_RUN_SECONDS=300
- VRKN_OMBI_1_GET_REQUEST_TOTAL_COUNTS=true
- VRKN_OMBI_1_REQUEST_TOTAL_RUN_SECONDS=300
- VRKN_OMBI_1_GET_ISSUE_STATUS_COUNTS=true
- VRKN_OMBI_1_ISSUE_STATUS_RUN_SECONDS=300
- VRKN_SICKCHILL_1_URL=sickchill.domain.tld:8081
- VRKN_SICKCHILL_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_SICKCHILL_1_SSL=false
- VRKN_SICKCHILL_1_VERIFY_SSL=false
- VRKN_SICKCHILL_1_GET_MISSING=true
- VRKN_SICKCHILL_1_GET_MISSING_RUN_SECONDS=300
depends_on:
- influxdb
restart: unless-stopped
@ -118,7 +33,7 @@ services:
- internal
ports:
- 3000:3000
volumes:
volumes:
- /path/to/docker-grafana/config-folder:/config
environment:
- GF_PATHS_DATA=/config/data
@ -128,4 +43,4 @@ services:
depends_on:
- influxdb
- varken
restart: unless-stopped
restart: unless-stopped

requirements.txt

@ -2,9 +2,10 @@
# Potential requirements.
# pip3 install -r requirements.txt
#---------------------------------------------------------
requests==2.21
requests==2.28.1
geoip2==2.9.0
influxdb==5.2.0
schedule==0.6.0
schedule==1.1.0
distro==1.4.0
urllib3==1.24.2
urllib3==1.26.10
influxdb-client==1.14.0

historical_tautulli_import.py

@ -41,7 +41,7 @@ if __name__ == "__main__":
DBMANAGER = DBManager(CONFIG.influx_server)
if CONFIG.tautulli_enabled:
GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key)
for server in CONFIG.tautulli_servers:
TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
TAUTULLI.get_historical(days=opts.days)

(unRAID container template XML)

@ -51,5 +51,6 @@
<Labels/>
<Config Name="PGID" Target="PGID" Default="" Mode="" Description="Container Variable: PGID" Type="Variable" Display="always" Required="true" Mask="false">99</Config>
<Config Name="PUID" Target="PUID" Default="" Mode="" Description="Container Variable: PUID" Type="Variable" Display="always" Required="true" Mask="false">100</Config>
<Config Name="Debug" Target="DEBUG" Default="False" Mode="" Description="Turn Debug on or off" Type="Variable" Display="always" Required="false" Mask="false">False</Config>
<Config Name="Varken DataDir" Target="/config" Default="" Mode="rw" Description="Container Path: /config" Type="Path" Display="advanced-hide" Required="true" Mask="false">/mnt/user/appdata/varken</Config>
</Container>

varken/influxdb2manager.py (new file)

@ -0,0 +1,48 @@
from sys import exit
from logging import getLogger
import influxdb_client
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS
class InfluxDB2Manager(object):
def __init__(self, server):
self.server = server
self.logger = getLogger()
if self.server.url == "influxdb2.domain.tld":
self.logger.critical("You have not configured your varken.ini. Please read Wiki page for configuration")
exit()
self.influx = InfluxDBClient(url=self.server.url, token=self.server.token, org=self.server.org,
timeout=self.server.timeout, verify_ssl=self.server.verify_ssl,
ssl_ca_cert=self.server.ssl)
self.influx_write_api = self.influx.write_api(write_options=SYNCHRONOUS)
# Create the bucket if needed
bucket_api = self.influx.buckets_api()
try:
bucket = bucket_api.find_bucket_by_name(self.server.bucket)
if bucket is None:
self.logger.info('Creating bucket %s', self.server.bucket)
org_api = influxdb_client.service.organizations_service.OrganizationsService(self.influx.api_client)
orgs = org_api.get_orgs()
for org in orgs.orgs:
if org.name == self.server.org:
my_org = org
self.influx.buckets_api().create_bucket(bucket_name=self.server.bucket, org_id=my_org.id)
except Exception as e:
self.logger.error('Failed creating new InfluxDB bucket! Error: %s', e)
def write_points(self, data):
self.logger.info('Writing Data to InfluxDBv2 %s', data)
try:
self.influx_write_api.write(bucket=self.server.bucket, record=data)
except Exception as e:
self.logger.exception('Error writing data to influxdb2. Dropping this set of data. '
'Check your database! Error: %s', e)
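A hedged usage sketch for the manager added above: build an Influx2Server tuple (fields mirror varken/structures.py) and hand write_points() a list of Influx-style point dicts, the same shape the service modules build. The URL, org, and token below are placeholders, and constructing the manager opens a client and checks the bucket, so it needs a reachable InfluxDB 2.x server.

    from varken.structures import Influx2Server
    from varken.influxdb2manager import InfluxDB2Manager

    # Placeholder connection details, not working credentials.
    server = Influx2Server(url='http://influxdb2.local:8086', org='home',
                           token='my-token', bucket='varken',
                           timeout=10000, ssl=False, verify_ssl=False)

    manager = InfluxDB2Manager(server)  # creates the bucket if it is missing

    # write_points() takes a list of point dicts, e.g. the Overseerr
    # request-count payload built in varken/overseerr.py.
    manager.write_points([{
        'measurement': 'Overseerr',
        'tags': {'type': 'Request_Counts'},
        'time': '2022-01-01T00:00:00+00:00',
        'fields': {'pending': 1, 'approved': 2},
    }])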

varken/iniparser.py

@ -9,7 +9,7 @@ from configparser import ConfigParser, NoOptionError, NoSectionError
from varken.varkenlogger import BlacklistFilter
from varken.structures import SickChillServer, UniFiServer
from varken.helpers import clean_sid_check, rfc1918_ip_check, boolcheck
from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer
from varken.structures import SonarrServer, RadarrServer, OmbiServer, OverseerrServer, TautulliServer, InfluxServer, Influx2Server
class INIParser(object):
@ -17,7 +17,7 @@ class INIParser(object):
self.config = None
self.data_folder = data_folder
self.filtered_strings = None
self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'tautulli', 'sickchill', 'unifi']
self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'overseerr', 'tautulli', 'sickchill', 'unifi']
self.logger = getLogger()
self.influx_server = InfluxServer()
@ -107,6 +107,7 @@ class INIParser(object):
valid = match(regex, url_check) is not None
if not valid:
return url_check
if inc_port:
self.logger.error('%s is invalid in module [%s]! URL must host/IP and '
'port if not 80 or 443. ie. localhost:8080',
@ -144,23 +145,47 @@ class INIParser(object):
if read_file:
self.config = self.read_file('varken.ini')
self.config_blacklist()
# Parse InfluxDB options
try:
url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
include_port=False, section='influxdb')
port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
self.influx2_enabled = env.get('VRKN_GLOBAL_INFLUXDB2_ENABLED',
self.config.getboolean('global', 'influx2_enabled'))
username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
except NoOptionError as e:
self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
self.rectify_ini()
return
if self.influx2_enabled:
# Use INFLUX version 2
try:
url = self.url_check(env.get('VRKN_INFLUXDB2_URL', self.config.get('influx2', 'url')),
section='influx2', include_port=False)
ssl = boolcheck(env.get('VRKN_INFLUXDB2_SSL', self.config.get('influx2', 'ssl')))
verify_ssl = boolcheck(env.get('VRKN_INFLUXDB2_VERIFY_SSL', self.config.get('influx2', 'verify_ssl')))
self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
verify_ssl=verify_ssl)
org = env.get('VRKN_INFLUXDB2_ORG', self.config.get('influx2', 'org'))
bucket = env.get('VRKN_INFLUXDB2_BUCKET', self.config.get('influx2', 'bucket'))
token = env.get('VRKN_INFLUXDB2_TOKEN', self.config.get('influx2', 'token'))
timeout = env.get('VRKN_INFLUXDB2_TIMEOUT', self.config.get('influx2', 'timeout'))
except NoOptionError as e:
self.logger.error('Missing key in %s. Error: %s', "influx2", e)
self.rectify_ini()
return
self.influx_server = Influx2Server(url=url, token=token, org=org, timeout=timeout, ssl=ssl,
verify_ssl=verify_ssl, bucket=bucket)
else:
try:
url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
include_port=False, section='influxdb')
port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
except NoOptionError as e:
self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
self.rectify_ini()
return
self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
verify_ssl=verify_ssl)
# Check for all enabled services
for service in self.services:
@ -293,6 +318,27 @@ class INIParser(object):
issue_status_counts=issue_status_counts,
issue_status_run_seconds=issue_status_run_seconds)
if service == 'overseerr':
get_request_total_counts = boolcheck(env.get(
f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS',
self.config.get(section, 'get_request_total_counts')))
request_total_run_seconds = int(env.get(
f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS',
self.config.getint(section, 'request_total_run_seconds')))
num_latest_requests_to_fetch = int(env.get(
f'VRKN_{envsection}_GET_LATEST_REQUESTS_TO_FETCH',
self.config.getint(section, 'num_latest_requests_to_fetch')))
num_latest_requests_seconds = int(env.get(
f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS',
self.config.getint(section, 'num_latest_requests_seconds')))
server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey,
verify_ssl=verify_ssl,
get_request_total_counts=get_request_total_counts,
request_total_run_seconds=request_total_run_seconds,
num_latest_requests_to_fetch=num_latest_requests_to_fetch,
num_latest_requests_seconds=num_latest_requests_seconds)
if service == 'sickchill':
get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING',
self.config.get(section, 'get_missing')))
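The parsing pattern above reads every option from a VRKN_* environment variable first and only falls back to varken.ini, which is what lets the docker-compose example drop its long environment list. A minimal sketch of that precedence, with an illustrative option and a local stand-in for varken.helpers.boolcheck:

    from configparser import ConfigParser
    from os import environ as env

    config = ConfigParser()
    config.read_string('[influx2]\nssl = false\n')

    def boolcheck(value):
        # Stand-in for varken.helpers.boolcheck: parse "true"/"false" strings.
        return str(value).strip().lower() == 'true'

    # The environment variable wins; the ini value is only the fallback.
    ssl = boolcheck(env.get('VRKN_INFLUXDB2_SSL', config.get('influx2', 'ssl')))
    print(ssl)  # False unless VRKN_INFLUXDB2_SSL=true is exported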

varken/overseerr.py (new file, 133 lines)

@ -0,0 +1,133 @@
from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone
from varken.helpers import connection_handler, hashit
from varken.structures import OverseerrRequestCounts
class OverseerrAPI(object):
def __init__(self, server, dbmanager):
self.dbmanager = dbmanager
self.server = server
# Create session to reduce server web thread load, and globally define pageSize for all requests
self.session = Session()
self.session.headers = {'X-Api-Key': self.server.api_key}
self.logger = getLogger()
def __repr__(self):
return f"<overseerr-{self.server.id}>"
def get_request_counts(self):
now = datetime.now(timezone.utc).astimezone().isoformat()
endpoint = '/api/v1/request/count'
req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
get_req = connection_handler(self.session, req, self.server.verify_ssl)
if not get_req:
return
requests = OverseerrRequestCounts(**get_req)
influx_payload = [
{
"measurement": "Overseerr",
"tags": {
"type": "Request_Counts"
},
"time": now,
"fields": {
"pending": requests.pending,
"approved": requests.approved,
"processing": requests.processing,
"available": requests.available,
"total": requests.total,
"movies": requests.movie,
"tv": requests.tv,
"declined": requests.declined
}
}
]
if influx_payload:
self.dbmanager.write_points(influx_payload)
else:
self.logger.warning("No data to send to influx for overseerr-request-counts instance, discarding.")
def get_latest_requests(self):
now = datetime.now(timezone.utc).astimezone().isoformat()
endpoint = '/api/v1/request?take=' + str(self.server.num_latest_requests_to_fetch) + '&filter=all&sort=added'
movie_endpoint = '/api/v1/movie/'
tv_endpoint = '/api/v1/tv/'
# GET THE LATEST n REQUESTS
req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
get_latest_req = connection_handler(self.session, req, self.server.verify_ssl)
# RETURN NOTHING IF NO RESULTS
if not get_latest_req:
self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.")
return
influx_payload = []
# Request Type: Movie = 1, TV Show = 0
for result in get_latest_req['results']:
if result['type'] == 'tv':
req = self.session.prepare_request(Request('GET',
self.server.url +
tv_endpoint +
str(result['media']['tmdbId'])))
get_tv_req = connection_handler(self.session, req, self.server.verify_ssl)
hash_id = hashit(f'{get_tv_req["id"]}{get_tv_req["name"]}')
influx_payload.append(
{
"measurement": "Overseerr",
"tags": {
"type": "Requests",
"server": self.server.id,
"request_type": 0,
"status": get_tv_req['mediaInfo']['status'],
"title": get_tv_req['name'],
"requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['displayName'],
"requested_date": get_tv_req['mediaInfo']['requests'][0]['createdAt']
},
"time": now,
"fields": {
"hash": hash_id
}
}
)
if result['type'] == 'movie':
req = self.session.prepare_request(Request('GET',
self.server.url +
movie_endpoint +
str(result['media']['tmdbId'])))
get_movie_req = connection_handler(self.session, req, self.server.verify_ssl)
hash_id = hashit(f'{get_movie_req["id"]}{get_movie_req["title"]}')
influx_payload.append(
{
"measurement": "Overseerr",
"tags": {
"type": "Requests",
"server": self.server.id,
"request_type": 1,
"status": get_movie_req['mediaInfo']['status'],
"title": get_movie_req['title'],
"requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['displayName'],
"requested_date": get_movie_req['mediaInfo']['requests'][0]['createdAt']
},
"time": now,
"fields": {
"hash": hash_id
}
}
)
if influx_payload:
self.dbmanager.write_points(influx_payload)
else:
self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.")

varken/radarr.py

@ -2,7 +2,7 @@ from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone
from varken.structures import RadarrMovie, Queue
from varken.structures import QueuePages, RadarrMovie, RadarrQueue
from varken.helpers import hashit, connection_handler
@ -19,7 +19,7 @@ class RadarrAPI(object):
return f"<radarr-{self.server.id}>"
def get_missing(self):
endpoint = '/api/movie'
endpoint = '/api/v3/movie'
now = datetime.now(timezone.utc).astimezone().isoformat()
influx_payload = []
missing = []
@ -37,7 +37,7 @@ class RadarrAPI(object):
return
for movie in movies:
if movie.monitored and not movie.downloaded:
if movie.monitored and not movie.hasFile:
if movie.isAvailable:
ma = 0
else:
@ -66,35 +66,53 @@ class RadarrAPI(object):
}
)
self.dbmanager.write_points(influx_payload)
if influx_payload:
self.dbmanager.write_points(influx_payload)
else:
self.logger.warning("No data to send to influx for radarr-missing instance, discarding.")
def get_queue(self):
endpoint = '/api/queue'
endpoint = '/api/v3/queue'
now = datetime.now(timezone.utc).astimezone().isoformat()
influx_payload = []
pageSize = 250
params = {'pageSize': pageSize, 'includeMovie': True, 'includeUnknownMovieItems': False}
queueResponse = []
queue = []
req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
for movie in get:
try:
movie['movie'] = RadarrMovie(**movie['movie'])
except TypeError as e:
self.logger.error('TypeError has occurred : %s while creating RadarrMovie structure', e)
response = QueuePages(**get)
queueResponse.extend(response.records)
while response.totalRecords > response.page * response.pageSize:
page = response.page + 1
params = {'pageSize': pageSize, 'page': page, 'includeMovie': True, 'includeUnknownMovieItems': False}
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
try:
download_queue = [Queue(**movie) for movie in get]
except TypeError as e:
self.logger.error('TypeError has occurred : %s while creating Queue structure', e)
response = QueuePages(**get)
queueResponse.extend(response.records)
download_queue = []
for queueItem in queueResponse:
try:
download_queue.append(RadarrQueue(**queueItem))
except TypeError as e:
self.logger.warning('TypeError has occurred : %s while creating RadarrQueue structure', e)
return
if not download_queue:
self.logger.warning("No data to send to influx for radarr-queue instance, discarding.")
return
for queue_item in download_queue:
movie = queue_item.movie
movie = RadarrMovie(**queue_item.movie)
name = f'{movie.title} ({movie.year})'
@ -128,4 +146,7 @@ class RadarrAPI(object):
}
)
self.dbmanager.write_points(influx_payload)
if influx_payload:
self.dbmanager.write_points(influx_payload)
else:
self.logger.warning("No data to send to influx for radarr-queue instance, discarding.")
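Both the Radarr and Sonarr queue fetches above now page through the v3 /queue endpoint instead of reading one flat list. A generic sketch of that loop, where fetch() is a hypothetical stand-in for the prepared-request plus connection_handler pair and QueuePages mirrors varken/structures.py:

    from varken.structures import QueuePages

    PAGE_SIZE = 250

    def fetch(params):
        # Hypothetical: GET server.url + '/api/v3/queue' with these params,
        # returning the decoded JSON dict or None on failure.
        raise NotImplementedError

    def get_all_queue_records():
        records = []
        get = fetch({'pageSize': PAGE_SIZE})
        if not get:
            return records
        response = QueuePages(**get)
        records.extend(response.records)
        # Keep paging while records remain beyond the pages fetched so far.
        while response.totalRecords > response.page * response.pageSize:
            get = fetch({'pageSize': PAGE_SIZE, 'page': response.page + 1})
            if not get:
                break
            response = QueuePages(**get)
            records.extend(response.records)
        return records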

varken/sonarr.py

@ -2,7 +2,7 @@ from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone, date, timedelta
from varken.structures import Queue, SonarrTVShow
from varken.structures import SonarrEpisode, SonarrTVShow, SonarrQueue, QueuePages
from varken.helpers import hashit, connection_handler
@ -19,16 +19,28 @@ class SonarrAPI(object):
def __repr__(self):
return f"<sonarr-{self.server.id}>"
def get_episode(self, id):
endpoint = '/api/v3/episode'
params = {'episodeIds': id}
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
return SonarrEpisode(**get[0])
def get_calendar(self, query="Missing"):
endpoint = '/api/calendar/'
endpoint = '/api/v3/calendar/'
today = str(date.today())
last_days = str(date.today() - timedelta(days=self.server.missing_days))
future = str(date.today() + timedelta(days=self.server.future_days))
now = datetime.now(timezone.utc).astimezone().isoformat()
if query == "Missing":
params = {'start': last_days, 'end': today}
params = {'start': last_days, 'end': today, 'includeSeries': True}
else:
params = {'start': today, 'end': future}
params = {'start': today, 'end': future, 'includeSeries': True}
influx_payload = []
air_days = []
missing = []
@ -42,22 +54,24 @@ class SonarrAPI(object):
tv_shows = []
for show in get:
try:
tv_shows.append(SonarrTVShow(**show))
tv_shows.append(SonarrEpisode(**show))
except TypeError as e:
self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. Data '
self.logger.error('TypeError has occurred : %s while creating SonarrEpisode structure for show. Data '
'attempted is: %s', e, show)
for show in tv_shows:
sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
if show.hasFile:
for episode in tv_shows:
tvShow = episode.series
sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}'
if episode.hasFile:
downloaded = 1
else:
downloaded = 0
if query == "Missing":
if show.monitored and not downloaded:
missing.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
if episode.monitored and not downloaded:
missing.append((tvShow['title'], downloaded, sxe, episode.title,
episode.airDateUtc, episode.seriesId))
else:
air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
air_days.append((tvShow['title'], downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing):
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
@ -81,45 +95,68 @@ class SonarrAPI(object):
}
)
self.dbmanager.write_points(influx_payload)
if influx_payload:
self.dbmanager.write_points(influx_payload)
else:
self.logger.warning("No data to send to influx for sonarr-calendar instance, discarding.")
def get_queue(self):
influx_payload = []
endpoint = '/api/queue'
endpoint = '/api/v3/queue'
now = datetime.now(timezone.utc).astimezone().isoformat()
pageSize = 250
params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True,
'includeUnknownSeriesItems': False}
queueResponse = []
queue = []
req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
response = QueuePages(**get)
queueResponse.extend(response.records)
while response.totalRecords > response.page * response.pageSize:
page = response.page + 1
params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True,
'includeUnknownSeriesItems': False}
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
response = QueuePages(**get)
queueResponse.extend(response.records)
download_queue = []
for show in get:
for queueItem in queueResponse:
try:
download_queue.append(Queue(**show))
download_queue.append(SonarrQueue(**queueItem))
except TypeError as e:
self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: '
'%s', e, show)
'%s', e, queueItem)
if not download_queue:
return
for show in download_queue:
for queueItem in download_queue:
tvShow = SonarrTVShow(**queueItem.series)
try:
sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}"
episode = SonarrEpisode(**queueItem.episode)
sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
except TypeError as e:
self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \
Remove invalid queue entry. Data attempted is: %s', e, show)
Remove invalid queue entry. Data attempted is: %s', e, queueItem)
continue
if show.protocol.upper() == 'USENET':
if queueItem.protocol.upper() == 'USENET':
protocol_id = 1
else:
protocol_id = 0
queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
protocol_id, sxe, show.id, show.quality['quality']['name']))
queue.append((tvShow.title, episode.title, queueItem.protocol.upper(),
protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name']))
for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue:
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
@ -143,7 +180,8 @@ class SonarrAPI(object):
}
}
)
if influx_payload:
self.dbmanager.write_points(influx_payload)
else:
self.logger.debug("No data to send to influx for sonarr instance, discarding.")
self.logger.warning("No data to send to influx for sonarr-queue instance, discarding.")

varken/structures.py

@ -20,6 +20,16 @@ class InfluxServer(NamedTuple):
verify_ssl: bool = False
class Influx2Server(NamedTuple):
url: str = 'localhost'
org: str = 'server'
token: str = 'TOKEN'
bucket: str = 'varken'
timeout: int = 10000
ssl: bool = False
verify_ssl: bool = False
class SonarrServer(NamedTuple):
api_key: str = None
future_days: int = 0
@ -57,6 +67,17 @@ class OmbiServer(NamedTuple):
verify_ssl: bool = False
class OverseerrServer(NamedTuple):
api_key: str = None
id: int = None
url: str = None
verify_ssl: bool = False
get_request_total_counts: bool = False
request_total_run_seconds: int = 30
num_latest_requests_to_fetch: int = 10
num_latest_requests_seconds: int = 30
class TautulliServer(NamedTuple):
api_key: str = None
fallback_ip: str = None
@ -91,22 +112,13 @@ class UniFiServer(NamedTuple):
# Shared
class Queue(NamedTuple):
downloadId: str = None
episode: dict = None
estimatedCompletionTime: str = None
id: int = None
movie: dict = None
protocol: str = None
quality: dict = None
series: dict = None
size: float = None
sizeleft: float = None
status: str = None
statusMessages: list = None
timeleft: str = None
title: str = None
trackedDownloadStatus: str = None
class QueuePages(NamedTuple):
page: int = None
pageSize: int = None
sortKey: str = None
sortDirection: str = None
totalRecords: str = None
records: list = None
# Ombi Structures
@ -127,8 +139,10 @@ class OmbiTVRequest(NamedTuple):
childRequests: list = None
denied: bool = None
deniedReason: None = None
externalProviderId: str = None
id: int = None
imdbId: str = None
languageProfile: str = None
markedAsDenied: str = None
overview: str = None
posterPath: str = None
@ -145,72 +159,159 @@ class OmbiTVRequest(NamedTuple):
class OmbiMovieRequest(NamedTuple):
approved: bool = None
approved4K: bool = None
available: bool = None
available4K: bool = None
background: str = None
canApprove: bool = None
denied: bool = None
denied4K: None = None
deniedReason: None = None
deniedReason4K: None = None
digitalRelease: bool = None
digitalReleaseDate: None = None
has4KRequest: bool = None
id: int = None
imdbId: str = None
is4kRequest: bool = None
issueId: None = None
issues: None = None
langCode: str = None
languageCode: str = None
markedAsApproved: str = None
markedAsApproved4K: str = None
markedAsAvailable: None = None
markedAsAvailable4K: None = None
markedAsDenied: str = None
markedAsDenied4K: str = None
overview: str = None
posterPath: str = None
qualityOverride: int = None
released: bool = None
releaseDate: str = None
requestedByAlias: str = None
requestedDate: str = None
requestedDate4k: str = None
requestedUser: dict = None
requestedUserId: str = None
requestStatus: str = None
requestType: int = None
rootPathOverride: int = None
showSubscribe: bool = None
source: int = None
status: str = None
subscribed: bool = None
theMovieDbId: int = None
title: str = None
langCode: str = None
languageCode: str = None
requestedByAlias: str = None
requestStatus: str = None
# Overseerr
class OverseerrRequestCounts(NamedTuple):
pending: int = None
approved: int = None
processing: int = None
available: int = None
total: int = None
movie: int = None
tv: int = None
declined: int = None
# Sonarr
class SonarrTVShow(NamedTuple):
added: str = None
airTime: str = None
alternateTitles: list = None
certification: str = None
cleanTitle: str = None
ended: bool = None
firstAired: str = None
genres: list = None
id: int = None
images: list = None
imdbId: str = None
languageProfileId: int = None
monitored: bool = None
nextAiring: str = None
network: str = None
overview: str = None
path: str = None
previousAiring: str = None
qualityProfileId: int = None
ratings: dict = None
rootFolderPath: str = None
runtime: int = None
seasonFolder: bool = None
seasons: list = None
seriesType: str = None
sortTitle: str = None
statistics: dict = None
status: str = None
tags: list = None
title: str = None
titleSlug: str = None
tvdbId: int = None
tvMazeId: int = None
tvRageId: int = None
useSceneNumbering: bool = None
year: int = None
class SonarrEpisode(NamedTuple):
absoluteEpisodeNumber: int = None
airDate: str = None
airDateUtc: str = None
episodeFile: dict = None
episodeFileId: int = None
episodeNumber: int = None
grabbed: bool = None
hasFile: bool = None
id: int = None
lastSearchTime: str = None
monitored: bool = None
overview: str = None
sceneAbsoluteEpisodeNumber: int = None
sceneEpisodeNumber: int = None
sceneSeasonNumber: int = None
seasonNumber: int = None
series: dict = None
seriesId: int = None
title: str = None
unverifiedSceneNumbering: bool = None
sceneAbsoluteEpisodeNumber: int = None
sceneEpisodeNumber: int = None
sceneSeasonNumber: int = None
series: SonarrTVShow = None
tvdbId: int = None
class SonarrQueue(NamedTuple):
downloadClient: str = None
downloadId: str = None
episodeId: int = None
id: int = None
indexer: str = None
language: dict = None
protocol: str = None
quality: dict = None
size: float = None
sizeleft: float = None
status: str = None
statusMessages: list = None
title: str = None
trackedDownloadState: str = None
trackedDownloadStatus: str = None
seriesId: int = None
errorMessage: str = None
outputPath: str = None
series: SonarrTVShow = None
episode: SonarrEpisode = None
timeleft: str = None
estimatedCompletionTime: str = None
# Radarr
class RadarrMovie(NamedTuple):
added: str = None
addOptions: str = None
alternativeTitles: list = None
alternateTitles: list = None
certification: str = None
cleanTitle: str = None
downloaded: bool = None
collection: dict = None
digitalRelease: str = None
folderName: str = None
genres: list = None
hasFile: bool = None
@ -219,32 +320,58 @@ class RadarrMovie(NamedTuple):
imdbId: str = None
inCinemas: str = None
isAvailable: bool = None
lastInfoSync: str = None
minimumAvailability: str = None
monitored: bool = None
movieFile: dict = None
originalTitle: str = None
overview: str = None
path: str = None
pathState: str = None
physicalRelease: str = None
physicalReleaseNote: str = None
profileId: int = None
qualityProfileId: int = None
ratings: dict = None
runtime: int = None
secondaryYear: str = None
secondaryYear: int = None
secondaryYearSourceId: int = None
sizeOnDisk: int = None
sizeOnDisk: float = None
sortTitle: str = None
status: str = None
studio: str = None
tags: list = None
title: str = None
titleSlug: str = None
tmdbId: int = None
website: str = None
year: int = None
youTubeTrailerId: str = None
title: str = None
originalLanguage: str = None
addOptions: str = None
popularity: str = None
# Radarr Queue
class RadarrQueue(NamedTuple):
customFormats: list = None
downloadClient: str = None
downloadId: str = None
id: int = None
indexer: str = None
languages: list = None
movieId: int = None
protocol: str = None
quality: dict = None
size: float = None
sizeleft: float = None
status: str = None
statusMessages: list = None
title: str = None
trackedDownloadState: str = None
trackedDownloadStatus: str = None
timeleft: str = None
estimatedCompletionTime: str = None
errorMessage: str = None
outputPath: str = None
movie: RadarrMovie = None
timeleft: str = None
# Sickchill
@ -364,6 +491,7 @@ class TautulliStream(NamedTuple):
reference_id: int = None
relay: int = None
relayed: int = None
row_id: int = None
section_id: str = None
secure: str = None
selected: int = None
@ -402,6 +530,7 @@ class TautulliStream(NamedTuple):
stream_video_codec: str = None
stream_video_codec_level: str = None
stream_video_decision: str = None
stream_video_dynamic_range: str = None
stream_video_framerate: str = None
stream_video_full_resolution: str = None
stream_video_height: str = None
@ -461,6 +590,7 @@ class TautulliStream(NamedTuple):
video_codec: str = None
video_codec_level: str = None
video_decision: str = None
video_dynamic_range: str = None
video_frame_rate: str = None
video_framerate: str = None
video_full_resolution: str = None
@ -491,7 +621,9 @@ class LidarrQueue(NamedTuple):
sizeleft: float = None
status: str = None
trackedDownloadStatus: str = None
trackedDownloadState: str = None
statusMessages: list = None
errorMessage: str = None
downloadId: str = None
protocol: str = None
downloadClient: str = None
@ -499,6 +631,7 @@ class LidarrQueue(NamedTuple):
outputPath: str = None
downloadForced: bool = None
id: int = None
estimatedCompletionTime: str = None
class LidarrAlbum(NamedTuple):
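The structures above all follow one pattern: NamedTuples whose fields default to None, so an API JSON object can be splatted in with ** and any field the upstream API adds raises a TypeError that the service modules catch and log. A small hedged sketch with a hypothetical structure:

    from typing import NamedTuple

    class DemoQueueItem(NamedTuple):  # hypothetical, mirrors RadarrQueue et al.
        id: int = None
        title: str = None
        protocol: str = None

    item = DemoQueueItem(**{'id': 1, 'title': 'Example', 'protocol': 'usenet'})
    print(item.title)  # Example

    try:
        DemoQueueItem(**{'id': 2, 'newApiField': True})
    except TypeError as e:
        # The same failure mode the API modules log and skip over.
        print('TypeError has occurred :', e)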

varken/tautulli.py

@ -129,6 +129,7 @@ class TautulliAPI(object):
"tags": {
"type": "Session",
"session_id": session.session_id,
"ip_address": session.ip_address,
"friendly_name": session.friendly_name,
"username": session.username,
"title": session.full_title,
@ -327,6 +328,7 @@ class TautulliAPI(object):
"tags": {
"type": "Session",
"session_id": session.session_id,
"ip_address": session.ip_address,
"friendly_name": session.friendly_name,
"username": session.user,
"title": session.full_title,