Influxdb #1
15 changed files with 552 additions and 198 deletions

CHANGELOG.md (24 changes)
							|  | @ -1,5 +1,29 @@ | |||
| # Change Log | ||||
| 
 | ||||
| ## [v1.7.7](https://github.com/Boerderij/Varken/tree/v1.7.7) (2020-12-21) | ||||
| [Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.6...v1.7.7) | ||||
| 
 | ||||
| **Implemented enhancements:** | ||||
| - \[Enhancement\] Ombi 4.0 compatibility [\#186](https://github.com/Boerderij/Varken/issues/186) | ||||
|   ([samwiseg0](https://github.com/samwiseg0)) | ||||
| 
 | ||||
| **Merged pull requests:** | ||||
| 
 | ||||
| - v1.7.7 Merge [\#191](https://github.com/Boerderij/Varken/pull/191)  | ||||
|   ([DirtyCajunRice](https://github.com/DirtyCajunRice)) | ||||
| - Type Error fix [\#177](https://github.com/Boerderij/Varken/pull/177) | ||||
|   ([derek-miller](https://github.com/derek-miller)) | ||||
| 
 | ||||
| **Fixed bugs:** | ||||
| 
 | ||||
| - \[BUG\] Influxdb exit code [\#174](https://github.com/Boerderij/Varken/issues/174)  | ||||
|   ([samwiseg0](https://github.com/samwiseg0)) | ||||
| 
 | ||||
| **Notes:** | ||||
| - Now built via GitHub Actions | ||||
| - Available on ghcr.io, quay.io, and Docker Hub | ||||
| - Nightly builds done to accommodate Dependabot PRs | ||||
| 
 | ||||
| ## [v1.7.6](https://github.com/Boerderij/Varken/tree/v1.7.6) (2020-01-01) | ||||
| [Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.5...v1.7.6) | ||||
| 
 | ||||
|  |  | |||
README.md
|  | @ -17,7 +17,7 @@ ecosystem into InfluxDB using Grafana for a frontend | |||
| Requirements: | ||||
| * [Python 3.6.7+](https://www.python.org/downloads/release/python-367/) | ||||
| * [Python3-pip](https://pip.pypa.io/en/stable/installing/) | ||||
| * [InfluxDB 1.8.x](https://www.influxdata.com/) | ||||
| * [InfluxDB 1.8.x or 2.x](https://www.influxdata.com/) | ||||
| * [Grafana](https://grafana.com/) | ||||
| 
 | ||||
| <p align="center"> | ||||
|  | @ -50,7 +50,7 @@ Please read [Asking for Support](https://wiki.cajun.pro/books/varken/chapter/ask | |||
|      | ||||
| ### InfluxDB | ||||
| [InfluxDB Installation Documentation](https://wiki.cajun.pro/books/varken/page/influxdb-d1f) | ||||
| Note: Only v1.8.x is currently supported. | ||||
| Note: Only v1.8.x and v2.x are currently supported. | ||||
|   | ||||
| Influxdb is required but not packaged as part of Varken. Varken will create | ||||
| its database on its own. If you choose to give varken user permissions that | ||||
|  |  | |||
							
								
								
									
Varken.py (21 changes)
							|  | @ -1,19 +1,21 @@ | |||
| import platform | ||||
| import schedule | ||||
| import distro | ||||
| from time import sleep | ||||
| from queue import Queue | ||||
| from sys import version | ||||
| from threading import Thread | ||||
| from os import environ as env | ||||
| from os import access, R_OK, getenv | ||||
| from distro import linux_distribution | ||||
| from os.path import isdir, abspath, dirname, join | ||||
| from argparse import ArgumentParser, RawTextHelpFormatter | ||||
| from logging import getLogger, StreamHandler, Formatter, DEBUG | ||||
| 
 | ||||
| 
 | ||||
| # Needed to check version of python | ||||
| from varken import structures  # noqa | ||||
| from varken.ombi import OmbiAPI | ||||
| from varken.overseerr import OverseerrAPI | ||||
| from varken.unifi import UniFiAPI | ||||
| from varken import VERSION, BRANCH, BUILD_DATE | ||||
| from varken.sonarr import SonarrAPI | ||||
|  | @ -27,7 +29,7 @@ from varken.sickchill import SickChillAPI | |||
| from varken.varkenlogger import VarkenLogger | ||||
| 
 | ||||
| 
 | ||||
| PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x) | ||||
| PLATFORM_LINUX_DISTRO = ' '.join([distro.id(), distro.version(), distro.name()]) | ||||
| 
 | ||||
| 
 | ||||
| def thread(job, **kwargs): | ||||
|  | @ -156,6 +158,18 @@ if __name__ == "__main__": | |||
|                 at_time = schedule.every(server.issue_status_run_seconds).seconds | ||||
|                 at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id)) | ||||
| 
 | ||||
|     if CONFIG.overseerr_enabled: | ||||
|         for server in CONFIG.overseerr_servers: | ||||
|             OVERSEER = OverseerrAPI(server, DBMANAGER) | ||||
|             if server.get_request_total_counts: | ||||
|                 at_time = schedule.every(server.request_total_run_seconds).seconds | ||||
|                 at_time.do(thread, OVERSEER.get_request_counts).tag("overseerr-{}-get_request_counts" | ||||
|                                                                     .format(server.id)) | ||||
|             if server.num_latest_requests_to_fetch > 0: | ||||
|                 at_time = schedule.every(server.num_latest_requests_seconds).seconds | ||||
|                 at_time.do(thread, OVERSEER.get_latest_requests).tag("overseerr-{}-get_latest_requests" | ||||
|                                                                      .format(server.id)) | ||||
| 
 | ||||
|     if CONFIG.sickchill_enabled: | ||||
|         for server in CONFIG.sickchill_servers: | ||||
|             SICKCHILL = SickChillAPI(server, DBMANAGER) | ||||
|  | @ -171,7 +185,8 @@ if __name__ == "__main__": | |||
| 
 | ||||
|     # Run all on startup | ||||
|     SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled, | ||||
|                         CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled] | ||||
|                         CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled, | ||||
|                         CONFIG.overseerr_enabled] | ||||
|     if not [enabled for enabled in SERVICES_ENABLED if enabled]: | ||||
|         vl.logger.error("All services disabled. Exiting") | ||||
|         exit(1) | ||||
|  |  | |||
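For context on the scheduling pattern above: each enabled service gets a `schedule` job on its configured interval, and the job body is run off the scheduler thread. A minimal runnable sketch of that pattern; the `fake_poll` job and the 30-second interval are illustrative, not from the PR:

```python
import schedule
from time import sleep
from threading import Thread

def thread(job, **kwargs):
    # Varken's helper: run the collector off the scheduler thread
    Thread(target=job, kwargs=kwargs).start()

def fake_poll():
    print("polling overseerr request counts...")

# Mirrors: schedule.every(server.request_total_run_seconds).seconds.do(...).tag(...)
schedule.every(30).seconds.do(thread, fake_poll).tag("overseerr-1-get_request_counts")

while True:
    schedule.run_pending()
    sleep(1)
```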
data/varken.example.ini
|  | @ -3,7 +3,8 @@ sonarr_server_ids = 1,2 | |||
| radarr_server_ids = 1,2 | ||||
| lidarr_server_ids = false | ||||
| tautulli_server_ids = 1 | ||||
| ombi_server_ids = 1 | ||||
| ombi_server_ids = false | ||||
| overseerr_server_ids = 1 | ||||
| sickchill_server_ids = false | ||||
| unifi_server_ids = false | ||||
| maxmind_license_key = xxxxxxxxxxxxxxxx | ||||
|  | @ -15,6 +16,7 @@ ssl = false | |||
| verify_ssl = false | ||||
| username = root | ||||
| password = root | ||||
| org = - | ||||
| 
 | ||||
| [tautulli-1] | ||||
| url = tautulli.domain.tld:8181 | ||||
|  | @ -95,6 +97,17 @@ request_total_run_seconds = 300 | |||
| get_issue_status_counts = true | ||||
| issue_status_run_seconds = 300 | ||||
| 
 | ||||
| [overseerr-1] | ||||
| url = overseerr.domain.tld | ||||
| apikey = xxxxxxxxxxxxxxxx | ||||
| ssl = false | ||||
| verify_ssl = false | ||||
| get_request_total_counts = true | ||||
| request_total_run_seconds = 30 | ||||
| get_latest_requests = true | ||||
| num_latest_requests_to_fetch = 10 | ||||
| num_latest_requests_seconds = 30 | ||||
| 
 | ||||
| [sickchill-1] | ||||
| url = sickchill.domain.tld:8081 | ||||
| apikey = xxxxxxxxxxxxxxxx | ||||
|  |  | |||
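The new `[overseerr-1]` block follows the same shape as the other service sections. A small sketch of how such a section reads back with `configparser`; the file name is assumed here:

```python
from configparser import ConfigParser

config = ConfigParser()
config.read('varken.ini')  # assumed path for illustration

section = 'overseerr-1'
url = config.get(section, 'url')
get_request_total_counts = config.getboolean(section, 'get_request_total_counts')
request_total_run_seconds = config.getint(section, 'request_total_run_seconds')
num_latest_requests_to_fetch = config.getint(section, 'num_latest_requests_to_fetch')
print(url, get_request_total_counts, request_total_run_seconds,
      num_latest_requests_to_fetch)
```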
docker-compose.yml
|  | @ -6,7 +6,7 @@ services: | |||
|   influxdb: | ||||
|     hostname: influxdb | ||||
|     container_name: influxdb | ||||
|     image: influxdb | ||||
|     image: influxdb:1.8 | ||||
|     networks: | ||||
|       - internal | ||||
|     volumes: | ||||
|  | @ -22,91 +22,6 @@ services: | |||
|       - /path/to/docker-varken/config-folder:/config | ||||
|     environment: | ||||
|       - TZ=America/Chicago | ||||
|       - VRKN_GLOBAL_SONARR_SERVER_IDS=1,2 | ||||
|       - VRKN_GLOBAL_RADARR_SERVER_IDS=1,2 | ||||
|       - VRKN_GLOBAL_LIDARR_SERVER_IDS=false | ||||
|       - VRKN_GLOBAL_TAUTULLI_SERVER_IDS=1 | ||||
|       - VRKN_GLOBAL_OMBI_SERVER_IDS=1 | ||||
|       - VRKN_GLOBAL_SICKCHILL_SERVER_IDS=false | ||||
|       - VRKN_GLOBAL_UNIFI_SERVER_IDS=false | ||||
|       - VRKN_GLOBAL_MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx | ||||
|       - VRKN_INFLUXDB_URL=influxdb.domain.tld | ||||
|       - VRKN_INFLUXDB_PORT=8086 | ||||
|       - VRKN_INFLUXDB_SSL=false | ||||
|       - VRKN_INFLUXDB_VERIFY_SSL=false | ||||
|       - VRKN_INFLUXDB_USERNAME=root | ||||
|       - VRKN_INFLUXDB_PASSWORD=root | ||||
|       - VRKN_TAUTULLI_1_URL=tautulli.domain.tld:8181 | ||||
|       - VRKN_TAUTULLI_1_FALLBACK_IP=1.1.1.1 | ||||
|       - VRKN_TAUTULLI_1_APIKEY=xxxxxxxxxxxxxxxx | ||||
|       - VRKN_TAUTULLI_1_SSL=false | ||||
|       - VRKN_TAUTULLI_1_VERIFY_SSL=false | ||||
|       - VRKN_TAUTULLI_1_GET_ACTIVITY=true | ||||
|       - VRKN_TAUTULLI_1_GET_ACTIVITY_RUN_SECONDS=30 | ||||
|       - VRKN_TAUTULLI_1_GET_STATS=true | ||||
|       - VRKN_TAUTULLI_1_GET_STATS_RUN_SECONDS=3600 | ||||
|       - VRKN_SONARR_1_URL=sonarr1.domain.tld:8989 | ||||
|       - VRKN_SONARR_1_APIKEY=xxxxxxxxxxxxxxxx | ||||
|       - VRKN_SONARR_1_SSL=false | ||||
|       - VRKN_SONARR_1_VERIFY_SSL=false | ||||
|       - VRKN_SONARR_1_MISSING_DAYS=7 | ||||
|       - VRKN_SONARR_1_MISSING_DAYS_RUN_SECONDS=300 | ||||
|       - VRKN_SONARR_1_FUTURE_DAYS=1 | ||||
|       - VRKN_SONARR_1_FUTURE_DAYS_RUN_SECONDS=300 | ||||
|       - VRKN_SONARR_1_QUEUE=true | ||||
|       - VRKN_SONARR_1_QUEUE_RUN_SECONDS=300 | ||||
|       - VRKN_SONARR_2_URL=sonarr2.domain.tld:8989 | ||||
|       - VRKN_SONARR_2_APIKEY=yyyyyyyyyyyyyyyy | ||||
|       - VRKN_SONARR_2_SSL=false | ||||
|       - VRKN_SONARR_2_VERIFY_SSL=false | ||||
|       - VRKN_SONARR_2_MISSING_DAYS=7 | ||||
|       - VRKN_SONARR_2_MISSING_DAYS_RUN_SECONDS=300 | ||||
|       - VRKN_SONARR_2_FUTURE_DAYS=1 | ||||
|       - VRKN_SONARR_2_FUTURE_DAYS_RUN_SECONDS=300 | ||||
|       - VRKN_SONARR_2_QUEUE=true | ||||
|       - VRKN_SONARR_2_QUEUE_RUN_SECONDS=300 | ||||
|       - VRKN_RADARR_1_URL=radarr1.domain.tld | ||||
|       - VRKN_RADARR_1_APIKEY=xxxxxxxxxxxxxxxx | ||||
|       - VRKN_RADARR_1_SSL=false | ||||
|       - VRKN_RADARR_1_VERIFY_SSL=false | ||||
|       - VRKN_RADARR_1_QUEUE=true | ||||
|       - VRKN_RADARR_1_QUEUE_RUN_SECONDS=300 | ||||
|       - VRKN_RADARR_1_GET_MISSING=true | ||||
|       - VRKN_RADARR_1_GET_MISSING_RUN_SECONDS=300 | ||||
|       - VRKN_RADARR_2_URL=radarr2.domain.tld | ||||
|       - VRKN_RADARR_2_APIKEY=yyyyyyyyyyyyyyyy | ||||
|       - VRKN_RADARR_2_SSL=false | ||||
|       - VRKN_RADARR_2_VERIFY_SSL=false | ||||
|       - VRKN_RADARR_2_QUEUE=true | ||||
|       - VRKN_RADARR_2_QUEUE_RUN_SECONDS=300 | ||||
|       - VRKN_RADARR_2_GET_MISSING=true | ||||
|       - VRKN_RADARR_2_GET_MISSING_RUN_SECONDS=300 | ||||
|       - VRKN_LIDARR_1_URL=lidarr1.domain.tld:8686 | ||||
|       - VRKN_LIDARR_1_APIKEY=xxxxxxxxxxxxxxxx | ||||
|       - VRKN_LIDARR_1_SSL=false | ||||
|       - VRKN_LIDARR_1_VERIFY_SSL=false | ||||
|       - VRKN_LIDARR_1_MISSING_DAYS=30 | ||||
|       - VRKN_LIDARR_1_MISSING_DAYS_RUN_SECONDS=300 | ||||
|       - VRKN_LIDARR_1_FUTURE_DAYS=30 | ||||
|       - VRKN_LIDARR_1_FUTURE_DAYS_RUN_SECONDS=300 | ||||
|       - VRKN_LIDARR_1_QUEUE=true | ||||
|       - VRKN_LIDARR_1_QUEUE_RUN_SECONDS=300 | ||||
|       - VRKN_OMBI_1_URL=ombi.domain.tld | ||||
|       - VRKN_OMBI_1_APIKEY=xxxxxxxxxxxxxxxx | ||||
|       - VRKN_OMBI_1_SSL=false | ||||
|       - VRKN_OMBI_1_VERIFY_SSL=false | ||||
|       - VRKN_OMBI_1_GET_REQUEST_TYPE_COUNTS=true | ||||
|       - VRKN_OMBI_1_REQUEST_TYPE_RUN_SECONDS=300 | ||||
|       - VRKN_OMBI_1_GET_REQUEST_TOTAL_COUNTS=true | ||||
|       - VRKN_OMBI_1_REQUEST_TOTAL_RUN_SECONDS=300 | ||||
|       - VRKN_OMBI_1_GET_ISSUE_STATUS_COUNTS=true | ||||
|       - VRKN_OMBI_1_ISSUE_STATUS_RUN_SECONDS=300 | ||||
|       - VRKN_SICKCHILL_1_URL=sickchill.domain.tld:8081 | ||||
|       - VRKN_SICKCHILL_1_APIKEY=xxxxxxxxxxxxxxxx | ||||
|       - VRKN_SICKCHILL_1_SSL=false | ||||
|       - VRKN_SICKCHILL_1_VERIFY_SSL=false | ||||
|       - VRKN_SICKCHILL_1_GET_MISSING=true | ||||
|       - VRKN_SICKCHILL_1_GET_MISSING_RUN_SECONDS=300 | ||||
|     depends_on: | ||||
|       - influxdb | ||||
|     restart: unless-stopped | ||||
|  |  | |||
requirements.txt
|  | @ -2,9 +2,10 @@ | |||
| # Potential requirements. | ||||
| # pip3 install -r requirements.txt | ||||
| #--------------------------------------------------------- | ||||
| requests==2.21 | ||||
| requests==2.25.1 | ||||
| geoip2==2.9.0 | ||||
| influxdb==5.2.0 | ||||
| influxdb-client==1.30.0 | ||||
| schedule==0.6.0 | ||||
| distro==1.4.0 | ||||
| urllib3==1.24.2 | ||||
| urllib3==1.26.5 | ||||
|  |  | |||
utilities/historical_tautulli_import.py
|  | @ -41,7 +41,7 @@ if __name__ == "__main__": | |||
|     DBMANAGER = DBManager(CONFIG.influx_server) | ||||
| 
 | ||||
|     if CONFIG.tautulli_enabled: | ||||
|         GEOIPHANDLER = GeoIPHandler(DATA_FOLDER) | ||||
|         GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key) | ||||
|         for server in CONFIG.tautulli_servers: | ||||
|             TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER) | ||||
|             TAUTULLI.get_historical(days=opts.days) | ||||
|  |  | |||
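GeoIPHandler now receives the MaxMind license key because GeoLite2 downloads require one. A hedged sketch of the kind of download the handler would perform; the URL is MaxMind's documented licensed-download endpoint, and the handler's real internals are not shown in this diff:

```python
import requests

def download_geolite2_city(license_key: str, dest: str = 'GeoLite2-City.tar.gz') -> None:
    # MaxMind's permalink for licensed GeoLite2 downloads (assumption: handler
    # uses something equivalent to this)
    url = 'https://download.maxmind.com/app/geoip_download'
    params = {'edition_id': 'GeoLite2-City', 'license_key': license_key,
              'suffix': 'tar.gz'}
    resp = requests.get(url, params=params, timeout=30)
    resp.raise_for_status()
    with open(dest, 'wb') as f:
        f.write(resp.content)
```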
unRAID Docker template (XML)
|  | @ -51,5 +51,6 @@ | |||
|   <Labels/> | ||||
|   <Config Name="PGID" Target="PGID" Default="" Mode="" Description="Container Variable: PGID" Type="Variable" Display="always" Required="true" Mask="false">99</Config> | ||||
|   <Config Name="PUID" Target="PUID" Default="" Mode="" Description="Container Variable: PUID" Type="Variable" Display="always" Required="true" Mask="false">100</Config> | ||||
|   <Config Name="Debug" Target="DEBUG" Default="False" Mode="" Description="Turn Debug on or off" Type="Variable" Display="always" Required="false" Mask="false">False</Config> | ||||
|   <Config Name="Varken DataDir" Target="/config" Default="" Mode="rw" Description="Container Path: /config" Type="Path" Display="advanced-hide" Required="true" Mask="false">/mnt/user/appdata/varken</Config> | ||||
| </Container> | ||||
varken/dbmanager.py
|  | @ -1,45 +1,91 @@ | |||
| import re | ||||
| from sys import exit | ||||
| from logging import getLogger | ||||
| from influxdb import InfluxDBClient | ||||
| from requests.exceptions import ConnectionError | ||||
| from influxdb.exceptions import InfluxDBServerError | ||||
| from influxdb_client import InfluxDBClient, BucketRetentionRules | ||||
| from influxdb_client.client.write_api import SYNCHRONOUS | ||||
| from influxdb_client.client.exceptions import InfluxDBError | ||||
| from urllib3.exceptions import NewConnectionError | ||||
| 
 | ||||
| 
 | ||||
| class DBManager(object): | ||||
|     def __init__(self, server): | ||||
|         self.server = server | ||||
|         self.logger = getLogger() | ||||
|         self.bucket = "varken" | ||||
| 
 | ||||
|         if self.server.url == "influxdb.domain.tld": | ||||
|             self.logger.critical("You have not configured your varken.ini. Please read Wiki page for configuration") | ||||
|             exit() | ||||
|         self.influx = InfluxDBClient(host=self.server.url, port=self.server.port, username=self.server.username, | ||||
|                                      password=self.server.password, ssl=self.server.ssl, database='varken', | ||||
|                                      verify_ssl=self.server.verify_ssl) | ||||
| 
 | ||||
|         url = self.server.url | ||||
|         if 'http' not in url: | ||||
|             scheme = 'http' | ||||
|             if self.server.ssl: | ||||
|                 scheme = 'https' | ||||
|             url = "{}://{}:{}".format(scheme, self.server.url, self.server.port) | ||||
|         token = f'{self.server.username}:{self.server.password}' | ||||
| 
 | ||||
|         self.influx = InfluxDBClient(url=url, token=token, | ||||
|                                      verify_ssl=self.server.verify_ssl, org=self.server.org) | ||||
| 
 | ||||
|         try: | ||||
|             version = self.influx.request('ping', expected_response_code=204).headers['X-Influxdb-Version'] | ||||
|             version = self.influx.version() | ||||
|             self.logger.info('Influxdb version: %s', version) | ||||
|         except ConnectionError: | ||||
|             self.logger.critical("Error testing connection to InfluxDB. Please check your url/hostname") | ||||
|             match = re.match(r'v?(\d+)\.', version) | ||||
|             if match: | ||||
|                 self.version = int(match[1]) | ||||
|                 self.logger.info("Using InfluxDB API v%s", self.version) | ||||
|             else: | ||||
|                 self.logger.critical("Unknown influxdb version") | ||||
|                 exit(1) | ||||
|         except NewConnectionError: | ||||
|             self.logger.critical("Error getting InfluxDB version number. Please check your url/hostname are valid") | ||||
|             exit(1) | ||||
| 
 | ||||
|         databases = [db['name'] for db in self.influx.get_list_database()] | ||||
|         if self.version >= 2: | ||||
|             # If we pass username/password to a v1 server, it breaks :( | ||||
|             self.influx = InfluxDBClient(url=url, username=self.server.username, | ||||
|                                          password=self.server.password, | ||||
|                                          verify_ssl=self.server.verify_ssl, org=self.server.org) | ||||
|             self.create_v2_bucket() | ||||
|         else: | ||||
|             self.create_v1_database() | ||||
| 
 | ||||
|         if 'varken' not in databases: | ||||
|     def create_v2_bucket(self): | ||||
|         if not self.influx.buckets_api().find_bucket_by_name(self.bucket): | ||||
|             self.logger.info("Creating varken bucket") | ||||
| 
 | ||||
|             retention = BucketRetentionRules(type="expire", every_seconds=60 * 60 * 24 * 30, | ||||
|                                              shard_group_duration_seconds=60 * 60) | ||||
|             self.influx.buckets_api().create_bucket(bucket_name=self.bucket, | ||||
|                                                     retention_rules=retention) | ||||
| 
 | ||||
|     def create_v1_database(self): | ||||
|         from influxdb import InfluxDBClient | ||||
|         client = InfluxDBClient(host=self.server.url, port=self.server.port, username=self.server.username, | ||||
|                                 password=self.server.password, ssl=self.server.ssl, database=self.bucket, | ||||
|                                 verify_ssl=self.server.verify_ssl) | ||||
|         databases = [db['name'] for db in client.get_list_database()] | ||||
| 
 | ||||
|         if self.bucket not in databases: | ||||
|             self.logger.info("Creating varken database") | ||||
|             self.influx.create_database('varken') | ||||
|             client.create_database(self.bucket) | ||||
| 
 | ||||
|             retention_policies = [policy['name'] for policy in | ||||
|                                   self.influx.get_list_retention_policies(database='varken')] | ||||
|                                   client.get_list_retention_policies(database=self.bucket)] | ||||
|             if 'varken 30d-1h' not in retention_policies: | ||||
|                 self.logger.info("Creating varken retention policy (30d-1h)") | ||||
|                 self.influx.create_retention_policy(name='varken 30d-1h', duration='30d', replication='1', | ||||
|                                                     database='varken', default=True, shard_duration='1h') | ||||
|                 client.create_retention_policy(name='varken 30d-1h', duration='30d', replication='1', | ||||
|                                                database=self.bucket, default=True, shard_duration='1h') | ||||
| 
 | ||||
|         self.bucket = f'{self.bucket}/varken 30d-1h' | ||||
| 
 | ||||
|     def write_points(self, data): | ||||
|         d = data | ||||
|         self.logger.debug('Writing Data to InfluxDB %s', d) | ||||
|         write_api = self.influx.write_api(write_options=SYNCHRONOUS) | ||||
|         try: | ||||
|             self.influx.write_points(d) | ||||
|         except (InfluxDBServerError, ConnectionError) as e: | ||||
|             write_api.write(bucket=self.bucket, record=data) | ||||
|         except (InfluxDBError, NewConnectionError) as e: | ||||
|             self.logger.error('Error writing data to influxdb. Dropping this set of data. ' | ||||
|                               'Check your database! Error: %s', e) | ||||
|  |  | |||
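The rewritten DBManager boils down to: ping the server for a version string through influxdb-client, parse the major version, then write either to a v2 bucket or, on 1.8, to a `database/retention policy` bucket string. A condensed runnable sketch under the same assumptions as the example ini (local server, `root:root` credentials, `-` org):

```python
import re
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS

client = InfluxDBClient(url='http://localhost:8086', token='root:root', org='-')
major = int(re.match(r'v?(\d+)\.', client.version())[1])  # '1.8.10' -> 1, 'v2.7.1' -> 2

# On a 1.8 server the bucket string encodes 'database/retention policy'
bucket = 'varken' if major >= 2 else 'varken/varken 30d-1h'

write_api = client.write_api(write_options=SYNCHRONOUS)
write_api.write(bucket=bucket, record=[{
    "measurement": "Overseerr",
    "tags": {"type": "Request_Counts"},
    "fields": {"total": 42},
}])
```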
varken/iniparser.py
|  | @ -9,7 +9,7 @@ from configparser import ConfigParser, NoOptionError, NoSectionError | |||
| from varken.varkenlogger import BlacklistFilter | ||||
| from varken.structures import SickChillServer, UniFiServer | ||||
| from varken.helpers import clean_sid_check, rfc1918_ip_check, boolcheck | ||||
| from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer | ||||
| from varken.structures import SonarrServer, RadarrServer, OmbiServer, OverseerrServer, TautulliServer, InfluxServer | ||||
| 
 | ||||
| 
 | ||||
| class INIParser(object): | ||||
|  | @ -17,7 +17,7 @@ class INIParser(object): | |||
|         self.config = None | ||||
|         self.data_folder = data_folder | ||||
|         self.filtered_strings = None | ||||
|         self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'tautulli', 'sickchill', 'unifi'] | ||||
|         self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'overseerr', 'tautulli', 'sickchill', 'unifi'] | ||||
| 
 | ||||
|         self.logger = getLogger() | ||||
|         self.influx_server = InfluxServer() | ||||
|  | @ -154,13 +154,15 @@ class INIParser(object): | |||
| 
 | ||||
|             username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username')) | ||||
|             password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password')) | ||||
| 
 | ||||
|             org = env.get('VRKN_INFLUXDB_ORG', self.config.get('influxdb', 'org')) | ||||
|         except NoOptionError as e: | ||||
|             self.logger.error('Missing key in %s. Error: %s', "influxdb", e) | ||||
|             self.rectify_ini() | ||||
|             return | ||||
| 
 | ||||
|         self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl, | ||||
|                                           verify_ssl=verify_ssl) | ||||
|                                           verify_ssl=verify_ssl, org=org) | ||||
| 
 | ||||
|         # Check for all enabled services | ||||
|         for service in self.services: | ||||
|  | @ -293,6 +295,27 @@ class INIParser(object): | |||
|                                                 issue_status_counts=issue_status_counts, | ||||
|                                                 issue_status_run_seconds=issue_status_run_seconds) | ||||
| 
 | ||||
|                         if service == 'overseerr': | ||||
|                             get_request_total_counts = boolcheck(env.get( | ||||
|                                 f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS', | ||||
|                                 self.config.get(section, 'get_request_total_counts'))) | ||||
|                             request_total_run_seconds = int(env.get( | ||||
|                                 f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS', | ||||
|                                 self.config.getint(section, 'request_total_run_seconds'))) | ||||
|                             num_latest_requests_to_fetch = int(env.get( | ||||
|                                 f'VRKN_{envsection}_GET_LATEST_REQUESTS_TO_FETCH', | ||||
|                                 self.config.getint(section, 'num_latest_requests_to_fetch'))) | ||||
|                             num_latest_requests_seconds = int(env.get( | ||||
|                                 f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS', | ||||
|                                 self.config.getint(section, 'num_latest_requests_seconds'))) | ||||
| 
 | ||||
|                             server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey, | ||||
|                                                      verify_ssl=verify_ssl, | ||||
|                                                      get_request_total_counts=get_request_total_counts, | ||||
|                                                      request_total_run_seconds=request_total_run_seconds, | ||||
|                                                      num_latest_requests_to_fetch=num_latest_requests_to_fetch, | ||||
|                                                      num_latest_requests_seconds=num_latest_requests_seconds) | ||||
| 
 | ||||
|                         if service == 'sickchill': | ||||
|                             get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING', | ||||
|                                                             self.config.get(section, 'get_missing'))) | ||||
|  |  | |||
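Every option resolved above follows one precedence rule: an environment variable named `VRKN_<SECTION>_<OPTION>` wins, otherwise the ini value is used. A self-contained sketch of that precedence, with illustrative values:

```python
from os import environ as env
from configparser import ConfigParser

config = ConfigParser()
config.read_string("[influxdb]\norg = -\n")

env['VRKN_INFLUXDB_ORG'] = 'my-org'  # e.g. exported via docker-compose
org = env.get('VRKN_INFLUXDB_ORG', config.get('influxdb', 'org'))
assert org == 'my-org'

del env['VRKN_INFLUXDB_ORG']
org = env.get('VRKN_INFLUXDB_ORG', config.get('influxdb', 'org'))
assert org == '-'  # falls back to the ini default
```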
							
								
								
									
varken/overseerr.py (133 changes, new file)
							|  | @ -0,0 +1,133 @@ | |||
| from logging import getLogger | ||||
| from requests import Session, Request | ||||
| from datetime import datetime, timezone | ||||
| 
 | ||||
| from varken.helpers import connection_handler, hashit | ||||
| from varken.structures import OverseerrRequestCounts | ||||
| 
 | ||||
| 
 | ||||
| class OverseerrAPI(object): | ||||
|     def __init__(self, server, dbmanager): | ||||
|         self.dbmanager = dbmanager | ||||
|         self.server = server | ||||
|         # Create session to reduce server web thread load | ||||
|         self.session = Session() | ||||
|         self.session.headers = {'X-Api-Key': self.server.api_key} | ||||
|         self.logger = getLogger() | ||||
| 
 | ||||
|     def __repr__(self): | ||||
|         return f"<overseerr-{self.server.id}>" | ||||
| 
 | ||||
|     def get_request_counts(self): | ||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||
|         endpoint = '/api/v1/request/count' | ||||
| 
 | ||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) | ||||
|         get_req = connection_handler(self.session, req, self.server.verify_ssl) | ||||
| 
 | ||||
|         if not get_req: | ||||
|             return | ||||
| 
 | ||||
|         requests = OverseerrRequestCounts(**get_req) | ||||
|         influx_payload = [ | ||||
|             { | ||||
|                 "measurement": "Overseerr", | ||||
|                 "tags": { | ||||
|                     "type": "Request_Counts" | ||||
|                 }, | ||||
|                 "time": now, | ||||
|                 "fields": { | ||||
|                     "pending": requests.pending, | ||||
|                     "approved": requests.approved, | ||||
|                     "processing": requests.processing, | ||||
|                     "available": requests.available, | ||||
|                     "total": requests.total, | ||||
|                     "movies": requests.movie, | ||||
|                     "tv": requests.tv, | ||||
|                     "declined": requests.declined | ||||
|                 } | ||||
|             } | ||||
|         ] | ||||
| 
 | ||||
|         if influx_payload: | ||||
|             self.dbmanager.write_points(influx_payload) | ||||
|         else: | ||||
|             self.logger.warning("No data to send to influx for overseerr-request-counts instance, discarding.") | ||||
| 
 | ||||
|     def get_latest_requests(self): | ||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||
|         endpoint = '/api/v1/request?take=' + str(self.server.num_latest_requests_to_fetch) + '&filter=all&sort=added' | ||||
|         movie_endpoint = '/api/v1/movie/' | ||||
|         tv_endpoint = '/api/v1/tv/' | ||||
| 
 | ||||
|         # GET THE LATEST n REQUESTS | ||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) | ||||
|         get_latest_req = connection_handler(self.session, req, self.server.verify_ssl) | ||||
| 
 | ||||
|         # RETURN NOTHING IF NO RESULTS | ||||
|         if not get_latest_req: | ||||
|             self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.") | ||||
|             return | ||||
| 
 | ||||
|         influx_payload = [] | ||||
| 
 | ||||
|         # Request Type: Movie = 1, TV Show = 0 | ||||
|         for result in get_latest_req['results']: | ||||
|             if result['type'] == 'tv': | ||||
|                 req = self.session.prepare_request(Request('GET', | ||||
|                                                            self.server.url + | ||||
|                                                            tv_endpoint + | ||||
|                                                            str(result['media']['tmdbId']))) | ||||
|                 get_tv_req = connection_handler(self.session, req, self.server.verify_ssl) | ||||
|                 hash_id = hashit(f'{get_tv_req["id"]}{get_tv_req["name"]}') | ||||
| 
 | ||||
|                 influx_payload.append( | ||||
|                     { | ||||
|                         "measurement": "Overseerr", | ||||
|                         "tags": { | ||||
|                             "type": "Requests", | ||||
|                             "server": self.server.id, | ||||
|                             "request_type": 0, | ||||
|                             "status": get_tv_req['mediaInfo']['status'], | ||||
|                             "title": get_tv_req['name'], | ||||
|                             "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['displayName'], | ||||
|                             "requested_date": get_tv_req['mediaInfo']['requests'][0]['createdAt'] | ||||
|                         }, | ||||
|                         "time": now, | ||||
|                         "fields": { | ||||
|                             "hash": hash_id | ||||
|                         } | ||||
|                     } | ||||
|                 ) | ||||
| 
 | ||||
|             if result['type'] == 'movie': | ||||
|                 req = self.session.prepare_request(Request('GET', | ||||
|                                                            self.server.url + | ||||
|                                                            movie_endpoint + | ||||
|                                                            str(result['media']['tmdbId']))) | ||||
|                 get_movie_req = connection_handler(self.session, req, self.server.verify_ssl) | ||||
|                 hash_id = hashit(f'{get_movie_req["id"]}{get_movie_req["title"]}') | ||||
| 
 | ||||
|                 influx_payload.append( | ||||
|                     { | ||||
|                         "measurement": "Overseerr", | ||||
|                         "tags": { | ||||
|                             "type": "Requests", | ||||
|                             "server": self.server.id, | ||||
|                             "request_type": 1, | ||||
|                             "status": get_movie_req['mediaInfo']['status'], | ||||
|                             "title": get_movie_req['title'], | ||||
|                             "requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['displayName'], | ||||
|                             "requested_date": get_movie_req['mediaInfo']['requests'][0]['createdAt'] | ||||
|                         }, | ||||
|                         "time": now, | ||||
|                         "fields": { | ||||
|                             "hash": hash_id | ||||
|                         } | ||||
|                     } | ||||
|                 ) | ||||
| 
 | ||||
|         if influx_payload: | ||||
|             self.dbmanager.write_points(influx_payload) | ||||
|         else: | ||||
|             self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.") | ||||
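Stripped of the influx plumbing, the new collector makes two kinds of calls against Overseerr's v1 API, shown here as a standalone sketch; the host and API key are placeholders:

```python
from requests import Session

session = Session()
session.headers = {'X-Api-Key': 'xxxxxxxxxxxxxxxx'}
base = 'http://overseerr.domain.tld'

# Aggregate counts, as used by get_request_counts()
counts = session.get(base + '/api/v1/request/count').json()
print(counts['pending'], counts['approved'], counts['total'])

# Latest n requests, as used by get_latest_requests()
latest = session.get(base + '/api/v1/request',
                     params={'take': 10, 'filter': 'all', 'sort': 'added'}).json()
for result in latest['results']:
    # each result is then resolved against the tv or movie detail endpoint
    detail_path = '/api/v1/tv/' if result['type'] == 'tv' else '/api/v1/movie/'
    print(detail_path + str(result['media']['tmdbId']))
```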
varken/radarr.py
|  | @ -2,7 +2,7 @@ from logging import getLogger | |||
| from requests import Session, Request | ||||
| from datetime import datetime, timezone | ||||
| 
 | ||||
| from varken.structures import RadarrMovie, Queue | ||||
| from varken.structures import QueuePages, RadarrMovie, RadarrQueue | ||||
| from varken.helpers import hashit, connection_handler | ||||
| 
 | ||||
| 
 | ||||
|  | @ -19,7 +19,7 @@ class RadarrAPI(object): | |||
|         return f"<radarr-{self.server.id}>" | ||||
| 
 | ||||
|     def get_missing(self): | ||||
|         endpoint = '/api/movie' | ||||
|         endpoint = '/api/v3/movie' | ||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||
|         influx_payload = [] | ||||
|         missing = [] | ||||
|  | @ -37,7 +37,7 @@ class RadarrAPI(object): | |||
|             return | ||||
| 
 | ||||
|         for movie in movies: | ||||
|             if movie.monitored and not movie.downloaded: | ||||
|             if movie.monitored and not movie.hasFile: | ||||
|                 if movie.isAvailable: | ||||
|                     ma = 0 | ||||
|                 else: | ||||
|  | @ -66,35 +66,53 @@ class RadarrAPI(object): | |||
|                 } | ||||
|             ) | ||||
| 
 | ||||
|         if influx_payload: | ||||
|             self.dbmanager.write_points(influx_payload) | ||||
|         else: | ||||
|             self.logger.warning("No data to send to influx for radarr-missing instance, discarding.") | ||||
| 
 | ||||
|     def get_queue(self): | ||||
|         endpoint = '/api/queue' | ||||
|         endpoint = '/api/v3/queue' | ||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||
|         influx_payload = [] | ||||
|         pageSize = 250 | ||||
|         params = {'pageSize': pageSize, 'includeMovie': True, 'includeUnknownMovieItems': False} | ||||
|         queueResponse = [] | ||||
|         queue = [] | ||||
| 
 | ||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) | ||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||
|         get = connection_handler(self.session, req, self.server.verify_ssl) | ||||
| 
 | ||||
|         if not get: | ||||
|             return | ||||
| 
 | ||||
|         for movie in get: | ||||
|             try: | ||||
|                 movie['movie'] = RadarrMovie(**movie['movie']) | ||||
|             except TypeError as e: | ||||
|                 self.logger.error('TypeError has occurred : %s while creating RadarrMovie structure', e) | ||||
|         response = QueuePages(**get) | ||||
|         queueResponse.extend(response.records) | ||||
| 
 | ||||
|         while response.totalRecords > response.page * response.pageSize: | ||||
|             page = response.page + 1 | ||||
|             params = {'pageSize': pageSize, 'page': page, 'includeMovie': True, 'includeUnknownMovieItems': False} | ||||
|             req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||
|             get = connection_handler(self.session, req, self.server.verify_ssl) | ||||
|             if not get: | ||||
|                 return | ||||
| 
 | ||||
|             response = QueuePages(**get) | ||||
|             queueResponse.extend(response.records) | ||||
| 
 | ||||
|         download_queue = [] | ||||
|         for queueItem in queueResponse: | ||||
|             try: | ||||
|             download_queue = [Queue(**movie) for movie in get] | ||||
|                 download_queue.append(RadarrQueue(**queueItem)) | ||||
|             except TypeError as e: | ||||
|             self.logger.error('TypeError has occurred : %s while creating Queue structure', e) | ||||
|                 self.logger.warning('TypeError has occurred : %s while creating RadarrQueue structure', e) | ||||
|                 return | ||||
|         if not download_queue: | ||||
|             self.logger.warning("No data to send to influx for radarr-queue instance, discarding.") | ||||
|             return | ||||
| 
 | ||||
|         for queue_item in download_queue: | ||||
|             movie = queue_item.movie | ||||
|             movie = RadarrMovie(**queue_item.movie) | ||||
| 
 | ||||
|             name = f'{movie.title} ({movie.year})' | ||||
| 
 | ||||
|  | @ -128,4 +146,7 @@ class RadarrAPI(object): | |||
|                 } | ||||
|             ) | ||||
| 
 | ||||
|         if influx_payload: | ||||
|             self.dbmanager.write_points(influx_payload) | ||||
|         else: | ||||
|             self.logger.warning("No data to send to influx for radarr-queue instance, discarding.") | ||||
|  |  | |||
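Both new queue fetchers page through `/api/v3/queue` with the same termination test: keep requesting the next page while `totalRecords > page * pageSize`. The arithmetic, isolated, with made-up numbers:

```python
def pages_needed(total_records: int, page_size: int) -> int:
    return -(-total_records // page_size)  # ceiling division

page, page_size, total_records = 1, 250, 613
while total_records > page * page_size:  # same condition as the loop above
    page += 1

assert page == pages_needed(total_records, page_size) == 3
```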
varken/sonarr.py
|  | @ -2,7 +2,7 @@ from logging import getLogger | |||
| from requests import Session, Request | ||||
| from datetime import datetime, timezone, date, timedelta | ||||
| 
 | ||||
| from varken.structures import Queue, SonarrTVShow | ||||
| from varken.structures import SonarrEpisode, SonarrTVShow, SonarrQueue, QueuePages | ||||
| from varken.helpers import hashit, connection_handler | ||||
| 
 | ||||
| 
 | ||||
|  | @ -19,16 +19,28 @@ class SonarrAPI(object): | |||
|     def __repr__(self): | ||||
|         return f"<sonarr-{self.server.id}>" | ||||
| 
 | ||||
|     def get_episode(self, id): | ||||
|         endpoint = '/api/v3/episode' | ||||
|         params = {'episodeIds': id} | ||||
| 
 | ||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||
|         get = connection_handler(self.session, req, self.server.verify_ssl) | ||||
| 
 | ||||
|         if not get: | ||||
|             return | ||||
| 
 | ||||
|         return SonarrEpisode(**get[0]) | ||||
| 
 | ||||
|     def get_calendar(self, query="Missing"): | ||||
|         endpoint = '/api/calendar/' | ||||
|         endpoint = '/api/v3/calendar/' | ||||
|         today = str(date.today()) | ||||
|         last_days = str(date.today() - timedelta(days=self.server.missing_days)) | ||||
|         future = str(date.today() + timedelta(days=self.server.future_days)) | ||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||
|         if query == "Missing": | ||||
|             params = {'start': last_days, 'end': today} | ||||
|             params = {'start': last_days, 'end': today, 'includeSeries': True} | ||||
|         else: | ||||
|             params = {'start': today, 'end': future} | ||||
|             params = {'start': today, 'end': future, 'includeSeries': True} | ||||
|         influx_payload = [] | ||||
|         air_days = [] | ||||
|         missing = [] | ||||
|  | @ -42,22 +54,24 @@ class SonarrAPI(object): | |||
|         tv_shows = [] | ||||
|         for show in get: | ||||
|             try: | ||||
|                 tv_shows.append(SonarrTVShow(**show)) | ||||
|                 tv_shows.append(SonarrEpisode(**show)) | ||||
|             except TypeError as e: | ||||
|                 self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. Data ' | ||||
|                 self.logger.error('TypeError has occurred : %s while creating SonarrEpisode structure for show. Data ' | ||||
|                                   'attempted is: %s', e, show) | ||||
| 
 | ||||
|         for show in tv_shows: | ||||
|             sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}' | ||||
|             if show.hasFile: | ||||
|         for episode in tv_shows: | ||||
|             tvShow = episode.series | ||||
|             sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}' | ||||
|             if episode.hasFile: | ||||
|                 downloaded = 1 | ||||
|             else: | ||||
|                 downloaded = 0 | ||||
|             if query == "Missing": | ||||
|                 if show.monitored and not downloaded: | ||||
|                     missing.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id)) | ||||
|                 if episode.monitored and not downloaded: | ||||
|                     missing.append((tvShow['title'], downloaded, sxe, episode.title, | ||||
|                                     episode.airDateUtc, episode.seriesId)) | ||||
|             else: | ||||
|                 air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id)) | ||||
|                 air_days.append((tvShow['title'], downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId)) | ||||
| 
 | ||||
|         for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing): | ||||
|             hash_id = hashit(f'{self.server.id}{series_title}{sxe}') | ||||
|  | @ -81,45 +95,66 @@ class SonarrAPI(object): | |||
|                 } | ||||
|             ) | ||||
| 
 | ||||
|         if influx_payload: | ||||
|             self.dbmanager.write_points(influx_payload) | ||||
|         else: | ||||
|             self.logger.warning("No data to send to influx for sonarr-calendar instance, discarding.") | ||||
| 
 | ||||
|     def get_queue(self): | ||||
|         influx_payload = [] | ||||
|         endpoint = '/api/queue' | ||||
|         endpoint = '/api/v3/queue' | ||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||
|         pageSize = 250 | ||||
|         params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True} | ||||
|         queueResponse = [] | ||||
|         queue = [] | ||||
| 
 | ||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) | ||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||
|         get = connection_handler(self.session, req, self.server.verify_ssl) | ||||
| 
 | ||||
|         if not get: | ||||
|             return | ||||
| 
 | ||||
|         response = QueuePages(**get) | ||||
|         queueResponse.extend(response.records) | ||||
| 
 | ||||
|         while response.totalRecords > response.page * response.pageSize: | ||||
|             page = response.page + 1 | ||||
|             params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True} | ||||
|             req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||
|             get = connection_handler(self.session, req, self.server.verify_ssl) | ||||
|             if not get: | ||||
|                 return | ||||
| 
 | ||||
|             response = QueuePages(**get) | ||||
|             queueResponse.extend(response.records) | ||||
| 
 | ||||
|         download_queue = [] | ||||
|         for show in get: | ||||
|         for queueItem in queueResponse: | ||||
|             try: | ||||
|                 download_queue.append(Queue(**show)) | ||||
|                 download_queue.append(SonarrQueue(**queueItem)) | ||||
|             except TypeError as e: | ||||
|                 self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: ' | ||||
|                                   '%s', e, show) | ||||
|                                   '%s', e, queueItem) | ||||
|         if not download_queue: | ||||
|             return | ||||
| 
 | ||||
|         for show in download_queue: | ||||
|         for queueItem in download_queue: | ||||
|             tvShow = SonarrTVShow(**queueItem.series) | ||||
|             episode = SonarrEpisode(**queueItem.episode) | ||||
|             try: | ||||
|                 sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}" | ||||
|                 sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}" | ||||
|             except TypeError as e: | ||||
|                 self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \ | ||||
|                                   Remove invalid queue entry. Data attempted is: %s', e, show) | ||||
|                                   Remove invalid queue entry. Data attempted is: %s', e, queueItem) | ||||
|                 continue | ||||
| 
 | ||||
|             if show.protocol.upper() == 'USENET': | ||||
|             if queueItem.protocol.upper() == 'USENET': | ||||
|                 protocol_id = 1 | ||||
|             else: | ||||
|                 protocol_id = 0 | ||||
| 
 | ||||
|             queue.append((show.series['title'], show.episode['title'], show.protocol.upper(), | ||||
|                           protocol_id, sxe, show.id, show.quality['quality']['name'])) | ||||
|             queue.append((tvShow.title, episode.title, queueItem.protocol.upper(), | ||||
|                           protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name'])) | ||||
| 
 | ||||
|         for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue: | ||||
|             hash_id = hashit(f'{self.server.id}{series_title}{sxe}') | ||||
|  | @ -143,7 +178,8 @@ class SonarrAPI(object): | |||
|                     } | ||||
|                 } | ||||
|             ) | ||||
| 
 | ||||
|         if influx_payload: | ||||
|             self.dbmanager.write_points(influx_payload) | ||||
|         else: | ||||
|             self.logger.debug("No data to send to influx for sonarr instance, discarding.") | ||||
|             self.logger.warning("No data to send to influx for sonarr-queue instance, discarding.") | ||||
|  |  | |||
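The SxxEyy tag used in both the calendar and queue payloads is plain format-spec zero padding. A trimmed stand-in for SonarrEpisode demonstrates it; fields are reduced to the ones the format string touches:

```python
from typing import NamedTuple

class SonarrEpisode(NamedTuple):  # trimmed stand-in for the PR's structure
    seasonNumber: int = None
    episodeNumber: int = None
    title: str = None

episode = SonarrEpisode(seasonNumber=2, episodeNumber=5, title='Example Episode')
sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}'
assert sxe == 'S02E05'
```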
varken/structures.py
|  | @ -18,6 +18,7 @@ class InfluxServer(NamedTuple): | |||
|     url: str = 'localhost' | ||||
|     username: str = 'root' | ||||
|     verify_ssl: bool = False | ||||
|     org: str = '-' | ||||
| 
 | ||||
| 
 | ||||
| class SonarrServer(NamedTuple): | ||||
|  | @ -57,6 +58,17 @@ class OmbiServer(NamedTuple): | |||
|     verify_ssl: bool = False | ||||
| 
 | ||||
| 
 | ||||
| class OverseerrServer(NamedTuple): | ||||
|     api_key: str = None | ||||
|     id: int = None | ||||
|     url: str = None | ||||
|     verify_ssl: bool = False | ||||
|     get_request_total_counts: bool = False | ||||
|     request_total_run_seconds: int = 30 | ||||
|     num_latest_requests_to_fetch: int = 10 | ||||
|     num_latest_requests_seconds: int = 30 | ||||
| 
 | ||||
| 
 | ||||
| class TautulliServer(NamedTuple): | ||||
|     api_key: str = None | ||||
|     fallback_ip: str = None | ||||
|  | @ -91,22 +103,13 @@ class UniFiServer(NamedTuple): | |||
| 
 | ||||
| 
 | ||||
| # Shared | ||||
| class Queue(NamedTuple): | ||||
|     downloadId: str = None | ||||
|     episode: dict = None | ||||
|     estimatedCompletionTime: str = None | ||||
|     id: int = None | ||||
|     movie: dict = None | ||||
|     protocol: str = None | ||||
|     quality: dict = None | ||||
|     series: dict = None | ||||
|     size: float = None | ||||
|     sizeleft: float = None | ||||
|     status: str = None | ||||
|     statusMessages: list = None | ||||
|     timeleft: str = None | ||||
|     title: str = None | ||||
|     trackedDownloadStatus: str = None | ||||
| class QueuePages(NamedTuple): | ||||
|     page: int = None | ||||
|     pageSize: int = None | ||||
|     sortKey: str = None | ||||
|     sortDirection: str = None | ||||
|     totalRecords: int = None | ||||
|     records: list = None | ||||
| 
 | ||||
| 
 | ||||
| # Ombi Structures | ||||
|  | @ -127,8 +130,10 @@ class OmbiTVRequest(NamedTuple): | |||
|     childRequests: list = None | ||||
|     denied: bool = None | ||||
|     deniedReason: None = None | ||||
|     externalProviderId: str = None | ||||
|     id: int = None | ||||
|     imdbId: str = None | ||||
|     languageProfile: str = None | ||||
|     markedAsDenied: str = None | ||||
|     overview: str = None | ||||
|     posterPath: str = None | ||||
|  | @ -145,72 +150,159 @@ class OmbiTVRequest(NamedTuple): | |||
| 
 | ||||
| class OmbiMovieRequest(NamedTuple): | ||||
|     approved: bool = None | ||||
|     approved4K: bool = None | ||||
|     available: bool = None | ||||
|     available4K: bool = None | ||||
|     background: str = None | ||||
|     canApprove: bool = None | ||||
|     denied: bool = None | ||||
|     denied4K: None = None | ||||
|     deniedReason: None = None | ||||
|     deniedReason4K: None = None | ||||
|     digitalRelease: bool = None | ||||
|     digitalReleaseDate: None = None | ||||
|     has4KRequest: bool = None | ||||
|     id: int = None | ||||
|     imdbId: str = None | ||||
|     is4kRequest: bool = None | ||||
|     issueId: None = None | ||||
|     issues: None = None | ||||
|     langCode: str = None | ||||
|     languageCode: str = None | ||||
|     markedAsApproved: str = None | ||||
|     markedAsApproved4K: str = None | ||||
|     markedAsAvailable: None = None | ||||
|     markedAsAvailable4K: None = None | ||||
|     markedAsDenied: str = None | ||||
|     markedAsDenied4K: str = None | ||||
|     overview: str = None | ||||
|     posterPath: str = None | ||||
|     qualityOverride: int = None | ||||
|     released: bool = None | ||||
|     releaseDate: str = None | ||||
|     requestedByAlias: str = None | ||||
|     requestedDate: str = None | ||||
|     requestedDate4k: str = None | ||||
|     requestedUser: dict = None | ||||
|     requestedUserId: str = None | ||||
|     requestStatus: str = None | ||||
|     requestType: int = None | ||||
|     rootPathOverride: int = None | ||||
|     showSubscribe: bool = None | ||||
|     source: int = None | ||||
|     status: str = None | ||||
|     subscribed: bool = None | ||||
|     theMovieDbId: int = None | ||||
|     title: str = None | ||||
|     langCode: str = None | ||||
|     languageCode: str = None | ||||
|     requestedByAlias: str = None | ||||
|     requestStatus: str = None | ||||
| 
 | ||||
| 
 | ||||
| # Overseerr | ||||
| class OverseerrRequestCounts(NamedTuple): | ||||
|     pending: int = None | ||||
|     approved: int = None | ||||
|     processing: int = None | ||||
|     available: int = None | ||||
|     total: int = None | ||||
|     movie: int = None | ||||
|     tv: int = None | ||||
|     declined: int = None | ||||
| 
 | ||||
| 
 | ||||
| # Sonarr | ||||
| class SonarrTVShow(NamedTuple): | ||||
|     added: str = None | ||||
|     airTime: str = None | ||||
|     alternateTitles: list = None | ||||
|     certification: str = None | ||||
|     cleanTitle: str = None | ||||
|     ended: bool = None | ||||
|     firstAired: str = None | ||||
|     genres: list = None | ||||
|     id: int = None | ||||
|     images: list = None | ||||
|     imdbId: str = None | ||||
|     languageProfileId: int = None | ||||
|     monitored: bool = None | ||||
|     nextAiring: str = None | ||||
|     network: str = None | ||||
|     overview: str = None | ||||
|     path: str = None | ||||
|     previousAiring: str = None | ||||
|     qualityProfileId: int = None | ||||
|     ratings: dict = None | ||||
|     rootFolderPath: str = None | ||||
|     runtime: int = None | ||||
|     seasonFolder: bool = None | ||||
|     seasons: list = None | ||||
|     seriesType: str = None | ||||
|     sortTitle: str = None | ||||
|     statistics: dict = None | ||||
|     status: str = None | ||||
|     tags: list = None | ||||
|     title: str = None | ||||
|     titleSlug: str = None | ||||
|     tvdbId: int = None | ||||
|     tvMazeId: int = None | ||||
|     tvRageId: int = None | ||||
|     useSceneNumbering: bool = None | ||||
|     year: int = None | ||||
| 
 | ||||
| 
 | ||||
| class SonarrEpisode(NamedTuple): | ||||
|     absoluteEpisodeNumber: int = None | ||||
|     airDate: str = None | ||||
|     airDateUtc: str = None | ||||
|     episodeFile: dict = None | ||||
|     episodeFileId: int = None | ||||
|     episodeNumber: int = None | ||||
|     grabbed: bool = None | ||||
|     hasFile: bool = None | ||||
|     id: int = None | ||||
|     lastSearchTime: str = None | ||||
|     monitored: bool = None | ||||
|     overview: str = None | ||||
|     sceneAbsoluteEpisodeNumber: int = None | ||||
|     sceneEpisodeNumber: int = None | ||||
|     sceneSeasonNumber: int = None | ||||
|     seasonNumber: int = None | ||||
|     series: dict = None | ||||
|     seriesId: int = None | ||||
|     title: str = None | ||||
|     unverifiedSceneNumbering: bool = None | ||||
|     sceneAbsoluteEpisodeNumber: int = None | ||||
|     sceneEpisodeNumber: int = None | ||||
|     sceneSeasonNumber: int = None | ||||
|     series: SonarrTVShow = None | ||||
|     tvdbId: int = None | ||||
| 
 | ||||
| 
 | ||||
| class SonarrQueue(NamedTuple): | ||||
|     downloadClient: str = None | ||||
|     downloadId: str = None | ||||
|     episodeId: int = None | ||||
|     id: int = None | ||||
|     indexer: str = None | ||||
|     language: dict = None | ||||
|     protocol: str = None | ||||
|     quality: dict = None | ||||
|     size: float = None | ||||
|     sizeleft: float = None | ||||
|     status: str = None | ||||
|     statusMessages: list = None | ||||
|     title: str = None | ||||
|     trackedDownloadState: str = None | ||||
|     trackedDownloadStatus: str = None | ||||
|     seriesId: int = None | ||||
|     errorMessage: str = None | ||||
|     outputPath: str = None | ||||
|     series: SonarrTVShow = None | ||||
|     episode: SonarrEpisode = None | ||||
|     timeleft: str = None | ||||
|     estimatedCompletionTime: str = None | ||||
| 
 | ||||
| 
 | ||||
| # Radarr | ||||
| class RadarrMovie(NamedTuple): | ||||
|     added: str = None | ||||
|     addOptions: str = None | ||||
|     alternativeTitles: list = None | ||||
|     alternateTitles: list = None | ||||
|     certification: str = None | ||||
|     cleanTitle: str = None | ||||
|     downloaded: bool = None | ||||
|     collection: dict = None | ||||
|     digitalRelease: str = None | ||||
|     folderName: str = None | ||||
|     genres: list = None | ||||
|     hasFile: bool = None | ||||
|  | @ -219,32 +311,58 @@ class RadarrMovie(NamedTuple): | |||
|     imdbId: str = None | ||||
|     inCinemas: str = None | ||||
|     isAvailable: bool = None | ||||
|     lastInfoSync: str = None | ||||
|     minimumAvailability: str = None | ||||
|     monitored: bool = None | ||||
|     movieFile: dict = None | ||||
|     originalTitle: str = None | ||||
|     overview: str = None | ||||
|     path: str = None | ||||
|     pathState: str = None | ||||
|     physicalRelease: str = None | ||||
|     physicalReleaseNote: str = None | ||||
|     profileId: int = None | ||||
|     qualityProfileId: int = None | ||||
|     ratings: dict = None | ||||
|     runtime: int = None | ||||
|     secondaryYear: str = None | ||||
|     secondaryYear: int = None | ||||
|     secondaryYearSourceId: int = None | ||||
|     sizeOnDisk: int = None | ||||
|     sizeOnDisk: float = None | ||||
|     sortTitle: str = None | ||||
|     status: str = None | ||||
|     studio: str = None | ||||
|     tags: list = None | ||||
|     title: str = None | ||||
|     titleSlug: str = None | ||||
|     tmdbId: int = None | ||||
|     website: str = None | ||||
|     year: int = None | ||||
|     youTubeTrailerId: str = None | ||||
|     title: str = None | ||||
|     originalLanguage: str = None | ||||
|     addOptions: str = None | ||||
|     popularity: str = None | ||||
| 
 | ||||
| 
 | ||||
| # Radarr Queue | ||||
| class RadarrQueue(NamedTuple): | ||||
|     customFormats: list = None | ||||
|     downloadClient: str = None | ||||
|     downloadId: str = None | ||||
|     id: int = None | ||||
|     indexer: str = None | ||||
|     languages: list = None | ||||
|     movieId: int = None | ||||
|     protocol: str = None | ||||
|     quality: dict = None | ||||
|     size: float = None | ||||
|     sizeleft: float = None | ||||
|     status: str = None | ||||
|     statusMessages: list = None | ||||
|     title: str = None | ||||
|     trackedDownloadState: str = None | ||||
|     trackedDownloadStatus: str = None | ||||
|     timeleft: str = None | ||||
|     estimatedCompletionTime: str = None | ||||
|     errorMessage: str = None | ||||
|     outputPath: str = None | ||||
|     movie: RadarrMovie = None | ||||
|     timeleft: str = None | ||||
| 
 | ||||
| 
 | ||||
| # Sickchill | ||||
|  | @ -364,6 +482,7 @@ class TautulliStream(NamedTuple): | |||
|     reference_id: int = None | ||||
|     relay: int = None | ||||
|     relayed: int = None | ||||
|     row_id: int = None | ||||
|     section_id: str = None | ||||
|     secure: str = None | ||||
|     selected: int = None | ||||
|  | @ -402,6 +521,7 @@ class TautulliStream(NamedTuple): | |||
|     stream_video_codec: str = None | ||||
|     stream_video_codec_level: str = None | ||||
|     stream_video_decision: str = None | ||||
|     stream_video_dynamic_range: str = None | ||||
|     stream_video_framerate: str = None | ||||
|     stream_video_full_resolution: str = None | ||||
|     stream_video_height: str = None | ||||
|  | @ -461,6 +581,7 @@ class TautulliStream(NamedTuple): | |||
|     video_codec: str = None | ||||
|     video_codec_level: str = None | ||||
|     video_decision: str = None | ||||
|     video_dynamic_range: str = None | ||||
|     video_frame_rate: str = None | ||||
|     video_framerate: str = None | ||||
|     video_full_resolution: str = None | ||||
|  | @ -491,7 +612,9 @@ class LidarrQueue(NamedTuple): | |||
|     sizeleft: float = None | ||||
|     status: str = None | ||||
|     trackedDownloadStatus: str = None | ||||
|     trackedDownloadState: str = None | ||||
|     statusMessages: list = None | ||||
|     errorMessage: str = None | ||||
|     downloadId: str = None | ||||
|     protocol: str = None | ||||
|     downloadClient: str = None | ||||
|  | @ -499,6 +622,7 @@ class LidarrQueue(NamedTuple): | |||
|     outputPath: str = None | ||||
|     downloadForced: bool = None | ||||
|     id: int = None | ||||
|     estimatedCompletionTime: str = None | ||||
| 
 | ||||
| 
 | ||||
| class LidarrAlbum(NamedTuple): | ||||
|  |  | |||
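Why so many new fields land in these NamedTuples: Varken instantiates them with `**payload` straight from each service's JSON, and a key the class does not declare raises TypeError, so every field a newer API version returns must be declared. A minimal demonstration with a trimmed stand-in:

```python
from typing import NamedTuple

class RadarrMovie(NamedTuple):  # trimmed stand-in for the real structure
    title: str = None
    year: int = None

try:
    RadarrMovie(**{'title': 'Example', 'year': 2020, 'popularity': '7.4'})
except TypeError as e:
    # unexpected keyword argument 'popularity'
    print('TypeError has occurred :', e)
```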
varken/tautulli.py
|  | @ -2,7 +2,7 @@ from logging import getLogger | |||
| from requests import Session, Request | ||||
| from geoip2.errors import AddressNotFoundError | ||||
| from datetime import datetime, timezone, date, timedelta | ||||
| from influxdb.exceptions import InfluxDBClientError | ||||
| from influxdb_client.client.exceptions import InfluxDBError | ||||
| 
 | ||||
| from varken.structures import TautulliStream | ||||
| from varken.helpers import hashit, connection_handler, itemgetter_with_default | ||||
|  | @ -129,6 +129,7 @@ class TautulliAPI(object): | |||
|                     "tags": { | ||||
|                         "type": "Session", | ||||
|                         "session_id": session.session_id, | ||||
|                         "ip_address": session.ip_address, | ||||
|                         "friendly_name": session.friendly_name, | ||||
|                         "username": session.username, | ||||
|                         "title": session.full_title, | ||||
|  | @ -327,6 +328,7 @@ class TautulliAPI(object): | |||
|                     "tags": { | ||||
|                         "type": "Session", | ||||
|                         "session_id": session.session_id, | ||||
|                         "ip_address": session.ip_address, | ||||
|                         "friendly_name": session.friendly_name, | ||||
|                         "username": session.user, | ||||
|                         "title": session.full_title, | ||||
|  | @ -361,7 +363,7 @@ class TautulliAPI(object): | |||
|             ) | ||||
|             try: | ||||
|                 self.dbmanager.write_points(influx_payload) | ||||
|             except InfluxDBClientError as e: | ||||
|             except InfluxDBError as e: | ||||
|                 if "beyond retention policy" in str(e): | ||||
|                     self.logger.debug('Only imported 30 days of data per retention policy') | ||||
|                 else: | ||||
|  |  | |||
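During a historical import, points older than the 30-day retention policy are rejected by InfluxDB, and the change above downgrades exactly that error to debug. The same guard, isolated; the write callable and logger are parameters here purely for illustration:

```python
from influxdb_client.client.exceptions import InfluxDBError

def write_with_retention_guard(write_points, payload, logger):
    try:
        write_points(payload)
    except InfluxDBError as e:
        if 'beyond retention policy' in str(e):
            # expected when importing more history than the policy keeps
            logger.debug('Only imported 30 days of data per retention policy')
        else:
            logger.error('Error writing data to influxdb: %s', e)
```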