4268 lines
155 KiB
Diff
4268 lines
155 KiB
Diff
From 2b1f6e0699bdcf6dfa3b8451f653467df770c3f7 Mon Sep 17 00:00:00 2001
|
|
From: mal5305 <malcolm.e.rogers@gmail.com>
|
|
Date: Fri, 21 May 2021 17:29:00 -0400
|
|
Subject: [PATCH 01/65] #203
|
|
|
|
---
|
|
varken/structures.py | 2 ++
|
|
1 file changed, 2 insertions(+)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index deb4017..da894ff 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -127,8 +127,10 @@ class OmbiTVRequest(NamedTuple):
|
|
childRequests: list = None
|
|
denied: bool = None
|
|
deniedReason: None = None
|
|
+ externalProviderId: str = None
|
|
id: int = None
|
|
imdbId: str = None
|
|
+ languageProfile: str = None
|
|
markedAsDenied: str = None
|
|
overview: str = None
|
|
posterPath: str = None
|
|
--
|
|
GitLab
|
|
|
|
|
|
From c47b7c2c3b86fc6c6d45f12f8f694a941a793176 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Thu, 3 Jun 2021 13:35:53 -0400
|
|
Subject: [PATCH 02/65] Update docker compose to specify influxdb:1.8.4
|
|
|
|
---
|
|
docker-compose.yml | 4 ++--
|
|
1 file changed, 2 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/docker-compose.yml b/docker-compose.yml
|
|
index a3cb252..c75e8a7 100644
|
|
--- a/docker-compose.yml
|
|
+++ b/docker-compose.yml
|
|
@@ -6,7 +6,7 @@ services:
|
|
influxdb:
|
|
hostname: influxdb
|
|
container_name: influxdb
|
|
- image: influxdb
|
|
+ image: influxdb:1.8.4
|
|
networks:
|
|
- internal
|
|
volumes:
|
|
@@ -128,4 +128,4 @@ services:
|
|
depends_on:
|
|
- influxdb
|
|
- varken
|
|
- restart: unless-stopped
|
|
\ No newline at end of file
|
|
+ restart: unless-stopped
|
|
--
|
|
GitLab
|
|
|
|
|
|
From c6b8dde29a334d03d821abce28bf57ecc3aabc78 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Thu, 3 Jun 2021 13:40:57 -0400
|
|
Subject: [PATCH 03/65] Update requirements to use urllib3==1.26.5
|
|
|
|
---
|
|
requirements.txt | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/requirements.txt b/requirements.txt
|
|
index 38e1312..c5c790a 100644
|
|
--- a/requirements.txt
|
|
+++ b/requirements.txt
|
|
@@ -7,4 +7,4 @@ geoip2==2.9.0
|
|
influxdb==5.2.0
|
|
schedule==0.6.0
|
|
distro==1.4.0
|
|
-urllib3==1.24.2
|
|
\ No newline at end of file
|
|
+urllib3==1.26.5
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 57028cd26c1a182a6c0f104d6a108fc5ed3902e9 Mon Sep 17 00:00:00 2001
|
|
From: Robin <19610103+RobinDadswell@users.noreply.github.com>
|
|
Date: Fri, 17 Sep 2021 22:55:02 +0000
|
|
Subject: [PATCH 04/65] updated to support Radarr and Sonarr V3 Api
|
|
|
|
---
|
|
varken/radarr.py | 54 +++++++++++++------
|
|
varken/sonarr.py | 87 ++++++++++++++++++++++--------
|
|
varken/structures.py | 125 ++++++++++++++++++++++++++++++++-----------
|
|
3 files changed, 197 insertions(+), 69 deletions(-)
|
|
|
|
diff --git a/varken/radarr.py b/varken/radarr.py
|
|
index 6692ddf..4a6e81a 100644
|
|
--- a/varken/radarr.py
|
|
+++ b/varken/radarr.py
|
|
@@ -2,7 +2,7 @@ from logging import getLogger
|
|
from requests import Session, Request
|
|
from datetime import datetime, timezone
|
|
|
|
-from varken.structures import RadarrMovie, Queue
|
|
+from varken.structures import QueuePages, RadarrMovie, RadarrQueue
|
|
from varken.helpers import hashit, connection_handler
|
|
|
|
|
|
@@ -18,8 +18,19 @@ class RadarrAPI(object):
|
|
def __repr__(self):
|
|
return f"<radarr-{self.server.id}>"
|
|
|
|
+ def get_movie(self,id):
|
|
+ endpoint = '/api/v3/movie/'
|
|
+
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint + str(id)))
|
|
+ get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+
|
|
+ if not get:
|
|
+ return
|
|
+
|
|
+ return RadarrMovie(**get)
|
|
+
|
|
def get_missing(self):
|
|
- endpoint = '/api/movie'
|
|
+ endpoint = '/api/v3/movie'
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
influx_payload = []
|
|
missing = []
|
|
@@ -37,7 +48,7 @@ class RadarrAPI(object):
|
|
return
|
|
|
|
for movie in movies:
|
|
- if movie.monitored and not movie.downloaded:
|
|
+ if movie.monitored and not movie.hasFile:
|
|
if movie.isAvailable:
|
|
ma = 0
|
|
else:
|
|
@@ -69,32 +80,45 @@ class RadarrAPI(object):
|
|
self.dbmanager.write_points(influx_payload)
|
|
|
|
def get_queue(self):
|
|
- endpoint = '/api/queue'
|
|
+ endpoint = '/api/v3/queue'
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
influx_payload = []
|
|
+ pageSize = 250
|
|
+ params = {'pageSize': pageSize}
|
|
+ queueResponse = []
|
|
queue = []
|
|
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
-
|
|
if not get:
|
|
return
|
|
+
|
|
+ response = QueuePages(**get)
|
|
+ queueResponse.extend(response.records)
|
|
+
|
|
+ while response.totalRecords > response.page * response.pageSize:
|
|
+ page = response.page + 1
|
|
+ params = {'pageSize': pageSize, 'page': page}
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params=params))
|
|
+ get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+ if not get:
|
|
+ return
|
|
+
|
|
+ response = QueuePages(**get)
|
|
+ queueResponse.extend(response.records)
|
|
|
|
- for movie in get:
|
|
+ download_queue = []
|
|
+ for queueItem in queueResponse:
|
|
try:
|
|
- movie['movie'] = RadarrMovie(**movie['movie'])
|
|
+ download_queue.append(RadarrQueue(**queueItem))
|
|
except TypeError as e:
|
|
- self.logger.error('TypeError has occurred : %s while creating RadarrMovie structure', e)
|
|
+ self.logger.error('TypeError has occurred : %s while creating RadarrQueue structure', e)
|
|
return
|
|
-
|
|
- try:
|
|
- download_queue = [Queue(**movie) for movie in get]
|
|
- except TypeError as e:
|
|
- self.logger.error('TypeError has occurred : %s while creating Queue structure', e)
|
|
+ if not download_queue:
|
|
return
|
|
|
|
for queue_item in download_queue:
|
|
- movie = queue_item.movie
|
|
+ movie = self.get_movie(queue_item.movieId)
|
|
|
|
name = f'{movie.title} ({movie.year})'
|
|
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index db93ef7..e411e2f 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -2,7 +2,7 @@ from logging import getLogger
|
|
from requests import Session, Request
|
|
from datetime import datetime, timezone, date, timedelta
|
|
|
|
-from varken.structures import Queue, SonarrTVShow
|
|
+from varken.structures import SonarrEpisode, SonarrQueue, QueuePages, SonarrTVShow
|
|
from varken.helpers import hashit, connection_handler
|
|
|
|
|
|
@@ -18,9 +18,33 @@ class SonarrAPI(object):
|
|
|
|
def __repr__(self):
|
|
return f"<sonarr-{self.server.id}>"
|
|
+
|
|
+ def get_series(self, id):
|
|
+ endpoint = '/api/v3/series/'
|
|
+
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint + str(id)))
|
|
+ get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+
|
|
+ if not get:
|
|
+ return
|
|
+
|
|
+ return SonarrTVShow(**get)
|
|
+
|
|
+ def get_episode(self, id):
|
|
+ endpoint = '/api/v3/episode'
|
|
+ params = {'episodeIds': id}
|
|
+
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params = params))
|
|
+ get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+
|
|
+ if not get:
|
|
+ return
|
|
+
|
|
+ return SonarrEpisode(**get[0])
|
|
+
|
|
|
|
def get_calendar(self, query="Missing"):
|
|
- endpoint = '/api/calendar/'
|
|
+ endpoint = '/api/v3/calendar/'
|
|
today = str(date.today())
|
|
last_days = str(date.today() - timedelta(days=self.server.missing_days))
|
|
future = str(date.today() + timedelta(days=self.server.future_days))
|
|
@@ -42,22 +66,23 @@ class SonarrAPI(object):
|
|
tv_shows = []
|
|
for show in get:
|
|
try:
|
|
- tv_shows.append(SonarrTVShow(**show))
|
|
+ tv_shows.append(SonarrEpisode(**show))
|
|
except TypeError as e:
|
|
- self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. Data '
|
|
+ self.logger.error('TypeError has occurred : %s while creating SonarrEpisode structure for show. Data '
|
|
'attempted is: %s', e, show)
|
|
|
|
- for show in tv_shows:
|
|
- sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
|
|
- if show.hasFile:
|
|
+ for episode in tv_shows:
|
|
+ tvShow = self.get_series(episode.seriesId)
|
|
+ sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}'
|
|
+ if episode.hasFile:
|
|
downloaded = 1
|
|
else:
|
|
downloaded = 0
|
|
if query == "Missing":
|
|
- if show.monitored and not downloaded:
|
|
- missing.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
|
|
+ if episode.monitored and not downloaded:
|
|
+ missing.append((tvShow.title, downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
|
|
else:
|
|
- air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
|
|
+ air_days.append((tvShow.title, downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
|
|
|
|
for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing):
|
|
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
|
|
@@ -85,41 +110,59 @@ class SonarrAPI(object):
|
|
|
|
def get_queue(self):
|
|
influx_payload = []
|
|
- endpoint = '/api/queue'
|
|
+ endpoint = '/api/v3/queue'
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
+ pageSize = 250
|
|
+ params = {'pageSize': pageSize}
|
|
+ queueResponse = []
|
|
queue = []
|
|
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
-
|
|
if not get:
|
|
return
|
|
+
|
|
+ response = QueuePages(**get)
|
|
+ queueResponse.extend(response.records)
|
|
+
|
|
+ while response.totalRecords > response.page * response.pageSize:
|
|
+ page = response.page + 1
|
|
+ params = {'pageSize': pageSize, 'page': page}
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params=params))
|
|
+ get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+ if not get:
|
|
+ return
|
|
+
|
|
+ response = QueuePages(**get)
|
|
+ queueResponse.extend(response.records)
|
|
|
|
download_queue = []
|
|
- for show in get:
|
|
+ for queueItem in queueResponse:
|
|
try:
|
|
- download_queue.append(Queue(**show))
|
|
+ download_queue.append(SonarrQueue(**queueItem))
|
|
except TypeError as e:
|
|
self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: '
|
|
- '%s', e, show)
|
|
+ '%s', e, queueItem)
|
|
if not download_queue:
|
|
return
|
|
|
|
- for show in download_queue:
|
|
+ for queueItem in download_queue:
|
|
+ tvShow = self.get_series(queueItem.seriesId)
|
|
+ episode = self.get_episode(queueItem.episodeId)
|
|
try:
|
|
- sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}"
|
|
+ sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
|
|
except TypeError as e:
|
|
self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \
|
|
- Remove invalid queue entry. Data attempted is: %s', e, show)
|
|
+ Remove invalid queue entry. Data attempted is: %s', e, queueItem)
|
|
continue
|
|
|
|
- if show.protocol.upper() == 'USENET':
|
|
+ if queueItem.protocol.upper() == 'USENET':
|
|
protocol_id = 1
|
|
else:
|
|
protocol_id = 0
|
|
|
|
- queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
|
|
- protocol_id, sxe, show.id, show.quality['quality']['name']))
|
|
+ queue.append((tvShow.title, episode.title, queueItem.protocol.upper(),
|
|
+ protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name']))
|
|
|
|
for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue:
|
|
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index da894ff..e79046c 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -91,22 +91,13 @@ class UniFiServer(NamedTuple):
|
|
|
|
|
|
# Shared
|
|
-class Queue(NamedTuple):
|
|
- downloadId: str = None
|
|
- episode: dict = None
|
|
- estimatedCompletionTime: str = None
|
|
- id: int = None
|
|
- movie: dict = None
|
|
- protocol: str = None
|
|
- quality: dict = None
|
|
- series: dict = None
|
|
- size: float = None
|
|
- sizeleft: float = None
|
|
- status: str = None
|
|
- statusMessages: list = None
|
|
- timeleft: str = None
|
|
- title: str = None
|
|
- trackedDownloadStatus: str = None
|
|
+class QueuePages(NamedTuple):
|
|
+ page: int = None
|
|
+ pageSize: int = None
|
|
+ sortKey: str = None
|
|
+ sortDirection: str = None
|
|
+ totalRecords: str = None
|
|
+ records: list = None
|
|
|
|
|
|
# Ombi Structures
|
|
@@ -184,35 +175,88 @@ class OmbiMovieRequest(NamedTuple):
|
|
|
|
# Sonarr
|
|
class SonarrTVShow(NamedTuple):
|
|
+ added: str = None
|
|
+ airTime: str = None
|
|
+ alternateTitles: list = None
|
|
+ certification: str = None
|
|
+ cleanTitle: str = None
|
|
+ ended: bool = None
|
|
+ firstAired: str = None
|
|
+ genres: list = None
|
|
+ id: int = None
|
|
+ images: list = None
|
|
+ imdbId: str = None
|
|
+ languageProfileId: int = None
|
|
+ monitored: bool = None
|
|
+ nextAiring: str = None
|
|
+ network: str = None
|
|
+ overview: str = None
|
|
+ path: str = None
|
|
+ previousAiring: str = None
|
|
+ qualityProfileId: int = None
|
|
+ ratings: dict = None
|
|
+ rootFolderPath: str = None
|
|
+ runtime: int = None
|
|
+ seasonFolder: bool = None
|
|
+ seasons: list = None
|
|
+ seriesType: str = None
|
|
+ sortTitle: str = None
|
|
+ statistics: dict = None
|
|
+ status: str = None
|
|
+ tags: list = None
|
|
+ title: str = None
|
|
+ titleSlug: str = None
|
|
+ tvdbId: int = None
|
|
+ tvMazeId: int = None
|
|
+ tvRageId: int = None
|
|
+ useSceneNumbering: bool = None
|
|
+ year: int = None
|
|
+
|
|
+
|
|
+class SonarrEpisode(NamedTuple):
|
|
absoluteEpisodeNumber: int = None
|
|
airDate: str = None
|
|
airDateUtc: str = None
|
|
- episodeFile: dict = None
|
|
episodeFileId: int = None
|
|
episodeNumber: int = None
|
|
+ grabbed: bool = None
|
|
hasFile: bool = None
|
|
id: int = None
|
|
- lastSearchTime: str = None
|
|
monitored: bool = None
|
|
overview: str = None
|
|
- sceneAbsoluteEpisodeNumber: int = None
|
|
- sceneEpisodeNumber: int = None
|
|
- sceneSeasonNumber: int = None
|
|
seasonNumber: int = None
|
|
- series: dict = None
|
|
seriesId: int = None
|
|
title: str = None
|
|
unverifiedSceneNumbering: bool = None
|
|
|
|
|
|
+class SonarrQueue(NamedTuple):
|
|
+ downloadClient: str = None
|
|
+ downloadId: str = None
|
|
+ episodeId: int = None
|
|
+ id: int = None
|
|
+ indexer: str = None
|
|
+ language: dict = None
|
|
+ protocol: str = None
|
|
+ quality: dict = None
|
|
+ size: float = None
|
|
+ sizeleft: float = None
|
|
+ status: str = None
|
|
+ statusMessages: list = None
|
|
+ title: str = None
|
|
+ trackedDownloadState: str = None
|
|
+ trackedDownloadStatus: str = None
|
|
+ seriesId: int = None
|
|
+
|
|
+
|
|
# Radarr
|
|
class RadarrMovie(NamedTuple):
|
|
added: str = None
|
|
- addOptions: str = None
|
|
- alternativeTitles: list = None
|
|
+ alternateTitles: list = None
|
|
certification: str = None
|
|
cleanTitle: str = None
|
|
- downloaded: bool = None
|
|
+ collection: dict = None
|
|
+ digitalRelease: str = None
|
|
folderName: str = None
|
|
genres: list = None
|
|
hasFile: bool = None
|
|
@@ -221,32 +265,49 @@ class RadarrMovie(NamedTuple):
|
|
imdbId: str = None
|
|
inCinemas: str = None
|
|
isAvailable: bool = None
|
|
- lastInfoSync: str = None
|
|
minimumAvailability: str = None
|
|
monitored: bool = None
|
|
movieFile: dict = None
|
|
+ originalTitle: str = None
|
|
overview: str = None
|
|
path: str = None
|
|
- pathState: str = None
|
|
physicalRelease: str = None
|
|
- physicalReleaseNote: str = None
|
|
- profileId: int = None
|
|
qualityProfileId: int = None
|
|
ratings: dict = None
|
|
runtime: int = None
|
|
- secondaryYear: str = None
|
|
+ secondaryYear: int = None
|
|
secondaryYearSourceId: int = None
|
|
- sizeOnDisk: int = None
|
|
+ sizeOnDisk: float = None
|
|
sortTitle: str = None
|
|
status: str = None
|
|
studio: str = None
|
|
tags: list = None
|
|
- title: str = None
|
|
titleSlug: str = None
|
|
tmdbId: int = None
|
|
website: str = None
|
|
year: int = None
|
|
youTubeTrailerId: str = None
|
|
+ title: str = None
|
|
+
|
|
+
|
|
+# Radarr Queue
|
|
+class RadarrQueue(NamedTuple):
|
|
+ customFormats: list = None
|
|
+ downloadClient: str = None
|
|
+ downloadId: str = None
|
|
+ id: int = None
|
|
+ indexer: str = None
|
|
+ languages: list = None
|
|
+ movieId: int = None
|
|
+ protocol: str = None
|
|
+ quality: dict = None
|
|
+ size: float = None
|
|
+ sizeleft: float = None
|
|
+ status: str = None
|
|
+ statusMessages: list = None
|
|
+ title: str = None
|
|
+ trackedDownloadState: str = None
|
|
+ trackedDownloadStatus: str = None
|
|
|
|
|
|
# Sickchill
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 7a8c5a3ee488897338bccee9020734f42706365b Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 14 Jan 2022 21:19:08 -0500
|
|
Subject: [PATCH 05/65] bump requirements for requests
|
|
|
|
---
|
|
requirements.txt | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/requirements.txt b/requirements.txt
|
|
index c5c790a..523e427 100644
|
|
--- a/requirements.txt
|
|
+++ b/requirements.txt
|
|
@@ -2,7 +2,7 @@
|
|
# Potential requirements.
|
|
# pip3 install -r requirements.txt
|
|
#---------------------------------------------------------
|
|
-requests==2.21
|
|
+requests==2.25.1
|
|
geoip2==2.9.0
|
|
influxdb==5.2.0
|
|
schedule==0.6.0
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 2607584eba4ac994092357dee3f8c2eb470885e8 Mon Sep 17 00:00:00 2001
|
|
From: tigattack <10629864+tigattack@users.noreply.github.com>
|
|
Date: Sat, 15 Jan 2022 02:21:54 +0000
|
|
Subject: [PATCH 06/65] Fix Sonarr & Radarr V3 API /queue endpoint (#220)
|
|
|
|
---
|
|
varken/structures.py | 6 ++++++
|
|
1 file changed, 6 insertions(+)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index e79046c..30f4d9d 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -247,6 +247,8 @@ class SonarrQueue(NamedTuple):
|
|
trackedDownloadState: str = None
|
|
trackedDownloadStatus: str = None
|
|
seriesId: int = None
|
|
+ errorMessage: str = None
|
|
+ outputPath: str = None
|
|
|
|
|
|
# Radarr
|
|
@@ -308,6 +310,10 @@ class RadarrQueue(NamedTuple):
|
|
title: str = None
|
|
trackedDownloadState: str = None
|
|
trackedDownloadStatus: str = None
|
|
+ timeleft: str = None
|
|
+ estimatedCompletionTime: str = None
|
|
+ errorMessage: str = None
|
|
+ outputPath: str = None
|
|
|
|
|
|
# Sickchill
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 533ec1058f9d0b083706e481e308c494821e7e78 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 14 Jan 2022 21:49:17 -0500
|
|
Subject: [PATCH 07/65] Fix lint issues
|
|
|
|
---
|
|
varken/radarr.py | 12 ++++++------
|
|
varken/sonarr.py | 16 ++++++++--------
|
|
2 files changed, 14 insertions(+), 14 deletions(-)
|
|
|
|
diff --git a/varken/radarr.py b/varken/radarr.py
|
|
index 4a6e81a..2dc7367 100644
|
|
--- a/varken/radarr.py
|
|
+++ b/varken/radarr.py
|
|
@@ -18,7 +18,7 @@ class RadarrAPI(object):
|
|
def __repr__(self):
|
|
return f"<radarr-{self.server.id}>"
|
|
|
|
- def get_movie(self,id):
|
|
+ def get_movie(self, id):
|
|
endpoint = '/api/v3/movie/'
|
|
|
|
req = self.session.prepare_request(Request('GET', self.server.url + endpoint + str(id)))
|
|
@@ -88,19 +88,19 @@ class RadarrAPI(object):
|
|
queueResponse = []
|
|
queue = []
|
|
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params=params))
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
if not get:
|
|
return
|
|
-
|
|
+
|
|
response = QueuePages(**get)
|
|
queueResponse.extend(response.records)
|
|
|
|
while response.totalRecords > response.page * response.pageSize:
|
|
page = response.page + 1
|
|
params = {'pageSize': pageSize, 'page': page}
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params=params))
|
|
- get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
+ get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
if not get:
|
|
return
|
|
|
|
@@ -152,4 +152,4 @@ class RadarrAPI(object):
|
|
}
|
|
)
|
|
|
|
- self.dbmanager.write_points(influx_payload)
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
\ No newline at end of file
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index e411e2f..90919c2 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -18,7 +18,7 @@ class SonarrAPI(object):
|
|
|
|
def __repr__(self):
|
|
return f"<sonarr-{self.server.id}>"
|
|
-
|
|
+
|
|
def get_series(self, id):
|
|
endpoint = '/api/v3/series/'
|
|
|
|
@@ -27,14 +27,14 @@ class SonarrAPI(object):
|
|
|
|
if not get:
|
|
return
|
|
-
|
|
+
|
|
return SonarrTVShow(**get)
|
|
|
|
def get_episode(self, id):
|
|
endpoint = '/api/v3/episode'
|
|
params = {'episodeIds': id}
|
|
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params = params))
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
|
|
if not get:
|
|
@@ -117,19 +117,19 @@ class SonarrAPI(object):
|
|
queueResponse = []
|
|
queue = []
|
|
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params=params))
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
if not get:
|
|
return
|
|
-
|
|
+
|
|
response = QueuePages(**get)
|
|
queueResponse.extend(response.records)
|
|
|
|
while response.totalRecords > response.page * response.pageSize:
|
|
page = response.page + 1
|
|
params = {'pageSize': pageSize, 'page': page}
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint,params=params))
|
|
- get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
+ get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
if not get:
|
|
return
|
|
|
|
@@ -189,4 +189,4 @@ class SonarrAPI(object):
|
|
if influx_payload:
|
|
self.dbmanager.write_points(influx_payload)
|
|
else:
|
|
- self.logger.debug("No data to send to influx for sonarr instance, discarding.")
|
|
+ self.logger.debug("No data to send to influx for sonarr instance, discarding.")
|
|
\ No newline at end of file
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 63746dd7be4e99c06c3c64eb8fe3e76e794e2c4f Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 14 Jan 2022 21:51:43 -0500
|
|
Subject: [PATCH 08/65] More lint fixes
|
|
|
|
---
|
|
varken/radarr.py | 2 +-
|
|
varken/sonarr.py | 3 +--
|
|
2 files changed, 2 insertions(+), 3 deletions(-)
|
|
|
|
diff --git a/varken/radarr.py b/varken/radarr.py
|
|
index 2dc7367..f654198 100644
|
|
--- a/varken/radarr.py
|
|
+++ b/varken/radarr.py
|
|
@@ -152,4 +152,4 @@ class RadarrAPI(object):
|
|
}
|
|
)
|
|
|
|
- self.dbmanager.write_points(influx_payload)
|
|
\ No newline at end of file
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index 90919c2..0bb8684 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -42,7 +42,6 @@ class SonarrAPI(object):
|
|
|
|
return SonarrEpisode(**get[0])
|
|
|
|
-
|
|
def get_calendar(self, query="Missing"):
|
|
endpoint = '/api/v3/calendar/'
|
|
today = str(date.today())
|
|
@@ -189,4 +188,4 @@ class SonarrAPI(object):
|
|
if influx_payload:
|
|
self.dbmanager.write_points(influx_payload)
|
|
else:
|
|
- self.logger.debug("No data to send to influx for sonarr instance, discarding.")
|
|
\ No newline at end of file
|
|
+ self.logger.debug("No data to send to influx for sonarr instance, discarding.")
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 9498a83bc8438570dedc6fb3ea1c8560ed2d927a Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 14 Jan 2022 22:05:40 -0500
|
|
Subject: [PATCH 09/65] Update Sonarr structures
|
|
|
|
---
|
|
varken/structures.py | 3 +++
|
|
1 file changed, 3 insertions(+)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 30f4d9d..f799ab4 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -228,6 +228,9 @@ class SonarrEpisode(NamedTuple):
|
|
seriesId: int = None
|
|
title: str = None
|
|
unverifiedSceneNumbering: bool = None
|
|
+ sceneAbsoluteEpisodeNumber: int = None
|
|
+ sceneEpisodeNumber: int = None
|
|
+ sceneSeasonNumber: int = None
|
|
|
|
|
|
class SonarrQueue(NamedTuple):
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 73b2686ba0c70bd0dda369d3c02a073fac0bc1fd Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 14 Jan 2022 23:16:44 -0500
|
|
Subject: [PATCH 10/65] Add Overseerr Support (#210)
|
|
|
|
---
|
|
Varken.py | 16 ++++
|
|
data/varken.example.ini | 16 +++-
|
|
varken/iniparser.py | 36 +++++++-
|
|
varken/overseerr.py | 179 ++++++++++++++++++++++++++++++++++++++++
|
|
varken/structures.py | 68 +++++++++++++++
|
|
5 files changed, 312 insertions(+), 3 deletions(-)
|
|
create mode 100644 varken/overseerr.py
|
|
|
|
diff --git a/Varken.py b/Varken.py
|
|
index 3641cbc..fedb080 100644
|
|
--- a/Varken.py
|
|
+++ b/Varken.py
|
|
@@ -14,6 +14,7 @@ from logging import getLogger, StreamHandler, Formatter, DEBUG
|
|
# Needed to check version of python
|
|
from varken import structures # noqa
|
|
from varken.ombi import OmbiAPI
|
|
+from varken.overseerr import OverseerrAPI
|
|
from varken.unifi import UniFiAPI
|
|
from varken import VERSION, BRANCH, BUILD_DATE
|
|
from varken.sonarr import SonarrAPI
|
|
@@ -156,6 +157,21 @@ if __name__ == "__main__":
|
|
at_time = schedule.every(server.issue_status_run_seconds).seconds
|
|
at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id))
|
|
|
|
+ if CONFIG.overseerr_enabled:
|
|
+ for server in CONFIG.overseerr_servers:
|
|
+ OVERSEER = OverseerrAPI(server, DBMANAGER)
|
|
+ if server.get_request_total_counts:
|
|
+ at_time = schedule.every(server.request_total_run_seconds).seconds
|
|
+ at_time.do(thread, OVERSEER.get_total_requests).tag("overseerr-{}-get_total_requests".format(server.id))
|
|
+ if server.get_request_status_counts:
|
|
+ at_time = schedule.every(server.request_status_run_seconds).seconds
|
|
+ at_time.do(thread, OVERSEER.get_request_status_counts).tag("overseerr-{}-get_request_status_counts"
|
|
+ .format(server.id))
|
|
+ if server.get_latest_requests:
|
|
+ at_time = schedule.every(server.num_latest_requests_seconds).seconds
|
|
+ at_time.do(thread, OVERSEER.get_latest_requests).tag("overseerr-{}-get_latest_requests"
|
|
+ .format(server.id))
|
|
+
|
|
if CONFIG.sickchill_enabled:
|
|
for server in CONFIG.sickchill_servers:
|
|
SICKCHILL = SickChillAPI(server, DBMANAGER)
|
|
diff --git a/data/varken.example.ini b/data/varken.example.ini
|
|
index fa072cf..e5eb650 100644
|
|
--- a/data/varken.example.ini
|
|
+++ b/data/varken.example.ini
|
|
@@ -3,7 +3,8 @@ sonarr_server_ids = 1,2
|
|
radarr_server_ids = 1,2
|
|
lidarr_server_ids = false
|
|
tautulli_server_ids = 1
|
|
-ombi_server_ids = 1
|
|
+ombi_server_ids = false
|
|
+overseerr_server_ids = 1
|
|
sickchill_server_ids = false
|
|
unifi_server_ids = false
|
|
maxmind_license_key = xxxxxxxxxxxxxxxx
|
|
@@ -95,6 +96,19 @@ request_total_run_seconds = 300
|
|
get_issue_status_counts = true
|
|
issue_status_run_seconds = 300
|
|
|
|
+[overseerr-1]
|
|
+url = overseerr.domain.tld
|
|
+apikey = xxxxxxxxxxxxxxxx
|
|
+ssl = false
|
|
+verify_ssl = false
|
|
+get_request_total_counts = true
|
|
+request_total_run_seconds = 300
|
|
+get_request_status_counts = true
|
|
+request_status_run_seconds = 300
|
|
+get_latest_requests = true
|
|
+num_latest_requests_to_fetch = 10
|
|
+num_latest_requests_seconds = 300
|
|
+
|
|
[sickchill-1]
|
|
url = sickchill.domain.tld:8081
|
|
apikey = xxxxxxxxxxxxxxxx
|
|
diff --git a/varken/iniparser.py b/varken/iniparser.py
|
|
index e241f31..4db95f1 100644
|
|
--- a/varken/iniparser.py
|
|
+++ b/varken/iniparser.py
|
|
@@ -9,7 +9,7 @@ from configparser import ConfigParser, NoOptionError, NoSectionError
|
|
from varken.varkenlogger import BlacklistFilter
|
|
from varken.structures import SickChillServer, UniFiServer
|
|
from varken.helpers import clean_sid_check, rfc1918_ip_check, boolcheck
|
|
-from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer
|
|
+from varken.structures import SonarrServer, RadarrServer, OmbiServer, OverseerrServer, TautulliServer, InfluxServer
|
|
|
|
|
|
class INIParser(object):
|
|
@@ -17,7 +17,7 @@ class INIParser(object):
|
|
self.config = None
|
|
self.data_folder = data_folder
|
|
self.filtered_strings = None
|
|
- self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'tautulli', 'sickchill', 'unifi']
|
|
+ self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'overseerr', 'tautulli', 'sickchill', 'unifi']
|
|
|
|
self.logger = getLogger()
|
|
self.influx_server = InfluxServer()
|
|
@@ -293,6 +293,38 @@ class INIParser(object):
|
|
issue_status_counts=issue_status_counts,
|
|
issue_status_run_seconds=issue_status_run_seconds)
|
|
|
|
+ if service == 'overseerr':
|
|
+ get_latest_requests = boolcheck(env.get(
|
|
+ f'VRKN_{envsection}_GET_LATEST_REQUESTS',
|
|
+ self.config.get(section, 'get_latest_requests')))
|
|
+ num_latest_requests_to_fetch = int(env.get(
|
|
+ f'VRKN_{envsection}_NUM_LATEST_REQUESTS',
|
|
+ self.config.getint(section, 'num_latest_requests_to_fetch')))
|
|
+ num_latest_requests_seconds = int(env.get(
|
|
+ f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS',
|
|
+ self.config.getint(section, 'num_latest_requests_seconds')))
|
|
+ get_request_total_counts = boolcheck(env.get(
|
|
+ f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS',
|
|
+ self.config.get(section, 'get_request_total_counts')))
|
|
+ request_total_run_seconds = int(env.get(
|
|
+ f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS',
|
|
+ self.config.getint(section, 'request_total_run_seconds')))
|
|
+ get_request_status_counts = boolcheck(env.get(
|
|
+ f'VRKN_{envsection}_GET_REQUEST_STATUS_COUNTS',
|
|
+ self.config.get(section, 'get_request_status_counts')))
|
|
+ request_status_run_seconds = int(env.get(
|
|
+ f'VRKN_{envsection}_REQUEST_STATUS_RUN_SECONDS',
|
|
+ self.config.getint(section, 'request_status_run_seconds')))
|
|
+
|
|
+ server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey,
|
|
+ verify_ssl=verify_ssl, get_latest_requests=get_latest_requests,
|
|
+ num_latest_requests_to_fetch=num_latest_requests_to_fetch,
|
|
+ num_latest_requests_seconds=num_latest_requests_seconds,
|
|
+ get_request_total_counts=get_request_total_counts,
|
|
+ request_total_run_seconds=request_total_run_seconds,
|
|
+ get_request_status_counts=get_request_status_counts,
|
|
+ request_status_run_seconds=request_status_run_seconds)
|
|
+
|
|
if service == 'sickchill':
|
|
get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING',
|
|
self.config.get(section, 'get_missing')))
|
|
diff --git a/varken/overseerr.py b/varken/overseerr.py
|
|
new file mode 100644
|
|
index 0000000..55e8880
|
|
--- /dev/null
|
|
+++ b/varken/overseerr.py
|
|
@@ -0,0 +1,179 @@
|
|
+from logging import getLogger
|
|
+from requests import Session, Request
|
|
+from datetime import datetime, timezone
|
|
+
|
|
+from varken.helpers import connection_handler, hashit
|
|
+from varken.structures import OverseerrRequest, OverseerrRequestCounts
|
|
+
|
|
+
|
|
+class OverseerrAPI(object):
|
|
+ def __init__(self, server, dbmanager):
|
|
+ self.dbmanager = dbmanager
|
|
+ self.server = server
|
|
+ # Create session to reduce server web thread load, and globally define pageSize for all requests
|
|
+ self.session = Session()
|
|
+ self.session.headers = {'X-Api-Key': self.server.api_key}
|
|
+ self.logger = getLogger()
|
|
+
|
|
+ def __repr__(self):
|
|
+ return f"<overseerr-{self.server.id}>"
|
|
+
|
|
+ def get_total_requests(self):
|
|
+ now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
+ endpoint = '/api/v1/request?take=200&filter=all&sort=added'
|
|
+
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
|
|
+ get_req = connection_handler(self.session, req, self.server.verify_ssl) or []
|
|
+
|
|
+ if not any([get_req]):
|
|
+ self.logger.error('No json replies. Discarding job')
|
|
+ return
|
|
+
|
|
+ tv_requests = []
|
|
+ movie_requests = []
|
|
+
|
|
+ for result in get_req['results']:
|
|
+ if result['type'] == 'tv':
|
|
+ try:
|
|
+ tv_requests.append(OverseerrRequest(**result))
|
|
+ except TypeError as e:
|
|
+ self.logger.error('TypeError has occurred : %s while creating OverseerrRequest structure for show. '
|
|
+ 'data attempted is: %s', e, result)
|
|
+
|
|
+ if result['type'] == 'movie':
|
|
+ try:
|
|
+ movie_requests.append(OverseerrRequest(**result))
|
|
+ except TypeError as e:
|
|
+ self.logger.error('TypeError has occurred : %s while creating OverseerrRequest \
|
|
+ structure for movie. '
|
|
+ 'data attempted is: %s', e, result)
|
|
+
|
|
+ if tv_requests:
|
|
+ tv_request_count = len(tv_requests)
|
|
+
|
|
+ if movie_requests:
|
|
+ movie_request_count = len(movie_requests)
|
|
+
|
|
+ influx_payload = [
|
|
+ {
|
|
+ "measurement": "Overseerr",
|
|
+ "tags": {
|
|
+ "type": "Request_Totals",
|
|
+ "server": self.server.id
|
|
+ },
|
|
+ "time": now,
|
|
+ "fields": {
|
|
+ "total": movie_request_count + tv_request_count,
|
|
+ "movies": movie_request_count,
|
|
+ "tv": tv_request_count
|
|
+ }
|
|
+ }
|
|
+ ]
|
|
+
|
|
+ if influx_payload:
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
+ else:
|
|
+ self.logger.debug("Empty dataset for overseerr module. Discarding...")
|
|
+
|
|
+ def get_request_status_counts(self):
|
|
+ now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
+ endpoint = '/api/v1/request/count'
|
|
+
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
|
|
+ get_req = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+
|
|
+ if not get_req:
|
|
+ return
|
|
+
|
|
+ requests = OverseerrRequestCounts(**get_req)
|
|
+ influx_payload = [
|
|
+ {
|
|
+ "measurement": "Overseerr",
|
|
+ "tags": {
|
|
+ "type": "Request_Counts"
|
|
+ },
|
|
+ "time": now,
|
|
+ "fields": {
|
|
+ "pending": requests.pending,
|
|
+ "approved": requests.approved,
|
|
+ "processing": requests.processing,
|
|
+ "available": requests.available
|
|
+ }
|
|
+ }
|
|
+ ]
|
|
+
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
+
|
|
+ def get_latest_requests(self):
|
|
+ now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
+ endpoint = '/api/v1/request?take=' + str(self.server.num_latest_requests_to_fetch) + '&filter=all&sort=added'
|
|
+ movie_endpoint = '/api/v1/movie/'
|
|
+ tv_endpoint = '/api/v1/tv/'
|
|
+
|
|
+ # GET THE LATEST n REQUESTS
|
|
+ req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
|
|
+ get_latest_req = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+
|
|
+ # RETURN NOTHING IF NO RESULTS
|
|
+ if not get_latest_req:
|
|
+ return
|
|
+
|
|
+ influx_payload = []
|
|
+
|
|
+ # Request Type: Movie = 1, TV Show = 0
|
|
+ for result in get_latest_req['results']:
|
|
+ if result['type'] == 'tv':
|
|
+ req = self.session.prepare_request(Request('GET',
|
|
+ self.server.url +
|
|
+ tv_endpoint +
|
|
+ str(result['media']['tmdbId'])))
|
|
+ get_tv_req = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+ hash_id = hashit(f'{get_tv_req["id"]}{get_tv_req["name"]}')
|
|
+
|
|
+ influx_payload.append(
|
|
+ {
|
|
+ "measurement": "Overseerr",
|
|
+ "tags": {
|
|
+ "type": "Requests",
|
|
+ "server": self.server.id,
|
|
+ "request_type": 0,
|
|
+ "status": get_tv_req['mediaInfo']['status'],
|
|
+ "title": get_tv_req['name'],
|
|
+ "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['plexUsername'],
|
|
+ "requested_date": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['createdAt']
|
|
+ },
|
|
+ "time": now,
|
|
+ "fields": {
|
|
+ "hash": hash_id
|
|
+ }
|
|
+ }
|
|
+ )
|
|
+
|
|
+ if result['type'] == 'movie':
|
|
+ req = self.session.prepare_request(Request('GET',
|
|
+ self.server.url +
|
|
+ movie_endpoint +
|
|
+ str(result['media']['tmdbId'])))
|
|
+ get_movie_req = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+ hash_id = hashit(f'{get_movie_req["id"]}{get_movie_req["title"]}')
|
|
+
|
|
+ influx_payload.append(
|
|
+ {
|
|
+ "measurement": "Overseerr",
|
|
+ "tags": {
|
|
+ "type": "Requests",
|
|
+ "server": self.server.id,
|
|
+ "request_type": 1,
|
|
+ "status": get_movie_req['mediaInfo']['status'],
|
|
+ "title": get_movie_req['title'],
|
|
+ "requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['plexUsername'],
|
|
+ "requested_date": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['createdAt']
|
|
+ },
|
|
+ "time": now,
|
|
+ "fields": {
|
|
+ "hash": hash_id
|
|
+ }
|
|
+ }
|
|
+ )
|
|
+
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index f799ab4..950c3d3 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -57,6 +57,20 @@ class OmbiServer(NamedTuple):
|
|
verify_ssl: bool = False
|
|
|
|
|
|
+class OverseerrServer(NamedTuple):
|
|
+ api_key: str = None
|
|
+ id: int = None
|
|
+ url: str = None
|
|
+ verify_ssl: bool = False
|
|
+ get_request_total_counts: bool = False
|
|
+ request_total_run_seconds: int = 30
|
|
+ get_request_status_counts: bool = False
|
|
+ request_status_run_seconds: int = 30
|
|
+ get_latest_requests: bool = False
|
|
+ num_latest_requests_to_fetch: int = 10
|
|
+ num_latest_requests_seconds: int = 30
|
|
+
|
|
+
|
|
class TautulliServer(NamedTuple):
|
|
api_key: str = None
|
|
fallback_ip: str = None
|
|
@@ -173,6 +187,60 @@ class OmbiMovieRequest(NamedTuple):
|
|
requestStatus: str = None
|
|
|
|
|
|
+# Overseerr
|
|
+class OverseerrRequest(NamedTuple):
|
|
+ id: int = None
|
|
+ status: int = None
|
|
+ createdAt: str = None
|
|
+ updatedAt: str = None
|
|
+ type: str = None
|
|
+ is4k: bool = None
|
|
+ serverId: int = None
|
|
+ profileId: int = None
|
|
+ rootFolder: str = None
|
|
+ languageProfileId: int = None
|
|
+ tags: list = None
|
|
+ media: dict = None
|
|
+ seasons: list = None
|
|
+ modifiedBy: dict = None
|
|
+ requestedBy: dict = None
|
|
+ seasonCount: int = None
|
|
+
|
|
+
|
|
+class OverseerrRequestCounts(NamedTuple):
|
|
+ pending: int = None
|
|
+ approved: int = None
|
|
+ processing: int = None
|
|
+ available: int = None
|
|
+
|
|
+
|
|
+# Overseerr
|
|
+class OverseerrRequest(NamedTuple):
|
|
+ id: int = None
|
|
+ status: int = None
|
|
+ createdAt: str = None
|
|
+ updatedAt: str = None
|
|
+ type: str = None
|
|
+ is4k: bool = None
|
|
+ serverId: int = None
|
|
+ profileId: int = None
|
|
+ rootFolder: str = None
|
|
+ languageProfileId: int = None
|
|
+ tags: list = None
|
|
+ media: dict = None
|
|
+ seasons: list = None
|
|
+ modifiedBy: dict = None
|
|
+ requestedBy: dict = None
|
|
+ seasonCount: int = None
|
|
+
|
|
+
|
|
+class OverseerrRequestCounts(NamedTuple):
|
|
+ pending: int = None
|
|
+ approved: int = None
|
|
+ processing: int = None
|
|
+ available: int = None
|
|
+
|
|
+
|
|
# Sonarr
|
|
class SonarrTVShow(NamedTuple):
|
|
added: str = None
|
|
--
|
|
GitLab
|
|
|
|
|
|
From d4c8037f56d83ed2b789b4efd425ab6cb7954bc3 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 14 Jan 2022 23:20:33 -0500
|
|
Subject: [PATCH 11/65] Remove duplicate structures
|
|
|
|
---
|
|
varken/structures.py | 27 ---------------------------
|
|
1 file changed, 27 deletions(-)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 950c3d3..f2f28f2 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -214,33 +214,6 @@ class OverseerrRequestCounts(NamedTuple):
|
|
available: int = None
|
|
|
|
|
|
-# Overseerr
|
|
-class OverseerrRequest(NamedTuple):
|
|
- id: int = None
|
|
- status: int = None
|
|
- createdAt: str = None
|
|
- updatedAt: str = None
|
|
- type: str = None
|
|
- is4k: bool = None
|
|
- serverId: int = None
|
|
- profileId: int = None
|
|
- rootFolder: str = None
|
|
- languageProfileId: int = None
|
|
- tags: list = None
|
|
- media: dict = None
|
|
- seasons: list = None
|
|
- modifiedBy: dict = None
|
|
- requestedBy: dict = None
|
|
- seasonCount: int = None
|
|
-
|
|
-
|
|
-class OverseerrRequestCounts(NamedTuple):
|
|
- pending: int = None
|
|
- approved: int = None
|
|
- processing: int = None
|
|
- available: int = None
|
|
-
|
|
-
|
|
# Sonarr
|
|
class SonarrTVShow(NamedTuple):
|
|
added: str = None
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 5a764596540b53a8417fbadf7113279d6f9e5082 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Sat, 15 Jan 2022 12:11:47 -0500
|
|
Subject: [PATCH 12/65] update changelog to reflect v1.7.7 changes
|
|
|
|
---
|
|
CHANGELOG.md | 24 ++++++++++++++++++++++++
|
|
1 file changed, 24 insertions(+)
|
|
|
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
|
|
index e5f992c..8d0dd88 100644
|
|
--- a/CHANGELOG.md
|
|
+++ b/CHANGELOG.md
|
|
@@ -1,5 +1,29 @@
|
|
# Change Log
|
|
|
|
+## [v1.7.7](https://github.com/Boerderij/Varken/tree/v1.7.7) (2020-12-21)
|
|
+[Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.6...v1.7.7)
|
|
+
|
|
+**Implemented enhancements:**
|
|
+- \[Enhancement\] Ombi 4.0 compatibility [\#186](https://github.com/Boerderij/Varken/issues/186)
|
|
+ ([samwiseg0](https://github.com/samwiseg0))
|
|
+
|
|
+**Merged pull requests:**
|
|
+
|
|
+- v1.7.7 Merge [\#191](https://github.com/Boerderij/Varken/pull/191)
|
|
+ ([DirtyCajunRice](https://github.com/DirtyCajunRice))
|
|
+- Type Error fix [\#177](https://github.com/Boerderij/Varken/pull/177)
|
|
+ ([derek-miller](https://github.com/derek-miller))
|
|
+
|
|
+**Fixed bugs:**
|
|
+
|
|
+- \[BUG\] Influxdb exit code [\#174](https://github.com/Boerderij/Varken/issues/174)
|
|
+ ([samwiseg0](https://github.com/samwiseg0))
|
|
+
|
|
+**Notes:**
|
|
+- Now built via github actions
|
|
+- Available on ghcr, quay.io, and dockerhub
|
|
+- Nightly builds done to accommodate dependabot MRs
|
|
+
|
|
## [v1.7.6](https://github.com/Boerderij/Varken/tree/v1.7.6) (2020-01-01)
|
|
[Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.5...v1.7.6)
|
|
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 436a6823605200e870f0fdf8e4c1ec9e90fca6d6 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Sat, 15 Jan 2022 12:23:29 -0500
|
|
Subject: [PATCH 13/65] Add IP data to tautulli #202
|
|
|
|
---
|
|
varken/tautulli.py | 1 +
|
|
1 file changed, 1 insertion(+)
|
|
|
|
diff --git a/varken/tautulli.py b/varken/tautulli.py
|
|
index a8d677b..0d0e04d 100644
|
|
--- a/varken/tautulli.py
|
|
+++ b/varken/tautulli.py
|
|
@@ -327,6 +327,7 @@ class TautulliAPI(object):
|
|
"tags": {
|
|
"type": "Session",
|
|
"session_id": session.session_id,
|
|
+ "ip_address": session.ip_address,
|
|
"friendly_name": session.friendly_name,
|
|
"username": session.user,
|
|
"title": session.full_title,
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 9508c3c3f74e7a6a6c6f9e8803366dc8b68ed4e0 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Sat, 15 Jan 2022 12:43:32 -0500
|
|
Subject: [PATCH 14/65] add missing ip address in tautulli
|
|
|
|
---
|
|
varken/tautulli.py | 1 +
|
|
1 file changed, 1 insertion(+)
|
|
|
|
diff --git a/varken/tautulli.py b/varken/tautulli.py
|
|
index 0d0e04d..746685f 100644
|
|
--- a/varken/tautulli.py
|
|
+++ b/varken/tautulli.py
|
|
@@ -129,6 +129,7 @@ class TautulliAPI(object):
|
|
"tags": {
|
|
"type": "Session",
|
|
"session_id": session.session_id,
|
|
+ "ip_address": session.ip_address,
|
|
"friendly_name": session.friendly_name,
|
|
"username": session.username,
|
|
"title": session.full_title,
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 870c6cdee25d3932e7a94bd787ff81c8a8c69620 Mon Sep 17 00:00:00 2001
|
|
From: Robin Dadswell <19610103+RobinDadswell@users.noreply.github.com>
|
|
Date: Mon, 17 Jan 2022 01:40:12 +0000
|
|
Subject: [PATCH 15/65] Fixed: Streamlined API calls to Radarr and Sonarr
|
|
(#221)
|
|
|
|
---
|
|
varken/radarr.py | 15 ++-------------
|
|
varken/sonarr.py | 25 +++++++------------------
|
|
varken/structures.py | 4 ++++
|
|
3 files changed, 13 insertions(+), 31 deletions(-)
|
|
|
|
diff --git a/varken/radarr.py b/varken/radarr.py
|
|
index f654198..3a153ff 100644
|
|
--- a/varken/radarr.py
|
|
+++ b/varken/radarr.py
|
|
@@ -18,17 +18,6 @@ class RadarrAPI(object):
|
|
def __repr__(self):
|
|
return f"<radarr-{self.server.id}>"
|
|
|
|
- def get_movie(self, id):
|
|
- endpoint = '/api/v3/movie/'
|
|
-
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint + str(id)))
|
|
- get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
-
|
|
- if not get:
|
|
- return
|
|
-
|
|
- return RadarrMovie(**get)
|
|
-
|
|
def get_missing(self):
|
|
endpoint = '/api/v3/movie'
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
@@ -84,7 +73,7 @@ class RadarrAPI(object):
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
influx_payload = []
|
|
pageSize = 250
|
|
- params = {'pageSize': pageSize}
|
|
+ params = {'pageSize': pageSize, 'includeMovie': True}
|
|
queueResponse = []
|
|
queue = []
|
|
|
|
@@ -118,7 +107,7 @@ class RadarrAPI(object):
|
|
return
|
|
|
|
for queue_item in download_queue:
|
|
- movie = self.get_movie(queue_item.movieId)
|
|
+ movie = queue_item.movie
|
|
|
|
name = f'{movie.title} ({movie.year})'
|
|
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index 0bb8684..f89b1df 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -2,7 +2,7 @@ from logging import getLogger
|
|
from requests import Session, Request
|
|
from datetime import datetime, timezone, date, timedelta
|
|
|
|
-from varken.structures import SonarrEpisode, SonarrQueue, QueuePages, SonarrTVShow
|
|
+from varken.structures import SonarrEpisode, SonarrQueue, QueuePages
|
|
from varken.helpers import hashit, connection_handler
|
|
|
|
|
|
@@ -19,17 +19,6 @@ class SonarrAPI(object):
|
|
def __repr__(self):
|
|
return f"<sonarr-{self.server.id}>"
|
|
|
|
- def get_series(self, id):
|
|
- endpoint = '/api/v3/series/'
|
|
-
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint + str(id)))
|
|
- get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
-
|
|
- if not get:
|
|
- return
|
|
-
|
|
- return SonarrTVShow(**get)
|
|
-
|
|
def get_episode(self, id):
|
|
endpoint = '/api/v3/episode'
|
|
params = {'episodeIds': id}
|
|
@@ -49,9 +38,9 @@ class SonarrAPI(object):
|
|
future = str(date.today() + timedelta(days=self.server.future_days))
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
if query == "Missing":
|
|
- params = {'start': last_days, 'end': today}
|
|
+ params = {'start': last_days, 'end': today, 'includeSeries': True}
|
|
else:
|
|
- params = {'start': today, 'end': future}
|
|
+ params = {'start': today, 'end': future, 'includeSeries': True}
|
|
influx_payload = []
|
|
air_days = []
|
|
missing = []
|
|
@@ -71,7 +60,7 @@ class SonarrAPI(object):
|
|
'attempted is: %s', e, show)
|
|
|
|
for episode in tv_shows:
|
|
- tvShow = self.get_series(episode.seriesId)
|
|
+ tvShow = episode.series
|
|
sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}'
|
|
if episode.hasFile:
|
|
downloaded = 1
|
|
@@ -126,7 +115,7 @@ class SonarrAPI(object):
|
|
|
|
while response.totalRecords > response.page * response.pageSize:
|
|
page = response.page + 1
|
|
- params = {'pageSize': pageSize, 'page': page}
|
|
+ params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True}
|
|
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
if not get:
|
|
@@ -146,8 +135,8 @@ class SonarrAPI(object):
|
|
return
|
|
|
|
for queueItem in download_queue:
|
|
- tvShow = self.get_series(queueItem.seriesId)
|
|
- episode = self.get_episode(queueItem.episodeId)
|
|
+ tvShow = queueItem.series
|
|
+ episode = queueItem.episode
|
|
try:
|
|
sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
|
|
except TypeError as e:
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index f2f28f2..995ef92 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -272,6 +272,7 @@ class SonarrEpisode(NamedTuple):
|
|
sceneAbsoluteEpisodeNumber: int = None
|
|
sceneEpisodeNumber: int = None
|
|
sceneSeasonNumber: int = None
|
|
+ series: SonarrTVShow = None
|
|
|
|
|
|
class SonarrQueue(NamedTuple):
|
|
@@ -293,6 +294,8 @@ class SonarrQueue(NamedTuple):
|
|
seriesId: int = None
|
|
errorMessage: str = None
|
|
outputPath: str = None
|
|
+ series: SonarrTVShow = None
|
|
+ episode: SonarrEpisode = None
|
|
|
|
|
|
# Radarr
|
|
@@ -358,6 +361,7 @@ class RadarrQueue(NamedTuple):
|
|
estimatedCompletionTime: str = None
|
|
errorMessage: str = None
|
|
outputPath: str = None
|
|
+ movie: RadarrMovie = None
|
|
|
|
|
|
# Sickchill
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 463f37e2868e6a8f55233c43e0d1986c0c6fcab2 Mon Sep 17 00:00:00 2001
|
|
From: Robin Dadswell <19610103+RobinDadswell@users.noreply.github.com>
|
|
Date: Mon, 17 Jan 2022 16:10:18 +0000
|
|
Subject: [PATCH 16/65] Fixed: Sonarr Data pull issues (#222)
|
|
|
|
---
|
|
varken/sonarr.py | 5 +++--
|
|
1 file changed, 3 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index f89b1df..8439142 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -68,7 +68,8 @@ class SonarrAPI(object):
|
|
downloaded = 0
|
|
if query == "Missing":
|
|
if episode.monitored and not downloaded:
|
|
- missing.append((tvShow.title, downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
|
|
+ missing.append((tvShow['title'], downloaded, sxe, episode.title,
|
|
+ episode.airDateUtc, episode.seriesId))
|
|
else:
|
|
air_days.append((tvShow.title, downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
|
|
|
|
@@ -149,7 +150,7 @@ class SonarrAPI(object):
|
|
else:
|
|
protocol_id = 0
|
|
|
|
- queue.append((tvShow.title, episode.title, queueItem.protocol.upper(),
|
|
+ queue.append((tvShow['title'], episode.title, queueItem.protocol.upper(),
|
|
protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name']))
|
|
|
|
for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue:
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 7a4cf59e7c19480e7629cc0ec61efe9f78df5e1e Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Mon, 17 Jan 2022 11:22:55 -0500
|
|
Subject: [PATCH 17/65] Fix Sonarrr calendar
|
|
|
|
---
|
|
varken/sonarr.py | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index 8439142..41b4dbe 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -71,7 +71,7 @@ class SonarrAPI(object):
|
|
missing.append((tvShow['title'], downloaded, sxe, episode.title,
|
|
episode.airDateUtc, episode.seriesId))
|
|
else:
|
|
- air_days.append((tvShow.title, downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
|
|
+ air_days.append((tvShow['title'], downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId))
|
|
|
|
for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing):
|
|
hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 5e8c8eb5ce87080bb94273c61a1c8c40dd74574a Mon Sep 17 00:00:00 2001
|
|
From: Stewart Thomson <stewartthomson3@gmail.com>
|
|
Date: Mon, 17 Jan 2022 22:10:54 -0500
|
|
Subject: [PATCH 18/65] Update lidarr structure (#225)
|
|
|
|
Added missing arguments to Lidarr structure
|
|
|
|
Fixes #223
|
|
---
|
|
varken/structures.py | 2 ++
|
|
1 file changed, 2 insertions(+)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 995ef92..90c8b13 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -608,7 +608,9 @@ class LidarrQueue(NamedTuple):
|
|
sizeleft: float = None
|
|
status: str = None
|
|
trackedDownloadStatus: str = None
|
|
+ trackedDownloadState: str = None
|
|
statusMessages: list = None
|
|
+ errorMessage: str = None
|
|
downloadId: str = None
|
|
protocol: str = None
|
|
downloadClient: str = None
|
|
--
|
|
GitLab
|
|
|
|
|
|
From 08c49698a7e3b82d9f7a60727b6f6340f128cac5 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 13:28:12 -0500
|
|
Subject: [PATCH 19/65] Clean up request totals. Upstream change
|
|
sct/overseerr#2426
|
|
|
|
---
|
|
Varken.py | 3 --
|
|
data/varken.example.ini | 2 --
|
|
varken/iniparser.py | 8 -----
|
|
varken/overseerr.py | 68 +++++------------------------------------
|
|
varken/structures.py | 4 +++
|
|
5 files changed, 12 insertions(+), 73 deletions(-)
|
|
|
|
diff --git a/Varken.py b/Varken.py
|
|
index fedb080..6c17fca 100644
|
|
--- a/Varken.py
|
|
+++ b/Varken.py
|
|
@@ -160,9 +160,6 @@ if __name__ == "__main__":
|
|
if CONFIG.overseerr_enabled:
|
|
for server in CONFIG.overseerr_servers:
|
|
OVERSEER = OverseerrAPI(server, DBMANAGER)
|
|
- if server.get_request_total_counts:
|
|
- at_time = schedule.every(server.request_total_run_seconds).seconds
|
|
- at_time.do(thread, OVERSEER.get_total_requests).tag("overseerr-{}-get_total_requests".format(server.id))
|
|
if server.get_request_status_counts:
|
|
at_time = schedule.every(server.request_status_run_seconds).seconds
|
|
at_time.do(thread, OVERSEER.get_request_status_counts).tag("overseerr-{}-get_request_status_counts"
|
|
diff --git a/data/varken.example.ini b/data/varken.example.ini
|
|
index e5eb650..a4c7c9b 100644
|
|
--- a/data/varken.example.ini
|
|
+++ b/data/varken.example.ini
|
|
@@ -101,8 +101,6 @@ url = overseerr.domain.tld
|
|
apikey = xxxxxxxxxxxxxxxx
|
|
ssl = false
|
|
verify_ssl = false
|
|
-get_request_total_counts = true
|
|
-request_total_run_seconds = 300
|
|
get_request_status_counts = true
|
|
request_status_run_seconds = 300
|
|
get_latest_requests = true
|
|
diff --git a/varken/iniparser.py b/varken/iniparser.py
|
|
index 4db95f1..a63e777 100644
|
|
--- a/varken/iniparser.py
|
|
+++ b/varken/iniparser.py
|
|
@@ -297,12 +297,6 @@ class INIParser(object):
|
|
get_latest_requests = boolcheck(env.get(
|
|
f'VRKN_{envsection}_GET_LATEST_REQUESTS',
|
|
self.config.get(section, 'get_latest_requests')))
|
|
- num_latest_requests_to_fetch = int(env.get(
|
|
- f'VRKN_{envsection}_NUM_LATEST_REQUESTS',
|
|
- self.config.getint(section, 'num_latest_requests_to_fetch')))
|
|
- num_latest_requests_seconds = int(env.get(
|
|
- f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS',
|
|
- self.config.getint(section, 'num_latest_requests_seconds')))
|
|
get_request_total_counts = boolcheck(env.get(
|
|
f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS',
|
|
self.config.get(section, 'get_request_total_counts')))
|
|
@@ -318,8 +312,6 @@ class INIParser(object):
|
|
|
|
server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey,
|
|
verify_ssl=verify_ssl, get_latest_requests=get_latest_requests,
|
|
- num_latest_requests_to_fetch=num_latest_requests_to_fetch,
|
|
- num_latest_requests_seconds=num_latest_requests_seconds,
|
|
get_request_total_counts=get_request_total_counts,
|
|
request_total_run_seconds=request_total_run_seconds,
|
|
get_request_status_counts=get_request_status_counts,
|
|
diff --git a/varken/overseerr.py b/varken/overseerr.py
|
|
index 55e8880..d1a74b7 100644
|
|
--- a/varken/overseerr.py
|
|
+++ b/varken/overseerr.py
|
|
@@ -18,63 +18,6 @@ class OverseerrAPI(object):
|
|
def __repr__(self):
|
|
return f"<overseerr-{self.server.id}>"
|
|
|
|
- def get_total_requests(self):
|
|
- now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
- endpoint = '/api/v1/request?take=200&filter=all&sort=added'
|
|
-
|
|
- req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
|
|
- get_req = connection_handler(self.session, req, self.server.verify_ssl) or []
|
|
-
|
|
- if not any([get_req]):
|
|
- self.logger.error('No json replies. Discarding job')
|
|
- return
|
|
-
|
|
- tv_requests = []
|
|
- movie_requests = []
|
|
-
|
|
- for result in get_req['results']:
|
|
- if result['type'] == 'tv':
|
|
- try:
|
|
- tv_requests.append(OverseerrRequest(**result))
|
|
- except TypeError as e:
|
|
- self.logger.error('TypeError has occurred : %s while creating OverseerrRequest structure for show. '
|
|
- 'data attempted is: %s', e, result)
|
|
-
|
|
- if result['type'] == 'movie':
|
|
- try:
|
|
- movie_requests.append(OverseerrRequest(**result))
|
|
- except TypeError as e:
|
|
- self.logger.error('TypeError has occurred : %s while creating OverseerrRequest \
|
|
- structure for movie. '
|
|
- 'data attempted is: %s', e, result)
|
|
-
|
|
- if tv_requests:
|
|
- tv_request_count = len(tv_requests)
|
|
-
|
|
- if movie_requests:
|
|
- movie_request_count = len(movie_requests)
|
|
-
|
|
- influx_payload = [
|
|
- {
|
|
- "measurement": "Overseerr",
|
|
- "tags": {
|
|
- "type": "Request_Totals",
|
|
- "server": self.server.id
|
|
- },
|
|
- "time": now,
|
|
- "fields": {
|
|
- "total": movie_request_count + tv_request_count,
|
|
- "movies": movie_request_count,
|
|
- "tv": tv_request_count
|
|
- }
|
|
- }
|
|
- ]
|
|
-
|
|
- if influx_payload:
|
|
- self.dbmanager.write_points(influx_payload)
|
|
- else:
|
|
- self.logger.debug("Empty dataset for overseerr module. Discarding...")
|
|
-
|
|
def get_request_status_counts(self):
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
endpoint = '/api/v1/request/count'
|
|
@@ -97,7 +40,12 @@ class OverseerrAPI(object):
|
|
"pending": requests.pending,
|
|
"approved": requests.approved,
|
|
"processing": requests.processing,
|
|
- "available": requests.available
|
|
+ "available": requests.available,
|
|
+ "total": requests.total,
|
|
+ "movies": requests.movie,
|
|
+ "tv": requests.tv,
|
|
+ "declined": requests.declined
|
|
+
|
|
}
|
|
}
|
|
]
|
|
@@ -139,8 +87,8 @@ class OverseerrAPI(object):
|
|
"request_type": 0,
|
|
"status": get_tv_req['mediaInfo']['status'],
|
|
"title": get_tv_req['name'],
|
|
- "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['plexUsername'],
|
|
- "requested_date": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['createdAt']
|
|
+ "requested_date": get_tv_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
+ "requested_date": get_movie_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
},
|
|
"time": now,
|
|
"fields": {
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 90c8b13..2d1ecb4 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -212,6 +212,10 @@ class OverseerrRequestCounts(NamedTuple):
|
|
approved: int = None
|
|
processing: int = None
|
|
available: int = None
|
|
+ total: int = None
|
|
+ movie: int = None
|
|
+ tv: int = None
|
|
+ declined: int = None
|
|
|
|
|
|
# Sonarr
|
|
--
|
|
GitLab
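Note: the request-count payload built in this patch follows the influxdb-python point format. A minimal standalone sketch of writing such a point (connection details, tag values and the sample numbers are illustrative assumptions; Varken itself routes writes through its DBManager wrapper):

from datetime import datetime, timezone
from influxdb import InfluxDBClient

# Hypothetical connection details; Varken reads these from varken.ini instead.
client = InfluxDBClient(host='localhost', port=8086, database='varken')

point = [{
    "measurement": "Overseerr",
    "tags": {"type": "Request_Counts", "server": 1},  # tag values assumed
    "time": datetime.now(timezone.utc).astimezone().isoformat(),
    "fields": {"pending": 2, "approved": 10, "processing": 1, "available": 9,
               "total": 22, "movies": 14, "tv": 8, "declined": 0}
}]

# write_points accepts a list of point dicts shaped like the influx_payload above.
client.write_points(point)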
From 756b89bc0389749716d29a32a16c5f81e303d63c Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 13:29:12 -0500
|
|
Subject: [PATCH 20/65] Cleanup blank space
|
|
|
|
---
|
|
varken/overseerr.py | 1 -
|
|
1 file changed, 1 deletion(-)
|
|
|
|
diff --git a/varken/overseerr.py b/varken/overseerr.py
|
|
index d1a74b7..4ffd9e1 100644
|
|
--- a/varken/overseerr.py
|
|
+++ b/varken/overseerr.py
|
|
@@ -45,7 +45,6 @@ class OverseerrAPI(object):
|
|
"movies": requests.movie,
|
|
"tv": requests.tv,
|
|
"declined": requests.declined
|
|
-
|
|
}
|
|
}
|
|
]
|
|
--
|
|
GitLab
From 908cfb15a00fc852b482af20e53f9e671d984459 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 13:31:22 -0500
|
|
Subject: [PATCH 21/65] Fix requested_date syntax.
|
|
|
|
---
|
|
varken/overseerr.py | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/varken/overseerr.py b/varken/overseerr.py
|
|
index 4ffd9e1..cbbdcef 100644
|
|
--- a/varken/overseerr.py
|
|
+++ b/varken/overseerr.py
|
|
@@ -86,7 +86,7 @@ class OverseerrAPI(object):
|
|
"request_type": 0,
|
|
"status": get_tv_req['mediaInfo']['status'],
|
|
"title": get_tv_req['name'],
|
|
- "requested_date": get_tv_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
+ "requested_date": get_tv_req['mediaInfo']['requests'][0]['media']['createdAt'],
|
|
"requested_date": get_movie_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
},
|
|
"time": now,
|
|
--
|
|
GitLab
From e4b9926f68a38923858cb471a9d9299f9163220f Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 13:36:07 -0500
|
|
Subject: [PATCH 22/65] Fix requested_date for Overseerr tv and movie
|
|
|
|
---
|
|
varken/overseerr.py | 6 +++---
|
|
1 file changed, 3 insertions(+), 3 deletions(-)
|
|
|
|
diff --git a/varken/overseerr.py b/varken/overseerr.py
|
|
index cbbdcef..a0da164 100644
|
|
--- a/varken/overseerr.py
|
|
+++ b/varken/overseerr.py
|
|
@@ -86,8 +86,8 @@ class OverseerrAPI(object):
|
|
"request_type": 0,
|
|
"status": get_tv_req['mediaInfo']['status'],
|
|
"title": get_tv_req['name'],
|
|
- "requested_date": get_tv_req['mediaInfo']['requests'][0]['media']['createdAt'],
|
|
- "requested_date": get_movie_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
+ "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['plexUsername'],
|
|
+ "requested_date": get_tv_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
},
|
|
"time": now,
|
|
"fields": {
|
|
@@ -114,7 +114,7 @@ class OverseerrAPI(object):
|
|
"status": get_movie_req['mediaInfo']['status'],
|
|
"title": get_movie_req['title'],
|
|
"requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['plexUsername'],
|
|
- "requested_date": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['createdAt']
|
|
+ "requested_date": get_movie_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
},
|
|
"time": now,
|
|
"fields": {
|
|
--
|
|
GitLab
From d1b47e0bd927421c8a9bcbb8e2f55c28c93308bd Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 13:55:15 -0500
|
|
Subject: [PATCH 23/65] Fix overseerr config references
|
|
|
|
---
|
|
Varken.py | 4 ++--
|
|
data/varken.example.ini | 4 ++--
|
|
2 files changed, 4 insertions(+), 4 deletions(-)
|
|
|
|
diff --git a/Varken.py b/Varken.py
|
|
index 6c17fca..50edcf7 100644
|
|
--- a/Varken.py
|
|
+++ b/Varken.py
|
|
@@ -160,8 +160,8 @@ if __name__ == "__main__":
|
|
if CONFIG.overseerr_enabled:
|
|
for server in CONFIG.overseerr_servers:
|
|
OVERSEER = OverseerrAPI(server, DBMANAGER)
|
|
- if server.get_request_status_counts:
|
|
- at_time = schedule.every(server.request_status_run_seconds).seconds
|
|
+ if server.get_request_total_counts:
|
|
+ at_time = schedule.every(server.request_total_run_seconds).seconds
|
|
at_time.do(thread, OVERSEER.get_request_status_counts).tag("overseerr-{}-get_request_status_counts"
|
|
.format(server.id))
|
|
if server.get_latest_requests:
|
|
diff --git a/data/varken.example.ini b/data/varken.example.ini
|
|
index a4c7c9b..c816716 100644
|
|
--- a/data/varken.example.ini
|
|
+++ b/data/varken.example.ini
|
|
@@ -101,8 +101,8 @@ url = overseerr.domain.tld
|
|
apikey = xxxxxxxxxxxxxxxx
|
|
ssl = false
|
|
verify_ssl = false
|
|
-get_request_status_counts = true
|
|
-request_status_run_seconds = 300
|
|
+get_request_total_counts = true
|
|
+request_total_run_seconds = 300
|
|
get_latest_requests = true
|
|
num_latest_requests_to_fetch = 10
|
|
num_latest_requests_seconds = 300
|
|
--
|
|
GitLab
From 442b518cede1a894db8db1dbcd818b4ff570b80e Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 14:05:24 -0500
|
|
Subject: [PATCH 24/65] Fix overseerr structures
|
|
|
|
---
|
|
Varken.py | 4 ++--
|
|
data/varken.example.ini | 4 ++--
|
|
varken/overseerr.py | 2 +-
|
|
varken/structures.py | 2 --
|
|
4 files changed, 5 insertions(+), 7 deletions(-)
|
|
|
|
diff --git a/Varken.py b/Varken.py
|
|
index 50edcf7..8d2475c 100644
|
|
--- a/Varken.py
|
|
+++ b/Varken.py
|
|
@@ -162,8 +162,8 @@ if __name__ == "__main__":
|
|
OVERSEER = OverseerrAPI(server, DBMANAGER)
|
|
if server.get_request_total_counts:
|
|
at_time = schedule.every(server.request_total_run_seconds).seconds
|
|
- at_time.do(thread, OVERSEER.get_request_status_counts).tag("overseerr-{}-get_request_status_counts"
|
|
- .format(server.id))
|
|
+ at_time.do(thread, OVERSEER.get_request_counts).tag("overseerr-{}-get_request_counts"
|
|
+ .format(server.id))
|
|
if server.get_latest_requests:
|
|
at_time = schedule.every(server.num_latest_requests_seconds).seconds
|
|
at_time.do(thread, OVERSEER.get_latest_requests).tag("overseerr-{}-get_latest_requests"
|
|
diff --git a/data/varken.example.ini b/data/varken.example.ini
|
|
index c816716..b32eab6 100644
|
|
--- a/data/varken.example.ini
|
|
+++ b/data/varken.example.ini
|
|
@@ -102,10 +102,10 @@ apikey = xxxxxxxxxxxxxxxx
|
|
ssl = false
|
|
verify_ssl = false
|
|
get_request_total_counts = true
|
|
-request_total_run_seconds = 300
|
|
+request_total_run_seconds = 30
|
|
get_latest_requests = true
|
|
num_latest_requests_to_fetch = 10
|
|
-num_latest_requests_seconds = 300
|
|
+num_latest_requests_seconds = 30
|
|
|
|
[sickchill-1]
|
|
url = sickchill.domain.tld:8081
|
|
diff --git a/varken/overseerr.py b/varken/overseerr.py
|
|
index a0da164..997b52d 100644
|
|
--- a/varken/overseerr.py
|
|
+++ b/varken/overseerr.py
|
|
@@ -18,7 +18,7 @@ class OverseerrAPI(object):
|
|
def __repr__(self):
|
|
return f"<overseerr-{self.server.id}>"
|
|
|
|
- def get_request_status_counts(self):
|
|
+ def get_request_counts(self):
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
endpoint = '/api/v1/request/count'
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 2d1ecb4..8e58417 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -64,8 +64,6 @@ class OverseerrServer(NamedTuple):
|
|
verify_ssl: bool = False
|
|
get_request_total_counts: bool = False
|
|
request_total_run_seconds: int = 30
|
|
- get_request_status_counts: bool = False
|
|
- request_status_run_seconds: int = 30
|
|
get_latest_requests: bool = False
|
|
num_latest_requests_to_fetch: int = 10
|
|
num_latest_requests_seconds: int = 30
|
|
--
|
|
GitLab
From e2920029eda79586d26c14860ee2c87db762bfe1 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 14:20:43 -0500
|
|
Subject: [PATCH 25/65] Update iniparser to accommodate changes to config
|
|
structure
|
|
|
|
---
|
|
varken/iniparser.py | 25 +++++++++++++------------
|
|
1 file changed, 13 insertions(+), 12 deletions(-)
|
|
|
|
diff --git a/varken/iniparser.py b/varken/iniparser.py
|
|
index a63e777..53ddcd2 100644
|
|
--- a/varken/iniparser.py
|
|
+++ b/varken/iniparser.py
|
|
@@ -294,28 +294,29 @@ class INIParser(object):
|
|
issue_status_run_seconds=issue_status_run_seconds)
|
|
|
|
if service == 'overseerr':
|
|
- get_latest_requests = boolcheck(env.get(
|
|
- f'VRKN_{envsection}_GET_LATEST_REQUESTS',
|
|
- self.config.get(section, 'get_latest_requests')))
|
|
get_request_total_counts = boolcheck(env.get(
|
|
f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS',
|
|
self.config.get(section, 'get_request_total_counts')))
|
|
request_total_run_seconds = int(env.get(
|
|
f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS',
|
|
self.config.getint(section, 'request_total_run_seconds')))
|
|
- get_request_status_counts = boolcheck(env.get(
|
|
- f'VRKN_{envsection}_GET_REQUEST_STATUS_COUNTS',
|
|
- self.config.get(section, 'get_request_status_counts')))
|
|
- request_status_run_seconds = int(env.get(
|
|
- f'VRKN_{envsection}_REQUEST_STATUS_RUN_SECONDS',
|
|
- self.config.getint(section, 'request_status_run_seconds')))
|
|
+ get_latest_requests = boolcheck(env.get(
|
|
+ f'VRKN_{envsection}_GET_LATEST_REQUESTS',
|
|
+ self.config.get(section, 'get_latest_requests')))
|
|
+ num_latest_requests_to_fetch = boolcheck(env.get(
|
|
+ f'VRKN_{envsection}_GET_LATEST_REQUESTS_TO_FETCH',
|
|
+ self.config.get(section, 'num_latest_requests_to_fetch')))
|
|
+ num_latest_requests_seconds = int(env.get(
|
|
+ f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS',
|
|
+ self.config.getint(section, 'num_latest_requests_seconds')))
|
|
|
|
server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey,
|
|
- verify_ssl=verify_ssl, get_latest_requests=get_latest_requests,
|
|
+ verify_ssl=verify_ssl,
|
|
get_request_total_counts=get_request_total_counts,
|
|
request_total_run_seconds=request_total_run_seconds,
|
|
- get_request_status_counts=get_request_status_counts,
|
|
- request_status_run_seconds=request_status_run_seconds)
|
|
+ get_latest_requests=get_latest_requests,
|
|
+ num_latest_requests_to_fetch=num_latest_requests_to_fetch,
|
|
+ num_latest_requests_seconds=num_latest_requests_seconds)
|
|
|
|
if service == 'sickchill':
|
|
get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING',
|
|
--
|
|
GitLab
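Note: the pattern in this patch (env.get(...) wrapping a ConfigParser lookup) lets a VRKN_* environment variable override the INI value. A minimal standalone sketch of that lookup order, using a hypothetical section and key:

from configparser import ConfigParser
from os import environ as env

config = ConfigParser()
config.read_string("[overseerr-1]\nrequest_total_run_seconds = 300\n")

section = 'overseerr-1'
envsection = 'OVERSEERR_1'

# The environment variable wins when it is set; otherwise the INI value is used.
request_total_run_seconds = int(env.get(
    f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS',
    config.getint(section, 'request_total_run_seconds')))

print(request_total_run_seconds)  # 300 unless the env var overrides it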
From 885359986c80ef6fa760daff20be6fca5f9baa68 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 15:17:19 -0500
|
|
Subject: [PATCH 26/65] Cleanup overseerr data collection
|
|
|
|
---
|
|
Varken.py | 4 ++--
|
|
varken/iniparser.py | 8 ++------
|
|
varken/overseerr.py | 10 +++++-----
|
|
varken/structures.py | 23 +++--------------------
|
|
4 files changed, 12 insertions(+), 33 deletions(-)
|
|
|
|
diff --git a/Varken.py b/Varken.py
|
|
index 8d2475c..609cc85 100644
|
|
--- a/Varken.py
|
|
+++ b/Varken.py
|
|
@@ -164,7 +164,7 @@ if __name__ == "__main__":
|
|
at_time = schedule.every(server.request_total_run_seconds).seconds
|
|
at_time.do(thread, OVERSEER.get_request_counts).tag("overseerr-{}-get_request_counts"
|
|
.format(server.id))
|
|
- if server.get_latest_requests:
|
|
+ if server.num_latest_requests_to_fetch > 0:
|
|
at_time = schedule.every(server.num_latest_requests_seconds).seconds
|
|
at_time.do(thread, OVERSEER.get_latest_requests).tag("overseerr-{}-get_latest_requests"
|
|
.format(server.id))
|
|
@@ -184,7 +184,7 @@ if __name__ == "__main__":
|
|
|
|
# Run all on startup
|
|
SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled,
|
|
- CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled]
|
|
+ CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled, CONFIG.overseerr_enabled]
|
|
if not [enabled for enabled in SERVICES_ENABLED if enabled]:
|
|
vl.logger.error("All services disabled. Exiting")
|
|
exit(1)
|
|
diff --git a/varken/iniparser.py b/varken/iniparser.py
|
|
index 53ddcd2..bcb3b37 100644
|
|
--- a/varken/iniparser.py
|
|
+++ b/varken/iniparser.py
|
|
@@ -300,12 +300,9 @@ class INIParser(object):
|
|
request_total_run_seconds = int(env.get(
|
|
f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS',
|
|
self.config.getint(section, 'request_total_run_seconds')))
|
|
- get_latest_requests = boolcheck(env.get(
|
|
- f'VRKN_{envsection}_GET_LATEST_REQUESTS',
|
|
- self.config.get(section, 'get_latest_requests')))
|
|
- num_latest_requests_to_fetch = boolcheck(env.get(
|
|
+ num_latest_requests_to_fetch = int(env.get(
|
|
f'VRKN_{envsection}_GET_LATEST_REQUESTS_TO_FETCH',
|
|
- self.config.get(section, 'num_latest_requests_to_fetch')))
|
|
+ self.config.getint(section, 'num_latest_requests_to_fetch')))
|
|
num_latest_requests_seconds = int(env.get(
|
|
f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS',
|
|
self.config.getint(section, 'num_latest_requests_seconds')))
|
|
@@ -314,7 +311,6 @@ class INIParser(object):
|
|
verify_ssl=verify_ssl,
|
|
get_request_total_counts=get_request_total_counts,
|
|
request_total_run_seconds=request_total_run_seconds,
|
|
- get_latest_requests=get_latest_requests,
|
|
num_latest_requests_to_fetch=num_latest_requests_to_fetch,
|
|
num_latest_requests_seconds=num_latest_requests_seconds)
|
|
|
|
diff --git a/varken/overseerr.py b/varken/overseerr.py
|
|
index 997b52d..248f9df 100644
|
|
--- a/varken/overseerr.py
|
|
+++ b/varken/overseerr.py
|
|
@@ -3,7 +3,7 @@ from requests import Session, Request
|
|
from datetime import datetime, timezone
|
|
|
|
from varken.helpers import connection_handler, hashit
|
|
-from varken.structures import OverseerrRequest, OverseerrRequestCounts
|
|
+from varken.structures import OverseerrRequestCounts
|
|
|
|
|
|
class OverseerrAPI(object):
|
|
@@ -86,8 +86,8 @@ class OverseerrAPI(object):
|
|
"request_type": 0,
|
|
"status": get_tv_req['mediaInfo']['status'],
|
|
"title": get_tv_req['name'],
|
|
- "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['plexUsername'],
|
|
- "requested_date": get_tv_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
+ "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['displayName'],
|
|
+ "requested_date": get_tv_req['mediaInfo']['requests'][0]['createdAt']
|
|
},
|
|
"time": now,
|
|
"fields": {
|
|
@@ -113,8 +113,8 @@ class OverseerrAPI(object):
|
|
"request_type": 1,
|
|
"status": get_movie_req['mediaInfo']['status'],
|
|
"title": get_movie_req['title'],
|
|
- "requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['plexUsername'],
|
|
- "requested_date": get_movie_req['mediaInfo']['requests'][0]['media']['createdAt']
|
|
+ "requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['displayName'],
|
|
+ "requested_date": get_movie_req['mediaInfo']['requests'][0]['createdAt']
|
|
},
|
|
"time": now,
|
|
"fields": {
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 8e58417..17d3abc 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -64,7 +64,6 @@ class OverseerrServer(NamedTuple):
|
|
verify_ssl: bool = False
|
|
get_request_total_counts: bool = False
|
|
request_total_run_seconds: int = 30
|
|
- get_latest_requests: bool = False
|
|
num_latest_requests_to_fetch: int = 10
|
|
num_latest_requests_seconds: int = 30
|
|
|
|
@@ -186,25 +185,6 @@ class OmbiMovieRequest(NamedTuple):
|
|
|
|
|
|
# Overseerr
|
|
-class OverseerrRequest(NamedTuple):
|
|
- id: int = None
|
|
- status: int = None
|
|
- createdAt: str = None
|
|
- updatedAt: str = None
|
|
- type: str = None
|
|
- is4k: bool = None
|
|
- serverId: int = None
|
|
- profileId: int = None
|
|
- rootFolder: str = None
|
|
- languageProfileId: int = None
|
|
- tags: list = None
|
|
- media: dict = None
|
|
- seasons: list = None
|
|
- modifiedBy: dict = None
|
|
- requestedBy: dict = None
|
|
- seasonCount: int = None
|
|
-
|
|
-
|
|
class OverseerrRequestCounts(NamedTuple):
|
|
pending: int = None
|
|
approved: int = None
|
|
@@ -298,6 +278,8 @@ class SonarrQueue(NamedTuple):
|
|
outputPath: str = None
|
|
series: SonarrTVShow = None
|
|
episode: SonarrEpisode = None
|
|
+ timeleft: int = None
|
|
+ estimatedCompletionTime: int = None
|
|
|
|
|
|
# Radarr
|
|
@@ -364,6 +346,7 @@ class RadarrQueue(NamedTuple):
|
|
errorMessage: str = None
|
|
outputPath: str = None
|
|
movie: RadarrMovie = None
|
|
+ timeleft: str = None
|
|
|
|
|
|
# Sickchill
|
|
--
|
|
GitLab
From 3c70ecbd0a4047237aba9f0bd2fbea000ba7a394 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 15:18:56 -0500
|
|
Subject: [PATCH 27/65] Fix SERVICES_ENABLED in Varken.py to accommodate
|
|
overseerr
|
|
|
|
---
|
|
Varken.py | 3 ++-
|
|
1 file changed, 2 insertions(+), 1 deletion(-)
|
|
|
|
diff --git a/Varken.py b/Varken.py
|
|
index 609cc85..c494eaf 100644
|
|
--- a/Varken.py
|
|
+++ b/Varken.py
|
|
@@ -184,7 +184,8 @@ if __name__ == "__main__":
|
|
|
|
# Run all on startup
|
|
SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled,
|
|
- CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled, CONFIG.overseerr_enabled]
|
|
+ CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled,
|
|
+ CONFIG.overseerr_enabled]
|
|
if not [enabled for enabled in SERVICES_ENABLED if enabled]:
|
|
vl.logger.error("All services disabled. Exiting")
|
|
exit(1)
|
|
--
|
|
GitLab
From 707c4a28fe04c12b53362ff59368c9875ecc6763 Mon Sep 17 00:00:00 2001
|
|
From: Robin Dadswell <19610103+RobinDadswell@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 21:53:28 +0000
|
|
Subject: [PATCH 28/65] Fixed: Sonarr/Lidarr Queues (#227)
|
|
|
|
---
|
|
varken/radarr.py | 2 +-
|
|
varken/sonarr.py | 10 +++++-----
|
|
2 files changed, 6 insertions(+), 6 deletions(-)
|
|
|
|
diff --git a/varken/radarr.py b/varken/radarr.py
|
|
index 3a153ff..b04777f 100644
|
|
--- a/varken/radarr.py
|
|
+++ b/varken/radarr.py
|
|
@@ -107,7 +107,7 @@ class RadarrAPI(object):
|
|
return
|
|
|
|
for queue_item in download_queue:
|
|
- movie = queue_item.movie
|
|
+ movie = RadarrMovie(**queue_item.movie)
|
|
|
|
name = f'{movie.title} ({movie.year})'
|
|
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index 41b4dbe..685f56e 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -2,7 +2,7 @@ from logging import getLogger
|
|
from requests import Session, Request
|
|
from datetime import datetime, timezone, date, timedelta
|
|
|
|
-from varken.structures import SonarrEpisode, SonarrQueue, QueuePages
|
|
+from varken.structures import SonarrEpisode, SonarrTVShow, SonarrQueue, QueuePages
|
|
from varken.helpers import hashit, connection_handler
|
|
|
|
|
|
@@ -102,7 +102,7 @@ class SonarrAPI(object):
|
|
endpoint = '/api/v3/queue'
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
pageSize = 250
|
|
- params = {'pageSize': pageSize}
|
|
+ params = {'pageSize': pageSize, 'includeEpisode': True, 'includeSeries': True}
|
|
queueResponse = []
|
|
queue = []
|
|
|
|
@@ -136,8 +136,8 @@ class SonarrAPI(object):
|
|
return
|
|
|
|
for queueItem in download_queue:
|
|
- tvShow = queueItem.series
|
|
- episode = queueItem.episode
|
|
+ tvShow = SonarrTVShow(**queueItem.series)
|
|
+ episode = SonarrEpisode(**queueItem.episode)
|
|
try:
|
|
sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
|
|
except TypeError as e:
|
|
@@ -150,7 +150,7 @@ class SonarrAPI(object):
|
|
else:
|
|
protocol_id = 0
|
|
|
|
- queue.append((tvShow['title'], episode.title, queueItem.protocol.upper(),
|
|
+ queue.append((tvShow.title, episode.title, queueItem.protocol.upper(),
|
|
protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name']))
|
|
|
|
for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue:
|
|
--
|
|
GitLab
From 06c4777a34a1c7858a995e89e3df281821122064 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Wed, 19 Jan 2022 17:01:12 -0500
|
|
Subject: [PATCH 29/65] Change sonarr queue structures to str
|
|
|
|
---
|
|
varken/structures.py | 4 ++--
|
|
1 file changed, 2 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 17d3abc..4310909 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -278,8 +278,8 @@ class SonarrQueue(NamedTuple):
|
|
outputPath: str = None
|
|
series: SonarrTVShow = None
|
|
episode: SonarrEpisode = None
|
|
- timeleft: int = None
|
|
- estimatedCompletionTime: int = None
|
|
+ timeleft: str = None
|
|
+ estimatedCompletionTime: str = None
|
|
|
|
|
|
# Radarr
|
|
--
|
|
GitLab
From 62749f20f950c88f4d593980c114da19b59295f1 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Thu, 20 Jan 2022 23:04:19 -0500
|
|
Subject: [PATCH 30/65] Fixed: Multipage queue fetching
|
|
|
|
---
|
|
varken/radarr.py | 4 ++--
|
|
1 file changed, 2 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/varken/radarr.py b/varken/radarr.py
|
|
index b04777f..5292d4f 100644
|
|
--- a/varken/radarr.py
|
|
+++ b/varken/radarr.py
|
|
@@ -73,7 +73,7 @@ class RadarrAPI(object):
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
influx_payload = []
|
|
pageSize = 250
|
|
- params = {'pageSize': pageSize, 'includeMovie': True}
|
|
+ params = {'pageSize': pageSize, 'includeMovie': True, 'includeUnknownMovieItems': False}
|
|
queueResponse = []
|
|
queue = []
|
|
|
|
@@ -87,7 +87,7 @@ class RadarrAPI(object):
|
|
|
|
while response.totalRecords > response.page * response.pageSize:
|
|
page = response.page + 1
|
|
- params = {'pageSize': pageSize, 'page': page}
|
|
+ params = {'pageSize': pageSize, 'page': page, 'includeMovie': True, 'includeUnknownMovieItems': False}
|
|
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
if not get:
|
|
--
|
|
GitLab
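Note: the extra params in this patch only matter because the v3 queue endpoints are paged. A standalone sketch of the paging loop the queue collectors rely on (base URL, API key and sample params are assumptions; page/pageSize/totalRecords/records follow the v3 *arr queue schema):

from requests import Session, Request

base_url = 'http://radarr.local:7878'   # assumed for illustration
session = Session()
session.headers.update({'X-Api-Key': 'xxxxxxxxxxxxxxxx'})

endpoint = '/api/v3/queue'
pageSize = 250
params = {'pageSize': pageSize, 'includeMovie': True, 'includeUnknownMovieItems': False}

req = session.prepare_request(Request('GET', base_url + endpoint, params=params))
response = session.send(req).json()
records = list(response['records'])

# Keep requesting the next page until every record has been collected.
while response['totalRecords'] > response['page'] * response['pageSize']:
    params = {'pageSize': pageSize, 'page': response['page'] + 1,
              'includeMovie': True, 'includeUnknownMovieItems': False}
    req = session.prepare_request(Request('GET', base_url + endpoint, params=params))
    response = session.send(req).json()
    records.extend(response['records'])

print(len(records))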
From 45f9a2092b530db708d8b6c2885347a49f1a356d Mon Sep 17 00:00:00 2001
|
|
From: Stewart Thomson <stewartthomson3@gmail.com>
|
|
Date: Thu, 20 Jan 2022 23:38:02 -0500
|
|
Subject: [PATCH 31/65] Update historical tautulli import (#226)
|
|
|
|
---
|
|
utilities/historical_tautulli_import.py | 2 +-
|
|
varken/structures.py | 3 +++
|
|
2 files changed, 4 insertions(+), 1 deletion(-)
|
|
|
|
diff --git a/utilities/historical_tautulli_import.py b/utilities/historical_tautulli_import.py
|
|
index 62bd0f8..a5f7a14 100644
|
|
--- a/utilities/historical_tautulli_import.py
|
|
+++ b/utilities/historical_tautulli_import.py
|
|
@@ -41,7 +41,7 @@ if __name__ == "__main__":
|
|
DBMANAGER = DBManager(CONFIG.influx_server)
|
|
|
|
if CONFIG.tautulli_enabled:
|
|
- GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
|
|
+ GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key)
|
|
for server in CONFIG.tautulli_servers:
|
|
TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
|
|
TAUTULLI.get_historical(days=opts.days)
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 4310909..e3bd08f 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -466,6 +466,7 @@ class TautulliStream(NamedTuple):
|
|
reference_id: int = None
|
|
relay: int = None
|
|
relayed: int = None
|
|
+ row_id: int = None
|
|
section_id: str = None
|
|
secure: str = None
|
|
selected: int = None
|
|
@@ -504,6 +505,7 @@ class TautulliStream(NamedTuple):
|
|
stream_video_codec: str = None
|
|
stream_video_codec_level: str = None
|
|
stream_video_decision: str = None
|
|
+ stream_video_dynamic_range: str = None
|
|
stream_video_framerate: str = None
|
|
stream_video_full_resolution: str = None
|
|
stream_video_height: str = None
|
|
@@ -563,6 +565,7 @@ class TautulliStream(NamedTuple):
|
|
video_codec: str = None
|
|
video_codec_level: str = None
|
|
video_decision: str = None
|
|
+ video_dynamic_range: str = None
|
|
video_frame_rate: str = None
|
|
video_framerate: str = None
|
|
video_full_resolution: str = None
|
|
--
|
|
GitLab
From 518ea6c384d4195b625c79d4198e3455956a3f48 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Thu, 20 Jan 2022 23:09:12 -0500
|
|
Subject: [PATCH 32/65] Fixed: Sonarr params ordering
|
|
|
|
---
|
|
varken/sonarr.py | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index 685f56e..ef35328 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -102,7 +102,7 @@ class SonarrAPI(object):
|
|
endpoint = '/api/v3/queue'
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
pageSize = 250
|
|
- params = {'pageSize': pageSize, 'includeEpisode': True, 'includeSeries': True}
|
|
+ params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True}
|
|
queueResponse = []
|
|
queue = []
|
|
|
|
--
|
|
GitLab
From 752073d590f4a84ba96a794ff53e243be56e586e Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 21 Jan 2022 00:14:15 -0500
|
|
Subject: [PATCH 33/65] Fixed: Proper warnings for missing data in sonarr and
|
|
radarr
|
|
|
|
---
|
|
varken/radarr.py | 14 +++++++++++---
|
|
varken/sonarr.py | 7 +++++--
|
|
2 files changed, 16 insertions(+), 5 deletions(-)
|
|
|
|
diff --git a/varken/radarr.py b/varken/radarr.py
|
|
index 5292d4f..9471963 100644
|
|
--- a/varken/radarr.py
|
|
+++ b/varken/radarr.py
|
|
@@ -66,7 +66,10 @@ class RadarrAPI(object):
|
|
}
|
|
)
|
|
|
|
- self.dbmanager.write_points(influx_payload)
|
|
+ if influx_payload:
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
+ else:
|
|
+ self.logger.warning("No data to send to influx for radarr-missing instance, discarding.")
|
|
|
|
def get_queue(self):
|
|
endpoint = '/api/v3/queue'
|
|
@@ -79,6 +82,7 @@ class RadarrAPI(object):
|
|
|
|
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
+
|
|
if not get:
|
|
return
|
|
|
|
@@ -101,9 +105,10 @@ class RadarrAPI(object):
|
|
try:
|
|
download_queue.append(RadarrQueue(**queueItem))
|
|
except TypeError as e:
|
|
- self.logger.error('TypeError has occurred : %s while creating RadarrQueue structure', e)
|
|
+ self.logger.warning('TypeError has occurred : %s while creating RadarrQueue structure', e)
|
|
return
|
|
if not download_queue:
|
|
+ self.logger.warning("No data to send to influx for radarr-queue instance, discarding.")
|
|
return
|
|
|
|
for queue_item in download_queue:
|
|
@@ -141,4 +146,7 @@ class RadarrAPI(object):
|
|
}
|
|
)
|
|
|
|
- self.dbmanager.write_points(influx_payload)
|
|
+ if influx_payload:
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
+ else:
|
|
+ self.logger.warning("No data to send to influx for radarr-queue instance, discarding.")
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index ef35328..50c475a 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -95,7 +95,10 @@ class SonarrAPI(object):
|
|
}
|
|
)
|
|
|
|
- self.dbmanager.write_points(influx_payload)
|
|
+ if influx_payload:
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
+ else:
|
|
+ self.logger.warning("No data to send to influx for sonarr-calendar instance, discarding.")
|
|
|
|
def get_queue(self):
|
|
influx_payload = []
|
|
@@ -178,4 +181,4 @@ class SonarrAPI(object):
|
|
if influx_payload:
|
|
self.dbmanager.write_points(influx_payload)
|
|
else:
|
|
- self.logger.debug("No data to send to influx for sonarr instance, discarding.")
|
|
+ self.logger.warning("No data to send to influx for sonarr-queue instance, discarding.")
|
|
--
|
|
GitLab
From a60e41e6e4c9206473ae02855f65321a8adbfb5a Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 21 Jan 2022 00:31:20 -0500
|
|
Subject: [PATCH 34/65] Added: Overseerr ENVs to docker compose.
|
|
|
|
---
|
|
docker-compose.yml | 12 ++++++++++--
|
|
1 file changed, 10 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/docker-compose.yml b/docker-compose.yml
|
|
index c75e8a7..416a1bf 100644
|
|
--- a/docker-compose.yml
|
|
+++ b/docker-compose.yml
|
|
@@ -6,7 +6,7 @@ services:
|
|
influxdb:
|
|
hostname: influxdb
|
|
container_name: influxdb
|
|
- image: influxdb:1.8.4
|
|
+ image: influxdb:1.8
|
|
networks:
|
|
- internal
|
|
volumes:
|
|
@@ -101,6 +101,14 @@ services:
|
|
- VRKN_OMBI_1_REQUEST_TOTAL_RUN_SECONDS=300
|
|
- VRKN_OMBI_1_GET_ISSUE_STATUS_COUNTS=true
|
|
- VRKN_OMBI_1_ISSUE_STATUS_RUN_SECONDS=300
|
|
+ - VRKN_OVERSEERR_1_URL=ombi.domain.tld
|
|
+ - VRKN_OVERSEERR_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
+ - VRKN_OVERSEERR_1_SSL=false
|
|
+ - VRKN_OVERSEERR_1_VERIFY_SSL=false
|
|
+ - VRKN_OVERSEERR_1_GET_REQUEST_TOTAL_COUNTS=true
|
|
+ - VRKN_OVERSEERR_1_REQUEST_TOTAL_RUN_SECONDSS=30
|
|
+ - VRKN_OVERSEERR_1_GET_LATEST_REQUESTS_TO_FETCH=10
|
|
+ - VRKN_OVERSEERR_1_NUM_LATEST_REQUESTS_SECONDS=300
|
|
- VRKN_SICKCHILL_1_URL=sickchill.domain.tld:8081
|
|
- VRKN_SICKCHILL_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- VRKN_SICKCHILL_1_SSL=false
|
|
@@ -118,7 +126,7 @@ services:
|
|
- internal
|
|
ports:
|
|
- 3000:3000
|
|
- volumes:
|
|
+ volumes:
|
|
- /path/to/docker-grafana/config-folder:/config
|
|
environment:
|
|
- GF_PATHS_DATA=/config/data
|
|
--
|
|
GitLab
From 2f7f01edbbd6b5f11c66ec4ba5463da8fc22c0c3 Mon Sep 17 00:00:00 2001
|
|
From: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 21 Jan 2022 00:35:06 -0500
|
|
Subject: [PATCH 35/65] Added: Logging to empty/no data returns
|
|
|
|
---
|
|
varken/overseerr.py | 11 +++++++++--
|
|
varken/sonarr.py | 1 +
|
|
2 files changed, 10 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/varken/overseerr.py b/varken/overseerr.py
|
|
index 248f9df..4d4b96b 100644
|
|
--- a/varken/overseerr.py
|
|
+++ b/varken/overseerr.py
|
|
@@ -49,7 +49,10 @@ class OverseerrAPI(object):
|
|
}
|
|
]
|
|
|
|
- self.dbmanager.write_points(influx_payload)
|
|
+ if influx_payload:
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
+ else:
|
|
+ self.logger.warning("No data to send to influx for overseerr-request-counts instance, discarding.")
|
|
|
|
def get_latest_requests(self):
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
@@ -63,6 +66,7 @@ class OverseerrAPI(object):
|
|
|
|
# RETURN NOTHING IF NO RESULTS
|
|
if not get_latest_req:
|
|
+ self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.")
|
|
return
|
|
|
|
influx_payload = []
|
|
@@ -123,4 +127,7 @@ class OverseerrAPI(object):
|
|
}
|
|
)
|
|
|
|
- self.dbmanager.write_points(influx_payload)
|
|
+ if influx_payload:
|
|
+ self.dbmanager.write_points(influx_payload)
|
|
+ else:
|
|
+ self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.")
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index 50c475a..1e8c267 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -178,6 +178,7 @@ class SonarrAPI(object):
|
|
}
|
|
}
|
|
)
|
|
+
|
|
if influx_payload:
|
|
self.dbmanager.write_points(influx_payload)
|
|
else:
|
|
--
|
|
GitLab
From 731808544dbd54990f1000cece3e7fa13720bcfe Mon Sep 17 00:00:00 2001
|
|
From: Cameron Stephen <mail@cajs.co.uk>
|
|
Date: Mon, 7 Mar 2022 23:14:14 +0000
|
|
Subject: [PATCH 36/65] Update Sonarr & Lidarr Structs to match latest API
|
|
changes (#231)
|
|
|
|
* Add support for estimatedCompletionTime in LidarrQueue
|
|
|
|
* Add support for tvdbId in SonarrEpisode struct
|
|
---
|
|
varken/structures.py | 2 ++
|
|
1 file changed, 2 insertions(+)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index e3bd08f..bd9036b 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -255,6 +255,7 @@ class SonarrEpisode(NamedTuple):
|
|
sceneEpisodeNumber: int = None
|
|
sceneSeasonNumber: int = None
|
|
series: SonarrTVShow = None
|
|
+ tvdbId: int = None
|
|
|
|
|
|
class SonarrQueue(NamedTuple):
|
|
@@ -606,6 +607,7 @@ class LidarrQueue(NamedTuple):
|
|
outputPath: str = None
|
|
downloadForced: bool = None
|
|
id: int = None
|
|
+ estimatedCompletionTime: str = None
|
|
|
|
|
|
class LidarrAlbum(NamedTuple):
|
|
--
|
|
GitLab
From 2c81fe5f9f532eb6000b545d7802908a6559233e Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Thu, 31 Mar 2022 23:02:52 -0400
|
|
Subject: [PATCH 37/65] Fix typo in docker yml
|
|
|
|
---
|
|
docker-compose.yml | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/docker-compose.yml b/docker-compose.yml
|
|
index 416a1bf..49f7260 100644
|
|
--- a/docker-compose.yml
|
|
+++ b/docker-compose.yml
|
|
@@ -106,7 +106,7 @@ services:
|
|
- VRKN_OVERSEERR_1_SSL=false
|
|
- VRKN_OVERSEERR_1_VERIFY_SSL=false
|
|
- VRKN_OVERSEERR_1_GET_REQUEST_TOTAL_COUNTS=true
|
|
- - VRKN_OVERSEERR_1_REQUEST_TOTAL_RUN_SECONDSS=30
|
|
+ - VRKN_OVERSEERR_1_REQUEST_TOTAL_RUN_SECONDS=30
|
|
- VRKN_OVERSEERR_1_GET_LATEST_REQUESTS_TO_FETCH=10
|
|
- VRKN_OVERSEERR_1_NUM_LATEST_REQUESTS_SECONDS=300
|
|
- VRKN_SICKCHILL_1_URL=sickchill.domain.tld:8081
|
|
--
|
|
GitLab
From 5f8af8e0295daae3cdd14046881091afbea32dcd Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Thu, 31 Mar 2022 23:03:51 -0400
|
|
Subject: [PATCH 38/65] Rename example url for overseerr in docker yml
|
|
|
|
---
|
|
docker-compose.yml | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/docker-compose.yml b/docker-compose.yml
|
|
index 49f7260..26c922e 100644
|
|
--- a/docker-compose.yml
|
|
+++ b/docker-compose.yml
|
|
@@ -101,7 +101,7 @@ services:
|
|
- VRKN_OMBI_1_REQUEST_TOTAL_RUN_SECONDS=300
|
|
- VRKN_OMBI_1_GET_ISSUE_STATUS_COUNTS=true
|
|
- VRKN_OMBI_1_ISSUE_STATUS_RUN_SECONDS=300
|
|
- - VRKN_OVERSEERR_1_URL=ombi.domain.tld
|
|
+ - VRKN_OVERSEERR_1_URL=overseerr.domain.tld
|
|
- VRKN_OVERSEERR_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- VRKN_OVERSEERR_1_SSL=false
|
|
- VRKN_OVERSEERR_1_VERIFY_SSL=false
|
|
--
|
|
GitLab
From f3960d2921b5b9b6688a315528337e5a787d132a Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Thu, 21 Apr 2022 12:52:40 -0400
|
|
Subject: [PATCH 39/65] Update radarr structures to include originalLanguage
|
|
|
|
---
|
|
varken/structures.py | 1 +
|
|
1 file changed, 1 insertion(+)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index bd9036b..538d43b 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -322,6 +322,7 @@ class RadarrMovie(NamedTuple):
|
|
year: int = None
|
|
youTubeTrailerId: str = None
|
|
title: str = None
|
|
+ originalLanguage: str = None
|
|
|
|
|
|
# Radarr Queue
|
|
--
|
|
GitLab
From 7b4b44568a57cf06f4c9a9b6bb909d22944e7be4 Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Thu, 21 Apr 2022 14:55:27 -0400
|
|
Subject: [PATCH 40/65] Update radarr structures to include addOptions
|
|
|
|
---
|
|
varken/structures.py | 1 +
|
|
1 file changed, 1 insertion(+)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 538d43b..0dfd86c 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -323,6 +323,7 @@ class RadarrMovie(NamedTuple):
|
|
youTubeTrailerId: str = None
|
|
title: str = None
|
|
originalLanguage: str = None
|
|
+ addOptions: str = None
|
|
|
|
|
|
# Radarr Queue
|
|
--
|
|
GitLab
From 0a927d07338bdbfc49c5ceaed2b10c5c3eb8c5a3 Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Fri, 29 Apr 2022 15:11:57 -0400
|
|
Subject: [PATCH 41/65] Update radarr structures to include popularity
|
|
|
|
---
|
|
varken/structures.py | 1 +
|
|
1 file changed, 1 insertion(+)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index 0dfd86c..dedc28f 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -324,6 +324,7 @@ class RadarrMovie(NamedTuple):
|
|
title: str = None
|
|
originalLanguage: str = None
|
|
addOptions: str = None
|
|
+ popularity: str = None
|
|
|
|
|
|
# Radarr Queue
|
|
--
|
|
GitLab
From 817c7a5b53494325a34f9a3ae7ff76aeb070f42f Mon Sep 17 00:00:00 2001
|
|
From: MDHMatt <10845262+MDHMatt@users.noreply.github.com>
|
|
Date: Tue, 12 Jul 2022 18:36:24 +0100
|
|
Subject: [PATCH 42/65] fix(ombi): Update structures.py (#238)
|
|
|
|
---
|
|
varken/structures.py | 19 +++++++++++++++----
|
|
1 file changed, 15 insertions(+), 4 deletions(-)
|
|
|
|
diff --git a/varken/structures.py b/varken/structures.py
|
|
index dedc28f..e3ee094 100644
|
|
--- a/varken/structures.py
|
|
+++ b/varken/structures.py
|
|
@@ -149,39 +149,50 @@ class OmbiTVRequest(NamedTuple):
|
|
|
|
class OmbiMovieRequest(NamedTuple):
|
|
approved: bool = None
|
|
+ approved4K: bool = None
|
|
available: bool = None
|
|
+ available4K: bool = None
|
|
background: str = None
|
|
canApprove: bool = None
|
|
denied: bool = None
|
|
+ denied4K: None = None
|
|
deniedReason: None = None
|
|
+ deniedReason4K: None = None
|
|
digitalRelease: bool = None
|
|
digitalReleaseDate: None = None
|
|
+ has4KRequest: bool = None
|
|
id: int = None
|
|
imdbId: str = None
|
|
+ is4kRequest: bool = None
|
|
issueId: None = None
|
|
issues: None = None
|
|
+ langCode: str = None
|
|
+ languageCode: str = None
|
|
markedAsApproved: str = None
|
|
+ markedAsApproved4K: str = None
|
|
markedAsAvailable: None = None
|
|
+ markedAsAvailable4K: None = None
|
|
markedAsDenied: str = None
|
|
+ markedAsDenied4K: str = None
|
|
overview: str = None
|
|
posterPath: str = None
|
|
qualityOverride: int = None
|
|
released: bool = None
|
|
releaseDate: str = None
|
|
+ requestedByAlias: str = None
|
|
requestedDate: str = None
|
|
+ requestedDate4k: str = None
|
|
requestedUser: dict = None
|
|
requestedUserId: str = None
|
|
+ requestStatus: str = None
|
|
requestType: int = None
|
|
rootPathOverride: int = None
|
|
showSubscribe: bool = None
|
|
+ source: int = None
|
|
status: str = None
|
|
subscribed: bool = None
|
|
theMovieDbId: int = None
|
|
title: str = None
|
|
- langCode: str = None
|
|
- languageCode: str = None
|
|
- requestedByAlias: str = None
|
|
- requestStatus: str = None
|
|
|
|
|
|
# Overseerr
|
|
--
|
|
GitLab
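Note: fields keep being appended like this because each structure is built by unpacking the raw API dict, e.g. OmbiMovieRequest(**result): a key the class does not declare raises TypeError, while declaring it with a None default keeps both old and new payloads working. A minimal sketch with a hypothetical structure:

from typing import NamedTuple

class MovieRequest(NamedTuple):
    id: int = None
    title: str = None
    approved4K: bool = None   # newer field; harmless for payloads that omit it

print(MovieRequest(**{'id': 1, 'title': 'Alien'}))
print(MovieRequest(**{'id': 2, 'title': 'Aliens', 'approved4K': True}))

try:
    MovieRequest(**{'id': 3, 'brandNewKey': 'x'})
except TypeError as e:
    # This is the TypeError the collectors log when the API grows a field
    # the structure does not know about yet.
    print('TypeError:', e)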
From 2cc26c18133dd9dd1b494fc4e0dc68f6996d8141 Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Sun, 17 Jul 2022 13:00:17 -0400
|
|
Subject: [PATCH 43/65] feat(docker): remove envs from example
|
|
|
|
---
|
|
docker-compose.yml | 93 ----------------------------------------------
|
|
1 file changed, 93 deletions(-)
|
|
|
|
diff --git a/docker-compose.yml b/docker-compose.yml
|
|
index 26c922e..04bfb96 100644
|
|
--- a/docker-compose.yml
|
|
+++ b/docker-compose.yml
|
|
@@ -22,99 +22,6 @@ services:
|
|
- /path/to/docker-varken/config-folder:/config
|
|
environment:
|
|
- TZ=America/Chicago
|
|
- - VRKN_GLOBAL_SONARR_SERVER_IDS=1,2
|
|
- - VRKN_GLOBAL_RADARR_SERVER_IDS=1,2
|
|
- - VRKN_GLOBAL_LIDARR_SERVER_IDS=false
|
|
- - VRKN_GLOBAL_TAUTULLI_SERVER_IDS=1
|
|
- - VRKN_GLOBAL_OMBI_SERVER_IDS=1
|
|
- - VRKN_GLOBAL_SICKCHILL_SERVER_IDS=false
|
|
- - VRKN_GLOBAL_UNIFI_SERVER_IDS=false
|
|
- - VRKN_GLOBAL_MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx
|
|
- - VRKN_INFLUXDB_URL=influxdb.domain.tld
|
|
- - VRKN_INFLUXDB_PORT=8086
|
|
- - VRKN_INFLUXDB_SSL=false
|
|
- - VRKN_INFLUXDB_VERIFY_SSL=false
|
|
- - VRKN_INFLUXDB_USERNAME=root
|
|
- - VRKN_INFLUXDB_PASSWORD=root
|
|
- - VRKN_TAUTULLI_1_URL=tautulli.domain.tld:8181
|
|
- - VRKN_TAUTULLI_1_FALLBACK_IP=1.1.1.1
|
|
- - VRKN_TAUTULLI_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- - VRKN_TAUTULLI_1_SSL=false
|
|
- - VRKN_TAUTULLI_1_VERIFY_SSL=false
|
|
- - VRKN_TAUTULLI_1_GET_ACTIVITY=true
|
|
- - VRKN_TAUTULLI_1_GET_ACTIVITY_RUN_SECONDS=30
|
|
- - VRKN_TAUTULLI_1_GET_STATS=true
|
|
- - VRKN_TAUTULLI_1_GET_STATS_RUN_SECONDS=3600
|
|
- - VRKN_SONARR_1_URL=sonarr1.domain.tld:8989
|
|
- - VRKN_SONARR_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- - VRKN_SONARR_1_SSL=false
|
|
- - VRKN_SONARR_1_VERIFY_SSL=false
|
|
- - VRKN_SONARR_1_MISSING_DAYS=7
|
|
- - VRKN_SONARR_1_MISSING_DAYS_RUN_SECONDS=300
|
|
- - VRKN_SONARR_1_FUTURE_DAYS=1
|
|
- - VRKN_SONARR_1_FUTURE_DAYS_RUN_SECONDS=300
|
|
- - VRKN_SONARR_1_QUEUE=true
|
|
- - VRKN_SONARR_1_QUEUE_RUN_SECONDS=300
|
|
- - VRKN_SONARR_2_URL=sonarr2.domain.tld:8989
|
|
- - VRKN_SONARR_2_APIKEY=yyyyyyyyyyyyyyyy
|
|
- - VRKN_SONARR_2_SSL=false
|
|
- - VRKN_SONARR_2_VERIFY_SSL=false
|
|
- - VRKN_SONARR_2_MISSING_DAYS=7
|
|
- - VRKN_SONARR_2_MISSING_DAYS_RUN_SECONDS=300
|
|
- - VRKN_SONARR_2_FUTURE_DAYS=1
|
|
- - VRKN_SONARR_2_FUTURE_DAYS_RUN_SECONDS=300
|
|
- - VRKN_SONARR_2_QUEUE=true
|
|
- - VRKN_SONARR_2_QUEUE_RUN_SECONDS=300
|
|
- - VRKN_RADARR_1_URL=radarr1.domain.tld
|
|
- - VRKN_RADARR_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- - VRKN_RADARR_1_SSL=false
|
|
- - VRKN_RADARR_1_VERIFY_SSL=false
|
|
- - VRKN_RADARR_1_QUEUE=true
|
|
- - VRKN_RADARR_1_QUEUE_RUN_SECONDS=300
|
|
- - VRKN_RADARR_1_GET_MISSING=true
|
|
- - VRKN_RADARR_1_GET_MISSING_RUN_SECONDS=300
|
|
- - VRKN_RADARR_2_URL=radarr2.domain.tld
|
|
- - VRKN_RADARR_2_APIKEY=yyyyyyyyyyyyyyyy
|
|
- - VRKN_RADARR_2_SSL=false
|
|
- - VRKN_RADARR_2_VERIFY_SSL=false
|
|
- - VRKN_RADARR_2_QUEUE=true
|
|
- - VRKN_RADARR_2_QUEUE_RUN_SECONDS=300
|
|
- - VRKN_RADARR_2_GET_MISSING=true
|
|
- - VRKN_RADARR_2_GET_MISSING_RUN_SECONDS=300
|
|
- - VRKN_LIDARR_1_URL=lidarr1.domain.tld:8686
|
|
- - VRKN_LIDARR_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- - VRKN_LIDARR_1_SSL=false
|
|
- - VRKN_LIDARR_1_VERIFY_SSL=false
|
|
- - VRKN_LIDARR_1_MISSING_DAYS=30
|
|
- - VRKN_LIDARR_1_MISSING_DAYS_RUN_SECONDS=300
|
|
- - VRKN_LIDARR_1_FUTURE_DAYS=30
|
|
- - VRKN_LIDARR_1_FUTURE_DAYS_RUN_SECONDS=300
|
|
- - VRKN_LIDARR_1_QUEUE=true
|
|
- - VRKN_LIDARR_1_QUEUE_RUN_SECONDS=300
|
|
- - VRKN_OMBI_1_URL=ombi.domain.tld
|
|
- - VRKN_OMBI_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- - VRKN_OMBI_1_SSL=false
|
|
- - VRKN_OMBI_1_VERIFY_SSL=false
|
|
- - VRKN_OMBI_1_GET_REQUEST_TYPE_COUNTS=true
|
|
- - VRKN_OMBI_1_REQUEST_TYPE_RUN_SECONDS=300
|
|
- - VRKN_OMBI_1_GET_REQUEST_TOTAL_COUNTS=true
|
|
- - VRKN_OMBI_1_REQUEST_TOTAL_RUN_SECONDS=300
|
|
- - VRKN_OMBI_1_GET_ISSUE_STATUS_COUNTS=true
|
|
- - VRKN_OMBI_1_ISSUE_STATUS_RUN_SECONDS=300
|
|
- - VRKN_OVERSEERR_1_URL=overseerr.domain.tld
|
|
- - VRKN_OVERSEERR_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- - VRKN_OVERSEERR_1_SSL=false
|
|
- - VRKN_OVERSEERR_1_VERIFY_SSL=false
|
|
- - VRKN_OVERSEERR_1_GET_REQUEST_TOTAL_COUNTS=true
|
|
- - VRKN_OVERSEERR_1_REQUEST_TOTAL_RUN_SECONDS=30
|
|
- - VRKN_OVERSEERR_1_GET_LATEST_REQUESTS_TO_FETCH=10
|
|
- - VRKN_OVERSEERR_1_NUM_LATEST_REQUESTS_SECONDS=300
|
|
- - VRKN_SICKCHILL_1_URL=sickchill.domain.tld:8081
|
|
- - VRKN_SICKCHILL_1_APIKEY=xxxxxxxxxxxxxxxx
|
|
- - VRKN_SICKCHILL_1_SSL=false
|
|
- - VRKN_SICKCHILL_1_VERIFY_SSL=false
|
|
- - VRKN_SICKCHILL_1_GET_MISSING=true
|
|
- - VRKN_SICKCHILL_1_GET_MISSING_RUN_SECONDS=300
|
|
depends_on:
|
|
- influxdb
|
|
restart: unless-stopped
|
|
--
|
|
GitLab
From cfc5c6998a03b53c1e60fc23c2b4e679fa41a66b Mon Sep 17 00:00:00 2001
|
|
From: MDHMatt <10845262+MDHMatt@users.noreply.github.com>
|
|
Date: Sun, 17 Jul 2022 18:06:48 +0100
|
|
Subject: [PATCH 44/65] fix(logging): remove deprecation warning. Var for
|
|
debug mode (#240)
|
|
|
|
---
|
|
Varken.py | 5 +++--
|
|
varken.xml | 1 +
|
|
2 files changed, 4 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/Varken.py b/Varken.py
|
|
index c494eaf..4a1f238 100644
|
|
--- a/Varken.py
|
|
+++ b/Varken.py
|
|
@@ -1,16 +1,17 @@
|
|
import platform
|
|
import schedule
|
|
+import distro
|
|
from time import sleep
|
|
from queue import Queue
|
|
from sys import version
|
|
from threading import Thread
|
|
from os import environ as env
|
|
from os import access, R_OK, getenv
|
|
-from distro import linux_distribution
|
|
from os.path import isdir, abspath, dirname, join
|
|
from argparse import ArgumentParser, RawTextHelpFormatter
|
|
from logging import getLogger, StreamHandler, Formatter, DEBUG
|
|
|
|
+
|
|
# Needed to check version of python
|
|
from varken import structures # noqa
|
|
from varken.ombi import OmbiAPI
|
|
@@ -28,7 +29,7 @@ from varken.sickchill import SickChillAPI
|
|
from varken.varkenlogger import VarkenLogger
|
|
|
|
|
|
-PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
|
|
+PLATFORM_LINUX_DISTRO = ' '.join(distro.id() + distro.version() + distro.name())
|
|
|
|
|
|
def thread(job, **kwargs):
|
|
diff --git a/varken.xml b/varken.xml
|
|
index b846c57..ab09d2d 100644
|
|
--- a/varken.xml
|
|
+++ b/varken.xml
|
|
@@ -51,5 +51,6 @@
|
|
<Labels/>
|
|
<Config Name="PGID" Target="PGID" Default="" Mode="" Description="Container Variable: PGID" Type="Variable" Display="always" Required="true" Mask="false">99</Config>
|
|
<Config Name="PUID" Target="PUID" Default="" Mode="" Description="Container Variable: PUID" Type="Variable" Display="always" Required="true" Mask="false">100</Config>
|
|
+ <Config Name="Debug" Target="DEBUG" Default="False" Mode="" Description="Turn Debug on or off" Type="Variable" Display="always" Required="false" Mask="false">False</Config>
|
|
<Config Name="Varken DataDir" Target="/config" Default="" Mode="rw" Description="Container Path: /config" Type="Path" Display="advanced-hide" Required="true" Mask="false">/mnt/user/appdata/varken</Config>
|
|
</Container>
|
|
\ No newline at end of file
|
|
--
|
|
GitLab
From a6c8ffce2539a673d838d7432f61453a13ce75c4 Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Sun, 17 Jul 2022 16:43:54 -0400
|
|
Subject: [PATCH 45/65] fix(build): bump schedule version to 1.1
|
|
|
|
---
|
|
requirements.txt | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/requirements.txt b/requirements.txt
|
|
index 523e427..8341458 100644
|
|
--- a/requirements.txt
|
|
+++ b/requirements.txt
|
|
@@ -5,6 +5,6 @@
|
|
requests==2.25.1
|
|
geoip2==2.9.0
|
|
influxdb==5.2.0
|
|
-schedule==0.6.0
|
|
+schedule==1.1.0
|
|
distro==1.4.0
|
|
urllib3==1.26.5
|
|
--
|
|
GitLab
From 0a9a20007e7df2ac5ee8c9586eb3cf6d081f2fef Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Sun, 17 Jul 2022 16:44:45 -0400
|
|
Subject: [PATCH 46/65] fix(build): bump docker python version
|
|
|
|
---
|
|
Dockerfile | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/Dockerfile b/Dockerfile
|
|
index c5c41fa..a227b48 100644
|
|
--- a/Dockerfile
|
|
+++ b/Dockerfile
|
|
@@ -1,4 +1,4 @@
|
|
-FROM python:3.9.1-alpine
|
|
+FROM python:3.10.5-alpine
|
|
|
|
ENV DEBUG="True" \
|
|
DATA_FOLDER="/config" \
|
|
--
|
|
GitLab
From a197cb63f8ef0561ef7e04537d96b4a6364423a0 Mon Sep 17 00:00:00 2001
|
|
From: Samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
|
|
Date: Sun, 17 Jul 2022 17:04:24 -0400
|
|
Subject: [PATCH 47/65] fix(dep): update requests and urllib3
|
|
|
|
---
|
|
requirements.txt | 4 ++--
|
|
1 file changed, 2 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/requirements.txt b/requirements.txt
|
|
index 8341458..4a03a5d 100644
|
|
--- a/requirements.txt
|
|
+++ b/requirements.txt
|
|
@@ -2,9 +2,9 @@
|
|
# Potential requirements.
|
|
# pip3 install -r requirements.txt
|
|
#---------------------------------------------------------
|
|
-requests==2.25.1
|
|
+requests==2.28.1
|
|
geoip2==2.9.0
|
|
influxdb==5.2.0
|
|
schedule==1.1.0
|
|
distro==1.4.0
|
|
-urllib3==1.26.5
|
|
+urllib3==1.26.10
|
|
--
|
|
GitLab
From b077508a3e0f5bcd7ccd46cf0f07a094ea5e5d54 Mon Sep 17 00:00:00 2001
|
|
From: Nathan Adams <dinnerbone@dinnerbone.com>
|
|
Date: Mon, 18 Jul 2022 17:10:29 +0200
|
|
Subject: [PATCH 48/65] fix(sonarr): ensure invalid sonarr queue items are just
|
|
skipped over - fixes #239 (#243)
|
|
|
|
---
|
|
varken/sonarr.py | 8 +++++---
|
|
1 file changed, 5 insertions(+), 3 deletions(-)
|
|
|
|
diff --git a/varken/sonarr.py b/varken/sonarr.py
|
|
index 1e8c267..f9b55c6 100644
|
|
--- a/varken/sonarr.py
|
|
+++ b/varken/sonarr.py
|
|
@@ -105,7 +105,8 @@ class SonarrAPI(object):
|
|
endpoint = '/api/v3/queue'
|
|
now = datetime.now(timezone.utc).astimezone().isoformat()
|
|
pageSize = 250
|
|
- params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True}
|
|
+ params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True,
|
|
+ 'includeUnknownSeriesItems': False}
|
|
queueResponse = []
|
|
queue = []
|
|
|
|
@@ -119,7 +120,8 @@ class SonarrAPI(object):
|
|
|
|
while response.totalRecords > response.page * response.pageSize:
|
|
page = response.page + 1
|
|
- params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True}
|
|
+ params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True,
|
|
+ 'includeUnknownSeriesItems': False}
|
|
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
|
|
get = connection_handler(self.session, req, self.server.verify_ssl)
|
|
if not get:
|
|
@@ -140,8 +142,8 @@ class SonarrAPI(object):
|
|
|
|
for queueItem in download_queue:
|
|
tvShow = SonarrTVShow(**queueItem.series)
|
|
- episode = SonarrEpisode(**queueItem.episode)
|
|
try:
|
|
+ episode = SonarrEpisode(**queueItem.episode)
|
|
sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
|
|
except TypeError as e:
|
|
self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \
|
|
--
|
|
GitLab
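Note: moving the SonarrEpisode construction inside the try block means one malformed queue entry is logged and skipped rather than aborting the whole run. A standalone sketch of that skip-on-bad-item pattern (logger name and sample data are assumptions):

from logging import basicConfig, getLogger
from typing import NamedTuple

basicConfig()
logger = getLogger('varken.sketch')

class Episode(NamedTuple):
    seasonNumber: int = None
    episodeNumber: int = None

queue = [{'seasonNumber': 1, 'episodeNumber': 2}, None, {'seasonNumber': 3, 'episodeNumber': 4}]

for item in queue:
    try:
        episode = Episode(**item)
        sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
    except TypeError as e:
        # Log and skip the bad item instead of letting the whole job die.
        logger.error('TypeError has occurred : %s while processing the queue item', e)
        continue
    print(sxe)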
From 4768870bfb5596f5e613896181007a98342c7bcf Mon Sep 17 00:00:00 2001
|
|
From: "Nicholas St. Germain" <nick@cajun.pro>
|
|
Date: Mon, 21 Dec 2020 12:21:27 -0600
|
|
Subject: [PATCH 49/65] add branch to build inputs
|
|
|
|
---
|
|
CHANGELOG.md | 2 +-
|
|
1 file changed, 1 insertion(+), 1 deletion(-)
|
|
|
|
diff --git a/CHANGELOG.md b/CHANGELOG.md
|
|
index 8d0dd88..473132f 100644
|
|
--- a/CHANGELOG.md
|
|
+++ b/CHANGELOG.md
|
|
@@ -374,4 +374,4 @@
|
|
- Create crontabs [\#6](https://github.com/Boerderij/Varken/pull/6) ([ghost](https://github.com/ghost))
|
|
- update plex\_dashboard.json [\#5](https://github.com/Boerderij/Varken/pull/5) ([ghost](https://github.com/ghost))
|
|
- Update README.md [\#4](https://github.com/Boerderij/Varken/pull/4) ([ghost](https://github.com/ghost))
|
|
-- added sickrage portion [\#3](https://github.com/Boerderij/Varken/pull/3) ([ghost](https://github.com/ghost))
|
|
\ No newline at end of file
|
|
+- added sickrage portion [\#3](https://github.com/Boerderij/Varken/pull/3) ([ghost](https://github.com/ghost))
|
|
--
|
|
GitLab
From f57a25ac4efb7f658304bf1558c96302b49f922e Mon Sep 17 00:00:00 2001
|
|
From: "Nicholas St. Germain" <nick@cajun.pro>
|
|
Date: Mon, 21 Dec 2020 12:30:50 -0600
|
|
Subject: [PATCH 50/65] update pipeline badge
|
|
|
|
---
|
|
README.md | 4 ++--
|
|
1 file changed, 2 insertions(+), 2 deletions(-)
|
|
|
|
diff --git a/README.md b/README.md
|
|
index fe95bca..dd25a3e 100644
|
|
--- a/README.md
|
|
+++ b/README.md
|
|
@@ -2,7 +2,7 @@
|
|
<img width="800" src="https://raw.githubusercontent.com/Boerderij/Varken/master/assets/varken_full_banner.jpg" alt="Logo Banner">
|
|
</p>
|
|
|
|
-[](https://gitlab.com/boerderij/Varken/commits/master)
|
|
+[](https://github.com/Boerderij/Varken/actions?query=workflow%3Avarken)
|
|
[](https://discord.gg/VjZ6qSM)
|
|
[](https://ko-fi.com/varken)
|
|
[](https://microbadger.com/images/boerderij/varken)
|
|
@@ -58,4 +58,4 @@ do not include database creation, please ensure you create an influx database
|
|
named `varken`
|
|
|
|
### Grafana
|
|
-[Grafana Installation/Dashboard Documentation](https://wiki.cajun.pro/books/varken/page/grafana)
|
|
\ No newline at end of file
|
|
+[Grafana Installation/Dashboard Documentation](https://wiki.cajun.pro/books/varken/page/grafana)
|
|
--
|
|
GitLab
From dbfaa1a886dbcdb1c90e959136806870bbca5b9e Mon Sep 17 00:00:00 2001
From: d-mcknight <47727384+d-mcknight@users.noreply.github.com>
Date: Wed, 21 Jun 2023 20:11:36 -0700
Subject: [PATCH 51/65] Update automation

---
 .github/FUNDING.yml | 1 -
 .github/ISSUE_TEMPLATE/bug_report.md | 31 -----
 .github/ISSUE_TEMPLATE/feature_request.md | 20 ---
 .../docker-multi-login-action/action.yml | 23 ----
 .../action.yml | 46 -------
 .github/workflows/docker.yaml | 116 ------------------
 .github/workflows/docker.yml | 41 +++++++
 .github/workflows/invalid_template.yml | 19 ---
 .github/workflows/support.yml | 25 ----
 Dockerfile | 6 +-
 10 files changed, 44 insertions(+), 284 deletions(-)
 delete mode 100644 .github/FUNDING.yml
 delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md
 delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md
 delete mode 100644 .github/actions/docker-multi-login-action/action.yml
 delete mode 100644 .github/actions/docker-target-image-list-action/action.yml
 delete mode 100644 .github/workflows/docker.yaml
 create mode 100644 .github/workflows/docker.yml
 delete mode 100644 .github/workflows/invalid_template.yml
 delete mode 100644 .github/workflows/support.yml

diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
deleted file mode 100644
index b23a4f6..0000000
--- a/.github/FUNDING.yml
+++ /dev/null
@@ -1 +0,0 @@
-ko_fi: varken
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index a2b1f91..0000000
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,31 +0,0 @@
----
-name: Bug report
-about: Create a report to help us improve
-title: "[BUG]"
-labels: awaiting-triage
-assignees: ''
-
----
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. ...
-2. ...
-3. ...
-4. ...
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
-
-**Environment (please complete the following information):**
- - OS: [e.g. Ubuntu 18.04.1 or Docker:Tag]
- - Version [e.g. v1.1]
-
-**Additional context**
-Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index 6bdd151..0000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,20 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this project
-title: "[Feature Request]"
-labels: awaiting-triage
-assignees: ''
-
----
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-
-**Additional context**
-Add any other context or screenshots about the feature request here.
diff --git a/.github/actions/docker-multi-login-action/action.yml b/.github/actions/docker-multi-login-action/action.yml
deleted file mode 100644
index aab3f20..0000000
--- a/.github/actions/docker-multi-login-action/action.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: 'Docker Multi Login Action'
-description: 'Log in to dockerhub, quay, and github container registry'
-runs:
- using: "composite"
- steps:
- - shell: bash
- run: |
- echo "🔑 Logging into dockerhub..."
- if docker login --username ${{ fromJSON(env.secrets).DOCKERHUB_USERNAME }} --password ${{ fromJSON(env.secrets).DOCKERHUB_PASSWORD }} > /dev/null 2>&1; then
- echo "🎉 Login Succeeded!"
- fi
- - shell: bash
- run: |
- echo "🔑 Logging into quay.io..."
- if docker login quay.io --username ${{ fromJSON(env.secrets).QUAY_USERNAME }} --password ${{ fromJSON(env.secrets).QUAY_PASSWORD }} > /dev/null 2>&1; then
- echo "🎉 Login Succeeded!"
- fi
- - shell: bash
- run: |
- echo "🔑 Logging into ghcr.io..."
- if docker login ghcr.io --username ${{ fromJSON(env.secrets).GHCR_USERNAME }} --password ${{ fromJSON(env.secrets).GHCR_PASSWORD }} > /dev/null 2>&1; then
- echo "🎉 Login Succeeded!"
- fi
diff --git a/.github/actions/docker-target-image-list-action/action.yml b/.github/actions/docker-target-image-list-action/action.yml
deleted file mode 100644
index dc08c89..0000000
--- a/.github/actions/docker-target-image-list-action/action.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-name: 'Docker Target Image List Generator'
-description: 'A Github Action to generate a list of fully qualified target images for docker related steps'
-inputs:
- registries:
- description: "Comma separated list of docker registries"
- required: false
- default: "docker.io,quay.io,ghcr.io"
- images:
- description: "Comma separated list of images"
- required: true
- tags:
- description: "Comma separated list of image tags"
- required: false
- default: "edge"
-outputs:
- fully-qualified-target-images:
- description: "List of fully qualified docker target images"
- value: ${{ steps.gen-fqti.outputs.fully-qualified-target-images }}
-runs:
- using: "composite"
- steps:
- - name: Generate fully qualified docker target images
- id: gen-fqti
- shell: bash
- run: |
- IFS=',' read -r -a registries <<< "${{ inputs.registries }}"
- IFS=',' read -r -a images <<< "${{ inputs.images }}"
- IFS=',' read -r -a tags <<< "${{ inputs.tags }}"
- FQTI=""
- echo "Generating fully qualified docker target images for:"
- echo "🐋 Registries: ${#registries[@]}"
- echo "📷 Images: ${#images[@]}"
- echo "🏷️ Tags: ${#tags[@]}"
- echo "🧮 Total: $((${#registries[@]}*${#images[@]}*${#tags[@]}))"
- for registry in "${registries[@]}"; do
- for image in "${images[@]}"; do
- for tag in "${tags[@]}"; do
- if [ -z "$FQTI" ]; then
- FQTI="${registry}/${image}:${tag}"
- else
- FQTI="$FQTI,${registry}/${image}:${tag}"
- fi
- done
- done
- done
- echo ::set-output name=fully-qualified-target-images::${FQTI}
diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml
deleted file mode 100644
index 8818fcb..0000000
--- a/.github/workflows/docker.yaml
+++ /dev/null
@@ -1,116 +0,0 @@
-name: varken
-on:
- schedule:
- - cron: '0 10 * * *'
- push:
- branches:
- - master
- - develop
- tags:
- - 'v*.*.*'
- paths:
- - '.github/workflows/docker.yaml'
- - 'varken/**'
- - 'Varken.py'
- - 'Dockerfile'
- pull_request:
- branches:
- - master
- - develop
- paths:
- - '.github/workflows/docker.yaml'
- - 'varken/**'
- - 'Varken.py'
- - 'Dockerfile'
- workflow_dispatch:
- inputs:
- tag:
- description: 'Use this tag instead of most recent'
- required: false
- ignore-existing-tag:
- description: 'Ignore existing tag if "true"'
- required: false
-env:
- IMAGES: boerderij/varken
- PLATFORMS: "linux/amd64,linux/arm64,linux/arm/v7"
-jobs:
- lint-and-test:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Setup Python
- uses: actions/setup-python@v2
- with:
- python-version: '3.x'
- - name: Lint
- run: pip install flake8 && flake8 --max-line-length 120 Varken.py varken/*.py
- build:
- runs-on: ubuntu-latest
- needs: lint-and-test
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Prepare
- id: prep
- run: |
- VERSION=edge
- if [[ $GITHUB_REF == refs/tags/* ]]; then
- VERSION=${GITHUB_REF#refs/tags/v}
- fi
- if [ "${{ github.event_name }}" = "schedule" ]; then
- VERSION=nightly
- fi
- if [[ ${GITHUB_REF##*/} == "develop" ]]; then
- VERSION=develop
- fi
- TAGS="${VERSION}"
- if [[ $VERSION =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
- TAGS="$TAGS,latest"
- fi
- echo ::set-output name=version::${VERSION}
- echo ::set-output name=tags::${TAGS}
- echo ::set-output name=branch::${GITHUB_REF##*/}
- echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
- echo ::set-output name=vcs_ref::${GITHUB_SHA::8}
- - uses: ./.github/actions/docker-target-image-list-action
- name: Generate Target Images
- id: gen-tags
- with:
- images: ${{ env.IMAGES }}
- tags: ${{ steps.prep.outputs.tags }}
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v1
- with:
- platforms: ${{ env.PLATFORMS }}
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
- with:
- install: true
- version: latest
- driver-opts: image=moby/buildkit:master
- - name: Docker Multi Login
- uses: ./.github/actions/docker-multi-login-action
- env:
- secrets: ${{ toJSON(secrets) }}
- - name: Build and Push
- uses: docker/build-push-action@v2
- with:
- context: .
- file: ./Dockerfile
- platforms: ${{ env.PLATFORMS }}
- pull: true
- push: ${{ github.event_name != 'pull_request' }}
- tags: ${{ steps.gen-tags.outputs.fully-qualified-target-images }}
- build-args: |
- VERSION=${{ steps.prep.outputs.version }}
- BRANCH=${{ steps.prep.outputs.branch }}
- BUILD_DATE=${{ steps.prep.outputs.build_date }}
- VCS_REF=${{ steps.prep.outputs.vcs_ref }}
- - name: Inspect
- if: ${{ github.event_name != 'pull_request' }}
- run: |
- IFS=',' read -r -a images <<< "${{ steps.gen-tags.outputs.fully-qualified-target-images }}"
- for image in "${images[@]}"; do
- docker buildx imagetools inspect ${image}
- done
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
new file mode 100644
index 0000000..c51db79
--- /dev/null
+++ b/.github/workflows/docker.yml
@@ -0,0 +1,41 @@
+name: Publish Docker Containers
+on:
+ workflow_dispatch:
+
+env:
+ REGISTRY: ghcr.io
+ IMAGE_NAME: ${{ github.repository }}
+
+jobs:
+ build_and_publish_docker:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: write
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+ with:
+ ref: ${{ github.ref }}
+
+ - name: Log in to the Container registry
+ uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
+ with:
+ registry: ${{ env.REGISTRY }}
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Extract metadata for base Docker
+ id: base_meta
+ uses: docker/metadata-action@v2
+ with:
+ images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+ tags: |
+ type=ref,event=branch
+ - name: Build and push Docker image
+ uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
+ with:
+ context: .
+ push: true
+ tags: ${{ steps.base_meta.outputs.tags }}
+ labels: ${{ steps.base_meta.outputs.labels }}
diff --git a/.github/workflows/invalid_template.yml b/.github/workflows/invalid_template.yml
deleted file mode 100644
index 647cdec..0000000
--- a/.github/workflows/invalid_template.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-name: 'Invalid Template'
-
-on:
- issues:
- types: [labeled, unlabeled, reopened]
-
-jobs:
- support:
- runs-on: ubuntu-latest
- steps:
- - uses: dessant/support-requests@v2
- with:
- github-token: ${{ github.token }}
- support-label: 'invalid:template-incomplete'
- issue-comment: >
- :wave: @{issue-author}, please edit your issue and follow the template provided.
- close-issue: false
- lock-issue: false
- issue-lock-reason: 'resolved'
diff --git a/.github/workflows/support.yml b/.github/workflows/support.yml
deleted file mode 100644
index df74c10..0000000
--- a/.github/workflows/support.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-name: 'Support Request'
-
-on:
- issues:
- types: [labeled, unlabeled, reopened]
-
-jobs:
- support:
- runs-on: ubuntu-latest
- steps:
- - uses: dessant/support-requests@v2
- with:
- github-token: ${{ github.token }}
- support-label: 'support'
- issue-comment: >
- :wave: @{issue-author}, we use the issue tracker exclusively
- for bug reports and feature requests. However, this issue appears
- to be a support request. Please use our support channels
- to get help with Varken!
-
- - [Discord](https://discord.gg/VjZ6qSM)
- - [Discord Quick Access](http://cyborg.decreator.dev/channels/518970285773422592/530424560504537105/)
- close-issue: true
- lock-issue: false
- issue-lock-reason: 'off-topic'
diff --git a/Dockerfile b/Dockerfile
index a227b48..aec9282 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -8,11 +8,11 @@ ENV DEBUG="True" \

 LABEL maintainer="dirtycajunrice,samwiseg0" \
 org.opencontainers.image.created=$BUILD_DATE \
- org.opencontainers.image.url="https://github.com/Boerderij/Varken" \
- org.opencontainers.image.source="https://github.com/Boerderij/Varken" \
+ org.opencontainers.image.url="https://github.com/d-mcknight/Varken" \
+ org.opencontainers.image.source="https://github.com/d-mcknight/Varken" \
 org.opencontainers.image.version=$VERSION \
 org.opencontainers.image.revision=$VCS_REF \
- org.opencontainers.image.vendor="boerderij" \
+ org.opencontainers.image.vendor="d-mcknight" \
 org.opencontainers.image.title="varken" \
 org.opencontainers.image.description="Varken is a standalone application to aggregate data from the Plex ecosystem into InfluxDB using Grafana for a frontend" \
 org.opencontainers.image.licenses="MIT"
--
GitLab


From 32f4bd95d031f06b0f7ced1b463a322bed462c56 Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 15:45:37 -0500
Subject: [PATCH 52/65] Add influxdb 2 client

---
 requirements.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/requirements.txt b/requirements.txt
index 4a03a5d..22449b9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,3 +8,4 @@ influxdb==5.2.0
 schedule==1.1.0
 distro==1.4.0
 urllib3==1.26.10
+influxdb-client==1.14.0
\ No newline at end of file
--
GitLab


From 47eb8077af4e9959c1ae0a7d23b2373a0e8af99a Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 15:46:26 -0500
Subject: [PATCH 53/65] Add structure for influxdb 2 params

This contains all the data needed for connecting and writing to an InfluxDB2 server
---
 varken/structures.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/varken/structures.py b/varken/structures.py
index e3ee094..3637b45 100644
--- a/varken/structures.py
+++ b/varken/structures.py
@@ -20,6 +20,16 @@ class InfluxServer(NamedTuple):
 verify_ssl: bool = False


+class Influx2Server(NamedTuple):
+ url: str = 'localhost'
+ org: str = 'server'
+ token: str = 'TOKEN'
+ bucket: str = 'varken'
+ timeout: int = 10000
+ ssl: bool = False
+ verify_ssl: bool = False
+
+
 class SonarrServer(NamedTuple):
 api_key: str = None
 future_days: int = 0
--
GitLab


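Illustrative sketch (not part of the patch series): one way the Influx2Server tuple introduced above might be filled in. The URL, org, and token values are placeholders, not anything shipped by Varken beyond the defaults visible in the diff.

    from varken.structures import Influx2Server

    # Every field has a default, so only the values that differ need to be passed.
    influx2 = Influx2Server(url='https://influxdb2.domain.tld', org='my-org',
                            token='my-token', bucket='varken', timeout=10000,
                            ssl=False, verify_ssl=False)
    print(influx2.bucket)  # -> 'varken'
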
From c515184cb43d677159953f27100f958c22be7fee Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 15:48:41 -0500
Subject: [PATCH 54/65] Parse influxdb 2 config data

---
 varken/iniparser.py | 53 +++++++++++++++++++++++++++++++--------------
 1 file changed, 37 insertions(+), 16 deletions(-)

diff --git a/varken/iniparser.py b/varken/iniparser.py
index bcb3b37..96fff88 100644
--- a/varken/iniparser.py
+++ b/varken/iniparser.py
@@ -9,7 +9,7 @@ from configparser import ConfigParser, NoOptionError, NoSectionError
 from varken.varkenlogger import BlacklistFilter
 from varken.structures import SickChillServer, UniFiServer
 from varken.helpers import clean_sid_check, rfc1918_ip_check, boolcheck
-from varken.structures import SonarrServer, RadarrServer, OmbiServer, OverseerrServer, TautulliServer, InfluxServer
+from varken.structures import SonarrServer, RadarrServer, OmbiServer, OverseerrServer, TautulliServer, InfluxServer, Influx2Server


 class INIParser(object):
@@ -144,23 +144,44 @@ class INIParser(object):
 if read_file:
 self.config = self.read_file('varken.ini')
 self.config_blacklist()
+
 # Parse InfluxDB options
- try:
- url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
- include_port=False, section='influxdb')
- port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
- ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
- verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
-
- username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
- password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
- except NoOptionError as e:
- self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
- self.rectify_ini()
- return
+ self.influx2_enabled = env.get('VRKN_GLOBAL_INFLUXDB2_ENABLED', self.config.getboolean('global', 'influx2_enabled'))
+
+ if self.influx2_enabled:
+ # Use INFLUX version 2
+ try:
+ url = self.url_check(env.get('VRKN_INFLUXDB2_URL', self.config.get('influx2', 'url')), section='influx2')
+ ssl = boolcheck(env.get('VRKN_INFLUXDB2_SSL', self.config.get('influx2', 'ssl')))
+ verify_ssl = boolcheck(env.get('VRKN_INFLUXDB2_VERIFY_SSL', self.config.get('influx2', 'verify_ssl')))
+
+ org = env.get('VRKN_INFLUXDB2_ORG', self.config.get('influx2', 'org'))
+ token = env.get('VRKN_INFLUXDB2_TOKEN', self.config.get('influx2', 'token'))
+ timeout = env.get('VRKN_INFLUXDB2_TIMEOUT', self.config.get('influx2', 'timeout'))
+ except NoOptionError as e:
+ self.logger.error('Missing key in %s. Error: %s', "influx2", e)
+ self.rectify_ini()
+ return
+
+ self.influx_server = Influx2Server(url=url, token=token, org=org, timeout=timeout, ssl=ssl,
+ verify_ssl=verify_ssl)
+ else:
+ try:
+ url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
+ include_port=False, section='influxdb')
+ port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
+ ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
+ verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
+
+ username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
+ password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
+ except NoOptionError as e:
+ self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
+ self.rectify_ini()
+ return

- self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
- verify_ssl=verify_ssl)
+ self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
+ verify_ssl=verify_ssl)

 # Check for all enabled services
 for service in self.services:
--
GitLab


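Illustrative sketch (not part of the patch series): the parser change above resolves each option as "environment variable first, varken.ini second". The snippet below reproduces that precedence on its own, with a stand-in config string.

    from os import environ as env
    from configparser import ConfigParser

    config = ConfigParser()
    config.read_string('[influx2]\nurl = influxdb2.domain.tld\n')

    # An exported VRKN_INFLUXDB2_URL wins; otherwise the varken.ini value is used.
    url = env.get('VRKN_INFLUXDB2_URL', config.get('influx2', 'url'))
    print(url)
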
From 2273083d00ce7ab2814c3b5b9c17116ac977ef89 Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 15:49:41 -0500
Subject: [PATCH 55/65] Add influxdb2 manager class

This stores the data needed for InfluxDB2, and has a single `write_points` function on this that takes an array of points to add to the database
---
 varken/influxdb2manager.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 varken/influxdb2manager.py

diff --git a/varken/influxdb2manager.py b/varken/influxdb2manager.py
new file mode 100644
index 0000000..d6e3452
--- /dev/null
+++ b/varken/influxdb2manager.py
@@ -0,0 +1,28 @@
+from sys import exit
+from logging import getLogger
+from requests.exceptions import ConnectionError
+from influxdb_client import InfluxDBClient, Point
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+
+class InfluxDB2Manager(object):
+ def __init__(self, server):
+ self.server = server
+ self.logger = getLogger()
+ if self.server.url == "influxdb2.domain.tld":
+ self.logger.critical("You have not configured your varken.ini. Please read Wiki page for configuration")
+ exit()
+
+ self.influx = InfluxDBClient(url=self.server.url, token=self.server.token, org=self.server.org,
+ timeout=self.server.timeout, verify_ssl=self.server.verify_ssl, ssl_ca_cert=self.server.ssl)
+ self.influx_write_api = self.influx.write_api(write_options=SYNCHRONOUS)
+
+ def write_points(self, data):
+ d = data
+ self.logger.info('Writing Data to InfluxDBv2 %s', d)
+
+ try:
+ self.influx_write_api.write(bucket=self.server.bucket, record=d)
+ except (InfluxDBServerError, ConnectionError) as e:
+ self.logger.error('Error writing data to influxdb2. Dropping this set of data. '
+ 'Check your database! Error: %s', e)
--
GitLab


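Illustrative sketch (not part of the patch series): how the InfluxDB2Manager added above might be driven, assuming the varken package from these patches is importable. The URL, org, and token are placeholders; a line-protocol string is one of the record formats the influxdb-client write API accepts.

    from varken.structures import Influx2Server
    from varken.influxdb2manager import InfluxDB2Manager

    server = Influx2Server(url='https://influxdb2.domain.tld', org='my-org',
                           token='my-token', bucket='varken')
    manager = InfluxDB2Manager(server)

    # write_points() hands the records straight to the synchronous v2 write API.
    manager.write_points(['varken_example,source=sketch value=1i'])
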
From 163798bc20f1e58b61442ab331766e32df19d440 Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 15:50:12 -0500
Subject: [PATCH 56/65] Use the correct db manager for varken

---
 Varken.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/Varken.py b/Varken.py
index 4a1f238..493a4bf 100644
--- a/Varken.py
+++ b/Varken.py
@@ -23,6 +23,7 @@ from varken.radarr import RadarrAPI
 from varken.lidarr import LidarrAPI
 from varken.iniparser import INIParser
 from varken.dbmanager import DBManager
+from varken.influxdb2manager import InfluxDB2Manager
 from varken.helpers import GeoIPHandler
 from varken.tautulli import TautulliAPI
 from varken.sickchill import SickChillAPI
@@ -92,7 +93,15 @@ if __name__ == "__main__":
 vl.logger.info("Varken v%s-%s %s", VERSION, BRANCH, BUILD_DATE)

 CONFIG = INIParser(DATA_FOLDER)
- DBMANAGER = DBManager(CONFIG.influx_server)
+
+ if CONFIG.influx2_enabled:
+ # Use INFLUX version 2
+ vl.logger.info('Using INFLUXDBv2')
+ DBMANAGER = InfluxDB2Manager(CONFIG.influx_server)
+ else:
+ vl.logger.info('Using INFLUXDB')
+ DBMANAGER = DBManager(CONFIG.influx_server)
+
 QUEUE = Queue()

 if CONFIG.sonarr_enabled:
--
GitLab


From 94f5174e2fe704843563ec86da84dad484bfe1bf Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 15:50:48 -0500
Subject: [PATCH 57/65] Add influxdb2 to the example varken config file

---
 data/varken.example.ini | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/data/varken.example.ini b/data/varken.example.ini
index b32eab6..93d9ec2 100644
--- a/data/varken.example.ini
+++ b/data/varken.example.ini
@@ -8,6 +8,7 @@ overseerr_server_ids = 1
 sickchill_server_ids = false
 unifi_server_ids = false
 maxmind_license_key = xxxxxxxxxxxxxxxx
+influx2_enabled = false

 [influxdb]
 url = influxdb.domain.tld
@@ -17,6 +18,15 @@ verify_ssl = false
 username = root
 password = root

+[influx2]
+url = influxdb2.domain.tld
+org = ORG
+token = TOKEN
+timeout = 10000
+ssl = false
+verify_ssl = false
+bucket = varken
+
 [tautulli-1]
 url = tautulli.domain.tld:8181
 fallback_ip = 1.1.1.1
--
GitLab


From debadb56e5b24e193dc7594fad25861ea57eb0a7 Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 16:03:06 -0500
Subject: [PATCH 58/65] Create influx bucket if it doesn't exist

---
 varken/influxdb2manager.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/varken/influxdb2manager.py b/varken/influxdb2manager.py
index d6e3452..0cb9df9 100644
--- a/varken/influxdb2manager.py
+++ b/varken/influxdb2manager.py
@@ -1,6 +1,7 @@
 from sys import exit
 from logging import getLogger
 from requests.exceptions import ConnectionError
+import influxdb_client
 from influxdb_client import InfluxDBClient, Point
 from influxdb_client.client.write_api import SYNCHRONOUS

@@ -17,6 +18,23 @@ class InfluxDB2Manager(object):
 timeout=self.server.timeout, verify_ssl=self.server.verify_ssl, ssl_ca_cert=self.server.ssl)
 self.influx_write_api = self.influx.write_api(write_options=SYNCHRONOUS)

+ # Create the bucket if needed
+
+ bucket_api = self.influx.buckets_api()
+
+ bucket = bucket_api.find_bucket_by_name(self.server.bucket)
+
+ if bucket is None:
+ self.logger.info('Creating bucket %s', self.server.bucket)
+
+ org_api = influxdb_client.service.organizations_service.OrganizationsService(self.influx.api_client)
+ orgs = org_api.get_orgs()
+ for org in orgs.orgs:
+ if org.name == self.server.org:
+ my_org = org
+
+ self.influx.buckets_api().create_bucket(bucket_name=self.server.bucket, org_id=my_org.id)
+
 def write_points(self, data):
 d = data
 self.logger.info('Writing Data to InfluxDBv2 %s', d)
--
GitLab


From 669f00ac71c7a678799065826a2420ab305dab58 Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 16:05:45 -0500
Subject: [PATCH 59/65] Update InfluxDB type on README

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index dd25a3e..de4e654 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@ ecosystem into InfluxDB using Grafana for a frontend
 Requirements:
 * [Python 3.6.7+](https://www.python.org/downloads/release/python-367/)
 * [Python3-pip](https://pip.pypa.io/en/stable/installing/)
-* [InfluxDB 1.8.x](https://www.influxdata.com/)
+* [InfluxDB 1.8.x or 2.0.x](https://www.influxdata.com/)
 * [Grafana](https://grafana.com/)

 <p align="center">
--
GitLab


From 90ae8fac32c0056373c8a31b6a7d8e05077c2bf2 Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 16:38:42 -0500
Subject: [PATCH 60/65] Clean up linting errors

---
 varken/influxdb2manager.py | 8 ++++----
 varken/iniparser.py | 12 +++++++-----
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/varken/influxdb2manager.py b/varken/influxdb2manager.py
index 0cb9df9..3979ba4 100644
--- a/varken/influxdb2manager.py
+++ b/varken/influxdb2manager.py
@@ -1,8 +1,7 @@
 from sys import exit
 from logging import getLogger
-from requests.exceptions import ConnectionError
 import influxdb_client
-from influxdb_client import InfluxDBClient, Point
+from influxdb_client import InfluxDBClient
 from influxdb_client.client.write_api import SYNCHRONOUS


@@ -15,7 +14,8 @@ class InfluxDB2Manager(object):
 exit()

 self.influx = InfluxDBClient(url=self.server.url, token=self.server.token, org=self.server.org,
- timeout=self.server.timeout, verify_ssl=self.server.verify_ssl, ssl_ca_cert=self.server.ssl)
+ timeout=self.server.timeout, verify_ssl=self.server.verify_ssl,
+ ssl_ca_cert=self.server.ssl)
 self.influx_write_api = self.influx.write_api(write_options=SYNCHRONOUS)

 # Create the bucket if needed
@@ -41,6 +41,6 @@ class InfluxDB2Manager(object):

 try:
 self.influx_write_api.write(bucket=self.server.bucket, record=d)
- except (InfluxDBServerError, ConnectionError) as e:
+ except Exception as e:
 self.logger.error('Error writing data to influxdb2. Dropping this set of data. '
 'Check your database! Error: %s', e)
diff --git a/varken/iniparser.py b/varken/iniparser.py
index 96fff88..6874dc2 100644
--- a/varken/iniparser.py
+++ b/varken/iniparser.py
@@ -146,12 +146,14 @@ class INIParser(object):
 self.config_blacklist()

 # Parse InfluxDB options
- self.influx2_enabled = env.get('VRKN_GLOBAL_INFLUXDB2_ENABLED', self.config.getboolean('global', 'influx2_enabled'))
+ self.influx2_enabled = env.get('VRKN_GLOBAL_INFLUXDB2_ENABLED',
+ self.config.getboolean('global', 'influx2_enabled'))

 if self.influx2_enabled:
 # Use INFLUX version 2
 try:
- url = self.url_check(env.get('VRKN_INFLUXDB2_URL', self.config.get('influx2', 'url')), section='influx2')
+ url = self.url_check(env.get('VRKN_INFLUXDB2_URL', self.config.get('influx2', 'url')),
+ section='influx2')
 ssl = boolcheck(env.get('VRKN_INFLUXDB2_SSL', self.config.get('influx2', 'ssl')))
 verify_ssl = boolcheck(env.get('VRKN_INFLUXDB2_VERIFY_SSL', self.config.get('influx2', 'verify_ssl')))

@@ -164,11 +166,11 @@ class INIParser(object):
 return

 self.influx_server = Influx2Server(url=url, token=token, org=org, timeout=timeout, ssl=ssl,
- verify_ssl=verify_ssl)
+ verify_ssl=verify_ssl)
 else:
 try:
 url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
- include_port=False, section='influxdb')
+ include_port=False, section='influxdb')
 port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
 ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
 verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
@@ -181,7 +183,7 @@ class INIParser(object):
 return

 self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
- verify_ssl=verify_ssl)
+ verify_ssl=verify_ssl)

 # Check for all enabled services
 for service in self.services:
--
GitLab


From 622f9a65c1bbd2377233540a4136b85e0324b43b Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Sat, 13 Feb 2021 16:40:02 -0500
Subject: [PATCH 61/65] Wrap create bucket in try/catch

---
 varken/influxdb2manager.py | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/varken/influxdb2manager.py b/varken/influxdb2manager.py
index 3979ba4..66eb12d 100644
--- a/varken/influxdb2manager.py
+++ b/varken/influxdb2manager.py
@@ -22,18 +22,21 @@ class InfluxDB2Manager(object):

 bucket_api = self.influx.buckets_api()

- bucket = bucket_api.find_bucket_by_name(self.server.bucket)
+ try:
+ bucket = bucket_api.find_bucket_by_name(self.server.bucket)

- if bucket is None:
- self.logger.info('Creating bucket %s', self.server.bucket)
+ if bucket is None:
+ self.logger.info('Creating bucket %s', self.server.bucket)

- org_api = influxdb_client.service.organizations_service.OrganizationsService(self.influx.api_client)
- orgs = org_api.get_orgs()
- for org in orgs.orgs:
- if org.name == self.server.org:
- my_org = org
+ org_api = influxdb_client.service.organizations_service.OrganizationsService(self.influx.api_client)
+ orgs = org_api.get_orgs()
+ for org in orgs.orgs:
+ if org.name == self.server.org:
+ my_org = org

- self.influx.buckets_api().create_bucket(bucket_name=self.server.bucket, org_id=my_org.id)
+ self.influx.buckets_api().create_bucket(bucket_name=self.server.bucket, org_id=my_org.id)
+ except Exception as e:
+ self.logger.error('Failed creating new InfluxDB bucket! Error: %s', e)

 def write_points(self, data):
 d = data
--
GitLab


From 6a65895c503813771b230a58ead8bc6125ab6621 Mon Sep 17 00:00:00 2001
From: Gabe Revells <gcrevell@mtu.edu>
Date: Mon, 1 Mar 2021 10:42:35 -0500
Subject: [PATCH 62/65] Use bucket given in ini file

---
 varken/iniparser.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/varken/iniparser.py b/varken/iniparser.py
index 6874dc2..84d5af9 100644
--- a/varken/iniparser.py
+++ b/varken/iniparser.py
@@ -158,6 +158,7 @@ class INIParser(object):
 verify_ssl = boolcheck(env.get('VRKN_INFLUXDB2_VERIFY_SSL', self.config.get('influx2', 'verify_ssl')))

 org = env.get('VRKN_INFLUXDB2_ORG', self.config.get('influx2', 'org'))
+ bucket = env.get('VRKN_INFLUXDB2_BUCKET', self.config.get('influx2', 'bucket'))
 token = env.get('VRKN_INFLUXDB2_TOKEN', self.config.get('influx2', 'token'))
 timeout = env.get('VRKN_INFLUXDB2_TIMEOUT', self.config.get('influx2', 'timeout'))
 except NoOptionError as e:
@@ -166,7 +167,7 @@ class INIParser(object):
 return

 self.influx_server = Influx2Server(url=url, token=token, org=org, timeout=timeout, ssl=ssl,
- verify_ssl=verify_ssl)
+ verify_ssl=verify_ssl, bucket=bucket)
 else:
 try:
 url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
--
GitLab


From 0ca9db2b6d1ec712c521a9df96659bf28acdec67 Mon Sep 17 00:00:00 2001
From: Daniel <d_mcknight@live.com>
Date: Thu, 22 Jun 2023 21:32:24 -0700
Subject: [PATCH 63/65] Log exception to troubleshoot errors

---
 varken/influxdb2manager.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/varken/influxdb2manager.py b/varken/influxdb2manager.py
index 66eb12d..62229ec 100644
--- a/varken/influxdb2manager.py
+++ b/varken/influxdb2manager.py
@@ -39,11 +39,10 @@ class InfluxDB2Manager(object):
 self.logger.error('Failed creating new InfluxDB bucket! Error: %s', e)

 def write_points(self, data):
- d = data
- self.logger.info('Writing Data to InfluxDBv2 %s', d)
+ self.logger.info('Writing Data to InfluxDBv2 %s', data)

 try:
- self.influx_write_api.write(bucket=self.server.bucket, record=d)
+ self.influx_write_api.write(bucket=self.server.bucket, record=data)
 except Exception as e:
- self.logger.error('Error writing data to influxdb2. Dropping this set of data. '
- 'Check your database! Error: %s', e)
+ self.logger.exception('Error writing data to influxdb2. Dropping this set of data. '
+ 'Check your database! Error: %s', e)
--
GitLab


From 5bcefd265211b1fc1ebbe8b907c0eb2d9746de83 Mon Sep 17 00:00:00 2001
From: Daniel <d_mcknight@live.com>
Date: Thu, 22 Jun 2023 21:48:27 -0700
Subject: [PATCH 64/65] Allow configured influx2 address as URL (no port)

---
 varken/iniparser.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/varken/iniparser.py b/varken/iniparser.py
index 84d5af9..f882b4e 100644
--- a/varken/iniparser.py
+++ b/varken/iniparser.py
@@ -153,7 +153,7 @@ class INIParser(object):
 # Use INFLUX version 2
 try:
 url = self.url_check(env.get('VRKN_INFLUXDB2_URL', self.config.get('influx2', 'url')),
- section='influx2')
+ section='influx2', include_port=False)
 ssl = boolcheck(env.get('VRKN_INFLUXDB2_SSL', self.config.get('influx2', 'ssl')))
 verify_ssl = boolcheck(env.get('VRKN_INFLUXDB2_VERIFY_SSL', self.config.get('influx2', 'verify_ssl')))

--
GitLab


From 7586d54464c58c4c479a179b165c8dfa638257e8 Mon Sep 17 00:00:00 2001
From: Daniel <d_mcknight@live.com>
Date: Thu, 22 Jun 2023 21:52:32 -0700
Subject: [PATCH 65/65] Bypass validity check to troubleshoot

---
 varken/iniparser.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/varken/iniparser.py b/varken/iniparser.py
index f882b4e..fc84d87 100644
--- a/varken/iniparser.py
+++ b/varken/iniparser.py
@@ -107,6 +107,7 @@ class INIParser(object):

 valid = match(regex, url_check) is not None
 if not valid:
+ return url_check
 if inc_port:
 self.logger.error('%s is invalid in module [%s]! URL must host/IP and '
 'port if not 80 or 443. ie. localhost:8080',
--
GitLab