Reworked the scheduler to pass a single server to each API instance, removing per-method server-loop duplication
This commit is contained in:
parent
bf1db64b82
commit
7be718751b
6 changed files with 270 additions and 291 deletions
|
@ -116,7 +116,7 @@ class TautulliServer(NamedTuple):
|
|||
id: int = None
|
||||
url: str = None
|
||||
fallback_ip: str = None
|
||||
apikey: str = None
|
||||
api_key: str = None
|
||||
verify_ssl: bool = None
|
||||
get_activity: bool = False
|
||||
get_activity_run_seconds: int = 30
|
||||
|
@ -319,7 +319,6 @@ class TautulliStream(NamedTuple):
|
|||
transcode_progress: int = None
|
||||
subtitle_language: str = None
|
||||
stream_subtitle_container: str = None
|
||||
_cache_time: int = None
|
||||
|
||||
|
||||
def geoip_download():
|
||||
|
|
|
@ -7,11 +7,11 @@ from Varken.helpers import Movie, Queue
|
|||
|
||||
|
||||
class RadarrAPI(object):
|
||||
def __init__(self, servers, influx_server):
|
||||
def __init__(self, server, influx_server):
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username,
|
||||
influx_server.password, 'plex2')
|
||||
self.servers = servers
|
||||
self.server = server
|
||||
# Create session to reduce server web thread load, and globally define pageSize for all requests
|
||||
self.session = requests.Session()
|
||||
|
||||
|
@ -20,19 +20,18 @@ class RadarrAPI(object):
|
|||
self.influx.write_points(payload)
|
||||
|
||||
@logging
|
||||
def get_missing(self, notimplemented):
|
||||
def get_missing(self):
|
||||
endpoint = '/api/movie'
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
influx_payload = []
|
||||
|
||||
for server in self.servers:
|
||||
missing = []
|
||||
headers = {'X-Api-Key': server.api_key}
|
||||
get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json()
|
||||
headers = {'X-Api-Key': self.server.api_key}
|
||||
get = self.session.get(self.server.url + endpoint, headers=headers, verify=self.server.verify_ssl).json()
|
||||
movies = [Movie(**movie) for movie in get]
|
||||
|
||||
for movie in movies:
|
||||
if server.get_missing:
|
||||
if self.server.get_missing:
|
||||
if not movie.downloaded and movie.isAvailable:
|
||||
ma = True
|
||||
else:
|
||||
|
@ -48,7 +47,7 @@ class RadarrAPI(object):
|
|||
"Missing": True,
|
||||
"Missing_Available": ma,
|
||||
"tmdbId": mid,
|
||||
"server": server.id
|
||||
"server": self.server.id
|
||||
},
|
||||
"time": self.now,
|
||||
"fields": {
|
||||
|
@ -60,15 +59,14 @@ class RadarrAPI(object):
|
|||
self.influx_push(influx_payload)
|
||||
|
||||
@logging
|
||||
def get_queue(self, notimplemented):
|
||||
def get_queue(self):
|
||||
endpoint = '/api/queue'
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
influx_payload = []
|
||||
|
||||
for server in self.servers:
|
||||
queue = []
|
||||
headers = {'X-Api-Key': server.api_key}
|
||||
get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json()
|
||||
headers = {'X-Api-Key': self.server.api_key}
|
||||
get = self.session.get(self.server.url + endpoint, headers=headers, verify=self.server.verify_ssl).json()
|
||||
for movie in get:
|
||||
movie['movie'] = Movie(**movie['movie'])
|
||||
download_queue = [Queue(**movie) for movie in get]
|
||||
|
@ -91,7 +89,7 @@ class RadarrAPI(object):
|
|||
"tags": {
|
||||
"type": "Queue",
|
||||
"tmdbId": qid,
|
||||
"server": server.id
|
||||
"server": self.server.id
|
||||
},
|
||||
"time": self.now,
|
||||
"fields": {
|
||||
|
|
|
@ -7,31 +7,29 @@ from Varken.helpers import TVShow, Queue
|
|||
|
||||
|
||||
class SonarrAPI(object):
|
||||
def __init__(self, servers, influx_server):
|
||||
def __init__(self, server, influx_server):
|
||||
# Set Time of initialization
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
self.today = str(date.today())
|
||||
self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username,
|
||||
influx_server.password, 'plex')
|
||||
self.servers = servers
|
||||
self.server = server
|
||||
# Create session to reduce server web thread load, and globally define pageSize for all requests
|
||||
self.session = requests.Session()
|
||||
self.session.params = {'pageSize': 1000}
|
||||
|
||||
@logging
|
||||
def get_missing(self, days_past):
|
||||
def get_missing(self):
|
||||
endpoint = '/api/calendar'
|
||||
last_days = str(date.today() + timedelta(days=-days_past))
|
||||
last_days = str(date.today() + timedelta(days=-self.server.missing_days))
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
params = {'start': last_days, 'end': self.today}
|
||||
influx_payload = []
|
||||
|
||||
for server in self.servers:
|
||||
missing = []
|
||||
headers = {'X-Api-Key': server.api_key}
|
||||
headers = {'X-Api-Key': self.server.api_key}
|
||||
|
||||
get = self.session.get(server.url + endpoint, params=params, headers=headers,
|
||||
verify=server.verify_ssl).json()
|
||||
get = self.session.get(self.server.url + endpoint, params=params, headers=headers,
|
||||
verify=self.server.verify_ssl).json()
|
||||
# Iteratively create a list of TVShow Objects from response json
|
||||
tv_shows = [TVShow(**show) for show in get]
|
||||
|
||||
|
@ -48,7 +46,7 @@ class SonarrAPI(object):
|
|||
"tags": {
|
||||
"type": "Missing",
|
||||
"sonarrId": sonarr_id,
|
||||
"server": server.id
|
||||
"server": self.server.id
|
||||
},
|
||||
"time": self.now,
|
||||
"fields": {
|
||||
|
@ -62,21 +60,19 @@ class SonarrAPI(object):
|
|||
|
||||
self.influx_push(influx_payload)
|
||||
|
||||
|
||||
@logging
|
||||
def get_future(self, future_days):
|
||||
def get_future(self):
|
||||
endpoint = '/api/calendar/'
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
future = str(date.today() + timedelta(days=future_days))
|
||||
future = str(date.today() + timedelta(days=self.server.future_days))
|
||||
influx_payload = []
|
||||
|
||||
for server in self.servers:
|
||||
air_days = []
|
||||
|
||||
headers = {'X-Api-Key': server.api_key}
|
||||
headers = {'X-Api-Key': self.server.api_key}
|
||||
params = {'start': self.today, 'end': future}
|
||||
|
||||
get = self.session.get(server.url + endpoint, params=params, headers=headers,
|
||||
verify=server.verify_ssl).json()
|
||||
get = self.session.get(self.server.url + endpoint, params=params, headers=headers,
|
||||
verify=self.server.verify_ssl).json()
|
||||
tv_shows = [TVShow(**show) for show in get]
|
||||
|
||||
for show in tv_shows:
|
||||
|
@ -90,7 +86,7 @@ class SonarrAPI(object):
|
|||
"tags": {
|
||||
"type": "Future",
|
||||
"sonarrId": sonarr_id,
|
||||
"server": server.id
|
||||
"server": self.server.id
|
||||
},
|
||||
"time": self.now,
|
||||
"fields": {
|
||||
|
@ -106,16 +102,14 @@ class SonarrAPI(object):
|
|||
self.influx_push(influx_payload)
|
||||
|
||||
@logging
|
||||
def get_queue(self, notimplemented):
|
||||
def get_queue(self):
|
||||
influx_payload = []
|
||||
endpoint = '/api/queue'
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
|
||||
for server in self.servers:
|
||||
queue = []
|
||||
headers = {'X-Api-Key': server.api_key}
|
||||
headers = {'X-Api-Key': self.server.api_key}
|
||||
|
||||
get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json()
|
||||
get = self.session.get(self.server.url + endpoint, headers=headers, verify=self.server.verify_ssl).json()
|
||||
download_queue = [Queue(**show) for show in get]
|
||||
|
||||
for show in download_queue:
|
||||
|
@ -135,7 +129,7 @@ class SonarrAPI(object):
|
|||
"tags": {
|
||||
"type": "Queue",
|
||||
"sonarrId": sonarr_id,
|
||||
"server": server.id
|
||||
"server": self.server.id
|
||||
|
||||
},
|
||||
"time": self.now,
|
||||
|
|
|
@ -7,12 +7,12 @@ from Varken.logger import logging
|
|||
|
||||
|
||||
class TautulliAPI(object):
|
||||
def __init__(self, servers, influx_server):
|
||||
def __init__(self, server, influx_server):
|
||||
# Set Time of initialization
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username,
|
||||
influx_server.password, 'plex2')
|
||||
self.servers = servers
|
||||
self.server = server
|
||||
self.session = requests.Session()
|
||||
self.endpoint = '/api/v2'
|
||||
|
||||
|
@ -21,14 +21,12 @@ class TautulliAPI(object):
|
|||
self.influx.write_points(payload)
|
||||
|
||||
@logging
|
||||
def get_activity(self, notimplemented):
|
||||
def get_activity(self):
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
params = {'cmd': 'get_activity'}
|
||||
influx_payload = []
|
||||
|
||||
for server in self.servers:
|
||||
params['apikey'] = server.apikey
|
||||
g = self.session.get(server.url + self.endpoint, params=params, verify=server.verify_ssl)
|
||||
params['apikey'] = self.server.api_key
|
||||
g = self.session.get(self.server.url + self.endpoint, params=params, verify=self.server.verify_ssl)
|
||||
get = g.json()['response']['data']
|
||||
|
||||
influx_payload.append(
|
||||
|
@ -36,7 +34,7 @@ class TautulliAPI(object):
|
|||
"measurement": "Tautulli",
|
||||
"tags": {
|
||||
"type": "current_stream_stats",
|
||||
"server": server.id
|
||||
"server": self.server.id
|
||||
},
|
||||
"time": self.now,
|
||||
"fields": {
|
||||
|
@ -54,14 +52,12 @@ class TautulliAPI(object):
|
|||
self.influx_push(influx_payload)
|
||||
|
||||
@logging
|
||||
def get_sessions(self, notimplemented):
|
||||
def get_sessions(self):
|
||||
self.now = datetime.now(timezone.utc).astimezone().isoformat()
|
||||
params = {'cmd': 'get_activity'}
|
||||
influx_payload = []
|
||||
|
||||
for server in self.servers:
|
||||
params['apikey'] = server.apikey
|
||||
g = self.session.get(server.url + self.endpoint, params=params, verify=server.verify_ssl)
|
||||
params['apikey'] = self.server.api_key
|
||||
g = self.session.get(self.server.url + self.endpoint, params=params, verify=self.server.verify_ssl)
|
||||
get = g.json()['response']['data']['sessions']
|
||||
sessions = [TautulliStream(**session) for session in get]
|
||||
|
||||
|
@ -69,8 +65,8 @@ class TautulliAPI(object):
|
|||
try:
|
||||
geodata = geo_lookup(session.ip_address_public)
|
||||
except (ValueError, AddressNotFoundError):
|
||||
if server.fallback_ip:
|
||||
geodata = geo_lookup(server.fallback_ip)
|
||||
if self.server.fallback_ip:
|
||||
geodata = geo_lookup(self.server.fallback_ip)
|
||||
else:
|
||||
my_ip = requests.get('http://ip.42.pl/raw').text
|
||||
geodata = geo_lookup(my_ip)
|
||||
|
@ -135,7 +131,7 @@ class TautulliAPI(object):
|
|||
"longitude": longitude,
|
||||
"player_state": player_state,
|
||||
"device_type": session.platform,
|
||||
"server": server.id
|
||||
"server": self.server.id
|
||||
},
|
||||
"time": self.now,
|
||||
"fields": {
|
||||
|
|
|
@ -61,7 +61,6 @@ verify_ssl = true
|
|||
queue = true
|
||||
queue_run_seconds = 300
|
||||
get_missing = true
|
||||
get_missing_available = true
|
||||
get_missing_run_seconds = 300
|
||||
|
||||
[radarr-2]
|
||||
|
|
23
varken.py
23
varken.py
|
@ -8,8 +8,8 @@ from Varken.tautulli import TautulliAPI
|
|||
from Varken.radarr import RadarrAPI
|
||||
|
||||
|
||||
def threaded(job, days=None):
|
||||
thread = threading.Thread(target=job, args=([days]))
|
||||
def threaded(job):
|
||||
thread = threading.Thread(target=job)
|
||||
thread.start()
|
||||
|
||||
|
||||
|
@ -17,38 +17,31 @@ if __name__ == "__main__":
|
|||
CONFIG = INIParser()
|
||||
|
||||
if CONFIG.sonarr_enabled:
|
||||
SONARR = SonarrAPI(CONFIG.sonarr_servers, CONFIG.influx_server)
|
||||
|
||||
for server in CONFIG.sonarr_servers:
|
||||
SONARR = SonarrAPI(server, CONFIG.influx_server)
|
||||
if server.queue:
|
||||
schedule.every(server.queue_run_seconds).seconds.do(threaded, SONARR.get_queue)
|
||||
if server.missing_days > 0:
|
||||
schedule.every(server.missing_days_run_seconds).seconds.do(threaded, SONARR.get_missing,
|
||||
server.missing_days)
|
||||
schedule.every(server.missing_days_run_seconds).seconds.do(threaded, SONARR.get_missing)
|
||||
if server.future_days > 0:
|
||||
schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future,
|
||||
server.future_days)
|
||||
schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future)
|
||||
|
||||
if CONFIG.tautulli_enabled:
|
||||
TAUTULLI = TautulliAPI(CONFIG.tautulli_servers, CONFIG.influx_server)
|
||||
|
||||
for server in CONFIG.tautulli_servers:
|
||||
TAUTULLI = TautulliAPI(server, CONFIG.influx_server)
|
||||
if server.get_activity:
|
||||
schedule.every(server.get_activity_run_seconds).seconds.do(threaded, TAUTULLI.get_activity)
|
||||
if server.get_sessions:
|
||||
schedule.every(server.get_sessions_run_seconds).seconds.do(threaded, TAUTULLI.get_sessions)
|
||||
|
||||
if CONFIG.radarr_enabled:
|
||||
RADARR = RadarrAPI(CONFIG.radarr_servers, CONFIG.influx_server)
|
||||
|
||||
for server in CONFIG.radarr_servers:
|
||||
if any([server.get_missing, server.get_missing_available]):
|
||||
RADARR = RadarrAPI(server, CONFIG.influx_server)
|
||||
if server.get_missing:
|
||||
schedule.every(server.get_missing_run_seconds).seconds.do(threaded, RADARR.get_missing)
|
||||
if server.queue:
|
||||
schedule.every(server.queue_run_seconds).seconds.do(threaded, RADARR.get_queue)
|
||||
|
||||
|
||||
|
||||
while True:
|
||||
schedule.run_pending()
|
||||
sleep(1)
|
||||
|
|
Loading…
Reference in a new issue