swapped capitalization
This commit is contained in:
parent
2e158212d2
commit
8b2056ad66
8 changed files with 487 additions and 13 deletions
0
varken/__init__.py
Normal file
0
varken/__init__.py
Normal file
16
varken/dbmanager.py
Normal file
16
varken/dbmanager.py
Normal file
|
@ -0,0 +1,16 @@
|
|||
from influxdb import InfluxDBClient
|
||||
|
||||
|
||||
class DBManager(object):
    """Thin wrapper around an InfluxDB connection for the 'varken' database.

    On construction it connects with the credentials from *server* and, on a
    first run (no 'varken' database yet), creates the database plus a 30-day
    retention policy.
    """

    def __init__(self, server):
        # *server* carries url/port/username/password (InfluxServer-style object)
        self.server = server
        self.influx = InfluxDBClient(self.server.url, self.server.port,
                                     self.server.username, self.server.password,
                                     'varken')

        existing = {db['name'] for db in self.influx.get_list_database()}
        if 'varken' not in existing:
            # First run: create the database and a 30d retention policy with
            # 1h shard duration, not set as the default policy.
            self.influx.create_database('varken')
            self.influx.create_retention_policy('varken 30d/1h', '30d', '1',
                                                'varken', False, '1h')

    def write_points(self, data):
        """Forward a list of InfluxDB point dicts to the server."""
        self.influx.write_points(data)
|
181
varken/iniparser.py
Normal file
181
varken/iniparser.py
Normal file
|
@ -0,0 +1,181 @@
|
|||
import sys
|
||||
import configparser
|
||||
from sys import exit
|
||||
from os.path import join, exists
|
||||
from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer
|
||||
|
||||
|
||||
class INIParser(object):
    """Read varken.ini and materialize per-service server configuration.

    Exposes ``*_enabled`` flags and ``*_servers`` lists for Sonarr, Radarr,
    Ombi and Tautulli, plus ``influx_server`` and the optional ``asa`` tuple.
    """

    def __init__(self, data_folder):
        self.config = configparser.ConfigParser()
        self.data_folder = data_folder

        self.influx_server = InfluxServer()

        self.sonarr_enabled = False
        self.sonarr_servers = []

        self.radarr_enabled = False
        self.radarr_servers = []

        self.ombi_enabled = False
        self.ombi_servers = []

        self.tautulli_enabled = False
        self.tautulli_servers = []

        self.asa_enabled = False
        self.asa = None

        self.parse_opts()

    def read_file(self):
        """Load varken.ini from the data folder, exiting if it is missing."""
        file_path = join(self.data_folder, 'varken.ini')
        if not exists(file_path):
            exit("You do not have a varken.ini file in {}".format(self.data_folder))
        with open(file_path) as config_ini:
            self.config.read_file(config_ini)

    def _server_ids_enabled(self, option):
        """Return whether [global] *option* enables its service.

        A literal boolean false disables the service; anything that is not a
        parseable boolean (i.e. a comma-separated id list) enables it.
        BUGFIX: the old code called sys.exit() when the value WAS ``false``,
        contradicting its own message "must be either false, or a
        comma-separated list of server ids".
        """
        try:
            return self.config.getboolean('global', option)
        except ValueError:
            # Not a boolean -> treat as a list of server ids.
            return True

    def _server_ids(self, option):
        """Return the cleaned list of server ids from [global] *option*."""
        # strip() each id so 'sonarr_server_ids = 1, 2' also parses
        return [server_id.strip()
                for server_id in self.config.get('global', option).split(',')]

    def _server_url_and_ssl(self, section):
        """Return ``(full_url, verify_ssl)`` for a server *section*.

        verify_ssl is forced to False for plain-http servers, where
        certificate verification is meaningless.
        """
        scheme = 'https://' if self.config.getboolean(section, 'ssl') else 'http://'
        verify_ssl = self.config.getboolean(section, 'verify_ssl')
        if scheme != 'https://':
            verify_ssl = False
        return scheme + self.config.get(section, 'url'), verify_ssl

    def parse_opts(self):
        """Populate all flags, server lists and the influx settings from the ini."""
        self.read_file()

        # Parse InfluxDB options
        url = self.config.get('influxdb', 'url')
        port = self.config.getint('influxdb', 'port')
        username = self.config.get('influxdb', 'username')
        password = self.config.get('influxdb', 'password')
        self.influx_server = InfluxServer(url, port, username, password)

        # Parse Sonarr options
        self.sonarr_enabled = self._server_ids_enabled('sonarr_server_ids')
        if self.sonarr_enabled:
            for server_id in self._server_ids('sonarr_server_ids'):
                section = 'sonarr-' + server_id
                url, verify_ssl = self._server_url_and_ssl(section)
                apikey = self.config.get(section, 'apikey')
                queue = self.config.getboolean(section, 'queue')
                missing_days = self.config.getint(section, 'missing_days')
                future_days = self.config.getint(section, 'future_days')
                missing_days_run_seconds = self.config.getint(section, 'missing_days_run_seconds')
                future_days_run_seconds = self.config.getint(section, 'future_days_run_seconds')
                queue_run_seconds = self.config.getint(section, 'queue_run_seconds')

                self.sonarr_servers.append(
                    SonarrServer(server_id, url, apikey, verify_ssl, missing_days,
                                 missing_days_run_seconds, future_days,
                                 future_days_run_seconds, queue, queue_run_seconds))

        # Parse Radarr options
        self.radarr_enabled = self._server_ids_enabled('radarr_server_ids')
        if self.radarr_enabled:
            for server_id in self._server_ids('radarr_server_ids'):
                section = 'radarr-' + server_id
                url, verify_ssl = self._server_url_and_ssl(section)
                apikey = self.config.get(section, 'apikey')
                queue = self.config.getboolean(section, 'queue')
                queue_run_seconds = self.config.getint(section, 'queue_run_seconds')
                get_missing = self.config.getboolean(section, 'get_missing')
                get_missing_run_seconds = self.config.getint(section, 'get_missing_run_seconds')

                self.radarr_servers.append(
                    RadarrServer(server_id, url, apikey, verify_ssl, queue,
                                 queue_run_seconds, get_missing, get_missing_run_seconds))

        # Parse Tautulli options
        self.tautulli_enabled = self._server_ids_enabled('tautulli_server_ids')
        if self.tautulli_enabled:
            for server_id in self._server_ids('tautulli_server_ids'):
                section = 'tautulli-' + server_id
                url, verify_ssl = self._server_url_and_ssl(section)
                fallback_ip = self.config.get(section, 'fallback_ip')
                apikey = self.config.get(section, 'apikey')
                get_activity = self.config.getboolean(section, 'get_activity')
                get_activity_run_seconds = self.config.getint(section, 'get_activity_run_seconds')
                get_sessions = self.config.getboolean(section, 'get_sessions')
                get_sessions_run_seconds = self.config.getint(section, 'get_sessions_run_seconds')

                self.tautulli_servers.append(
                    TautulliServer(server_id, url, fallback_ip, apikey, verify_ssl,
                                   get_activity, get_activity_run_seconds,
                                   get_sessions, get_sessions_run_seconds))

        # Parse Ombi options
        self.ombi_enabled = self._server_ids_enabled('ombi_server_ids')
        if self.ombi_enabled:
            for server_id in self._server_ids('ombi_server_ids'):
                section = 'ombi-' + server_id
                url, verify_ssl = self._server_url_and_ssl(section)
                apikey = self.config.get(section, 'apikey')
                request_type_counts = self.config.getboolean(section, 'get_request_type_counts')
                request_type_run_seconds = self.config.getint(section, 'request_type_run_seconds')
                request_total_counts = self.config.getboolean(section, 'get_request_total_counts')
                request_total_run_seconds = self.config.getint(section, 'request_total_run_seconds')

                self.ombi_servers.append(
                    OmbiServer(server_id, url, apikey, verify_ssl, request_type_counts,
                               request_type_run_seconds, request_total_counts,
                               request_total_run_seconds))

        # Parse ASA opts (single server, stored as a plain tuple)
        if self.config.getboolean('global', 'asa'):
            self.asa_enabled = True
            url = self.config.get('asa', 'url')
            username = self.config.get('asa', 'username')
            password = self.config.get('asa', 'password')
            scheme = 'https://' if self.config.getboolean('asa', 'ssl') else 'http://'
            verify_ssl = self.config.getboolean('asa', 'verify_ssl')
            if scheme != 'https://':
                verify_ssl = False
            db_name = self.config.get('asa', 'influx_db')

            self.asa = (scheme + url, username, password, verify_ssl, db_name)
|
79
varken/ombi.py
Normal file
79
varken/ombi.py
Normal file
|
@ -0,0 +1,79 @@
|
|||
from requests import Session, Request
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from varken.logger import logging
|
||||
from varken.helpers import connection_handler
|
||||
from varken.structures import OmbiRequestCounts
|
||||
|
||||
|
||||
class OmbiAPI(object):
    """Pull request statistics from an Ombi server and write them to InfluxDB."""

    def __init__(self, server, dbmanager):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.server = server
        # Create session to reduce server web thread load, and globally define
        # the api key header for all requests
        self.session = Session()
        self.session.headers = {'Apikey': self.server.api_key}

    @logging
    def get_total_requests(self):
        """Write one point with total / movie / tv request counts."""
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        tv_endpoint = '/api/v1/Request/tv'
        movie_endpoint = "/api/v1/Request/movie"

        tv_req = self.session.prepare_request(Request('GET', self.server.url + tv_endpoint))
        movie_req = self.session.prepare_request(Request('GET', self.server.url + movie_endpoint))
        get_tv = connection_handler(self.session, tv_req, self.server.verify_ssl)
        get_movie = connection_handler(self.session, movie_req, self.server.verify_ssl)

        # Bail out only on failed requests. An empty list is a valid
        # "zero requests" answer and must still be written; the previous
        # `not all([get_tv, get_movie])` check wrongly skipped it because an
        # empty list is falsy.
        # NOTE(review): assumes connection_handler returns None on failure —
        # confirm against varken.helpers.
        if get_tv is None or get_movie is None:
            return

        movie_requests = len(get_movie)
        tv_requests = len(get_tv)

        influx_payload = [
            {
                "measurement": "Ombi",
                "tags": {
                    "type": "Request_Total"
                },
                "time": self.now,
                "fields": {
                    "total": movie_requests + tv_requests,
                    "movies": movie_requests,
                    "tv_shows": tv_requests
                }
            }
        ]

        self.dbmanager.write_points(influx_payload)

    @logging
    def get_request_counts(self):
        """Write one point with pending / approved / available request counts."""
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/Request/count'

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        # Failed request or empty body: nothing usable to unpack into
        # OmbiRequestCounts below.
        if not get:
            return

        requests = OmbiRequestCounts(**get)
        influx_payload = [
            {
                "measurement": "Ombi",
                "tags": {
                    "type": "Request_Counts"
                },
                "time": self.now,
                "fields": {
                    "pending": requests.pending,
                    "approved": requests.approved,
                    "available": requests.available
                }
            }
        ]

        self.dbmanager.write_points(influx_payload)
|
112
varken/radarr.py
Normal file
112
varken/radarr.py
Normal file
|
@ -0,0 +1,112 @@
|
|||
from requests import Session, Request
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from varken.logger import logging
|
||||
from varken.helpers import hashit, connection_handler
|
||||
from varken.structures import Movie, Queue
|
||||
|
||||
|
||||
class RadarrAPI(object):
    """Collect missing-movie and download-queue data from Radarr -> InfluxDB."""

    def __init__(self, server, dbmanager):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.server = server
        # Create session to reduce server web thread load, and globally define
        # the api key header for all requests
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}

    @logging
    def get_missing(self):
        """Write one point per movie flagging whether it is missing-but-available."""
        endpoint = '/api/movie'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        missing = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        # Failed request (or empty library — nothing to write either way)
        if not get:
            return

        movies = [Movie(**movie) for movie in get]

        # `get_missing` is a per-server config flag, not a per-movie property:
        # check it once instead of re-testing the same invariant on every loop
        # iteration as the old code did.
        if self.server.get_missing:
            for movie in movies:
                # "missing available" = released but not yet downloaded
                ma = bool(not movie.downloaded and movie.isAvailable)
                movie_name = '{} ({})'.format(movie.title, movie.year)
                missing.append((movie_name, ma, movie.tmdbId))

        for title, ma, mid in missing:
            # hash keeps the point unique per (server, title, tmdbId)
            hash_id = hashit('{}{}{}'.format(self.server.id, title, mid))
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "Missing": True,
                        "Missing_Available": ma,
                        "tmdbId": mid,
                        "server": self.server.id,
                        "name": title
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    @logging
    def get_queue(self):
        """Write one point per item currently in the Radarr download queue."""
        endpoint = '/api/queue'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        queue = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        # Failed request (or empty queue — nothing to write either way)
        if not get:
            return

        # Promote the nested movie dict to a Movie object before unpacking
        # each queue entry into a Queue object.
        for movie in get:
            movie['movie'] = Movie(**movie['movie'])
        download_queue = [Queue(**movie) for movie in get]

        for queue_item in download_queue:
            name = '{} ({})'.format(queue_item.movie.title, queue_item.movie.year)

            # protocol_id: 1 = usenet, 0 = torrent (anything else)
            if queue_item.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            queue.append((name, queue_item.quality['quality']['name'],
                          queue_item.protocol.upper(), protocol_id, queue_item.id))

        for movie, quality, protocol, protocol_id, qid in queue:
            hash_id = hashit('{}{}{}'.format(self.server.id, movie, quality))
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Queue",
                        "tmdbId": qid,
                        "server": self.server.id,
                        "name": movie,
                        "quality": quality,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
|
165
varken/sonarr.py
Normal file
165
varken/sonarr.py
Normal file
|
@ -0,0 +1,165 @@
|
|||
from requests import Session, Request
|
||||
from datetime import datetime, timezone, date, timedelta
|
||||
|
||||
from varken.logger import logging
|
||||
from varken.helpers import hashit, connection_handler
|
||||
from varken.structures import Queue, TVShow
|
||||
|
||||
|
||||
class SonarrAPI(object):
    """Collect missing / upcoming / queued episode data from Sonarr -> InfluxDB."""

    def __init__(self, server, dbmanager):
        # Set Time of initialization
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.today = str(date.today())
        self.server = server
        # Create session to reduce server web thread load, and globally define pageSize for all requests
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
        self.session.params = {'pageSize': 1000}

    @logging
    def get_missing(self):
        """Write one point per aired episode (last `missing_days` days) without a file."""
        endpoint = '/api/calendar'
        # Window start: `missing_days` days in the past, end: today
        last_days = str(date.today() + timedelta(days=-self.server.missing_days))
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        params = {'start': last_days, 'end': self.today}
        influx_payload = []
        missing = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        # Failed request; an empty calendar is also falsy and returns here,
        # which is harmless since there would be nothing to write.
        if not get:
            return

        # Iteratively create a list of TVShow Objects from response json
        tv_shows = [TVShow(**show) for show in get]

        # Add show to missing list if file does not exist
        for show in tv_shows:
            if not show.hasFile:
                # e.g. S01E05
                sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
                missing.append((show.series['title'], sxe, show.airDate, show.title, show.id))

        for series_title, sxe, air_date, episode_title, sonarr_id in missing:
            # hash keeps the point unique per (server, series, episode)
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Missing",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "airs": air_date
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    @logging
    def get_future(self):
        """Write one point per episode airing in the next `future_days` days."""
        endpoint = '/api/calendar/'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        # Window start: today, end: `future_days` days ahead
        future = str(date.today() + timedelta(days=self.server.future_days))
        influx_payload = []
        air_days = []
        params = {'start': self.today, 'end': future}

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        tv_shows = [TVShow(**show) for show in get]

        for show in tv_shows:
            sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
            # downloaded flag stored as 1/0 so it can be graphed numerically
            if show.hasFile:
                downloaded = 1
            else:
                downloaded = 0
            air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDate, show.id))

        for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Future",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "airs": air_date,
                        "downloaded": dl_status
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    @logging
    def get_queue(self):
        """Write one point per item currently in the Sonarr download queue."""
        influx_payload = []
        endpoint = '/api/queue'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        queue = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        download_queue = [Queue(**show) for show in get]

        for show in download_queue:
            # Queue entries carry episode info as a nested dict, not a TVShow
            sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber'])
            # protocol_id: 1 = usenet, 0 = torrent (anything else)
            if show.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
                          protocol_id, sxe, show.id))

        for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Queue",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
|
158
varken/tautulli.py
Normal file
158
varken/tautulli.py
Normal file
|
@ -0,0 +1,158 @@
|
|||
from datetime import datetime, timezone
|
||||
from geoip2.errors import AddressNotFoundError
|
||||
from requests import Session, Request
|
||||
|
||||
from varken.logger import logging
|
||||
from varken.helpers import geo_lookup, hashit, connection_handler
|
||||
from varken.structures import TautulliStream
|
||||
|
||||
|
||||
class TautulliAPI(object):
    """Collect stream/session activity from a Tautulli server -> InfluxDB."""

    def __init__(self, server, dbmanager):
        # Set Time of initialization
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.server = server
        self.session = Session()
        # apikey is sent as a query parameter on every request
        self.session.params['apikey'] = self.server.api_key
        self.endpoint = '/api/v2'

    @logging
    def get_activity(self):
        """Write one point with aggregate stream/bandwidth counters."""
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        params = {'cmd': 'get_activity'}
        influx_payload = []

        req = self.session.prepare_request(Request('GET', self.server.url + self.endpoint, params=params))
        g = connection_handler(self.session, req, self.server.verify_ssl)

        # Failed request -> nothing to write
        if not g:
            return
        else:
            get = g['response']['data']

        influx_payload.append(
            {
                "measurement": "Tautulli",
                "tags": {
                    "type": "current_stream_stats",
                    "server": self.server.id
                },
                "time": self.now,
                "fields": {
                    # Tautulli returns these counters as strings; coerce to int
                    "stream_count": int(get['stream_count']),
                    "total_bandwidth": int(get['total_bandwidth']),
                    "wan_bandwidth": int(get['wan_bandwidth']),
                    "lan_bandwidth": int(get['lan_bandwidth']),
                    "transcode_streams": int(get['stream_count_transcode']),
                    "direct_play_streams": int(get['stream_count_direct_play']),
                    "direct_streams": int(get['stream_count_direct_stream'])
                }
            }
        )

        self.dbmanager.write_points(influx_payload)

    @logging
    def get_sessions(self):
        """Write one point per active session, with geo and codec/quality tags."""
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        params = {'cmd': 'get_activity'}
        influx_payload = []

        req = self.session.prepare_request(Request('GET', self.server.url + self.endpoint, params=params))
        g = connection_handler(self.session, req, self.server.verify_ssl)

        if not g:
            return
        else:
            get = g['response']['data']['sessions']

        sessions = [TautulliStream(**session) for session in get]

        for session in sessions:
            # Resolve the viewer's location; fall back to the configured IP,
            # then to an external what-is-my-ip lookup.
            try:
                geodata = geo_lookup(session.ip_address_public)
            except (ValueError, AddressNotFoundError):
                if self.server.fallback_ip:
                    geodata = geo_lookup(self.server.fallback_ip)
                else:
                    # NOTE(review): ip.42.pl is a third-party service and may
                    # be unavailable — confirm this fallback still works.
                    my_ip = self.session.get('http://ip.42.pl/raw').text
                    geodata = geo_lookup(my_ip)

            # No coordinates in the GeoIP DB: use a placeholder location
            if not all([geodata.location.latitude, geodata.location.longitude]):
                latitude = 37.234332396
                longitude = -115.80666344
            else:
                latitude = geodata.location.latitude
                longitude = geodata.location.longitude

            # 'copy' means the stream is remuxed, i.e. effectively direct
            decision = session.transcode_decision
            if decision == 'copy':
                decision = 'direct stream'

            video_decision = session.stream_video_decision
            if video_decision == 'copy':
                video_decision = 'direct stream'
            elif video_decision == '':
                # Audio-only sessions report an empty video decision
                video_decision = 'Music'

            # Normalize resolution to e.g. '1080p', 'SD', '4K', or the
            # container name for non-video streams
            quality = session.stream_video_resolution
            if not quality:
                quality = session.container.upper()
            elif quality in ('SD', 'sd', '4k'):
                quality = session.stream_video_resolution.upper()
            else:
                quality = session.stream_video_resolution + 'p'

            # Numeric player state: 0 playing, 1 paused, 3 buffering.
            # NOTE(review): 2 is skipped, and an unrecognized state leaves
            # player_state as the lowercased string — confirm intended.
            player_state = session.state.lower()
            if player_state == 'playing':
                player_state = 0
            elif player_state == 'paused':
                player_state = 1
            elif player_state == 'buffering':
                player_state = 3

            # Roku reports versions like '9.0.0-build'; keep only the version
            product_version = session.product_version
            if session.platform == 'Roku':
                product_version = session.product_version.split('-')[0]

            # hash keeps the point unique per (session, key, user, title)
            hash_id = hashit('{}{}{}{}'.format(session.session_id, session.session_key, session.username,
                                               session.full_title))
            influx_payload.append(
                {
                    "measurement": "Tautulli",
                    "tags": {
                        "type": "Session",
                        "session_id": session.session_id,
                        "friendly_name": session.friendly_name,
                        "username": session.username,
                        "title": session.full_title,
                        "platform": session.platform,
                        "product_version": product_version,
                        "quality": quality,
                        "video_decision": video_decision.title(),
                        "transcode_decision": decision.title(),
                        "media_type": session.media_type.title(),
                        "audio_codec": session.audio_codec.upper(),
                        "audio_profile": session.audio_profile.upper(),
                        "stream_audio_codec": session.stream_audio_codec.upper(),
                        "quality_profile": session.quality_profile,
                        "progress_percent": session.progress_percent,
                        "region_code": geodata.subdivisions.most_specific.iso_code,
                        "location": geodata.city.name,
                        "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name,
                                                          geodata.city.name),
                        "latitude": latitude,
                        "longitude": longitude,
                        "player_state": player_state,
                        "device_type": session.platform,
                        "server": self.server.id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
|
Loading…
Add table
Add a link
Reference in a new issue