test push

Nicholas St. Germain 2018-12-04 10:50:02 -06:00
parent 8b2056ad66
commit 967a2efc1e
8 changed files with 1 addition and 475 deletions


@@ -2,7 +2,7 @@
 Dutch for PIG. PIG is an acronym for Plex/InfluxDB/Grafana.
 Varken is a standalone command-line utility to aggregate data
-from the plex ecosystem into InfluxDB. Examples use Grafana for a
+from the Plex ecosystem into InfluxDB. Examples use Grafana for a
 frontend
 Requirements w/ install links: [Grafana](http://docs.grafana.org/installation/), [Python3](https://www.python.org/downloads/), [InfluxDB](https://docs.influxdata.com/influxdb/v1.5/introduction/installation/)
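
The hunk above is the whole pitch: pull metrics out of the Plex ecosystem, write them to InfluxDB, chart them in Grafana. As a hedged sketch of that data path only (host, credentials, and the measurement shown are placeholder assumptions, not values from this commit):

from datetime import datetime, timezone

from influxdb import InfluxDBClient

# Placeholder connection values; Varken reads the real ones from varken.ini.
client = InfluxDBClient('localhost', 8086, 'root', 'root', 'varken')
client.write_points([{
    "measurement": "Tautulli",
    "tags": {"type": "current_streams"},
    "time": datetime.now(timezone.utc).astimezone().isoformat(),
    "fields": {"streams": 2}
}])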


@@ -1,16 +0,0 @@
from influxdb import InfluxDBClient


class DBManager(object):
    def __init__(self, server):
        self.server = server
        self.influx = InfluxDBClient(self.server.url, self.server.port, self.server.username,
                                     self.server.password, 'varken')
        databases = [db['name'] for db in self.influx.get_list_database()]

        if 'varken' not in databases:
            self.influx.create_database('varken')
            self.influx.create_retention_policy('Varken 30d/1h', '30d', '1', 'varken', False, '1h')

    def write_points(self, data):
        self.influx.write_points(data)
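
DBManager only assumes its server argument exposes url, port, username, and password. A minimal usage sketch; the namedtuple is a stand-in for Varken.structures.InfluxServer, whose definition is not part of this diff:

from collections import namedtuple

# Stand-in for Varken.structures.InfluxServer (not shown in this commit);
# field names match what DBManager.__init__ reads off the object.
InfluxServer = namedtuple('InfluxServer', ['url', 'port', 'username', 'password'])

dbmanager = DBManager(InfluxServer('localhost', 8086, 'root', 'root'))
dbmanager.write_points([{
    "measurement": "Radarr",
    "tags": {"server": 1},
    "fields": {"hash": "abc123"}
}])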


@@ -1,181 +0,0 @@
import sys
import configparser
from sys import exit
from os.path import join, exists

from Varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer


class INIParser(object):
    def __init__(self, data_folder):
        self.config = configparser.ConfigParser()
        self.data_folder = data_folder

        self.influx_server = InfluxServer()

        self.sonarr_enabled = False
        self.sonarr_servers = []

        self.radarr_enabled = False
        self.radarr_servers = []

        self.ombi_enabled = False
        self.ombi_servers = []

        self.tautulli_enabled = False
        self.tautulli_servers = []

        self.asa_enabled = False
        self.asa = None

        self.parse_opts()

    def read_file(self):
        file_path = join(self.data_folder, 'varken.ini')
        if exists(file_path):
            with open(file_path) as config_ini:
                self.config.read_file(config_ini)
        else:
            exit("You do not have a varken.ini file in {}".format(self.data_folder))

    def parse_opts(self):
        self.read_file()
        # Parse InfluxDB options
        url = self.config.get('influxdb', 'url')
        port = self.config.getint('influxdb', 'port')
        username = self.config.get('influxdb', 'username')
        password = self.config.get('influxdb', 'password')

        self.influx_server = InfluxServer(url, port, username, password)

        # Parse Sonarr options. getboolean() handles a literal "false"; a
        # comma-separated id list raises ValueError, which is taken as "enabled".
        try:
            if not self.config.getboolean('global', 'sonarr_server_ids'):
                sys.exit('server_ids must be either false, or a comma-separated list of server ids')
            elif self.config.getint('global', 'sonarr_server_ids'):
                self.sonarr_enabled = True
        except ValueError:
            self.sonarr_enabled = True

        if self.sonarr_enabled:
            sids = self.config.get('global', 'sonarr_server_ids').strip(' ').split(',')

            for server_id in sids:
                sonarr_section = 'sonarr-' + server_id
                url = self.config.get(sonarr_section, 'url')
                apikey = self.config.get(sonarr_section, 'apikey')
                scheme = 'https://' if self.config.getboolean(sonarr_section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(sonarr_section, 'verify_ssl')
                if scheme != 'https://':
                    verify_ssl = False
                queue = self.config.getboolean(sonarr_section, 'queue')
                missing_days = self.config.getint(sonarr_section, 'missing_days')
                future_days = self.config.getint(sonarr_section, 'future_days')
                missing_days_run_seconds = self.config.getint(sonarr_section, 'missing_days_run_seconds')
                future_days_run_seconds = self.config.getint(sonarr_section, 'future_days_run_seconds')
                queue_run_seconds = self.config.getint(sonarr_section, 'queue_run_seconds')

                server = SonarrServer(server_id, scheme + url, apikey, verify_ssl, missing_days,
                                      missing_days_run_seconds, future_days, future_days_run_seconds,
                                      queue, queue_run_seconds)
                self.sonarr_servers.append(server)

        # Parse Radarr options
        try:
            if not self.config.getboolean('global', 'radarr_server_ids'):
                sys.exit('server_ids must be either false, or a comma-separated list of server ids')
            elif self.config.getint('global', 'radarr_server_ids'):
                self.radarr_enabled = True
        except ValueError:
            self.radarr_enabled = True

        if self.radarr_enabled:
            sids = self.config.get('global', 'radarr_server_ids').strip(' ').split(',')

            for server_id in sids:
                radarr_section = 'radarr-' + server_id
                url = self.config.get(radarr_section, 'url')
                apikey = self.config.get(radarr_section, 'apikey')
                scheme = 'https://' if self.config.getboolean(radarr_section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(radarr_section, 'verify_ssl')
                if scheme != 'https://':
                    verify_ssl = False
                queue = self.config.getboolean(radarr_section, 'queue')
                queue_run_seconds = self.config.getint(radarr_section, 'queue_run_seconds')
                get_missing = self.config.getboolean(radarr_section, 'get_missing')
                get_missing_run_seconds = self.config.getint(radarr_section, 'get_missing_run_seconds')

                server = RadarrServer(server_id, scheme + url, apikey, verify_ssl, queue, queue_run_seconds,
                                      get_missing, get_missing_run_seconds)
                self.radarr_servers.append(server)

        # Parse Tautulli options
        try:
            if not self.config.getboolean('global', 'tautulli_server_ids'):
                sys.exit('server_ids must be either false, or a comma-separated list of server ids')
            elif self.config.getint('global', 'tautulli_server_ids'):
                self.tautulli_enabled = True
        except ValueError:
            self.tautulli_enabled = True

        if self.tautulli_enabled:
            sids = self.config.get('global', 'tautulli_server_ids').strip(' ').split(',')

            for server_id in sids:
                tautulli_section = 'tautulli-' + server_id
                url = self.config.get(tautulli_section, 'url')
                fallback_ip = self.config.get(tautulli_section, 'fallback_ip')
                apikey = self.config.get(tautulli_section, 'apikey')
                scheme = 'https://' if self.config.getboolean(tautulli_section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(tautulli_section, 'verify_ssl')
                if scheme != 'https://':
                    verify_ssl = False
                get_activity = self.config.getboolean(tautulli_section, 'get_activity')
                get_activity_run_seconds = self.config.getint(tautulli_section, 'get_activity_run_seconds')
                get_sessions = self.config.getboolean(tautulli_section, 'get_sessions')
                get_sessions_run_seconds = self.config.getint(tautulli_section, 'get_sessions_run_seconds')

                server = TautulliServer(server_id, scheme + url, fallback_ip, apikey, verify_ssl, get_activity,
                                        get_activity_run_seconds, get_sessions, get_sessions_run_seconds)
                self.tautulli_servers.append(server)

        # Parse Ombi options
        try:
            if not self.config.getboolean('global', 'ombi_server_ids'):
                sys.exit('server_ids must be either false, or a comma-separated list of server ids')
            elif self.config.getint('global', 'ombi_server_ids'):
                self.ombi_enabled = True
        except ValueError:
            self.ombi_enabled = True

        if self.ombi_enabled:
            sids = self.config.get('global', 'ombi_server_ids').strip(' ').split(',')

            for server_id in sids:
                ombi_section = 'ombi-' + server_id
                url = self.config.get(ombi_section, 'url')
                apikey = self.config.get(ombi_section, 'apikey')
                scheme = 'https://' if self.config.getboolean(ombi_section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(ombi_section, 'verify_ssl')
                if scheme != 'https://':
                    verify_ssl = False
                request_type_counts = self.config.getboolean(ombi_section, 'get_request_type_counts')
                request_type_run_seconds = self.config.getint(ombi_section, 'request_type_run_seconds')
                request_total_counts = self.config.getboolean(ombi_section, 'get_request_total_counts')
                request_total_run_seconds = self.config.getint(ombi_section, 'request_total_run_seconds')

                server = OmbiServer(server_id, scheme + url, apikey, verify_ssl, request_type_counts,
                                    request_type_run_seconds, request_total_counts, request_total_run_seconds)
                self.ombi_servers.append(server)

        # Parse ASA options
        if self.config.getboolean('global', 'asa'):
            self.asa_enabled = True
            url = self.config.get('asa', 'url')
            username = self.config.get('asa', 'username')
            password = self.config.get('asa', 'password')
            scheme = 'https://' if self.config.getboolean('asa', 'ssl') else 'http://'
            verify_ssl = self.config.getboolean('asa', 'verify_ssl')
            if scheme != 'https://':
                verify_ssl = False
            db_name = self.config.get('asa', 'influx_db')

            self.asa = (scheme + url, username, password, verify_ssl, db_name)
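
A minimal varken.ini sketch, derived only from the keys parse_opts() reads above: the [influxdb] block plus one Sonarr server; the radarr-, tautulli-, and ombi- sections follow the same id-suffixed pattern. All values are placeholders:

import configparser

config = configparser.ConfigParser()
config['global'] = {
    'sonarr_server_ids': '1',   # or 'false', or a comma-separated list like '1,2'
    'radarr_server_ids': 'false',
    'tautulli_server_ids': 'false',
    'ombi_server_ids': 'false',
    'asa': 'false',
}
config['influxdb'] = {'url': 'localhost', 'port': '8086', 'username': 'root', 'password': 'root'}
config['sonarr-1'] = {
    'url': 'localhost:8989',    # scheme is added by the parser from the ssl flag
    'apikey': 'xxxxxxxx',
    'ssl': 'false',
    'verify_ssl': 'false',
    'missing_days': '7',
    'missing_days_run_seconds': '300',
    'future_days': '1',
    'future_days_run_seconds': '300',
    'queue': 'true',
    'queue_run_seconds': '300',
}

with open('varken.ini', 'w') as f:
    config.write(f)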


@@ -1,112 +0,0 @@
from requests import Session, Request
from datetime import datetime, timezone

from Varken.logger import logging
from Varken.helpers import hashit, connection_handler
from Varken.structures import Movie, Queue


class RadarrAPI(object):
    def __init__(self, server, dbmanager):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.server = server
        # Create session to reduce server web thread load
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}

    @logging
    def get_missing(self):
        endpoint = '/api/movie'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        missing = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        movies = [Movie(**movie) for movie in get]

        for movie in movies:
            if self.server.get_missing:
                if not movie.downloaded and movie.isAvailable:
                    ma = True
                else:
                    ma = False
                movie_name = '{} ({})'.format(movie.title, movie.year)
                missing.append((movie_name, ma, movie.tmdbId))

        for title, ma, mid in missing:
            hash_id = hashit('{}{}{}'.format(self.server.id, title, mid))
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "Missing": True,
                        "Missing_Available": ma,
                        "tmdbId": mid,
                        "server": self.server.id,
                        "name": title
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    @logging
    def get_queue(self):
        endpoint = '/api/queue'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        queue = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        for movie in get:
            movie['movie'] = Movie(**movie['movie'])

        download_queue = [Queue(**movie) for movie in get]

        for queue_item in download_queue:
            name = '{} ({})'.format(queue_item.movie.title, queue_item.movie.year)
            if queue_item.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            queue.append((name, queue_item.quality['quality']['name'], queue_item.protocol.upper(),
                          protocol_id, queue_item.id))

        for movie, quality, protocol, protocol_id, qid in queue:
            hash_id = hashit('{}{}{}'.format(self.server.id, movie, quality))
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Queue",
                        "tmdbId": qid,
                        "server": self.server.id,
                        "name": movie,
                        "quality": quality,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
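
The hashit and connection_handler helpers (and the @logging decorator) come from modules outside this diff. Below is a plausible reconstruction inferred from the call sites, not the project's actual code: hashit must return a stable hash for a string, and connection_handler must send a prepared request and return decoded JSON, or a falsy value on failure:

from hashlib import sha256

def hashit(string):
    # Stable digest, written as the InfluxDB field value above.
    return sha256(string.encode()).hexdigest()

def connection_handler(session, request, verify):
    # Send a prepared request; return JSON on HTTP 200, None otherwise.
    try:
        response = session.send(request, verify=verify)
        if response.status_code == 200:
            return response.json()
    except Exception:
        pass
    return None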


@@ -1,165 +0,0 @@
from requests import Session, Request
from datetime import datetime, timezone, date, timedelta

from Varken.logger import logging
from Varken.helpers import hashit, connection_handler
from Varken.structures import Queue, TVShow


class SonarrAPI(object):
    def __init__(self, server, dbmanager):
        # Set time of initialization
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.today = str(date.today())
        self.server = server
        # Create session to reduce server web thread load, and globally define pageSize for all requests
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
        self.session.params = {'pageSize': 1000}

    @logging
    def get_missing(self):
        endpoint = '/api/calendar'
        last_days = str(date.today() + timedelta(days=-self.server.missing_days))
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        params = {'start': last_days, 'end': self.today}
        influx_payload = []
        missing = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        # Iteratively create a list of TVShow objects from response json
        tv_shows = [TVShow(**show) for show in get]

        # Add show to missing list if file does not exist
        for show in tv_shows:
            if not show.hasFile:
                sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
                missing.append((show.series['title'], sxe, show.airDate, show.title, show.id))

        for series_title, sxe, air_date, episode_title, sonarr_id in missing:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Missing",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "airs": air_date
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    @logging
    def get_future(self):
        endpoint = '/api/calendar/'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        future = str(date.today() + timedelta(days=self.server.future_days))
        influx_payload = []
        air_days = []
        params = {'start': self.today, 'end': future}

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        tv_shows = [TVShow(**show) for show in get]

        for show in tv_shows:
            sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
            if show.hasFile:
                downloaded = 1
            else:
                downloaded = 0
            air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDate, show.id))

        for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Future",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "airs": air_date,
                        "downloaded": dl_status
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    @logging
    def get_queue(self):
        influx_payload = []
        endpoint = '/api/queue'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        queue = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        download_queue = [Queue(**show) for show in get]

        for show in download_queue:
            sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber'])
            if show.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
                          protocol_id, sxe, show.id))

        for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Queue",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
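
How the deleted pieces plugged together, as a hedged sketch: the SonarrServer namedtuple mirrors the exact field order INIParser passes and the attributes SonarrAPI reads (Varken.structures is not in this diff), and a plain polling loop stands in for whatever scheduler drives the per-task *_run_seconds intervals elsewhere in the project:

from collections import namedtuple
from time import sleep

# Stand-in for Varken.structures.SonarrServer; field order matches the
# INIParser constructor call above.
SonarrServer = namedtuple('SonarrServer', [
    'id', 'url', 'api_key', 'verify_ssl', 'missing_days', 'missing_days_run_seconds',
    'future_days', 'future_days_run_seconds', 'queue', 'queue_run_seconds'])

server = SonarrServer(1, 'http://localhost:8989', 'xxxxxxxx', False, 7, 300, 1, 300, True, 300)
sonarr = SonarrAPI(server, dbmanager)  # dbmanager from the DBManager example above

while True:
    sonarr.get_missing()
    sonarr.get_future()
    sonarr.get_queue()
    sleep(server.queue_run_seconds)  # one fixed interval instead of per-task scheduling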