From 967a2efc1eef6a4822823fc38a8a9138fca48ab8 Mon Sep 17 00:00:00 2001
From: "Nicholas St. Germain"
Date: Tue, 4 Dec 2018 10:50:02 -0600
Subject: [PATCH] test push

---
 README.md                        |   2 +-
 Varken/dbmanager.py              |  16 ---
 Varken/iniparser.py              | 181 ------------------------------
 Varken/radarr.py                 | 112 -------------------
 Varken/sonarr.py                 | 165 ----------------------------
 {Varken => varken}/helpers.py    |   0
 {Varken => varken}/logger.py     |   0
 {Varken => varken}/structures.py |   0
 8 files changed, 1 insertion(+), 475 deletions(-)
 delete mode 100644 Varken/dbmanager.py
 delete mode 100644 Varken/iniparser.py
 delete mode 100644 Varken/radarr.py
 delete mode 100644 Varken/sonarr.py
 rename {Varken => varken}/helpers.py (100%)
 rename {Varken => varken}/logger.py (100%)
 rename {Varken => varken}/structures.py (100%)

diff --git a/README.md b/README.md
index 5291bf0..9b8370d 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@ Dutch for PIG. PIG is an Acronym for Plex/InfluxDB/Grafana
 
 Varken is a standalone command-line utility to aggregate data
-from the plex ecosystem into InfluxDB. Examples use Grafana for a
+from the Plex ecosystem into InfluxDB. Examples use Grafana for a
 frontend
 
 Requirements /w install links: [Grafana](http://docs.grafana.org/installation/), [Python3](https://www.python.org/downloads/), [InfluxDB](https://docs.influxdata.com/influxdb/v1.5/introduction/installation/)
 
diff --git a/Varken/dbmanager.py b/Varken/dbmanager.py
deleted file mode 100644
index aa73534..0000000
--- a/Varken/dbmanager.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from influxdb import InfluxDBClient
-
-
-class DBManager(object):
-    def __init__(self, server):
-        self.server = server
-        self.influx = InfluxDBClient(self.server.url, self.server.port, self.server.username, self.server.password,
-                                     'varken')
-        databases = [db['name'] for db in self.influx.get_list_database()]
-
-        if 'varken' not in databases:
-            self.influx.create_database('varken')
-            self.influx.create_retention_policy('Varken 30d/1h', '30d', '1', 'varken', False, '1h')
-
-    def write_points(self, data):
-        self.influx.write_points(data)
\ No newline at end of file
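
For anyone rebuilding this functionality under the new varken/ package, the deleted DBManager boils down to one bootstrap pattern: connect, create the database if it is absent, and attach a retention policy. A minimal sketch against the influxdb-python client follows; the host, port, and credentials are placeholders, not values from this repo:

from influxdb import InfluxDBClient

# Connect, then ensure the 'varken' database and a 30d/1h retention policy exist.
influx = InfluxDBClient('localhost', 8086, 'username', 'password', 'varken')  # placeholder connection values

if 'varken' not in [db['name'] for db in influx.get_list_database()]:
    influx.create_database('varken')
    # args: name, duration, replication, database, default, shard_duration
    influx.create_retention_policy('Varken 30d/1h', '30d', '1', 'varken', False, '1h')
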
diff --git a/Varken/iniparser.py b/Varken/iniparser.py
deleted file mode 100644
index 6820bd2..0000000
--- a/Varken/iniparser.py
+++ /dev/null
@@ -1,181 +0,0 @@
-import sys
-import configparser
-from sys import exit
-from os.path import join, exists
-from Varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer
-
-
-class INIParser(object):
-    def __init__(self, data_folder):
-        self.config = configparser.ConfigParser()
-        self.data_folder = data_folder
-
-        self.influx_server = InfluxServer()
-
-        self.sonarr_enabled = False
-        self.sonarr_servers = []
-
-        self.radarr_enabled = False
-        self.radarr_servers = []
-
-        self.ombi_enabled = False
-        self.ombi_servers = []
-
-        self.tautulli_enabled = False
-        self.tautulli_servers = []
-
-        self.asa_enabled = False
-        self.asa = None
-
-        self.parse_opts()
-
-    def read_file(self):
-        file_path = join(self.data_folder, 'varken.ini')
-        if exists(file_path):
-            with open(file_path) as config_ini:
-                self.config.read_file(config_ini)
-        else:
-            exit("You do not have a varken.ini file in {}".format(self.data_folder))
-
-    def parse_opts(self):
-        self.read_file()
-        # Parse InfluxDB options
-        url = self.config.get('influxdb', 'url')
-        port = self.config.getint('influxdb', 'port')
-        username = self.config.get('influxdb', 'username')
-        password = self.config.get('influxdb', 'password')
-
-        self.influx_server = InfluxServer(url, port, username, password)
-
-        # Parse Sonarr options
-        try:
-            if not self.config.getboolean('global', 'sonarr_server_ids'):
-                sys.exit('server_ids must be either false, or a comma-separated list of server ids')
-            elif self.config.getint('global', 'sonarr_server_ids'):
-                self.sonarr_enabled = True
-        except ValueError:
-            self.sonarr_enabled = True
-
-        if self.sonarr_enabled:
-            sids = self.config.get('global', 'sonarr_server_ids').strip(' ').split(',')
-
-            for server_id in sids:
-                sonarr_section = 'sonarr-' + server_id
-                url = self.config.get(sonarr_section, 'url')
-                apikey = self.config.get(sonarr_section, 'apikey')
-                scheme = 'https://' if self.config.getboolean(sonarr_section, 'ssl') else 'http://'
-                verify_ssl = self.config.getboolean(sonarr_section, 'verify_ssl')
-                if scheme != 'https://':
-                    verify_ssl = False
-                queue = self.config.getboolean(sonarr_section, 'queue')
-                missing_days = self.config.getint(sonarr_section, 'missing_days')
-                future_days = self.config.getint(sonarr_section, 'future_days')
-                missing_days_run_seconds = self.config.getint(sonarr_section, 'missing_days_run_seconds')
-                future_days_run_seconds = self.config.getint(sonarr_section, 'future_days_run_seconds')
-                queue_run_seconds = self.config.getint(sonarr_section, 'queue_run_seconds')
-
-                server = SonarrServer(server_id, scheme + url, apikey, verify_ssl, missing_days,
-                                      missing_days_run_seconds, future_days, future_days_run_seconds,
-                                      queue, queue_run_seconds)
-                self.sonarr_servers.append(server)
-
-        # Parse Radarr options
-        try:
-            if not self.config.getboolean('global', 'radarr_server_ids'):
-                sys.exit('server_ids must be either false, or a comma-separated list of server ids')
-            elif self.config.getint('global', 'radarr_server_ids'):
-                self.radarr_enabled = True
-        except ValueError:
-            self.radarr_enabled = True
-
-        if self.radarr_enabled:
-            sids = self.config.get('global', 'radarr_server_ids').strip(' ').split(',')
-
-            for server_id in sids:
-                radarr_section = 'radarr-' + server_id
-                url = self.config.get(radarr_section, 'url')
-                apikey = self.config.get(radarr_section, 'apikey')
-                scheme = 'https://' if self.config.getboolean(radarr_section, 'ssl') else 'http://'
-                verify_ssl = self.config.getboolean(radarr_section, 'verify_ssl')
-                if scheme != 'https://':
-                    verify_ssl = False
-                queue = self.config.getboolean(radarr_section, 'queue')
-                queue_run_seconds = self.config.getint(radarr_section, 'queue_run_seconds')
-                get_missing = self.config.getboolean(radarr_section, 'get_missing')
-                get_missing_run_seconds = self.config.getint(radarr_section, 'get_missing_run_seconds')
-
-                server = RadarrServer(server_id, scheme + url, apikey, verify_ssl, queue, queue_run_seconds,
-                                      get_missing, get_missing_run_seconds)
-                self.radarr_servers.append(server)
-
-        # Parse Tautulli options
-        try:
-            if not self.config.getboolean('global', 'tautulli_server_ids'):
-                sys.exit('server_ids must be either false, or a comma-separated list of server ids')
-            elif self.config.getint('global', 'tautulli_server_ids'):
-                self.tautulli_enabled = True
-        except ValueError:
-            self.tautulli_enabled = True
-
-        if self.tautulli_enabled:
-            sids = self.config.get('global', 'tautulli_server_ids').strip(' ').split(',')
-
-            for server_id in sids:
-                tautulli_section = 'tautulli-' + server_id
-                url = self.config.get(tautulli_section, 'url')
-                fallback_ip = self.config.get(tautulli_section, 'fallback_ip')
-                apikey = self.config.get(tautulli_section, 'apikey')
-                scheme = 'https://' if self.config.getboolean(tautulli_section, 'ssl') else 'http://'
-                verify_ssl = self.config.getboolean(tautulli_section, 'verify_ssl')
-                if scheme != 'https://':
-                    verify_ssl = False
-                get_activity = self.config.getboolean(tautulli_section, 'get_activity')
-                get_activity_run_seconds = self.config.getint(tautulli_section, 'get_activity_run_seconds')
-                get_sessions = self.config.getboolean(tautulli_section, 'get_sessions')
-                get_sessions_run_seconds = self.config.getint(tautulli_section, 'get_sessions_run_seconds')
-
-                server = TautulliServer(server_id, scheme + url, fallback_ip, apikey, verify_ssl, get_activity,
-                                        get_activity_run_seconds, get_sessions, get_sessions_run_seconds)
-                self.tautulli_servers.append(server)
-
-        # Parse Ombi Options
-        try:
-            if not self.config.getboolean('global', 'ombi_server_ids'):
-                sys.exit('server_ids must be either false, or a comma-separated list of server ids')
-            elif self.config.getint('global', 'ombi_server_ids'):
-                self.ombi_enabled = True
-        except ValueError:
-            self.ombi_enabled = True
-
-        if self.ombi_enabled:
-            sids = self.config.get('global', 'ombi_server_ids').strip(' ').split(',')
-            for server_id in sids:
-                ombi_section = 'ombi-' + server_id
-                url = self.config.get(ombi_section, 'url')
-                apikey = self.config.get(ombi_section, 'apikey')
-                scheme = 'https://' if self.config.getboolean(ombi_section, 'ssl') else 'http://'
-                verify_ssl = self.config.getboolean(ombi_section, 'verify_ssl')
-                if scheme != 'https://':
-                    verify_ssl = False
-                request_type_counts = self.config.getboolean(ombi_section, 'get_request_type_counts')
-                request_type_run_seconds = self.config.getint(ombi_section, 'request_type_run_seconds')
-                request_total_counts = self.config.getboolean(ombi_section, 'get_request_total_counts')
-                request_total_run_seconds = self.config.getint(ombi_section, 'request_total_run_seconds')
-
-                server = OmbiServer(server_id, scheme + url, apikey, verify_ssl, request_type_counts,
-                                    request_type_run_seconds, request_total_counts, request_total_run_seconds)
-                self.ombi_servers.append(server)
-
-        # Parse ASA opts
-        if self.config.getboolean('global', 'asa'):
-            self.asa_enabled = True
-            url = self.config.get('asa', 'url')
-            username = self.config.get('asa', 'username')
-            password = self.config.get('asa', 'password')
-            scheme = 'https://' if self.config.getboolean('asa', 'ssl') else 'http://'
-            verify_ssl = self.config.getboolean('asa', 'verify_ssl')
-            if scheme != 'https://':
-                verify_ssl = False
-            db_name = self.config.get('asa', 'influx_db')
-
-            self.asa = (scheme + url, username, password, verify_ssl, db_name)
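
The four near-identical try/except blocks above all lean on one configparser behavior: getboolean() raises ValueError for a string like '1,2', so a comma-separated id list drops into the except branch and enables the service. A distilled sketch of just that mechanism; the section name and values are illustrative, and the sketch treats false as "disabled" rather than exiting:

from configparser import ConfigParser

cfg = ConfigParser()
cfg.read_string('[global]\nsonarr_server_ids = 1,2\n')  # illustrative config

try:
    # 'false' parses as False (disabled); a lone id like '1' parses as True
    enabled = cfg.getboolean('global', 'sonarr_server_ids')
except ValueError:
    # '1,2' is not a valid boolean, so a list of ids lands here
    enabled = True

if enabled:
    server_ids = cfg.get('global', 'sonarr_server_ids').strip(' ').split(',')
    print(server_ids)  # ['1', '2']
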
diff --git a/Varken/radarr.py b/Varken/radarr.py
deleted file mode 100644
index ba7f050..0000000
--- a/Varken/radarr.py
+++ /dev/null
@@ -1,112 +0,0 @@
-from requests import Session, Request
-from datetime import datetime, timezone
-
-from Varken.logger import logging
-from Varken.helpers import hashit, connection_handler
-from Varken.structures import Movie, Queue
-
-
-class RadarrAPI(object):
-    def __init__(self, server, dbmanager):
-        self.now = datetime.now(timezone.utc).astimezone().isoformat()
-        self.dbmanager = dbmanager
-        self.server = server
-        # Create session to reduce server web thread load, and globally define pageSize for all requests
-        self.session = Session()
-        self.session.headers = {'X-Api-Key': self.server.api_key}
-
-    @logging
-    def get_missing(self):
-        endpoint = '/api/movie'
-        self.now = datetime.now(timezone.utc).astimezone().isoformat()
-        influx_payload = []
-        missing = []
-
-        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
-        get = connection_handler(self.session, req, self.server.verify_ssl)
-
-        if not get:
-            return
-
-        movies = [Movie(**movie) for movie in get]
-
-        for movie in movies:
-            if self.server.get_missing:
-                if not movie.downloaded and movie.isAvailable:
-                    ma = True
-                else:
-                    ma = False
-                movie_name = '{} ({})'.format(movie.title, movie.year)
-                missing.append((movie_name, ma, movie.tmdbId))
-
-        for title, ma, mid in missing:
-            hash_id = hashit('{}{}{}'.format(self.server.id, title, mid))
-            influx_payload.append(
-                {
-                    "measurement": "Radarr",
-                    "tags": {
-                        "Missing": True,
-                        "Missing_Available": ma,
-                        "tmdbId": mid,
-                        "server": self.server.id,
-                        "name": title
-                    },
-                    "time": self.now,
-                    "fields": {
-                        "hash": hash_id
-                    }
-                }
-            )
-
-        self.dbmanager.write_points(influx_payload)
-
-    @logging
-    def get_queue(self):
-        endpoint = '/api/queue'
-        self.now = datetime.now(timezone.utc).astimezone().isoformat()
-        influx_payload = []
-        queue = []
-
-        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
-        get = connection_handler(self.session, req, self.server.verify_ssl)
-
-        if not get:
-            return
-
-        for movie in get:
-            movie['movie'] = Movie(**movie['movie'])
-        download_queue = [Queue(**movie) for movie in get]
-
-        for queue_item in download_queue:
-            name = '{} ({})'.format(queue_item.movie.title, queue_item.movie.year)
-
-            if queue_item.protocol.upper() == 'USENET':
-                protocol_id = 1
-            else:
-                protocol_id = 0
-
-            queue.append((name, queue_item.quality['quality']['name'], queue_item.protocol.upper(),
-                          protocol_id, queue_item.id))
-
-        for movie, quality, protocol, protocol_id, qid in queue:
-            hash_id = hashit('{}{}{}'.format(self.server.id, movie, quality))
-            influx_payload.append(
-                {
-                    "measurement": "Radarr",
-                    "tags": {
-                        "type": "Queue",
-                        "tmdbId": qid,
-                        "server": self.server.id,
-                        "name": movie,
-                        "quality": quality,
-                        "protocol": protocol,
-                        "protocol_id": protocol_id
-                    },
-                    "time": self.now,
-                    "fields": {
-                        "hash": hash_id
-                    }
-                }
-            )
-
-        self.dbmanager.write_points(influx_payload)
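
Both Radarr methods above emit the same InfluxDB point shape: every attribute goes in as a tag, and the sole field is a hash that keeps recurring events from writing duplicate values for the same series. A minimal sketch of that shape; the hashit stand-in below assumes an MD5-style helper like the one in Varken/helpers.py, and the movie values are made up:

from hashlib import md5
from datetime import datetime, timezone

def hashit(string):
    # stand-in for the helper in Varken/helpers.py (assumed to be MD5-based)
    return md5(string.encode()).hexdigest()

point = {
    "measurement": "Radarr",
    "tags": {"type": "Queue", "server": 1, "name": "Example Movie (2018)"},
    "time": datetime.now(timezone.utc).astimezone().isoformat(),
    # the hash is the only field, so identical events collapse onto one value
    "fields": {"hash": hashit("1Example Movie (2018)HD-1080p")},
}
# DBManager.write_points([point]) hands this list to InfluxDBClient.write_points()
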
diff --git a/Varken/sonarr.py b/Varken/sonarr.py
deleted file mode 100644
index bcda5e5..0000000
--- a/Varken/sonarr.py
+++ /dev/null
@@ -1,165 +0,0 @@
-from requests import Session, Request
-from datetime import datetime, timezone, date, timedelta
-
-from Varken.logger import logging
-from Varken.helpers import hashit, connection_handler
-from Varken.structures import Queue, TVShow
-
-
-class SonarrAPI(object):
-    def __init__(self, server, dbmanager):
-        # Set Time of initialization
-        self.now = datetime.now(timezone.utc).astimezone().isoformat()
-        self.dbmanager = dbmanager
-        self.today = str(date.today())
-        self.server = server
-        # Create session to reduce server web thread load, and globally define pageSize for all requests
-        self.session = Session()
-        self.session.headers = {'X-Api-Key': self.server.api_key}
-        self.session.params = {'pageSize': 1000}
-
-    @logging
-    def get_missing(self):
-        endpoint = '/api/calendar'
-        last_days = str(date.today() + timedelta(days=-self.server.missing_days))
-        self.now = datetime.now(timezone.utc).astimezone().isoformat()
-        params = {'start': last_days, 'end': self.today}
-        influx_payload = []
-        missing = []
-
-        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
-        get = connection_handler(self.session, req, self.server.verify_ssl)
-
-        if not get:
-            return
-
-        # Iteratively create a list of TVShow Objects from response json
-        tv_shows = [TVShow(**show) for show in get]
-
-        # Add show to missing list if file does not exist
-        for show in tv_shows:
-            if not show.hasFile:
-                sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
-                missing.append((show.series['title'], sxe, show.airDate, show.title, show.id))
-
-        for series_title, sxe, air_date, episode_title, sonarr_id in missing:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
-            influx_payload.append(
-                {
-                    "measurement": "Sonarr",
-                    "tags": {
-                        "type": "Missing",
-                        "sonarrId": sonarr_id,
-                        "server": self.server.id,
-                        "name": series_title,
-                        "epname": episode_title,
-                        "sxe": sxe,
-                        "airs": air_date
-                    },
-                    "time": self.now,
-                    "fields": {
-                        "hash": hash_id
-
-                    }
-                }
-            )
-
-        self.dbmanager.write_points(influx_payload)
-
-    @logging
-    def get_future(self):
-        endpoint = '/api/calendar/'
-        self.now = datetime.now(timezone.utc).astimezone().isoformat()
-        future = str(date.today() + timedelta(days=self.server.future_days))
-        influx_payload = []
-        air_days = []
-        params = {'start': self.today, 'end': future}
-
-        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
-        get = connection_handler(self.session, req, self.server.verify_ssl)
-
-        if not get:
-            return
-
-        tv_shows = [TVShow(**show) for show in get]
-
-        for show in tv_shows:
-            sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
-            if show.hasFile:
-                downloaded = 1
-            else:
-                downloaded = 0
-            air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDate, show.id))
-
-        for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
-            influx_payload.append(
-                {
-                    "measurement": "Sonarr",
-                    "tags": {
-                        "type": "Future",
-                        "sonarrId": sonarr_id,
-                        "server": self.server.id,
-                        "name": series_title,
-                        "epname": episode_title,
-                        "sxe": sxe,
-                        "airs": air_date,
-                        "downloaded": dl_status
-                    },
-                    "time": self.now,
-                    "fields": {
-                        "hash": hash_id
-                    }
-                }
-            )
-
-        self.dbmanager.write_points(influx_payload)
-
-    @logging
-    def get_queue(self):
-        influx_payload = []
-        endpoint = '/api/queue'
-        self.now = datetime.now(timezone.utc).astimezone().isoformat()
-        queue = []
-
-        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
-        get = connection_handler(self.session, req, self.server.verify_ssl)
-
-        if not get:
-            return
-
-        download_queue = [Queue(**show) for show in get]
-
-        for show in download_queue:
-            sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber'])
-            if show.protocol.upper() == 'USENET':
-                protocol_id = 1
-            else:
-                protocol_id = 0
-
-            queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
-                          protocol_id, sxe, show.id))
-
-        for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
-            influx_payload.append(
-                {
-                    "measurement": "Sonarr",
-                    "tags": {
-                        "type": "Queue",
-                        "sonarrId": sonarr_id,
-                        "server": self.server.id,
-                        "name": series_title,
-                        "epname": episode_title,
-                        "sxe": sxe,
-                        "protocol": protocol,
-                        "protocol_id": protocol_id
-                    },
-                    "time": self.now,
-                    "fields": {
-                        "hash": hash_id
-                    }
-                }
-            )
-
-        self.dbmanager.write_points(influx_payload)
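
The three Sonarr methods above share one request pattern: a single Session carrying the API key and pageSize for every call, plus a per-call prepared GET with a start/end date window against /api/calendar. A self-contained sketch of that pattern with placeholder URL and key; in the deleted code, connection_handler in Varken/helpers.py wraps the send with SSL and error handling:

from datetime import date, timedelta
from requests import Request, Session

BASE_URL = 'http://localhost:8989'               # placeholder Sonarr URL
session = Session()
session.headers = {'X-Api-Key': 'your-api-key'}  # placeholder key
session.params = {'pageSize': 1000}              # merged into every prepared request

# Mirror get_missing(): look back missing_days (7 here) up to today
start = str(date.today() - timedelta(days=7))
end = str(date.today())
req = session.prepare_request(
    Request('GET', BASE_URL + '/api/calendar', params={'start': start, 'end': end}))
episodes = session.send(req).json()  # connection_handler does this plus error checks
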
diff --git a/Varken/helpers.py b/varken/helpers.py
similarity index 100%
rename from Varken/helpers.py
rename to varken/helpers.py
diff --git a/Varken/logger.py b/varken/logger.py
similarity index 100%
rename from Varken/logger.py
rename to varken/logger.py
diff --git a/Varken/structures.py b/varken/structures.py
similarity index 100%
rename from Varken/structures.py
rename to varken/structures.py