From 1b1a977ba675ae8ddb79aacf8cccde48a8f761e1 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Thu, 18 Apr 2019 21:39:46 -0500 Subject: [PATCH 01/49] stage 1.6.9 --- varken/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/varken/__init__.py b/varken/__init__.py index 086d49b..2d162d2 100644 --- a/varken/__init__.py +++ b/varken/__init__.py @@ -1,2 +1,2 @@ -VERSION = "1.6.8" -BRANCH = 'master' +VERSION = "1.6.9" +BRANCH = 'develop' From 663b399a975f8772f7679da22ef4025d9f86ccf7 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Fri, 19 Apr 2019 11:29:27 -0500 Subject: [PATCH 02/49] fix uncaught influxdb connection error --- varken/dbmanager.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/varken/dbmanager.py b/varken/dbmanager.py index 2f076df..db3629f 100644 --- a/varken/dbmanager.py +++ b/varken/dbmanager.py @@ -1,3 +1,4 @@ +from sys import exit from logging import getLogger from influxdb import InfluxDBClient from requests.exceptions import ConnectionError @@ -10,10 +11,15 @@ class DBManager(object): self.influx = InfluxDBClient(host=self.server.url, port=self.server.port, username=self.server.username, password=self.server.password, ssl=self.server.ssl, database='varken', verify_ssl=self.server.verify_ssl) - version = self.influx.request('ping', expected_response_code=204).headers['X-Influxdb-Version'] - databases = [db['name'] for db in self.influx.get_list_database()] self.logger = getLogger() - self.logger.info('Influxdb version: %s', version) + try: + version = self.influx.request('ping', expected_response_code=204).headers['X-Influxdb-Version'] + self.logger.info('Influxdb version: %s', version) + except ConnectionError: + self.logger.critical("Error testing connection to InfluxDB. Please check your url/hostname") + exit() + + databases = [db['name'] for db in self.influx.get_list_database()] if 'varken' not in databases: self.logger.info("Creating varken database") From c08ce39ed396023864a559579e83f6b6cf21b11f Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Fri, 19 Apr 2019 11:30:25 -0500 Subject: [PATCH 03/49] drop unnamed unifi devices. fixes #126 --- varken/unifi.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/varken/unifi.py b/varken/unifi.py index ad829e2..a32cd52 100644 --- a/varken/unifi.py +++ b/varken/unifi.py @@ -40,7 +40,8 @@ class UniFiAPI(object): self.logger.error("Disregarding Job get_usg_stats for unifi-%s", self.server.id) return - devices = {device['name']: device for device in get['data']} + devices = {device['name']: device for device in get['data'] if device.get('name')} + if devices.get(self.server.usg_name): device = devices[self.server.usg_name] else: From ed07d69cdcdc1527b8348656747a4b10414b1313 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Fri, 19 Apr 2019 12:44:07 -0500 Subject: [PATCH 04/49] refactor ombi and add logging for bad requests so we can see data --- varken/ombi.py | 39 +++++++++++++++++++++++++-------------- 1 file changed, 25 insertions(+), 14 deletions(-) diff --git a/varken/ombi.py b/varken/ombi.py index f82bc2f..98fe083 100644 --- a/varken/ombi.py +++ b/varken/ombi.py @@ -25,27 +25,38 @@ class OmbiAPI(object): tv_req = self.session.prepare_request(Request('GET', self.server.url + tv_endpoint)) movie_req = self.session.prepare_request(Request('GET', self.server.url + movie_endpoint)) - get_tv = connection_handler(self.session, tv_req, self.server.verify_ssl) - get_movie = connection_handler(self.session, movie_req, self.server.verify_ssl) + get_tv = connection_handler(self.session, tv_req, self.server.verify_ssl) or [] + get_movie = connection_handler(self.session, movie_req, self.server.verify_ssl) or [] if not any([get_tv, get_movie]): self.logger.error('No json replies. Discarding job') return - movie_request_count = len(get_movie) - tv_request_count = len(get_tv) + if get_movie: + movie_request_count = len(get_movie) + else: + movie_request_count = 0 - try: - tv_show_requests = [OmbiTVRequest(**show) for show in get_tv] - except TypeError as e: - self.logger.error('TypeError has occurred : %s while creating OmbiTVRequest structure', e) - return + if get_tv: + tv_request_count = len(get_tv) + else: + tv_request_count = 0 - try: - movie_requests = [OmbiMovieRequest(**movie) for movie in get_movie] - except TypeError as e: - self.logger.error('TypeError has occurred : %s while creating OmbiMovieRequest structure', e) - return + tv_show_requests = [] + for show in get_tv: + try: + tv_show_requests.append(OmbiTVRequest(**show)) + except TypeError as e: + self.logger.error('TypeError has occurred : %s while creating OmbiTVRequest structure for show. ' + 'data attempted is: %s', e, show) + + movie_requests = [] + for movie in get_movie: + try: + movie_requests.append(OmbiMovieRequest(**movie)) + except TypeError as e: + self.logger.error('TypeError has occurred : %s while creating OmbiMovieRequest structure for movie. ' + 'data attempted is: %s', e, movie) influx_payload = [ { From 23fa435590d779c97a34b9235d2125fe77141da1 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Fri, 19 Apr 2019 12:48:04 -0500 Subject: [PATCH 05/49] update sonarr error logging --- varken/sonarr.py | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/varken/sonarr.py b/varken/sonarr.py index eea265f..7e6b845 100644 --- a/varken/sonarr.py +++ b/varken/sonarr.py @@ -38,12 +38,10 @@ class SonarrAPI(object): tv_shows = [] for show in get: try: - show_tuple = SonarrTVShow(**show) - tv_shows.append(show_tuple) + tv_shows.append(SonarrTVShow(**show)) except TypeError as e: - self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show', e) - if not tv_shows: - return + self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. 
Data ' + 'attempted is: %s', e, show) # Add show to missing list if file does not exist for show in tv_shows: @@ -93,12 +91,10 @@ class SonarrAPI(object): tv_shows = [] for show in get: try: - show_tuple = SonarrTVShow(**show) - tv_shows.append(show_tuple) + tv_shows.append(SonarrTVShow(**show)) except TypeError as e: - self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show', e) - if not tv_shows: - return + self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. Data ' + 'attempted is: %s', e, show) for show in tv_shows: sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}' @@ -147,10 +143,10 @@ class SonarrAPI(object): download_queue = [] for show in get: try: - show_tuple = Queue(**show) - download_queue.append(show_tuple) + download_queue.append(Queue(**show)) except TypeError as e: - self.logger.error('TypeError has occurred : %s while creating Queue structure', e) + self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: ' + '%s', e, show) if not download_queue: return @@ -159,7 +155,7 @@ class SonarrAPI(object): sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}" except TypeError as e: self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \ - Remove invalid queue entries.', e) + Remove invalid queue entry. Data attempted is: %s', e, show) continue if show.protocol.upper() == 'USENET': From 5b7fddddaf5689600b7c2bbb2aaf8ddc7c1e8428 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Fri, 19 Apr 2019 12:54:16 -0500 Subject: [PATCH 06/49] add a bit more logging --- varken/ombi.py | 5 ++++- varken/sonarr.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/varken/ombi.py b/varken/ombi.py index 98fe083..a660478 100644 --- a/varken/ombi.py +++ b/varken/ombi.py @@ -144,7 +144,10 @@ class OmbiAPI(object): } ) - self.dbmanager.write_points(influx_payload) + if influx_payload: + self.dbmanager.write_points(influx_payload) + else: + self.logger.debug("Empty dataset for ombi module. Discarding...") def get_request_counts(self): now = datetime.now(timezone.utc).astimezone().isoformat() diff --git a/varken/sonarr.py b/varken/sonarr.py index 7e6b845..ae784c8 100644 --- a/varken/sonarr.py +++ b/varken/sonarr.py @@ -188,4 +188,7 @@ class SonarrAPI(object): } } ) - self.dbmanager.write_points(influx_payload) + if influx_payload: + self.dbmanager.write_points(influx_payload) + else: + self.logger.debug("No data to send to influx for sonarr instance, discarding.") From c42a0b58e462de95863426de6f817b2d7ff5b8d7 Mon Sep 17 00:00:00 2001 From: samwiseg0 Date: Sat, 20 Apr 2019 09:30:17 -0400 Subject: [PATCH 07/49] Add album and track totals to artist library from Tautulli #127 --- varken/tautulli.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/varken/tautulli.py b/varken/tautulli.py index 4eac352..c49d95b 100644 --- a/varken/tautulli.py +++ b/varken/tautulli.py @@ -208,6 +208,10 @@ class TautulliAPI(object): if library['section_type'] == 'show': data['fields']['seasons'] = int(library['parent_count']) data['fields']['episodes'] = int(library['child_count']) + + elif library['section_type'] == 'artist': + data['fields']['albums'] = int(library['parent_count']) + data['fields']['tracks'] = int(library['child_count']) influx_payload.append(data) self.dbmanager.write_points(influx_payload) From 2a260f6ee9dcc80be62399a1eacafd51132c5cf6 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Mon, 22 Apr 2019 14:05:17 -0500 Subject: [PATCH 08/49] make an attempt to reauth unifi + let users know they didnt RTFM --- varken/dbmanager.py | 3 +++ varken/unifi.py | 15 +++++++++------ 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/varken/dbmanager.py b/varken/dbmanager.py index db3629f..c321469 100644 --- a/varken/dbmanager.py +++ b/varken/dbmanager.py @@ -8,6 +8,9 @@ from influxdb.exceptions import InfluxDBServerError class DBManager(object): def __init__(self, server): self.server = server + if self.server.url == "influxdb.domain.tld": + self.logger.critical("You have not configured your varken.ini. Please read Wiki page for configuration") + exit() self.influx = InfluxDBClient(host=self.server.url, port=self.server.port, username=self.server.username, password=self.server.password, ssl=self.server.ssl, database='varken', verify_ssl=self.server.verify_ssl) diff --git a/varken/unifi.py b/varken/unifi.py index a32cd52..a9c5164 100644 --- a/varken/unifi.py +++ b/varken/unifi.py @@ -12,7 +12,7 @@ class UniFiAPI(object): # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = Session() self.logger = getLogger() - + self.get_retry = True self.get_cookie() def __repr__(self): @@ -37,7 +37,14 @@ class UniFiAPI(object): get = connection_handler(self.session, req, self.server.verify_ssl) if not get: - self.logger.error("Disregarding Job get_usg_stats for unifi-%s", self.server.id) + if self.get_retry: + self.get_retry = False + self.logger.error("Attempting to reauthenticate for unifi-%s", self.server.id) + self.get_cookie() + self.get_usg_stats() + else: + self.get_retry = True + self.logger.error("Disregarding Job get_usg_stats for unifi-%s", self.server.id) return devices = {device['name']: device for device in get['data'] if device.get('name')} @@ -63,10 +70,6 @@ class UniFiAPI(object): "rx_bytes_current": device['wan1']['rx_bytes-r'], "tx_bytes_total": device['wan1']['tx_bytes'], "tx_bytes_current": device['wan1']['tx_bytes-r'], - # Commenting speedtest out until Unifi gets their shit together - # "speedtest_latency": device['speedtest-status']['latency'], - # "speedtest_download": device['speedtest-status']['xput_download'], - # "speedtest_upload": device['speedtest-status']['xput_upload'], "cpu_loadavg_1": float(device['sys_stats']['loadavg_1']), "cpu_loadavg_5": float(device['sys_stats']['loadavg_5']), "cpu_loadavg_15": float(device['sys_stats']['loadavg_15']), From 61def3b37e28f7a61a02df33af44226bdcb7bbc2 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Mon, 22 Apr 2019 14:07:28 -0500 Subject: [PATCH 09/49] reset check if successful --- varken/unifi.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/varken/unifi.py b/varken/unifi.py index a9c5164..0de3e87 100644 --- a/varken/unifi.py +++ b/varken/unifi.py @@ -47,6 +47,9 @@ class UniFiAPI(object): self.logger.error("Disregarding Job get_usg_stats for unifi-%s", self.server.id) return + if not self.get_retry: + self.get_retry = True + devices = {device['name']: device for device in get['data'] if device.get('name')} if devices.get(self.server.usg_name): From d9b6e825f3d0812cca8f9dcc6d487135dcc3c619 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Tue, 23 Apr 2019 23:19:06 -0500 Subject: [PATCH 10/49] get sites and map for unifi even if its a description (alias) --- varken/unifi.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/varken/unifi.py b/varken/unifi.py index a32cd52..eb24097 100644 --- a/varken/unifi.py +++ b/varken/unifi.py @@ -14,6 +14,7 @@ class UniFiAPI(object): self.logger = getLogger() self.get_cookie() + self.get_site() def __repr__(self): return f"" @@ -25,11 +26,27 @@ class UniFiAPI(object): post = connection_handler(self.session, req, self.server.verify_ssl, as_is_reply=True) if not post or not post.cookies.get('unifises'): + self.logger.error(f"Could not retrieve session cookie from UniFi Controller") return cookies = {'unifises': post.cookies.get('unifises')} self.session.cookies.update(cookies) + def get_site(self): + endpoint = '/api/self/sites' + req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) + get = connection_handler(self.session, req, self.server.verify_ssl) + + if not get: + self.logger.error(f"Could not get list of sites from UniFi Controller") + return + site = [site['name'] for site in get['data'] if site['name'].lower() == self.server.site.lower() + or site['desc'].lower() == self.server.site.lower()] + if site: + self.server.site = site[0] + else: + self.logger.error(f"Could not map site {self.server.site} to a site id/alias") + def get_usg_stats(self): now = datetime.now(timezone.utc).astimezone().isoformat() endpoint = f'/api/s/{self.server.site}/stat/device' From 04db2670013e9266eb111417bd61240c62f40515 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 00:41:12 -0500 Subject: [PATCH 11/49] grafana datasource + official dashboard import script --- grafana_build.py | 130 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 130 insertions(+) create mode 100644 grafana_build.py diff --git a/grafana_build.py b/grafana_build.py new file mode 100644 index 0000000..77ab407 --- /dev/null +++ b/grafana_build.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 +from sys import exit +from requests import Session +from json.decoder import JSONDecodeError + +session = Session() + +docker = True # True if using a docker container, False if not +host_ip = '127.0.0.1' # Only relevant if docker = False +username = 'admin' # Grafana username +password = 'admin' # Grafana password +grafana_url = 'http://grafana:3000' +verify = False # Verify SSL + +# Do not remove any of these, just change the ones you use +movies_library = 'Movies' +fourk_movies_library = 'Movies 4K' +tv_shows_library = 'TV Shows' +fourk_tv_shows_library = 'TV Shows 4K' +ombi_url = 'https://yourdomain.com/ombi' +tautulli_url = 'https://yourdomain.com/tautulli' +sonarr_url = 'https://yourdomain.com/sonarr' +radarr_url = 'https://yourdomain.com/radarr' +sickchill_url = 'https://yourdomain.com/sickchill' + + +# Do not edit past this line # +auth = (username, password) +url_base = f"{grafana_url.rstrip('/')}/api" + +try: + datasources = session.get(url_base + '/datasources', auth=auth, verify=verify).json() + varken_datasource = [source for source in datasources if source['database'] == 'varken'] + if varken_datasource: + exit(f'varken datasource already exists with the name "{varken_datasource[0]["name"]}"') +except JSONDecodeError: + exit(f"Could not talk to grafana at {grafana_url}. 
Check URL/Username/Password") + +datasource_data = { + "name": "Varken-Script", + "type": "influxdb", + "url": f"http://{'influxdb' if docker else host_ip}:8086", + "access": "proxy", + "basicAuth": False, + "database": 'varken' +} +post = session.post(url_base + '/datasources', auth=auth, verify=verify, json=datasource_data).json() +print(f'Created Varken-Script datasource (id:{post["datasource"]["id"]})') + +our_dashboard = session.get(url_base + '/gnet/dashboards/9585', auth=auth, verify=verify).json()['json'] +dashboard_data = { + "dashboard": our_dashboard, + "overwrite": True, + "inputs": [ + { + "name": "DS_VARKEN", + "label": "varken", + "description": "", + "type": "datasource", + "pluginId": "influxdb", + "pluginName": "InfluxDB", + "value": "Varken-Script" + }, + { + "name": "VAR_MOVIESLIBRARY", + "type": "constant", + "label": "Movies Library Name", + "value": movies_library, + "description": "" + }, + { + "name": "VAR_MOVIES4KLIBRARY", + "type": "constant", + "label": "4K Movies Library Name", + "value": fourk_movies_library, + "description": "" + }, + { + "name": "VAR_TVLIBRARY", + "type": "constant", + "label": "TV Library Name", + "value": tv_shows_library, + "description": "" + }, + { + "name": "VAR_TV4KLIBRARY", + "type": "constant", + "label": "TV 4K Library Name", + "value": fourk_tv_shows_library, + "description": "" + }, + { + "name": "VAR_OMBIURL", + "type": "constant", + "label": "Ombi URL", + "value": ombi_url, + "description": "" + }, + { + "name": "VAR_TAUTULLIURL", + "type": "constant", + "label": "Tautulli URL", + "value": tautulli_url, + "description": "" + }, + { + "name": "VAR_SONARRURL", + "type": "constant", + "label": "Sonarr URL", + "value": sonarr_url, + "description": "" + }, + { + "name": "VAR_RADARRURL", + "type": "constant", + "label": "Radarr URL", + "value": radarr_url, + "description": "" + }, + { + "name": "VAR_SICKCHILLURL", + "type": "constant", + "label": "Sickchill URL", + "value": sickchill_url, + "description": "" + } + ] +} +make_dashboard = session.post(url_base + '/dashboards/import', data=dashboard_data, auth=auth, verify=verify) +print('Created dashboard "Varken-Script"') From 1acdcb8bf223ee8f10a651ac766ccd6ed4e78aab Mon Sep 17 00:00:00 2001 From: samwiseg0 Date: Wed, 24 Apr 2019 12:05:00 -0400 Subject: [PATCH 12/49] Enable Debug by default for docker images --- Dockerfile | 2 +- Dockerfile.arm | 2 +- Dockerfile.arm64 | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index e635b17..782a3a9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM amd64/python:3.7.2-alpine LABEL maintainers="dirtycajunrice,samwiseg0" -ENV DEBUG="False" +ENV DEBUG="True" WORKDIR /app diff --git a/Dockerfile.arm b/Dockerfile.arm index da5c42a..9f7fed1 100644 --- a/Dockerfile.arm +++ b/Dockerfile.arm @@ -2,7 +2,7 @@ FROM arm32v6/python:3.7.2-alpine LABEL maintainers="dirtycajunrice,samwiseg0" -ENV DEBUG="False" +ENV DEBUG="True" WORKDIR /app diff --git a/Dockerfile.arm64 b/Dockerfile.arm64 index 4ae6d4d..9ad67eb 100644 --- a/Dockerfile.arm64 +++ b/Dockerfile.arm64 @@ -2,7 +2,7 @@ FROM arm64v8/python:3.7.2-alpine LABEL maintainers="dirtycajunrice,samwiseg0" -ENV DEBUG="False" +ENV DEBUG="True" WORKDIR /app From b7f8b6dc7fd39ce61f4547a376ac97e82aa7d6b8 Mon Sep 17 00:00:00 2001 From: samwiseg0 Date: Wed, 24 Apr 2019 12:05:26 -0400 Subject: [PATCH 13/49] Enable debug by default and allow it to be disabled --- Varken.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/Varken.py b/Varken.py 
index 1c0c7f2..65aecd8 100644 --- a/Varken.py +++ b/Varken.py @@ -44,6 +44,7 @@ if __name__ == "__main__": parser.add_argument("-d", "--data-folder", help='Define an alternate data folder location') parser.add_argument("-D", "--debug", action='store_true', help='Use to enable DEBUG logging') + parser.add_argument("-ND", "--no_debug", action='store_true', help='Use to disable DEBUG logging') opts = parser.parse_args() @@ -72,10 +73,15 @@ if __name__ == "__main__": enable_opts = ['True', 'true', 'yes'] debug_opts = ['debug', 'Debug', 'DEBUG'] - if not opts.debug: + opts.debug = True + + if getenv('DEBUG') is not None: opts.debug = True if any([getenv(string, False) for true in enable_opts for string in debug_opts if getenv(string, False) == true]) else False + elif opts.no_debug: + opts.debug = False + # Initiate the logger vl = VarkenLogger(data_folder=DATA_FOLDER, debug=opts.debug) vl.logger.info('Starting Varken...') From c41a80dd82d8cd257ccff781d89ddfa7ceaac53b Mon Sep 17 00:00:00 2001 From: samwiseg0 Date: Wed, 24 Apr 2019 12:15:02 -0400 Subject: [PATCH 14/49] Add "Depreciated" to debug help --- Varken.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Varken.py b/Varken.py index 65aecd8..c78dbb7 100644 --- a/Varken.py +++ b/Varken.py @@ -43,7 +43,7 @@ if __name__ == "__main__": formatter_class=RawTextHelpFormatter) parser.add_argument("-d", "--data-folder", help='Define an alternate data folder location') - parser.add_argument("-D", "--debug", action='store_true', help='Use to enable DEBUG logging') + parser.add_argument("-D", "--debug", action='store_true', help='Use to enable DEBUG logging. (Depreciated)') parser.add_argument("-ND", "--no_debug", action='store_true', help='Use to disable DEBUG logging') opts = parser.parse_args() From 737cc11457f89bceb6e613bef99d3813a9d93df6 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 11:30:18 -0500 Subject: [PATCH 15/49] copy utilities folder --- Dockerfile | 2 ++ grafana_build.py => utilities/grafana_build.py | 0 2 files changed, 2 insertions(+) rename grafana_build.py => utilities/grafana_build.py (100%) diff --git a/Dockerfile b/Dockerfile index 782a3a9..f0747b9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,6 +12,8 @@ COPY /varken /app/varken COPY /data /app/data +COPY /utilities /config/utilities + RUN apk add --no-cache tzdata && \ python3 -m pip install -r /app/requirements.txt diff --git a/grafana_build.py b/utilities/grafana_build.py similarity index 100% rename from grafana_build.py rename to utilities/grafana_build.py From 7228fa2a3912ad26da9b8d240fa49c0494be1ef7 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Wed, 24 Apr 2019 11:35:19 -0500 Subject: [PATCH 16/49] fix attribute error --- varken/unifi.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/varken/unifi.py b/varken/unifi.py index 7b1c7aa..2c3d5d0 100644 --- a/varken/unifi.py +++ b/varken/unifi.py @@ -9,6 +9,7 @@ class UniFiAPI(object): def __init__(self, server, dbmanager): self.dbmanager = dbmanager self.server = server + self.site = self.server.site # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = Session() self.logger = getLogger() @@ -43,13 +44,13 @@ class UniFiAPI(object): site = [site['name'] for site in get['data'] if site['name'].lower() == self.server.site.lower() or site['desc'].lower() == self.server.site.lower()] if site: - self.server.site = site[0] + self.site = site[0] else: self.logger.error(f"Could not map site {self.server.site} to a site id/alias") def get_usg_stats(self): now = datetime.now(timezone.utc).astimezone().isoformat() - endpoint = f'/api/s/{self.server.site}/stat/device' + endpoint = f'/api/s/{self.site}/stat/device' req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) get = connection_handler(self.session, req, self.server.verify_ssl) From 4258c59189d5cd04a0c280b3335aa8494e966836 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 11:41:33 -0500 Subject: [PATCH 17/49] move utilities copy --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index f0747b9..bde351c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,7 +12,7 @@ COPY /varken /app/varken COPY /data /app/data -COPY /utilities /config/utilities +COPY /utilities /app/data/utilities RUN apk add --no-cache tzdata && \ python3 -m pip install -r /app/requirements.txt From 624b993bbf700f826168b89efed0a3649f86b796 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 11:48:27 -0500 Subject: [PATCH 18/49] add container names + force grafana to wait on proper varken config --- docker-compose.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 52db973..7ff4a27 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,6 +5,7 @@ networks: services: influxdb: hostname: influxdb + container_name: influxdb image: influxdb networks: - internal @@ -13,6 +14,7 @@ services: restart: unless-stopped varken: hostname: varken + container_name: varken image: boerderij/varken networks: - internal @@ -27,6 +29,7 @@ services: restart: unless-stopped grafana: hostname: grafana + container_name: grafana image: grafana/grafana networks: - internal @@ -41,4 +44,5 @@ services: - GF_INSTALL_PLUGINS=grafana-piechart-panel,grafana-worldmap-panel depends_on: - influxdb + - varken restart: unless-stopped \ No newline at end of file From d1fb08e62d57d19875bf933e80da61e810b16c78 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Wed, 24 Apr 2019 11:50:45 -0500 Subject: [PATCH 19/49] move session under do not edit --- utilities/grafana_build.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/utilities/grafana_build.py b/utilities/grafana_build.py index 77ab407..ccbac3a 100644 --- a/utilities/grafana_build.py +++ b/utilities/grafana_build.py @@ -3,8 +3,6 @@ from sys import exit from requests import Session from json.decoder import JSONDecodeError -session = Session() - docker = True # True if using a docker container, False if not host_ip = '127.0.0.1' # Only relevant if docker = False username = 'admin' # Grafana username @@ -25,6 +23,7 @@ sickchill_url = 'https://yourdomain.com/sickchill' # Do not edit past this line # +session = Session() auth = (username, password) url_base = f"{grafana_url.rstrip('/')}/api" From b43de028af0538473684c3f60186799f340b6ed0 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 12:13:58 -0500 Subject: [PATCH 20/49] fix data -> json --- utilities/grafana_build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utilities/grafana_build.py b/utilities/grafana_build.py index ccbac3a..eb90b19 100644 --- a/utilities/grafana_build.py +++ b/utilities/grafana_build.py @@ -125,5 +125,5 @@ dashboard_data = { } ] } -make_dashboard = session.post(url_base + '/dashboards/import', data=dashboard_data, auth=auth, verify=verify) +make_dashboard = session.post(url_base + '/dashboards/import', json=dashboard_data, auth=auth, verify=verify) print('Created dashboard "Varken-Script"') From 8a632e1043eaaa3db6174e59cc68e26e87fcce5e Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 12:22:34 -0500 Subject: [PATCH 21/49] pep8 --- utilities/grafana_build.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/utilities/grafana_build.py b/utilities/grafana_build.py index eb90b19..e261e6b 100644 --- a/utilities/grafana_build.py +++ b/utilities/grafana_build.py @@ -27,11 +27,12 @@ session = Session() auth = (username, password) url_base = f"{grafana_url.rstrip('/')}/api" +varken_datasource = [] try: datasources = session.get(url_base + '/datasources', auth=auth, verify=verify).json() varken_datasource = [source for source in datasources if source['database'] == 'varken'] if varken_datasource: - exit(f'varken datasource already exists with the name "{varken_datasource[0]["name"]}"') + print(f'varken datasource already exists with the name "{varken_datasource[0]["name"]}"') except JSONDecodeError: exit(f"Could not talk to grafana at {grafana_url}. Check URL/Username/Password") @@ -43,8 +44,9 @@ datasource_data = { "basicAuth": False, "database": 'varken' } -post = session.post(url_base + '/datasources', auth=auth, verify=verify, json=datasource_data).json() -print(f'Created Varken-Script datasource (id:{post["datasource"]["id"]})') +if not varken_datasource: + post = session.post(url_base + '/datasources', auth=auth, verify=verify, json=datasource_data).json() + print(f'Created Varken-Script datasource (id:{post["datasource"]["id"]})') our_dashboard = session.get(url_base + '/gnet/dashboards/9585', auth=auth, verify=verify).json()['json'] dashboard_data = { From a7d241648547b2ae9903626ed632e595455232c7 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Wed, 24 Apr 2019 12:23:42 -0500 Subject: [PATCH 22/49] add regular venv to gitignore --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 6b64cc4..f2cb4ec 100644 --- a/.gitignore +++ b/.gitignore @@ -5,10 +5,10 @@ .Trashes ehthumbs.db Thumbs.db -__pycache__ GeoLite2-City.mmdb GeoLite2-City.tar.gz data/varken.ini .idea/ varken-venv/ +venv/ logs/ From ec4b069ec644af3094da4aae8f386208c4b9c833 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 15:27:30 -0500 Subject: [PATCH 23/49] add lidarr functionality --- Varken.py | 15 +++++ varken/lidarr.py | 130 +++++++++++++++++++++++++++++++++++++++++++ varken/structures.py | 46 +++++++++++++++ 3 files changed, 191 insertions(+) create mode 100644 varken/lidarr.py diff --git a/Varken.py b/Varken.py index c78dbb7..bcaa424 100644 --- a/Varken.py +++ b/Varken.py @@ -17,6 +17,7 @@ from varken.unifi import UniFiAPI from varken import VERSION, BRANCH from varken.sonarr import SonarrAPI from varken.radarr import RadarrAPI +from varken.lidarr import LidarrAPI from varken.iniparser import INIParser from varken.dbmanager import DBManager from varken.helpers import GeoIPHandler @@ -134,6 +135,20 @@ if __name__ == "__main__": at_time = schedule.every(server.queue_run_seconds).seconds at_time.do(QUEUE.put, RADARR.get_queue).tag("radarr-{}-get_queue".format(server.id)) + if CONFIG.lidarr_enabled: + for server in CONFIG.lidarr_servers: + LIDARR = LidarrAPI(server, DBMANAGER) + if server.queue: + at_time = schedule.every(server.queue_run_seconds).seconds + at_time.do(QUEUE.put, LIDARR.get_queue).tag("lidarr-{}-get_queue".format(server.id)) + if server.missing_days > 0: + at_time = schedule.every(server.missing_days_run_seconds).seconds + at_time.do(QUEUE.put, LIDARR.get_calendar, query="Missing").tag( + "lidarr-{}-get_missing".format(server.id)) + if server.future_days > 0: + at_time = schedule.every(server.future_days_run_seconds).seconds + at_time.do(QUEUE.put, LIDARR.get_calendar, query="Future").tag("lidarr-{}-get_future".format(server.id)) + if CONFIG.ombi_enabled: for server in CONFIG.ombi_servers: OMBI = OmbiAPI(server, DBMANAGER) diff --git a/varken/lidarr.py b/varken/lidarr.py new file mode 100644 index 0000000..8bcd070 --- /dev/null +++ b/varken/lidarr.py @@ -0,0 +1,130 @@ +from logging import getLogger +from requests import Session, Request +from datetime import datetime, timezone, date, timedelta + +from varken.structures import LidarrQueue, LidarrAlbum +from varken.helpers import hashit, connection_handler + + +class LidarrAPI(object): + def __init__(self, server, dbmanager): + self.dbmanager = dbmanager + self.server = server + # Create session to reduce server web thread load, and globally define pageSize for all requests + self.session = Session() + self.session.headers = {'X-Api-Key': self.server.api_key} + self.logger = getLogger() + + def __repr__(self): + return f"" + + def get_calendar(self, query="Missing"): + endpoint = '/api/v1/calendar' + today = str(date.today()) + last_days = str(date.today() - timedelta(days=self.server.missing_days)) + future = str(date.today() + timedelta(days=self.server.future_days)) + now = datetime.now(timezone.utc).astimezone().isoformat() + if query == "Missing": + params = {'start': last_days, 'end': today} + else: + params = {'start': today, 'end': future} + influx_payload = [] + influx_albums = [] + + req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) + get = 
connection_handler(self.session, req, self.server.verify_ssl) + + if not get: + return + + # Iteratively create a list of LidarrAlbum Objects from response json + albums = [] + for album in get: + try: + albums.append(LidarrAlbum(**album)) + except TypeError as e: + self.logger.error('TypeError has occurred : %s while creating LidarrAlbum structure for album. Data ' + 'attempted is: %s', e, album) + + # Add Album to missing list if album is not complete + for album in albums: + if album.statistics['percentOfTracks'] != 100: + influx_albums.append((album.title, album.releaseDate, album.artist['artistName'], album.id, + album.statistics['percentOfTracks'], + f"{album.statistics['trackFileCount']}/{album.statistics['TrackCount']}")) + + for title, release_date, artist_name, album_id, percent_complete, complete_count in influx_albums: + hash_id = hashit(f'{self.server.id}{title}{album_id}') + influx_payload.append( + { + "measurement": "Lidarr", + "tags": { + "type": query, + "sonarrId": album_id, + "server": self.server.id, + "albumName": title, + "artistName": artist_name, + "percentComplete": percent_complete, + "completeCount": complete_count, + "releaseDate": release_date + }, + "time": now, + "fields": { + "hash": hash_id + + } + } + ) + + self.dbmanager.write_points(influx_payload) + + def get_queue(self): + endpoint = '/api/v1/queue' + now = datetime.now(timezone.utc).astimezone().isoformat() + influx_payload = [] + params = {'pageSize': 1000} + + req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) + get = connection_handler(self.session, req, self.server.verify_ssl) + + if not get: + return + + queue = [] + for song in get['records']: + try: + queue.append(LidarrQueue(**song)) + except TypeError as e: + self.logger.error('TypeError has occurred : %s while creating LidarrQueue structure for show. 
Data ' + 'attempted is: %s', e, song) + + if not queue: + return + + for song in queue: + if song.protocol.upper() == 'USENET': + protocol_id = 1 + else: + protocol_id = 0 + hash_id = hashit(f'{self.server.id}{song.title}{song.artistId}') + influx_payload.append( + { + "measurement": "Lidarr", + "tags": { + "type": "Queue", + "id": song.id, + "server": self.server.id, + "title": song.title, + "quality": song.quality['quality']['name'], + "protocol": song.protocol, + "protocol_id": protocol_id, + "indexer": song.indexer + }, + "time": now, + "fields": { + "hash": hash_id + } + } + ) + + self.dbmanager.write_points(influx_payload) diff --git a/varken/structures.py b/varken/structures.py index 0957e44..9225adb 100644 --- a/varken/structures.py +++ b/varken/structures.py @@ -457,3 +457,49 @@ class TautulliStream(NamedTuple): year: str = None secure: str = None relayed: int = None + + +# Lidarr +class LidarrQueue(NamedTuple): + artistId: int = None + albumId: int = None + language: dict = None + quality: dict = None + size: float = None + title: str = None + timeleft: str = None + sizeleft: float = None + status: str = None + trackedDownloadStatus: str = None + statusMessages: list = None + downloadId: str = None + protocol: str = None + downloadClient: str = None + indexer: str = None + downloadForced: bool = None + id: int = None + + +class LidarrAlbum(NamedTuple): + title: str = None + disambiguation: str = None + overview: str = None + artistId: int = None + foreignAlbumId: str = None + monitored: bool = None + anyReleaseOk: bool = None + profileId: int = None + duration: int = None + albumType: str = None + secondaryTypes: list = None + mediumCount: int = None + ratings: dict = None + releaseDate: str = None + releases: list = None + genres: list = None + media: list = None + artist: dict = None + images: list = None + links: list = None + statistics: dict = None + id: int = None From f00bfa5be468845fd54f0026aa2222ef6a51019d Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Wed, 24 Apr 2019 15:32:07 -0500 Subject: [PATCH 24/49] add lidarr to example.ini --- data/varken.example.ini | 13 +++++++++++++ varken/iniparser.py | 8 ++++---- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/data/varken.example.ini b/data/varken.example.ini index 472fc11..9320af8 100644 --- a/data/varken.example.ini +++ b/data/varken.example.ini @@ -1,6 +1,7 @@ [global] sonarr_server_ids = 1,2 radarr_server_ids = 1,2 +lidarr_server_ids = 1 tautulli_server_ids = 1 ombi_server_ids = 1 sickchill_server_ids = false @@ -69,6 +70,18 @@ queue_run_seconds = 300 get_missing = true get_missing_run_seconds = 300 +[lidarr-1] +url = lidarr1.domain.tld:8686 +apikey = xxxxxxxxxxxxxxxx +ssl = false +verify_ssl = false +missing_days = 30 +missing_days_run_seconds = 300 +future_days = 30 +future_days_run_seconds = 300 +queue = true +queue_run_seconds = 300 + [ombi-1] url = ombi.domain.tld apikey = xxxxxxxxxxxxxxxx diff --git a/varken/iniparser.py b/varken/iniparser.py index fd8157b..0ec0049 100644 --- a/varken/iniparser.py +++ b/varken/iniparser.py @@ -15,7 +15,7 @@ class INIParser(object): self.config = None self.data_folder = data_folder self.filtered_strings = None - self.services = ['sonarr', 'radarr', 'ombi', 'tautulli', 'sickchill', 'unifi'] + self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'tautulli', 'sickchill', 'unifi'] self.logger = getLogger() self.influx_server = InfluxServer() @@ -174,7 +174,7 @@ class INIParser(object): url = self.url_check(self.config.get(section, 'url'), section=section) apikey = None - if service not in ['ciscoasa', 'unifi']: + if service != 'unifi': apikey = self.config.get(section, 'apikey') scheme = 'https://' if self.config.getboolean(section, 'ssl') else 'http://' @@ -183,11 +183,11 @@ class INIParser(object): if scheme != 'https://': verify_ssl = False - if service in ['sonarr', 'radarr']: + if service in ['sonarr', 'radarr', 'lidarr']: queue = self.config.getboolean(section, 'queue') queue_run_seconds = self.config.getint(section, 'queue_run_seconds') - if service == 'sonarr': + if service in ['sonarr', 'lidarr']: missing_days = self.config.getint(section, 'missing_days') future_days = self.config.getint(section, 'future_days') From a926ecd7f98009cf617d43f0f3af37ae8533c33c Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 16:07:25 -0500 Subject: [PATCH 25/49] wip. broken --- Varken.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/Varken.py b/Varken.py index bcaa424..f63b06b 100644 --- a/Varken.py +++ b/Varken.py @@ -32,7 +32,12 @@ PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x) def thread(): while schedule.jobs: job = QUEUE.get() - a = job() + if isinstance(job, tuple): + query = job[1] + job = job[0] + a = job(query=query) + else: + a = job() if a is not None: schedule.clear(a) QUEUE.task_done() @@ -147,7 +152,8 @@ if __name__ == "__main__": "lidarr-{}-get_missing".format(server.id)) if server.future_days > 0: at_time = schedule.every(server.future_days_run_seconds).seconds - at_time.do(QUEUE.put, LIDARR.get_calendar, query="Future").tag("lidarr-{}-get_future".format(server.id)) + at_time.do(QUEUE.put, (LIDARR.get_calendar, "Future")).tag("lidarr-{}-get_future".format( + server.id)) if CONFIG.ombi_enabled: for server in CONFIG.ombi_servers: From 11e290690479e81a793d09fb9fca941bbdf2eb60 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Wed, 24 Apr 2019 17:37:41 -0500 Subject: [PATCH 26/49] fixed queue --- Varken.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Varken.py b/Varken.py index f63b06b..b602bbd 100644 --- a/Varken.py +++ b/Varken.py @@ -33,8 +33,8 @@ def thread(): while schedule.jobs: job = QUEUE.get() if isinstance(job, tuple): - query = job[1] job = job[0] + query = job[1] a = job(query=query) else: a = job() @@ -145,10 +145,10 @@ if __name__ == "__main__": LIDARR = LidarrAPI(server, DBMANAGER) if server.queue: at_time = schedule.every(server.queue_run_seconds).seconds - at_time.do(QUEUE.put, LIDARR.get_queue).tag("lidarr-{}-get_queue".format(server.id)) + at_time.do(QUEUE.put, LIDARR.get_queue, None).tag("lidarr-{}-get_queue".format(server.id)) if server.missing_days > 0: at_time = schedule.every(server.missing_days_run_seconds).seconds - at_time.do(QUEUE.put, LIDARR.get_calendar, query="Missing").tag( + at_time.do(QUEUE.put, (LIDARR.get_calendar, "Missing")).tag( "lidarr-{}-get_missing".format(server.id)) if server.future_days > 0: at_time = schedule.every(server.future_days_run_seconds).seconds @@ -183,7 +183,7 @@ if __name__ == "__main__": # Run all on startup SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled, - CONFIG.sonarr_enabled, CONFIG.sickchill_enabled] + CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled] if not [enabled for enabled in SERVICES_ENABLED if enabled]: vl.logger.error("All services disabled. Exiting") exit(1) From ee980085b9d631909b6c0b94e83008aa9065d5f4 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 17:38:50 -0500 Subject: [PATCH 27/49] start with lidarr disabled --- data/varken.example.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/varken.example.ini b/data/varken.example.ini index 9320af8..26a1e22 100644 --- a/data/varken.example.ini +++ b/data/varken.example.ini @@ -1,7 +1,7 @@ [global] sonarr_server_ids = 1,2 radarr_server_ids = 1,2 -lidarr_server_ids = 1 +lidarr_server_ids = false tautulli_server_ids = 1 ombi_server_ids = 1 sickchill_server_ids = false From 0bba242e72a03a54d31b8294311859ae32a61f15 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 17:40:13 -0500 Subject: [PATCH 28/49] cleanup lines --- Varken.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Varken.py b/Varken.py index b602bbd..47c5e83 100644 --- a/Varken.py +++ b/Varken.py @@ -33,8 +33,7 @@ def thread(): while schedule.jobs: job = QUEUE.get() if isinstance(job, tuple): - job = job[0] - query = job[1] + job, query = job[0], job[1] a = job(query=query) else: a = job() From 5827bb2c976688d79d00000078809a768804c793 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Wed, 24 Apr 2019 17:50:09 -0500 Subject: [PATCH 29/49] fix dict check for lidarr --- varken/lidarr.py | 10 ++++++---- varken/structures.py | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/varken/lidarr.py b/varken/lidarr.py index 8bcd070..e631372 100644 --- a/varken/lidarr.py +++ b/varken/lidarr.py @@ -48,10 +48,12 @@ class LidarrAPI(object): # Add Album to missing list if album is not complete for album in albums: - if album.statistics['percentOfTracks'] != 100: - influx_albums.append((album.title, album.releaseDate, album.artist['artistName'], album.id, - album.statistics['percentOfTracks'], - f"{album.statistics['trackFileCount']}/{album.statistics['TrackCount']}")) + percent_of_tracks = album.statistics.get('percentOfTracks', 0) + if percent_of_tracks != 100: + influx_albums.append( + (album.title, album.releaseDate, album.artist['artistName'], album.id,percent_of_tracks, + f"{album.statistics.get('trackFileCount', 0)}/{album.statistics.get('trackCount', 0)}") + ) for title, release_date, artist_name, album_id, percent_complete, complete_count in influx_albums: hash_id = hashit(f'{self.server.id}{title}{album_id}') diff --git a/varken/structures.py b/varken/structures.py index 9225adb..94e2478 100644 --- a/varken/structures.py +++ b/varken/structures.py @@ -501,5 +501,5 @@ class LidarrAlbum(NamedTuple): artist: dict = None images: list = None links: list = None - statistics: dict = None + statistics: dict = {} id: int = None From f943968087bb01ee2a95948d6facce3a4133e366 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 18:17:11 -0500 Subject: [PATCH 30/49] Ultra-threaded concurrency. For SCIENCE! --- Varken.py | 53 +++++++++++++++++++++++------------------------------ 1 file changed, 23 insertions(+), 30 deletions(-) diff --git a/Varken.py b/Varken.py index 47c5e83..6c58195 100644 --- a/Varken.py +++ b/Varken.py @@ -29,17 +29,13 @@ from varken.varkenlogger import VarkenLogger PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x) -def thread(): - while schedule.jobs: - job = QUEUE.get() - if isinstance(job, tuple): - job, query = job[0], job[1] - a = job(query=query) - else: - a = job() - if a is not None: - schedule.clear(a) - QUEUE.task_done() +def thread(job): + worker = Thread(target=job) + if isinstance(job, tuple): + job, query = job[0], job[1] + worker = Thread(target=job, kwargs={'query': query}) + + worker.start() if __name__ == "__main__": @@ -109,49 +105,49 @@ if __name__ == "__main__": SONARR = SonarrAPI(server, DBMANAGER) if server.queue: at_time = schedule.every(server.queue_run_seconds).seconds - at_time.do(QUEUE.put, SONARR.get_queue).tag("sonarr-{}-get_queue".format(server.id)) + at_time.do(thread, SONARR.get_queue).tag("sonarr-{}-get_queue".format(server.id)) if server.missing_days > 0: at_time = schedule.every(server.missing_days_run_seconds).seconds - at_time.do(QUEUE.put, SONARR.get_missing).tag("sonarr-{}-get_missing".format(server.id)) + at_time.do(thread, SONARR.get_missing).tag("sonarr-{}-get_missing".format(server.id)) if server.future_days > 0: at_time = schedule.every(server.future_days_run_seconds).seconds - at_time.do(QUEUE.put, SONARR.get_future).tag("sonarr-{}-get_future".format(server.id)) + at_time.do(thread, SONARR.get_future).tag("sonarr-{}-get_future".format(server.id)) if CONFIG.tautulli_enabled: GEOIPHANDLER = GeoIPHandler(DATA_FOLDER) - schedule.every(12).to(24).hours.do(QUEUE.put, GEOIPHANDLER.update) + schedule.every(12).to(24).hours.do(thread, GEOIPHANDLER.update) 
for server in CONFIG.tautulli_servers: TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER) if server.get_activity: at_time = schedule.every(server.get_activity_run_seconds).seconds - at_time.do(QUEUE.put, TAUTULLI.get_activity).tag("tautulli-{}-get_activity".format(server.id)) + at_time.do(thread, TAUTULLI.get_activity).tag("tautulli-{}-get_activity".format(server.id)) if server.get_stats: at_time = schedule.every(server.get_stats_run_seconds).seconds - at_time.do(QUEUE.put, TAUTULLI.get_stats).tag("tautulli-{}-get_stats".format(server.id)) + at_time.do(thread, TAUTULLI.get_stats).tag("tautulli-{}-get_stats".format(server.id)) if CONFIG.radarr_enabled: for server in CONFIG.radarr_servers: RADARR = RadarrAPI(server, DBMANAGER) if server.get_missing: at_time = schedule.every(server.get_missing_run_seconds).seconds - at_time.do(QUEUE.put, RADARR.get_missing).tag("radarr-{}-get_missing".format(server.id)) + at_time.do(thread, RADARR.get_missing).tag("radarr-{}-get_missing".format(server.id)) if server.queue: at_time = schedule.every(server.queue_run_seconds).seconds - at_time.do(QUEUE.put, RADARR.get_queue).tag("radarr-{}-get_queue".format(server.id)) + at_time.do(thread, RADARR.get_queue).tag("radarr-{}-get_queue".format(server.id)) if CONFIG.lidarr_enabled: for server in CONFIG.lidarr_servers: LIDARR = LidarrAPI(server, DBMANAGER) if server.queue: at_time = schedule.every(server.queue_run_seconds).seconds - at_time.do(QUEUE.put, LIDARR.get_queue, None).tag("lidarr-{}-get_queue".format(server.id)) + at_time.do(thread, LIDARR.get_queue).tag("lidarr-{}-get_queue".format(server.id)) if server.missing_days > 0: at_time = schedule.every(server.missing_days_run_seconds).seconds - at_time.do(QUEUE.put, (LIDARR.get_calendar, "Missing")).tag( + at_time.do(thread, (LIDARR.get_calendar, "Missing")).tag( "lidarr-{}-get_missing".format(server.id)) if server.future_days > 0: at_time = schedule.every(server.future_days_run_seconds).seconds - at_time.do(QUEUE.put, (LIDARR.get_calendar, "Future")).tag("lidarr-{}-get_future".format( + at_time.do(thread, (LIDARR.get_calendar, "Future")).tag("lidarr-{}-get_future".format( server.id)) if CONFIG.ombi_enabled: @@ -159,26 +155,26 @@ if __name__ == "__main__": OMBI = OmbiAPI(server, DBMANAGER) if server.request_type_counts: at_time = schedule.every(server.request_type_run_seconds).seconds - at_time.do(QUEUE.put, OMBI.get_request_counts).tag("ombi-{}-get_request_counts".format(server.id)) + at_time.do(thread, OMBI.get_request_counts).tag("ombi-{}-get_request_counts".format(server.id)) if server.request_total_counts: at_time = schedule.every(server.request_total_run_seconds).seconds - at_time.do(QUEUE.put, OMBI.get_all_requests).tag("ombi-{}-get_all_requests".format(server.id)) + at_time.do(thread, OMBI.get_all_requests).tag("ombi-{}-get_all_requests".format(server.id)) if server.issue_status_counts: at_time = schedule.every(server.issue_status_run_seconds).seconds - at_time.do(QUEUE.put, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id)) + at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id)) if CONFIG.sickchill_enabled: for server in CONFIG.sickchill_servers: SICKCHILL = SickChillAPI(server, DBMANAGER) if server.get_missing: at_time = schedule.every(server.get_missing_run_seconds).seconds - at_time.do(QUEUE.put, SICKCHILL.get_missing).tag("sickchill-{}-get_missing".format(server.id)) + at_time.do(thread, SICKCHILL.get_missing).tag("sickchill-{}-get_missing".format(server.id)) if CONFIG.unifi_enabled: for 
server in CONFIG.unifi_servers: UNIFI = UniFiAPI(server, DBMANAGER) at_time = schedule.every(server.get_usg_stats_run_seconds).seconds - at_time.do(QUEUE.put, UNIFI.get_usg_stats).tag("unifi-{}-get_usg_stats".format(server.id)) + at_time.do(thread, UNIFI.get_usg_stats).tag("unifi-{}-get_usg_stats".format(server.id)) # Run all on startup SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled, @@ -187,9 +183,6 @@ if __name__ == "__main__": vl.logger.error("All services disabled. Exiting") exit(1) - WORKER = Thread(target=thread) - WORKER.start() - schedule.run_all() while schedule.jobs: From 73ea3d51765c823824d71fd96f2f474ff4b8cd33 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Wed, 24 Apr 2019 18:51:32 -0500 Subject: [PATCH 31/49] sonarr refactor --- Varken.py | 16 ++++------- varken/sonarr.py | 75 ++++++++++-------------------------------------- 2 files changed, 21 insertions(+), 70 deletions(-) diff --git a/Varken.py b/Varken.py index 6c58195..4464085 100644 --- a/Varken.py +++ b/Varken.py @@ -29,12 +29,8 @@ from varken.varkenlogger import VarkenLogger PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x) -def thread(job): - worker = Thread(target=job) - if isinstance(job, tuple): - job, query = job[0], job[1] - worker = Thread(target=job, kwargs={'query': query}) - +def thread(job, **kwargs): + worker = Thread(target=job, kwargs=dict(**kwargs)) worker.start() @@ -108,10 +104,10 @@ if __name__ == "__main__": at_time.do(thread, SONARR.get_queue).tag("sonarr-{}-get_queue".format(server.id)) if server.missing_days > 0: at_time = schedule.every(server.missing_days_run_seconds).seconds - at_time.do(thread, SONARR.get_missing).tag("sonarr-{}-get_missing".format(server.id)) + at_time.do(thread, SONARR.get_calendar, query="Missing").tag("sonarr-{}-get_missing".format(server.id)) if server.future_days > 0: at_time = schedule.every(server.future_days_run_seconds).seconds - at_time.do(thread, SONARR.get_future).tag("sonarr-{}-get_future".format(server.id)) + at_time.do(thread, SONARR.get_calendar, query="Future").tag("sonarr-{}-get_future".format(server.id)) if CONFIG.tautulli_enabled: GEOIPHANDLER = GeoIPHandler(DATA_FOLDER) @@ -143,11 +139,11 @@ if __name__ == "__main__": at_time.do(thread, LIDARR.get_queue).tag("lidarr-{}-get_queue".format(server.id)) if server.missing_days > 0: at_time = schedule.every(server.missing_days_run_seconds).seconds - at_time.do(thread, (LIDARR.get_calendar, "Missing")).tag( + at_time.do(thread, LIDARR.get_calendar, query="Missing").tag( "lidarr-{}-get_missing".format(server.id)) if server.future_days > 0: at_time = schedule.every(server.future_days_run_seconds).seconds - at_time.do(thread, (LIDARR.get_calendar, "Future")).tag("lidarr-{}-get_future".format( + at_time.do(thread, LIDARR.get_calendar, query="Future").tag("lidarr-{}-get_future".format( server.id)) if CONFIG.ombi_enabled: diff --git a/varken/sonarr.py b/varken/sonarr.py index ae784c8..426daf5 100644 --- a/varken/sonarr.py +++ b/varken/sonarr.py @@ -19,68 +19,19 @@ class SonarrAPI(object): def __repr__(self): return f"" - def get_missing(self): - endpoint = '/api/calendar' - today = str(date.today()) - last_days = str(date.today() + timedelta(days=-self.server.missing_days)) - now = datetime.now(timezone.utc).astimezone().isoformat() - params = {'start': last_days, 'end': today} - influx_payload = [] - missing = [] - - req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) - get = 
connection_handler(self.session, req, self.server.verify_ssl) - - if not get: - return - - # Iteratively create a list of SonarrTVShow Objects from response json - tv_shows = [] - for show in get: - try: - tv_shows.append(SonarrTVShow(**show)) - except TypeError as e: - self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. Data ' - 'attempted is: %s', e, show) - - # Add show to missing list if file does not exist - for show in tv_shows: - if not show.hasFile: - sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}' - missing.append((show.series['title'], sxe, show.airDateUtc, show.title, show.id)) - - for series_title, sxe, air_date_utc, episode_title, sonarr_id in missing: - hash_id = hashit(f'{self.server.id}{series_title}{sxe}') - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Missing", - "sonarrId": sonarr_id, - "server": self.server.id, - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "airsUTC": air_date_utc - }, - "time": now, - "fields": { - "hash": hash_id - - } - } - ) - - self.dbmanager.write_points(influx_payload) - - def get_future(self): + def get_calendar(self, query="Missing"): endpoint = '/api/calendar/' today = str(date.today()) - now = datetime.now(timezone.utc).astimezone().isoformat() + last_days = str(date.today() - timedelta(days=self.server.missing_days)) future = str(date.today() + timedelta(days=self.server.future_days)) + now = datetime.now(timezone.utc).astimezone().isoformat() + if query == "Missing": + params = {'start': last_days, 'end': today} + else: + params = {'start': today, 'end': future} influx_payload = [] air_days = [] - params = {'start': today, 'end': future} + missing = [] req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) get = connection_handler(self.session, req, self.server.verify_ssl) @@ -102,15 +53,19 @@ class SonarrAPI(object): downloaded = 1 else: downloaded = 0 - air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id)) + if query == "Missing": + if not downloaded: + missing.append((show.series['title'], downloaded, sxe, show.airDateUtc, show.title, show.id)) + else: + air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id)) - for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in air_days: + for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing): hash_id = hashit(f'{self.server.id}{series_title}{sxe}') influx_payload.append( { "measurement": "Sonarr", "tags": { - "type": "Future", + "type": query, "sonarrId": sonarr_id, "server": self.server.id, "name": series_title, From 25d8b490f4149be94b1418c8e302f21ef3a34a89 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Wed, 24 Apr 2019 18:52:54 -0500 Subject: [PATCH 32/49] pre-bump for version --- varken/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/varken/__init__.py b/varken/__init__.py index 2d162d2..ad39859 100644 --- a/varken/__init__.py +++ b/varken/__init__.py @@ -1,2 +1,2 @@ -VERSION = "1.6.9" +VERSION = "1.7.0" BRANCH = 'develop' From 6f0b3a29e058b114f816a452dda97dd253c70396 Mon Sep 17 00:00:00 2001 From: samwiseg0 Date: Thu, 25 Apr 2019 09:26:56 -0400 Subject: [PATCH 33/49] Add to .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index f2cb4ec..9e9a0d9 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,4 @@ data/varken.ini varken-venv/ venv/ logs/ +__pycache__ From c6a0a49100fb7d9b567e5f7b1cae828a66f91afc Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Thu, 25 Apr 2019 09:30:29 -0500 Subject: [PATCH 34/49] pep8 --- varken/lidarr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/varken/lidarr.py b/varken/lidarr.py index e631372..a65b44a 100644 --- a/varken/lidarr.py +++ b/varken/lidarr.py @@ -51,7 +51,7 @@ class LidarrAPI(object): percent_of_tracks = album.statistics.get('percentOfTracks', 0) if percent_of_tracks != 100: influx_albums.append( - (album.title, album.releaseDate, album.artist['artistName'], album.id,percent_of_tracks, + (album.title, album.releaseDate, album.artist['artistName'], album.id, percent_of_tracks, f"{album.statistics.get('trackFileCount', 0)}/{album.statistics.get('trackCount', 0)}") ) From c06b7e3f98454d92f951cba725008c38fbb14300 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Thu, 25 Apr 2019 09:52:24 -0500 Subject: [PATCH 35/49] Partial update to build to get ready for lidarr to master --- utilities/grafana_build.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/utilities/grafana_build.py b/utilities/grafana_build.py index e261e6b..220949f 100644 --- a/utilities/grafana_build.py +++ b/utilities/grafana_build.py @@ -15,12 +15,14 @@ movies_library = 'Movies' fourk_movies_library = 'Movies 4K' tv_shows_library = 'TV Shows' fourk_tv_shows_library = 'TV Shows 4K' +music_library = 'Music' +usg_name = 'Gateway' ombi_url = 'https://yourdomain.com/ombi' tautulli_url = 'https://yourdomain.com/tautulli' sonarr_url = 'https://yourdomain.com/sonarr' radarr_url = 'https://yourdomain.com/radarr' sickchill_url = 'https://yourdomain.com/sickchill' - +lidarr_url = 'https://yourdomain.com/lidarr' # Do not edit past this line # session = Session() From 4ac0fe6690cf6fd39cfb2de77313c19734b66e20 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Thu, 25 Apr 2019 10:09:23 -0500 Subject: [PATCH 36/49] more updates --- utilities/grafana_build.py | 51 +++++++++++++++++++++++++++++--------- 1 file changed, 39 insertions(+), 12 deletions(-) diff --git a/utilities/grafana_build.py b/utilities/grafana_build.py index 220949f..7a8f73a 100644 --- a/utilities/grafana_build.py +++ b/utilities/grafana_build.py @@ -30,25 +30,27 @@ auth = (username, password) url_base = f"{grafana_url.rstrip('/')}/api" varken_datasource = [] +datasource_name = "Varken-Script" try: datasources = session.get(url_base + '/datasources', auth=auth, verify=verify).json() varken_datasource = [source for source in datasources if source['database'] == 'varken'] if varken_datasource: print(f'varken datasource already exists with the name "{varken_datasource[0]["name"]}"') + datasource_name = varken_datasource[0]["name"] except JSONDecodeError: exit(f"Could not talk to grafana at {grafana_url}. Check URL/Username/Password") -datasource_data = { - "name": "Varken-Script", - "type": "influxdb", - "url": f"http://{'influxdb' if docker else host_ip}:8086", - "access": "proxy", - "basicAuth": False, - "database": 'varken' -} if not varken_datasource: + datasource_data = { + "name": datasource_name, + "type": "influxdb", + "url": f"http://{'influxdb' if docker else host_ip}:8086", + "access": "proxy", + "basicAuth": False, + "database": 'varken' + } post = session.post(url_base + '/datasources', auth=auth, verify=verify, json=datasource_data).json() - print(f'Created Varken-Script datasource (id:{post["datasource"]["id"]})') + print(f'Created {datasource_name} datasource (id:{post["datasource"]["id"]})') our_dashboard = session.get(url_base + '/gnet/dashboards/9585', auth=auth, verify=verify).json()['json'] dashboard_data = { @@ -62,7 +64,7 @@ dashboard_data = { "type": "datasource", "pluginId": "influxdb", "pluginName": "InfluxDB", - "value": "Varken-Script" + "value": datasource_name }, { "name": "VAR_MOVIESLIBRARY", @@ -92,6 +94,20 @@ dashboard_data = { "value": fourk_tv_shows_library, "description": "" }, + { + "name": "VAR_MUSICLIBRARY", + "type": "constant", + "label": "Music Library Name", + "value": music_library, + "description": "" + }, + { + "name": "VAR_USGNAME", + "type": "constant", + "label": "Unifi USG Name", + "value": usg_name, + "description": "" + }, { "name": "VAR_OMBIURL", "type": "constant", @@ -126,8 +142,19 @@ dashboard_data = { "label": "Sickchill URL", "value": sickchill_url, "description": "" + }, + { + "name": "VAR_LIDARRURL", + "type": "constant", + "label": "lidarr URL", + "value": lidarr_url, + "description": "" } ] } -make_dashboard = session.post(url_base + '/dashboards/import', json=dashboard_data, auth=auth, verify=verify) -print('Created dashboard "Varken-Script"') +try: + make_dashboard = session.post(url_base + '/dashboards/import', json=dashboard_data, auth=auth, verify=verify) + if make_dashboard.status_code == 200 and make_dashboard.json().get('imported'): + print(f'Created dashboard "{our_dashboard["title"]}"') +except: + print('Shit...') From 03e67b8759a4831b1785e1bcf76214e9ac5d96b4 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Thu, 25 Apr 2019 10:19:07 -0500 Subject: [PATCH 37/49] update requirements.txt + desc --- requirements.txt | 8 ++++---- utilities/grafana_build.py | 4 ++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index bab1e82..6225a90 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,9 +2,9 @@ # Potential requirements. 
# pip3 install -r requirements.txt #--------------------------------------------------------- -requests>=2.20.1 +requests>=2.21 geoip2>=2.9.0 influxdb>=5.2.0 -schedule>=0.5.0 -distro>=1.3.0 -urllib3>=1.22 \ No newline at end of file +schedule>=0.6.0 +distro>=1.4.0 +urllib3>=1.24.2 \ No newline at end of file diff --git a/utilities/grafana_build.py b/utilities/grafana_build.py index 7a8f73a..7a6ac3a 100644 --- a/utilities/grafana_build.py +++ b/utilities/grafana_build.py @@ -1,4 +1,8 @@ #!/usr/bin/env python3 +# To use: +# docker exec -it varken cp /app/data/utilities/grafana_build.py /config/grafana_build.py +# nano /opt/dockerconfigs/varken/grafana_build.py # Edit vars. This assumes you have your persistent data there +# docker exec -it varken python3 /config/grafana_build.py from sys import exit from requests import Session from json.decoder import JSONDecodeError From e44ddefb434d142b047242a094e5f2f5b071072c Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Mon, 29 Apr 2019 10:25:06 -0500 Subject: [PATCH 38/49] fixes #129 --- varken/dbmanager.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/varken/dbmanager.py b/varken/dbmanager.py index c321469..e0b0bd5 100644 --- a/varken/dbmanager.py +++ b/varken/dbmanager.py @@ -28,8 +28,12 @@ class DBManager(object): self.logger.info("Creating varken database") self.influx.create_database('varken') - self.logger.info("Creating varken retention policy (30d/1h)") - self.influx.create_retention_policy('varken 30d/1h', '30d', '1', 'varken', False, '1h') + retention_policies = [policy['name'] for policy in self.influx.get_list_retention_policies(database='varken')] + + # Fix name bug by adding retention policy retroactively + if 'varken 30d-1h' not in retention_policies: + self.logger.info("Creating varken retention policy (30d-1h)") + self.influx.create_retention_policy('varken 30d-1h', '30d', '1', 'varken', False, '1h') def write_points(self, data): d = data From 751ed9644407db459a323c64492cf0a030fc2dfb Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Mon, 29 Apr 2019 10:36:16 -0500 Subject: [PATCH 39/49] change default to true + explain policy with verbosity --- varken/dbmanager.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/varken/dbmanager.py b/varken/dbmanager.py index e0b0bd5..acb77d2 100644 --- a/varken/dbmanager.py +++ b/varken/dbmanager.py @@ -33,7 +33,8 @@ class DBManager(object): # Fix name bug by adding retention policy retroactively if 'varken 30d-1h' not in retention_policies: self.logger.info("Creating varken retention policy (30d-1h)") - self.influx.create_retention_policy('varken 30d-1h', '30d', '1', 'varken', False, '1h') + self.influx.create_retention_policy(name='varken 30d-1h', duration='30d', replication='1', + database='varken', default=True, shard_duration='1h') def write_points(self, data): d = data From 29e4e57e20d73b3b0c619498ac9bf82284fa1a6b Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Mon, 29 Apr 2019 10:45:56 -0500 Subject: [PATCH 40/49] pep8 --- varken/dbmanager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/varken/dbmanager.py b/varken/dbmanager.py index acb77d2..f223a34 100644 --- a/varken/dbmanager.py +++ b/varken/dbmanager.py @@ -33,7 +33,7 @@ class DBManager(object): # Fix name bug by adding retention policy retroactively if 'varken 30d-1h' not in retention_policies: self.logger.info("Creating varken retention policy (30d-1h)") - self.influx.create_retention_policy(name='varken 30d-1h', duration='30d', replication='1', + self.influx.create_retention_policy(name='varken 30d-1h', duration='30d', replication='1', database='varken', default=True, shard_duration='1h') def write_points(self, data): From 1db99a46ed25439ffd9b6c11ab2263d4c82bb6cd Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Mon, 29 Apr 2019 10:52:45 -0500 Subject: [PATCH 41/49] only add if no db for now until tau historical built --- varken/dbmanager.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/varken/dbmanager.py b/varken/dbmanager.py index f223a34..06e6f18 100644 --- a/varken/dbmanager.py +++ b/varken/dbmanager.py @@ -28,13 +28,12 @@ class DBManager(object): self.logger.info("Creating varken database") self.influx.create_database('varken') - retention_policies = [policy['name'] for policy in self.influx.get_list_retention_policies(database='varken')] - - # Fix name bug by adding retention policy retroactively - if 'varken 30d-1h' not in retention_policies: - self.logger.info("Creating varken retention policy (30d-1h)") - self.influx.create_retention_policy(name='varken 30d-1h', duration='30d', replication='1', - database='varken', default=True, shard_duration='1h') + retention_policies = [policy['name'] for policy in + self.influx.get_list_retention_policies(database='varken')] + if 'varken 30d-1h' not in retention_policies: + self.logger.info("Creating varken retention policy (30d-1h)") + self.influx.create_retention_policy(name='varken 30d-1h', duration='30d', replication='1', + database='varken', default=True, shard_duration='1h') def write_points(self, data): d = data From 640989e49513a4146b56e840dac344d59059d875 Mon Sep 17 00:00:00 2001 From: "Nicholas St. 
Germain" Date: Mon, 29 Apr 2019 14:27:57 -0500 Subject: [PATCH 42/49] allow historical import of tautulli --- utilities/historical_tautulli_import.py | 47 ++++++++ varken/structures.py | 33 ++++-- varken/tautulli.py | 137 +++++++++++++++++++++++- 3 files changed, 204 insertions(+), 13 deletions(-) create mode 100644 utilities/historical_tautulli_import.py diff --git a/utilities/historical_tautulli_import.py b/utilities/historical_tautulli_import.py new file mode 100644 index 0000000..62bd0f8 --- /dev/null +++ b/utilities/historical_tautulli_import.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 +from argparse import ArgumentParser +from os import access, R_OK +from os.path import isdir, abspath, dirname, join +from logging import getLogger, StreamHandler, Formatter, DEBUG + +from varken.iniparser import INIParser +from varken.dbmanager import DBManager +from varken.helpers import GeoIPHandler +from varken.tautulli import TautulliAPI + +if __name__ == "__main__": + parser = ArgumentParser(prog='varken', + description='Tautulli historical import tool') + parser.add_argument("-d", "--data-folder", help='Define an alternate data folder location') + parser.add_argument("-D", "--days", default=30, type=int, help='Specify length of historical import') + opts = parser.parse_args() + + DATA_FOLDER = abspath(join(dirname(__file__), '..', 'data')) + + templogger = getLogger('temp') + templogger.setLevel(DEBUG) + tempch = StreamHandler() + tempformatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S') + tempch.setFormatter(tempformatter) + templogger.addHandler(tempch) + + if opts.data_folder: + ARG_FOLDER = opts.data_folder + + if isdir(ARG_FOLDER): + DATA_FOLDER = ARG_FOLDER + if not access(DATA_FOLDER, R_OK): + templogger.error("Read permission error for %s", DATA_FOLDER) + exit(1) + else: + templogger.error("%s does not exist", ARG_FOLDER) + exit(1) + + CONFIG = INIParser(DATA_FOLDER) + DBMANAGER = DBManager(CONFIG.influx_server) + + if CONFIG.tautulli_enabled: + GEOIPHANDLER = GeoIPHandler(DATA_FOLDER) + for server in CONFIG.tautulli_servers: + TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER) + TAUTULLI.get_historical(days=opts.days) diff --git a/varken/structures.py b/varken/structures.py index 94e2478..9781067 100644 --- a/varken/structures.py +++ b/varken/structures.py @@ -273,8 +273,8 @@ class TautulliStream(NamedTuple): audience_rating_image: str = None audio_bitrate: str = None audio_bitrate_mode: str = None - audio_channels: str = None audio_channel_layout: str = None + audio_channels: str = None audio_codec: str = None audio_decision: str = None audio_language: str = None @@ -292,6 +292,8 @@ class TautulliStream(NamedTuple): collections: list = None container: str = None content_rating: str = None + current_session: str = None + date: str = None deleted_user: int = None device: str = None directors: list = None @@ -307,6 +309,8 @@ class TautulliStream(NamedTuple): grandparent_rating_key: str = None grandparent_thumb: str = None grandparent_title: str = None + group_count: int = None + group_ids: str = None guid: str = None height: str = None id: str = None @@ -331,16 +335,19 @@ class TautulliStream(NamedTuple): optimized_version: int = None optimized_version_profile: str = None optimized_version_title: str = None - originally_available_at: str = None original_title: str = None + originally_available_at: str = None parent_media_index: str = None parent_rating_key: str = None parent_thumb: str = None parent_title: str = None + paused_counter: int 
= None + percent_complete: int = None platform: str = None platform_name: str = None platform_version: str = None player: str = None + pre_tautulli: str = None product: str = None product_version: str = None profile: str = None @@ -349,20 +356,25 @@ class TautulliStream(NamedTuple): rating: str = None rating_image: str = None rating_key: str = None + reference_id: int = None relay: int = None + relayed: int = None section_id: str = None + secure: str = None selected: int = None session_id: str = None session_key: str = None shared_libraries: list = None sort_title: str = None + started: int = None state: str = None + stopped: int = None stream_aspect_ratio: str = None stream_audio_bitrate: str = None stream_audio_bitrate_mode: str = None - stream_audio_channels: str = None stream_audio_channel_layout: str = None stream_audio_channel_layout_: str = None + stream_audio_channels: str = None stream_audio_codec: str = None stream_audio_decision: str = None stream_audio_language: str = None @@ -380,8 +392,8 @@ class TautulliStream(NamedTuple): stream_subtitle_language: str = None stream_subtitle_language_code: str = None stream_subtitle_location: str = None - stream_video_bitrate: str = None stream_video_bit_depth: str = None + stream_video_bitrate: str = None stream_video_codec: str = None stream_video_codec_level: str = None stream_video_decision: str = None @@ -393,7 +405,7 @@ class TautulliStream(NamedTuple): stream_video_resolution: str = None stream_video_width: str = None studio: str = None - subtitles: int = None + sub_type: str = None subtitle_codec: str = None subtitle_container: str = None subtitle_decision: str = None @@ -402,7 +414,7 @@ class TautulliStream(NamedTuple): subtitle_language: str = None subtitle_language_code: str = None subtitle_location: str = None - sub_type: str = None + subtitles: int = None summary: str = None synced_version: int = None synced_version_profile: str = None @@ -433,17 +445,17 @@ class TautulliStream(NamedTuple): type: str = None updated_at: str = None user: str = None - username: str = None user_id: int = None user_rating: str = None user_thumb: str = None - video_bitrate: str = None + username: str = None video_bit_depth: str = None + video_bitrate: str = None video_codec: str = None video_codec_level: str = None video_decision: str = None - video_framerate: str = None video_frame_rate: str = None + video_framerate: str = None video_height: str = None video_language: str = None video_language_code: str = None @@ -452,11 +464,10 @@ class TautulliStream(NamedTuple): video_resolution: str = None video_width: str = None view_offset: str = None + watched_status: int = None width: str = None writers: list = None year: str = None - secure: str = None - relayed: int = None # Lidarr diff --git a/varken/tautulli.py b/varken/tautulli.py index c49d95b..0792fc7 100644 --- a/varken/tautulli.py +++ b/varken/tautulli.py @@ -1,7 +1,8 @@ from logging import getLogger from requests import Session, Request -from datetime import datetime, timezone from geoip2.errors import AddressNotFoundError +from datetime import datetime, timezone, date, timedelta +from influxdb.exceptions import InfluxDBClientError from varken.structures import TautulliStream from varken.helpers import hashit, connection_handler @@ -60,7 +61,7 @@ class TautulliAPI(object): if not self.my_ip: # Try the fallback ip in the config file try: - self.logger.debug('Atempting to use the failback IP...') + self.logger.debug('Attempting to use the fallback IP...') geodata = 
self.geoiphandler.lookup(self.server.fallback_ip) except AddressNotFoundError as e: self.logger.error('%s', e) @@ -215,3 +216,135 @@ class TautulliAPI(object): influx_payload.append(data) self.dbmanager.write_points(influx_payload) + + def get_historical(self, days=30): + influx_payload = [] + start_date = date.today() - timedelta(days=days) + params = {'cmd': 'get_history', 'grouping': 1, 'length': 1000000} + req = self.session.prepare_request(Request('GET', self.server.url + self.endpoint, params=params)) + g = connection_handler(self.session, req, self.server.verify_ssl) + + if not g: + return + + get = g['response']['data']['data'] + + params = {'cmd': 'get_stream_data', 'row_id': 0} + sessions = [] + for history_item in get: + if not history_item['id']: + self.logger.debug('Skipping entry with no ID. (%s)', history_item['full_title']) + continue + if date.fromtimestamp(history_item['started'] < start_date): + continue + params['row_id'] = history_item['id'] + req = self.session.prepare_request(Request('GET', self.server.url + self.endpoint, params=params)) + g = connection_handler(self.session, req, self.server.verify_ssl) + if not g: + self.logger.debug('Could not get historical stream data for %s. Skipping.', history_item['full_title']) + try: + self.logger.debug('Adding %s to history', history_item['full_title']) + history_item.update(g['response']['data']) + sessions.append(TautulliStream(**history_item)) + except TypeError as e: + self.logger.error('TypeError has occurred : %s while creating TautulliStream structure', e) + continue + + for session in sessions: + try: + geodata = self.geoiphandler.lookup(session.ip_address) + except (ValueError, AddressNotFoundError): + self.logger.debug('Public IP missing for Tautulli session...') + if not self.my_ip: + # Try the fallback ip in the config file + try: + self.logger.debug('Attempting to use the fallback IP...') + geodata = self.geoiphandler.lookup(self.server.fallback_ip) + except AddressNotFoundError as e: + self.logger.error('%s', e) + + self.my_ip = self.session.get('http://ip.42.pl/raw').text + self.logger.debug('Looked the public IP and set it to %s', self.my_ip) + + geodata = self.geoiphandler.lookup(self.my_ip) + + else: + geodata = self.geoiphandler.lookup(self.my_ip) + + if not all([geodata.location.latitude, geodata.location.longitude]): + latitude = 37.234332396 + longitude = -115.80666344 + else: + latitude = geodata.location.latitude + longitude = geodata.location.longitude + + if not geodata.city.name: + location = '👽' + else: + location = geodata.city.name + + decision = session.transcode_decision + if decision == 'copy': + decision = 'direct stream' + + video_decision = session.stream_video_decision + if video_decision == 'copy': + video_decision = 'direct stream' + elif video_decision == '': + video_decision = 'Music' + + quality = session.stream_video_resolution + if not quality: + quality = session.container.upper() + elif quality in ('SD', 'sd', '4k'): + quality = session.stream_video_resolution.upper() + else: + quality = session.stream_video_resolution + 'p' + + player_state = 100 + + hash_id = hashit(f'{session.id}{session.session_key}{session.user}{session.full_title}') + influx_payload.append( + { + "measurement": "Tautulli", + "tags": { + "type": "Session", + "session_id": session.session_id, + "friendly_name": session.friendly_name, + "username": session.user, + "title": session.full_title, + "platform": session.platform, + "quality": quality, + "video_decision": video_decision.title(), + 
"transcode_decision": decision.title(), + "transcode_hw_decoding": session.transcode_hw_decoding, + "transcode_hw_encoding": session.transcode_hw_encoding, + "media_type": session.media_type.title(), + "audio_codec": session.audio_codec.upper(), + "stream_audio_codec": session.stream_audio_codec.upper(), + "quality_profile": session.quality_profile, + "progress_percent": session.progress_percent, + "region_code": geodata.subdivisions.most_specific.iso_code, + "location": location, + "full_location": f'{geodata.subdivisions.most_specific.name} - {geodata.city.name}', + "latitude": latitude, + "longitude": longitude, + "player_state": player_state, + "device_type": session.platform, + "relayed": session.relayed, + "secure": session.secure, + "server": self.server.id + }, + "time": datetime.fromtimestamp(session.stopped).astimezone().isoformat(), + "fields": { + "hash": hash_id + } + } + ) + try: + self.dbmanager.write_points(influx_payload) + except InfluxDBClientError as e: + if "beyond retention policy" in str(e): + self.logger.debug('Only imported 30 days of data per retention policy') + else: + self.logger.error('Something went wrong... post this output in discord: %s', e) From bd257afcd62fe804c5e0f12c201c96d235f661dc Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Mon, 29 Apr 2019 14:50:33 -0500 Subject: [PATCH 43/49] force specific packages --- requirements.txt | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index 6225a90..38e1312 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,9 +2,9 @@ # Potential requirements. # pip3 install -r requirements.txt #--------------------------------------------------------- -requests>=2.21 -geoip2>=2.9.0 -influxdb>=5.2.0 -schedule>=0.6.0 -distro>=1.4.0 -urllib3>=1.24.2 \ No newline at end of file +requests==2.21 +geoip2==2.9.0 +influxdb==5.2.0 +schedule==0.6.0 +distro==1.4.0 +urllib3==1.24.2 \ No newline at end of file From 641ab7f7ae8a482987b054fd92f07dabfabd8d81 Mon Sep 17 00:00:00 2001 From: samwiseg0 Date: Tue, 30 Apr 2019 09:30:26 -0400 Subject: [PATCH 44/49] Fix date compare for history gathering in Tautulli --- varken/tautulli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/varken/tautulli.py b/varken/tautulli.py index 0792fc7..8a60ae4 100644 --- a/varken/tautulli.py +++ b/varken/tautulli.py @@ -235,7 +235,7 @@ class TautulliAPI(object): if not history_item['id']: self.logger.debug('Skipping entry with no ID. (%s)', history_item['full_title']) continue - if date.fromtimestamp(history_item['started'] < start_date): + if date.fromtimestamp(history_item['started']) < start_date: continue params['row_id'] = history_item['id'] req = self.session.prepare_request(Request('GET', self.server.url + self.endpoint, params=params)) From 0f2b4ad355c43089f112e850d5dd44b89e7b7cda Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Sun, 5 May 2019 21:38:15 -0500 Subject: [PATCH 45/49] change links to BookStack links --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 5650519..3f131ef 100644 --- a/README.md +++ b/README.md @@ -41,15 +41,15 @@ Key features: ## Installation Guides -Varken Installation guides can be found in the [wiki](https://github.com/Boerderij/Varken/wiki/Installation). +Varken Installation guides can be found in the [wiki](https://wiki.cajun.pro/books/varken/chapter/installation). 
## Support -Please read [Asking for Support](https://github.com/Boerderij/Varken/wiki/Asking-for-Support) before seeking support. +Please read [Asking for Support](https://wiki.cajun.pro/books/varken/chapter/asking-for-support) before seeking support. [Click here for quick access to discord support](http://cyborg.decreator.dev/channels/518970285773422592/530424560504537105/). No app or account needed! ### InfluxDB -[InfluxDB Installation Documentation](https://docs.influxdata.com/influxdb/v1.7/introduction/installation/) +[InfluxDB Installation Documentation](https://wiki.cajun.pro/books/varken/page/influxdb-d1f) Influxdb is required but not packaged as part of Varken. Varken will create its database on its own. If you choose to give varken user permissions that @@ -57,5 +57,5 @@ do not include database creation, please ensure you create an influx database named `varken` ### Grafana -[Grafana Installation Documentation](http://docs.grafana.org/installation/) -Official dashboard installation instructions can be found in the [wiki](https://github.com/Boerderij/Varken/wiki/Installation#grafana) +[Grafana Installation Documentation](https://wiki.cajun.pro/books/varken/page/grafana) +Official dashboard installation instructions can be found in the [wiki](https://wiki.cajun.pro/books/varken/page/grafana) From 0e593640df4cb250c99039ee0a839f07f4249130 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Sun, 5 May 2019 21:39:59 -0500 Subject: [PATCH 46/49] Add lidarr to readme --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 3f131ef..203dbcc 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,7 @@ Supported Modules: * [Tautulli](https://tautulli.com/) - A Python based monitoring and tracking tool for Plex Media Server. * [Ombi](https://ombi.io/) - Want a Movie or TV Show on Plex or Emby? Use Ombi! * [Unifi](https://unifi-sdn.ubnt.com/) - The Global Leader in Managed Wi-Fi Systems +* [Lidarr](https://lidarr.audio/) - Looks and smells like Sonarr but made for music. Key features: * Multiple server support for all modules From ef1bf1f6238153f8dab4d735ba296060767af339 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Sun, 5 May 2019 21:42:29 -0500 Subject: [PATCH 47/49] reword tagline --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 203dbcc..e7bdaf6 100644 --- a/README.md +++ b/README.md @@ -11,14 +11,14 @@ Dutch for PIG. PIG is an Acronym for Plex/InfluxDB/Grafana -Varken is a standalone command-line utility to aggregate data -from the Plex ecosystem into InfluxDB. Examples use Grafana for a -frontend +Varken is a standalone application to aggregate data from the Plex +ecosystem into InfluxDB using Grafana for a frontend Requirements: * [Python 3.6.7+](https://www.python.org/downloads/release/python-367/) * [Python3-pip](https://pip.pypa.io/en/stable/installing/) * [InfluxDB](https://www.influxdata.com/) +* [Grafana](https://grafana.com/)

Example Dashboard From 3ff614c2a43bf0a52dc0e48a0508d71cb95e1797 Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Sun, 5 May 2019 21:44:01 -0500 Subject: [PATCH 48/49] add alts --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index e7bdaf6..eebc61e 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@

-[banner image] +[banner image, alt="Logo Banner"]

[![Build Status](https://jenkins.cajun.pro/buildStatus/icon?job=Varken/master)](https://jenkins.cajun.pro/job/Varken/job/master/) @@ -23,7 +23,7 @@ Requirements:

Example Dashboard -[dashboard screenshot] +[dashboard screenshot, alt="dashboard"]

Supported Modules: @@ -58,5 +58,6 @@ do not include database creation, please ensure you create an influx database named `varken` ### Grafana -[Grafana Installation Documentation](https://wiki.cajun.pro/books/varken/page/grafana) +[Grafana Installation Documentation](https://wiki.cajun.pro/books/varken/page/grafana) + Official dashboard installation instructions can be found in the [wiki](https://wiki.cajun.pro/books/varken/page/grafana) From 9c863f87622821a422c73b73c73186cceeff86bf Mon Sep 17 00:00:00 2001 From: "Nicholas St. Germain" Date: Sun, 5 May 2019 21:52:00 -0500 Subject: [PATCH 49/49] v1.7.0 Merge --- CHANGELOG.md | 21 +++++++++++++++++++-- README.md | 4 +--- varken/__init__.py | 2 +- 3 files changed, 21 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c92b35a..310738a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,24 @@ # Change Log -## [v1.6.8](https://github.com/Boerderij/Varken/tree/v1.6.8) (2019-04-18) -[Full Changelog](https://github.com/Boerderij/Varken/compare/1.6.7...v1.6.8) +## [v1.7.0](https://github.com/Boerderij/Varken/tree/v1.7.0) (2019-05-05) +[Full Changelog](https://github.com/Boerderij/Varken/compare/1.6.8...v1.7.0) + +**Implemented enhancements:** + +- \[ENHANCEMENT\] Add album and track totals to artist library from Tautulli [\#127](https://github.com/Boerderij/Varken/issues/127) +- \[Feature Request\] No way to show music album / track count [\#125](https://github.com/Boerderij/Varken/issues/125) + +**Fixed bugs:** + +- \[BUG\] Invalid retention policy name causing retention policy creation failure [\#129](https://github.com/Boerderij/Varken/issues/129) +- \[BUG\] Unifi errors on unnamed devices [\#126](https://github.com/Boerderij/Varken/issues/126) + +**Merged pull requests:** + +- v1.7.0 Merge [\#131](https://github.com/Boerderij/Varken/pull/131) ([DirtyCajunRice](https://github.com/DirtyCajunRice)) + +## [1.6.8](https://github.com/Boerderij/Varken/tree/1.6.8) (2019-04-19) +[Full Changelog](https://github.com/Boerderij/Varken/compare/1.6.7...1.6.8) **Implemented enhancements:** diff --git a/README.md b/README.md index eebc61e..2197e33 100644 --- a/README.md +++ b/README.md @@ -58,6 +58,4 @@ do not include database creation, please ensure you create an influx database named `varken` ### Grafana -[Grafana Installation Documentation](https://wiki.cajun.pro/books/varken/page/grafana) - -Official dashboard installation instructions can be found in the [wiki](https://wiki.cajun.pro/books/varken/page/grafana) +[Grafana Installation/Dashboard Documentation](https://wiki.cajun.pro/books/varken/page/grafana) \ No newline at end of file diff --git a/varken/__init__.py b/varken/__init__.py index ad39859..8b51e03 100644 --- a/varken/__init__.py +++ b/varken/__init__.py @@ -1,2 +1,2 @@ VERSION = "1.7.0" -BRANCH = 'develop' +BRANCH = 'master'
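
The Tautulli back-fill introduced in PATCH 42/49 is driven by the new utilities/historical_tautulli_import.py wrapper, which exposes -d/--data-folder (alternate data folder; defaults to ../data relative to the script) and -D/--days (length of the historical import, default 30). A minimal invocation sketch, assuming the command is run from the repository root with the repo on PYTHONPATH so the varken package resolves, and that data/varken.ini is already configured (neither assumption is stated in the patch itself):

    # from the repository root; uses the default ./data folder and a 30-day window
    PYTHONPATH=. python3 utilities/historical_tautulli_import.py --days 30

Writes for history rows older than the 'varken 30d-1h' retention policy set up earlier in this series (PATCH 38-41/49) are rejected by InfluxDB; get_historical() catches that InfluxDBClientError and logs it at debug level instead of failing the import.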