diff --git a/Varken.py b/Varken.py
index c0059c1..914eafe 100644
--- a/Varken.py
+++ b/Varken.py
@@ -12,6 +12,7 @@ from varken.tautulli import TautulliAPI
 from varken.radarr import RadarrAPI
 from varken.ombi import OmbiAPI
 from varken.dbmanager import DBManager
+from varken.varkenlogger import VarkenLogger
 
 def threaded(job):
     thread = threading.Thread(target=job)
@@ -19,6 +20,9 @@ def threaded(job):
 
 
 if __name__ == "__main__":
+    vl = VarkenLogger()
+    vl.logger.info('Starting Varken...')
+
     parser = ArgumentParser(prog='varken',
                             description='Command-line utility to aggregate data from the plex ecosystem into InfluxDB',
                             formatter_class=RawTextHelpFormatter)
@@ -84,4 +88,3 @@ if __name__ == "__main__":
     while True:
         schedule.run_pending()
         sleep(1)
-
diff --git a/varken/dbmanager.py b/varken/dbmanager.py
index 8da3ced..581cb0f 100644
--- a/varken/dbmanager.py
+++ b/varken/dbmanager.py
@@ -1,5 +1,8 @@
+import logging
+
 from influxdb import InfluxDBClient
 
+logger = logging.getLogger('Varken')
 
 class DBManager(object):
     def __init__(self, server):
@@ -13,4 +16,5 @@ class DBManager(object):
         self.influx.create_retention_policy('varken 30d/1h', '30d', '1', 'varken', False, '1h')
 
     def write_points(self, data):
-        self.influx.write_points(data)
\ No newline at end of file
+        logger.debug('Writing Data to InfluxDB {}'.format(data))
+        self.influx.write_points(data)
diff --git a/varken/helpers.py b/varken/helpers.py
index eeda689..772f70f 100644
--- a/varken/helpers.py
+++ b/varken/helpers.py
@@ -3,17 +3,20 @@ import time
 import tarfile
 import hashlib
 import geoip2.database
+import logging
+
 from json.decoder import JSONDecodeError
 from os.path import abspath, join
 from requests.exceptions import InvalidSchema, SSLError
 from urllib.request import urlretrieve
 
+logger = logging.getLogger('Varken')
 
 def geoip_download():
     tar_dbfile = abspath(join('.', 'data', 'GeoLite2-City.tar.gz'))
     url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'
     urlretrieve(url, tar_dbfile)
-    tar = tarfile.open(tar_dbfile, "r:gz")
+    tar = tarfile.open(tar_dbfile, 'r:gz')
     for files in tar.getmembers():
         if 'GeoLite2-City.mmdb' in files.name:
             files.name = os.path.basename(files.name)
@@ -56,20 +59,20 @@ def connection_handler(session, request, verify):
     try:
         get = s.send(r, verify=v)
         if get.status_code == 401:
-            print("Your api key is incorrect for {}".format(r.url))
+            logger.info('Your api key is incorrect for {}'.format(r.url))
         elif get.status_code == 404:
-            print("This url doesnt even resolve: {}".format(r.url))
+            logger.info('This url doesnt even resolve: {}'.format(r.url))
         elif get.status_code == 200:
             try:
                 return_json = get.json()
             except JSONDecodeError:
-                print("No JSON response... BORKED! Let us know in discord")
+                logger.info('No JSON response... BORKED! Let us know in discord')
 
     except InvalidSchema:
-        print("You added http(s):// in the config file. Don't do that.")
+        logger.info("You added http(s):// in the config file. Don't do that.")
 
     except SSLError as e:
-        print("Either your host is unreachable or you have an ssl issue.")
-        print("The issue was: {}".format(e))
+        logger.info('Either your host is unreachable or you have an ssl issue.')
+        logger.info('The issue was: {}'.format(e))
 
     return return_json
diff --git a/varken/ombi.py b/varken/ombi.py
index 8d8fe03..6c71fe0 100644
--- a/varken/ombi.py
+++ b/varken/ombi.py
@@ -1,7 +1,6 @@
 from requests import Session, Request
 from datetime import datetime, timezone
 
-from varken.logger import logging
 from varken.helpers import connection_handler
 from varken.structures import OmbiRequestCounts
 
@@ -15,7 +14,6 @@ class OmbiAPI(object):
         self.session = Session()
         self.session.headers = {'Apikey': self.server.api_key}
 
-    @logging
     def get_total_requests(self):
         self.now = datetime.now(timezone.utc).astimezone().isoformat()
         tv_endpoint = '/api/v1/Request/tv'
@@ -49,7 +47,6 @@ class OmbiAPI(object):
 
         self.dbmanager.write_points(influx_payload)
 
-    @logging
     def get_request_counts(self):
         self.now = datetime.now(timezone.utc).astimezone().isoformat()
         endpoint = '/api/v1/Request/count'
diff --git a/varken/radarr.py b/varken/radarr.py
index acc1166..2dddacb 100644
--- a/varken/radarr.py
+++ b/varken/radarr.py
@@ -1,7 +1,6 @@
 from requests import Session, Request
 from datetime import datetime, timezone
 
-from varken.logger import logging
 from varken.helpers import hashit, connection_handler
 from varken.structures import Movie, Queue
 
@@ -15,7 +14,6 @@ class RadarrAPI(object):
         self.session = Session()
         self.session.headers = {'X-Api-Key': self.server.api_key}
 
-    @logging
     def get_missing(self):
         endpoint = '/api/movie'
         self.now = datetime.now(timezone.utc).astimezone().isoformat()
@@ -60,7 +58,6 @@ class RadarrAPI(object):
 
         self.dbmanager.write_points(influx_payload)
 
-    @logging
     def get_queue(self):
         endpoint = '/api/queue'
         self.now = datetime.now(timezone.utc).astimezone().isoformat()
diff --git a/varken/sonarr.py b/varken/sonarr.py
index ecde50e..81a4d38 100644
--- a/varken/sonarr.py
+++ b/varken/sonarr.py
@@ -1,11 +1,9 @@
 from requests import Session, Request
 from datetime import datetime, timezone, date, timedelta
 
-from varken.logger import logging
 from varken.helpers import hashit, connection_handler
 from varken.structures import Queue, TVShow
 
-
 class SonarrAPI(object):
     def __init__(self, server, dbmanager):
         # Set Time of initialization
@@ -18,7 +16,6 @@
         self.session.headers = {'X-Api-Key': self.server.api_key}
         self.session.params = {'pageSize': 1000}
 
-    @logging
     def get_missing(self):
         endpoint = '/api/calendar'
         last_days = str(date.today() + timedelta(days=-self.server.missing_days))
@@ -66,7 +63,6 @@
 
         self.dbmanager.write_points(influx_payload)
 
-    @logging
     def get_future(self):
         endpoint = '/api/calendar/'
         self.now = datetime.now(timezone.utc).astimezone().isoformat()
@@ -115,7 +111,6 @@
 
         self.dbmanager.write_points(influx_payload)
 
-    @logging
     def get_queue(self):
         influx_payload = []
         endpoint = '/api/queue'
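
Note: the `VarkenLogger` class that `Varken.py` now imports is not included in this diff. The module-level `logging.getLogger('Varken')` calls added to `varken/dbmanager.py` and `varken/helpers.py` rely on that class configuring the shared `'Varken'` logger once at startup. A minimal sketch of that wiring follows; only the logger name `'Varken'` and the `.logger` attribute are implied by the diff, while the handler, level, and format below are illustrative assumptions.

import logging


class VarkenLogger(object):
    # Hypothetical sketch -- the real VarkenLogger is not shown in this diff.
    def __init__(self, log_level=logging.INFO):
        # The named logger that dbmanager.py and helpers.py retrieve
        # via logging.getLogger('Varken')
        self.logger = logging.getLogger('Varken')
        self.logger.setLevel(log_level)

        # One console handler shared by every module using the 'Varken' logger
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter(
            '%(asctime)s : %(levelname)s : %(module)s : %(message)s'))
        self.logger.addHandler(handler)

Because each module asks for the same named logger, their `logger.debug(...)` and `logger.info(...)` calls emit through whatever handlers are attached at startup, without passing a logger object between modules.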