Major PEP8 cleanup.
Funneled the scope of imports where possible (fixes #59), converted string formatting to f-strings where possible, and added logging: a temporary bootstrap logger plus log calls in secondary places.
Parent: 47f2f61082
Commit: 61fcf3b80c
13 changed files with 131 additions and 121 deletions
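
The "funneled scope of imports" theme below swaps whole-module imports for targeted from-imports, so each file binds only the names it uses. A minimal sketch of the pattern, with module names taken from the diffs that follow:

    # Before: import the module, reference names through it
    import logging
    import os
    logger = logging.getLogger()
    debug = os.getenv('DEBUG', False)

    # After: import only the needed names
    from logging import getLogger
    from os import getenv
    logger = getLogger()
    debug = getenv('DEBUG', False)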
.gitignore (vendored, 1 change)

@@ -10,6 +10,5 @@ GeoLite2-City.mmdb
 GeoLite2-City.tar.gz
 data/varken.ini
 .idea/
-Legacy/configuration.py
 varken-venv/
 logs/
Varken.py (63 changes)

@@ -1,36 +1,30 @@
-import sys
-
-# Check for python3.6 or newer to resolve erroneous typing.NamedTuple issues
-if sys.version_info < (3, 6):
-    exit('Varken requires python3.6 or newer')
-
-import schedule
-import threading
 import platform
-import distro
-import os
+import schedule
 
-from sys import exit
 from time import sleep
-from os import access, R_OK
+from sys import version
+from threading import Thread
+from os import access, R_OK, getenv
+from distro import linux_distribution
 from os.path import isdir, abspath, dirname, join
 from argparse import ArgumentParser, RawTextHelpFormatter
+from logging import getLogger, StreamHandler, Formatter, DEBUG
 
-from varken.iniparser import INIParser
-from varken.sonarr import SonarrAPI
-from varken.tautulli import TautulliAPI
-from varken.radarr import RadarrAPI
 from varken.ombi import OmbiAPI
-from varken.sickchill import SickChillAPI
 from varken.cisco import CiscoAPI
+from varken.sonarr import SonarrAPI
+from varken.radarr import RadarrAPI
+from varken.iniparser import INIParser
 from varken.dbmanager import DBManager
+from varken.tautulli import TautulliAPI
+from varken.sickchill import SickChillAPI
 from varken.varkenlogger import VarkenLogger
 
-PLATFORM_LINUX_DISTRO = ' '.join(x for x in distro.linux_distribution() if x)
+PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
 
 
 def threaded(job):
-    thread = threading.Thread(target=job)
+    thread = Thread(target=job)
     thread.start()
@@ -46,23 +40,32 @@ if __name__ == "__main__":
     DATA_FOLDER = abspath(join(dirname(__file__), 'data'))
 
+    templogger = getLogger('temp')
+    templogger.setLevel(DEBUG)
+    tempch = StreamHandler()
+    tempformatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
+    tempch.setFormatter(tempformatter)
+    templogger.addHandler(tempch)
+
     if opts.data_folder:
         ARG_FOLDER = opts.data_folder
 
         if isdir(ARG_FOLDER):
             DATA_FOLDER = ARG_FOLDER
-            if not access(ARG_FOLDER, R_OK):
-                exit("Read permission error for {}".format(ARG_FOLDER))
+            if not access(DATA_FOLDER, R_OK):
+                templogger.error("Read permission error for %s", DATA_FOLDER)
+                exit(1)
         else:
-            exit("{} does not exist".format(ARG_FOLDER))
+            templogger.error("%s does not exist", ARG_FOLDER)
+            exit(1)
 
     # Set Debug to True if DEBUG env is set
     enable_opts = ['True', 'true', 'yes']
     debug_opts = ['debug', 'Debug', 'DEBUG']
 
     if not opts.debug:
-        opts.debug = True if any([os.getenv(string, False) for true in enable_opts
-                                  for string in debug_opts if os.getenv(string, False) == true]) else False
+        opts.debug = True if any([getenv(string, False) for true in enable_opts
+                                  for string in debug_opts if getenv(string, False) == true]) else False
 
     # Initiate the logger
     vl = VarkenLogger(data_folder=DATA_FOLDER, debug=opts.debug)
@@ -70,11 +73,10 @@ if __name__ == "__main__":
     vl.logger.info('Data folder is "%s"', DATA_FOLDER)
 
-    vl.logger.info(u"{} {} ({}{})".format(
-        platform.system(), platform.release(), platform.version(),
-        ' - {}'.format(PLATFORM_LINUX_DISTRO) if PLATFORM_LINUX_DISTRO else ''
-    ))
-    vl.logger.info(u"Python {}".format(sys.version))
+    vl.logger.info(u"%s %s (%s%s)", platform.system(), platform.release(), platform.version(),
+                   f' - {PLATFORM_LINUX_DISTRO}' if PLATFORM_LINUX_DISTRO else '')
+    vl.logger.info(u"Python %s", version)
 
     CONFIG = INIParser(DATA_FOLDER)
     DBMANAGER = DBManager(CONFIG.influx_server)
@@ -126,7 +128,8 @@ if __name__ == "__main__":
     SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled,
                         CONFIG.sonarr_enabled, CONFIG.ciscoasa_enabled, CONFIG.sickchill_enabled]
     if not [enabled for enabled in SERVICES_ENABLED if enabled]:
-        exit("All services disabled. Exiting")
+        vl.logger.error("All services disabled. Exiting")
+        exit(1)
     schedule.run_all()
 
     while True:
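The new templogger block exists because argument and permission errors can occur before VarkenLogger is constructed; a throwaway console logger keeps those early failures visible. The same bootstrap pattern in isolation:

    from logging import getLogger, StreamHandler, Formatter, DEBUG

    # Temporary console-only logger, used until the file-backed logger exists
    templogger = getLogger('temp')
    templogger.setLevel(DEBUG)
    tempch = StreamHandler()
    tempformatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
    tempch.setFormatter(tempformatter)
    templogger.addHandler(tempch)

    templogger.error("Read permission error for %s", "/path/to/data")  # placeholder path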
varken/cisco.py

@@ -1,4 +1,4 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone
 
@@ -13,12 +13,12 @@ class CiscoAPI(object):
         # Create session to reduce server web thread load, and globally define pageSize for all requests
         self.session = Session()
         self.session.auth = (self.firewall.username, self.firewall.password)
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
         self.get_token()
 
     def __repr__(self):
-        return "<ciscoasa-{}>".format(self.firewall.id)
+        return f"<ciscoasa-{self.firewall.id}>"
 
     def get_token(self):
         endpoint = '/api/tokenservices'
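The __repr__ change is the commit's typical .format-to-f-string conversion. A simplified sketch (the firewall object is flattened to a bare id here):

    class CiscoAPI:
        def __init__(self, firewall_id):
            self.id = firewall_id

        def __repr__(self):
            # f-string equivalent of "<ciscoasa-{}>".format(self.id)
            return f"<ciscoasa-{self.id}>"

    print(repr(CiscoAPI(1)))  # <ciscoasa-1>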
varken/dbmanager.py

@@ -1,8 +1,6 @@
-import logging
+from logging import getLogger
 
 from influxdb import InfluxDBClient
 
-logger = logging.getLogger('varken')
-
 
 class DBManager(object):
     def __init__(self, server):
@@ -10,12 +8,16 @@ class DBManager(object):
         self.influx = InfluxDBClient(self.server.url, self.server.port, self.server.username, self.server.password,
                                      'varken')
         databases = [db['name'] for db in self.influx.get_list_database()]
+        self.logger = getLogger()
 
         if 'varken' not in databases:
+            self.logger.info("Creating varken database")
             self.influx.create_database('varken')
 
+            self.logger.info("Creating varken retention policy (30d/1h)")
             self.influx.create_retention_policy('varken 30d/1h', '30d', '1', 'varken', False, '1h')
 
     def write_points(self, data):
         d = data
-        logger.debug('Writing Data to InfluxDB %s', d)
+        self.logger.debug('Writing Data to InfluxDB %s', d)
         self.influx.write_points(d)
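write_points hands a list of point dicts straight to influxdb-python's write_points. A hedged usage sketch; the connection values and the sample point are placeholders, not taken from this commit:

    from influxdb import InfluxDBClient

    influx = InfluxDBClient('localhost', 8086, 'root', 'root', 'varken')  # placeholder credentials
    influx.write_points([{
        'measurement': 'Radarr',              # the API classes append points of this shape
        'tags': {'server': 1},
        'time': '2018-12-09T00:00:00Z',
        'fields': {'name': 'Example Movie (2018)'}
    }])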
varken/helpers.py

@@ -106,12 +106,12 @@ def connection_handler(session, request, verify):
 
 
 def mkdir_p(path):
-    """http://stackoverflow.com/a/600612/190597 (tzot)"""
+    templogger = logging.getLogger('temp')
     try:
-        logger.info('Creating folder %s ', path)
+        templogger.info('Creating folder %s ', path)
        os.makedirs(path, exist_ok=True)
     except Exception as e:
-        logger.error('Could not create folder %s : %s ', path, e)
+        templogger.error('Could not create folder %s : %s ', path, e)
 
 
 def clean_sid_check(server_id_list, server_type=None):
@@ -123,7 +123,7 @@ def clean_sid_check(server_id_list, server_type=None):
         try:
             valid_sids.append(int(sid))
         except ValueError:
-            logger.error("{} is not a valid server id number".format(sid))
+            logger.error("%s is not a valid server id number", sid)
     if valid_sids:
         logger.info('%s : %s', t.upper(), valid_sids)
         return valid_sids
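clean_sid_check coerces each configured server id to int and logs the rest; for an input like '1,2,x' it would log an error for 'x' and return [1, 2]. A re-implementation of the visible logic, assuming the ids arrive as a comma-separated string:

    from logging import getLogger

    logger = getLogger()

    def clean_sid_check(server_id_list, server_type=None):
        t = server_type
        valid_sids = []
        for sid in server_id_list.split(','):  # assumed input format
            try:
                valid_sids.append(int(sid))
            except ValueError:
                logger.error("%s is not a valid server id number", sid)
        if valid_sids:
            logger.info('%s : %s', t.upper(), valid_sids)
            return valid_sids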
varken/iniparser.py

@@ -1,22 +1,20 @@
-import configparser
-import logging
-import re
-
-from sys import exit
+from logging import getLogger
 from os.path import join, exists
+from re import match, compile, IGNORECASE
+from configparser import ConfigParser, NoOptionError
 
 from varken.helpers import clean_sid_check
+from varken.structures import SickChillServer
 from varken.varkenlogger import BlacklistFilter
 from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer, CiscoASAFirewall
-from varken.structures import SickChillServer
 
 
 class INIParser(object):
     def __init__(self, data_folder):
-        self.config = configparser.ConfigParser(interpolation=None)
+        self.config = ConfigParser(interpolation=None)
         self.data_folder = data_folder
 
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
         self.influx_server = InfluxServer()
 
@@ -59,7 +57,7 @@ class INIParser(object):
             else:
                 sids = clean_sid_check(global_server_ids, t)
             return sids
-        except configparser.NoOptionError as e:
+        except NoOptionError as e:
             self.logger.error(e)
 
     def read_file(self):
@@ -86,11 +84,11 @@ class INIParser(object):
         else:
             search = (search + r'(?:/?|[/?]\S+)$')
 
-        regex = re.compile('{}'.format(search), re.IGNORECASE)
+        regex = compile('{}'.format(search), IGNORECASE)
 
-        print(re.match(regex, url_check))
+        print(match(regex, url_check))
 
-        valid = re.match(regex, url_check) is not None
+        valid = match(regex, url_check) is not None
         if not valid:
             if inc_port:
                 self.logger.error('%s is invalid! URL must host/IP and port if not 80 or 443. ie. localhost:8080',
@@ -151,7 +149,7 @@ class INIParser(object):
                                       queue, queue_run_seconds)
 
                 self.sonarr_servers.append(server)
-            except configparser.NoOptionError as e:
+            except NoOptionError as e:
                 self.sonarr_enabled = False
                 self.logger.error(
                     '%s disabled. Error: %s', section, e)
@@ -185,7 +183,7 @@ class INIParser(object):
                 server = RadarrServer(server_id, scheme + url, apikey, verify_ssl, queue, queue_run_seconds,
                                       get_missing, get_missing_run_seconds)
                 self.radarr_servers.append(server)
-            except configparser.NoOptionError as e:
+            except NoOptionError as e:
                 self.radarr_enabled = False
                 self.logger.error(
                     '%s disabled. Error: %s', section, e)
@@ -217,7 +215,7 @@ class INIParser(object):
                 server = TautulliServer(server_id, scheme + url, fallback_ip, apikey, verify_ssl, get_activity,
                                         get_activity_run_seconds)
                 self.tautulli_servers.append(server)
-            except configparser.NoOptionError as e:
+            except NoOptionError as e:
                 self.tautulli_enabled = False
                 self.logger.error(
                     '%s disabled. Error: %s', section, e)
@@ -251,7 +249,7 @@ class INIParser(object):
                 server = OmbiServer(server_id, scheme + url, apikey, verify_ssl, request_type_counts,
                                     request_type_run_seconds, request_total_counts, request_total_run_seconds)
                 self.ombi_servers.append(server)
-            except configparser.NoOptionError as e:
+            except NoOptionError as e:
                 self.ombi_enabled = False
                 self.logger.error(
                     '%s disabled. Error: %s', section, e)
@@ -281,7 +279,7 @@ class INIParser(object):
                 server = SickChillServer(server_id, scheme + url, apikey, verify_ssl,
                                          get_missing, get_missing_run_seconds)
                 self.sickchill_servers.append(server)
-            except configparser.NoOptionError as e:
+            except NoOptionError as e:
                 self.sickchill_enabled = False
                 self.logger.error(
                     '%s disabled. Error: %s', section, e)
@@ -313,7 +311,7 @@ class INIParser(object):
                 firewall = CiscoASAFirewall(firewall_id, scheme + url, username, password, outside_interface,
                                             verify_ssl, get_bandwidth_run_seconds)
                 self.ciscoasa_firewalls.append(firewall)
-            except configparser.NoOptionError as e:
+            except NoOptionError as e:
                 self.ciscoasa_enabled = False
                 self.logger.error(
                     '%s disabled. Error: %s', section, e)
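The URL check appends (?:/?|[/?]\S+)$ to a host pattern and matches case-insensitively. A sketch of the behavior; the host/port prefix is an assumption, since the hunk only shows the suffix:

    from re import match, compile, IGNORECASE

    search = r'(\w+\.)?\w+(\.\w+)?(:\d+)?' + r'(?:/?|[/?]\S+)$'  # prefix is assumed
    regex = compile('{}'.format(search), IGNORECASE)

    print(match(regex, 'localhost:8080'))  # match object -> valid
    print(match(regex, 'not a url'))       # None -> invalid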
varken/ombi.py

@@ -1,4 +1,4 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone
 
@@ -13,10 +13,10 @@ class OmbiAPI(object):
         # Create session to reduce server web thread load, and globally define pageSize for all requests
         self.session = Session()
         self.session.headers = {'Apikey': self.server.api_key}
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
     def __repr__(self):
-        return "<ombi-{}>".format(self.server.id)
+        return f"<ombi-{self.server.id}>"
 
     def get_all_requests(self):
         now = datetime.now(timezone.utc).astimezone().isoformat()
@@ -94,14 +94,16 @@ class OmbiAPI(object):
 
         for show in tv_show_requests:
             hash_id = hashit(f'{show.id}{show.tvDbId}{show.title}')
-            status = None
-            # Denied = 0, Approved = 1, Completed = 2
+            # Denied = 0, Approved = 1, Completed = 2, Pending = 3
             if show.childRequests[0]['denied']:
                 status = 0
             elif show.childRequests[0]['approved'] and show.childRequests[0]['available']:
                 status = 2
             elif show.childRequests[0]['approved']:
                 status = 1
+            else:
+                status = 3
 
             influx_payload.append(
                 {
@@ -122,7 +124,6 @@ class OmbiAPI(object):
                 }
             )
 
-
         self.dbmanager.write_points(influx_payload)
 
     def get_request_counts(self):
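The added else branch closes a gap: a request that was neither denied nor approved previously left status as None. The full mapping as a standalone function, with child shaped like childRequests[0]:

    def request_status(child):
        # Denied = 0, Approved = 1, Completed = 2, Pending = 3
        if child['denied']:
            return 0
        elif child['approved'] and child['available']:
            return 2
        elif child['approved']:
            return 1
        else:
            return 3

    print(request_status({'denied': False, 'approved': False, 'available': False}))  # 3 (pending)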
varken/radarr.py

@@ -1,9 +1,9 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone
 
-from varken.helpers import hashit, connection_handler
 from varken.structures import Movie, Queue
+from varken.helpers import hashit, connection_handler
 
 
 class RadarrAPI(object):
@@ -13,10 +13,10 @@ class RadarrAPI(object):
         # Create session to reduce server web thread load, and globally define pageSize for all requests
         self.session = Session()
         self.session.headers = {'X-Api-Key': self.server.api_key}
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
     def __repr__(self):
-        return "<radarr-{}>".format(self.server.id)
+        return f"<radarr-{self.server.id}>"
 
     def get_missing(self):
         endpoint = '/api/movie'
@@ -43,11 +43,11 @@ class RadarrAPI(object):
             else:
                 ma = 1
 
-            movie_name = '{} ({})'.format(movie.title, movie.year)
+            movie_name = f'{movie.title} ({movie.year})'
             missing.append((movie_name, ma, movie.tmdbId, movie.titleSlug))
 
         for title, ma, mid, title_slug in missing:
-            hash_id = hashit('{}{}{}'.format(self.server.id, title, mid))
+            hash_id = hashit(f'{self.server.id}{title}{mid}')
             influx_payload.append(
                 {
                     "measurement": "Radarr",
@@ -96,7 +96,7 @@ class RadarrAPI(object):
         for queue_item in download_queue:
             movie = queue_item.movie
 
-            name = '{} ({})'.format(movie.title, movie.year)
+            name = f'{movie.title} ({movie.year})'
 
             if queue_item.protocol.upper() == 'USENET':
                 protocol_id = 1
@@ -107,7 +107,7 @@ class RadarrAPI(object):
                           protocol_id, queue_item.id, movie.titleSlug))
 
         for name, quality, protocol, protocol_id, qid, title_slug in queue:
-            hash_id = hashit('{}{}{}'.format(self.server.id, name, quality))
+            hash_id = hashit(f'{self.server.id}{name}{quality}')
             influx_payload.append(
                 {
                     "measurement": "Radarr",
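movie_name is a one-to-one translation from str.format to an f-string; both render identically:

    class Movie:
        title, year = 'Example Movie', 2018  # stand-in for the Movie NamedTuple

    movie = Movie()
    print('{} ({})'.format(movie.title, movie.year))  # Example Movie (2018)
    print(f'{movie.title} ({movie.year})')            # Example Movie (2018)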
varken/sickchill.py

@@ -1,9 +1,9 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
-from datetime import datetime, timezone, date, timedelta
+from datetime import datetime, timezone
 
-from varken.helpers import hashit, connection_handler
 from varken.structures import SickChillTVShow
+from varken.helpers import hashit, connection_handler
 
 
 class SickChillAPI(object):
@@ -14,10 +14,10 @@ class SickChillAPI(object):
         self.session = Session()
         self.session.params = {'limit': 1000}
         self.endpoint = f"/api/{self.server.api_key}"
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
     def __repr__(self):
-        return "<sickchill-{}>".format(self.server.id)
+        return f"<sickchill-{self.server.id}>"
 
     def get_missing(self):
         now = datetime.now(timezone.utc).astimezone().isoformat()
@@ -39,8 +39,8 @@ class SickChillAPI(object):
 
         for key, section in get['data'].items():
             for show in section:
-                sxe = 'S{:0>2}E{:0>2}'.format(show.season, show.episode)
-                hash_id = hashit('{}{}{}'.format(self.server.id, show.show_name, sxe))
+                sxe = f'S{show.season:0>2}E{show.episode:0>2}'
+                hash_id = hashit(f'{self.server.id}{show.show_name}{sxe}')
                 missing_types = [(0, 'future'), (1, 'later'), (2, 'soon'), (3, 'today'), (4, 'missed')]
                 influx_payload.append(
                     {
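The S__E__ label leans on the format spec {:0>2} (right-align, pad with zeros to width 2), which behaves the same inside an f-string:

    season, episode = 1, 5
    print('S{:0>2}E{:0>2}'.format(season, episode))  # S01E05
    print(f'S{season:0>2}E{episode:0>2}')            # S01E05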
varken/sonarr.py

@@ -1,9 +1,9 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone, date, timedelta
 
-from varken.helpers import hashit, connection_handler
 from varken.structures import Queue, TVShow
+from varken.helpers import hashit, connection_handler
 
 
 class SonarrAPI(object):
@@ -14,10 +14,10 @@ class SonarrAPI(object):
         self.session = Session()
         self.session.headers = {'X-Api-Key': self.server.api_key}
         self.session.params = {'pageSize': 1000}
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
     def __repr__(self):
-        return "<sonarr-{}>".format(self.server.id)
+        return f"<sonarr-{self.server.id}>"
 
     def get_missing(self):
         endpoint = '/api/calendar'
@@ -44,11 +44,11 @@ class SonarrAPI(object):
         # Add show to missing list if file does not exist
         for show in tv_shows:
             if not show.hasFile:
-                sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
+                sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
                 missing.append((show.series['title'], sxe, show.airDateUtc, show.title, show.id))
 
         for series_title, sxe, air_date_utc, episode_title, sonarr_id in missing:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
+            hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
             influx_payload.append(
                 {
                     "measurement": "Sonarr",
@@ -93,7 +93,7 @@ class SonarrAPI(object):
             return
 
         for show in tv_shows:
-            sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
+            sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
             if show.hasFile:
                 downloaded = 1
             else:
@@ -101,7 +101,7 @@ class SonarrAPI(object):
             air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
 
         for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in air_days:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
+            hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
             influx_payload.append(
                 {
                     "measurement": "Sonarr",
@@ -143,7 +143,7 @@ class SonarrAPI(object):
             return
 
         for show in download_queue:
-            sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber'])
+            sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}"
             if show.protocol.upper() == 'USENET':
                 protocol_id = 1
             else:
@@ -153,7 +153,7 @@ class SonarrAPI(object):
                           protocol_id, sxe, show.id))
 
         for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
+            hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
             influx_payload.append(
                 {
                     "measurement": "Sonarr",
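The calendar scan reduces to a filter on hasFile. A toy version with dict-shaped shows (the real code iterates TVShow tuples and also carries air dates and ids):

    shows = [
        {'series': 'Example Show', 'seasonNumber': 1, 'episodeNumber': 5, 'hasFile': False},
        {'series': 'Example Show', 'seasonNumber': 1, 'episodeNumber': 6, 'hasFile': True},
    ]

    # Add show to missing list if file does not exist
    missing = [(s['series'], f"S{s['seasonNumber']:0>2}E{s['episodeNumber']:0>2}")
               for s in shows if not s['hasFile']]
    print(missing)  # [('Example Show', 'S01E05')]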
varken/structures.py

@@ -1,4 +1,13 @@
+from sys import version_info
 from typing import NamedTuple
+from logging import getLogger
+
+logger = getLogger('temp')
+
+# Check for python3.6 or newer to resolve erroneous typing.NamedTuple issues
+if version_info < (3, 6):
+    logger.error('Varken requires python3.6 or newer. You are on python%s.%s - Exiting...',
+                 version_info.major, version_info.minor)
+    exit(1)
 
 
 class Queue(NamedTuple):
@@ -89,6 +98,7 @@ class CiscoASAFirewall(NamedTuple):
     verify_ssl: bool = False
     get_bandwidth_run_seconds: int = 30
 
+
 class OmbiRequestCounts(NamedTuple):
     pending: int = 0
     approved: int = 0
@@ -348,6 +358,7 @@ class Movie(NamedTuple):
     website: str = None
     id: int = None
 
+
 class OmbiMovieRequest(NamedTuple):
     theMovieDbId: int = None
     issueId: None = None
@@ -380,6 +391,7 @@ class OmbiMovieRequest(NamedTuple):
     canApprove: bool = None
     id: int = None
 
+
 class OmbiTVRequest(NamedTuple):
     tvDbId: int = None
     imdbId: str = None
@@ -395,6 +407,7 @@ class OmbiTVRequest(NamedTuple):
     childRequests: list = None
     id: int = None
 
+
 class SickChillTVShow(NamedTuple):
     airdate: str = None
     airs: str = None
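Moving the interpreter check into structures.py puts it next to the style it guards: NamedTuple classes with annotated, defaulted fields, a 3.6+ feature. A condensed sketch of the guard plus the guarded syntax:

    from sys import version_info
    from typing import NamedTuple

    if version_info < (3, 6):
        exit('requires python 3.6 or newer')

    class Movie(NamedTuple):
        # per-field annotations with defaults: 3.6+ only
        title: str = None
        year: int = None

    print(Movie())  # Movie(title=None, year=None)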
varken/tautulli.py

@@ -1,12 +1,10 @@
-import os
-import logging
+from logging import getLogger
 
 from requests import Session, Request
 from datetime import datetime, timezone
 from geoip2.errors import AddressNotFoundError
 
-from varken.helpers import geo_lookup, hashit, connection_handler
 from varken.structures import TautulliStream
+from varken.helpers import geo_lookup, hashit, connection_handler
 
 
 class TautulliAPI(object):
@@ -16,11 +14,11 @@ class TautulliAPI(object):
         self.session = Session()
         self.session.params = {'apikey': self.server.api_key, 'cmd': 'get_activity'}
         self.endpoint = '/api/v2'
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
         self.data_folder = data_folder
 
     def __repr__(self):
-        return "<tautulli-{}>".format(self.server.id)
+        return f"<tautulli-{self.server.id}>"
 
     def get_activity(self):
         now = datetime.now(timezone.utc).astimezone().isoformat()
@@ -41,12 +39,12 @@ class TautulliAPI(object):
             return
 
         for session in sessions:
-            # Check to see if ip_address_public atribute exists as it was introduced in v2
+            # Check to see if ip_address_public attribute exists as it was introduced in v2
             try:
                 getattr(session, 'ip_address_public')
             except AttributeError:
                 self.logger.error('Public IP attribute missing!!! Do you have an old version of Tautulli (v1)?')
-                os._exit(1)
+                exit(1)
 
             try:
                 geodata = geo_lookup(session.ip_address_public, self.data_folder)
@@ -94,8 +92,7 @@ class TautulliAPI(object):
         if session.platform == 'Roku':
             product_version = session.product_version.split('-')[0]
 
-        hash_id = hashit('{}{}{}{}'.format(session.session_id, session.session_key, session.username,
-                                           session.full_title))
+        hash_id = hashit(f'{session.session_id}{session.session_key}{session.username}{session.full_title}')
         influx_payload.append(
             {
                 "measurement": "Tautulli",
@@ -118,8 +115,7 @@ class TautulliAPI(object):
                 "progress_percent": session.progress_percent,
                 "region_code": geodata.subdivisions.most_specific.iso_code,
                 "location": geodata.city.name,
-                "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name,
-                                                  geodata.city.name),
+                "full_location": f'{geodata.subdivisions.most_specific.name} - {geodata.city.name}',
                 "latitude": latitude,
                 "longitude": longitude,
                 "player_state": player_state,
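hashit is imported from varken.helpers; its body is not in this diff. It turns the concatenated identifying fields into a stable id for the InfluxDB point. A hedged stand-in, assuming a simple hexdigest (the real helper may use a different digest):

    from hashlib import sha256

    def hashit(string):
        # assumption: some cryptographic hexdigest of the key string
        return sha256(string.encode()).hexdigest()

    print(hashit('session123key456alice' + 'Example Title'))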
varken/varkenlogger.py

@@ -1,10 +1,10 @@
-import logging
-
 from logging.handlers import RotatingFileHandler
+from logging import Filter, DEBUG, INFO, getLogger, Formatter, StreamHandler
 
 from varken.helpers import mkdir_p
 
 
-class BlacklistFilter(logging.Filter):
+class BlacklistFilter(Filter):
     """
     Log filter for blacklisted tokens and passwords
     """
@@ -13,7 +13,7 @@ class BlacklistFilter(logging.Filter):
     max_files = 5
     log_folder = 'logs'
 
-    blacklisted_strings = ['apikey', 'username', 'password']
+    blacklisted_strings = ['apikey', 'username', 'password', 'url']
 
     def __init__(self, filteredstrings):
         super().__init__()
@@ -23,9 +23,9 @@ class BlacklistFilter(logging.Filter):
         for item in self.filtered_strings:
             try:
                 if item in record.msg:
-                    record.msg = record.msg.replace(item, 8 * '*' + item[-2:])
+                    record.msg = record.msg.replace(item, 8 * '*' + item[-5:])
                 if any(item in str(arg) for arg in record.args):
-                    record.args = tuple(arg.replace(item, 8 * '*' + item[-2:]) if isinstance(arg, str) else arg
+                    record.args = tuple(arg.replace(item, 8 * '*' + item[-5:]) if isinstance(arg, str) else arg
                                         for arg in record.args)
             except TypeError:
                 pass
@@ -39,27 +39,25 @@ class VarkenLogger(object):
 
         # Set log level
         if self.log_level:
-            self.log_level = logging.DEBUG
+            self.log_level = DEBUG
         else:
-            self.log_level = logging.INFO
+            self.log_level = INFO
 
         # Make the log directory if it does not exist
         mkdir_p('{}/{}'.format(self.data_folder, BlacklistFilter.log_folder))
 
         # Create the Logger
-        self.logger = logging.getLogger()
-        self.logger.setLevel(logging.DEBUG)
+        self.logger = getLogger()
+        self.logger.setLevel(DEBUG)
 
         # Create a Formatter for formatting the log messages
-        logger_formatter = logging.Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s',
-                                             '%Y-%m-%d %H:%M:%S')
+        logger_formatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
 
         # Create the Handler for logging data to a file
-        file_logger = RotatingFileHandler('{}/{}/{}'.format(self.data_folder, BlacklistFilter.log_folder,
-                                          BlacklistFilter.filename), mode='a',
-                                          maxBytes=BlacklistFilter.max_size, backupCount=BlacklistFilter.max_files,
-                                          encoding=None, delay=0)
+        file_logger = RotatingFileHandler(f'{self.data_folder}/{BlacklistFilter.log_folder}/{BlacklistFilter.filename}',
+                                          mode='a', maxBytes=BlacklistFilter.max_size, encoding=None, delay=0,
+                                          backupCount=BlacklistFilter.max_files)
 
         file_logger.setLevel(self.log_level)
 
@@ -67,7 +65,7 @@ class VarkenLogger(object):
         file_logger.setFormatter(logger_formatter)
 
         # Add the console logger
-        console_logger = logging.StreamHandler()
+        console_logger = StreamHandler()
         console_logger.setFormatter(logger_formatter)
         console_logger.setLevel(self.log_level)
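Widening the slice from item[-2:] to item[-5:] keeps five trailing characters of each masked secret, so redacted log lines stay distinguishable without exposing the value. The replacement expression in isolation:

    secret = 'abcdef1234567890'
    msg = 'connecting with apikey %s' % secret

    # same masking as BlacklistFilter.filter()
    print(msg.replace(secret, 8 * '*' + secret[-5:]))  # connecting with apikey ********67890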