major pep8 cleanup
Narrowed the scope of imports to the specific names used where possible (fixes #59), converted string formatting to f-strings where possible, and added logging: a temporary startup logger plus loggers in secondary modules.
parent 47f2f61082 · commit 61fcf3b80c
13 changed files with 131 additions and 121 deletions
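Note on the main pattern in this commit: "funneling" imports means importing just the names a module actually uses instead of whole modules. A minimal before/after sketch (illustrative, not taken from the diff):

    # before: module import, attribute access at each call site
    import logging
    logger = logging.getLogger()

    # after: import only the needed name
    from logging import getLogger
    logger = getLogger()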
.gitignore (vendored) · 1 change

@@ -10,6 +10,5 @@ GeoLite2-City.mmdb
 GeoLite2-City.tar.gz
 data/varken.ini
 .idea/
-Legacy/configuration.py
 varken-venv/
 logs/
Varken.py · 63 changes

@@ -1,36 +1,30 @@
-import sys
-
-# Check for python3.6 or newer to resolve erroneous typing.NamedTuple issues
-if sys.version_info < (3, 6):
-    exit('Varken requires python3.6 or newer')
-
-import schedule
-import threading
 import platform
-import distro
-import os
+import schedule
 
+from sys import exit
 from time import sleep
-from os import access, R_OK
 from sys import version
+from threading import Thread
+from os import access, R_OK, getenv
+from distro import linux_distribution
 from os.path import isdir, abspath, dirname, join
 from argparse import ArgumentParser, RawTextHelpFormatter
+from logging import getLogger, StreamHandler, Formatter, DEBUG
 
-from varken.iniparser import INIParser
-from varken.sonarr import SonarrAPI
-from varken.tautulli import TautulliAPI
-from varken.radarr import RadarrAPI
 from varken.ombi import OmbiAPI
-from varken.sickchill import SickChillAPI
 from varken.cisco import CiscoAPI
+from varken.sonarr import SonarrAPI
+from varken.radarr import RadarrAPI
+from varken.iniparser import INIParser
 from varken.dbmanager import DBManager
+from varken.tautulli import TautulliAPI
+from varken.sickchill import SickChillAPI
 from varken.varkenlogger import VarkenLogger
 
-PLATFORM_LINUX_DISTRO = ' '.join(x for x in distro.linux_distribution() if x)
+PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
 
 
 def threaded(job):
-    thread = threading.Thread(target=job)
+    thread = Thread(target=job)
     thread.start()

@@ -46,23 +40,32 @@ if __name__ == "__main__":
 
    DATA_FOLDER = abspath(join(dirname(__file__), 'data'))
 
+    templogger = getLogger('temp')
+    templogger.setLevel(DEBUG)
+    tempch = StreamHandler()
+    tempformatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
+    tempch.setFormatter(tempformatter)
+    templogger.addHandler(tempch)
+
    if opts.data_folder:
        ARG_FOLDER = opts.data_folder
 
        if isdir(ARG_FOLDER):
            DATA_FOLDER = ARG_FOLDER
-            if not access(ARG_FOLDER, R_OK):
-                exit("Read permission error for {}".format(ARG_FOLDER))
+            if not access(DATA_FOLDER, R_OK):
+                templogger.error("Read permission error for %s", DATA_FOLDER)
+                exit(1)
        else:
-            exit("{} does not exist".format(ARG_FOLDER))
+            templogger.error("%s does not exist", ARG_FOLDER)
+            exit(1)
 
    # Set Debug to True if DEBUG env is set
    enable_opts = ['True', 'true', 'yes']
    debug_opts = ['debug', 'Debug', 'DEBUG']
 
    if not opts.debug:
-        opts.debug = True if any([os.getenv(string, False) for true in enable_opts
-                                  for string in debug_opts if os.getenv(string, False) == true]) else False
+        opts.debug = True if any([getenv(string, False) for true in enable_opts
+                                  for string in debug_opts if getenv(string, False) == true]) else False
 
    # Initiate the logger
    vl = VarkenLogger(data_folder=DATA_FOLDER, debug=opts.debug)

@@ -70,11 +73,10 @@ if __name__ == "__main__":
 
    vl.logger.info('Data folder is "%s"', DATA_FOLDER)
 
-    vl.logger.info(u"{} {} ({}{})".format(
-        platform.system(), platform.release(), platform.version(),
-        ' - {}'.format(PLATFORM_LINUX_DISTRO) if PLATFORM_LINUX_DISTRO else ''
-    ))
-    vl.logger.info(u"Python {}".format(sys.version))
+    vl.logger.info(u"%s %s (%s%s)", platform.system(), platform.release(), platform.version(),
+                   f' - {PLATFORM_LINUX_DISTRO}' if PLATFORM_LINUX_DISTRO else '')
+
+    vl.logger.info(u"Python %s", version)
 
    CONFIG = INIParser(DATA_FOLDER)
    DBMANAGER = DBManager(CONFIG.influx_server)

@@ -126,7 +128,8 @@ if __name__ == "__main__":
    SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled,
                        CONFIG.sonarr_enabled, CONFIG.ciscoasa_enabled, CONFIG.sickchill_enabled]
    if not [enabled for enabled in SERVICES_ENABLED if enabled]:
-        exit("All services disabled. Exiting")
+        vl.logger.error("All services disabled. Exiting")
+        exit(1)
 
    schedule.run_all()
 
    while True:
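Note: the nested comprehension in the DEBUG check above is dense. It sets debug mode when any of the DEBUG/Debug/debug environment variables holds one of the truthy strings. An equivalent, arguably clearer sketch (opts_debug is a stand-in name, not from the diff):

    from os import getenv

    enable_opts = ['True', 'true', 'yes']
    debug_opts = ['debug', 'Debug', 'DEBUG']

    # True when any debug-style env var holds one of the truthy strings
    opts_debug = any(getenv(name, False) in enable_opts for name in debug_opts)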
varken/cisco.py

@@ -1,4 +1,4 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone
 

@@ -13,12 +13,12 @@ class CiscoAPI(object):
        # Create session to reduce server web thread load, and globally define pageSize for all requests
        self.session = Session()
        self.session.auth = (self.firewall.username, self.firewall.password)
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
        self.get_token()
 
    def __repr__(self):
-        return "<ciscoasa-{}>".format(self.firewall.id)
+        return f"<ciscoasa-{self.firewall.id}>"
 
    def get_token(self):
        endpoint = '/api/tokenservices'
varken/dbmanager.py

@@ -1,8 +1,6 @@
-import logging
-
+from logging import getLogger
 from influxdb import InfluxDBClient
 
-logger = logging.getLogger('varken')
 
 class DBManager(object):
    def __init__(self, server):

@@ -10,12 +8,16 @@ class DBManager(object):
        self.influx = InfluxDBClient(self.server.url, self.server.port, self.server.username, self.server.password,
                                     'varken')
        databases = [db['name'] for db in self.influx.get_list_database()]
+        self.logger = getLogger()
 
        if 'varken' not in databases:
+            self.logger.info("Creating varken database")
            self.influx.create_database('varken')
+
+            self.logger.info("Creating varken retention policy (30d/1h)")
            self.influx.create_retention_policy('varken 30d/1h', '30d', '1', 'varken', False, '1h')
 
    def write_points(self, data):
        d = data
-        logger.debug('Writing Data to InfluxDB %s', d)
+        self.logger.debug('Writing Data to InfluxDB %s', d)
        self.influx.write_points(d)
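For reference, the two InfluxDB calls above come from the influxdb-python client; create_retention_policy takes name, duration, replication, database, default, and shard duration. A standalone sketch, assuming a local InfluxDB with default credentials:

    from influxdb import InfluxDBClient

    influx = InfluxDBClient('localhost', 8086, 'root', 'root', 'varken')
    influx.create_database('varken')
    # keep 30 days of data in 1-hour shards, single replica, not the default policy
    influx.create_retention_policy('varken 30d/1h', '30d', '1', 'varken', False, '1h')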
varken/helpers.py

@@ -106,12 +106,12 @@ def connection_handler(session, request, verify):
 
 
 def mkdir_p(path):
    """http://stackoverflow.com/a/600612/190597 (tzot)"""
+    templogger = logging.getLogger('temp')
    try:
-        logger.info('Creating folder %s ', path)
+        templogger.info('Creating folder %s ', path)
        os.makedirs(path, exist_ok=True)
    except Exception as e:
-        logger.error('Could not create folder %s : %s ', path, e)
+        templogger.error('Could not create folder %s : %s ', path, e)
 
 
 def clean_sid_check(server_id_list, server_type=None):

@@ -123,7 +123,7 @@ def clean_sid_check(server_id_list, server_type=None):
        try:
            valid_sids.append(int(sid))
        except ValueError:
-            logger.error("{} is not a valid server id number".format(sid))
+            logger.error("%s is not a valid server id number", sid)
    if valid_sids:
        logger.info('%s : %s', t.upper(), valid_sids)
        return valid_sids
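Note: getLogger('temp') in mkdir_p returns the same logger object that Varken.py configures at startup, because the logging module caches loggers by name; the handler only has to be attached once. A quick sketch:

    import logging

    a = logging.getLogger('temp')
    b = logging.getLogger('temp')
    assert a is b  # same cached instance, so handlers configured elsewhere apply here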
varken/iniparser.py

@@ -1,22 +1,20 @@
-import configparser
-import logging
-import re
-
+from sys import exit
+from logging import getLogger
+from os.path import join, exists
+from re import match, compile, IGNORECASE
+from configparser import ConfigParser, NoOptionError
 
 from varken.helpers import clean_sid_check
+from varken.structures import SickChillServer
 from varken.varkenlogger import BlacklistFilter
 from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer, CiscoASAFirewall
-from varken.structures import SickChillServer
 
 
 class INIParser(object):
    def __init__(self, data_folder):
-        self.config = configparser.ConfigParser(interpolation=None)
+        self.config = ConfigParser(interpolation=None)
        self.data_folder = data_folder
 
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
        self.influx_server = InfluxServer()

@@ -59,7 +57,7 @@ class INIParser(object):
            else:
                sids = clean_sid_check(global_server_ids, t)
            return sids
-        except configparser.NoOptionError as e:
+        except NoOptionError as e:
            self.logger.error(e)
 
    def read_file(self):

@@ -86,11 +84,11 @@ class INIParser(object):
            else:
                search = (search + r'(?:/?|[/?]\S+)$')
 
-            regex = re.compile('{}'.format(search), re.IGNORECASE)
+            regex = compile('{}'.format(search), IGNORECASE)
 
-            print(re.match(regex, url_check))
+            print(match(regex, url_check))
 
-            valid = re.match(regex, url_check) is not None
+            valid = match(regex, url_check) is not None
            if not valid:
                if inc_port:
                    self.logger.error('%s is invalid! URL must host/IP and port if not 80 or 443. ie. localhost:8080',

@@ -151,7 +149,7 @@ class INIParser(object):
                                      queue, queue_run_seconds)
 
                    self.sonarr_servers.append(server)
-                except configparser.NoOptionError as e:
+                except NoOptionError as e:
                    self.sonarr_enabled = False
                    self.logger.error(
                        '%s disabled. Error: %s', section, e)

@@ -185,7 +183,7 @@ class INIParser(object):
                    server = RadarrServer(server_id, scheme + url, apikey, verify_ssl, queue, queue_run_seconds,
                                          get_missing, get_missing_run_seconds)
                    self.radarr_servers.append(server)
-                except configparser.NoOptionError as e:
+                except NoOptionError as e:
                    self.radarr_enabled = False
                    self.logger.error(
                        '%s disabled. Error: %s', section, e)

@@ -217,7 +215,7 @@ class INIParser(object):
                    server = TautulliServer(server_id, scheme + url, fallback_ip, apikey, verify_ssl, get_activity,
                                            get_activity_run_seconds)
                    self.tautulli_servers.append(server)
-                except configparser.NoOptionError as e:
+                except NoOptionError as e:
                    self.tautulli_enabled = False
                    self.logger.error(
                        '%s disabled. Error: %s', section, e)

@@ -251,7 +249,7 @@ class INIParser(object):
                    server = OmbiServer(server_id, scheme + url, apikey, verify_ssl, request_type_counts,
                                        request_type_run_seconds, request_total_counts, request_total_run_seconds)
                    self.ombi_servers.append(server)
-                except configparser.NoOptionError as e:
+                except NoOptionError as e:
                    self.ombi_enabled = False
                    self.logger.error(
                        '%s disabled. Error: %s', section, e)

@@ -281,7 +279,7 @@ class INIParser(object):
                    server = SickChillServer(server_id, scheme + url, apikey, verify_ssl,
                                             get_missing, get_missing_run_seconds)
                    self.sickchill_servers.append(server)
-                except configparser.NoOptionError as e:
+                except NoOptionError as e:
                    self.sickchill_enabled = False
                    self.logger.error(
                        '%s disabled. Error: %s', section, e)

@@ -313,7 +311,7 @@ class INIParser(object):
                    firewall = CiscoASAFirewall(firewall_id, scheme + url, username, password, outside_interface,
                                                verify_ssl, get_bandwidth_run_seconds)
                    self.ciscoasa_firewalls.append(firewall)
-                except configparser.NoOptionError as e:
+                except NoOptionError as e:
                    self.ciscoasa_enabled = False
                    self.logger.error(
                        '%s disabled. Error: %s', section, e)
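Note: from re import match, compile shadows the compile() builtin inside this module, which is worth knowing when reading the URL check. A sketch of the check with a made-up pattern (the real search string is assembled earlier in read_file and is not part of this hunk):

    from re import compile, match, IGNORECASE

    regex = compile(r'^\S+:\d{2,5}(?:/?|[/?]\S+)$', IGNORECASE)  # hypothetical pattern
    print(match(regex, 'localhost:8080') is not None)  # True
    print(match(regex, 'localhost') is not None)       # False, port required by this pattern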
varken/ombi.py

@@ -1,4 +1,4 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone
 

@@ -13,10 +13,10 @@ class OmbiAPI(object):
        # Create session to reduce server web thread load, and globally define pageSize for all requests
        self.session = Session()
        self.session.headers = {'Apikey': self.server.api_key}
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
    def __repr__(self):
-        return "<ombi-{}>".format(self.server.id)
+        return f"<ombi-{self.server.id}>"
 
    def get_all_requests(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()

@@ -94,14 +94,16 @@ class OmbiAPI(object):
 
        for show in tv_show_requests:
            hash_id = hashit(f'{show.id}{show.tvDbId}{show.title}')
-            status = None
-            # Denied = 0, Approved = 1, Completed = 2
+
+            # Denied = 0, Approved = 1, Completed = 2, Pending = 3
            if show.childRequests[0]['denied']:
                status = 0
            elif show.childRequests[0]['approved'] and show.childRequests[0]['available']:
                status = 2
            elif show.childRequests[0]['approved']:
                status = 1
+            else:
+                status = 3
 
            influx_payload.append(
                {

@@ -122,7 +124,6 @@ class OmbiAPI(object):
                }
            )
 
-
        self.dbmanager.write_points(influx_payload)
 
    def get_request_counts(self):
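Note: the new else branch gives pending requests an explicit status instead of leaving status unset. The mapping, condensed into a sketch (request_status and the literal dict are illustrative, not from the diff):

    def request_status(child):
        # Denied = 0, Approved = 1, Completed = 2, Pending = 3
        if child['denied']:
            return 0
        if child['approved'] and child['available']:
            return 2
        if child['approved']:
            return 1
        return 3

    print(request_status({'denied': False, 'approved': False, 'available': False}))  # 3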
varken/radarr.py

@@ -1,9 +1,9 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone
 
-from varken.helpers import hashit, connection_handler
 from varken.structures import Movie, Queue
+from varken.helpers import hashit, connection_handler
 
 
 class RadarrAPI(object):

@@ -13,10 +13,10 @@ class RadarrAPI(object):
        # Create session to reduce server web thread load, and globally define pageSize for all requests
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
    def __repr__(self):
-        return "<radarr-{}>".format(self.server.id)
+        return f"<radarr-{self.server.id}>"
 
    def get_missing(self):
        endpoint = '/api/movie'

@@ -43,11 +43,11 @@ class RadarrAPI(object):
            else:
                ma = 1
 
-            movie_name = '{} ({})'.format(movie.title, movie.year)
+            movie_name = f'{movie.title} ({movie.year})'
            missing.append((movie_name, ma, movie.tmdbId, movie.titleSlug))
 
        for title, ma, mid, title_slug in missing:
-            hash_id = hashit('{}{}{}'.format(self.server.id, title, mid))
+            hash_id = hashit(f'{self.server.id}{title}{mid}')
            influx_payload.append(
                {
                    "measurement": "Radarr",

@@ -96,7 +96,7 @@ class RadarrAPI(object):
        for queue_item in download_queue:
            movie = queue_item.movie
 
-            name = '{} ({})'.format(movie.title, movie.year)
+            name = f'{movie.title} ({movie.year})'
 
            if queue_item.protocol.upper() == 'USENET':
                protocol_id = 1

@@ -107,7 +107,7 @@ class RadarrAPI(object):
                          protocol_id, queue_item.id, movie.titleSlug))
 
        for name, quality, protocol, protocol_id, qid, title_slug in queue:
-            hash_id = hashit('{}{}{}'.format(self.server.id, name, quality))
+            hash_id = hashit(f'{self.server.id}{name}{quality}')
            influx_payload.append(
                {
                    "measurement": "Radarr",
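Note: hashit lives in varken/helpers.py and is not shown in this diff; it reduces the concatenated id/title key to a stable hash for the InfluxDB payload. A sketch of the idea, assuming a simple digest-based helper (illustrative stand-in, not the actual implementation):

    from hashlib import md5

    def hashit(string):
        # stand-in for varken.helpers.hashit: any stable digest works here
        return md5(string.encode()).hexdigest()

    print(hashit(f"{1}Some Movie (2018){550}"))  # stable key for the payload's hash field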
varken/sickchill.py

@@ -1,9 +1,9 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
-from datetime import datetime, timezone, date, timedelta
+from datetime import datetime, timezone
 
-from varken.helpers import hashit, connection_handler
 from varken.structures import SickChillTVShow
+from varken.helpers import hashit, connection_handler
 
 
 class SickChillAPI(object):

@@ -14,10 +14,10 @@ class SickChillAPI(object):
        self.session = Session()
        self.session.params = {'limit': 1000}
        self.endpoint = f"/api/{self.server.api_key}"
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
    def __repr__(self):
-        return "<sickchill-{}>".format(self.server.id)
+        return f"<sickchill-{self.server.id}>"
 
    def get_missing(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()

@@ -39,8 +39,8 @@ class SickChillAPI(object):
 
        for key, section in get['data'].items():
            for show in section:
-                sxe = 'S{:0>2}E{:0>2}'.format(show.season, show.episode)
-                hash_id = hashit('{}{}{}'.format(self.server.id, show.show_name, sxe))
+                sxe = f'S{show.season:0>2}E{show.episode:0>2}'
+                hash_id = hashit(f'{self.server.id}{show.show_name}{sxe}')
                missing_types = [(0, 'future'), (1, 'later'), (2, 'soon'), (3, 'today'), (4, 'missed')]
                influx_payload.append(
                    {
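Note: f-strings accept the same format specs as str.format, so the zero-padded episode codes here are unchanged by the conversion:

    season, episode = 3, 7
    print('S{:0>2}E{:0>2}'.format(season, episode))  # S03E07
    print(f'S{season:0>2}E{episode:0>2}')            # S03E07, identical output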
varken/sonarr.py

@@ -1,9 +1,9 @@
-import logging
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone, date, timedelta
 
-from varken.helpers import hashit, connection_handler
 from varken.structures import Queue, TVShow
+from varken.helpers import hashit, connection_handler
 
 
 class SonarrAPI(object):

@@ -14,10 +14,10 @@ class SonarrAPI(object):
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
        self.session.params = {'pageSize': 1000}
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
 
    def __repr__(self):
-        return "<sonarr-{}>".format(self.server.id)
+        return f"<sonarr-{self.server.id}>"
 
    def get_missing(self):
        endpoint = '/api/calendar'

@@ -44,11 +44,11 @@ class SonarrAPI(object):
        # Add show to missing list if file does not exist
        for show in tv_shows:
            if not show.hasFile:
-                sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
+                sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
                missing.append((show.series['title'], sxe, show.airDateUtc, show.title, show.id))
 
        for series_title, sxe, air_date_utc, episode_title, sonarr_id in missing:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
+            hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
            influx_payload.append(
                {
                    "measurement": "Sonarr",

@@ -93,7 +93,7 @@ class SonarrAPI(object):
            return
 
        for show in tv_shows:
-            sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
+            sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
            if show.hasFile:
                downloaded = 1
            else:

@@ -101,7 +101,7 @@ class SonarrAPI(object):
            air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
 
        for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in air_days:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
+            hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
            influx_payload.append(
                {
                    "measurement": "Sonarr",

@@ -143,7 +143,7 @@ class SonarrAPI(object):
            return
 
        for show in download_queue:
-            sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber'])
+            sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}"
            if show.protocol.upper() == 'USENET':
                protocol_id = 1
            else:

@@ -153,7 +153,7 @@ class SonarrAPI(object):
                          protocol_id, sxe, show.id))
 
        for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
-            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
+            hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
            influx_payload.append(
                {
                    "measurement": "Sonarr",
varken/structures.py

@@ -1,4 +1,13 @@
+from sys import version_info
 from typing import NamedTuple
+from logging import getLogger
+
+logger = getLogger('temp')
+# Check for python3.6 or newer to resolve erroneous typing.NamedTuple issues
+if version_info < (3, 6):
+    logger.error('Varken requires python3.6 or newer. You are on python%s.%s - Exiting...',
+                  version_info.major, version_info.minor)
+    exit(1)
 
 
 class Queue(NamedTuple):

@@ -89,6 +98,7 @@ class CiscoASAFirewall(NamedTuple):
    verify_ssl: bool = False
    get_bandwidth_run_seconds: int = 30
 
+
 class OmbiRequestCounts(NamedTuple):
    pending: int = 0
    approved: int = 0

@@ -348,6 +358,7 @@ class Movie(NamedTuple):
    website: str = None
    id: int = None
 
+
 class OmbiMovieRequest(NamedTuple):
    theMovieDbId: int = None
    issueId: None = None

@@ -380,6 +391,7 @@ class OmbiMovieRequest(NamedTuple):
    canApprove: bool = None
    id: int = None
 
+
 class OmbiTVRequest(NamedTuple):
    tvDbId: int = None
    imdbId: str = None

@@ -395,6 +407,7 @@ class OmbiTVRequest(NamedTuple):
    childRequests: list = None
    id: int = None
 
+
 class SickChillTVShow(NamedTuple):
    airdate: str = None
    airs: str = None

@@ -409,4 +422,4 @@ class SickChillTVShow(NamedTuple):
    show_name: str = None
    show_status: str = None
    tvdbid: int = None
-    weekday: int = None
+    weekday: int = None
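Note: the version gate exists because the class syntax for typing.NamedTuple with annotations and defaults, used throughout this file, requires python 3.6+. A minimal sketch (shortened version of the real class):

    from typing import NamedTuple

    class SickChillTVShow(NamedTuple):
        show_name: str = None
        tvdbid: int = None

    print(SickChillTVShow())  # SickChillTVShow(show_name=None, tvdbid=None)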
varken/tautulli.py

@@ -1,12 +1,10 @@
-import os
-import logging
-
+from logging import getLogger
 from requests import Session, Request
 from datetime import datetime, timezone
 from geoip2.errors import AddressNotFoundError
 
-from varken.helpers import geo_lookup, hashit, connection_handler
 from varken.structures import TautulliStream
+from varken.helpers import geo_lookup, hashit, connection_handler
 
 
 class TautulliAPI(object):

@@ -16,11 +14,11 @@ class TautulliAPI(object):
        self.session = Session()
        self.session.params = {'apikey': self.server.api_key, 'cmd': 'get_activity'}
        self.endpoint = '/api/v2'
-        self.logger = logging.getLogger()
+        self.logger = getLogger()
        self.data_folder = data_folder
 
    def __repr__(self):
-        return "<tautulli-{}>".format(self.server.id)
+        return f"<tautulli-{self.server.id}>"
 
    def get_activity(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()

@@ -41,12 +39,12 @@ class TautulliAPI(object):
            return
 
        for session in sessions:
-            # Check to see if ip_address_public atribute exists as it was introduced in v2
+            # Check to see if ip_address_public attribute exists as it was introduced in v2
            try:
                getattr(session, 'ip_address_public')
            except AttributeError:
                self.logger.error('Public IP attribute missing!!! Do you have an old version of Tautulli (v1)?')
-                os._exit(1)
+                exit(1)
 
            try:
                geodata = geo_lookup(session.ip_address_public, self.data_folder)

@@ -94,8 +92,7 @@ class TautulliAPI(object):
        if session.platform == 'Roku':
            product_version = session.product_version.split('-')[0]
 
-        hash_id = hashit('{}{}{}{}'.format(session.session_id, session.session_key, session.username,
-                                           session.full_title))
+        hash_id = hashit(f'{session.session_id}{session.session_key}{session.username}{session.full_title}')
        influx_payload.append(
            {
                "measurement": "Tautulli",

@@ -118,8 +115,7 @@ class TautulliAPI(object):
                "progress_percent": session.progress_percent,
                "region_code": geodata.subdivisions.most_specific.iso_code,
                "location": geodata.city.name,
-                "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name,
-                                                  geodata.city.name),
+                "full_location": f'{geodata.subdivisions.most_specific.name} - {geodata.city.name}',
                "latitude": latitude,
                "longitude": longitude,
                "player_state": player_state,
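Note: swapping os._exit(1) for exit(1) changes shutdown behavior. os._exit kills the process immediately, while exit raises SystemExit, so finally blocks and atexit hooks still run:

    import sys

    try:
        sys.exit(1)  # what exit(1) effectively does
    except SystemExit:
        print('SystemExit is catchable; os._exit(1) would have skipped this line')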
varken/varkenlogger.py

@@ -1,10 +1,10 @@
-import logging
-
 from logging.handlers import RotatingFileHandler
+from logging import Filter, DEBUG, INFO, getLogger, Formatter, StreamHandler
 
 from varken.helpers import mkdir_p
 
 
-class BlacklistFilter(logging.Filter):
+class BlacklistFilter(Filter):
    """
    Log filter for blacklisted tokens and passwords
    """

@@ -13,7 +13,7 @@ class BlacklistFilter(logging.Filter):
    max_files = 5
    log_folder = 'logs'
 
-    blacklisted_strings = ['apikey', 'username', 'password']
+    blacklisted_strings = ['apikey', 'username', 'password', 'url']
 
    def __init__(self, filteredstrings):
        super().__init__()

@@ -23,9 +23,9 @@ class BlacklistFilter(logging.Filter):
        for item in self.filtered_strings:
            try:
                if item in record.msg:
-                    record.msg = record.msg.replace(item, 8 * '*' + item[-2:])
+                    record.msg = record.msg.replace(item, 8 * '*' + item[-5:])
                if any(item in str(arg) for arg in record.args):
-                    record.args = tuple(arg.replace(item, 8 * '*' + item[-2:]) if isinstance(arg, str) else arg
+                    record.args = tuple(arg.replace(item, 8 * '*' + item[-5:]) if isinstance(arg, str) else arg
                                        for arg in record.args)
            except TypeError:
                pass

@@ -39,27 +39,25 @@ class VarkenLogger(object):
 
        # Set log level
        if self.log_level:
-            self.log_level = logging.DEBUG
+            self.log_level = DEBUG
 
        else:
-            self.log_level = logging.INFO
+            self.log_level = INFO
 
        # Make the log directory if it does not exist
        mkdir_p('{}/{}'.format(self.data_folder, BlacklistFilter.log_folder))
 
        # Create the Logger
-        self.logger = logging.getLogger()
-        self.logger.setLevel(logging.DEBUG)
+        self.logger = getLogger()
+        self.logger.setLevel(DEBUG)
 
        # Create a Formatter for formatting the log messages
-        logger_formatter = logging.Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s',
-                                             '%Y-%m-%d %H:%M:%S')
+        logger_formatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
 
        # Create the Handler for logging data to a file
-        file_logger = RotatingFileHandler('{}/{}/{}'.format(self.data_folder, BlacklistFilter.log_folder,
-                                                            BlacklistFilter.filename), mode='a',
-                                          maxBytes=BlacklistFilter.max_size, backupCount=BlacklistFilter.max_files,
-                                          encoding=None, delay=0)
+        file_logger = RotatingFileHandler(f'{self.data_folder}/{BlacklistFilter.log_folder}/{BlacklistFilter.filename}',
+                                          mode='a', maxBytes=BlacklistFilter.max_size, encoding=None, delay=0,
+                                          backupCount=BlacklistFilter.max_files)
 
        file_logger.setLevel(self.log_level)

@@ -67,7 +65,7 @@ class VarkenLogger(object):
        file_logger.setFormatter(logger_formatter)
 
        # Add the console logger
-        console_logger = logging.StreamHandler()
+        console_logger = StreamHandler()
        console_logger.setFormatter(logger_formatter)
        console_logger.setLevel(self.log_level)
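Note: the longer [-5:] suffix keeps the last five characters of a masked secret, making two masked values distinguishable in a shared log. The replacement expression itself:

    secret = 'd41d8cd98f00b204e9800998ecf8427e'
    print(8 * '*' + secret[-2:])  # ********7e (old behavior)
    print(8 * '*' + secret[-5:])  # ********8427e (new behavior)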