* #203
* Update docker compose to specify influxdb:1.8.4
* Update requirements to use urllib3==1.26.5
* Updated to support Radarr and Sonarr V3 API
* Bump requirements for requests
* Fix Sonarr & Radarr V3 API /queue endpoint (#220)
* Fix lint issues
* More lint fixes
* Update Sonarr structures
* Add Overseerr support (#210)
* Remove duplicate structures
* Update changelog to reflect v1.7.7 changes
* Add IP data to Tautulli #202
* Add missing IP address in Tautulli
* Fixed: Streamlined API calls to Radarr and Sonarr (#221)
* Fixed: Sonarr data pull issues (#222)
* Fix Sonarr calendar
* Update Lidarr structure (#225): added missing arguments to Lidarr structure. Fixes #223
* Clean up request totals. Upstream change sct/overseerr#2426
* Clean up blank space
* Fix requested_date syntax
* Fix requested_date for Overseerr TV and movie
* Fix Overseerr config references
* Fix Overseerr structures
* Update iniparser to accommodate changes to config structure
* Clean up Overseerr data collection
* Fix SERVICES_ENABLED in varken.py to accommodate Overseerr
* Fixed: Sonarr/Lidarr queues (#227)
* Change Sonarr queue structures to str
* Fixed: Multipage queue fetching
* Update historical Tautulli import (#226)
* Fixed: Sonarr params ordering
* Fixed: Proper warnings for missing data in Sonarr and Radarr
* Added: Overseerr ENVs to docker compose
* Added: Logging for empty/no-data returns
* Update Sonarr & Lidarr structs to match latest API changes (#231)
* Add support for estimatedCompletionTime in LidarrQueue
* Add support for tvdbId in SonarrEpisode struct
* Fix typo in docker yml
* Rename example URL for Overseerr in docker yml
* Update Radarr structures to include originalLanguage
* Update Radarr structures to include addOptions
* Update Radarr structures to include popularity
* fix(ombi): Update structures.py (#238)
* feat(docker): remove envs from example
* fix(logging): remove deprecation warning; var for debug mode (#240)
* fix(build): bump schedule version to 1.1
* fix(build): bump docker python version
* fix(dep): update requests and urllib3
* fix(sonarr): ensure invalid sonarr queue items are just skipped over - fixes #239 (#243)
* Add branch to build inputs
* Update pipeline badge
* Update automation
* Add InfluxDB 2 client
* Add structure for InfluxDB 2 params; this contains all the data needed for connecting and writing to an InfluxDB 2 server
* Parse InfluxDB 2 config data
* Add InfluxDB 2 manager class; this stores the data needed for InfluxDB 2 and exposes a single `write_points` function that takes an array of points to add to the database
* Use the correct DB manager for Varken
* Add InfluxDB 2 to the example Varken config file
* Create influx bucket if it doesn't exist
* Update InfluxDB type on README
* Clean up linting errors
* Wrap create bucket in try/catch
* Use bucket given in ini file
* Log exception to troubleshoot errors
* Allow configured influx2 address as URL (no port)
* Bypass validity check to troubleshoot

---------

Co-authored-by: mal5305 <malcolm.e.rogers@gmail.com>
Co-authored-by: samwiseg0 <2241731+samwiseg0@users.noreply.github.com>
Co-authored-by: Robin <19610103+RobinDadswell@users.noreply.github.com>
Co-authored-by: tigattack <10629864+tigattack@users.noreply.github.com>
Co-authored-by: Stewart Thomson <stewartthomson3@gmail.com>
Co-authored-by: Cameron Stephen <mail@cajs.co.uk>
Co-authored-by: MDHMatt <10845262+MDHMatt@users.noreply.github.com>
Co-authored-by: Nathan Adams <dinnerbone@dinnerbone.com>
Co-authored-by: Nicholas St. Germain <nick@cajun.pro>
Co-authored-by: Gabe Revells <gcrevell@mtu.edu>
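The InfluxDB 2 changes above boil down to a small manager class: it holds the connection settings parsed from the ini file, creates the configured bucket if it does not already exist (wrapped in try/except so failures are logged rather than fatal), and exposes a single `write_points` call that takes an array of points. Below is a minimal sketch of that shape, assuming the official `influxdb-client` package; the class name, the attributes on `server` (`url`, `org`, `token`, `bucket`, `verify_ssl`), and the logger usage are illustrative, not Varken's actual implementation.

```python
from logging import getLogger

from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS


class InfluxDB2Manager:  # illustrative name, not Varken's real class
    def __init__(self, server):
        # `server` is assumed to be the parsed [influx2] config section:
        # url, org, token, bucket, verify_ssl.
        self.server = server
        self.logger = getLogger()
        self.influx = InfluxDBClient(url=self.server.url, token=self.server.token,
                                     org=self.server.org, verify_ssl=self.server.verify_ssl)
        self.write_api = self.influx.write_api(write_options=SYNCHRONOUS)

        # Create the configured bucket if it doesn't already exist, wrapped in
        # try/except so the exception is logged for troubleshooting instead of crashing.
        try:
            buckets_api = self.influx.buckets_api()
            if buckets_api.find_bucket_by_name(self.server.bucket) is None:
                buckets_api.create_bucket(bucket_name=self.server.bucket, org=self.server.org)
        except Exception as e:
            self.logger.error('Unable to verify/create bucket %s: %s', self.server.bucket, e)

    def write_points(self, data):
        # Single entry point: write an array of points to the configured bucket.
        self.write_api.write(bucket=self.server.bucket, record=data)
```

Keeping `write_points` as the only public entry point lets the rest of Varken stay agnostic about which InfluxDB generation it is writing to, which is what the "Use the correct DB manager for Varken" step above relies on.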
#!/usr/bin/env python3
from argparse import ArgumentParser
from os import access, R_OK
from os.path import isdir, abspath, dirname, join
from logging import getLogger, StreamHandler, Formatter, DEBUG

from varken.iniparser import INIParser
from varken.dbmanager import DBManager
from varken.helpers import GeoIPHandler
from varken.tautulli import TautulliAPI

if __name__ == "__main__":
    parser = ArgumentParser(prog='varken',
                            description='Tautulli historical import tool')
    parser.add_argument("-d", "--data-folder", help='Define an alternate data folder location')
    parser.add_argument("-D", "--days", default=30, type=int, help='Specify length of historical import')
    opts = parser.parse_args()

    DATA_FOLDER = abspath(join(dirname(__file__), '..', 'data'))

    templogger = getLogger('temp')
    templogger.setLevel(DEBUG)
    tempch = StreamHandler()
    tempformatter = Formatter('%(asctime)s : %(levelname)s : %(module)s : %(message)s', '%Y-%m-%d %H:%M:%S')
    tempch.setFormatter(tempformatter)
    templogger.addHandler(tempch)

    if opts.data_folder:
        ARG_FOLDER = opts.data_folder

        if isdir(ARG_FOLDER):
            DATA_FOLDER = ARG_FOLDER
            if not access(DATA_FOLDER, R_OK):
                templogger.error("Read permission error for %s", DATA_FOLDER)
                exit(1)
        else:
            templogger.error("%s does not exist", ARG_FOLDER)
            exit(1)

    CONFIG = INIParser(DATA_FOLDER)
    DBMANAGER = DBManager(CONFIG.influx_server)

    if CONFIG.tautulli_enabled:
        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key)
        for server in CONFIG.tautulli_servers:
            TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
            TAUTULLI.get_historical(days=opts.days)
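To run a historical import with this utility (filename assumed here, since it is not shown on this page), it would be invoked as e.g. `python3 historical_tautulli_import.py -d /opt/varken/data -D 90` to pull the last 90 days using the config in `/opt/varken/data`; without `-d` it falls back to the `data` folder next to the script's parent directory, and without `-D` it imports the default 30 days.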