Fixed clean_check of server_ids, fixed the under-indented radarr get_movie, and added __repr__ methods for cleaner logging.
This commit is contained in:
Nicholas St. Germain 2018-12-04 21:17:33 -06:00
parent 87fea6db06
commit 18a5fdacba
8 changed files with 50 additions and 17 deletions

View file

@ -1,7 +1,7 @@
# Varken # Varken
Dutch for PIG. PIG is an Acronym for Plex/InfluxDB/Grafana Dutch for PIG. PIG is an Acronym for Plex/InfluxDB/Grafana
Varken is a standalone command-line utility to aggregate data varken is a standalone command-line utility to aggregate data
from the Plex ecosystem into InfluxDB. Examples use Grafana for a from the Plex ecosystem into InfluxDB. Examples use Grafana for a
frontend frontend
@ -12,14 +12,14 @@ Requirements /w install links: [Grafana](http://docs.grafana.org/installation/),
</p> </p>
## Quick Setup (Varken Alpha) ## Quick Setup (Varken Alpha)
1. Clone the repository `sudo git clone https://github.com/DirtyCajunRice/grafana-scripts.git /opt/Varken` 1. Clone the repository `sudo git clone https://github.com/DirtyCajunRice/grafana-scripts.git /opt/varken`
1. Follow the systemd install instructions located in `varken.systemd` 1. Follow the systemd install instructions located in `varken.systemd`
1. Create venv in project `/usr/bin/python3 -m venv varken-venv` 1. Create venv in project `/usr/bin/python3 -m venv varken-venv`
1. Install requirements `/opt/Varken/varken-venv/bin/python -m pip install -r requirements.txt` 1. Install requirements `/opt/varken/varken-venv/bin/python -m pip install -r requirements.txt`
1. Make a copy of `varken.example.ini` to `varken.ini` in the `data` folder 1. Make a copy of `varken.example.ini` to `varken.ini` in the `data` folder
`cp /opt/Varken/data/varken.example.ini /opt/Varken/data/varken.ini` `cp /opt/varken/data/varken.example.ini /opt/varken/data/varken.ini`
1. Make the appropriate changes to `varken.ini` 1. Make the appropriate changes to `varken.ini`
ie.`nano /opt/Varken/data/varken.ini` ie.`nano /opt/varken/data/varken.ini`
1. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb. 1. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb.
1. Install `grafana-cli plugins install grafana-worldmap-panel` 1. Install `grafana-cli plugins install grafana-worldmap-panel`
1. TODO:: Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like. 1. TODO:: Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like.

View file

@ -2,7 +2,7 @@ import logging
from influxdb import InfluxDBClient from influxdb import InfluxDBClient
logger = logging.getLogger('Varken') logger = logging.getLogger('varken')
class DBManager(object): class DBManager(object):
def __init__(self, server): def __init__(self, server):
@ -16,5 +16,6 @@ class DBManager(object):
self.influx.create_retention_policy('varken 30d/1h', '30d', '1', 'varken', False, '1h') self.influx.create_retention_policy('varken 30d/1h', '30d', '1', 'varken', False, '1h')
def write_points(self, data): def write_points(self, data):
logger.debug('Writing Data to InfluxDB {}'.format(data)) d = data
self.influx.write_points(data) logger.debug('Writing Data to InfluxDB {}'.format(d))
self.influx.write_points(d)

View file

@ -4,13 +4,14 @@ import tarfile
import hashlib import hashlib
import geoip2.database import geoip2.database
import logging import logging
from functools import update_wrapper
from json.decoder import JSONDecodeError from json.decoder import JSONDecodeError
from os.path import abspath, join from os.path import abspath, join
from requests.exceptions import InvalidSchema, SSLError from requests.exceptions import InvalidSchema, SSLError
from urllib.request import urlretrieve from urllib.request import urlretrieve
logger = logging.getLogger('Varken') logger = logging.getLogger('varken')
def geoip_download(): def geoip_download():
tar_dbfile = abspath(join('.', 'data', 'GeoLite2-City.tar.gz')) tar_dbfile = abspath(join('.', 'data', 'GeoLite2-City.tar.gz'))

View file

@ -32,14 +32,33 @@ class INIParser(object):
self.parse_opts() self.parse_opts()
def enable_check(self, type=None): def enable_check(self, server_type=None):
global_server_ids = self.config.get('global', type) t = server_type
global_server_ids = self.config.get('global', t)
if global_server_ids.lower() in ['false', 'no', '0']: if global_server_ids.lower() in ['false', 'no', '0']:
logger.info('{} disabled.'.format(type.upper())) logger.info('{} disabled.'.format(t.upper()))
return False return False
else: else:
logger.info('{} : ({})'.format(type.upper(), global_server_ids)) sids = self.clean_check(global_server_ids, t)
return global_server_ids return sids
def clean_check(self, server_id_list, server_type=None):
    """Parse a comma-separated string of server ids into integers.

    Entries that do not parse as integers are logged and dropped.
    Returns the list of valid ids, or False when none were valid.
    """
    valid_sids = []
    for candidate in server_id_list.replace(' ', '').split(','):
        try:
            valid_sids.append(int(candidate))
        except ValueError:
            # Skip the bad entry but keep checking the rest.
            logger.error("{} is not a valid server id number".format(candidate))

    if not valid_sids:
        logger.error("No valid {}".format(server_type.upper()))
        return False

    logger.info('{} : {}'.format(server_type.upper(), valid_sids))
    return valid_sids
def read_file(self): def read_file(self):
file_path = join(self.data_folder, 'varken.ini') file_path = join(self.data_folder, 'varken.ini')

View file

@ -14,6 +14,9 @@ class OmbiAPI(object):
self.session = Session() self.session = Session()
self.session.headers = {'Apikey': self.server.api_key} self.session.headers = {'Apikey': self.server.api_key}
def __repr__(self):
return "<ombi-{}>".format(self.server.id)
def get_total_requests(self): def get_total_requests(self):
self.now = datetime.now(timezone.utc).astimezone().isoformat() self.now = datetime.now(timezone.utc).astimezone().isoformat()
tv_endpoint = '/api/v1/Request/tv' tv_endpoint = '/api/v1/Request/tv'

View file

@ -14,6 +14,9 @@ class RadarrAPI(object):
self.session = Session() self.session = Session()
self.session.headers = {'X-Api-Key': self.server.api_key} self.session.headers = {'X-Api-Key': self.server.api_key}
def __repr__(self):
return "<radarr-{}>".format(self.server.id)
def get_missing(self): def get_missing(self):
endpoint = '/api/movie' endpoint = '/api/movie'
self.now = datetime.now(timezone.utc).astimezone().isoformat() self.now = datetime.now(timezone.utc).astimezone().isoformat()
@ -29,8 +32,8 @@ class RadarrAPI(object):
movies = [Movie(**movie) for movie in get] movies = [Movie(**movie) for movie in get]
for movie in movies: for movie in movies:
if self.server.get_missing: if not movie.downloaded:
if not movie.downloaded and movie.isAvailable: if movie.isAvailable:
ma = True ma = True
else: else:
ma = False ma = False

View file

@ -16,6 +16,9 @@ class SonarrAPI(object):
self.session.headers = {'X-Api-Key': self.server.api_key} self.session.headers = {'X-Api-Key': self.server.api_key}
self.session.params = {'pageSize': 1000} self.session.params = {'pageSize': 1000}
def __repr__(self):
return "<sonarr-{}>".format(self.server.id)
def get_missing(self): def get_missing(self):
endpoint = '/api/calendar' endpoint = '/api/calendar'
last_days = str(date.today() + timedelta(days=-self.server.missing_days)) last_days = str(date.today() + timedelta(days=-self.server.missing_days))

View file

@ -19,6 +19,9 @@ class TautulliAPI(object):
self.session.params['apikey'] = self.server.api_key self.session.params['apikey'] = self.server.api_key
self.endpoint = '/api/v2' self.endpoint = '/api/v2'
def __repr__(self):
return "<tautulli-{}>".format(self.server.id)
def get_activity(self): def get_activity(self):
self.now = datetime.now(timezone.utc).astimezone().isoformat() self.now = datetime.now(timezone.utc).astimezone().isoformat()
params = {'cmd': 'get_activity'} params = {'cmd': 'get_activity'}