folder restructure, dbmanager placeholder, iniparser file fullpath, and updated example

This commit is contained in:
Nicholas St. Germain 2018-11-29 12:55:13 -06:00
parent 3eb91d5352
commit e89a2a75a8
8 changed files with 0 additions and 0 deletions

34
Legacy/cisco_asa.py Normal file
View file

@ -0,0 +1,34 @@
# Do not edit this script. Edit configuration.py
import requests
from datetime import datetime, timezone
from influxdb import InfluxDBClient
from Varken import configuration

# One ISO-8601 timestamp (local offset) shared by every point in this run.
timestamp = datetime.now(timezone.utc).astimezone().isoformat()

# Authenticate against the ASA REST API; the session token comes back in a
# response header, not the body.
token_response = requests.post('{}/api/tokenservices'.format(configuration.asa_url),
                               auth=(configuration.asa_username, configuration.asa_password),
                               verify=False)
auth_headers = {'X-Auth-Token': token_response.headers['X-Auth-Token']}

# Pull the current counters for the outside interface.
outside_interface = requests.get('{}/api/monitoring/device/interfaces/Outside'.format(configuration.asa_url),
                                 headers=auth_headers, verify=False).json()

influx_payload = [
    {
        "measurement": "bandwidth",
        "tags": {
            "interface": "outside"
        },
        "time": timestamp,
        "fields": {
            "upload_bitrate": outside_interface['outputBitRate'],
            "download_bitrate": outside_interface['inputBitRate']
        }
    }
]

influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port,
                        configuration.influxdb_username, configuration.influxdb_password,
                        configuration.asa_influxdb_db_name)
influx.write_points(influx_payload)

View file

@ -0,0 +1,49 @@
'''
Notes:
- Domains should be either http(s)://subdomain.domain.com or http(s)://domain.com/url_suffix
- The Sonarr and Radarr scripts support multiple servers. You can remove the second
  server by putting a # in front of the line.
- tautulli_failback_ip is used when there is no IP listed in tautulli.
  This can happen when you are streaming locally. This is usually your public IP.
'''
########################### INFLUXDB CONFIG ###########################
# Hostname of the InfluxDB server (no scheme) and its HTTP API port.
influxdb_url = 'influxdb.domain.tld'
influxdb_port = 8086
# Leave blank if InfluxDB authentication is disabled.
influxdb_username = ''
influxdb_password = ''
############################ SONARR CONFIG ############################
# Each entry is (server URL, API key, server id tag used in Influx points).
sonarr_server_list = [
    ('https://sonarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'),
    ('https://sonarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'),
    #('https://sonarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3')
    ]
sonarr_influxdb_db_name = 'plex'
############################ RADARR CONFIG ############################
# Same (URL, API key, server id) tuple format as the Sonarr list above.
radarr_server_list = [
    ('https://radarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'),
    ('https://radarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'),
    #('https://radarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3')
    ]
radarr_influxdb_db_name = 'plex'
############################ OMBI CONFIG ##############################
ombi_url = 'https://ombi.domain.tld'
ombi_api_key = 'xxxxxxxxxxxxxxx'
ombi_influxdb_db_name = 'plex'
########################## TAUTULLI CONFIG ############################
tautulli_url = 'https://tautulli.domain.tld'
tautulli_api_key = 'xxxxxxxxxxxxxxx'
# IP used when a session has no public IP (e.g. local streams); see Notes.
tautulli_failback_ip = ''
tautulli_influxdb_db_name = 'plex'
########################## FIREWALL CONFIG ############################
# Cisco ASA REST API endpoint and credentials (used by cisco_asa.py).
asa_url = 'https://firewall.domain.tld'
asa_username = 'cisco'
asa_password = 'cisco'
asa_influxdb_db_name = 'asa'

11
Legacy/crontabs Normal file
View file

@ -0,0 +1,11 @@
### Modify paths as appropriate. python3 is located in different places for different users. (`which python3` will give you the path)
### to edit your crontab entry, do not modify /var/spool/cron/crontabs/<user> directly, use `crontab -e`
### Crontabs require an empty line at the end or they WILL not run. Make sure to have 2 lines to be safe
###
* * * * * /usr/bin/python3 /path-to-grafana-scripts/ombi.py
* * * * * ( sleep 30 ; /usr/bin/python3 /path-to-grafana-scripts/ombi.py )
* * * * * /usr/bin/python3 /path-to-grafana-scripts/tautulli.py
* * * * * ( sleep 30 ; /usr/bin/python3 /path-to-grafana-scripts/tautulli.py )
*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py
*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py
#*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sickrage.py

87
Legacy/ombi.py Normal file
View file

@ -0,0 +1,87 @@
# Do not edit this script. Edit configuration.py
import sys
import requests
from datetime import datetime, timezone
from influxdb import InfluxDBClient
import argparse
from argparse import RawTextHelpFormatter
from Varken import configuration
# Ombi authenticates via an 'Apikey' request header on every call.
headers = {'Apikey': configuration.ombi_api_key}
def now_iso():
    """Return the current local time as an ISO-8601 string with UTC offset."""
    return datetime.now(timezone.utc).astimezone().isoformat()
def influx_sender(influx_payload):
    """Write *influx_payload* (a list of point dicts) to the Ombi database."""
    client = InfluxDBClient(configuration.influxdb_url,
                            configuration.influxdb_port,
                            configuration.influxdb_username,
                            configuration.influxdb_password,
                            configuration.ombi_influxdb_db_name)
    client.write_points(influx_payload)
def get_total_requests():
    """Query Ombi for all TV and movie requests and return one influx point
    with the combined total.

    Returns:
        list[dict]: single-element payload for measurement "Ombi",
        tagged type=Request_Total.
    """
    get_tv_requests = requests.get('{}/api/v1/Request/tv'.format(configuration.ombi_url),
                                   headers=headers).json()
    get_movie_requests = requests.get('{}/api/v1/Request/movie'.format(configuration.ombi_url),
                                      headers=headers).json()

    # len() replaces the original manual `+= 1` counting loops.
    total = len(get_tv_requests) + len(get_movie_requests)

    influx_payload = [
        {
            "measurement": "Ombi",
            "tags": {
                "type": "Request_Total"
            },
            "time": now_iso(),
            "fields": {
                "total": total
            }
        }
    ]
    return influx_payload
def get_request_counts():
    """Fetch pending/approved/available request counts from Ombi and return
    them as a one-point influx payload."""
    counts = requests.get('{}/api/v1/Request/count'.format(configuration.ombi_url),
                          headers=headers).json()
    point = {
        "measurement": "Ombi",
        "tags": {
            "type": "Request_Counts"
        },
        "time": now_iso(),
        "fields": {
            "pending": int(counts['pending']),
            "approved": int(counts['approved']),
            "available": int(counts['available'])
        }
    }
    return [point]
if __name__ == "__main__":
    # Build the CLI: each flag selects one gather-and-send operation.
    parser = argparse.ArgumentParser(
        prog='Ombi stats operations',
        description='Script to aid in data gathering from Ombi',
        formatter_class=RawTextHelpFormatter)
    parser.add_argument("--total", action='store_true',
                        help='Get the total count of all requests')
    parser.add_argument("--counts", action='store_true',
                        help='Get the count of pending, approved, and available requests')
    args = parser.parse_args()

    if args.total:
        influx_sender(get_total_requests())
    elif args.counts:
        influx_sender(get_request_counts())
    elif len(sys.argv) == 1:
        # No flags given: print usage to stderr and exit non-zero.
        parser.print_help(sys.stderr)
        sys.exit(1)

171
Legacy/radarr.py Normal file
View file

@ -0,0 +1,171 @@
# Do not edit this script. Edit configuration.py
import sys
import requests
from datetime import datetime, timezone
from influxdb import InfluxDBClient
import argparse
from argparse import RawTextHelpFormatter
from Varken import configuration
def now_iso():
    """Return the current local time as an ISO-8601 string with UTC offset."""
    return datetime.now(timezone.utc).astimezone().isoformat()
def influx_sender(influx_payload):
    """Write *influx_payload* (a list of point dicts) to the Radarr database."""
    client = InfluxDBClient(configuration.influxdb_url,
                            configuration.influxdb_port,
                            configuration.influxdb_username,
                            configuration.influxdb_password,
                            configuration.radarr_influxdb_db_name)
    client.write_points(influx_payload)
def get_missing_movies():
    """Collect movies Radarr knows about but has not downloaded.

    Returns:
        list[dict]: one influx point per missing movie per configured server,
        tagged type=Missing.
    """
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    influx_payload = []

    for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
        headers = {'X-Api-Key': radarr_api_key}
        get_movies = requests.get('{}/api/movie'.format(radarr_url), headers=headers).json()
        # Key by tmdbId; this also de-duplicates, matching the original behavior.
        movies = {d['tmdbId']: d for d in get_movies}

        # Single pass: the intermediate `missing` list (and the builtin-shadowing
        # `id` loop variable) from the original are unnecessary.
        for movie in movies.values():
            if movie['downloaded']:
                continue
            movie_name = '{} ({})'.format(movie['title'], movie['year'])
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Missing",
                        "tmdbId": movie['tmdbId'],
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": movie_name
                    }
                }
            )
    return influx_payload
def get_missing_avl():
    """Collect movies that are released (available) yet still not downloaded.

    Returns:
        list[dict]: one influx point per movie per configured server,
        tagged type=Missing_Available.
    """
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    influx_payload = []

    for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
        headers = {'X-Api-Key': radarr_api_key}
        get_movies = requests.get('{}/api/movie'.format(radarr_url), headers=headers).json()
        # Key by tmdbId; this also de-duplicates, matching the original behavior.
        movies = {d['tmdbId']: d for d in get_movies}

        # Single pass: the intermediate `missing` list (and the builtin-shadowing
        # `id` loop variable) from the original are unnecessary.
        for movie in movies.values():
            if movie['downloaded'] or movie['isAvailable'] is not True:
                continue
            movie_name = '{} ({})'.format(movie['title'], movie['year'])
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Missing_Available",
                        "tmdbId": movie['tmdbId'],
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": movie_name,
                    }
                }
            )
    return influx_payload
def get_queue_movies():
    """Report Radarr's current download queue.

    Bug fix: the original collected ``quality``/``protocol``/``protocol_id``
    in a first loop and emitted points in a second loop, so every point got
    the values of the *last* queued movie. Each point now carries its own
    movie's data.

    Returns:
        list[dict]: one influx point per queued movie per configured server,
        tagged type=Queue.
    """
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    influx_payload = []

    for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
        headers = {'X-Api-Key': radarr_api_key}
        get_movies = requests.get('{}/api/queue'.format(radarr_url), headers=headers).json()
        # Key by queue item id; de-duplicates as the original did.
        queue_movies = {d['id']: d for d in get_movies}

        for item in queue_movies.values():
            name = '{} ({})'.format(item['movie']['title'], item['movie']['year'])
            quality = item['quality']['quality']['name']
            protocol = item['protocol'].upper()
            # Numeric mirror of the protocol so dashboards can colorize it.
            protocol_id = 1 if protocol == 'USENET' else 0
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Queue",
                        "tmdbId": item['id'],
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": name,
                        "quality": quality,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    }
                }
            )
    return influx_payload
if __name__ == "__main__":
    # Build the CLI: each flag selects one gather-and-send operation.
    parser = argparse.ArgumentParser(
        prog='Radarr stats operations',
        description='Script to aid in data gathering from Radarr',
        formatter_class=RawTextHelpFormatter)
    parser.add_argument("--missing", action='store_true',
                        help='Get missing movies')
    parser.add_argument("--missing_avl", action='store_true',
                        help='Get missing yet available movies')
    parser.add_argument("--queue", action='store_true',
                        help='Get movies in queue')
    args = parser.parse_args()

    if args.missing:
        influx_sender(get_missing_movies())
    elif args.missing_avl:
        influx_sender(get_missing_avl())
    elif args.queue:
        influx_sender(get_queue_movies())
    elif len(sys.argv) == 1:
        # No flags given: print usage to stderr and exit non-zero.
        parser.print_help(sys.stderr)
        sys.exit(1)

35
Legacy/raid_init.py Normal file
View file

@ -0,0 +1,35 @@
import psutil
import mdstat
import platform
from datetime import datetime, timezone, timedelta
from influxdb import InfluxDBClient

# Do not edit below this line #

influx_payload = []
# Parsed /proc/mdstat: one entry per md array.
md_devices = mdstat.parse()['devices']
hostname = platform.uname()[1]

for array, info in md_devices.items():
    resync = info['resync']
    # mdstat reports finish time as e.g. "12.3min"; strip the unit.
    eta_minutes = float(resync['finish'].replace('min', ''))
    influx_payload.append(
        {
            "measurement": "Storage Servers",
            "tags": {
                "server": hostname,
                "mount_point": array,
                "type": 'rebuild'
            },
            "time": datetime.now(timezone.utc).astimezone().isoformat(),
            "fields": {
                "resync_progress": float(resync['progress'].replace('%', '')),
                "resync_eta_mins": eta_minutes,
                "resync_eta_date": '{:%A, %b %d %I:%M %p}'.format(
                    datetime.now() + timedelta(minutes=eta_minutes)),
                "resync_speed_KiB/s": int(resync['speed'].replace('K/sec', '')),
            }
        }
    )

influx = InfluxDBClient('grafana.domain.tld', 8086, 'root', 'root', 'storage_server')
influx.write_points(influx_payload)

36
Legacy/san.py Normal file
View file

@ -0,0 +1,36 @@
import platform
import psutil
from datetime import datetime, timezone
from influxdb import InfluxDBClient

mount_points = ['/mnt/raid6-a', '/mnt/raid6-b']

# Do not edit below this line #

influx_payload = []
server_name = platform.uname()[1]

for mount in mount_points:
    # psutil returns a named tuple with used/free/total bytes and percent.
    usage = psutil.disk_usage(mount)
    influx_payload.append(
        {
            "measurement": "Storage Servers",
            "tags": {
                "server": server_name,
                "mount_point": mount
            },
            "time": datetime.now(timezone.utc).astimezone().isoformat(),
            "fields": {
                "bytes Used": usage.used,
                "bytes Free": usage.free,
                "bytes Total": usage.total,
                "Utilization": usage.percent
            }
        }
    )

influx = InfluxDBClient('grafana.domain.tld', 8086, 'root', 'root', 'storage_server')
influx.write_points(influx_payload)

179
Legacy/tautulli.py Normal file
View file

@ -0,0 +1,179 @@
import os
import tarfile
import urllib.request
import time
from datetime import datetime, timezone
import geoip2.database
from influxdb import InfluxDBClient
import requests
from Varken import configuration
# Single timestamp reused for every point generated by this run.
CURRENT_TIME = datetime.now(timezone.utc).astimezone().isoformat()
# Fetch the current activity snapshot from Tautulli's API v2.
PAYLOAD = {'apikey': configuration.tautulli_api_key, 'cmd': 'get_activity'}
ACTIVITY = requests.get('{}/api/v2'.format(configuration.tautulli_url),
                        params=PAYLOAD).json()['response']['data']
# Index the active sessions by session_id for lookup below.
SESSIONS = {d['session_id']: d for d in ACTIVITY['sessions']}
# GeoLite2 database paths, stored alongside this script.
TAR_DBFILE = '{}/GeoLite2-City.tar.gz'.format(os.path.dirname(os.path.realpath(__file__)))
DBFILE = '{}/GeoLite2-City.mmdb'.format(os.path.dirname(os.path.realpath(__file__)))
NOW = time.time()
# Anything with a ctime older than 35 days is considered stale.
DB_AGE = NOW - (86400 * 35)
# Remove the local GeoLite2 db file if it is older than 35 days so that
# geo_lookup() re-downloads a fresh copy on its next call.
try:
    t = os.stat(DBFILE)
    c = t.st_ctime
    if c < DB_AGE:
        os.remove(DBFILE)
except FileNotFoundError:
    pass
def geo_lookup(ipaddress):
    """Look up *ipaddress* in the local GeoLite2 City database.

    Downloads and extracts the database on first use (the module-level
    staleness check removes copies older than 35 days).

    Args:
        ipaddress: IP address string to geolocate.

    Returns:
        The geoip2 city lookup result for the address.
    """
    if not os.path.isfile(DBFILE):
        urllib.request.urlretrieve(
            'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz',
            TAR_DBFILE)
        # Context manager closes the tarball (the original leaked the handle).
        with tarfile.open(TAR_DBFILE, "r:gz") as tar:
            for member in tar.getmembers():
                if 'GeoLite2-City.mmdb' in member.name:
                    # Flatten the archive path so the .mmdb lands next to us.
                    member.name = os.path.basename(member.name)
                    tar.extract(member, '{}/'.format(os.path.dirname(os.path.realpath(__file__))))
    reader = geoip2.database.Reader(DBFILE)
    try:
        return reader.city(ipaddress)
    finally:
        # Release the mmdb file handle (the original never closed it).
        reader.close()
# Aggregate stream counts go in first; per-session points are appended below.
INFLUX_PAYLOAD = [
    {
        "measurement": "Tautulli",
        "tags": {
            "type": "stream_count"
        },
        "time": CURRENT_TIME,
        "fields": {
            "current_streams": int(ACTIVITY['stream_count']),
            "transcode_streams": int(ACTIVITY['stream_count_transcode']),
            "direct_play_streams": int(ACTIVITY['stream_count_direct_play']),
            "direct_streams": int(ACTIVITY['stream_count_direct_stream'])
        }
    }
]
# Build one point per active session, with geo data resolved per session.
for session in SESSIONS:
    sess = SESSIONS[session]
    # Resolve the session's public IP; fall back to the configured IP (or an
    # external lookup of our own public IP) when the address is missing/local.
    try:
        geodata = geo_lookup(sess['ip_address_public'])
    except (ValueError, geoip2.errors.AddressNotFoundError):
        if configuration.tautulli_failback_ip:
            geodata = geo_lookup(configuration.tautulli_failback_ip)
        else:
            geodata = geo_lookup(requests.get('http://ip.42.pl/raw').text)

    # Fixed fallback coordinates when the DB has none for this address.
    # (The original also had a dead `latitude = ...` store before this
    # if/else; removed.)
    if not geodata.location.latitude:
        latitude = 37.234332396
    else:
        latitude = geodata.location.latitude
    if not geodata.location.longitude:
        longitude = -115.80666344
    else:
        longitude = geodata.location.longitude

    decision = sess['transcode_decision']
    if decision == 'copy':
        decision = 'direct stream'

    video_decision = sess['stream_video_decision']
    if video_decision == 'copy':
        video_decision = 'direct stream'
    elif video_decision == '':
        video_decision = 'Music'

    quality = sess['stream_video_resolution']
    # If the video resolution is empty, assume it's an audio stream
    # and use the container for music.
    if not quality:
        quality = sess['container'].upper()
    elif quality in ('SD', 'sd'):
        quality = sess['stream_video_resolution'].upper()
    # Bug fix: the original used `quality in '4k'`, a substring test that
    # also matched '4' and 'k'; equality is what was intended.
    elif quality == '4k':
        quality = sess['stream_video_resolution'].upper()
    else:
        quality = '{}p'.format(sess['stream_video_resolution'])

    # Translate player_state to integers so we can colorize the table
    player_state = sess['state'].lower()
    if player_state == 'playing':
        player_state = 0
    elif player_state == 'paused':
        player_state = 1
    elif player_state == 'buffering':
        player_state = 3

    INFLUX_PAYLOAD.append(
        {
            "measurement": "Tautulli",
            "tags": {
                "type": "Session",
                "session_id": sess['session_id'],
                "name": sess['friendly_name'],
                "title": sess['full_title'],
                "platform": sess['platform'],
                "product_version": sess['product_version'],
                "quality": quality,
                "video_decision": video_decision.title(),
                "transcode_decision": decision.title(),
                "media_type": sess['media_type'].title(),
                "audio_codec": sess['audio_codec'].upper(),
                "audio_profile": sess['audio_profile'].upper(),
                "stream_audio_codec": sess['stream_audio_codec'].upper(),
                "quality_profile": sess['quality_profile'],
                "progress_percent": sess['progress_percent'],
                "region_code": geodata.subdivisions.most_specific.iso_code,
                "location": geodata.city.name,
                "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name,
                                                  geodata.city.name),
                "latitude": latitude,
                "longitude": longitude,
                "player_state": player_state,
                "device_type": sess['platform']
            },
            "time": CURRENT_TIME,
            "fields": {
                "session_id": sess['session_id'],
                "session_key": sess['session_key']
            }
        }
    )
# Ship every collected point to the Tautulli database in one batch.
INFLUX_SENDER = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port,
                               configuration.influxdb_username, configuration.influxdb_password,
                               configuration.tautulli_influxdb_db_name)
INFLUX_SENDER.write_points(INFLUX_PAYLOAD)