Merge pull request #14 from samwiseg00/feature-rework

Major rework of the scripts
This commit is contained in:
Nicholas St. Germain 2018-08-06 13:40:47 -05:00 committed by GitHub
commit f1cbbe9509
10 changed files with 710 additions and 170 deletions

11
.gitignore vendored Normal file
View file

@ -0,0 +1,11 @@
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
configuration.py
__pycache__
GeoLite2-City.mmdb
GeoLite2-City.tar.gz

View file

@ -4,15 +4,70 @@ Repo for api scripts written (both pushing and pulling) to aggregate data into i
Requirements /w install links: [Grafana](http://docs.grafana.org/installation/), [Python3](https://www.python.org/downloads/), [InfluxDB](https://docs.influxdata.com/influxdb/v1.5/introduction/installation/)
## Quick Setup
1. Install requirements (If using tautulli.py you need to `pip3 install geoip2`)
2. Create your plex database in influx
1. Install requirements `pip3 install -r requirements.txt`
1. Make a copy of `configuration.example.py` to `configuration.py`
2. Make the appropriate changes to `configuration.py`
1. Create your plex database in influx
```sh
user@server: ~$ influx
> CREATE DATABASE plex
> quit
```
3. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb. At a minimum, you will need the plex database.
4. Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like.
1. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb. At a minimum, you will need the plex database.
1. Install `grafana-cli plugins install grafana-worldmap-panel`
1. Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like.
## Scripts
`sonarr.py` Gathers data from Sonarr and pushes it to influxdb. Switches are mandatory.
- Notes:
- You cannot stack the arguments. ie. `sonarr.py --missing --queue`
- Arguments are mandatory
```
Script to aid in data gathering from Sonarr
optional arguments:
-h, --help show this help message and exit
--missing Get all missing TV shows
--missing_days MISSING_DAYS
Get missing TV shows in the past X days
--upcoming Get upcoming TV shows
--today Get TV shows airing today
--queue Get TV episodes in queue
```
`radarr.py` Gathers data from Radarr and pushes it to influxdb
- Notes:
- You cannot stack the arguments. ie. `radarr.py --missing --queue`
- Arguments are mandatory
```
Script to aid in data gathering from Radarr
optional arguments:
-h, --help show this help message and exit
--missing Get missing movies
--missing_avl Get missing available movies
--queue Get movies in queue
```
`ombi.py` Gathers data from Ombi and pushes it to influxdb
- Notes:
- You cannot stack the arguments. ie. `ombi.py --total --counts`
- Arguments are mandatory
```
Script to aid in data gathering from Ombi
optional arguments:
-h, --help show this help message and exit
--total Get the total count of all requests
--counts Get the count of pending, approved, and available requests
```
`tautulli.py` Gathers data from Tautulli and pushes it to influxdb
`sickrage.py` Gathers data from Sickrage and pushes it to influxdb
## Notes
To run the python scripts crontab is currently leveraged. Examples:
@ -21,9 +76,9 @@ To run the python scripts crontab is currently leveraged. Examples:
### to edit your crontab entry, do not modify /var/spool/cron/crontabs/<user> directly, use `crontab -e`
### Crontabs require an empty line at the end or they WILL not run. Make sure to have 2 lines to be safe
### It is bad practice to run any cronjob more than once a minute. For timing help: https://crontab.guru/
* * * * * /usr/bin/python3 /path-to-grafana-scripts/ombi.py
* * * * * /usr/bin/python3 /path-to-grafana-scripts/ombi.py --total
* * * * * /usr/bin/python3 /path-to-grafana-scripts/tautulli.py
*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py
*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py
*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py --missing
*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py --missing
*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sickrage.py
```

View file

@ -29,8 +29,6 @@ influx_payload = [
}
]
influx = InfluxDBClient(configuration.grafana_url, configuration.grafana_port, configuration.grafana_username,
configuration.grafana_password, configuration.asa_grafana_db_name)
influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
configuration.influxdb_password, configuration.asa_influxdb_db_name)
influx.write_points(influx_payload)

54
configuration.example.py Normal file
View file

@ -0,0 +1,54 @@
'''
Notes:
- Domains should be either http(s)://subdomain.domain.com or http(s)://domain.com/url_suffix
- Sonarr + Radarr scripts support multiple servers. You can remove the second
server by putting a # in front of the line.
- tautulli_failback_ip, This is used when there is no IP listed in tautulli.
This can happen when you are streaming locally. This is usually your public IP.
'''
########################### INFLUXDB CONFIG ###########################
influxdb_url = 'influxdb.domain.tld'
influxdb_port = 8086
influxdb_username = ''
influxdb_password = ''
############################ SONARR CONFIG ############################
sonarr_server_list = [
('https://sonarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'),
('https://sonarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'),
#('https://sonarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3')
]
sonarr_influxdb_db_name = 'plex'
############################ RADARR CONFIG ############################
radarr_server_list = [
('https://radarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'),
('https://radarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'),
#('https://radarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3')
]
radarr_influxdb_db_name = 'plex'
############################ OMBI CONFIG ##############################
ombi_url = 'https://ombi.domain.tld'
ombi_api_key = 'xxxxxxxxxxxxxxx'
ombi_influxdb_db_name = 'plex'
########################## TAUTULLI CONFIG ############################
tautulli_url = 'https://tautulli.domain.tld'
tautulli_api_key = 'xxxxxxxxxxxxxxx'
tautulli_failback_ip = ''
tautulli_influxdb_db_name = 'plex'
########################## FIREWALL CONFIG ############################
asa_url = 'https://firewall.domain.tld'
asa_username = 'cisco'
asa_password = 'cisco'
asa_influxdb_db_name = 'asa'
########################## SICKRAGE CONFIG ############################
sickrage_url = 'https://sickrage.domain.tld/'
sickrage_api_key = 'xxxxxxxxxxxxxxx'
sickrage_influxdb_db_name = 'plex'

View file

@ -1,31 +0,0 @@
# Domains should be either http(s)://subdomain.domain.com or http(s):// domain.com/url_suffix
grafana_url = 'grafana.domain.tld'
grafana_port = 8086
grafana_username = 'root'
grafana_password = 'root'
sonarr_url = 'https://radarr.domain.tld'
sonarr_api_key = 'xxxxxxxxxxxxxxx'
sonarr_grafana_db_name = 'plex'
radarr_url = 'https://sonarr.domain.tld'
radarr_api_key = 'xxxxxxxxxxxxxxx'
radarr_grafana_db_name = 'plex'
ombi_url = 'https://ombi.domain.tld'
ombi_api_key = 'xxxxxxxxxxxxxxx'
ombi_grafana_db_name = 'plex'
tautulli_url = 'https://tautulli.domain.tld'
tautulli_api_key = 'xxxxxxxxxxxxxxx'
tautulli_grafana_db_name = 'plex'
asa_url = 'https://firewall.domain.tld'
asa_username = 'cisco'
asa_password = 'cisco'
asa_grafana_db_name = 'asa'
sickrage_url = 'https://sickrage.domain.tld/'
sickrage_api_key = 'xxxxxxxxxxxxxxx'
sickrage_grafana_db_name = 'plex'

100
ombi.py
View file

@ -1,39 +1,87 @@
# Do not edit this script. Edit configuration.py
import sys
import requests
from datetime import datetime, timezone
from influxdb import InfluxDBClient
import argparse
from argparse import RawTextHelpFormatter
import configuration
current_time = datetime.now(timezone.utc).astimezone().isoformat()
headers = {'Apikey': configuration.ombi_api_key}
get_tv_requests = requests.get('{}/api/v1/Request/tv'.format(configuration.ombi_url), headers=headers).json()
get_movie_requests = requests.get('{}/api/v1/Request/movie'.format(configuration.ombi_url), headers=headers).json()
count_movie_requests = 0
count_tv_requests = 0
def now_iso():
    """Return the current local time as an ISO-8601 string with UTC offset."""
    return datetime.now(timezone.utc).astimezone().isoformat()
for show in get_tv_requests:
count_tv_requests += 1
for movie in get_movie_requests:
count_movie_requests += 1
def influx_sender(influx_payload):
    # Push a list of measurement points to the Ombi InfluxDB database.
    # Connection settings come from configuration.py (see configuration.example.py).
    influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
                            configuration.influxdb_password, configuration.ombi_influxdb_db_name)
    influx.write_points(influx_payload)
influx_payload = [
{
"measurement": "Ombi",
"tags": {
"type": "Requests"
},
"time": current_time,
"fields": {
"total": count_movie_requests + count_tv_requests
def get_total_requests():
    """Query Ombi for all TV and movie requests and return an InfluxDB
    payload containing the combined request total."""
    get_tv_requests = requests.get('{}/api/v1/Request/tv'.format(configuration.ombi_url), headers=headers).json()
    get_movie_requests = requests.get('{}/api/v1/Request/movie'.format(configuration.ombi_url), headers=headers).json()
    # The API returns JSON lists; len() replaces the old manual counting loops.
    count_tv_requests = len(get_tv_requests)
    count_movie_requests = len(get_movie_requests)
    influx_payload = [
        {
            "measurement": "Ombi",
            "tags": {
                "type": "Request_Total"
            },
            "time": now_iso(),
            "fields": {
                "total": count_movie_requests + count_tv_requests
            }
        }
    ]
    return influx_payload
influx = InfluxDBClient(configuration.grafana_url, configuration.grafana_port, configuration.grafana_username,
configuration.grafana_password, configuration.ombi_grafana_db_name)
influx.write_points(influx_payload)
def get_request_counts():
    """Fetch pending/approved/available request counts from Ombi and wrap
    them in a single InfluxDB point."""
    counts = requests.get('{}/api/v1/Request/count'.format(configuration.ombi_url), headers=headers).json()
    point = {
        "measurement": "Ombi",
        "tags": {
            "type": "Request_Counts"
        },
        "time": now_iso(),
        "fields": {key: int(counts[key]) for key in ("pending", "approved", "available")}
    }
    return [point]
if __name__ == "__main__":
    # CLI entry point: each flag maps to one gathering function whose payload
    # is then pushed to InfluxDB. Flags cannot be combined.
    parser = argparse.ArgumentParser(prog='Ombi stats operations',
                                     description='Script to aid in data gathering from Ombi', formatter_class=RawTextHelpFormatter)
    parser.add_argument("--total", action='store_true',
                        help='Get the total count of all requests')
    parser.add_argument("--counts", action='store_true',
                        help='Get the count of pending, approved, and available requests')
    opts = parser.parse_args()
    if opts.total:
        influx_sender(get_total_requests())
    elif opts.counts:
        influx_sender(get_request_counts())
    elif len(sys.argv) == 1:
        # No arguments: print usage and exit non-zero.
        parser.print_help(sys.stderr)
        sys.exit(1)

187
radarr.py
View file

@ -1,38 +1,171 @@
# Do not edit this script. Edit configuration.py
import sys
import requests
from datetime import datetime, timezone
from influxdb import InfluxDBClient
import argparse
from argparse import RawTextHelpFormatter
import configuration
current_time = datetime.now(timezone.utc).astimezone().isoformat()
headers = {'X-Api-Key': configuration.radarr_api_key}
get_movies = requests.get('{}/api/movie'.format(configuration.radarr_url), headers=headers).json()
movies = {d['tmdbId']: d for d in get_movies}
missing = []
influx_payload = []
def now_iso():
    """Return the current local time as an ISO-8601 string with UTC offset."""
    return datetime.now(timezone.utc).astimezone().isoformat()
for movie in movies.keys():
if not movies[movie]['downloaded']:
missing.append((movies[movie]['title'], movies[movie]['tmdbId']))
for movie, id in missing:
influx_payload.append(
{
"measurement": "Radarr",
"tags": {
"type": "Missing",
"tmdbId": id
},
"time": current_time,
"fields": {
"name": movie
}
}
)
def influx_sender(influx_payload):
    # Push a list of measurement points to the Radarr InfluxDB database.
    # Connection settings come from configuration.py (see configuration.example.py).
    influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
                            configuration.influxdb_password, configuration.radarr_influxdb_db_name)
    influx.write_points(influx_payload)
influx = InfluxDBClient(configuration.grafana_url, configuration.grafana_port, configuration.grafana_username,
configuration.grafana_password, configuration.radarr_grafana_db_name)
influx.write_points(influx_payload)
def get_missing_movies():
    """Collect movies that are not downloaded from every configured Radarr
    server and return them as InfluxDB points tagged "Missing"."""
    # One timestamp shared by every point produced in this run.
    now = now_iso()
    influx_payload = []
    for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
        headers = {'X-Api-Key': radarr_api_key}
        get_movies = requests.get('{}/api/movie'.format(radarr_url), headers=headers).json()
        movies = {d['tmdbId']: d for d in get_movies}
        # (display name, tmdbId) for every movie without a file.
        not_downloaded = [
            ('{} ({})'.format(entry['title'], entry['year']), entry['tmdbId'])
            for entry in movies.values()
            if not entry['downloaded']
        ]
        for movie_name, tmdb_id in not_downloaded:
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Missing",
                        "tmdbId": tmdb_id,
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": movie_name
                    }
                }
            )
    return influx_payload
def get_missing_avl():
    # Build InfluxDB points for movies that are missing AND already released
    # ("isAvailable") on every configured Radarr server.
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    missing = []
    influx_payload = []
    for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
        headers = {'X-Api-Key': radarr_api_key}
        get_movies = requests.get('{}/api/movie'.format(radarr_url), headers=headers).json()
        # Index the response by tmdbId for iteration.
        movies = {d['tmdbId']: d for d in get_movies}
        for movie in movies.keys():
            if not movies[movie]['downloaded']:
                if movies[movie]['isAvailable'] is True:
                    movie_name = ('{} ({})'.format(movies[movie]['title'], movies[movie]['year']))
                    missing.append((movie_name, movies[movie]['tmdbId']))
        for movie, id in missing:
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Missing_Available",
                        "tmdbId": id,
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": movie,
                    }
                }
            )
        # Empty missing or else things get foo bared
        missing = []
    return influx_payload
def get_queue_movies():
    """Return InfluxDB points for every item in each Radarr server's download
    queue, including quality and transfer protocol."""
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    influx_payload = []
    for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
        headers = {'X-Api-Key': radarr_api_key}
        get_movies = requests.get('{}/api/queue'.format(radarr_url), headers=headers).json()
        queue_movies = {d['id']: d for d in get_movies}
        queue = []
        for entry in queue_movies.values():
            name = '{} ({})'.format(entry['movie']['title'], entry['movie']['year'])
            quality = entry['quality']['quality']['name']
            protocol = entry['protocol'].upper()
            # USENET -> 1, everything else (torrents) -> 0
            protocol_id = 1 if protocol == 'USENET' else 0
            # BUG FIX: quality/protocol/protocol_id used to be read from loop
            # variables after this loop finished, so every queued movie got the
            # values of the LAST item. Carry them per-item in the tuple instead.
            queue.append((name, quality, protocol, protocol_id, entry['id']))
        for name, quality, protocol, protocol_id, queue_id in queue:
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Queue",
                        "tmdbId": queue_id,
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": name,
                        "quality": quality,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    }
                }
            )
    return influx_payload
if __name__ == "__main__":
    # CLI entry point: exactly one flag selects which gatherer runs; its
    # payload is then written to InfluxDB. Flags cannot be combined.
    parser = argparse.ArgumentParser(prog='Radarr stats operations',
                                     description='Script to aid in data gathering from Radarr', formatter_class=RawTextHelpFormatter)
    parser.add_argument("--missing", action='store_true',
                        help='Get missing movies')
    parser.add_argument("--missing_avl", action='store_true',
                        help='Get missing available movies')
    parser.add_argument("--queue", action='store_true',
                        help='Get movies in queue')
    opts = parser.parse_args()
    if opts.missing:
        influx_sender(get_missing_movies())
    elif opts.missing_avl:
        influx_sender(get_missing_avl())
    elif opts.queue:
        influx_sender(get_queue_movies())
    elif len(sys.argv) == 1:
        # No arguments: print usage and exit non-zero.
        parser.print_help(sys.stderr)
        sys.exit(1)

7
requirements.txt Normal file
View file

@ -0,0 +1,7 @@
#---------------------------------------------------------
# Potential requirements.
# pip3 install -r requirements.txt
#---------------------------------------------------------
requests
geoip2
influxdb

366
sonarr.py
View file

@ -1,70 +1,316 @@
# Do not edit this script. Edit configuration.py
import sys
import requests
from datetime import datetime, timezone
from datetime import datetime, timezone, date, timedelta
from influxdb import InfluxDBClient
import argparse
from argparse import RawTextHelpFormatter
import configuration
current_time = datetime.now(timezone.utc).astimezone().isoformat()
headers = {'X-Api-Key': configuration.sonarr_api_key}
get_tv_shows = requests.get('{}/api/wanted/missing/?pageSize=1000'.format(configuration.sonarr_url),
headers=headers).json()['records']
tv_shows = {d['id']: d for d in get_tv_shows}
missing = []
influx_payload = []
for show in tv_shows.keys():
seriesTitle = '{}'.format(tv_shows[show]['series']['title'])
sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'],tv_shows[show]['episodeNumber'])
missing.append((seriesTitle, sxe, tv_shows[show]['id'], tv_shows[show]['title']))
def now_iso():
    """Return the current local time as an ISO-8601 string with UTC offset."""
    return datetime.now(timezone.utc).astimezone().isoformat()
for seriesTitle, sxe, id, title in missing:
influx_payload.append(
{
"measurement": "Sonarr",
"tags": {
"type": "Missing",
"sonarrId": id
},
"time": current_time,
"fields": {
"name": seriesTitle,
"epname": title,
"sxe": sxe
}
}
)
get_upcoming_shows = requests.get('{}/api/calendar/'.format(configuration.sonarr_url),
headers=headers).json()
upcoming_shows = {d['id']: d for d in get_upcoming_shows}
upcoming = []
influx_payload2 = []
for show in upcoming_shows.keys():
seriesTitle = '{}'.format(upcoming_shows[show]['series']['title'])
sxe = 'S{:0>2}E{:0>2}'.format(upcoming_shows[show]['seasonNumber'],upcoming_shows[show]['episodeNumber'])
upcoming.append((seriesTitle, sxe, upcoming_shows[show]['id'], upcoming_shows[show]['title'], upcoming_shows[show]['airDate']))
def influx_sender(influx_payload):
    # Push a list of measurement points to the Sonarr InfluxDB database.
    # Connection settings come from configuration.py (see configuration.example.py).
    influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
                            configuration.influxdb_password, configuration.sonarr_influxdb_db_name)
    influx.write_points(influx_payload)
for seriesTitle, sxe, id, title, airDate in upcoming:
influx_payload2.append(
{
"measurement": "Sonarr",
"tags": {
"type": "Soon",
"sonarrId": id
},
"time": current_time,
"fields": {
"name": seriesTitle,
"epname": title,
"sxe": sxe,
"airs": airDate
}
}
)
influx = InfluxDBClient(configuration.grafana_url, configuration.grafana_port, configuration.grafana_username,
configuration.grafana_password, configuration.sonarr_grafana_db_name)
influx.write_points(influx_payload)
influx.write_points(influx_payload2)
def get_all_missing_shows():
    # Gather every "wanted/missing" episode from each configured Sonarr
    # server and convert them into InfluxDB points tagged "Missing".
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    missing = []
    influx_payload = []
    for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
        headers = {'X-Api-Key': sonarr_api_key}
        get_tv_shows = requests.get('{}/api/wanted/missing/?pageSize=1000'.format(sonarr_url),
                                    headers=headers).json()['records']
        tv_shows = {d['id']: d for d in get_tv_shows}
        for show in tv_shows.keys():
            series_title = '{}'.format(tv_shows[show]['series']['title'])
            # Zero-padded SxxEyy label.
            sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'],tv_shows[show]['episodeNumber'])
            missing.append((series_title, sxe, tv_shows[show]['id'], tv_shows[show]['title']))
        for series_title, sxe, id, title in missing:
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Missing",
                        "sonarrId": id,
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": series_title,
                        "epname": title,
                        "sxe": sxe
                    }
                }
            )
        # Empty missing or else things get foo bared
        missing = []
    return influx_payload
def get_missing_shows(days_past):
    # Gather episodes that aired within the last `days_past` days but have no
    # file yet, from each configured Sonarr server ("Missing_Days" points).
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    last_days = str(date.today()+timedelta(days=-days_past))
    today = str(date.today())
    missing = []
    influx_payload = []
    for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
        headers = {'X-Api-Key': sonarr_api_key}
        get_tv_shows = requests.get('{}/api/calendar/?start={}&end={}&pageSize=1000'.format(sonarr_url, last_days, today),
                                    headers=headers).json()
        tv_shows = {d['id']: d for d in get_tv_shows}
        for show in tv_shows.keys():
            # Only episodes that have not been downloaded.
            if not (tv_shows[show]['hasFile']):
                series_title = '{}'.format(tv_shows[show]['series']['title'])
                sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber'])
                air_date = (tv_shows[show]['airDate'])
                missing.append((series_title, sxe, air_date, tv_shows[show]['id']))
        for series_title, sxe, air_date, id in missing:
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Missing_Days",
                        "sonarrId": id,
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": series_title,
                        "sxe": sxe,
                        "airs": air_date
                    }
                }
            )
        # Empty missing or else things get foo bared
        missing = []
    return influx_payload
def get_upcoming_shows():
    # Gather calendar (upcoming) episodes from each configured Sonarr server
    # and convert them into InfluxDB points tagged "Soon".
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    upcoming = []
    influx_payload = []
    for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
        headers = {'X-Api-Key': sonarr_api_key}
        get_upcoming_shows = requests.get('{}/api/calendar/'.format(sonarr_url),
                                          headers=headers).json()
        upcoming_shows = {d['id']: d for d in get_upcoming_shows}
        for show in upcoming_shows.keys():
            series_title = '{}'.format(upcoming_shows[show]['series']['title'])
            sxe = 'S{:0>2}E{:0>2}'.format(upcoming_shows[show]['seasonNumber'],upcoming_shows[show]['episodeNumber'])
            upcoming.append((series_title, sxe, upcoming_shows[show]['id'], upcoming_shows[show]['title'], upcoming_shows[show]['airDate']))
        for series_title, sxe, id, title, air_date in upcoming:
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Soon",
                        "sonarrId": id,
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": series_title,
                        "epname": title,
                        "sxe": sxe,
                        "airs": air_date
                    }
                }
            )
        # Empty upcoming or else things get foo bared
        upcoming = []
    return influx_payload
def get_today_shows():
    """Return InfluxDB points for every episode airing today, including a
    downloaded flag (1/0) per episode."""
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    today = str(date.today())
    tomorrow = str(date.today()+timedelta(days=1))
    influx_payload = []
    for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
        headers = {'X-Api-Key': sonarr_api_key}
        get_tv_shows = requests.get('{}/api/calendar/?start={}&end={}&pageSize=50'.format(sonarr_url, today, tomorrow),
                                    headers=headers).json()
        tv_shows = {d['id']: d for d in get_tv_shows}
        # Per-server accumulator; scoped inside the loop so servers don't mix.
        # (The unused `downloaded` list from the original was removed.)
        air_today = []
        for show in tv_shows.keys():
            series_title = '{}'.format(tv_shows[show]['series']['title'])
            dl_status = int(tv_shows[show]['hasFile'])
            sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber'])
            air_today.append((series_title, dl_status, sxe, tv_shows[show]['title'], tv_shows[show]['id']))
        for series_title, dl_status, sxe, title, id in air_today:
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Today",
                        "sonarrId": id,
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        "name": series_title,
                        "epname": title,
                        "sxe": sxe,
                        "downloaded": dl_status
                    }
                }
            )
    return influx_payload
def get_queue_shows():
    """Return InfluxDB points for every episode in each Sonarr server's
    download queue, including transfer protocol."""
    # Set the time here so we have one timestamp to work with
    now = now_iso()
    influx_payload = []
    for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
        headers = {'X-Api-Key': sonarr_api_key}
        get_tv_shows = requests.get('{}/api/queue'.format(sonarr_url),
                                    headers=headers).json()
        tv_shows = {d['id']: d for d in get_tv_shows}
        # Per-server accumulator; the unused `downloaded` list was removed.
        queue = []
        for show in tv_shows.keys():
            series_title = '{}'.format(tv_shows[show]['series']['title'])
            protocol = (tv_shows[show]['protocol'].upper())
            sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['episode']['seasonNumber'], tv_shows[show]['episode']['episodeNumber'])
            # USENET -> 1, everything else (torrents) -> 0
            if protocol == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0
            queue.append((series_title, protocol, protocol_id, sxe, tv_shows[show]['id']))
        for series_title, protocol, protocol_id, sxe, id in queue:
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Queue",
                        "sonarrId": id,
                        "server": server_id
                    },
                    "time": now,
                    "fields": {
                        # Fix: the merged diff carried a stale duplicate
                        # '"name": show' entry; series_title is the intended value.
                        "name": series_title,
                        "sxe": sxe,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    }
                }
            )
    return influx_payload
if __name__ == "__main__":
    # CLI entry point: exactly one flag selects which gatherer runs; its
    # payload is then written to InfluxDB. Flags cannot be combined.
    parser = argparse.ArgumentParser(prog='Sonarr stats operations',
                                     description='Script to aid in data gathering from Sonarr', formatter_class=RawTextHelpFormatter)
    parser.add_argument("--missing", action='store_true',
                        help='Get all missing TV shows')
    # Help-text fixes: "X pass days" -> "the past X days", "on today" ->
    # "airing today", and "--queue" referred to movies in this TV tool.
    parser.add_argument("--missing_days", type=int,
                        help='Get missing TV shows in the past X days')
    parser.add_argument("--upcoming", action='store_true',
                        help='Get upcoming TV shows')
    parser.add_argument("--today", action='store_true',
                        help='Get TV shows airing today')
    parser.add_argument("--queue", action='store_true',
                        help='Get TV episodes in queue')
    opts = parser.parse_args()
    if opts.missing:
        influx_sender(get_all_missing_shows())
    elif opts.missing_days:
        influx_sender(get_missing_shows(opts.missing_days))
    elif opts.upcoming:
        influx_sender(get_upcoming_shows())
    elif opts.today:
        influx_sender(get_today_shows())
    elif opts.queue:
        influx_sender(get_queue_shows())
    elif len(sys.argv) == 1:
        # No arguments: print usage and exit non-zero.
        parser.print_help(sys.stderr)
        sys.exit(1)

View file

@ -11,31 +11,35 @@ from influxdb import InfluxDBClient
import configuration
current_time = datetime.now(timezone.utc).astimezone().isoformat()
payload = {'apikey': configuration.tautulli_api_key, 'cmd': 'get_activity'}
activity = requests.get('{}/api/v2'.format(configuration.tautulli_url), params=payload).json()['response']['data']
sessions = {d['session_id']: d for d in activity['sessions']}
def GeoLite2db(ipaddress):
    """Look up `ipaddress` in the local GeoLite2 City database, downloading
    and extracting the database next to this script if it is not present.

    Returns the geoip2 city response; raises ValueError for invalid IPs
    (handled by the caller with a fallback IP).
    """
    dbfile = 'GeoLite2-City.mmdb'
    if not os.path.isfile(dbfile):
        urllib.request.urlretrieve('http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz', 'GeoLite2-City.tar.gz')
        tar = tarfile.open('GeoLite2-City.tar.gz', "r:gz")
        # Extract only the .mmdb member, flattening its path into this
        # script's directory. (The merged diff also carried the old
        # extractall/tempfolder/os.rename path and a duplicated reader block;
        # that residue is removed here.)
        for files in tar.getmembers():
            if dbfile in files.name:
                files.name = os.path.basename(files.name)
                tar.extract(files, '{}/'.format(os.path.dirname(os.path.realpath(__file__))))
        # Fix: close the archive after extraction (the residue closed it
        # before the getmembers() loop, which would fail at runtime).
        tar.close()
    reader = geoip2.database.Reader(dbfile)
    geodata = reader.city(ipaddress)
    return geodata
influx_payload = [
{
@ -45,36 +49,51 @@ influx_payload = [
},
"time": current_time,
"fields": {
"current_streams": int(activity['stream_count'])
"current_streams": int(activity['stream_count']),
"transcode_streams": int(activity['stream_count_transcode']),
"direct_play_streams": int(activity['stream_count_direct_play']),
"direct_streams": int(activity['stream_count_direct_stream'])
}
}
]
for session in sessions.keys():
    # Resolve the session's public IP to geo data; fall back to the configured
    # IP (or an external lookup of this host's public IP) when Tautulli has no
    # usable address, e.g. for LAN streams. The merged diff carried a stale
    # unguarded GeoLite2db call before this try block; it is removed here.
    try:
        geodata = GeoLite2db(sessions[session]['ip_address_public'])
    except ValueError:
        if configuration.tautulli_failback_ip:
            geodata = GeoLite2db(configuration.tautulli_failback_ip)
        else:
            geodata = GeoLite2db(requests.get('http://ip.42.pl/raw').text)
    decision = sessions[session]['transcode_decision']
    if decision == 'copy':
        # Tautulli reports "copy" for direct-stream sessions.
        decision = 'direct stream'
    influx_payload.append(
        {
            "measurement": "Tautulli",
            "tags": {
                "type": "Session",
                "region_code": geodata.subdivisions.most_specific.iso_code,
                # Fix: the diff residue left a stale '"name"' tag line here
                # (missing its comma); name now lives in fields below.
                "session_key": sessions[session]['session_key']
            },
            "time": current_time,
            "fields": {
                "name": sessions[session]['friendly_name'],
                "title": sessions[session]['full_title'],
                "quality": '{}p'.format(sessions[session]['video_resolution']),
                "video_decision": sessions[session]['stream_video_decision'],
                "transcode_decision": decision.title(),
                "platform": sessions[session]['platform'],
                "product_version": sessions[session]['product_version'],
                "quality_profile": sessions[session]['quality_profile'],
                "progress_percent": sessions[session]['progress_percent'],
                "location": geodata.city.name,
            }
        }
    )
# Write all session points in one call; stale grafana_* client lines from the
# pre-merge version are dropped in favor of the influxdb_* settings.
influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
                        configuration.influxdb_password, configuration.tautulli_influxdb_db_name)
influx.write_points(influx_payload)