v1.6.1 Merge

v1.6.1 Merge
This commit is contained in:
Nicholas St. Germain 2019-01-11 23:54:31 -06:00 committed by GitHub
commit 345ce141ec
14 changed files with 221 additions and 163 deletions

View file

@ -1,5 +1,24 @@
# Change Log
## [v1.6.1](https://github.com/Boerderij/Varken/tree/v1.6.1) (2019-01-11)
[Full Changelog](https://github.com/Boerderij/Varken/compare/v1.6...v1.6.1)
**Implemented enhancements:**
- \[Feature Request\] Unifi Integration [\#79](https://github.com/Boerderij/Varken/issues/79)
**Fixed bugs:**
- \[BUG\] Unexpected keyword argument 'langCode' while creating OmbiMovieRequest structure [\#88](https://github.com/Boerderij/Varken/issues/88)
**Closed issues:**
- Remove Cisco ASA since Telegraf + SNMP can do the same [\#86](https://github.com/Boerderij/Varken/issues/86)
**Merged pull requests:**
- v1.6.1 Merge [\#90](https://github.com/Boerderij/Varken/pull/90) ([DirtyCajunRice](https://github.com/DirtyCajunRice))
## [v1.6](https://github.com/Boerderij/Varken/tree/v1.6) (2019-01-04)
[Full Changelog](https://github.com/Boerderij/Varken/compare/v1.5...v1.6)
@ -15,7 +34,7 @@
**Merged pull requests:**
- v1.6 Merge [\#75](https://github.com/Boerderij/Varken/pull/75) ([DirtyCajunRice](https://github.com/DirtyCajunRice))
- v1.6 Merge [\#85](https://github.com/Boerderij/Varken/pull/85) ([DirtyCajunRice](https://github.com/DirtyCajunRice))
## [v1.5](https://github.com/Boerderij/Varken/tree/v1.5) (2018-12-30)
[Full Changelog](https://github.com/Boerderij/Varken/compare/v1.4...v1.5)

View file

@ -1,6 +1,6 @@
FROM lsiobase/alpine.python3
LABEL maintainer="dirtycajunrice"
LABEL maintainers="dirtycajunrice,samwiseg0"
ENV DEBUG="False"

View file

@ -5,7 +5,6 @@
[![Docker-Layers](https://images.microbadger.com/badges/image/boerderij/varken.svg)](https://microbadger.com/images/boerderij/varken)
[![Docker-Version](https://images.microbadger.com/badges/version/boerderij/varken.svg)](https://microbadger.com/images/boerderij/varken)
[![Docker Pulls](https://img.shields.io/docker/pulls/boerderij/varken.svg)](https://hub.docker.com/r/boerderij/varken/)
[![Docker Stars](https://img.shields.io/docker/stars/boerderij/varken.svg)](https://hub.docker.com/r/boerderij/varken/)
Dutch for PIG. PIG is an Acronym for Plex/InfluxDB/Grafana
@ -30,7 +29,7 @@ Supported Modules:
* [Radarr](https://radarr.video/) - A fork of Sonarr to work with movies à la Couchpotato.
* [Tautulli](https://tautulli.com/) - A Python based monitoring and tracking tool for Plex Media Server.
* [Ombi](https://ombi.io/) - Want a Movie or TV Show on Plex or Emby? Use Ombi!
* Cisco ASA
* [Unifi](https://unifi-sdn.ubnt.com/) - The Global Leader in Managed Wi-Fi Systems
Key features:
* Multiple server support for all modules

View file

@ -1,7 +1,7 @@
import platform
import schedule
from time import sleep
from queue import Queue
from sys import version
from threading import Thread
from os import access, R_OK, getenv
@ -10,8 +10,9 @@ from os.path import isdir, abspath, dirname, join
from argparse import ArgumentParser, RawTextHelpFormatter
from logging import getLogger, StreamHandler, Formatter, DEBUG
from varken import structures # Needed to check version of python
from varken.ombi import OmbiAPI
from varken.cisco import CiscoAPI
from varken.unifi import UniFiAPI
from varken import VERSION, BRANCH
from varken.sonarr import SonarrAPI
from varken.radarr import RadarrAPI
@ -26,9 +27,14 @@ from varken.varkenlogger import VarkenLogger
PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
def threaded(job):
    """Fire-and-forget: run *job* on a new worker thread and return immediately."""
    worker = Thread(target=job)
    worker.start()
def thread():
    """Worker loop: consume jobs from QUEUE and run them while scheduled jobs exist.

    A job may return a value; per the scheduling code, that value is a
    schedule tag string signalling the job should be cancelled, so any
    non-None result is cleared from the scheduler by tag.
    """
    while schedule.jobs:
        job = QUEUE.get()
        result = job()
        # Removed leftover debug print of the job's return value.
        if result is not None:
            # Job asked to be unscheduled (it returned its own tag)
            schedule.clear(result)
        QUEUE.task_done()
if __name__ == "__main__":
@ -85,64 +91,81 @@ if __name__ == "__main__":
CONFIG = INIParser(DATA_FOLDER)
DBMANAGER = DBManager(CONFIG.influx_server)
QUEUE = Queue()
if CONFIG.sonarr_enabled:
for server in CONFIG.sonarr_servers:
SONARR = SonarrAPI(server, DBMANAGER)
if server.queue:
schedule.every(server.queue_run_seconds).seconds.do(threaded, SONARR.get_queue)
at_time = schedule.every(server.queue_run_seconds).seconds
at_time.do(QUEUE.put, SONARR.get_queue).tag(f"sonarr-{server.id}-get_queue")
if server.missing_days > 0:
schedule.every(server.missing_days_run_seconds).seconds.do(threaded, SONARR.get_missing)
at_time = schedule.every(server.missing_days_run_seconds).seconds
at_time.do(QUEUE.put, SONARR.get_missing).tag(f"sonarr-{server.id}-get_missing")
if server.future_days > 0:
schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future)
at_time = schedule.every(server.future_days_run_seconds).seconds
at_time.do(QUEUE.put, SONARR.get_future).tag(f"sonarr-{server.id}-get_future")
if CONFIG.tautulli_enabled:
GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
schedule.every(12).to(24).hours.do(threaded, GEOIPHANDLER.update)
schedule.every(12).to(24).hours.do(QUEUE.put, GEOIPHANDLER.update)
for server in CONFIG.tautulli_servers:
TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
if server.get_activity:
schedule.every(server.get_activity_run_seconds).seconds.do(threaded, TAUTULLI.get_activity)
at_time = schedule.every(server.get_activity_run_seconds).seconds
at_time.do(QUEUE.put, TAUTULLI.get_activity).tag(f"tautulli-{server.id}-get_activity")
if server.get_stats:
schedule.every(server.get_stats_run_seconds).seconds.do(threaded, TAUTULLI.get_stats)
at_time = schedule.every(server.get_stats_run_seconds).seconds
at_time.do(QUEUE.put, TAUTULLI.get_stats).tag(f"tautulli-{server.id}-get_stats")
if CONFIG.radarr_enabled:
for server in CONFIG.radarr_servers:
RADARR = RadarrAPI(server, DBMANAGER)
if server.get_missing:
schedule.every(server.get_missing_run_seconds).seconds.do(threaded, RADARR.get_missing)
at_time = schedule.every(server.get_missing_run_seconds).seconds
at_time.do(QUEUE.put, RADARR.get_missing).tag(f"radarr-{server.id}-get_missing")
if server.queue:
schedule.every(server.queue_run_seconds).seconds.do(threaded, RADARR.get_queue)
at_time = schedule.every(server.queue_run_seconds).seconds
at_time.do(QUEUE.put, RADARR.get_queue).tag(f"radarr-{server.id}-get_queue")
if CONFIG.ombi_enabled:
for server in CONFIG.ombi_servers:
OMBI = OmbiAPI(server, DBMANAGER)
if server.request_type_counts:
schedule.every(server.request_type_run_seconds).seconds.do(threaded, OMBI.get_request_counts)
at_time = schedule.every(server.request_type_run_seconds).seconds
at_time.do(QUEUE.put, OMBI.get_request_counts).tag(f"ombi-{server.id}-get_request_counts")
if server.request_total_counts:
schedule.every(server.request_total_run_seconds).seconds.do(threaded, OMBI.get_all_requests)
at_time = schedule.every(server.request_total_run_seconds).seconds
at_time.do(QUEUE.put, OMBI.get_all_requests).tag(f"ombi-{server.id}-get_all_requests")
if server.issue_status_counts:
schedule.every(server.issue_status_run_seconds).seconds.do(threaded, OMBI.get_issue_counts)
at_time = schedule.every(server.issue_status_run_seconds).seconds
at_time.do(QUEUE.put, OMBI.get_issue_counts).tag(f"ombi-{server.id}-get_issue_counts")
if CONFIG.sickchill_enabled:
for server in CONFIG.sickchill_servers:
SICKCHILL = SickChillAPI(server, DBMANAGER)
if server.get_missing:
schedule.every(server.get_missing_run_seconds).seconds.do(threaded, SICKCHILL.get_missing)
at_time = schedule.every(server.get_missing_run_seconds).seconds
at_time.do(QUEUE.put, SICKCHILL.get_missing).tag(f"sickchill-{server.id}-get_missing")
if CONFIG.ciscoasa_enabled:
for firewall in CONFIG.ciscoasa_servers:
ASA = CiscoAPI(firewall, DBMANAGER)
schedule.every(firewall.get_bandwidth_run_seconds).seconds.do(threaded, ASA.get_bandwidth)
if CONFIG.unifi_enabled:
for server in CONFIG.unifi_servers:
UNIFI = UniFiAPI(server, DBMANAGER)
at_time = schedule.every(server.get_usg_stats_run_seconds).seconds
at_time.do(QUEUE.put, UNIFI.get_usg_stats).tag(f"unifi-{server.id}-get_usg_stats")
# Run all on startup
SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled,
CONFIG.sonarr_enabled, CONFIG.ciscoasa_enabled, CONFIG.sickchill_enabled]
SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled,
CONFIG.sonarr_enabled, CONFIG.sickchill_enabled]
if not [enabled for enabled in SERVICES_ENABLED if enabled]:
vl.logger.error("All services disabled. Exiting")
exit(1)
WORKER = Thread(target=thread)
WORKER.start()
schedule.run_all()
while True:
while schedule.jobs:
schedule.run_pending()
sleep(1)

View file

@ -3,12 +3,14 @@ sonarr_server_ids = 1,2
radarr_server_ids = 1,2
tautulli_server_ids = 1
ombi_server_ids = 1
ciscoasa_server_ids = false
sickchill_server_ids = false
unifi_server_ids = false
[influxdb]
url = influxdb.domain.tld
port = 8086
ssl = false
verify_ssl = false
username = root
password = root
@ -87,11 +89,12 @@ verify_ssl = false
get_missing = true
get_missing_run_seconds = 300
[ciscoasa-1]
url = firewall.domain.tld
username = cisco
password = cisco
outside_interface = WAN
[unifi-1]
url = unifi.domain.tld:8443
username = ubnt
password = ubnt
site = default
usg_name = MyRouter
ssl = false
verify_ssl = false
get_bandwidth_run_seconds = 300
get_usg_stats_run_seconds = 300

View file

@ -40,6 +40,7 @@ services:
- GF_PATHS_DATA=/config/data
- GF_PATHS_LOGS=/config/logs
- GF_PATHS_PLUGINS=/config/plugins
- GF_PATHS_CONFIG=/config/grafana.ini
- GF_INSTALL_PLUGINS=grafana-piechart-panel,grafana-worldmap-panel
depends_on:
- influxdb

View file

@ -1,2 +1,2 @@
VERSION = 1.7
BRANCH = 'pre-nightly'
VERSION = "1.6.1"
BRANCH = 'master'

View file

@ -1,61 +0,0 @@
from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone
from varken.helpers import connection_handler
class CiscoAPI(object):
    """Collects WAN-interface bandwidth stats from a Cisco ASA firewall and
    writes them to InfluxDB via the supplied DBManager."""

    def __init__(self, firewall, dbmanager):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.firewall = firewall
        # One shared session reduces server web thread load and carries basic auth
        self.session = Session()
        self.session.auth = (self.firewall.username, self.firewall.password)
        self.logger = getLogger()
        self.get_token()

    def __repr__(self):
        return f"<ciscoasa-{self.firewall.id}>"

    def get_token(self):
        """Request an auth token from the ASA and install it as a session header."""
        request = self.session.prepare_request(
            Request('POST', self.firewall.url + '/api/tokenservices'))
        token = connection_handler(self.session, request, self.firewall.verify_ssl)
        if token:
            self.session.headers = {'X-Auth-Token': token}

    def get_bandwidth(self):
        """Poll the configured outside interface and write its bitrates to InfluxDB."""
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        if not self.session.headers:
            # No auth token was obtained; nothing useful can be polled
            return
        endpoint = '/api/monitoring/device/interfaces/' + self.firewall.outside_interface
        request = self.session.prepare_request(Request('GET', self.firewall.url + endpoint))
        stats = connection_handler(self.session, request, self.firewall.verify_ssl)
        if not stats:
            return
        self.dbmanager.write_points([
            {
                "measurement": "Cisco ASA",
                "tags": {
                    "interface": self.firewall.outside_interface
                },
                "time": self.now,
                "fields": {
                    "upload_bitrate": stats['outputBitRate'],
                    "download_bitrate": stats['inputBitRate']
                }
            }
        ])

View file

@ -5,8 +5,9 @@ from influxdb import InfluxDBClient
class DBManager(object):
def __init__(self, server):
self.server = server
self.influx = InfluxDBClient(self.server.url, self.server.port, self.server.username, self.server.password,
'varken')
self.influx = InfluxDBClient(host=self.server.url, port=self.server.port, username=self.server.username,
password=self.server.password, ssl=self.server.ssl, database='varken',
verify_ssl=self.server.verify_ssl)
databases = [db['name'] for db in self.influx.get_list_database()]
self.logger = getLogger()

View file

@ -1,6 +1,7 @@
from hashlib import md5
from datetime import date
from logging import getLogger
from ipaddress import IPv4Address
from calendar import monthcalendar
from geoip2.database import Reader
from tarfile import open as taropen
@ -11,7 +12,6 @@ from json.decoder import JSONDecodeError
from os.path import abspath, join, basename, isdir
from urllib3.exceptions import InsecureRequestWarning
from requests.exceptions import InvalidSchema, SSLError, ConnectionError
from ipaddress import IPv4Address
logger = getLogger()
@ -36,11 +36,11 @@ class GeoIPHandler(object):
today = date.today()
try:
dbdate = date.fromtimestamp(stat(self.dbfile).st_ctime)
dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
except FileNotFoundError:
self.logger.error("Could not find GeoLite2 DB as: %s", self.dbfile)
self.download()
dbdate = date.fromtimestamp(stat(self.dbfile).st_ctime)
dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
first_wednesday_day = [week[2:3][0] for week in monthcalendar(today.year, today.month) if week[2:3][0] != 0][0]
first_wednesday_date = date(today.year, today.month, first_wednesday_day)
@ -90,7 +90,8 @@ def rfc1918_ip_check(ip):
return rfc1918_ip
def connection_handler(session, request, verify):
def connection_handler(session, request, verify, as_is_reply=False):
air = as_is_reply
s = session
r = request
v = verify
@ -109,11 +110,9 @@ def connection_handler(session, request, verify):
return_json = get.json()
except JSONDecodeError:
logger.error('No JSON response. Response is: %s', get.text)
# 204 No Content is for ASA only
elif get.status_code == 204:
if get.headers['X-Auth-Token']:
return get.headers['X-Auth-Token']
if air:
return get
except InvalidSchema:
logger.error("You added http(s):// in the config file. Don't do that.")

View file

@ -4,10 +4,10 @@ from os.path import join, exists
from re import match, compile, IGNORECASE
from configparser import ConfigParser, NoOptionError, NoSectionError
from varken.helpers import clean_sid_check, rfc1918_ip_check
from varken.structures import SickChillServer
from varken.varkenlogger import BlacklistFilter
from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer, CiscoASAFirewall
from varken.structures import SickChillServer, UniFiServer
from varken.helpers import clean_sid_check, rfc1918_ip_check
from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer
class INIParser(object):
@ -15,7 +15,7 @@ class INIParser(object):
self.config = None
self.data_folder = data_folder
self.filtered_strings = None
self.services = ['sonarr', 'radarr', 'ombi', 'tautulli', 'sickchill', 'ciscoasa']
self.services = ['sonarr', 'radarr', 'ombi', 'tautulli', 'sickchill', 'unifi']
self.logger = getLogger()
self.influx_server = InfluxServer()
@ -141,13 +141,15 @@ class INIParser(object):
self.config_blacklist()
# Parse InfluxDB options
url = self.url_check(self.config.get('influxdb', 'url'), include_port=False, section='influxdb')
port = self.config.getint('influxdb', 'port')
ssl = self.config.getboolean('influxdb', 'ssl')
verify_ssl = self.config.getboolean('influxdb', 'verify_ssl')
username = self.config.get('influxdb', 'username')
password = self.config.get('influxdb', 'password')
self.influx_server = InfluxServer(url=url, port=port, username=username, password=password)
self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
verify_ssl=verify_ssl)
# Check for all enabled services
for service in self.services:
@ -167,29 +169,26 @@ class INIParser(object):
url = self.url_check(self.config.get(section, 'url'), section=section)
apikey = None
if service != 'ciscoasa':
if service not in ['ciscoasa', 'unifi']:
apikey = self.config.get(section, 'apikey')
scheme = 'https://' if self.config.getboolean(section, 'ssl') else 'http://'
verify_ssl = self.config.getboolean(section, 'verify_ssl')
if scheme != 'https://':
verify_ssl = False
if service == 'sonarr':
if service in ['sonarr', 'radarr']:
queue = self.config.getboolean(section, 'queue')
queue_run_seconds = self.config.getint(section, 'queue_run_seconds')
if service == 'sonarr':
missing_days = self.config.getint(section, 'missing_days')
future_days = self.config.getint(section, 'future_days')
missing_days_run_seconds = self.config.getint(section, 'missing_days_run_seconds')
future_days_run_seconds = self.config.getint(section, 'future_days_run_seconds')
queue_run_seconds = self.config.getint(section, 'queue_run_seconds')
server = SonarrServer(id=server_id, url=scheme + url, api_key=apikey, verify_ssl=verify_ssl,
missing_days=missing_days, future_days=future_days,
missing_days_run_seconds=missing_days_run_seconds,
@ -197,12 +196,7 @@ class INIParser(object):
queue=queue, queue_run_seconds=queue_run_seconds)
if service == 'radarr':
queue = self.config.getboolean(section, 'queue')
queue_run_seconds = self.config.getint(section, 'queue_run_seconds')
get_missing = self.config.getboolean(section, 'get_missing')
get_missing_run_seconds = self.config.getint(section, 'get_missing_run_seconds')
server = RadarrServer(id=server_id, url=scheme + url, api_key=apikey, verify_ssl=verify_ssl,
@ -212,18 +206,17 @@ class INIParser(object):
if service == 'tautulli':
fallback_ip = self.config.get(section, 'fallback_ip')
get_stats = self.config.getboolean(section, 'get_stats')
get_activity = self.config.getboolean(section, 'get_activity')
get_activity_run_seconds = self.config.getint(section, 'get_activity_run_seconds')
get_stats = self.config.getboolean(section, 'get_stats')
get_stats_run_seconds = self.config.getint(section, 'get_stats_run_seconds')
invalid_wan_ip = rfc1918_ip_check(fallback_ip)
if invalid_wan_ip:
self.logger.error('Invalid failback_ip [%s] set for %s-%s!', fallback_ip, service, server_id)
self.logger.error('Invalid fallback_ip [%s] set for %s-%s!', fallback_ip, service,
server_id)
exit(1)
server = TautulliServer(id=server_id, url=scheme + url, api_key=apikey,
@ -233,17 +226,13 @@ class INIParser(object):
get_stats_run_seconds=get_stats_run_seconds)
if service == 'ombi':
issue_status_counts = self.config.getboolean(section, 'get_issue_status_counts')
request_type_counts = self.config.getboolean(section, 'get_request_type_counts')
request_type_run_seconds = self.config.getint(section, 'request_type_run_seconds')
request_total_counts = self.config.getboolean(section, 'get_request_total_counts')
request_total_run_seconds = self.config.getint(section, 'request_total_run_seconds')
issue_status_counts = self.config.getboolean(section, 'get_issue_status_counts')
issue_status_run_seconds = self.config.getint(section, 'issue_status_run_seconds')
request_type_run_seconds = self.config.getint(section, 'request_type_run_seconds')
request_total_run_seconds = self.config.getint(section, 'request_total_run_seconds')
server = OmbiServer(id=server_id, url=scheme + url, api_key=apikey, verify_ssl=verify_ssl,
request_type_counts=request_type_counts,
@ -255,29 +244,24 @@ class INIParser(object):
if service == 'sickchill':
get_missing = self.config.getboolean(section, 'get_missing')
get_missing_run_seconds = self.config.getint(section, 'get_missing_run_seconds')
server = SickChillServer(id=server_id, url=scheme + url, api_key=apikey,
verify_ssl=verify_ssl, get_missing=get_missing,
get_missing_run_seconds=get_missing_run_seconds)
if service == 'ciscoasa':
if service == 'unifi':
username = self.config.get(section, 'username')
password = self.config.get(section, 'password')
site = self.config.get(section, 'site').lower()
usg_name = self.config.get(section, 'usg_name')
get_usg_stats_run_seconds = self.config.getint(section, 'get_usg_stats_run_seconds')
outside_interface = self.config.get(section, 'outside_interface')
get_bandwidth_run_seconds = self.config.getint(section, 'get_bandwidth_run_seconds')
server = CiscoASAFirewall(id=server_id, url=scheme + url, verify_ssl=verify_ssl,
username=username, password=password,
outside_interface=outside_interface,
get_bandwidth_run_seconds=get_bandwidth_run_seconds)
server = UniFiServer(id=server_id, url=scheme + url, verify_ssl=verify_ssl, site=site,
username=username, password=password, usg_name=usg_name,
get_usg_stats_run_seconds=get_usg_stats_run_seconds)
getattr(self, f'{service}_servers').append(server)
except NoOptionError as e:
self.logger.error('Missing key in %s. Error: %s', section, e)
self.rectify_ini()

View file

@ -14,8 +14,10 @@ if version_info < (3, 6, 2):
class InfluxServer(NamedTuple):
    # Connection settings for the InfluxDB server Varken writes to.
    # All fields are forwarded to InfluxDBClient by DBManager.
    password: str = 'root'
    port: int = 8086
    ssl: bool = False          # forwarded as InfluxDBClient(ssl=...)
    url: str = 'localhost'
    username: str = 'root'
    verify_ssl: bool = False   # forwarded as InfluxDBClient(verify_ssl=...)
class SonarrServer(NamedTuple):
@ -76,13 +78,14 @@ class SickChillServer(NamedTuple):
verify_ssl: bool = False
class CiscoASAFirewall(NamedTuple):
get_bandwidth_run_seconds: int = 30
class UniFiServer(NamedTuple):
get_usg_stats_run_seconds: int = 30
id: int = None
outside_interface: str = None
password: str = 'cisco'
url: str = '192.168.1.1'
username: str = 'cisco'
password: str = 'ubnt'
site: str = None
url: str = 'unifi.domain.tld:8443'
username: str = 'ubnt'
usg_name: str = None
verify_ssl: bool = False
@ -168,6 +171,8 @@ class OmbiMovieRequest(NamedTuple):
subscribed: bool = None
theMovieDbId: int = None
title: str = None
langCode: str = None
languageCode: str = None
# Sonarr

View file

@ -1,4 +1,3 @@
from os import _exit
from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone
@ -17,6 +16,7 @@ class TautulliAPI(object):
self.session.params = {'apikey': self.server.api_key}
self.endpoint = '/api/v2'
self.logger = getLogger()
self.my_ip = None
def __repr__(self):
return f"<tautulli-{self.server.id}>"
@ -34,6 +34,11 @@ class TautulliAPI(object):
get = g['response']['data']
# Remove erroneous key from sessions
for session in get['sessions']:
if session.get('_cache_time'):
del session['_cache_time']
try:
sessions = [TautulliStream(**session) for session in get['sessions']]
except TypeError as e:
@ -46,22 +51,27 @@ class TautulliAPI(object):
getattr(session, 'ip_address_public')
except AttributeError:
self.logger.error('Public IP attribute missing!!! Do you have an old version of Tautulli (v1)?')
_exit(1)
exit(1)
try:
geodata = self.geoiphandler.lookup(session.ip_address_public)
except (ValueError, AddressNotFoundError):
if self.server.fallback_ip:
# Try the failback ip in the config file
self.logger.debug('Public IP missing for Tautulli session...')
if not self.my_ip:
# Try the fallback ip in the config file
try:
self.logger.debug('Atempting to use the failback IP...')
geodata = self.geoiphandler.lookup(self.server.fallback_ip)
except AddressNotFoundError as e:
self.logger.error('%s', e)
_exit(1)
self.my_ip = self.session.get('http://ip.42.pl/raw').text
self.logger.debug('Looked the public IP and set it to %s', self.my_ip)
geodata = self.geoiphandler.lookup(self.my_ip)
else:
my_ip = self.session.get('http://ip.42.pl/raw').text
geodata = self.geoiphandler.lookup(my_ip)
geodata = self.geoiphandler.lookup(self.my_ip)
if not all([geodata.location.latitude, geodata.location.longitude]):
latitude = 37.234332396

75
varken/unifi.py Normal file
View file

@ -0,0 +1,75 @@
from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone
from varken.helpers import connection_handler
class UniFiAPI(object):
    """Collects USG (UniFi Security Gateway) statistics from a UniFi
    controller and writes them to InfluxDB via the supplied DBManager."""

    def __init__(self, server, dbmanager):
        self.dbmanager = dbmanager
        self.server = server
        # Create session to reduce server web thread load, and globally define pageSize for all requests
        self.session = Session()
        self.logger = getLogger()
        self.get_cookie()

    def __repr__(self):
        return f"<unifi-{self.server.id}>"

    def get_cookie(self):
        """Log in to the controller and attach the 'unifises' auth cookie to the session."""
        endpoint = '/api/login'
        pre_cookies = {'username': self.server.username, 'password': self.server.password, 'remember': True}
        req = self.session.prepare_request(Request('POST', self.server.url + endpoint, json=pre_cookies))
        post = connection_handler(self.session, req, self.server.verify_ssl, as_is_reply=True)

        # Bug fix: connection_handler returns None on connection/SSL errors, so
        # dereferencing post.cookies unguarded raised AttributeError. Bail out
        # on a failed login attempt instead of crashing.
        if not post or not post.cookies.get('unifises'):
            return

        cookies = {'unifises': post.cookies.get('unifises')}
        self.session.cookies.update(cookies)

    def get_usg_stats(self):
        """Fetch device stats for the configured site and write the named USG's
        WAN / speedtest / system metrics to InfluxDB.

        Returns this job's schedule tag string when the request fails (so the
        worker loop can unschedule it); returns None otherwise.
        """
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = f'/api/s/{self.server.site}/stat/device'
        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            self.logger.error("Canceling Job get_usg_stats for unifi-%s", self.server.id)
            return f"unifi-{self.server.id}-get_usg_stats"

        # NOTE(review): assumes every returned device dict has a 'name' key;
        # a nameless device would raise KeyError here -- confirm against the
        # controller API.
        devices = {device['name']: device for device in get['data']}
        if devices.get(self.server.usg_name):
            device = devices[self.server.usg_name]
        else:
            self.logger.error("Could not find a USG named %s from your UniFi Controller", self.server.usg_name)
            return

        # NOTE(review): field extraction assumes the USG payload exposes
        # 'wan1', 'speedtest-status', 'sys_stats' and 'system-stats' -- verify
        # on controllers where speedtest has never run.
        influx_payload = [
            {
                "measurement": "UniFi",
                "tags": {
                    "model": device['model'],
                    "name": device['name']
                },
                "time": now,
                "fields": {
                    "bytes_current": device['wan1']['bytes-r'],
                    "rx_bytes_total": device['wan1']['rx_bytes'],
                    "rx_bytes_current": device['wan1']['rx_bytes-r'],
                    "tx_bytes_total": device['wan1']['tx_bytes'],
                    "tx_bytes_current": device['wan1']['tx_bytes-r'],
                    "speedtest_latency": device['speedtest-status']['latency'],
                    "speedtest_download": device['speedtest-status']['xput_download'],
                    "speedtest_upload": device['speedtest-status']['xput_upload'],
                    "cpu_loadavg_1": device['sys_stats']['loadavg_1'],
                    "cpu_loadavg_5": device['sys_stats']['loadavg_5'],
                    "cpu_loadavg_15": device['sys_stats']['loadavg_15'],
                    "cpu_util": device['system-stats']['cpu'],
                    "mem_util": device['system-stats']['mem'],
                }
            }
        ]
        self.dbmanager.write_points(influx_payload)