Compare commits

..

17 commits

Author SHA1 Message Date
d-mcknight
b5a83f0d34
Update automation 2023-06-21 20:11:36 -07:00
Nicholas St. Germain
ec79d22df7
update pipeline badge 2020-12-21 12:30:50 -06:00
Nicholas St. Germain
50302c8dc2
add branch to build inputs 2020-12-21 12:21:27 -06:00
Nicholas St. Germain
23bcbbf1e6
v1.7.7
Merge pull request #191 from Boerderij/develop
2020-12-21 12:09:14 -06:00
samwiseg0
b3b1876b82
v1.7.6 (#165)
v1.7.6
2020-01-01 19:30:41 -05:00
Nicholas St. Germain
7b71a8a574
v1.7.5
v1.7.5
2019-12-11 11:12:19 -06:00
Nicholas St. Germain
86ba4f2039
Merge pull request #155 from Boerderij/develop
v1.7.4 Merge
2019-10-07 11:40:43 -05:00
Nicholas St. Germain
f3286ca8f3
v1.7.3 Merge
v1.7.3 Merge
2019-08-09 17:40:52 -05:00
Nicholas St. Germain
73410dbee5
v1.7.2 Merge
v1.7.2 Merge
2019-06-23 23:24:15 -05:00
Nicholas St. Germain
5570721dd8 Merge pull request #134 from Boerderij/develop
v1.7.1 Merge
2019-06-03 19:46:51 -05:00
Nicholas St. Germain
6d61515b7a v1.7.0 Merge
v1.7.0 Merge
2019-05-05 21:54:08 -05:00
Nicholas St. Germain
379117d976 v1.6.8 Merge
v1.6.8 Merge
2019-04-18 21:39:09 -05:00
Nicholas St. Germain
bc8565fd96 link change 2019-04-18 17:44:28 -05:00
Nicholas St. Germain
702c86dba1 v1.6.7 Merge
v1.6.7 Merge
2019-04-18 10:34:23 -05:00
samwiseg0
9590839b75 Merge pull request #116 from Boerderij/develop
v1.6.6 Merge
2019-03-11 21:12:41 -04:00
samwiseg0
d274df35d8 Merge pull request #115 from Boerderij/develop
v1.6.5 Merge
2019-03-11 19:42:01 -04:00
Nicholas St. Germain
35ff610c0d v1.6.4 Merge
v1.6.4 Merge
2019-02-03 23:53:15 -06:00
24 changed files with 237 additions and 861 deletions

1
.github/FUNDING.yml vendored
View file

@ -1 +0,0 @@
ko_fi: varken

View file

@ -1,31 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: awaiting-triage
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. ...
2. ...
3. ...
4. ...
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Environment (please complete the following information):**
- OS: [e.g. Ubuntu 18.04.1 or Docker:Tag]
- Version [e.g. v1.1]
**Additional context**
Add any other context about the problem here.

View file

@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[Feature Request]"
labels: awaiting-triage
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View file

@ -1,23 +0,0 @@
name: 'Docker Multi Login Action'
description: 'Log in to dockerhub, quay, and github container registry'
runs:
using: "composite"
steps:
- shell: bash
run: |
echo "🔑 Logging into dockerhub..."
if docker login --username ${{ fromJSON(env.secrets).DOCKERHUB_USERNAME }} --password ${{ fromJSON(env.secrets).DOCKERHUB_PASSWORD }} > /dev/null 2>&1; then
echo "🎉 Login Succeeded!"
fi
- shell: bash
run: |
echo "🔑 Logging into quay.io..."
if docker login quay.io --username ${{ fromJSON(env.secrets).QUAY_USERNAME }} --password ${{ fromJSON(env.secrets).QUAY_PASSWORD }} > /dev/null 2>&1; then
echo "🎉 Login Succeeded!"
fi
- shell: bash
run: |
echo "🔑 Logging into ghcr.io..."
if docker login ghcr.io --username ${{ fromJSON(env.secrets).GHCR_USERNAME }} --password ${{ fromJSON(env.secrets).GHCR_PASSWORD }} > /dev/null 2>&1; then
echo "🎉 Login Succeeded!"
fi

View file

@ -1,46 +0,0 @@
name: 'Docker Target Image List Generator'
description: 'A Github Action to generate a list of fully qualified target images for docker related steps'
inputs:
registries:
description: "Comma separated list of docker registries"
required: false
default: "docker.io,quay.io,ghcr.io"
images:
description: "Comma separated list of images"
required: true
tags:
description: "Comma separated list of image tags"
required: false
default: "edge"
outputs:
fully-qualified-target-images:
description: "List of fully qualified docker target images"
value: ${{ steps.gen-fqti.outputs.fully-qualified-target-images }}
runs:
using: "composite"
steps:
- name: Generate fully qualified docker target images
id: gen-fqti
shell: bash
run: |
IFS=',' read -r -a registries <<< "${{ inputs.registries }}"
IFS=',' read -r -a images <<< "${{ inputs.images }}"
IFS=',' read -r -a tags <<< "${{ inputs.tags }}"
FQTI=""
echo "Generating fully qualified docker target images for:"
echo "🐋 Registries: ${#registries[@]}"
echo "📷 Images: ${#images[@]}"
echo "🏷️ Tags: ${#tags[@]}"
echo "🧮 Total: $((${#registries[@]}*${#images[@]}*${#tags[@]}))"
for registry in "${registries[@]}"; do
for image in "${images[@]}"; do
for tag in "${tags[@]}"; do
if [ -z "$FQTI" ]; then
FQTI="${registry}/${image}:${tag}"
else
FQTI="$FQTI,${registry}/${image}:${tag}"
fi
done
done
done
echo ::set-output name=fully-qualified-target-images::${FQTI}

View file

@ -1,116 +0,0 @@
name: varken
on:
schedule:
- cron: '0 10 * * *'
push:
branches:
- master
- develop
tags:
- 'v*.*.*'
paths:
- '.github/workflows/docker.yaml'
- 'varken/**'
- 'Varken.py'
- 'Dockerfile'
pull_request:
branches:
- master
- develop
paths:
- '.github/workflows/docker.yaml'
- 'varken/**'
- 'Varken.py'
- 'Dockerfile'
workflow_dispatch:
inputs:
tag:
description: 'Use this tag instead of most recent'
required: false
ignore-existing-tag:
description: 'Ignore existing tag if "true"'
required: false
env:
IMAGES: boerderij/varken
PLATFORMS: "linux/amd64,linux/arm64,linux/arm/v7"
jobs:
lint-and-test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Lint
run: pip install flake8 && flake8 --max-line-length 120 Varken.py varken/*.py
build:
runs-on: ubuntu-latest
needs: lint-and-test
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Prepare
id: prep
run: |
VERSION=edge
if [[ $GITHUB_REF == refs/tags/* ]]; then
VERSION=${GITHUB_REF#refs/tags/v}
fi
if [ "${{ github.event_name }}" = "schedule" ]; then
VERSION=nightly
fi
if [[ ${GITHUB_REF##*/} == "develop" ]]; then
VERSION=develop
fi
TAGS="${VERSION}"
if [[ $VERSION =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
TAGS="$TAGS,latest"
fi
echo ::set-output name=version::${VERSION}
echo ::set-output name=tags::${TAGS}
echo ::set-output name=branch::${GITHUB_REF##*/}
echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
echo ::set-output name=vcs_ref::${GITHUB_SHA::8}
- uses: ./.github/actions/docker-target-image-list-action
name: Generate Target Images
id: gen-tags
with:
images: ${{ env.IMAGES }}
tags: ${{ steps.prep.outputs.tags }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
with:
platforms: ${{ env.PLATFORMS }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
with:
install: true
version: latest
driver-opts: image=moby/buildkit:master
- name: Docker Multi Login
uses: ./.github/actions/docker-multi-login-action
env:
secrets: ${{ toJSON(secrets) }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile
platforms: ${{ env.PLATFORMS }}
pull: true
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.gen-tags.outputs.fully-qualified-target-images }}
build-args: |
VERSION=${{ steps.prep.outputs.version }}
BRANCH=${{ steps.prep.outputs.branch }}
BUILD_DATE=${{ steps.prep.outputs.build_date }}
VCS_REF=${{ steps.prep.outputs.vcs_ref }}
- name: Inspect
if: ${{ github.event_name != 'pull_request' }}
run: |
IFS=',' read -r -a images <<< "${{ steps.gen-tags.outputs.fully-qualified-target-images }}"
for image in "${images[@]}"; do
docker buildx imagetools inspect ${image}
done

41
.github/workflows/docker.yml vendored Normal file
View file

@ -0,0 +1,41 @@
name: Publish Docker Containers
on:
workflow_dispatch:
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
build_and_publish_docker:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
ref: ${{ github.ref }}
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata for base Docker
id: base_meta
uses: docker/metadata-action@v2
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
- name: Build and push Docker image
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
with:
context: .
push: true
tags: ${{ steps.base_meta.outputs.tags }}
labels: ${{ steps.base_meta.outputs.labels }}

View file

@ -1,19 +0,0 @@
name: 'Invalid Template'
on:
issues:
types: [labeled, unlabeled, reopened]
jobs:
support:
runs-on: ubuntu-latest
steps:
- uses: dessant/support-requests@v2
with:
github-token: ${{ github.token }}
support-label: 'invalid:template-incomplete'
issue-comment: >
:wave: @{issue-author}, please edit your issue and follow the template provided.
close-issue: false
lock-issue: false
issue-lock-reason: 'resolved'

View file

@ -1,25 +0,0 @@
name: 'Support Request'
on:
issues:
types: [labeled, unlabeled, reopened]
jobs:
support:
runs-on: ubuntu-latest
steps:
- uses: dessant/support-requests@v2
with:
github-token: ${{ github.token }}
support-label: 'support'
issue-comment: >
:wave: @{issue-author}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears
to be a support request. Please use our support channels
to get help with Varken!
- [Discord](https://discord.gg/VjZ6qSM)
- [Discord Quick Access](http://cyborg.decreator.dev/channels/518970285773422592/530424560504537105/)
close-issue: true
lock-issue: false
issue-lock-reason: 'off-topic'

View file

@ -1,4 +1,4 @@
FROM python:3.10.5-alpine FROM python:3.9.1-alpine
ENV DEBUG="True" \ ENV DEBUG="True" \
DATA_FOLDER="/config" \ DATA_FOLDER="/config" \
@ -8,11 +8,11 @@ ENV DEBUG="True" \
LABEL maintainer="dirtycajunrice,samwiseg0" \ LABEL maintainer="dirtycajunrice,samwiseg0" \
org.opencontainers.image.created=$BUILD_DATE \ org.opencontainers.image.created=$BUILD_DATE \
org.opencontainers.image.url="https://github.com/Boerderij/Varken" \ org.opencontainers.image.url="https://github.com/d-mcknight/Varken" \
org.opencontainers.image.source="https://github.com/Boerderij/Varken" \ org.opencontainers.image.source="https://github.com/d-mcknight/Varken" \
org.opencontainers.image.version=$VERSION \ org.opencontainers.image.version=$VERSION \
org.opencontainers.image.revision=$VCS_REF \ org.opencontainers.image.revision=$VCS_REF \
org.opencontainers.image.vendor="boerderij" \ org.opencontainers.image.vendor="d-mcknight" \
org.opencontainers.image.title="varken" \ org.opencontainers.image.title="varken" \
org.opencontainers.image.description="Varken is a standalone application to aggregate data from the Plex ecosystem into InfluxDB using Grafana for a frontend" \ org.opencontainers.image.description="Varken is a standalone application to aggregate data from the Plex ecosystem into InfluxDB using Grafana for a frontend" \
org.opencontainers.image.licenses="MIT" org.opencontainers.image.licenses="MIT"

View file

@ -17,7 +17,7 @@ ecosystem into InfluxDB using Grafana for a frontend
Requirements: Requirements:
* [Python 3.6.7+](https://www.python.org/downloads/release/python-367/) * [Python 3.6.7+](https://www.python.org/downloads/release/python-367/)
* [Python3-pip](https://pip.pypa.io/en/stable/installing/) * [Python3-pip](https://pip.pypa.io/en/stable/installing/)
* [InfluxDB 1.8.x or 2.0.x](https://www.influxdata.com/) * [InfluxDB 1.8.x](https://www.influxdata.com/)
* [Grafana](https://grafana.com/) * [Grafana](https://grafana.com/)
<p align="center"> <p align="center">

View file

@ -1,21 +1,19 @@
import platform import platform
import schedule import schedule
import distro
from time import sleep from time import sleep
from queue import Queue from queue import Queue
from sys import version from sys import version
from threading import Thread from threading import Thread
from os import environ as env from os import environ as env
from os import access, R_OK, getenv from os import access, R_OK, getenv
from distro import linux_distribution
from os.path import isdir, abspath, dirname, join from os.path import isdir, abspath, dirname, join
from argparse import ArgumentParser, RawTextHelpFormatter from argparse import ArgumentParser, RawTextHelpFormatter
from logging import getLogger, StreamHandler, Formatter, DEBUG from logging import getLogger, StreamHandler, Formatter, DEBUG
# Needed to check version of python # Needed to check version of python
from varken import structures # noqa from varken import structures # noqa
from varken.ombi import OmbiAPI from varken.ombi import OmbiAPI
from varken.overseerr import OverseerrAPI
from varken.unifi import UniFiAPI from varken.unifi import UniFiAPI
from varken import VERSION, BRANCH, BUILD_DATE from varken import VERSION, BRANCH, BUILD_DATE
from varken.sonarr import SonarrAPI from varken.sonarr import SonarrAPI
@ -23,14 +21,13 @@ from varken.radarr import RadarrAPI
from varken.lidarr import LidarrAPI from varken.lidarr import LidarrAPI
from varken.iniparser import INIParser from varken.iniparser import INIParser
from varken.dbmanager import DBManager from varken.dbmanager import DBManager
from varken.influxdb2manager import InfluxDB2Manager
from varken.helpers import GeoIPHandler from varken.helpers import GeoIPHandler
from varken.tautulli import TautulliAPI from varken.tautulli import TautulliAPI
from varken.sickchill import SickChillAPI from varken.sickchill import SickChillAPI
from varken.varkenlogger import VarkenLogger from varken.varkenlogger import VarkenLogger
PLATFORM_LINUX_DISTRO = ' '.join(distro.id() + distro.version() + distro.name()) PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
def thread(job, **kwargs): def thread(job, **kwargs):
@ -93,15 +90,7 @@ if __name__ == "__main__":
vl.logger.info("Varken v%s-%s %s", VERSION, BRANCH, BUILD_DATE) vl.logger.info("Varken v%s-%s %s", VERSION, BRANCH, BUILD_DATE)
CONFIG = INIParser(DATA_FOLDER) CONFIG = INIParser(DATA_FOLDER)
DBMANAGER = DBManager(CONFIG.influx_server)
if CONFIG.influx2_enabled:
# Use INFLUX version 2
vl.logger.info('Using INFLUXDBv2')
DBMANAGER = InfluxDB2Manager(CONFIG.influx_server)
else:
vl.logger.info('Using INFLUXDB')
DBMANAGER = DBManager(CONFIG.influx_server)
QUEUE = Queue() QUEUE = Queue()
if CONFIG.sonarr_enabled: if CONFIG.sonarr_enabled:
@ -167,18 +156,6 @@ if __name__ == "__main__":
at_time = schedule.every(server.issue_status_run_seconds).seconds at_time = schedule.every(server.issue_status_run_seconds).seconds
at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id)) at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id))
if CONFIG.overseerr_enabled:
for server in CONFIG.overseerr_servers:
OVERSEER = OverseerrAPI(server, DBMANAGER)
if server.get_request_total_counts:
at_time = schedule.every(server.request_total_run_seconds).seconds
at_time.do(thread, OVERSEER.get_request_counts).tag("overseerr-{}-get_request_counts"
.format(server.id))
if server.num_latest_requests_to_fetch > 0:
at_time = schedule.every(server.num_latest_requests_seconds).seconds
at_time.do(thread, OVERSEER.get_latest_requests).tag("overseerr-{}-get_latest_requests"
.format(server.id))
if CONFIG.sickchill_enabled: if CONFIG.sickchill_enabled:
for server in CONFIG.sickchill_servers: for server in CONFIG.sickchill_servers:
SICKCHILL = SickChillAPI(server, DBMANAGER) SICKCHILL = SickChillAPI(server, DBMANAGER)
@ -194,8 +171,7 @@ if __name__ == "__main__":
# Run all on startup # Run all on startup
SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled, SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled,
CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled, CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled]
CONFIG.overseerr_enabled]
if not [enabled for enabled in SERVICES_ENABLED if enabled]: if not [enabled for enabled in SERVICES_ENABLED if enabled]:
vl.logger.error("All services disabled. Exiting") vl.logger.error("All services disabled. Exiting")
exit(1) exit(1)

View file

@ -3,12 +3,10 @@ sonarr_server_ids = 1,2
radarr_server_ids = 1,2 radarr_server_ids = 1,2
lidarr_server_ids = false lidarr_server_ids = false
tautulli_server_ids = 1 tautulli_server_ids = 1
ombi_server_ids = false ombi_server_ids = 1
overseerr_server_ids = 1
sickchill_server_ids = false sickchill_server_ids = false
unifi_server_ids = false unifi_server_ids = false
maxmind_license_key = xxxxxxxxxxxxxxxx maxmind_license_key = xxxxxxxxxxxxxxxx
influx2_enabled = false
[influxdb] [influxdb]
url = influxdb.domain.tld url = influxdb.domain.tld
@ -18,15 +16,6 @@ verify_ssl = false
username = root username = root
password = root password = root
[influx2]
url = influxdb2.domain.tld
org = ORG
token = TOKEN
timeout = 10000
ssl = false
verify_ssl = false
bucket = varken
[tautulli-1] [tautulli-1]
url = tautulli.domain.tld:8181 url = tautulli.domain.tld:8181
fallback_ip = 1.1.1.1 fallback_ip = 1.1.1.1
@ -106,17 +95,6 @@ request_total_run_seconds = 300
get_issue_status_counts = true get_issue_status_counts = true
issue_status_run_seconds = 300 issue_status_run_seconds = 300
[overseerr-1]
url = overseerr.domain.tld
apikey = xxxxxxxxxxxxxxxx
ssl = false
verify_ssl = false
get_request_total_counts = true
request_total_run_seconds = 30
get_latest_requests = true
num_latest_requests_to_fetch = 10
num_latest_requests_seconds = 30
[sickchill-1] [sickchill-1]
url = sickchill.domain.tld:8081 url = sickchill.domain.tld:8081
apikey = xxxxxxxxxxxxxxxx apikey = xxxxxxxxxxxxxxxx

View file

@ -6,7 +6,7 @@ services:
influxdb: influxdb:
hostname: influxdb hostname: influxdb
container_name: influxdb container_name: influxdb
image: influxdb:1.8 image: influxdb
networks: networks:
- internal - internal
volumes: volumes:
@ -22,6 +22,91 @@ services:
- /path/to/docker-varken/config-folder:/config - /path/to/docker-varken/config-folder:/config
environment: environment:
- TZ=America/Chicago - TZ=America/Chicago
- VRKN_GLOBAL_SONARR_SERVER_IDS=1,2
- VRKN_GLOBAL_RADARR_SERVER_IDS=1,2
- VRKN_GLOBAL_LIDARR_SERVER_IDS=false
- VRKN_GLOBAL_TAUTULLI_SERVER_IDS=1
- VRKN_GLOBAL_OMBI_SERVER_IDS=1
- VRKN_GLOBAL_SICKCHILL_SERVER_IDS=false
- VRKN_GLOBAL_UNIFI_SERVER_IDS=false
- VRKN_GLOBAL_MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx
- VRKN_INFLUXDB_URL=influxdb.domain.tld
- VRKN_INFLUXDB_PORT=8086
- VRKN_INFLUXDB_SSL=false
- VRKN_INFLUXDB_VERIFY_SSL=false
- VRKN_INFLUXDB_USERNAME=root
- VRKN_INFLUXDB_PASSWORD=root
- VRKN_TAUTULLI_1_URL=tautulli.domain.tld:8181
- VRKN_TAUTULLI_1_FALLBACK_IP=1.1.1.1
- VRKN_TAUTULLI_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_TAUTULLI_1_SSL=false
- VRKN_TAUTULLI_1_VERIFY_SSL=false
- VRKN_TAUTULLI_1_GET_ACTIVITY=true
- VRKN_TAUTULLI_1_GET_ACTIVITY_RUN_SECONDS=30
- VRKN_TAUTULLI_1_GET_STATS=true
- VRKN_TAUTULLI_1_GET_STATS_RUN_SECONDS=3600
- VRKN_SONARR_1_URL=sonarr1.domain.tld:8989
- VRKN_SONARR_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_SONARR_1_SSL=false
- VRKN_SONARR_1_VERIFY_SSL=false
- VRKN_SONARR_1_MISSING_DAYS=7
- VRKN_SONARR_1_MISSING_DAYS_RUN_SECONDS=300
- VRKN_SONARR_1_FUTURE_DAYS=1
- VRKN_SONARR_1_FUTURE_DAYS_RUN_SECONDS=300
- VRKN_SONARR_1_QUEUE=true
- VRKN_SONARR_1_QUEUE_RUN_SECONDS=300
- VRKN_SONARR_2_URL=sonarr2.domain.tld:8989
- VRKN_SONARR_2_APIKEY=yyyyyyyyyyyyyyyy
- VRKN_SONARR_2_SSL=false
- VRKN_SONARR_2_VERIFY_SSL=false
- VRKN_SONARR_2_MISSING_DAYS=7
- VRKN_SONARR_2_MISSING_DAYS_RUN_SECONDS=300
- VRKN_SONARR_2_FUTURE_DAYS=1
- VRKN_SONARR_2_FUTURE_DAYS_RUN_SECONDS=300
- VRKN_SONARR_2_QUEUE=true
- VRKN_SONARR_2_QUEUE_RUN_SECONDS=300
- VRKN_RADARR_1_URL=radarr1.domain.tld
- VRKN_RADARR_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_RADARR_1_SSL=false
- VRKN_RADARR_1_VERIFY_SSL=false
- VRKN_RADARR_1_QUEUE=true
- VRKN_RADARR_1_QUEUE_RUN_SECONDS=300
- VRKN_RADARR_1_GET_MISSING=true
- VRKN_RADARR_1_GET_MISSING_RUN_SECONDS=300
- VRKN_RADARR_2_URL=radarr2.domain.tld
- VRKN_RADARR_2_APIKEY=yyyyyyyyyyyyyyyy
- VRKN_RADARR_2_SSL=false
- VRKN_RADARR_2_VERIFY_SSL=false
- VRKN_RADARR_2_QUEUE=true
- VRKN_RADARR_2_QUEUE_RUN_SECONDS=300
- VRKN_RADARR_2_GET_MISSING=true
- VRKN_RADARR_2_GET_MISSING_RUN_SECONDS=300
- VRKN_LIDARR_1_URL=lidarr1.domain.tld:8686
- VRKN_LIDARR_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_LIDARR_1_SSL=false
- VRKN_LIDARR_1_VERIFY_SSL=false
- VRKN_LIDARR_1_MISSING_DAYS=30
- VRKN_LIDARR_1_MISSING_DAYS_RUN_SECONDS=300
- VRKN_LIDARR_1_FUTURE_DAYS=30
- VRKN_LIDARR_1_FUTURE_DAYS_RUN_SECONDS=300
- VRKN_LIDARR_1_QUEUE=true
- VRKN_LIDARR_1_QUEUE_RUN_SECONDS=300
- VRKN_OMBI_1_URL=ombi.domain.tld
- VRKN_OMBI_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_OMBI_1_SSL=false
- VRKN_OMBI_1_VERIFY_SSL=false
- VRKN_OMBI_1_GET_REQUEST_TYPE_COUNTS=true
- VRKN_OMBI_1_REQUEST_TYPE_RUN_SECONDS=300
- VRKN_OMBI_1_GET_REQUEST_TOTAL_COUNTS=true
- VRKN_OMBI_1_REQUEST_TOTAL_RUN_SECONDS=300
- VRKN_OMBI_1_GET_ISSUE_STATUS_COUNTS=true
- VRKN_OMBI_1_ISSUE_STATUS_RUN_SECONDS=300
- VRKN_SICKCHILL_1_URL=sickchill.domain.tld:8081
- VRKN_SICKCHILL_1_APIKEY=xxxxxxxxxxxxxxxx
- VRKN_SICKCHILL_1_SSL=false
- VRKN_SICKCHILL_1_VERIFY_SSL=false
- VRKN_SICKCHILL_1_GET_MISSING=true
- VRKN_SICKCHILL_1_GET_MISSING_RUN_SECONDS=300
depends_on: depends_on:
- influxdb - influxdb
restart: unless-stopped restart: unless-stopped
@ -33,7 +118,7 @@ services:
- internal - internal
ports: ports:
- 3000:3000 - 3000:3000
volumes: volumes:
- /path/to/docker-grafana/config-folder:/config - /path/to/docker-grafana/config-folder:/config
environment: environment:
- GF_PATHS_DATA=/config/data - GF_PATHS_DATA=/config/data
@ -43,4 +128,4 @@ services:
depends_on: depends_on:
- influxdb - influxdb
- varken - varken
restart: unless-stopped restart: unless-stopped

View file

@ -2,10 +2,9 @@
# Potential requirements. # Potential requirements.
# pip3 install -r requirements.txt # pip3 install -r requirements.txt
#--------------------------------------------------------- #---------------------------------------------------------
requests==2.28.1 requests==2.21
geoip2==2.9.0 geoip2==2.9.0
influxdb==5.2.0 influxdb==5.2.0
schedule==1.1.0 schedule==0.6.0
distro==1.4.0 distro==1.4.0
urllib3==1.26.10 urllib3==1.24.2
influxdb-client==1.14.0

View file

@ -41,7 +41,7 @@ if __name__ == "__main__":
DBMANAGER = DBManager(CONFIG.influx_server) DBMANAGER = DBManager(CONFIG.influx_server)
if CONFIG.tautulli_enabled: if CONFIG.tautulli_enabled:
GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key) GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
for server in CONFIG.tautulli_servers: for server in CONFIG.tautulli_servers:
TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER) TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
TAUTULLI.get_historical(days=opts.days) TAUTULLI.get_historical(days=opts.days)

View file

@ -51,6 +51,5 @@
<Labels/> <Labels/>
<Config Name="PGID" Target="PGID" Default="" Mode="" Description="Container Variable: PGID" Type="Variable" Display="always" Required="true" Mask="false">99</Config> <Config Name="PGID" Target="PGID" Default="" Mode="" Description="Container Variable: PGID" Type="Variable" Display="always" Required="true" Mask="false">99</Config>
<Config Name="PUID" Target="PUID" Default="" Mode="" Description="Container Variable: PUID" Type="Variable" Display="always" Required="true" Mask="false">100</Config> <Config Name="PUID" Target="PUID" Default="" Mode="" Description="Container Variable: PUID" Type="Variable" Display="always" Required="true" Mask="false">100</Config>
<Config Name="Debug" Target="DEBUG" Default="False" Mode="" Description="Turn Debug on or off" Type="Variable" Display="always" Required="false" Mask="false">False</Config>
<Config Name="Varken DataDir" Target="/config" Default="" Mode="rw" Description="Container Path: /config" Type="Path" Display="advanced-hide" Required="true" Mask="false">/mnt/user/appdata/varken</Config> <Config Name="Varken DataDir" Target="/config" Default="" Mode="rw" Description="Container Path: /config" Type="Path" Display="advanced-hide" Required="true" Mask="false">/mnt/user/appdata/varken</Config>
</Container> </Container>

View file

@ -1,48 +0,0 @@
from sys import exit
from logging import getLogger
import influxdb_client
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS
class InfluxDB2Manager(object):
def __init__(self, server):
self.server = server
self.logger = getLogger()
if self.server.url == "influxdb2.domain.tld":
self.logger.critical("You have not configured your varken.ini. Please read Wiki page for configuration")
exit()
self.influx = InfluxDBClient(url=self.server.url, token=self.server.token, org=self.server.org,
timeout=self.server.timeout, verify_ssl=self.server.verify_ssl,
ssl_ca_cert=self.server.ssl)
self.influx_write_api = self.influx.write_api(write_options=SYNCHRONOUS)
# Create the bucket if needed
bucket_api = self.influx.buckets_api()
try:
bucket = bucket_api.find_bucket_by_name(self.server.bucket)
if bucket is None:
self.logger.info('Creating bucket %s', self.server.bucket)
org_api = influxdb_client.service.organizations_service.OrganizationsService(self.influx.api_client)
orgs = org_api.get_orgs()
for org in orgs.orgs:
if org.name == self.server.org:
my_org = org
self.influx.buckets_api().create_bucket(bucket_name=self.server.bucket, org_id=my_org.id)
except Exception as e:
self.logger.error('Failed creating new InfluxDB bucket! Error: %s', e)
def write_points(self, data):
self.logger.info('Writing Data to InfluxDBv2 %s', data)
try:
self.influx_write_api.write(bucket=self.server.bucket, record=data)
except Exception as e:
self.logger.exception('Error writing data to influxdb2. Dropping this set of data. '
'Check your database! Error: %s', e)

View file

@ -9,7 +9,7 @@ from configparser import ConfigParser, NoOptionError, NoSectionError
from varken.varkenlogger import BlacklistFilter from varken.varkenlogger import BlacklistFilter
from varken.structures import SickChillServer, UniFiServer from varken.structures import SickChillServer, UniFiServer
from varken.helpers import clean_sid_check, rfc1918_ip_check, boolcheck from varken.helpers import clean_sid_check, rfc1918_ip_check, boolcheck
from varken.structures import SonarrServer, RadarrServer, OmbiServer, OverseerrServer, TautulliServer, InfluxServer, Influx2Server from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer
class INIParser(object): class INIParser(object):
@ -17,7 +17,7 @@ class INIParser(object):
self.config = None self.config = None
self.data_folder = data_folder self.data_folder = data_folder
self.filtered_strings = None self.filtered_strings = None
self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'overseerr', 'tautulli', 'sickchill', 'unifi'] self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'tautulli', 'sickchill', 'unifi']
self.logger = getLogger() self.logger = getLogger()
self.influx_server = InfluxServer() self.influx_server = InfluxServer()
@ -107,7 +107,6 @@ class INIParser(object):
valid = match(regex, url_check) is not None valid = match(regex, url_check) is not None
if not valid: if not valid:
return url_check
if inc_port: if inc_port:
self.logger.error('%s is invalid in module [%s]! URL must host/IP and ' self.logger.error('%s is invalid in module [%s]! URL must host/IP and '
'port if not 80 or 443. ie. localhost:8080', 'port if not 80 or 443. ie. localhost:8080',
@ -145,47 +144,23 @@ class INIParser(object):
if read_file: if read_file:
self.config = self.read_file('varken.ini') self.config = self.read_file('varken.ini')
self.config_blacklist() self.config_blacklist()
# Parse InfluxDB options # Parse InfluxDB options
self.influx2_enabled = env.get('VRKN_GLOBAL_INFLUXDB2_ENABLED', try:
self.config.getboolean('global', 'influx2_enabled')) url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
include_port=False, section='influxdb')
port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
if self.influx2_enabled: username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
# Use INFLUX version 2 password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
try: except NoOptionError as e:
url = self.url_check(env.get('VRKN_INFLUXDB2_URL', self.config.get('influx2', 'url')), self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
section='influx2', include_port=False) self.rectify_ini()
ssl = boolcheck(env.get('VRKN_INFLUXDB2_SSL', self.config.get('influx2', 'ssl'))) return
verify_ssl = boolcheck(env.get('VRKN_INFLUXDB2_VERIFY_SSL', self.config.get('influx2', 'verify_ssl')))
org = env.get('VRKN_INFLUXDB2_ORG', self.config.get('influx2', 'org')) self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
bucket = env.get('VRKN_INFLUXDB2_BUCKET', self.config.get('influx2', 'bucket')) verify_ssl=verify_ssl)
token = env.get('VRKN_INFLUXDB2_TOKEN', self.config.get('influx2', 'token'))
timeout = env.get('VRKN_INFLUXDB2_TIMEOUT', self.config.get('influx2', 'timeout'))
except NoOptionError as e:
self.logger.error('Missing key in %s. Error: %s', "influx2", e)
self.rectify_ini()
return
self.influx_server = Influx2Server(url=url, token=token, org=org, timeout=timeout, ssl=ssl,
verify_ssl=verify_ssl, bucket=bucket)
else:
try:
url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
include_port=False, section='influxdb')
port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
except NoOptionError as e:
self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
self.rectify_ini()
return
self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
verify_ssl=verify_ssl)
# Check for all enabled services # Check for all enabled services
for service in self.services: for service in self.services:
@ -318,27 +293,6 @@ class INIParser(object):
issue_status_counts=issue_status_counts, issue_status_counts=issue_status_counts,
issue_status_run_seconds=issue_status_run_seconds) issue_status_run_seconds=issue_status_run_seconds)
if service == 'overseerr':
get_request_total_counts = boolcheck(env.get(
f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS',
self.config.get(section, 'get_request_total_counts')))
request_total_run_seconds = int(env.get(
f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS',
self.config.getint(section, 'request_total_run_seconds')))
num_latest_requests_to_fetch = int(env.get(
f'VRKN_{envsection}_GET_LATEST_REQUESTS_TO_FETCH',
self.config.getint(section, 'num_latest_requests_to_fetch')))
num_latest_requests_seconds = int(env.get(
f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS',
self.config.getint(section, 'num_latest_requests_seconds')))
server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey,
verify_ssl=verify_ssl,
get_request_total_counts=get_request_total_counts,
request_total_run_seconds=request_total_run_seconds,
num_latest_requests_to_fetch=num_latest_requests_to_fetch,
num_latest_requests_seconds=num_latest_requests_seconds)
if service == 'sickchill': if service == 'sickchill':
get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING', get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING',
self.config.get(section, 'get_missing'))) self.config.get(section, 'get_missing')))

View file

@ -1,133 +0,0 @@
from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone
from varken.helpers import connection_handler, hashit
from varken.structures import OverseerrRequestCounts
class OverseerrAPI(object):
    """Collects request metrics from an Overseerr server and writes them to InfluxDB.

    Two collectors are exposed:
      * get_request_counts  -- aggregate pending/approved/... counters
      * get_latest_requests -- one point per recent request, enriched via the
        per-title movie/tv endpoints
    """

    def __init__(self, server, dbmanager):
        # server: OverseerrServer namedtuple (id, url, api_key, verify_ssl, ...)
        # dbmanager: influx writer exposing write_points(payload)
        self.dbmanager = dbmanager
        self.server = server
        # Create session to reduce server web thread load; the API key header
        # is attached once here and reused for every request.
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
        self.logger = getLogger()

    def __repr__(self):
        return f"<overseerr-{self.server.id}>"

    def get_request_counts(self):
        """Fetch aggregate request counts and write a single influx point."""
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/request/count'

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get_req = connection_handler(self.session, req, self.server.verify_ssl)

        if not get_req:
            return

        try:
            requests = OverseerrRequestCounts(**get_req)
        except TypeError as e:
            # Guard against schema drift on the Overseerr side, consistent
            # with the structure-creation handling in the other API modules.
            self.logger.error('TypeError has occurred : %s while creating OverseerrRequestCounts structure', e)
            return

        influx_payload = [
            {
                "measurement": "Overseerr",
                "tags": {
                    "type": "Request_Counts"
                },
                "time": now,
                "fields": {
                    "pending": requests.pending,
                    "approved": requests.approved,
                    "processing": requests.processing,
                    "available": requests.available,
                    "total": requests.total,
                    "movies": requests.movie,
                    "tv": requests.tv,
                    "declined": requests.declined
                }
            }
        ]

        if influx_payload:
            self.dbmanager.write_points(influx_payload)
        else:
            self.logger.warning("No data to send to influx for overseerr-request-counts instance, discarding.")

    def get_latest_requests(self):
        """Fetch the latest N requests and write one influx point per request.

        N comes from server.num_latest_requests_to_fetch. Each request is
        enriched with title/status/requesting-user via a follow-up call to the
        movie or tv endpoint; a failed follow-up call skips that item instead
        of aborting the whole pass.
        """
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/request?take=' + str(self.server.num_latest_requests_to_fetch) + '&filter=all&sort=added'
        movie_endpoint = '/api/v1/movie/'
        tv_endpoint = '/api/v1/tv/'

        # GET THE LATEST n REQUESTS
        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get_latest_req = connection_handler(self.session, req, self.server.verify_ssl)

        # RETURN NOTHING IF NO RESULTS
        if not get_latest_req:
            self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.")
            return

        influx_payload = []

        # Request Type: Movie = 1, TV Show = 0
        for result in get_latest_req['results']:
            if result['type'] == 'tv':
                req = self.session.prepare_request(Request('GET',
                                                           self.server.url +
                                                           tv_endpoint +
                                                           str(result['media']['tmdbId'])))
                get_tv_req = connection_handler(self.session, req, self.server.verify_ssl)
                if not get_tv_req:
                    # Per-title lookup failed; skip this item rather than
                    # raising TypeError on a None response.
                    continue
                hash_id = hashit(f'{get_tv_req["id"]}{get_tv_req["name"]}')

                influx_payload.append(
                    {
                        "measurement": "Overseerr",
                        "tags": {
                            "type": "Requests",
                            "server": self.server.id,
                            "request_type": 0,
                            "status": get_tv_req['mediaInfo']['status'],
                            "title": get_tv_req['name'],
                            "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['displayName'],
                            "requested_date": get_tv_req['mediaInfo']['requests'][0]['createdAt']
                        },
                        "time": now,
                        "fields": {
                            "hash": hash_id
                        }
                    }
                )

            if result['type'] == 'movie':
                req = self.session.prepare_request(Request('GET',
                                                           self.server.url +
                                                           movie_endpoint +
                                                           str(result['media']['tmdbId'])))
                get_movie_req = connection_handler(self.session, req, self.server.verify_ssl)
                if not get_movie_req:
                    # Same guard as the tv branch: skip unreachable items.
                    continue
                hash_id = hashit(f'{get_movie_req["id"]}{get_movie_req["title"]}')

                influx_payload.append(
                    {
                        "measurement": "Overseerr",
                        "tags": {
                            "type": "Requests",
                            "server": self.server.id,
                            "request_type": 1,
                            "status": get_movie_req['mediaInfo']['status'],
                            "title": get_movie_req['title'],
                            "requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['displayName'],
                            "requested_date": get_movie_req['mediaInfo']['requests'][0]['createdAt']
                        },
                        "time": now,
                        "fields": {
                            "hash": hash_id
                        }
                    }
                )

        if influx_payload:
            self.dbmanager.write_points(influx_payload)
        else:
            self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.")

View file

@ -2,7 +2,7 @@ from logging import getLogger
from requests import Session, Request from requests import Session, Request
from datetime import datetime, timezone from datetime import datetime, timezone
from varken.structures import QueuePages, RadarrMovie, RadarrQueue from varken.structures import RadarrMovie, Queue
from varken.helpers import hashit, connection_handler from varken.helpers import hashit, connection_handler
@ -19,7 +19,7 @@ class RadarrAPI(object):
return f"<radarr-{self.server.id}>" return f"<radarr-{self.server.id}>"
def get_missing(self): def get_missing(self):
endpoint = '/api/v3/movie' endpoint = '/api/movie'
now = datetime.now(timezone.utc).astimezone().isoformat() now = datetime.now(timezone.utc).astimezone().isoformat()
influx_payload = [] influx_payload = []
missing = [] missing = []
@ -37,7 +37,7 @@ class RadarrAPI(object):
return return
for movie in movies: for movie in movies:
if movie.monitored and not movie.hasFile: if movie.monitored and not movie.downloaded:
if movie.isAvailable: if movie.isAvailable:
ma = 0 ma = 0
else: else:
@ -66,53 +66,35 @@ class RadarrAPI(object):
} }
) )
if influx_payload: self.dbmanager.write_points(influx_payload)
self.dbmanager.write_points(influx_payload)
else:
self.logger.warning("No data to send to influx for radarr-missing instance, discarding.")
def get_queue(self): def get_queue(self):
endpoint = '/api/v3/queue' endpoint = '/api/queue'
now = datetime.now(timezone.utc).astimezone().isoformat() now = datetime.now(timezone.utc).astimezone().isoformat()
influx_payload = [] influx_payload = []
pageSize = 250
params = {'pageSize': pageSize, 'includeMovie': True, 'includeUnknownMovieItems': False}
queueResponse = []
queue = [] queue = []
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
get = connection_handler(self.session, req, self.server.verify_ssl) get = connection_handler(self.session, req, self.server.verify_ssl)
if not get: if not get:
return return
response = QueuePages(**get) for movie in get:
queueResponse.extend(response.records)
while response.totalRecords > response.page * response.pageSize:
page = response.page + 1
params = {'pageSize': pageSize, 'page': page, 'includeMovie': True, 'includeUnknownMovieItems': False}
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
response = QueuePages(**get)
queueResponse.extend(response.records)
download_queue = []
for queueItem in queueResponse:
try: try:
download_queue.append(RadarrQueue(**queueItem)) movie['movie'] = RadarrMovie(**movie['movie'])
except TypeError as e: except TypeError as e:
self.logger.warning('TypeError has occurred : %s while creating RadarrQueue structure', e) self.logger.error('TypeError has occurred : %s while creating RadarrMovie structure', e)
return return
if not download_queue:
self.logger.warning("No data to send to influx for radarr-queue instance, discarding.") try:
download_queue = [Queue(**movie) for movie in get]
except TypeError as e:
self.logger.error('TypeError has occurred : %s while creating Queue structure', e)
return return
for queue_item in download_queue: for queue_item in download_queue:
movie = RadarrMovie(**queue_item.movie) movie = queue_item.movie
name = f'{movie.title} ({movie.year})' name = f'{movie.title} ({movie.year})'
@ -146,7 +128,4 @@ class RadarrAPI(object):
} }
) )
if influx_payload: self.dbmanager.write_points(influx_payload)
self.dbmanager.write_points(influx_payload)
else:
self.logger.warning("No data to send to influx for radarr-queue instance, discarding.")

View file

@ -2,7 +2,7 @@ from logging import getLogger
from requests import Session, Request from requests import Session, Request
from datetime import datetime, timezone, date, timedelta from datetime import datetime, timezone, date, timedelta
from varken.structures import SonarrEpisode, SonarrTVShow, SonarrQueue, QueuePages from varken.structures import Queue, SonarrTVShow
from varken.helpers import hashit, connection_handler from varken.helpers import hashit, connection_handler
@ -19,28 +19,16 @@ class SonarrAPI(object):
def __repr__(self): def __repr__(self):
return f"<sonarr-{self.server.id}>" return f"<sonarr-{self.server.id}>"
def get_episode(self, id):
endpoint = '/api/v3/episode'
params = {'episodeIds': id}
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
return SonarrEpisode(**get[0])
def get_calendar(self, query="Missing"): def get_calendar(self, query="Missing"):
endpoint = '/api/v3/calendar/' endpoint = '/api/calendar/'
today = str(date.today()) today = str(date.today())
last_days = str(date.today() - timedelta(days=self.server.missing_days)) last_days = str(date.today() - timedelta(days=self.server.missing_days))
future = str(date.today() + timedelta(days=self.server.future_days)) future = str(date.today() + timedelta(days=self.server.future_days))
now = datetime.now(timezone.utc).astimezone().isoformat() now = datetime.now(timezone.utc).astimezone().isoformat()
if query == "Missing": if query == "Missing":
params = {'start': last_days, 'end': today, 'includeSeries': True} params = {'start': last_days, 'end': today}
else: else:
params = {'start': today, 'end': future, 'includeSeries': True} params = {'start': today, 'end': future}
influx_payload = [] influx_payload = []
air_days = [] air_days = []
missing = [] missing = []
@ -54,24 +42,22 @@ class SonarrAPI(object):
tv_shows = [] tv_shows = []
for show in get: for show in get:
try: try:
tv_shows.append(SonarrEpisode(**show)) tv_shows.append(SonarrTVShow(**show))
except TypeError as e: except TypeError as e:
self.logger.error('TypeError has occurred : %s while creating SonarrEpisode structure for show. Data ' self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. Data '
'attempted is: %s', e, show) 'attempted is: %s', e, show)
for episode in tv_shows: for show in tv_shows:
tvShow = episode.series sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}' if show.hasFile:
if episode.hasFile:
downloaded = 1 downloaded = 1
else: else:
downloaded = 0 downloaded = 0
if query == "Missing": if query == "Missing":
if episode.monitored and not downloaded: if show.monitored and not downloaded:
missing.append((tvShow['title'], downloaded, sxe, episode.title, missing.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
episode.airDateUtc, episode.seriesId))
else: else:
air_days.append((tvShow['title'], downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId)) air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id))
for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing): for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing):
hash_id = hashit(f'{self.server.id}{series_title}{sxe}') hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
@ -95,68 +81,45 @@ class SonarrAPI(object):
} }
) )
if influx_payload: self.dbmanager.write_points(influx_payload)
self.dbmanager.write_points(influx_payload)
else:
self.logger.warning("No data to send to influx for sonarr-calendar instance, discarding.")
def get_queue(self): def get_queue(self):
influx_payload = [] influx_payload = []
endpoint = '/api/v3/queue' endpoint = '/api/queue'
now = datetime.now(timezone.utc).astimezone().isoformat() now = datetime.now(timezone.utc).astimezone().isoformat()
pageSize = 250
params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True,
'includeUnknownSeriesItems': False}
queueResponse = []
queue = [] queue = []
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
get = connection_handler(self.session, req, self.server.verify_ssl) get = connection_handler(self.session, req, self.server.verify_ssl)
if not get: if not get:
return return
response = QueuePages(**get)
queueResponse.extend(response.records)
while response.totalRecords > response.page * response.pageSize:
page = response.page + 1
params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True,
'includeUnknownSeriesItems': False}
req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
return
response = QueuePages(**get)
queueResponse.extend(response.records)
download_queue = [] download_queue = []
for queueItem in queueResponse: for show in get:
try: try:
download_queue.append(SonarrQueue(**queueItem)) download_queue.append(Queue(**show))
except TypeError as e: except TypeError as e:
self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: ' self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: '
'%s', e, queueItem) '%s', e, show)
if not download_queue: if not download_queue:
return return
for queueItem in download_queue: for show in download_queue:
tvShow = SonarrTVShow(**queueItem.series)
try: try:
episode = SonarrEpisode(**queueItem.episode) sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}"
sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}"
except TypeError as e: except TypeError as e:
self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \ self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \
Remove invalid queue entry. Data attempted is: %s', e, queueItem) Remove invalid queue entry. Data attempted is: %s', e, show)
continue continue
if queueItem.protocol.upper() == 'USENET': if show.protocol.upper() == 'USENET':
protocol_id = 1 protocol_id = 1
else: else:
protocol_id = 0 protocol_id = 0
queue.append((tvShow.title, episode.title, queueItem.protocol.upper(), queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name'])) protocol_id, sxe, show.id, show.quality['quality']['name']))
for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue: for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue:
hash_id = hashit(f'{self.server.id}{series_title}{sxe}') hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
@ -180,8 +143,7 @@ class SonarrAPI(object):
} }
} }
) )
if influx_payload: if influx_payload:
self.dbmanager.write_points(influx_payload) self.dbmanager.write_points(influx_payload)
else: else:
self.logger.warning("No data to send to influx for sonarr-queue instance, discarding.") self.logger.debug("No data to send to influx for sonarr instance, discarding.")

View file

@ -20,16 +20,6 @@ class InfluxServer(NamedTuple):
verify_ssl: bool = False verify_ssl: bool = False
class Influx2Server(NamedTuple):
url: str = 'localhost'
org: str = 'server'
token: str = 'TOKEN'
bucket: str = 'varken'
timeout: int = 10000
ssl: bool = False
verify_ssl: bool = False
class SonarrServer(NamedTuple): class SonarrServer(NamedTuple):
api_key: str = None api_key: str = None
future_days: int = 0 future_days: int = 0
@ -67,17 +57,6 @@ class OmbiServer(NamedTuple):
verify_ssl: bool = False verify_ssl: bool = False
class OverseerrServer(NamedTuple):
api_key: str = None
id: int = None
url: str = None
verify_ssl: bool = False
get_request_total_counts: bool = False
request_total_run_seconds: int = 30
num_latest_requests_to_fetch: int = 10
num_latest_requests_seconds: int = 30
class TautulliServer(NamedTuple): class TautulliServer(NamedTuple):
api_key: str = None api_key: str = None
fallback_ip: str = None fallback_ip: str = None
@ -112,13 +91,22 @@ class UniFiServer(NamedTuple):
# Shared # Shared
class QueuePages(NamedTuple): class Queue(NamedTuple):
page: int = None downloadId: str = None
pageSize: int = None episode: dict = None
sortKey: str = None estimatedCompletionTime: str = None
sortDirection: str = None id: int = None
totalRecords: str = None movie: dict = None
records: list = None protocol: str = None
quality: dict = None
series: dict = None
size: float = None
sizeleft: float = None
status: str = None
statusMessages: list = None
timeleft: str = None
title: str = None
trackedDownloadStatus: str = None
# Ombi Structures # Ombi Structures
@ -139,10 +127,8 @@ class OmbiTVRequest(NamedTuple):
childRequests: list = None childRequests: list = None
denied: bool = None denied: bool = None
deniedReason: None = None deniedReason: None = None
externalProviderId: str = None
id: int = None id: int = None
imdbId: str = None imdbId: str = None
languageProfile: str = None
markedAsDenied: str = None markedAsDenied: str = None
overview: str = None overview: str = None
posterPath: str = None posterPath: str = None
@ -159,159 +145,72 @@ class OmbiTVRequest(NamedTuple):
class OmbiMovieRequest(NamedTuple): class OmbiMovieRequest(NamedTuple):
approved: bool = None approved: bool = None
approved4K: bool = None
available: bool = None available: bool = None
available4K: bool = None
background: str = None background: str = None
canApprove: bool = None canApprove: bool = None
denied: bool = None denied: bool = None
denied4K: None = None
deniedReason: None = None deniedReason: None = None
deniedReason4K: None = None
digitalRelease: bool = None digitalRelease: bool = None
digitalReleaseDate: None = None digitalReleaseDate: None = None
has4KRequest: bool = None
id: int = None id: int = None
imdbId: str = None imdbId: str = None
is4kRequest: bool = None
issueId: None = None issueId: None = None
issues: None = None issues: None = None
langCode: str = None
languageCode: str = None
markedAsApproved: str = None markedAsApproved: str = None
markedAsApproved4K: str = None
markedAsAvailable: None = None markedAsAvailable: None = None
markedAsAvailable4K: None = None
markedAsDenied: str = None markedAsDenied: str = None
markedAsDenied4K: str = None
overview: str = None overview: str = None
posterPath: str = None posterPath: str = None
qualityOverride: int = None qualityOverride: int = None
released: bool = None released: bool = None
releaseDate: str = None releaseDate: str = None
requestedByAlias: str = None
requestedDate: str = None requestedDate: str = None
requestedDate4k: str = None
requestedUser: dict = None requestedUser: dict = None
requestedUserId: str = None requestedUserId: str = None
requestStatus: str = None
requestType: int = None requestType: int = None
rootPathOverride: int = None rootPathOverride: int = None
showSubscribe: bool = None showSubscribe: bool = None
source: int = None
status: str = None status: str = None
subscribed: bool = None subscribed: bool = None
theMovieDbId: int = None theMovieDbId: int = None
title: str = None title: str = None
langCode: str = None
languageCode: str = None
# Overseerr requestedByAlias: str = None
class OverseerrRequestCounts(NamedTuple): requestStatus: str = None
pending: int = None
approved: int = None
processing: int = None
available: int = None
total: int = None
movie: int = None
tv: int = None
declined: int = None
# Sonarr # Sonarr
class SonarrTVShow(NamedTuple): class SonarrTVShow(NamedTuple):
added: str = None
airTime: str = None
alternateTitles: list = None
certification: str = None
cleanTitle: str = None
ended: bool = None
firstAired: str = None
genres: list = None
id: int = None
images: list = None
imdbId: str = None
languageProfileId: int = None
monitored: bool = None
nextAiring: str = None
network: str = None
overview: str = None
path: str = None
previousAiring: str = None
qualityProfileId: int = None
ratings: dict = None
rootFolderPath: str = None
runtime: int = None
seasonFolder: bool = None
seasons: list = None
seriesType: str = None
sortTitle: str = None
statistics: dict = None
status: str = None
tags: list = None
title: str = None
titleSlug: str = None
tvdbId: int = None
tvMazeId: int = None
tvRageId: int = None
useSceneNumbering: bool = None
year: int = None
class SonarrEpisode(NamedTuple):
absoluteEpisodeNumber: int = None absoluteEpisodeNumber: int = None
airDate: str = None airDate: str = None
airDateUtc: str = None airDateUtc: str = None
episodeFile: dict = None
episodeFileId: int = None episodeFileId: int = None
episodeNumber: int = None episodeNumber: int = None
grabbed: bool = None
hasFile: bool = None hasFile: bool = None
id: int = None id: int = None
lastSearchTime: str = None
monitored: bool = None monitored: bool = None
overview: str = None overview: str = None
seasonNumber: int = None
seriesId: int = None
title: str = None
unverifiedSceneNumbering: bool = None
sceneAbsoluteEpisodeNumber: int = None sceneAbsoluteEpisodeNumber: int = None
sceneEpisodeNumber: int = None sceneEpisodeNumber: int = None
sceneSeasonNumber: int = None sceneSeasonNumber: int = None
series: SonarrTVShow = None seasonNumber: int = None
tvdbId: int = None series: dict = None
class SonarrQueue(NamedTuple):
downloadClient: str = None
downloadId: str = None
episodeId: int = None
id: int = None
indexer: str = None
language: dict = None
protocol: str = None
quality: dict = None
size: float = None
sizeleft: float = None
status: str = None
statusMessages: list = None
title: str = None
trackedDownloadState: str = None
trackedDownloadStatus: str = None
seriesId: int = None seriesId: int = None
errorMessage: str = None title: str = None
outputPath: str = None unverifiedSceneNumbering: bool = None
series: SonarrTVShow = None
episode: SonarrEpisode = None
timeleft: str = None
estimatedCompletionTime: str = None
# Radarr # Radarr
class RadarrMovie(NamedTuple): class RadarrMovie(NamedTuple):
added: str = None added: str = None
alternateTitles: list = None addOptions: str = None
alternativeTitles: list = None
certification: str = None certification: str = None
cleanTitle: str = None cleanTitle: str = None
collection: dict = None downloaded: bool = None
digitalRelease: str = None
folderName: str = None folderName: str = None
genres: list = None genres: list = None
hasFile: bool = None hasFile: bool = None
@ -320,58 +219,32 @@ class RadarrMovie(NamedTuple):
imdbId: str = None imdbId: str = None
inCinemas: str = None inCinemas: str = None
isAvailable: bool = None isAvailable: bool = None
lastInfoSync: str = None
minimumAvailability: str = None minimumAvailability: str = None
monitored: bool = None monitored: bool = None
movieFile: dict = None movieFile: dict = None
originalTitle: str = None
overview: str = None overview: str = None
path: str = None path: str = None
pathState: str = None
physicalRelease: str = None physicalRelease: str = None
physicalReleaseNote: str = None
profileId: int = None
qualityProfileId: int = None qualityProfileId: int = None
ratings: dict = None ratings: dict = None
runtime: int = None runtime: int = None
secondaryYear: int = None secondaryYear: str = None
secondaryYearSourceId: int = None secondaryYearSourceId: int = None
sizeOnDisk: float = None sizeOnDisk: int = None
sortTitle: str = None sortTitle: str = None
status: str = None status: str = None
studio: str = None studio: str = None
tags: list = None tags: list = None
title: str = None
titleSlug: str = None titleSlug: str = None
tmdbId: int = None tmdbId: int = None
website: str = None website: str = None
year: int = None year: int = None
youTubeTrailerId: str = None youTubeTrailerId: str = None
title: str = None
originalLanguage: str = None
addOptions: str = None
popularity: str = None
# Radarr Queue
class RadarrQueue(NamedTuple):
customFormats: list = None
downloadClient: str = None
downloadId: str = None
id: int = None
indexer: str = None
languages: list = None
movieId: int = None
protocol: str = None
quality: dict = None
size: float = None
sizeleft: float = None
status: str = None
statusMessages: list = None
title: str = None
trackedDownloadState: str = None
trackedDownloadStatus: str = None
timeleft: str = None
estimatedCompletionTime: str = None
errorMessage: str = None
outputPath: str = None
movie: RadarrMovie = None
timeleft: str = None
# Sickchill # Sickchill
@ -491,7 +364,6 @@ class TautulliStream(NamedTuple):
reference_id: int = None reference_id: int = None
relay: int = None relay: int = None
relayed: int = None relayed: int = None
row_id: int = None
section_id: str = None section_id: str = None
secure: str = None secure: str = None
selected: int = None selected: int = None
@ -530,7 +402,6 @@ class TautulliStream(NamedTuple):
stream_video_codec: str = None stream_video_codec: str = None
stream_video_codec_level: str = None stream_video_codec_level: str = None
stream_video_decision: str = None stream_video_decision: str = None
stream_video_dynamic_range: str = None
stream_video_framerate: str = None stream_video_framerate: str = None
stream_video_full_resolution: str = None stream_video_full_resolution: str = None
stream_video_height: str = None stream_video_height: str = None
@ -590,7 +461,6 @@ class TautulliStream(NamedTuple):
video_codec: str = None video_codec: str = None
video_codec_level: str = None video_codec_level: str = None
video_decision: str = None video_decision: str = None
video_dynamic_range: str = None
video_frame_rate: str = None video_frame_rate: str = None
video_framerate: str = None video_framerate: str = None
video_full_resolution: str = None video_full_resolution: str = None
@ -621,9 +491,7 @@ class LidarrQueue(NamedTuple):
sizeleft: float = None sizeleft: float = None
status: str = None status: str = None
trackedDownloadStatus: str = None trackedDownloadStatus: str = None
trackedDownloadState: str = None
statusMessages: list = None statusMessages: list = None
errorMessage: str = None
downloadId: str = None downloadId: str = None
protocol: str = None protocol: str = None
downloadClient: str = None downloadClient: str = None
@ -631,7 +499,6 @@ class LidarrQueue(NamedTuple):
outputPath: str = None outputPath: str = None
downloadForced: bool = None downloadForced: bool = None
id: int = None id: int = None
estimatedCompletionTime: str = None
class LidarrAlbum(NamedTuple): class LidarrAlbum(NamedTuple):

View file

@ -129,7 +129,6 @@ class TautulliAPI(object):
"tags": { "tags": {
"type": "Session", "type": "Session",
"session_id": session.session_id, "session_id": session.session_id,
"ip_address": session.ip_address,
"friendly_name": session.friendly_name, "friendly_name": session.friendly_name,
"username": session.username, "username": session.username,
"title": session.full_title, "title": session.full_title,
@ -328,7 +327,6 @@ class TautulliAPI(object):
"tags": { "tags": {
"type": "Session", "type": "Session",
"session_id": session.session_id, "session_id": session.session_id,
"ip_address": session.ip_address,
"friendly_name": session.friendly_name, "friendly_name": session.friendly_name,
"username": session.user, "username": session.user,
"title": session.full_title, "title": session.full_title,