Influxdb2 #3

25 changed files with 649 additions and 481 deletions

.github/FUNDING.yml (1 deletion, vendored)

@@ -1 +0,0 @@
-ko_fi: varken

.github/ISSUE_TEMPLATE/bug_report.md (31 deletions, vendored)

@@ -1,31 +0,0 @@
----
-name: Bug report
-about: Create a report to help us improve
-title: "[BUG]"
-labels: awaiting-triage
-assignees: ''
-
----
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. ...
-2. ...
-3. ...
-4. ...
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
-
-**Environment (please complete the following information):**
- - OS: [e.g. Ubuntu 18.04.1 or Docker:Tag]
- - Version [e.g. v1.1]
-
-**Additional context**
-Add any other context about the problem here.

.github/ISSUE_TEMPLATE/feature_request.md (20 deletions, vendored)

@@ -1,20 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this project
-title: "[Feature Request]"
-labels: awaiting-triage
-assignees: ''
-
----
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-
-**Additional context**
-Add any other context or screenshots about the feature request here.

.github/actions/docker-multi-login-action (action definition; 23 deletions)

@@ -1,23 +0,0 @@
-name: 'Docker Multi Login Action'
-description: 'Log in to dockerhub, quay, and github container registry'
-runs:
-  using: "composite"
-  steps:
-    - shell: bash
-      run: |
-        echo "🔑 Logging into dockerhub..."
-        if docker login --username ${{ fromJSON(env.secrets).DOCKERHUB_USERNAME }} --password ${{ fromJSON(env.secrets).DOCKERHUB_PASSWORD }} > /dev/null 2>&1; then
-          echo "🎉 Login Succeeded!"
-        fi
-    - shell: bash
-      run: |
-        echo "🔑 Logging into quay.io..."
-        if docker login quay.io --username ${{ fromJSON(env.secrets).QUAY_USERNAME }} --password ${{ fromJSON(env.secrets).QUAY_PASSWORD }} > /dev/null 2>&1; then
-          echo "🎉 Login Succeeded!"
-        fi
-    - shell: bash
-      run: |
-        echo "🔑 Logging into ghcr.io..."
-        if docker login ghcr.io --username ${{ fromJSON(env.secrets).GHCR_USERNAME }} --password ${{ fromJSON(env.secrets).GHCR_PASSWORD }} > /dev/null 2>&1; then
-          echo "🎉 Login Succeeded!"
-        fi

.github/actions/docker-target-image-list-action (action definition; 46 deletions)

@@ -1,46 +0,0 @@
-name: 'Docker Target Image List Generator'
-description: 'A Github Action to generate a list of fully qualified target images for docker related steps'
-inputs:
-  registries:
-    description: "Comma separated list of docker registries"
-    required: false
-    default: "docker.io,quay.io,ghcr.io"
-  images:
-    description: "Comma separated list of images"
-    required: true
-  tags:
-    description: "Comma separated list of image tags"
-    required: false
-    default: "edge"
-outputs:
-  fully-qualified-target-images:
-    description: "List of fully qualified docker target images"
-    value: ${{ steps.gen-fqti.outputs.fully-qualified-target-images }}
-runs:
-  using: "composite"
-  steps:
-    - name: Generate fully qualified docker target images
-      id: gen-fqti
-      shell: bash
-      run: |
-        IFS=',' read -r -a registries <<< "${{ inputs.registries }}"
-        IFS=',' read -r -a images <<< "${{ inputs.images }}"
-        IFS=',' read -r -a tags <<< "${{ inputs.tags }}"
-        FQTI=""
-        echo "Generating fully qualified docker target images for:"
-        echo "🐋 Registries: ${#registries[@]}"
-        echo "📷 Images:     ${#images[@]}"
-        echo "🏷️ Tags:       ${#tags[@]}"
-        echo "🧮 Total:      $((${#registries[@]}*${#images[@]}*${#tags[@]}))"
-        for registry in "${registries[@]}"; do
-          for image in "${images[@]}"; do
-            for tag in "${tags[@]}"; do
-              if [ -z "$FQTI" ]; then
-                FQTI="${registry}/${image}:${tag}"
-              else
-                FQTI="$FQTI,${registry}/${image}:${tag}"
-              fi
-            done
-          done
-        done
-        echo ::set-output name=fully-qualified-target-images::${FQTI}
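For reference, the deleted action above builds the Cartesian product of registries, images, and tags into a single comma-separated string. A minimal Python sketch of the same logic, using the action's default inputs and the boerderij/varken image from the deleted workflow below (illustrative only, not part of this diff):

```python
from itertools import product

# Defaults mirror the action's inputs; the image name comes from the
# deleted workflow's IMAGES env variable. Purely illustrative.
registries = "docker.io,quay.io,ghcr.io".split(",")
images = "boerderij/varken".split(",")
tags = "edge".split(",")

# Same triple-nested loop as the bash version, joined into one CSV string
fqti = ",".join(f"{r}/{i}:{t}" for r, i, t in product(registries, images, tags))
print(fqti)
# docker.io/boerderij/varken:edge,quay.io/boerderij/varken:edge,ghcr.io/boerderij/varken:edge
```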
							
								
								
									
.github/workflows/docker.yaml (116 deletions, vendored)

@@ -1,116 +0,0 @@
-name: varken
-on:
-  schedule:
-    - cron: '0 10 * * *'
-  push:
-    branches:
-      - master
-      - develop
-    tags:
-      - 'v*.*.*'
-    paths:
-      - '.github/workflows/docker.yaml'
-      - 'varken/**'
-      - 'Varken.py'
-      - 'Dockerfile'
-  pull_request:
-    branches:
-      - master
-      - develop
-    paths:
-      - '.github/workflows/docker.yaml'
-      - 'varken/**'
-      - 'Varken.py'
-      - 'Dockerfile'
-  workflow_dispatch:
-    inputs:
-      tag:
-        description: 'Use this tag instead of most recent'
-        required: false
-      ignore-existing-tag:
-        description: 'Ignore existing tag if "true"'
-        required: false
-env:
-  IMAGES: boerderij/varken
-  PLATFORMS: "linux/amd64,linux/arm64,linux/arm/v7"
-jobs:
-  lint-and-test:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Setup Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.x'
-      - name: Lint
-        run: pip install flake8 && flake8 --max-line-length 120 Varken.py varken/*.py
-  build:
-    runs-on: ubuntu-latest
-    needs: lint-and-test
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v2
-      - name: Prepare
-        id: prep
-        run: |
-          VERSION=edge
-          if [[ $GITHUB_REF == refs/tags/* ]]; then
-            VERSION=${GITHUB_REF#refs/tags/v}
-          fi
-          if [ "${{ github.event_name }}" = "schedule" ]; then
-            VERSION=nightly
-          fi
-          if [[ ${GITHUB_REF##*/} == "develop" ]]; then
-            VERSION=develop
-          fi
-          TAGS="${VERSION}"
-          if [[ $VERSION =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
-            TAGS="$TAGS,latest"
-          fi
-          echo ::set-output name=version::${VERSION}
-          echo ::set-output name=tags::${TAGS}
-          echo ::set-output name=branch::${GITHUB_REF##*/}
-          echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
-          echo ::set-output name=vcs_ref::${GITHUB_SHA::8}
-      - uses: ./.github/actions/docker-target-image-list-action
-        name: Generate Target Images
-        id: gen-tags
-        with:
-          images: ${{ env.IMAGES }}
-          tags: ${{ steps.prep.outputs.tags }}
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-        with:
-          platforms: ${{ env.PLATFORMS }}
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
-        with:
-          install: true
-          version: latest
-          driver-opts: image=moby/buildkit:master
-      - name: Docker Multi Login
-        uses: ./.github/actions/docker-multi-login-action
-        env:
-          secrets: ${{ toJSON(secrets) }}
-      - name: Build and Push
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: ./Dockerfile
-          platforms: ${{ env.PLATFORMS }}
-          pull: true
-          push: ${{ github.event_name != 'pull_request' }}
-          tags: ${{ steps.gen-tags.outputs.fully-qualified-target-images }}
-          build-args: |
-            VERSION=${{ steps.prep.outputs.version }}
-            BRANCH=${{ steps.prep.outputs.branch }}
-            BUILD_DATE=${{ steps.prep.outputs.build_date }}
-            VCS_REF=${{ steps.prep.outputs.vcs_ref }}
-      - name: Inspect
-        if: ${{ github.event_name != 'pull_request' }}
-        run: |
-          IFS=',' read -r -a images <<< "${{ steps.gen-tags.outputs.fully-qualified-target-images }}"
-          for image in "${images[@]}"; do
-              docker buildx imagetools inspect ${image}
-          done
							
								
								
									
.github/workflows/docker.yml (41 additions, vendored, new file)

@@ -0,0 +1,41 @@
+name: Publish Docker Containers
+on:
+  workflow_dispatch:
+
+env:
+  REGISTRY: ghcr.io
+  IMAGE_NAME: ${{ github.repository }}
+
+jobs:
+  build_and_publish_docker:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+        with:
+          ref: ${{ github.ref }}
+
+      - name: Log in to the Container registry
+        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Extract metadata for base Docker
+        id: base_meta
+        uses: docker/metadata-action@v2
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=ref,event=branch
+      - name: Build and push Docker image
+        uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
+        with:
+          context: .
+          push: true
+          tags: ${{ steps.base_meta.outputs.tags }}
+          labels: ${{ steps.base_meta.outputs.labels }}
							
								
								
									
.github/workflows/invalid_template.yml (19 deletions, vendored)

@@ -1,19 +0,0 @@
-name: 'Invalid Template'
-
-on:
-  issues:
-    types: [labeled, unlabeled, reopened]
-
-jobs:
-  support:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: dessant/support-requests@v2
-        with:
-          github-token: ${{ github.token }}
-          support-label: 'invalid:template-incomplete'
-          issue-comment: >
-            :wave: @{issue-author}, please edit your issue and follow the template provided.
-          close-issue: false
-          lock-issue: false
-          issue-lock-reason: 'resolved'
							
								
								
									
.github/workflows/support.yml (25 deletions, vendored)

@@ -1,25 +0,0 @@
-name: 'Support Request'
-
-on:
-  issues:
-    types: [labeled, unlabeled, reopened]
-
-jobs:
-  support:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: dessant/support-requests@v2
-        with:
-          github-token: ${{ github.token }}
-          support-label: 'support'
-          issue-comment: >
-            :wave: @{issue-author}, we use the issue tracker exclusively
-            for bug reports and feature requests. However, this issue appears
-            to be a support request. Please use our support channels
-            to get help with Varken!
-
-            - [Discord](https://discord.gg/VjZ6qSM)
-            - [Discord Quick Access](http://cyborg.decreator.dev/channels/518970285773422592/530424560504537105/)
-          close-issue: true
-          lock-issue: false
-          issue-lock-reason: 'off-topic'
							
								
								
									
CHANGELOG.md (24 additions)

@@ -1,5 +1,29 @@
 # Change Log
 
+## [v1.7.7](https://github.com/Boerderij/Varken/tree/v1.7.7) (2020-12-21)
+[Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.6...v1.7.7)
+
+**Implemented enhancements:**
+- \[Enhancement\] Ombi 4.0 compatibility [\#186](https://github.com/Boerderij/Varken/issues/186)
+  ([samwiseg0](https://github.com/samwiseg0))
+
+**Merged pull requests:**
+
+- v1.7.7 Merge [\#191](https://github.com/Boerderij/Varken/pull/191)
+  ([DirtyCajunRice](https://github.com/DirtyCajunRice))
+- Type Error fix [\#177](https://github.com/Boerderij/Varken/pull/177)
+  ([derek-miller](https://github.com/derek-miller))
+
+**Fixed bugs:**
+
+- \[BUG\] Influxdb exit code [\#174](https://github.com/Boerderij/Varken/issues/174)
+  ([samwiseg0](https://github.com/samwiseg0))
+
+**Notes:**
+- Now built via github actions
+- Available on ghcr, quay.io, and dockerhub
+- Nightly builds done to accommodate dependabot MRs
+
 ## [v1.7.6](https://github.com/Boerderij/Varken/tree/v1.7.6) (2020-01-01)
 [Full Changelog](https://github.com/Boerderij/Varken/compare/1.7.5...v1.7.6)
 
Dockerfile (modified)

@@ -1,4 +1,4 @@
-FROM python:3.9.1-alpine
+FROM python:3.10.5-alpine
 
 ENV DEBUG="True" \
     DATA_FOLDER="/config" \
@@ -8,11 +8,11 @@ ENV DEBUG="True" \
 
 LABEL maintainer="dirtycajunrice,samwiseg0" \
   org.opencontainers.image.created=$BUILD_DATE \
-  org.opencontainers.image.url="https://github.com/Boerderij/Varken" \
-  org.opencontainers.image.source="https://github.com/Boerderij/Varken" \
+  org.opencontainers.image.url="https://github.com/d-mcknight/Varken" \
+  org.opencontainers.image.source="https://github.com/d-mcknight/Varken" \
   org.opencontainers.image.version=$VERSION \
   org.opencontainers.image.revision=$VCS_REF \
-  org.opencontainers.image.vendor="boerderij" \
+  org.opencontainers.image.vendor="d-mcknight" \
   org.opencontainers.image.title="varken" \
  org.opencontainers.image.description="Varken is a standalone application to aggregate data from the Plex ecosystem into InfluxDB using Grafana for a frontend" \
   org.opencontainers.image.licenses="MIT"
README.md (modified)

@@ -2,7 +2,7 @@
 <img width="800" src="https://raw.githubusercontent.com/Boerderij/Varken/master/assets/varken_full_banner.jpg" alt="Logo Banner">
 </p>
 
-[](https://gitlab.com/boerderij/Varken/commits/master)
+[](https://github.com/Boerderij/Varken/actions?query=workflow%3Avarken)
 [](https://discord.gg/VjZ6qSM)
 [](https://ko-fi.com/varken)
 [](https://microbadger.com/images/boerderij/varken)
@@ -17,7 +17,7 @@ ecosystem into InfluxDB using Grafana for a frontend
 Requirements:
 * [Python 3.6.7+](https://www.python.org/downloads/release/python-367/)
 * [Python3-pip](https://pip.pypa.io/en/stable/installing/)
-* [InfluxDB 1.8.x](https://www.influxdata.com/)
+* [InfluxDB 1.8.x or 2.0.x](https://www.influxdata.com/)
 * [Grafana](https://grafana.com/)
 
 <p align="center">
							
								
								
									
Varken.py (modified, 32 changes)

@@ -1,19 +1,21 @@
 import platform
 import schedule
+import distro
 from time import sleep
 from queue import Queue
 from sys import version
 from threading import Thread
 from os import environ as env
 from os import access, R_OK, getenv
-from distro import linux_distribution
 from os.path import isdir, abspath, dirname, join
 from argparse import ArgumentParser, RawTextHelpFormatter
 from logging import getLogger, StreamHandler, Formatter, DEBUG
 
+
 # Needed to check version of python
 from varken import structures  # noqa
 from varken.ombi import OmbiAPI
+from varken.overseerr import OverseerrAPI
 from varken.unifi import UniFiAPI
 from varken import VERSION, BRANCH, BUILD_DATE
 from varken.sonarr import SonarrAPI
@@ -21,13 +23,14 @@ from varken.radarr import RadarrAPI
 from varken.lidarr import LidarrAPI
 from varken.iniparser import INIParser
 from varken.dbmanager import DBManager
+from varken.influxdb2manager import InfluxDB2Manager
 from varken.helpers import GeoIPHandler
 from varken.tautulli import TautulliAPI
 from varken.sickchill import SickChillAPI
 from varken.varkenlogger import VarkenLogger
 
 
-PLATFORM_LINUX_DISTRO = ' '.join(x for x in linux_distribution() if x)
+PLATFORM_LINUX_DISTRO = ' '.join(distro.id() + distro.version() + distro.name())
 
 
 def thread(job, **kwargs):
@@ -90,7 +93,15 @@ if __name__ == "__main__":
     vl.logger.info("Varken v%s-%s %s", VERSION, BRANCH, BUILD_DATE)
 
     CONFIG = INIParser(DATA_FOLDER)
-    DBMANAGER = DBManager(CONFIG.influx_server)
+
+    if CONFIG.influx2_enabled:
+        # Use INFLUX version 2
+        vl.logger.info('Using INFLUXDBv2')
+        DBMANAGER = InfluxDB2Manager(CONFIG.influx_server)
+    else:
+        vl.logger.info('Using INFLUXDB')
+        DBMANAGER = DBManager(CONFIG.influx_server)
+
     QUEUE = Queue()
 
     if CONFIG.sonarr_enabled:
@@ -156,6 +167,18 @@ if __name__ == "__main__":
                 at_time = schedule.every(server.issue_status_run_seconds).seconds
                 at_time.do(thread, OMBI.get_issue_counts).tag("ombi-{}-get_issue_counts".format(server.id))
 
+    if CONFIG.overseerr_enabled:
+        for server in CONFIG.overseerr_servers:
+            OVERSEER = OverseerrAPI(server, DBMANAGER)
+            if server.get_request_total_counts:
+                at_time = schedule.every(server.request_total_run_seconds).seconds
+                at_time.do(thread, OVERSEER.get_request_counts).tag("overseerr-{}-get_request_counts"
+                                                                    .format(server.id))
+            if server.num_latest_requests_to_fetch > 0:
+                at_time = schedule.every(server.num_latest_requests_seconds).seconds
+                at_time.do(thread, OVERSEER.get_latest_requests).tag("overseerr-{}-get_latest_requests"
+                                                                     .format(server.id))
+
     if CONFIG.sickchill_enabled:
         for server in CONFIG.sickchill_servers:
             SICKCHILL = SickChillAPI(server, DBMANAGER)
@@ -171,7 +194,8 @@ if __name__ == "__main__":
 
     # Run all on startup
     SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled, CONFIG.unifi_enabled,
-                        CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled]
+                        CONFIG.sonarr_enabled, CONFIG.sickchill_enabled, CONFIG.lidarr_enabled,
+                        CONFIG.overseerr_enabled]
     if not [enabled for enabled in SERVICES_ENABLED if enabled]:
         vl.logger.error("All services disabled. Exiting")
         exit(1)
data/varken.example.ini (modified)

@@ -3,10 +3,12 @@ sonarr_server_ids = 1,2
 radarr_server_ids = 1,2
 lidarr_server_ids = false
 tautulli_server_ids = 1
-ombi_server_ids = 1
+ombi_server_ids = false
+overseerr_server_ids = 1
 sickchill_server_ids = false
 unifi_server_ids = false
 maxmind_license_key = xxxxxxxxxxxxxxxx
+influx2_enabled = false
 
 [influxdb]
 url = influxdb.domain.tld
@@ -16,6 +18,15 @@ verify_ssl = false
 username = root
 password = root
 
+[influx2]
+url = influxdb2.domain.tld
+org = ORG
+token = TOKEN
+timeout = 10000
+ssl = false
+verify_ssl = false
+bucket = varken
+
 [tautulli-1]
 url = tautulli.domain.tld:8181
 fallback_ip = 1.1.1.1
@@ -95,6 +106,17 @@ request_total_run_seconds = 300
 get_issue_status_counts = true
 issue_status_run_seconds = 300
 
+[overseerr-1]
+url = overseerr.domain.tld
+apikey = xxxxxxxxxxxxxxxx
+ssl = false
+verify_ssl = false
+get_request_total_counts = true
+request_total_run_seconds = 30
+get_latest_requests = true
+num_latest_requests_to_fetch = 10
+num_latest_requests_seconds = 30
+
 [sickchill-1]
 url = sickchill.domain.tld:8081
 apikey = xxxxxxxxxxxxxxxx
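As the varken/iniparser.py changes later in this diff show, every new [influx2] key can also be supplied through an environment variable that takes precedence over the ini value. A minimal sketch of that lookup order, assuming only the VRKN_INFLUXDB2_* names that appear in the parser below:

```python
from os import environ as env
from configparser import ConfigParser

config = ConfigParser()
config.read("varken.ini")

# The environment variable wins; the ini value is only the fallback,
# mirroring env.get('VRKN_INFLUXDB2_URL', config.get('influx2', 'url')) in iniparser.py
url = env.get("VRKN_INFLUXDB2_URL", config.get("influx2", "url"))
token = env.get("VRKN_INFLUXDB2_TOKEN", config.get("influx2", "token"))
bucket = env.get("VRKN_INFLUXDB2_BUCKET", config.get("influx2", "bucket"))
```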
docker-compose.yml (modified)

@@ -6,7 +6,7 @@ services:
   influxdb:
     hostname: influxdb
     container_name: influxdb
-    image: influxdb
+    image: influxdb:1.8
     networks:
       - internal
     volumes:
@@ -22,91 +22,6 @@ services:
       - /path/to/docker-varken/config-folder:/config
     environment:
       - TZ=America/Chicago
-      - VRKN_GLOBAL_SONARR_SERVER_IDS=1,2
-      - VRKN_GLOBAL_RADARR_SERVER_IDS=1,2
-      - VRKN_GLOBAL_LIDARR_SERVER_IDS=false
-      - VRKN_GLOBAL_TAUTULLI_SERVER_IDS=1
-      - VRKN_GLOBAL_OMBI_SERVER_IDS=1
-      - VRKN_GLOBAL_SICKCHILL_SERVER_IDS=false
-      - VRKN_GLOBAL_UNIFI_SERVER_IDS=false
-      - VRKN_GLOBAL_MAXMIND_LICENSE_KEY=xxxxxxxxxxxxxxxx
-      - VRKN_INFLUXDB_URL=influxdb.domain.tld
-      - VRKN_INFLUXDB_PORT=8086
-      - VRKN_INFLUXDB_SSL=false
-      - VRKN_INFLUXDB_VERIFY_SSL=false
-      - VRKN_INFLUXDB_USERNAME=root
-      - VRKN_INFLUXDB_PASSWORD=root
-      - VRKN_TAUTULLI_1_URL=tautulli.domain.tld:8181
-      - VRKN_TAUTULLI_1_FALLBACK_IP=1.1.1.1
-      - VRKN_TAUTULLI_1_APIKEY=xxxxxxxxxxxxxxxx
-      - VRKN_TAUTULLI_1_SSL=false
-      - VRKN_TAUTULLI_1_VERIFY_SSL=false
-      - VRKN_TAUTULLI_1_GET_ACTIVITY=true
-      - VRKN_TAUTULLI_1_GET_ACTIVITY_RUN_SECONDS=30
-      - VRKN_TAUTULLI_1_GET_STATS=true
-      - VRKN_TAUTULLI_1_GET_STATS_RUN_SECONDS=3600
-      - VRKN_SONARR_1_URL=sonarr1.domain.tld:8989
-      - VRKN_SONARR_1_APIKEY=xxxxxxxxxxxxxxxx
-      - VRKN_SONARR_1_SSL=false
-      - VRKN_SONARR_1_VERIFY_SSL=false
-      - VRKN_SONARR_1_MISSING_DAYS=7
-      - VRKN_SONARR_1_MISSING_DAYS_RUN_SECONDS=300
-      - VRKN_SONARR_1_FUTURE_DAYS=1
-      - VRKN_SONARR_1_FUTURE_DAYS_RUN_SECONDS=300
-      - VRKN_SONARR_1_QUEUE=true
-      - VRKN_SONARR_1_QUEUE_RUN_SECONDS=300
-      - VRKN_SONARR_2_URL=sonarr2.domain.tld:8989
-      - VRKN_SONARR_2_APIKEY=yyyyyyyyyyyyyyyy
-      - VRKN_SONARR_2_SSL=false
-      - VRKN_SONARR_2_VERIFY_SSL=false
-      - VRKN_SONARR_2_MISSING_DAYS=7
-      - VRKN_SONARR_2_MISSING_DAYS_RUN_SECONDS=300
-      - VRKN_SONARR_2_FUTURE_DAYS=1
-      - VRKN_SONARR_2_FUTURE_DAYS_RUN_SECONDS=300
-      - VRKN_SONARR_2_QUEUE=true
-      - VRKN_SONARR_2_QUEUE_RUN_SECONDS=300
-      - VRKN_RADARR_1_URL=radarr1.domain.tld
-      - VRKN_RADARR_1_APIKEY=xxxxxxxxxxxxxxxx
-      - VRKN_RADARR_1_SSL=false
-      - VRKN_RADARR_1_VERIFY_SSL=false
-      - VRKN_RADARR_1_QUEUE=true
-      - VRKN_RADARR_1_QUEUE_RUN_SECONDS=300
-      - VRKN_RADARR_1_GET_MISSING=true
-      - VRKN_RADARR_1_GET_MISSING_RUN_SECONDS=300
-      - VRKN_RADARR_2_URL=radarr2.domain.tld
-      - VRKN_RADARR_2_APIKEY=yyyyyyyyyyyyyyyy
-      - VRKN_RADARR_2_SSL=false
-      - VRKN_RADARR_2_VERIFY_SSL=false
-      - VRKN_RADARR_2_QUEUE=true
-      - VRKN_RADARR_2_QUEUE_RUN_SECONDS=300
-      - VRKN_RADARR_2_GET_MISSING=true
-      - VRKN_RADARR_2_GET_MISSING_RUN_SECONDS=300
-      - VRKN_LIDARR_1_URL=lidarr1.domain.tld:8686
-      - VRKN_LIDARR_1_APIKEY=xxxxxxxxxxxxxxxx
-      - VRKN_LIDARR_1_SSL=false
-      - VRKN_LIDARR_1_VERIFY_SSL=false
-      - VRKN_LIDARR_1_MISSING_DAYS=30
-      - VRKN_LIDARR_1_MISSING_DAYS_RUN_SECONDS=300
-      - VRKN_LIDARR_1_FUTURE_DAYS=30
-      - VRKN_LIDARR_1_FUTURE_DAYS_RUN_SECONDS=300
-      - VRKN_LIDARR_1_QUEUE=true
-      - VRKN_LIDARR_1_QUEUE_RUN_SECONDS=300
-      - VRKN_OMBI_1_URL=ombi.domain.tld
-      - VRKN_OMBI_1_APIKEY=xxxxxxxxxxxxxxxx
-      - VRKN_OMBI_1_SSL=false
-      - VRKN_OMBI_1_VERIFY_SSL=false
-      - VRKN_OMBI_1_GET_REQUEST_TYPE_COUNTS=true
-      - VRKN_OMBI_1_REQUEST_TYPE_RUN_SECONDS=300
-      - VRKN_OMBI_1_GET_REQUEST_TOTAL_COUNTS=true
-      - VRKN_OMBI_1_REQUEST_TOTAL_RUN_SECONDS=300
-      - VRKN_OMBI_1_GET_ISSUE_STATUS_COUNTS=true
-      - VRKN_OMBI_1_ISSUE_STATUS_RUN_SECONDS=300
-      - VRKN_SICKCHILL_1_URL=sickchill.domain.tld:8081
-      - VRKN_SICKCHILL_1_APIKEY=xxxxxxxxxxxxxxxx
-      - VRKN_SICKCHILL_1_SSL=false
-      - VRKN_SICKCHILL_1_VERIFY_SSL=false
-      - VRKN_SICKCHILL_1_GET_MISSING=true
-      - VRKN_SICKCHILL_1_GET_MISSING_RUN_SECONDS=300
     depends_on:
       - influxdb
     restart: unless-stopped
requirements.txt (modified)

@@ -2,9 +2,10 @@
 # Potential requirements.
 # pip3 install -r requirements.txt
 #---------------------------------------------------------
-requests==2.21
+requests==2.28.1
 geoip2==2.9.0
 influxdb==5.2.0
-schedule==0.6.0
+schedule==1.1.0
 distro==1.4.0
-urllib3==1.24.2
+urllib3==1.26.10
+influxdb-client==1.14.0
utilities/historical_tautulli_import.py (modified)

@@ -41,7 +41,7 @@ if __name__ == "__main__":
     DBMANAGER = DBManager(CONFIG.influx_server)
 
     if CONFIG.tautulli_enabled:
-        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER)
+        GEOIPHANDLER = GeoIPHandler(DATA_FOLDER, CONFIG.tautulli_servers[0].maxmind_license_key)
         for server in CONFIG.tautulli_servers:
             TAUTULLI = TautulliAPI(server, DBMANAGER, GEOIPHANDLER)
             TAUTULLI.get_historical(days=opts.days)
varken.xml (Unraid container template, modified)

@@ -51,5 +51,6 @@
   <Labels/>
   <Config Name="PGID" Target="PGID" Default="" Mode="" Description="Container Variable: PGID" Type="Variable" Display="always" Required="true" Mask="false">99</Config>
   <Config Name="PUID" Target="PUID" Default="" Mode="" Description="Container Variable: PUID" Type="Variable" Display="always" Required="true" Mask="false">100</Config>
+  <Config Name="Debug" Target="DEBUG" Default="False" Mode="" Description="Turn Debug on or off" Type="Variable" Display="always" Required="false" Mask="false">False</Config>
   <Config Name="Varken DataDir" Target="/config" Default="" Mode="rw" Description="Container Path: /config" Type="Path" Display="advanced-hide" Required="true" Mask="false">/mnt/user/appdata/varken</Config>
 </Container>
							
								
								
									
varken/influxdb2manager.py (48 additions, new file)

@@ -0,0 +1,48 @@
+from sys import exit
+from logging import getLogger
+import influxdb_client
+from influxdb_client import InfluxDBClient
+from influxdb_client.client.write_api import SYNCHRONOUS
+
+
+class InfluxDB2Manager(object):
+    def __init__(self, server):
+        self.server = server
+        self.logger = getLogger()
+        if self.server.url == "influxdb2.domain.tld":
+            self.logger.critical("You have not configured your varken.ini. Please read Wiki page for configuration")
+            exit()
+
+        self.influx = InfluxDBClient(url=self.server.url, token=self.server.token, org=self.server.org,
+                                     timeout=self.server.timeout, verify_ssl=self.server.verify_ssl,
+                                     ssl_ca_cert=self.server.ssl)
+        self.influx_write_api = self.influx.write_api(write_options=SYNCHRONOUS)
+
+        # Create the bucket if needed
+
+        bucket_api = self.influx.buckets_api()
+
+        try:
+            bucket = bucket_api.find_bucket_by_name(self.server.bucket)
+
+            if bucket is None:
+                self.logger.info('Creating bucket %s', self.server.bucket)
+
+                org_api = influxdb_client.service.organizations_service.OrganizationsService(self.influx.api_client)
+                orgs = org_api.get_orgs()
+                for org in orgs.orgs:
+                    if org.name == self.server.org:
+                        my_org = org
+
+                self.influx.buckets_api().create_bucket(bucket_name=self.server.bucket, org_id=my_org.id)
+        except Exception as e:
+            self.logger.error('Failed creating new InfluxDB bucket! Error: %s', e)
+
+    def write_points(self, data):
+        self.logger.info('Writing Data to InfluxDBv2 %s', data)
+
+        try:
+            self.influx_write_api.write(bucket=self.server.bucket, record=data)
+        except Exception as e:
+            self.logger.exception('Error writing data to influxdb2. Dropping this set of data. '
+                                  'Check your database! Error: %s', e)
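A short usage sketch for the new manager: Varken.py builds it from the parsed [influx2] settings and feeds it the same list-of-dict payloads the v1 DBManager receives. The Influx2Server tuple below is a hypothetical stand-in for the structure INIParser produces (the real one lives in varken.structures, outside this excerpt), and it assumes a local InfluxDB 2 instance is reachable:

```python
from collections import namedtuple
from varken.influxdb2manager import InfluxDB2Manager

# Hypothetical stand-in for varken.structures.Influx2Server
Influx2Server = namedtuple("Influx2Server", "url token org timeout ssl verify_ssl bucket")
server = Influx2Server(url="http://localhost:8086", token="TOKEN", org="ORG",
                       timeout=10000, ssl=False, verify_ssl=False, bucket="varken")

manager = InfluxDB2Manager(server)
# write_points() accepts the same point dictionaries the v1 DBManager takes
manager.write_points([{"measurement": "Varken",
                       "tags": {"type": "example"},
                       "fields": {"value": 1}}])
```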
varken/iniparser.py (modified)

@@ -9,7 +9,7 @@ from configparser import ConfigParser, NoOptionError, NoSectionError
 from varken.varkenlogger import BlacklistFilter
 from varken.structures import SickChillServer, UniFiServer
 from varken.helpers import clean_sid_check, rfc1918_ip_check, boolcheck
-from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer
+from varken.structures import SonarrServer, RadarrServer, OmbiServer, OverseerrServer, TautulliServer, InfluxServer, Influx2Server
 
 
 class INIParser(object):
@@ -17,7 +17,7 @@ class INIParser(object):
         self.config = None
         self.data_folder = data_folder
         self.filtered_strings = None
-        self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'tautulli', 'sickchill', 'unifi']
+        self.services = ['sonarr', 'radarr', 'lidarr', 'ombi', 'overseerr', 'tautulli', 'sickchill', 'unifi']
 
         self.logger = getLogger()
         self.influx_server = InfluxServer()
@@ -107,6 +107,7 @@ class INIParser(object):
 
         valid = match(regex, url_check) is not None
         if not valid:
+            return url_check
             if inc_port:
                 self.logger.error('%s is invalid in module [%s]! URL must host/IP and '
                                   'port if not 80 or 443. ie. localhost:8080',
@@ -144,23 +145,47 @@ class INIParser(object):
         if read_file:
             self.config = self.read_file('varken.ini')
             self.config_blacklist()
+
         # Parse InfluxDB options
-        try:
-            url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
-                                 include_port=False, section='influxdb')
-            port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
-            ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
-            verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
-
-            username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
-            password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
-        except NoOptionError as e:
-            self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
-            self.rectify_ini()
-            return
-
-        self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
-                                          verify_ssl=verify_ssl)
+        self.influx2_enabled = env.get('VRKN_GLOBAL_INFLUXDB2_ENABLED',
+                                       self.config.getboolean('global', 'influx2_enabled'))
+
+        if self.influx2_enabled:
+            # Use INFLUX version 2
+            try:
+                url = self.url_check(env.get('VRKN_INFLUXDB2_URL', self.config.get('influx2', 'url')),
+                                     section='influx2', include_port=False)
+                ssl = boolcheck(env.get('VRKN_INFLUXDB2_SSL', self.config.get('influx2', 'ssl')))
+                verify_ssl = boolcheck(env.get('VRKN_INFLUXDB2_VERIFY_SSL', self.config.get('influx2', 'verify_ssl')))
+
+                org = env.get('VRKN_INFLUXDB2_ORG', self.config.get('influx2', 'org'))
+                bucket = env.get('VRKN_INFLUXDB2_BUCKET', self.config.get('influx2', 'bucket'))
+                token = env.get('VRKN_INFLUXDB2_TOKEN', self.config.get('influx2', 'token'))
+                timeout = env.get('VRKN_INFLUXDB2_TIMEOUT', self.config.get('influx2', 'timeout'))
+            except NoOptionError as e:
+                self.logger.error('Missing key in %s. Error: %s', "influx2", e)
+                self.rectify_ini()
+                return
+
+            self.influx_server = Influx2Server(url=url, token=token, org=org, timeout=timeout, ssl=ssl,
+                                               verify_ssl=verify_ssl, bucket=bucket)
+        else:
+            try:
+                url = self.url_check(env.get('VRKN_INFLUXDB_URL', self.config.get('influxdb', 'url')),
+                                     include_port=False, section='influxdb')
+                port = int(env.get('VRKN_INFLUXDB_PORT', self.config.getint('influxdb', 'port')))
+                ssl = boolcheck(env.get('VRKN_INFLUXDB_SSL', self.config.get('influxdb', 'ssl')))
+                verify_ssl = boolcheck(env.get('VRKN_INFLUXDB_VERIFY_SSL', self.config.get('influxdb', 'verify_ssl')))
+
+                username = env.get('VRKN_INFLUXDB_USERNAME', self.config.get('influxdb', 'username'))
+                password = env.get('VRKN_INFLUXDB_PASSWORD', self.config.get('influxdb', 'password'))
+            except NoOptionError as e:
+                self.logger.error('Missing key in %s. Error: %s', "influxdb", e)
+                self.rectify_ini()
+                return
+
+            self.influx_server = InfluxServer(url=url, port=port, username=username, password=password, ssl=ssl,
+                                              verify_ssl=verify_ssl)
 
         # Check for all enabled services
         for service in self.services:
@@ -293,6 +318,27 @@ class INIParser(object):
                                                 issue_status_counts=issue_status_counts,
                                                 issue_status_run_seconds=issue_status_run_seconds)
 
+                        if service == 'overseerr':
+                            get_request_total_counts = boolcheck(env.get(
+                                f'VRKN_{envsection}_GET_REQUEST_TOTAL_COUNTS',
+                                self.config.get(section, 'get_request_total_counts')))
+                            request_total_run_seconds = int(env.get(
+                                f'VRKN_{envsection}_REQUEST_TOTAL_RUN_SECONDS',
+                                self.config.getint(section, 'request_total_run_seconds')))
+                            num_latest_requests_to_fetch = int(env.get(
+                                f'VRKN_{envsection}_GET_LATEST_REQUESTS_TO_FETCH',
+                                self.config.getint(section, 'num_latest_requests_to_fetch')))
+                            num_latest_requests_seconds = int(env.get(
+                                f'VRKN_{envsection}_NUM_LATEST_REQUESTS_SECONDS',
+                                self.config.getint(section, 'num_latest_requests_seconds')))
+
+                            server = OverseerrServer(id=server_id, url=scheme + url, api_key=apikey,
+                                                     verify_ssl=verify_ssl,
+                                                     get_request_total_counts=get_request_total_counts,
+                                                     request_total_run_seconds=request_total_run_seconds,
+                                                     num_latest_requests_to_fetch=num_latest_requests_to_fetch,
+                                                     num_latest_requests_seconds=num_latest_requests_seconds)
+
                         if service == 'sickchill':
                             get_missing = boolcheck(env.get(f'VRKN_{envsection}_GET_MISSING',
                                                             self.config.get(section, 'get_missing')))
							
								
								
									
varken/overseerr.py (133 additions, new file)

@@ -0,0 +1,133 @@
|  | from logging import getLogger | ||||||
|  | from requests import Session, Request | ||||||
|  | from datetime import datetime, timezone | ||||||
|  | 
 | ||||||
|  | from varken.helpers import connection_handler, hashit | ||||||
|  | from varken.structures import OverseerrRequestCounts | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class OverseerrAPI(object): | ||||||
|  |     def __init__(self, server, dbmanager): | ||||||
|  |         self.dbmanager = dbmanager | ||||||
|  |         self.server = server | ||||||
|  |         # Create session to reduce server web thread load, and globally define pageSize for all requests | ||||||
|  |         self.session = Session() | ||||||
|  |         self.session.headers = {'X-Api-Key': self.server.api_key} | ||||||
|  |         self.logger = getLogger() | ||||||
|  | 
 | ||||||
|  |     def __repr__(self): | ||||||
|  |         return f"<overseerr-{self.server.id}>" | ||||||
|  | 
 | ||||||
|  |     def get_request_counts(self): | ||||||
|  |         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||||
|  |         endpoint = '/api/v1/request/count' | ||||||
|  | 
 | ||||||
|  |         req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) | ||||||
|  |         get_req = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  | 
 | ||||||
|  |         if not get_req: | ||||||
|  |             return | ||||||
|  | 
 | ||||||
|  |         requests = OverseerrRequestCounts(**get_req) | ||||||
|  |         influx_payload = [ | ||||||
|  |             { | ||||||
|  |                 "measurement": "Overseerr", | ||||||
|  |                 "tags": { | ||||||
|  |                     "type": "Request_Counts" | ||||||
|  |                 }, | ||||||
|  |                 "time": now, | ||||||
|  |                 "fields": { | ||||||
|  |                     "pending": requests.pending, | ||||||
|  |                     "approved": requests.approved, | ||||||
|  |                     "processing": requests.processing, | ||||||
|  |                     "available": requests.available, | ||||||
|  |                     "total": requests.total, | ||||||
|  |                     "movies": requests.movie, | ||||||
|  |                     "tv": requests.tv, | ||||||
|  |                     "declined": requests.declined | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         ] | ||||||
|  | 
 | ||||||
|  |         if influx_payload: | ||||||
|  |             self.dbmanager.write_points(influx_payload) | ||||||
|  |         else: | ||||||
|  |             self.logger.warning("No data to send to influx for overseerr-request-counts instance, discarding.") | ||||||
|  | 
 | ||||||
|  |     def get_latest_requests(self): | ||||||
|  |         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||||
|  |         endpoint = '/api/v1/request?take=' + str(self.server.num_latest_requests_to_fetch) + '&filter=all&sort=added' | ||||||
|  |         movie_endpoint = '/api/v1/movie/' | ||||||
|  |         tv_endpoint = '/api/v1/tv/' | ||||||
|  | 
 | ||||||
|  |         # GET THE LATEST n REQUESTS | ||||||
|  |         req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) | ||||||
|  |         get_latest_req = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  |  | ||||||
|  |         # RETURN NOTHING IF NO RESULTS | ||||||
|  |         if not get_latest_req: | ||||||
|  |             self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.") | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         influx_payload = [] | ||||||
|  |  | ||||||
|  |         # Request Type: Movie = 1, TV Show = 0 | ||||||
|  |         for result in get_latest_req['results']: | ||||||
|  |             if result['type'] == 'tv': | ||||||
|  |                 req = self.session.prepare_request(Request('GET', | ||||||
|  |                                                            self.server.url + | ||||||
|  |                                                            tv_endpoint + | ||||||
|  |                                                            str(result['media']['tmdbId']))) | ||||||
|  |                 get_tv_req = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  |                 hash_id = hashit(f'{get_tv_req["id"]}{get_tv_req["name"]}') | ||||||
|  |  | ||||||
|  |                 influx_payload.append( | ||||||
|  |                     { | ||||||
|  |                         "measurement": "Overseerr", | ||||||
|  |                         "tags": { | ||||||
|  |                             "type": "Requests", | ||||||
|  |                             "server": self.server.id, | ||||||
|  |                             "request_type": 0, | ||||||
|  |                             "status": get_tv_req['mediaInfo']['status'], | ||||||
|  |                             "title": get_tv_req['name'], | ||||||
|  |                             "requested_user": get_tv_req['mediaInfo']['requests'][0]['requestedBy']['displayName'], | ||||||
|  |                             "requested_date": get_tv_req['mediaInfo']['requests'][0]['createdAt'] | ||||||
|  |                         }, | ||||||
|  |                         "time": now, | ||||||
|  |                         "fields": { | ||||||
|  |                             "hash": hash_id | ||||||
|  |                         } | ||||||
|  |                     } | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |             if result['type'] == 'movie': | ||||||
|  |                 req = self.session.prepare_request(Request('GET', | ||||||
|  |                                                            self.server.url + | ||||||
|  |                                                            movie_endpoint + | ||||||
|  |                                                            str(result['media']['tmdbId']))) | ||||||
|  |                 get_movie_req = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  |                 hash_id = hashit(f'{get_movie_req["id"]}{get_movie_req["title"]}') | ||||||
|  |  | ||||||
|  |                 influx_payload.append( | ||||||
|  |                     { | ||||||
|  |                         "measurement": "Overseerr", | ||||||
|  |                         "tags": { | ||||||
|  |                             "type": "Requests", | ||||||
|  |                             "server": self.server.id, | ||||||
|  |                             "request_type": 1, | ||||||
|  |                             "status": get_movie_req['mediaInfo']['status'], | ||||||
|  |                             "title": get_movie_req['title'], | ||||||
|  |                             "requested_user": get_movie_req['mediaInfo']['requests'][0]['requestedBy']['displayName'], | ||||||
|  |                             "requested_date": get_movie_req['mediaInfo']['requests'][0]['createdAt'] | ||||||
|  |                         }, | ||||||
|  |                         "time": now, | ||||||
|  |                         "fields": { | ||||||
|  |                             "hash": hash_id | ||||||
|  |                         } | ||||||
|  |                     } | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |         if influx_payload: | ||||||
|  |             self.dbmanager.write_points(influx_payload) | ||||||
|  |         else: | ||||||
|  |             self.logger.warning("No data to send to influx for overseerr-latest-requests instance, discarding.") | ||||||
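One robustness note on get_latest_requests: connection_handler can come back empty for the per-item movie/tv lookups (a failed or non-2xx response), and get_tv_req / get_movie_req are subscripted without a guard, so a single failed lookup would raise TypeError and abort the whole pass. A sketch of the guard that would skip the bad item instead; the continue lines are a suggested addition, not part of this patch:

    get_tv_req = connection_handler(self.session, req, self.server.verify_ssl)
    if not get_tv_req:   # suggested guard: lookup failed, skip this request
        continue
    hash_id = hashit(f'{get_tv_req["id"]}{get_tv_req["name"]}')
    # ... and the same guard after the get_movie_req lookup in the movie branch.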
varken/radarr.py
|  | @ -2,7 +2,7 @@ from logging import getLogger | ||||||
| from requests import Session, Request | from requests import Session, Request | ||||||
| from datetime import datetime, timezone | from datetime import datetime, timezone | ||||||
|  |  | ||||||
| from varken.structures import RadarrMovie, Queue | from varken.structures import QueuePages, RadarrMovie, RadarrQueue | ||||||
| from varken.helpers import hashit, connection_handler | from varken.helpers import hashit, connection_handler | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @ -19,7 +19,7 @@ class RadarrAPI(object): | ||||||
|         return f"<radarr-{self.server.id}>" |         return f"<radarr-{self.server.id}>" | ||||||
|  |  | ||||||
|     def get_missing(self): |     def get_missing(self): | ||||||
|         endpoint = '/api/movie' |         endpoint = '/api/v3/movie' | ||||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() |         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||||
|         influx_payload = [] |         influx_payload = [] | ||||||
|         missing = [] |         missing = [] | ||||||
|  | @ -37,7 +37,7 @@ class RadarrAPI(object): | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         for movie in movies: |         for movie in movies: | ||||||
|             if movie.monitored and not movie.downloaded: |             if movie.monitored and not movie.hasFile: | ||||||
|                 if movie.isAvailable: |                 if movie.isAvailable: | ||||||
|                     ma = 0 |                     ma = 0 | ||||||
|                 else: |                 else: | ||||||
|  | @ -66,35 +66,53 @@ class RadarrAPI(object): | ||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         self.dbmanager.write_points(influx_payload) |         if influx_payload: | ||||||
|  |             self.dbmanager.write_points(influx_payload) | ||||||
|  |         else: | ||||||
|  |             self.logger.warning("No data to send to influx for radarr-missing instance, discarding.") | ||||||
|  |  | ||||||
|     def get_queue(self): |     def get_queue(self): | ||||||
|         endpoint = '/api/queue' |         endpoint = '/api/v3/queue' | ||||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() |         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||||
|         influx_payload = [] |         influx_payload = [] | ||||||
|  |         pageSize = 250 | ||||||
|  |         params = {'pageSize': pageSize, 'includeMovie': True, 'includeUnknownMovieItems': False} | ||||||
|  |         queueResponse = [] | ||||||
|         queue = [] |         queue = [] | ||||||
|  |  | ||||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) |         req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||||
|         get = connection_handler(self.session, req, self.server.verify_ssl) |         get = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  |  | ||||||
|         if not get: |         if not get: | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         for movie in get: |         response = QueuePages(**get) | ||||||
|             try: |         queueResponse.extend(response.records) | ||||||
|                 movie['movie'] = RadarrMovie(**movie['movie']) |  | ||||||
|             except TypeError as e: |         while response.totalRecords > response.page * response.pageSize: | ||||||
|                 self.logger.error('TypeError has occurred : %s while creating RadarrMovie structure', e) |             page = response.page + 1 | ||||||
|  |             params = {'pageSize': pageSize, 'page': page, 'includeMovie': True, 'includeUnknownMovieItems': False} | ||||||
|  |             req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||||
|  |             get = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  |             if not get: | ||||||
|                 return |                 return | ||||||
|  |  | ||||||
|         try: |             response = QueuePages(**get) | ||||||
|             download_queue = [Queue(**movie) for movie in get] |             queueResponse.extend(response.records) | ||||||
|         except TypeError as e: |  | ||||||
|             self.logger.error('TypeError has occurred : %s while creating Queue structure', e) |         download_queue = [] | ||||||
|  |         for queueItem in queueResponse: | ||||||
|  |             try: | ||||||
|  |                 download_queue.append(RadarrQueue(**queueItem)) | ||||||
|  |             except TypeError as e: | ||||||
|  |                 self.logger.warning('TypeError has occurred : %s while creating RadarrQueue structure', e) | ||||||
|  |                 return | ||||||
|  |         if not download_queue: | ||||||
|  |             self.logger.warning("No data to send to influx for radarr-queue instance, discarding.") | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         for queue_item in download_queue: |         for queue_item in download_queue: | ||||||
|             movie = queue_item.movie |             movie = RadarrMovie(**queue_item.movie) | ||||||
|  |  | ||||||
|             name = f'{movie.title} ({movie.year})' |             name = f'{movie.title} ({movie.year})' | ||||||
|  |  | ||||||
|  | @ -128,4 +146,7 @@ class RadarrAPI(object): | ||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         self.dbmanager.write_points(influx_payload) |         if influx_payload: | ||||||
|  |             self.dbmanager.write_points(influx_payload) | ||||||
|  |         else: | ||||||
|  |             self.logger.warning("No data to send to influx for radarr-queue instance, discarding.") | ||||||
|  |  | ||||||
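The reworked get_queue pages through the v3 queue rather than taking one flat list. The while condition totalRecords > page * pageSize is true exactly while records remain beyond the pages fetched so far, so the walk stops once page * pageSize covers totalRecords. The same walk in isolation; fetch_page is a hypothetical stand-in for the prepare_request/connection_handler pair:

    def fetch_all_queue_pages(fetch_page, page_size=250):
        # Collect every record from a paged *arr v3 queue endpoint.
        records = []
        page = 1
        while True:
            data = fetch_page(page=page, page_size=page_size)  # dict or None
            if not data:
                return None  # mirror the collector: bail out on a failed request
            response = QueuePages(**data)
            records.extend(response.records)
            if response.totalRecords <= response.page * response.pageSize:
                return records
            page = response.page + 1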
varken/sonarr.py
|  | @ -2,7 +2,7 @@ from logging import getLogger | ||||||
| from requests import Session, Request | from requests import Session, Request | ||||||
| from datetime import datetime, timezone, date, timedelta | from datetime import datetime, timezone, date, timedelta | ||||||
|  |  | ||||||
| from varken.structures import Queue, SonarrTVShow | from varken.structures import SonarrEpisode, SonarrTVShow, SonarrQueue, QueuePages | ||||||
| from varken.helpers import hashit, connection_handler | from varken.helpers import hashit, connection_handler | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @ -19,16 +19,28 @@ class SonarrAPI(object): | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         return f"<sonarr-{self.server.id}>" |         return f"<sonarr-{self.server.id}>" | ||||||
|  |  | ||||||
|  |     def get_episode(self, id): | ||||||
|  |         endpoint = '/api/v3/episode' | ||||||
|  |         params = {'episodeIds': id} | ||||||
|  |  | ||||||
|  |         req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||||
|  |         get = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  |  | ||||||
|  |         if not get: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         return SonarrEpisode(**get[0]) | ||||||
|  |  | ||||||
|     def get_calendar(self, query="Missing"): |     def get_calendar(self, query="Missing"): | ||||||
|         endpoint = '/api/calendar/' |         endpoint = '/api/v3/calendar/' | ||||||
|         today = str(date.today()) |         today = str(date.today()) | ||||||
|         last_days = str(date.today() - timedelta(days=self.server.missing_days)) |         last_days = str(date.today() - timedelta(days=self.server.missing_days)) | ||||||
|         future = str(date.today() + timedelta(days=self.server.future_days)) |         future = str(date.today() + timedelta(days=self.server.future_days)) | ||||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() |         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||||
|         if query == "Missing": |         if query == "Missing": | ||||||
|             params = {'start': last_days, 'end': today} |             params = {'start': last_days, 'end': today, 'includeSeries': True} | ||||||
|         else: |         else: | ||||||
|             params = {'start': today, 'end': future} |             params = {'start': today, 'end': future, 'includeSeries': True} | ||||||
|         influx_payload = [] |         influx_payload = [] | ||||||
|         air_days = [] |         air_days = [] | ||||||
|         missing = [] |         missing = [] | ||||||
|  | @ -42,22 +54,24 @@ class SonarrAPI(object): | ||||||
|         tv_shows = [] |         tv_shows = [] | ||||||
|         for show in get: |         for show in get: | ||||||
|             try: |             try: | ||||||
|                 tv_shows.append(SonarrTVShow(**show)) |                 tv_shows.append(SonarrEpisode(**show)) | ||||||
|             except TypeError as e: |             except TypeError as e: | ||||||
|                 self.logger.error('TypeError has occurred : %s while creating SonarrTVShow structure for show. Data ' |                 self.logger.error('TypeError has occurred : %s while creating SonarrEpisode structure for show. Data ' | ||||||
|                                   'attempted is: %s', e, show) |                                   'attempted is: %s', e, show) | ||||||
|  |  | ||||||
|         for show in tv_shows: |         for episode in tv_shows: | ||||||
|             sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}' |             tvShow = episode.series | ||||||
|             if show.hasFile: |             sxe = f'S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}' | ||||||
|  |             if episode.hasFile: | ||||||
|                 downloaded = 1 |                 downloaded = 1 | ||||||
|             else: |             else: | ||||||
|                 downloaded = 0 |                 downloaded = 0 | ||||||
|             if query == "Missing": |             if query == "Missing": | ||||||
|                 if show.monitored and not downloaded: |                 if episode.monitored and not downloaded: | ||||||
|                     missing.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id)) |                     missing.append((tvShow['title'], downloaded, sxe, episode.title, | ||||||
|  |                                     episode.airDateUtc, episode.seriesId)) | ||||||
|             else: |             else: | ||||||
|                 air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDateUtc, show.id)) |                 air_days.append((tvShow['title'], downloaded, sxe, episode.title, episode.airDateUtc, episode.seriesId)) | ||||||
|  |  | ||||||
|         for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing): |         for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (air_days or missing): | ||||||
|             hash_id = hashit(f'{self.server.id}{series_title}{sxe}') |             hash_id = hashit(f'{self.server.id}{series_title}{sxe}') | ||||||
|  | @ -81,45 +95,68 @@ class SonarrAPI(object): | ||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         self.dbmanager.write_points(influx_payload) |         if influx_payload: | ||||||
|  |             self.dbmanager.write_points(influx_payload) | ||||||
|  |         else: | ||||||
|  |             self.logger.warning("No data to send to influx for sonarr-calendar instance, discarding.") | ||||||
|  |  | ||||||
|     def get_queue(self): |     def get_queue(self): | ||||||
|         influx_payload = [] |         influx_payload = [] | ||||||
|         endpoint = '/api/queue' |         endpoint = '/api/v3/queue' | ||||||
|         now = datetime.now(timezone.utc).astimezone().isoformat() |         now = datetime.now(timezone.utc).astimezone().isoformat() | ||||||
|  |         pageSize = 250 | ||||||
|  |         params = {'pageSize': pageSize, 'includeSeries': True, 'includeEpisode': True, | ||||||
|  |                   'includeUnknownSeriesItems': False} | ||||||
|  |         queueResponse = [] | ||||||
|         queue = [] |         queue = [] | ||||||
|  |  | ||||||
|         req = self.session.prepare_request(Request('GET', self.server.url + endpoint)) |         req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||||
|         get = connection_handler(self.session, req, self.server.verify_ssl) |         get = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  |  | ||||||
|         if not get: |         if not get: | ||||||
|             return |             return | ||||||
|  |  | ||||||
|  |         response = QueuePages(**get) | ||||||
|  |         queueResponse.extend(response.records) | ||||||
|  |  | ||||||
|  |         while response.totalRecords > response.page * response.pageSize: | ||||||
|  |             page = response.page + 1 | ||||||
|  |             params = {'pageSize': pageSize, 'page': page, 'includeSeries': True, 'includeEpisode': True, | ||||||
|  |                       'includeUnknownSeriesItems': False} | ||||||
|  |             req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params)) | ||||||
|  |             get = connection_handler(self.session, req, self.server.verify_ssl) | ||||||
|  |             if not get: | ||||||
|  |                 return | ||||||
|  |  | ||||||
|  |             response = QueuePages(**get) | ||||||
|  |             queueResponse.extend(response.records) | ||||||
|  |  | ||||||
|         download_queue = [] |         download_queue = [] | ||||||
|         for show in get: |         for queueItem in queueResponse: | ||||||
|             try: |             try: | ||||||
|                 download_queue.append(Queue(**show)) |                 download_queue.append(SonarrQueue(**queueItem)) | ||||||
|             except TypeError as e: |             except TypeError as e: | ||||||
|                 self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: ' |                 self.logger.error('TypeError has occurred : %s while creating Queue structure. Data attempted is: ' | ||||||
|                                   '%s', e, show) |                                   '%s', e, queueItem) | ||||||
|         if not download_queue: |         if not download_queue: | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         for show in download_queue: |         for queueItem in download_queue: | ||||||
|  |             tvShow = SonarrTVShow(**queueItem.series) | ||||||
|             try: |             try: | ||||||
|                 sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}" |                 episode = SonarrEpisode(**queueItem.episode) | ||||||
|  |                 sxe = f"S{episode.seasonNumber:0>2}E{episode.episodeNumber:0>2}" | ||||||
|             except TypeError as e: |             except TypeError as e: | ||||||
|                 self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \ |                 self.logger.error('TypeError has occurred : %s while processing the sonarr queue. \ | ||||||
|                                   Remove invalid queue entry. Data attempted is: %s', e, show) |                                   Remove invalid queue entry. Data attempted is: %s', e, queueItem) | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             if show.protocol.upper() == 'USENET': |             if queueItem.protocol.upper() == 'USENET': | ||||||
|                 protocol_id = 1 |                 protocol_id = 1 | ||||||
|             else: |             else: | ||||||
|                 protocol_id = 0 |                 protocol_id = 0 | ||||||
|  |  | ||||||
|             queue.append((show.series['title'], show.episode['title'], show.protocol.upper(), |             queue.append((tvShow.title, episode.title, queueItem.protocol.upper(), | ||||||
|                           protocol_id, sxe, show.id, show.quality['quality']['name'])) |                           protocol_id, sxe, queueItem.seriesId, queueItem.quality['quality']['name'])) | ||||||
|  |  | ||||||
|         for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue: |         for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue: | ||||||
|             hash_id = hashit(f'{self.server.id}{series_title}{sxe}') |             hash_id = hashit(f'{self.server.id}{series_title}{sxe}') | ||||||
|  | @ -143,7 +180,8 @@ class SonarrAPI(object): | ||||||
|                     } |                     } | ||||||
|                 } |                 } | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         if influx_payload: |         if influx_payload: | ||||||
|             self.dbmanager.write_points(influx_payload) |             self.dbmanager.write_points(influx_payload) | ||||||
|         else: |         else: | ||||||
|             self.logger.debug("No data to send to influx for sonarr instance, discarding.") |             self.logger.warning("No data to send to influx for sonarr-queue instance, discarding.") | ||||||
|  |  | ||||||
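A small aside on the sxe strings built in both methods: the format spec 0>2 pads the number with zeros to a minimum width of two, which produces the usual SxxExx labels and still leaves room for longer numbers:

    season, episode = 2, 9
    assert f'S{season:0>2}E{episode:0>2}' == 'S02E09'
    assert f'S{1:0>2}E{104:0>2}' == 'S01E104'   # 0>2 is a minimum width, not a cap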
varken/structures.py
|  | @ -20,6 +20,16 @@ class InfluxServer(NamedTuple): | ||||||
|     verify_ssl: bool = False |     verify_ssl: bool = False | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class Influx2Server(NamedTuple): | ||||||
|  |     url: str = 'localhost' | ||||||
|  |     org: str = 'server' | ||||||
|  |     token: str = 'TOKEN' | ||||||
|  |     bucket: str = 'varken' | ||||||
|  |     timeout: int = 10000 | ||||||
|  |     ssl: bool = False | ||||||
|  |     verify_ssl: bool = False | ||||||
|  |  | ||||||
|  |  | ||||||
| class SonarrServer(NamedTuple): | class SonarrServer(NamedTuple): | ||||||
|     api_key: str = None |     api_key: str = None | ||||||
|     future_days: int = 0 |     future_days: int = 0 | ||||||
|  | @ -57,6 +67,17 @@ class OmbiServer(NamedTuple): | ||||||
|     verify_ssl: bool = False |     verify_ssl: bool = False | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class OverseerrServer(NamedTuple): | ||||||
|  |     api_key: str = None | ||||||
|  |     id: int = None | ||||||
|  |     url: str = None | ||||||
|  |     verify_ssl: bool = False | ||||||
|  |     get_request_total_counts: bool = False | ||||||
|  |     request_total_run_seconds: int = 30 | ||||||
|  |     num_latest_requests_to_fetch: int = 10 | ||||||
|  |     num_latest_requests_seconds: int = 30 | ||||||
|  |  | ||||||
|  |  | ||||||
| class TautulliServer(NamedTuple): | class TautulliServer(NamedTuple): | ||||||
|     api_key: str = None |     api_key: str = None | ||||||
|     fallback_ip: str = None |     fallback_ip: str = None | ||||||
|  | @ -91,22 +112,13 @@ class UniFiServer(NamedTuple): | ||||||
|  |  | ||||||
|  |  | ||||||
| # Shared | # Shared | ||||||
| class Queue(NamedTuple): | class QueuePages(NamedTuple): | ||||||
|     downloadId: str = None |     page: int = None | ||||||
|     episode: dict = None |     pageSize: int = None | ||||||
|     estimatedCompletionTime: str = None |     sortKey: str = None | ||||||
|     id: int = None |     sortDirection: str = None | ||||||
|     movie: dict = None |     totalRecords: str = None | ||||||
|     protocol: str = None |     records: list = None | ||||||
|     quality: dict = None |  | ||||||
|     series: dict = None |  | ||||||
|     size: float = None |  | ||||||
|     sizeleft: float = None |  | ||||||
|     status: str = None |  | ||||||
|     statusMessages: list = None |  | ||||||
|     timeleft: str = None |  | ||||||
|     title: str = None |  | ||||||
|     trackedDownloadStatus: str = None |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # Ombi Structures | # Ombi Structures | ||||||
|  | @ -127,8 +139,10 @@ class OmbiTVRequest(NamedTuple): | ||||||
|     childRequests: list = None |     childRequests: list = None | ||||||
|     denied: bool = None |     denied: bool = None | ||||||
|     deniedReason: None = None |     deniedReason: None = None | ||||||
|  |     externalProviderId: str = None | ||||||
|     id: int = None |     id: int = None | ||||||
|     imdbId: str = None |     imdbId: str = None | ||||||
|  |     languageProfile: str = None | ||||||
|     markedAsDenied: str = None |     markedAsDenied: str = None | ||||||
|     overview: str = None |     overview: str = None | ||||||
|     posterPath: str = None |     posterPath: str = None | ||||||
|  | @ -145,72 +159,159 @@ class OmbiTVRequest(NamedTuple): | ||||||
|  |  | ||||||
| class OmbiMovieRequest(NamedTuple): | class OmbiMovieRequest(NamedTuple): | ||||||
|     approved: bool = None |     approved: bool = None | ||||||
|  |     approved4K: bool = None | ||||||
|     available: bool = None |     available: bool = None | ||||||
|  |     available4K: bool = None | ||||||
|     background: str = None |     background: str = None | ||||||
|     canApprove: bool = None |     canApprove: bool = None | ||||||
|     denied: bool = None |     denied: bool = None | ||||||
|  |     denied4K: None = None | ||||||
|     deniedReason: None = None |     deniedReason: None = None | ||||||
|  |     deniedReason4K: None = None | ||||||
|     digitalRelease: bool = None |     digitalRelease: bool = None | ||||||
|     digitalReleaseDate: None = None |     digitalReleaseDate: None = None | ||||||
|  |     has4KRequest: bool = None | ||||||
|     id: int = None |     id: int = None | ||||||
|     imdbId: str = None |     imdbId: str = None | ||||||
|  |     is4kRequest: bool = None | ||||||
|     issueId: None = None |     issueId: None = None | ||||||
|     issues: None = None |     issues: None = None | ||||||
|  |     langCode: str = None | ||||||
|  |     languageCode: str = None | ||||||
|     markedAsApproved: str = None |     markedAsApproved: str = None | ||||||
|  |     markedAsApproved4K: str = None | ||||||
|     markedAsAvailable: None = None |     markedAsAvailable: None = None | ||||||
|  |     markedAsAvailable4K: None = None | ||||||
|     markedAsDenied: str = None |     markedAsDenied: str = None | ||||||
|  |     markedAsDenied4K: str = None | ||||||
|     overview: str = None |     overview: str = None | ||||||
|     posterPath: str = None |     posterPath: str = None | ||||||
|     qualityOverride: int = None |     qualityOverride: int = None | ||||||
|     released: bool = None |     released: bool = None | ||||||
|     releaseDate: str = None |     releaseDate: str = None | ||||||
|  |     requestedByAlias: str = None | ||||||
|     requestedDate: str = None |     requestedDate: str = None | ||||||
|  |     requestedDate4k: str = None | ||||||
|     requestedUser: dict = None |     requestedUser: dict = None | ||||||
|     requestedUserId: str = None |     requestedUserId: str = None | ||||||
|  |     requestStatus: str = None | ||||||
|     requestType: int = None |     requestType: int = None | ||||||
|     rootPathOverride: int = None |     rootPathOverride: int = None | ||||||
|     showSubscribe: bool = None |     showSubscribe: bool = None | ||||||
|  |     source: int = None | ||||||
|     status: str = None |     status: str = None | ||||||
|     subscribed: bool = None |     subscribed: bool = None | ||||||
|     theMovieDbId: int = None |     theMovieDbId: int = None | ||||||
|     title: str = None |     title: str = None | ||||||
|     langCode: str = None |  | ||||||
|     languageCode: str = None |  | ||||||
|     requestedByAlias: str = None | # Overseerr | ||||||
|     requestStatus: str = None | class OverseerrRequestCounts(NamedTuple): | ||||||
|  |     pending: int = None | ||||||
|  |     approved: int = None | ||||||
|  |     processing: int = None | ||||||
|  |     available: int = None | ||||||
|  |     total: int = None | ||||||
|  |     movie: int = None | ||||||
|  |     tv: int = None | ||||||
|  |     declined: int = None | ||||||
|  |  | ||||||
|  |  | ||||||
| # Sonarr | # Sonarr | ||||||
| class SonarrTVShow(NamedTuple): | class SonarrTVShow(NamedTuple): | ||||||
|  |     added: str = None | ||||||
|  |     airTime: str = None | ||||||
|  |     alternateTitles: list = None | ||||||
|  |     certification: str = None | ||||||
|  |     cleanTitle: str = None | ||||||
|  |     ended: bool = None | ||||||
|  |     firstAired: str = None | ||||||
|  |     genres: list = None | ||||||
|  |     id: int = None | ||||||
|  |     images: list = None | ||||||
|  |     imdbId: str = None | ||||||
|  |     languageProfileId: int = None | ||||||
|  |     monitored: bool = None | ||||||
|  |     nextAiring: str = None | ||||||
|  |     network: str = None | ||||||
|  |     overview: str = None | ||||||
|  |     path: str = None | ||||||
|  |     previousAiring: str = None | ||||||
|  |     qualityProfileId: int = None | ||||||
|  |     ratings: dict = None | ||||||
|  |     rootFolderPath: str = None | ||||||
|  |     runtime: int = None | ||||||
|  |     seasonFolder: bool = None | ||||||
|  |     seasons: list = None | ||||||
|  |     seriesType: str = None | ||||||
|  |     sortTitle: str = None | ||||||
|  |     statistics: dict = None | ||||||
|  |     status: str = None | ||||||
|  |     tags: list = None | ||||||
|  |     title: str = None | ||||||
|  |     titleSlug: str = None | ||||||
|  |     tvdbId: int = None | ||||||
|  |     tvMazeId: int = None | ||||||
|  |     tvRageId: int = None | ||||||
|  |     useSceneNumbering: bool = None | ||||||
|  |     year: int = None | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class SonarrEpisode(NamedTuple): | ||||||
|     absoluteEpisodeNumber: int = None |     absoluteEpisodeNumber: int = None | ||||||
|     airDate: str = None |     airDate: str = None | ||||||
|     airDateUtc: str = None |     airDateUtc: str = None | ||||||
|     episodeFile: dict = None |  | ||||||
|     episodeFileId: int = None |     episodeFileId: int = None | ||||||
|     episodeNumber: int = None |     episodeNumber: int = None | ||||||
|  |     grabbed: bool = None | ||||||
|     hasFile: bool = None |     hasFile: bool = None | ||||||
|     id: int = None |     id: int = None | ||||||
|     lastSearchTime: str = None |  | ||||||
|     monitored: bool = None |     monitored: bool = None | ||||||
|     overview: str = None |     overview: str = None | ||||||
|     sceneAbsoluteEpisodeNumber: int = None |  | ||||||
|     sceneEpisodeNumber: int = None |  | ||||||
|     sceneSeasonNumber: int = None |  | ||||||
|     seasonNumber: int = None |     seasonNumber: int = None | ||||||
|     series: dict = None |  | ||||||
|     seriesId: int = None |     seriesId: int = None | ||||||
|     title: str = None |     title: str = None | ||||||
|     unverifiedSceneNumbering: bool = None |     unverifiedSceneNumbering: bool = None | ||||||
|  |     sceneAbsoluteEpisodeNumber: int = None | ||||||
|  |     sceneEpisodeNumber: int = None | ||||||
|  |     sceneSeasonNumber: int = None | ||||||
|  |     series: SonarrTVShow = None | ||||||
|  |     tvdbId: int = None | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class SonarrQueue(NamedTuple): | ||||||
|  |     downloadClient: str = None | ||||||
|  |     downloadId: str = None | ||||||
|  |     episodeId: int = None | ||||||
|  |     id: int = None | ||||||
|  |     indexer: str = None | ||||||
|  |     language: dict = None | ||||||
|  |     protocol: str = None | ||||||
|  |     quality: dict = None | ||||||
|  |     size: float = None | ||||||
|  |     sizeleft: float = None | ||||||
|  |     status: str = None | ||||||
|  |     statusMessages: list = None | ||||||
|  |     title: str = None | ||||||
|  |     trackedDownloadState: str = None | ||||||
|  |     trackedDownloadStatus: str = None | ||||||
|  |     seriesId: int = None | ||||||
|  |     errorMessage: str = None | ||||||
|  |     outputPath: str = None | ||||||
|  |     series: SonarrTVShow = None | ||||||
|  |     episode: SonarrEpisode = None | ||||||
|  |     timeleft: str = None | ||||||
|  |     estimatedCompletionTime: str = None | ||||||
|  |  | ||||||
|  |  | ||||||
| # Radarr | # Radarr | ||||||
| class RadarrMovie(NamedTuple): | class RadarrMovie(NamedTuple): | ||||||
|     added: str = None |     added: str = None | ||||||
|     addOptions: str = None |     alternateTitles: list = None | ||||||
|     alternativeTitles: list = None |  | ||||||
|     certification: str = None |     certification: str = None | ||||||
|     cleanTitle: str = None |     cleanTitle: str = None | ||||||
|     downloaded: bool = None |     collection: dict = None | ||||||
|  |     digitalRelease: str = None | ||||||
|     folderName: str = None |     folderName: str = None | ||||||
|     genres: list = None |     genres: list = None | ||||||
|     hasFile: bool = None |     hasFile: bool = None | ||||||
|  | @ -219,32 +320,57 @@ class RadarrMovie(NamedTuple): | ||||||
|     imdbId: str = None |     imdbId: str = None | ||||||
|     inCinemas: str = None |     inCinemas: str = None | ||||||
|     isAvailable: bool = None |     isAvailable: bool = None | ||||||
|     lastInfoSync: str = None |  | ||||||
|     minimumAvailability: str = None |     minimumAvailability: str = None | ||||||
|     monitored: bool = None |     monitored: bool = None | ||||||
|     movieFile: dict = None |     movieFile: dict = None | ||||||
|  |     originalTitle: str = None | ||||||
|     overview: str = None |     overview: str = None | ||||||
|     path: str = None |     path: str = None | ||||||
|     pathState: str = None |  | ||||||
|     physicalRelease: str = None |     physicalRelease: str = None | ||||||
|     physicalReleaseNote: str = None |  | ||||||
|     profileId: int = None |  | ||||||
|     qualityProfileId: int = None |     qualityProfileId: int = None | ||||||
|     ratings: dict = None |     ratings: dict = None | ||||||
|     runtime: int = None |     runtime: int = None | ||||||
|     secondaryYear: str = None |     secondaryYear: int = None | ||||||
|     secondaryYearSourceId: int = None |     secondaryYearSourceId: int = None | ||||||
|     sizeOnDisk: int = None |     sizeOnDisk: float = None | ||||||
|     sortTitle: str = None |     sortTitle: str = None | ||||||
|     status: str = None |     status: str = None | ||||||
|     studio: str = None |     studio: str = None | ||||||
|     tags: list = None |     tags: list = None | ||||||
|     title: str = None |  | ||||||
|     titleSlug: str = None |     titleSlug: str = None | ||||||
|     tmdbId: int = None |     tmdbId: int = None | ||||||
|     website: str = None |     website: str = None | ||||||
|     year: int = None |     year: int = None | ||||||
|     youTubeTrailerId: str = None |     youTubeTrailerId: str = None | ||||||
|  |     title: str = None | ||||||
|  |     originalLanguage: str = None | ||||||
|  |     addOptions: str = None | ||||||
|  |     popularity: str = None | ||||||
|  |  | ||||||
|  |  | ||||||
|  | # Radarr Queue | ||||||
|  | class RadarrQueue(NamedTuple): | ||||||
|  |     customFormats: list = None | ||||||
|  |     downloadClient: str = None | ||||||
|  |     downloadId: str = None | ||||||
|  |     id: int = None | ||||||
|  |     indexer: str = None | ||||||
|  |     languages: list = None | ||||||
|  |     movieId: int = None | ||||||
|  |     protocol: str = None | ||||||
|  |     quality: dict = None | ||||||
|  |     size: float = None | ||||||
|  |     sizeleft: float = None | ||||||
|  |     status: str = None | ||||||
|  |     statusMessages: list = None | ||||||
|  |     title: str = None | ||||||
|  |     trackedDownloadState: str = None | ||||||
|  |     trackedDownloadStatus: str = None | ||||||
|  |     timeleft: str = None | ||||||
|  |     estimatedCompletionTime: str = None | ||||||
|  |     errorMessage: str = None | ||||||
|  |     outputPath: str = None | ||||||
|  |     movie: RadarrMovie = None | ||||||
|  |  | ||||||
|  |  | ||||||
| # Sickchill | # Sickchill | ||||||
|  | @ -364,6 +491,7 @@ class TautulliStream(NamedTuple): | ||||||
|     reference_id: int = None |     reference_id: int = None | ||||||
|     relay: int = None |     relay: int = None | ||||||
|     relayed: int = None |     relayed: int = None | ||||||
|  |     row_id: int = None | ||||||
|     section_id: str = None |     section_id: str = None | ||||||
|     secure: str = None |     secure: str = None | ||||||
|     selected: int = None |     selected: int = None | ||||||
|  | @ -402,6 +530,7 @@ class TautulliStream(NamedTuple): | ||||||
|     stream_video_codec: str = None |     stream_video_codec: str = None | ||||||
|     stream_video_codec_level: str = None |     stream_video_codec_level: str = None | ||||||
|     stream_video_decision: str = None |     stream_video_decision: str = None | ||||||
|  |     stream_video_dynamic_range: str = None | ||||||
|     stream_video_framerate: str = None |     stream_video_framerate: str = None | ||||||
|     stream_video_full_resolution: str = None |     stream_video_full_resolution: str = None | ||||||
|     stream_video_height: str = None |     stream_video_height: str = None | ||||||
|  | @ -461,6 +590,7 @@ class TautulliStream(NamedTuple): | ||||||
|     video_codec: str = None |     video_codec: str = None | ||||||
|     video_codec_level: str = None |     video_codec_level: str = None | ||||||
|     video_decision: str = None |     video_decision: str = None | ||||||
|  |     video_dynamic_range: str = None | ||||||
|     video_frame_rate: str = None |     video_frame_rate: str = None | ||||||
|     video_framerate: str = None |     video_framerate: str = None | ||||||
|     video_full_resolution: str = None |     video_full_resolution: str = None | ||||||
|  | @ -491,7 +621,9 @@ class LidarrQueue(NamedTuple): | ||||||
|     sizeleft: float = None |     sizeleft: float = None | ||||||
|     status: str = None |     status: str = None | ||||||
|     trackedDownloadStatus: str = None |     trackedDownloadStatus: str = None | ||||||
|  |     trackedDownloadState: str = None | ||||||
|     statusMessages: list = None |     statusMessages: list = None | ||||||
|  |     errorMessage: str = None | ||||||
|     downloadId: str = None |     downloadId: str = None | ||||||
|     protocol: str = None |     protocol: str = None | ||||||
|     downloadClient: str = None |     downloadClient: str = None | ||||||
|  | @ -499,6 +631,7 @@ class LidarrQueue(NamedTuple): | ||||||
|     outputPath: str = None |     outputPath: str = None | ||||||
|     downloadForced: bool = None |     downloadForced: bool = None | ||||||
|     id: int = None |     id: int = None | ||||||
|  |     estimatedCompletionTime: str = None | ||||||
|  |  | ||||||
|  |  | ||||||
| class LidarrAlbum(NamedTuple): | class LidarrAlbum(NamedTuple): | ||||||
|  |  | ||||||
varken/tautulli.py
|  | @ -129,6 +129,7 @@ class TautulliAPI(object): | ||||||
|                     "tags": { |                     "tags": { | ||||||
|                         "type": "Session", |                         "type": "Session", | ||||||
|                         "session_id": session.session_id, |                         "session_id": session.session_id, | ||||||
|  |                         "ip_address": session.ip_address, | ||||||
|                         "friendly_name": session.friendly_name, |                         "friendly_name": session.friendly_name, | ||||||
|                         "username": session.username, |                         "username": session.username, | ||||||
|                         "title": session.full_title, |                         "title": session.full_title, | ||||||
|  | @ -327,6 +328,7 @@ class TautulliAPI(object): | ||||||
|                     "tags": { |                     "tags": { | ||||||
|                         "type": "Session", |                         "type": "Session", | ||||||
|                         "session_id": session.session_id, |                         "session_id": session.session_id, | ||||||
|  |                         "ip_address": session.ip_address, | ||||||
|                         "friendly_name": session.friendly_name, |                         "friendly_name": session.friendly_name, | ||||||
|                         "username": session.user, |                         "username": session.user, | ||||||
|                         "title": session.full_title, |                         "title": session.full_title, | ||||||
|  |  | ||||||
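The only Tautulli change is tagging each session point with ip_address. Worth flagging in review: Influx tags are indexed, so every distinct IP starts a new series, and on servers with many remote users this raises series cardinality; that is the usual price of being able to group and filter by IP. The resulting point shape, with illustrative values:

    point = {
        "measurement": "Tautulli",
        "tags": {
            "type": "Session",
            "session_id": "abc123",
            "ip_address": "203.0.113.7",   # new in this patch
            "friendly_name": "alice",
        },
        "time": "2021-06-15T12:00:00+00:00",
        "fields": {"hash": "d41d8cd9"},
    }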