Merge pull request #191 from Boerderij/develop
Nicholas St. Germain, 2020-12-21 12:09:14 -06:00 (committed via GitHub)
commit 23bcbbf1e6
21 changed files with 268 additions and 204 deletions

View file

@@ -2,7 +2,7 @@
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: awaiting-approval
labels: awaiting-triage
assignees: ''
---

View file

@@ -2,7 +2,7 @@
name: Feature request
about: Suggest an idea for this project
title: "[Feature Request]"
labels: awaiting-approval
labels: awaiting-triage
assignees: ''
---

View file

@@ -0,0 +1,23 @@
name: 'Docker Multi Login Action'
description: 'Log in to dockerhub, quay, and github container registry'
runs:
using: "composite"
steps:
- shell: bash
run: |
echo "🔑 Logging into dockerhub..."
if docker login --username ${{ fromJSON(env.secrets).DOCKERHUB_USERNAME }} --password ${{ fromJSON(env.secrets).DOCKERHUB_PASSWORD }} > /dev/null 2>&1; then
echo "🎉 Login Succeeded!"
fi
- shell: bash
run: |
echo "🔑 Logging into quay.io..."
if docker login quay.io --username ${{ fromJSON(env.secrets).QUAY_USERNAME }} --password ${{ fromJSON(env.secrets).QUAY_PASSWORD }} > /dev/null 2>&1; then
echo "🎉 Login Succeeded!"
fi
- shell: bash
run: |
echo "🔑 Logging into ghcr.io..."
if docker login ghcr.io --username ${{ fromJSON(env.secrets).GHCR_USERNAME }} --password ${{ fromJSON(env.secrets).GHCR_PASSWORD }} > /dev/null 2>&1; then
echo "🎉 Login Succeeded!"
fi
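A note on the pattern: composite run-steps actions could not read the secrets context directly when this was written, so the calling workflow serializes every secret with toJSON(secrets) into an env.secrets variable and the action picks individual credentials out with fromJSON. As a rough, hypothetical Python equivalent of the three login steps (registry hosts and secret names copied from the steps above; --password-stdin used instead of --password so the password stays out of the process list):

import json
import os
import subprocess

# Minimal sketch (not part of the PR): the same three logins the composite
# action performs, driven by the JSON blob the workflow passes via env.secrets.
REGISTRIES = [
    ("docker.io", "DOCKERHUB_USERNAME", "DOCKERHUB_PASSWORD"),
    ("quay.io", "QUAY_USERNAME", "QUAY_PASSWORD"),
    ("ghcr.io", "GHCR_USERNAME", "GHCR_PASSWORD"),
]

def docker_login_all(secrets_json: str) -> None:
    secrets = json.loads(secrets_json)
    for host, user_key, pass_key in REGISTRIES:
        user, password = secrets.get(user_key), secrets.get(pass_key)
        if not user or not password:
            continue  # skip registries we have no credentials for
        # --password-stdin keeps the password out of `ps` output
        subprocess.run(
            ["docker", "login", host, "--username", user, "--password-stdin"],
            input=password.encode(),
            check=True,
        )
        print(f"🎉 Logged in to {host}")

if __name__ == "__main__":
    docker_login_all(os.environ["secrets"])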

View file

@@ -0,0 +1,46 @@
name: 'Docker Target Image List Generator'
description: 'A Github Action to generate a list of fully qualified target images for docker related steps'
inputs:
registries:
description: "Comma separated list of docker registries"
required: false
default: "docker.io,quay.io,ghcr.io"
images:
description: "Comma separated list of images"
required: true
tags:
description: "Comma separated list of image tags"
required: false
default: "edge"
outputs:
fully-qualified-target-images:
description: "List of fully qualified docker target images"
value: ${{ steps.gen-fqti.outputs.fully-qualified-target-images }}
runs:
using: "composite"
steps:
- name: Generate fully qualified docker target images
id: gen-fqti
shell: bash
run: |
IFS=',' read -r -a registries <<< "${{ inputs.registries }}"
IFS=',' read -r -a images <<< "${{ inputs.images }}"
IFS=',' read -r -a tags <<< "${{ inputs.tags }}"
FQTI=""
echo "Generating fully qualified docker target images for:"
echo "🐋 Registries: ${#registries[@]}"
echo "📷 Images: ${#images[@]}"
echo "🏷️ Tags: ${#tags[@]}"
echo "🧮 Total: $((${#registries[@]}*${#images[@]}*${#tags[@]}))"
for registry in "${registries[@]}"; do
for image in "${images[@]}"; do
for tag in "${tags[@]}"; do
if [ -z "$FQTI" ]; then
FQTI="${registry}/${image}:${tag}"
else
FQTI="$FQTI,${registry}/${image}:${tag}"
fi
done
done
done
echo ::set-output name=fully-qualified-target-images::${FQTI}
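In short, the step expands every registry × image × tag combination into one comma-separated list of fully qualified names and exposes it through the fully-qualified-target-images output. A minimal Python sketch of the same expansion (function name and example values are illustrative):

from itertools import product

def fully_qualified_target_images(registries: str, images: str, tags: str) -> str:
    """Cross every registry with every image and tag, comma-separated."""
    combos = product(registries.split(","), images.split(","), tags.split(","))
    return ",".join(f"{reg}/{img}:{tag}" for reg, img, tag in combos)

# 3 registries x 1 image x 2 tags -> 6 fully qualified target images
print(fully_qualified_target_images("docker.io,quay.io,ghcr.io",
                                    "boerderij/varken", "develop,edge"))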

.github/workflows/docker.yaml (new file, 116 lines)
View file

@@ -0,0 +1,116 @@
name: varken
on:
schedule:
- cron: '0 10 * * *'
push:
branches:
- master
- develop
tags:
- 'v*.*.*'
paths:
- '.github/workflows/docker.yaml'
- 'varken/**'
- 'Varken.py'
- 'Dockerfile'
pull_request:
branches:
- master
- develop
paths:
- '.github/workflows/docker.yaml'
- 'varken/**'
- 'Varken.py'
- 'Dockerfile'
workflow_dispatch:
inputs:
tag:
description: 'Use this tag instead of most recent'
required: false
ignore-existing-tag:
description: 'Ignore existing tag if "true"'
required: false
env:
IMAGES: boerderij/varken
PLATFORMS: "linux/amd64,linux/arm64,linux/arm/v7"
jobs:
lint-and-test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Lint
run: pip install flake8 && flake8 --max-line-length 120 Varken.py varken/*.py
build:
runs-on: ubuntu-latest
needs: lint-and-test
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Prepare
id: prep
run: |
VERSION=edge
if [[ $GITHUB_REF == refs/tags/* ]]; then
VERSION=${GITHUB_REF#refs/tags/v}
fi
if [ "${{ github.event_name }}" = "schedule" ]; then
VERSION=nightly
fi
if [[ ${GITHUB_REF##*/} == "develop" ]]; then
VERSION=develop
fi
TAGS="${VERSION}"
if [[ $VERSION =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
TAGS="$TAGS,latest"
fi
echo ::set-output name=version::${VERSION}
echo ::set-output name=tags::${TAGS}
echo ::set-output name=branch::${GITHUB_REF##*/}
echo ::set-output name=build_date::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
echo ::set-output name=vcs_ref::${GITHUB_SHA::8}
- uses: ./.github/actions/docker-target-image-list-action
name: Generate Target Images
id: gen-tags
with:
images: ${{ env.IMAGES }}
tags: ${{ steps.prep.outputs.tags }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
with:
platforms: ${{ env.PLATFORMS }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
with:
install: true
version: latest
driver-opts: image=moby/buildkit:master
- name: Docker Multi Login
uses: ./.github/actions/docker-multi-login-action
env:
secrets: ${{ toJSON(secrets) }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile
platforms: ${{ env.PLATFORMS }}
pull: true
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.gen-tags.outputs.fully-qualified-target-images }}
build-args: |
VERSION=${{ steps.prep.outputs.version }}
BRANCH=${{ steps.prep.outputs.branch }}
BUILD_DATE=${{ steps.prep.outputs.build_date }}
VCS_REF=${{ steps.prep.outputs.vcs_ref }}
- name: Inspect
if: ${{ github.event_name != 'pull_request' }}
run: |
IFS=',' read -r -a images <<< "${{ steps.gen-tags.outputs.fully-qualified-target-images }}"
for image in "${images[@]}"; do
docker buildx imagetools inspect ${image}
done
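The Prepare step is the heart of the workflow: it picks a version string from the trigger (a v-prefixed git tag, a scheduled nightly, the develop branch, or edge as the fallback) and only appends a latest tag when the version looks like a bare x.y.z release. A hedged Python rendering of that shell logic, mainly to make the precedence order explicit (requires Python 3.9+ for removeprefix):

import re

def resolve_version_and_tags(github_ref: str, event_name: str) -> tuple[str, str]:
    """Mirror the Prepare step's VERSION/TAGS decisions (illustrative only)."""
    version = "edge"
    if github_ref.startswith("refs/tags/"):
        version = github_ref.removeprefix("refs/tags/v")
    if event_name == "schedule":
        version = "nightly"
    if github_ref.rsplit("/", 1)[-1] == "develop":
        version = "develop"
    tags = version
    if re.fullmatch(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}", version):
        tags += ",latest"  # plain x.y.z releases are also tagged "latest"
    return version, tags

assert resolve_version_and_tags("refs/tags/v1.7.7", "push") == ("1.7.7", "1.7.7,latest")
assert resolve_version_and_tags("refs/heads/develop", "push") == ("develop", "develop")
assert resolve_version_and_tags("refs/heads/master", "schedule") == ("nightly", "nightly")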

.github/workflows/invalid_template.yml (new file, 19 lines)
View file

@@ -0,0 +1,19 @@
name: 'Invalid Template'
on:
issues:
types: [labeled, unlabeled, reopened]
jobs:
support:
runs-on: ubuntu-latest
steps:
- uses: dessant/support-requests@v2
with:
github-token: ${{ github.token }}
support-label: 'invalid:template-incomplete'
issue-comment: >
:wave: @{issue-author}, please edit your issue and follow the template provided.
close-issue: false
lock-issue: false
issue-lock-reason: 'resolved'

.github/workflows/support.yml (new file, 25 lines)
View file

@@ -0,0 +1,25 @@
name: 'Support Request'
on:
issues:
types: [labeled, unlabeled, reopened]
jobs:
support:
runs-on: ubuntu-latest
steps:
- uses: dessant/support-requests@v2
with:
github-token: ${{ github.token }}
support-label: 'support'
issue-comment: >
:wave: @{issue-author}, we use the issue tracker exclusively
for bug reports and feature requests. However, this issue appears
to be a support request. Please use our support channels
to get help with Varken!
- [Discord](https://discord.gg/VjZ6qSM)
- [Discord Quick Access](http://cyborg.decreator.dev/channels/518970285773422592/530424560504537105/)
close-issue: true
lock-issue: false
issue-lock-reason: 'off-topic'

View file

@@ -1,129 +0,0 @@
stages:
- test
- build and push
- manifests
- release
.common_build: &common_build
image: docker:stable
stage: build and push
variables:
DOCKER_HOST: tcp://docker:2375/
DOCKER_DRIVER: overlay2
services:
- docker:dind
except:
- tags
only:
changes:
- "Dockerfile*"
- Varken.py
- varken/*
- .gitlab-ci.yml
before_script:
- mkdir $HOME/.docker
- echo '{"experimental":"enabled"}' > $HOME/.docker/config.json
- if [[ $CI_COMMIT_REF_NAME == "master" ]]; then
export TAG=$(grep -i version varken/__init__.py | cut -d \ -f3 | tr -d \");
else
export TAG="develop";
fi
- echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USER" --password-stdin
- export CI_PROJECT_PATH=$(echo $CI_PROJECT_PATH | tr "[:upper:]" "[:lower:]")
.common_release: &common_release
<<: *common_build
tags:
- docker
- shared
only:
refs:
- master
flake8:
image: python:3.7.3-alpine
stage: test
cache:
key: "varken-cache"
paths:
- .cache/pip
- venv/
variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
tags:
- shared
before_script:
- python -m venv venv
- source venv/bin/activate
- pip install -U flake8
script:
- flake8 --max-line-length 120 Varken.py varken/*.py
amd64:
<<: *common_build
tags:
- docker
- shared
script:
- docker build --pull
--cache-from "$CI_PROJECT_PATH":"$TAG-amd64"
-t "$CI_PROJECT_PATH":"$TAG-amd64" .
- docker push "$CI_PROJECT_PATH":"$TAG-amd64"
armv6:
<<: *common_build
tags:
- docker
- arm
script:
- docker build --pull
--cache-from "$CI_PROJECT_PATH":"$TAG-arm"
-t "$CI_PROJECT_PATH":"$TAG-arm"
-f Dockerfile.arm .
- docker push "$CI_PROJECT_PATH":"$TAG-arm"
arm64v8:
<<: *common_build
tags:
- docker
- arm64
script:
- docker build --pull
--cache-from "$CI_PROJECT_PATH":"$TAG-arm64"
-t "$CI_PROJECT_PATH":"$TAG-arm64"
-f Dockerfile.arm64 .
- docker push "$CI_PROJECT_PATH":"$TAG-arm64"
versioned:
<<: *common_build
stage: manifests
tags:
- docker
- shared
script:
- docker manifest create "$CI_PROJECT_PATH":"$TAG"
"$CI_PROJECT_PATH":"$TAG-amd64"
"$CI_PROJECT_PATH":"$TAG-arm64"
"$CI_PROJECT_PATH":"$TAG-arm"
- docker manifest push -p "$CI_PROJECT_PATH":"$TAG"
latest:
<<: *common_release
stage: manifests
tags:
- docker
script:
- docker manifest create "$CI_PROJECT_PATH":latest
"$CI_PROJECT_PATH":"$TAG-amd64"
"$CI_PROJECT_PATH":"$TAG-arm64"
"$CI_PROJECT_PATH":"$TAG-arm"
- docker manifest push -p "$CI_PROJECT_PATH":latest
github:
<<: *common_release
stage: release
script:
- apk add git
- git remote set-url origin "https://$GITHUB_USER:$GITHUB_TOKEN@github.com/$CI_PROJECT_PATH.git"
- git tag $TAG
- git push --tags

View file

@@ -5,7 +5,7 @@
**Merged pull requests:**
- v1.7.6 Merge [\#163](https://github.com/Boerderij/Varken/pull/163) ([samwiseg0](https://github.com/samwiseg0))
- v1.7.6 Merge [\#165](https://github.com/Boerderij/Varken/pull/165) ([samwiseg0](https://github.com/samwiseg0))
**Fixed bugs:**

View file

@@ -1,10 +1,21 @@
FROM amd64/python:3.7.3-alpine
FROM python:3.9.1-alpine
LABEL maintainers="dirtycajunrice,samwiseg0"
ENV DEBUG="True" \
DATA_FOLDER="/config" \
VERSION="0.0.0" \
BRANCH="edge" \
BUILD_DATE="1/1/1970"
ENV DEBUG="True"
ENV DATA_FOLDER="/config"
LABEL maintainer="dirtycajunrice,samwiseg0" \
org.opencontainers.image.created=$BUILD_DATE \
org.opencontainers.image.url="https://github.com/Boerderij/Varken" \
org.opencontainers.image.source="https://github.com/Boerderij/Varken" \
org.opencontainers.image.version=$VERSION \
org.opencontainers.image.revision=$VCS_REF \
org.opencontainers.image.vendor="boerderij" \
org.opencontainers.image.title="varken" \
org.opencontainers.image.description="Varken is a standalone application to aggregate data from the Plex ecosystem into InfluxDB using Grafana for a frontend" \
org.opencontainers.image.licenses="MIT"
WORKDIR /app
@@ -16,9 +27,9 @@ COPY /data /app/data
COPY /utilities /app/data/utilities
RUN apk add --no-cache tzdata && \
pip install --no-cache-dir -r /app/requirements.txt
RUN \
apk add --no-cache tzdata \
&& pip install --no-cache-dir -r /app/requirements.txt \
&& sed -i "s/0.0.0/${VERSION}/;s/develop/${BRANCH}/;s/1\/1\/1970/${BUILD_DATE//\//\\/}/" varken/__init__.py
CMD cp /app/data/varken.example.ini /config/varken.example.ini && python3 /app/Varken.py
VOLUME /config
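The reworked RUN block also bakes the build metadata into the package: sed replaces the 0.0.0, develop, and 1/1/1970 placeholders in varken/__init__.py with the VERSION, BRANCH, and BUILD_DATE values supplied as build args (the ARG/ENV plumbing is only partly visible in this hunk), and the ${BUILD_DATE//\//\\/} expansion escapes any slashes in the date so they cannot break sed's / delimiters. A rough Python equivalent of that substitution (helper name and example values are illustrative):

def bake_build_info(init_py: str, version: str, branch: str, build_date: str) -> str:
    """Do in Python what the Dockerfile's sed line does to varken/__init__.py."""
    return (init_py
            .replace("0.0.0", version)
            .replace("develop", branch)
            .replace("1/1/1970", build_date))

placeholders = 'VERSION = "0.0.0"\nBRANCH = \'develop\'\nBUILD_DATE = \'1/1/1970\'\n'
print(bake_build_info(placeholders, "1.7.7", "master", "2020-12-21T18:09:14Z"), end="")
# VERSION = "1.7.7"
# BRANCH = 'master'
# BUILD_DATE = '2020-12-21T18:09:14Z'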

View file

@@ -1,22 +0,0 @@
FROM arm32v6/python:3.7.3-alpine
LABEL maintainers="dirtycajunrice,samwiseg0"
ENV DEBUG="True"
ENV DATA_FOLDER="/config"
WORKDIR /app
COPY /requirements.txt /Varken.py /app/
COPY /varken /app/varken
COPY /data /app/data
RUN apk add --no-cache tzdata && \
pip install --no-cache-dir -r /app/requirements.txt
CMD cp /app/data/varken.example.ini /config/varken.example.ini && python3 /app/Varken.py
VOLUME /config

View file

@@ -1,22 +0,0 @@
FROM arm64v8/python:3.7.3-alpine
LABEL maintainers="dirtycajunrice,samwiseg0"
ENV DEBUG="True"
ENV DATA_FOLDER="/config"
WORKDIR /app
COPY /requirements.txt /Varken.py /app/
COPY /varken /app/varken
COPY /data /app/data
RUN apk add --no-cache tzdata && \
pip install --no-cache-dir -r /app/requirements.txt
CMD cp /app/data/varken.example.ini /config/varken.example.ini && python3 /app/Varken.py
VOLUME /config

View file

@@ -17,7 +17,7 @@ ecosystem into InfluxDB using Grafana for a frontend
Requirements:
* [Python 3.6.7+](https://www.python.org/downloads/release/python-367/)
* [Python3-pip](https://pip.pypa.io/en/stable/installing/)
* [InfluxDB](https://www.influxdata.com/)
* [InfluxDB 1.8.x](https://www.influxdata.com/)
* [Grafana](https://grafana.com/)
<p align="center">
@@ -32,7 +32,6 @@ Supported Modules:
* [Radarr](https://radarr.video/) - A fork of Sonarr to work with movies à la Couchpotato.
* [Tautulli](https://tautulli.com/) - A Python based monitoring and tracking tool for Plex Media Server.
* [Ombi](https://ombi.io/) - Want a Movie or TV Show on Plex or Emby? Use Ombi!
* [Unifi](https://unifi-sdn.ubnt.com/) - The Global Leader in Managed Wi-Fi Systems
* [Lidarr](https://lidarr.audio/) - Looks and smells like Sonarr but made for music.
Key features:
@@ -51,7 +50,8 @@ Please read [Asking for Support](https://wiki.cajun.pro/books/varken/chapter/ask
### InfluxDB
[InfluxDB Installation Documentation](https://wiki.cajun.pro/books/varken/page/influxdb-d1f)
Note: Only v1.8.x is currently supported.
Influxdb is required but not packaged as part of Varken. Varken will create
its database on its own. If you choose to give varken user permissions that
do not include database creation, please ensure you create an influx database

View file

@@ -15,7 +15,7 @@ from logging import getLogger, StreamHandler, Formatter, DEBUG
from varken import structures # noqa
from varken.ombi import OmbiAPI
from varken.unifi import UniFiAPI
from varken import VERSION, BRANCH
from varken import VERSION, BRANCH, BUILD_DATE
from varken.sonarr import SonarrAPI
from varken.radarr import RadarrAPI
from varken.lidarr import LidarrAPI
@@ -87,7 +87,7 @@ if __name__ == "__main__":
vl.logger.info(u"Python %s", version)
vl.logger.info("Varken v%s-%s", VERSION, BRANCH)
vl.logger.info("Varken v%s-%s %s", VERSION, BRANCH, BUILD_DATE)
CONFIG = INIParser(DATA_FOLDER)
DBMANAGER = DBManager(CONFIG.influx_server)

View file

@@ -107,14 +107,6 @@ services:
- VRKN_SICKCHILL_1_VERIFY_SSL=false
- VRKN_SICKCHILL_1_GET_MISSING=true
- VRKN_SICKCHILL_1_GET_MISSING_RUN_SECONDS=300
- VRKN_UNIFI_1_URL=unifi.domain.tld:8443
- VRKN_UNIFI_1_USERNAME=ubnt
- VRKN_UNIFI_1_PASSWORD=ubnt
- VRKN_UNIFI_1_SITE=default
- VRKN_UNIFI_1_USG_NAME=MyRouter
- VRKN_UNIFI_1_SSL=false
- VRKN_UNIFI_1_VERIFY_SSL=false
- VRKN_UNIFI_1_GET_USG_STATS_RUN_SECONDS=300
depends_on:
- influxdb
restart: unless-stopped

View file

@@ -1,2 +1,3 @@
VERSION = "1.7.6"
BRANCH = 'master'
VERSION = "0.0.0"
BRANCH = 'develop'
BUILD_DATE = '1/1/1970'

View file

@@ -20,7 +20,7 @@ class DBManager(object):
self.logger.info('Influxdb version: %s', version)
except ConnectionError:
self.logger.critical("Error testing connection to InfluxDB. Please check your url/hostname")
exit()
exit(1)
databases = [db['name'] for db in self.influx.get_list_database()]

View file

@@ -254,7 +254,7 @@ class INIParser(object):
server_id)
exit(1)
maxmind_license_key = env.get(f'VRKN_GLOBAL_MAXMIND_LICENSE_KEY',
maxmind_license_key = env.get('VRKN_GLOBAL_MAXMIND_LICENSE_KEY',
self.config.get('global', 'maxmind_license_key'))
server = TautulliServer(id=server_id, url=scheme + url, api_key=apikey,

View file

@@ -140,6 +140,7 @@ class OmbiTVRequest(NamedTuple):
totalSeasons: int = None
tvDbId: int = None
requestedByAlias: str = None
requestStatus: str = None
class OmbiMovieRequest(NamedTuple):
@@ -176,6 +177,7 @@ class OmbiMovieRequest(NamedTuple):
langCode: str = None
languageCode: str = None
requestedByAlias: str = None
requestStatus: str = None
# Sonarr
@@ -494,6 +496,7 @@ class LidarrQueue(NamedTuple):
protocol: str = None
downloadClient: str = None
indexer: str = None
outputPath: str = None
downloadForced: bool = None
id: int = None
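These additions follow the project's convention of defaulting every NamedTuple field to None, so payloads from Ombi or Lidarr versions that do not yet send requestStatus or outputPath still unpack without a TypeError. A trimmed, hypothetical example (not the real class definition):

from typing import NamedTuple

class OmbiTVRequestExample(NamedTuple):  # illustrative subset of OmbiTVRequest
    title: str = None
    requestedByAlias: str = None
    requestStatus: str = None  # new field; older Ombi responses omit it

old_payload = {"title": "The Expanse", "requestedByAlias": "sam"}
request = OmbiTVRequestExample(**old_payload)
print(request.requestStatus)  # None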

View file

@@ -218,6 +218,7 @@ class TautulliAPI(object):
data['fields']['episodes'] = int(library['child_count'])
elif library['section_type'] == 'artist':
data['fields']['artists'] = int(library['count'])
data['fields']['albums'] = int(library['parent_count'])
data['fields']['tracks'] = int(library['child_count'])
influx_payload.append(data)

View file

@@ -27,7 +27,7 @@ class UniFiAPI(object):
post = connection_handler(self.session, req, self.server.verify_ssl, as_is_reply=True)
if not post or not post.cookies.get('unifises'):
self.logger.error(f"Could not retrieve session cookie from UniFi Controller")
self.logger.error("Could not retrieve session cookie from UniFi Controller")
return
cookies = {'unifises': post.cookies.get('unifises')}
@@ -39,7 +39,7 @@
get = connection_handler(self.session, req, self.server.verify_ssl)
if not get:
self.logger.error(f"Could not get list of sites from UniFi Controller")
self.logger.error("Could not get list of sites from UniFi Controller")
return
site = [site['name'] for site in get['data'] if site['name'].lower() == self.server.site.lower()
or site['desc'].lower() == self.server.site.lower()]