Merge pull request #115 from Boerderij/develop

v1.6.5 Merge
samwiseg0 2019-03-11 19:42:01 -04:00 committed by GitHub
commit d274df35d8
13 changed files with 215 additions and 129 deletions

@@ -1,18 +0,0 @@
sudo: true
dist: xenial
language: minimal
services:
- docker
addons:
apt:
packages:
- docker-ce
deploy:
- provider: script
script: bash deploy.sh
on:
branch: master
- provider: script
script: bash deploy.sh
on:
branch: develop

@@ -1,7 +1,26 @@
# Change Log
## [v1.6.4](https://github.com/Boerderij/Varken/tree/v1.6.4) (2019-02-03)
[Full Changelog](https://github.com/Boerderij/Varken/compare/1.6.3...v1.6.4)
## [v1.6.5](https://github.com/Boerderij/Varken/tree/v1.6.5) (2019-03-11)
[Full Changelog](https://github.com/Boerderij/Varken/compare/v1.6.4...v1.6.5)
**Implemented enhancements:**
- \[Feature Request\] Add new "relayed" and "secure" to Tautulli data pushed to influx [\#114](https://github.com/Boerderij/Varken/issues/114)
- \[BUG\] Changes to Tautulli breaks Varken `TypeError` `Secure` `relayed` [\#111](https://github.com/Boerderij/Varken/issues/111)
**Fixed bugs:**
- \[BUG\] Handle GeoIP Downloads better [\#113](https://github.com/Boerderij/Varken/issues/113)
- \[BUG\] - "None" outputted to stdout many times with no benefit? [\#105](https://github.com/Boerderij/Varken/issues/105)
- \[BUG\] windows file open error [\#104](https://github.com/Boerderij/Varken/issues/104)
- \[BUG\] Not catching DB url resolve [\#103](https://github.com/Boerderij/Varken/issues/103)
**Merged pull requests:**
- v1.6.5 Merge [\#115](https://github.com/Boerderij/Varken/pull/115) ([samwiseg0](https://github.com/samwiseg0))
## [1.6.4](https://github.com/Boerderij/Varken/tree/1.6.4) (2019-02-04)
[Full Changelog](https://github.com/Boerderij/Varken/compare/1.6.3...1.6.4)
**Fixed bugs:**

@@ -6,8 +6,6 @@ ENV DEBUG="False"
WORKDIR /app
COPY /qemu-arm-static /usr/bin/qemu-arm-static
COPY /requirements.txt /Varken.py /app/
COPY /varken /app/varken

@@ -6,8 +6,6 @@ ENV DEBUG="False"
WORKDIR /app
COPY /qemu-aarch64-static /usr/bin/qemu-aarch64-static
COPY /requirements.txt /Varken.py /app/
COPY /varken /app/varken

Jenkinsfile (vendored, new file)

@@ -0,0 +1,126 @@
pipeline {
agent none
environment {
DOCKER_REPO = "boerderij/varken"
GIT_REPO = 'Boerderij/Varken'
VERSION_FILE = "varken/__init__.py"
FLAKE_FILES = "Varken.py varken/*.py"
TAG = ""
GIT_TOKEN = credentials('github-jenkins-token')
}
stages {
stage('Flake8') {
agent { label 'amd64'}
steps {
sh """
python3 -m venv venv && venv/bin/pip install flake8 && venv/bin/python -m flake8 --max-line-length 120 ${FLAKE_FILES}
rm -rf venv/
"""
script {
TAG = sh(returnStdout: true, script: 'grep -i version ${VERSION_FILE} | cut -d" " -f3 | tr -d \\"').trim()
}
}
}
stage('Docker Builds') {
parallel {
stage('amd64') {
when {
anyOf {
branch 'master'
branch 'develop'
}
}
agent { label 'amd64'}
steps {
script {
if (BRANCH_NAME == 'master') {
def image = docker.build("${DOCKER_REPO}:${TAG}-amd64")
image.push()
} else if (BRANCH_NAME == 'develop') {
def image = docker.build("${DOCKER_REPO}:develop-amd64")
image.push()
}
}
}
}
stage('ARMv6') {
when {
anyOf {
branch 'master'
branch 'develop'
}
}
agent { label 'arm64'}
steps {
script {
if (BRANCH_NAME == 'master') {
def image = docker.build("${DOCKER_REPO}:${TAG}-arm", "-f Dockerfile.arm .")
image.push()
} else if (BRANCH_NAME == 'develop') {
def image = docker.build("${DOCKER_REPO}:develop-arm", "-f Dockerfile.arm .")
image.push()
}
}
}
}
stage('ARM64v8') {
when {
anyOf {
branch 'master'
branch 'develop'
}
}
agent { label 'arm64'}
steps {
script {
if (BRANCH_NAME == 'master') {
def image = docker.build("${DOCKER_REPO}:${TAG}-arm64", "-f Dockerfile.arm64 .")
image.push()
} else if (BRANCH_NAME == 'develop') {
def image = docker.build("${DOCKER_REPO}:develop-arm64", "-f Dockerfile.arm64 .")
image.push()
}
}
}
}
}
}
stage('Docker Manifest Build') {
when {
anyOf {
branch 'master'
branch 'develop'
}
}
agent { label 'amd64'}
steps {
script {
if (BRANCH_NAME == 'master') {
sh(script: "docker manifest create ${DOCKER_REPO}:${TAG} ${DOCKER_REPO}:${TAG}-amd64 ${DOCKER_REPO}:${TAG}-arm64 ${DOCKER_REPO}:${TAG}-arm")
sh(script: "docker manifest inspect ${DOCKER_REPO}:${TAG}")
sh(script: "docker manifest push -p ${DOCKER_REPO}:${TAG}")
sh(script: "docker manifest create ${DOCKER_REPO}:latest ${DOCKER_REPO}:${TAG}-amd64 ${DOCKER_REPO}:${TAG}-arm64 ${DOCKER_REPO}:${TAG}-arm")
sh(script: "docker manifest inspect ${DOCKER_REPO}:latest")
sh(script: "docker manifest push -p ${DOCKER_REPO}:latest")
} else if (BRANCH_NAME == 'develop') {
sh(script: "docker manifest create ${DOCKER_REPO}:develop ${DOCKER_REPO}:develop-amd64 ${DOCKER_REPO}:develop-arm64 ${DOCKER_REPO}:develop-arm")
sh(script: "docker manifest inspect ${DOCKER_REPO}:develop")
sh(script: "docker manifest push -p ${DOCKER_REPO}:develop")
}
}
}
}
stage('GitHub Release') {
when { branch 'master' }
agent { label 'amd64'}
steps {
sh """
git remote set-url origin "https://${GIT_TOKEN_USR}:${GIT_TOKEN_PSW}@github.com/${GIT_REPO}.git"
git tag ${TAG}
git push --tags
"""
}
}
}
}
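A side note on the TAG step above: the grep/cut/tr pipeline simply pulls the quoted VERSION string out of varken/__init__.py (the configured VERSION_FILE), so the Docker image tags track the release number. A rough Python equivalent of that extraction, for illustration only (the function name and regex below are illustrative, not part of the pipeline):

# Illustration only: roughly what the Jenkinsfile's
#   grep -i version varken/__init__.py | cut -d" " -f3 | tr -d '"'
# pipeline extracts; not used anywhere in the build itself.
import re

def read_version(init_file: str = "varken/__init__.py") -> str:
    with open(init_file) as handle:
        for line in handle:
            match = re.match(r'\s*VERSION\s*=\s*["\'](.+?)["\']', line, re.IGNORECASE)
            if match:
                return match.group(1)
    raise ValueError(f"No VERSION string found in {init_file}")

print(read_version())  # prints "1.6.5" for this release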


@@ -2,7 +2,7 @@
<img width="800" src="https://bin.cajun.pro/images/varken_full_banner.png">
</p>
[![Build Status](https://travis-ci.org/Boerderij/Varken.svg?branch=master)](https://travis-ci.org/Boerderij/Varken)
[![Build Status](https://jenkins.cajun.pro/buildStatus/icon?job=Varken/master)](https://jenkins.cajun.pro/job/Varken/job/master/)
[![Discord](https://img.shields.io/discord/518970285773422592.svg?colorB=7289DA&label=Discord&logo=Discord&logoColor=7289DA&style=flat-square)](https://discord.gg/VjZ6qSM)
[![BuyMeACoffee](https://img.shields.io/badge/BuyMeACoffee-Donate-ff813f.svg?logo=CoffeeScript&style=flat-square)](https://www.buymeacoffee.com/varken)
[![Docker-Layers](https://images.microbadger.com/badges/image/boerderij/varken.svg)](https://microbadger.com/images/boerderij/varken)

@@ -10,7 +10,8 @@ from os.path import isdir, abspath, dirname, join
from argparse import ArgumentParser, RawTextHelpFormatter
from logging import getLogger, StreamHandler, Formatter, DEBUG
from varken import structures # Needed to check version of python
# Needed to check version of python
from varken import structures # noqa
from varken.ombi import OmbiAPI
from varken.unifi import UniFiAPI
from varken import VERSION, BRANCH
@@ -31,7 +32,6 @@ def thread():
while schedule.jobs:
job = QUEUE.get()
a = job()
print(a)
if a is not None:
schedule.clear(a)
QUEUE.task_done()

@@ -1,79 +0,0 @@
#!/usr/bin/env bash
# Travis-ci convenience environment vars used:
# TRAVIS_BRANCH | branch name
# $TRAVIS_REPO_SLUG | organization/project (GitHub Capitalization)
# Travis-ci manual environment vars used:
# GITHUB_USER | github username
# GITHUB_TOKEN | $GITHUB_USER's token
# DOCKER_USER | docker username
# DOCKER_PASSWORD | $DOCKER_USER's password
VERSION="$(grep -i version varken/__init__.py | cut -d' ' -f3 | tr -d \")"
# Set branch to latest if master, else keep the same
if [[ "$TRAVIS_BRANCH" == "master" ]]; then
BRANCH="latest"
else
BRANCH="$TRAVIS_BRANCH"
fi
# get the docker lowercase variant of the repo_name
REPOSITORY="$(echo $TRAVIS_REPO_SLUG | tr '[:upper:]' '[:lower:]')"
# Docker experimental config
echo '{"experimental":true}' | sudo tee /etc/docker/daemon.json
[[ -d ~/.docker ]] || mkdir ~/.docker
[[ -f ~/.docker/config.json ]] || touch ~/.docker/config.json
echo '{"experimental":"enabled"}' | sudo tee ~/.docker/config.json
sudo service docker restart
# Auth
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USER" --password-stdin
# Prepare QEMU for ARM builds
docker run --rm --privileged multiarch/qemu-user-static:register --reset
bash prebuild.sh
chmod +x qemu-aarch64-static qemu-arm-static
# Set tag based off of branch
if [[ "$BRANCH" == "latest" ]]; then
TAG="$VERSION"
else
TAG="$BRANCH"
fi
# AMDx64
docker build -t "${REPOSITORY}:${TAG}-amd64" . && \
docker push "${REPOSITORY}:${TAG}-amd64"
# Create Initial Manifests
docker manifest create "${REPOSITORY}:${TAG}" "${REPOSITORY}:${TAG}-amd64"
if [[ "$BRANCH" == "latest" ]]; then
docker manifest create "${REPOSITORY}:${BRANCH}" "${REPOSITORY}:${TAG}-amd64"
fi
# ARM variants
for i in $(ls *arm*); do
ARCH="$(echo ${i} | cut -d. -f2)"
docker build -f "Dockerfile.${ARCH}" -t "${REPOSITORY}:${TAG}-${ARCH}" . && \
docker push "${REPOSITORY}:${TAG}-${ARCH}"
# Add variant to manifest
docker manifest create -a "${REPOSITORY}:${TAG}" "${REPOSITORY}:${TAG}-${ARCH}"
if [[ "$BRANCH" == "latest" ]]; then
docker manifest create -a "${REPOSITORY}:${BRANCH}" "${REPOSITORY}:${TAG}-${ARCH}"
fi
done
docker manifest inspect "${REPOSITORY}:${TAG}" && \
docker manifest push "${REPOSITORY}:${TAG}"
if [[ "$BRANCH" == "latest" ]]; then
docker manifest inspect "${REPOSITORY}:${BRANCH}" && \
docker manifest push "${REPOSITORY}:${BRANCH}"
fi
# Git tags
if [[ "$BRANCH" == "latest" ]]; then
git remote set-url origin "https://${GITHUB_USER}:${GITHUB_TOKEN}@github.com/${REPOSITORY}.git" && \
git tag "${VERSION}" && \
git push --tags
fi

@@ -1,3 +0,0 @@
#!/usr/bin/env bash
wget -q "https://github.com/multiarch/qemu-user-static/releases/download/v3.1.0-2/qemu-aarch64-static"
wget -q "https://github.com/multiarch/qemu-user-static/releases/download/v3.1.0-2/qemu-arm-static"

@@ -1,2 +1,2 @@
VERSION = "1.6.4"
VERSION = "1.6.5"
BRANCH = 'master'

@@ -1,8 +1,9 @@
from hashlib import md5
from datetime import date
from datetime import date, timedelta
from time import sleep
from logging import getLogger
from ipaddress import IPv4Address
from calendar import monthcalendar
from urllib.error import HTTPError
from geoip2.database import Reader
from tarfile import open as taropen
from urllib3 import disable_warnings
@@ -21,10 +22,25 @@ class GeoIPHandler(object):
self.data_folder = data_folder
self.dbfile = abspath(join(self.data_folder, 'GeoLite2-City.mmdb'))
self.logger = getLogger()
self.update()
self.reader = None
self.reader_manager(action='open')
self.logger.info('Opening persistent connection to GeoLite2 DB...')
self.reader = Reader(self.dbfile)
def reader_manager(self, action=None):
if action == 'open':
try:
self.reader = Reader(self.dbfile)
except FileNotFoundError:
self.logger.error("Could not find GeoLite2 DB! Downloading!")
result_status = self.download()
if result_status:
self.logger.error("Could not download GeoLite2 DB!!!, You may need to manually install it.")
exit(1)
else:
self.reader = Reader(self.dbfile)
else:
self.reader.close()
def lookup(self, ipaddress):
ip = ipaddress
@@ -37,33 +53,54 @@ class GeoIPHandler(object):
try:
dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=60)
except FileNotFoundError:
self.logger.error("Could not find GeoLite2 DB as: %s", self.dbfile)
self.download()
dbdate = date.fromtimestamp(stat(self.dbfile).st_mtime)
db_next_update = date.fromtimestamp(stat(self.dbfile).st_mtime) + timedelta(days=60)
first_wednesday_day = [week[2:3][0] for week in monthcalendar(today.year, today.month) if week[2:3][0] != 0][0]
first_wednesday_date = date(today.year, today.month, first_wednesday_day)
if dbdate < first_wednesday_date < today:
if db_next_update < today:
self.logger.info("Newer GeoLite2 DB available, Updating...")
remove(self.dbfile)
self.logger.debug("GeoLite2 DB date %s, DB updates after: %s, Today: %s",
dbdate, db_next_update, today)
self.reader_manager(action='close')
self.download()
self.reader_manager(action='open')
else:
td = first_wednesday_date - today
if td.days < 0:
self.logger.debug('Geolite2 DB is only %s days old. Keeping current copy', abs(td.days))
else:
self.logger.debug('Geolite2 DB will update in %s days', abs(td.days))
db_days_update = db_next_update - today
self.logger.debug("Geolite2 DB will update in %s days", abs(db_days_update.days))
self.logger.debug("GeoLite2 DB date %s, DB updates after: %s, Today: %s",
dbdate, db_next_update, today)
def download(self):
tar_dbfile = abspath(join(self.data_folder, 'GeoLite2-City.tar.gz'))
url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'
downloaded = False
self.logger.info('Downloading GeoLite2 from %s', url)
urlretrieve(url, tar_dbfile)
retry_counter = 0
self.logger.debug('Opening GeoLite2 tar file : %s', tar_dbfile)
while not downloaded:
self.logger.info('Downloading GeoLite2 from %s', url)
try:
urlretrieve(url, tar_dbfile)
downloaded = True
except HTTPError as e:
self.logger.error("Problem downloading new GeoLite2 DB... Trying again. Error: %s", e)
sleep(2)
retry_counter = (retry_counter + 1)
if retry_counter >= 3:
self.logger.error("Retried downloading the new GeoLite2 DB 3 times and failed... Aborting!")
result_status = 1
return result_status
try:
remove(self.dbfile)
except FileNotFoundError:
self.logger.warn("Cannot remove GeoLite2 DB as it does not exsist!")
self.logger.debug("Opening GeoLite2 tar file : %s", tar_dbfile)
tar = taropen(tar_dbfile, 'r:gz')
@@ -74,7 +111,11 @@ class GeoIPHandler(object):
tar.extract(files, self.data_folder)
self.logger.debug('%s has been extracted to %s', files, self.data_folder)
tar.close()
remove(tar_dbfile)
try:
remove(tar_dbfile)
self.logger.debug('Removed the GeoLite2 DB TAR file.')
except FileNotFoundError:
self.logger.warn("Cannot remove GeoLite2 DB TAR file as it does not exsist!")
def hashit(string):
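The net effect of the GeoIPHandler changes in the hunks above: the handler now keeps one persistent Reader, downloads the DB itself when it is missing, retries a failed download up to three times before aborting, and refreshes the file once it is more than 60 days old. A minimal usage sketch, assuming GeoIPHandler is importable from varken's helpers module (the import path below is an assumption, not taken from this diff):

# Usage sketch only; the module path is assumed, and the lookup result is whatever
# geoip2's Reader returns for the address.
from varken.helpers import GeoIPHandler  # assumed location of GeoIPHandler

geoip = GeoIPHandler("/config/data")  # opens GeoLite2-City.mmdb, downloading it first if missing
record = geoip.lookup("8.8.8.8")      # served by the persistent Reader held on the handler
geoip.update()                        # closes the Reader, re-downloads if the DB is over 60 days old, reopens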


@@ -455,3 +455,5 @@ class TautulliStream(NamedTuple):
width: str = None
writers: list = None
year: str = None
secure: str = None
relayed: int = None
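These two fields are the substance of #111/#114: Tautulli started returning secure and relayed in its session payloads, and building TautulliStream(**session) from a payload containing undeclared keys raises TypeError. Declaring the keys with defaults lets the NamedTuple accept them. A self-contained toy sketch of that failure mode (class names here are invented for illustration, not Varken's):

# Toy illustration of the NamedTuple behaviour behind issue #111; the classes are
# stand-ins, not the real TautulliStream.
from typing import NamedTuple

class SessionOld(NamedTuple):        # before: 'secure'/'relayed' not declared
    session_id: str = None
    platform: str = None

class SessionNew(NamedTuple):        # after: new Tautulli keys declared with defaults
    session_id: str = None
    platform: str = None
    secure: str = None
    relayed: int = None

payload = {"session_id": "abc123", "platform": "Chrome", "secure": "1", "relayed": 0}

try:
    SessionOld(**payload)
except TypeError as error:           # unexpected keyword argument 'secure'
    print(f"old schema rejects new keys: {error}")

session = SessionNew(**payload)      # accepted; the extra keys land on the new fields
print(session.secure, session.relayed)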


@@ -140,6 +140,8 @@ class TautulliAPI(object):
"longitude": longitude,
"player_state": player_state,
"device_type": session.platform,
"relayed": session.relayed,
"secure": session.secure,
"server": self.server.id
},
"time": now,