testing hashit
parent 4c4955e474
commit 4f1ec17538
2 changed files with 31 additions and 14 deletions
@@ -1,6 +1,7 @@
 import os
 import time
 import tarfile
+import hashlib
 import geoip2.database
 from typing import NamedTuple
 from os.path import abspath, join
@@ -65,6 +66,7 @@ class Movie(NamedTuple):
     website: str = None
     id: int = None
 
+
 class Queue(NamedTuple):
     movie: dict = None
     series: dict = None
@@ -95,6 +97,7 @@ class SonarrServer(NamedTuple):
     queue: bool = False
     queue_run_seconds: int = 30
 
+
 class RadarrServer(NamedTuple):
     id: int = None
     url: str = None
@@ -343,6 +346,7 @@ def geoip_download():
             tar.extract(files, abspath(join('.', 'data')))
     os.remove(tar_dbfile)
 
+
 def geo_lookup(ipaddress):
 
     dbfile = abspath(join('.', 'data', 'GeoLite2-City.mmdb'))
@@ -360,3 +364,10 @@ def geo_lookup(ipaddress):
     reader = geoip2.database.Reader(dbfile)
 
     return reader.city(ipaddress)
+
+
+def hashit(string):
+    encoded = string.encode()
+    hashed = hashlib.md5(encoded).hexdigest()
+
+    return hashed
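
The hashit() helper added above reduces any string to its MD5 hex digest. The SonarrAPI changes in the second file below call it with the server id, series title, and season/episode tag concatenated together, so the same episode on the same server always maps to the same id. A minimal sketch of that behaviour, with made-up example values:

import hashlib


def hashit(string):
    # Same logic as the helper above: MD5 hex digest of the UTF-8 encoded string.
    encoded = string.encode()
    return hashlib.md5(encoded).hexdigest()


# Hypothetical key, mirroring how SonarrAPI builds it: '{server.id}{series_title}{sxe}'.
key = '{}{}{}'.format(1, 'Some Show', 'S01E01')

print(hashit(key))                 # a 32-character hex string
print(hashit(key) == hashit(key))  # True: identical input always yields the same hash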
@@ -2,7 +2,7 @@ from requests import Session
 from datetime import datetime, timezone, date, timedelta
 
 from Varken.logger import logging
-from Varken.helpers import TVShow, Queue
+from Varken.helpers import TVShow, Queue, hashit
 
 
 class SonarrAPI(object):
@@ -38,20 +38,23 @@ class SonarrAPI(object):
                 missing.append((show.series['title'], sxe, show.airDate, show.title, show.id))
 
         for series_title, sxe, air_date, episode_title, sonarr_id in missing:
+            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
             influx_payload.append(
                 {
                     "measurement": "Sonarr",
                     "tags": {
                         "type": "Missing",
                         "sonarrId": sonarr_id,
-                        "server": self.server.id
-                    },
-                    "time": self.now,
-                    "fields": {
+                        "server": self.server.id,
                         "name": series_title,
                         "epname": episode_title,
                         "sxe": sxe,
                         "airs": air_date
+                    },
+                    "time": self.now,
+                    "fields": {
+                        "hash": hash_id
+
                     }
                 }
             )
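
After this hunk, the descriptive values for a missing episode (name, epname, sxe, airs) travel as InfluxDB tags, while the fields block carries only the computed hash; the same restructuring is applied to the Future and Queue loops in the hunks below. A sketch of one resulting entry, using hypothetical values in place of the loop variables:

from Varken.helpers import hashit

# Stand-ins: server.id = 1, series_title = 'Some Show', sxe = 'S01E01',
# episode_title = 'Pilot', air_date = '2018-11-29', sonarr_id = 123.
hash_id = hashit('{}{}{}'.format(1, 'Some Show', 'S01E01'))

point = {
    "measurement": "Sonarr",
    "tags": {
        "type": "Missing",
        "sonarrId": 123,
        "server": 1,
        "name": "Some Show",
        "epname": "Pilot",
        "sxe": "S01E01",
        "airs": "2018-11-29"
    },
    "time": "2018-11-29T00:00:00Z",  # self.now in the real code
    "fields": {
        "hash": hash_id
    }
}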
@@ -77,21 +80,23 @@ class SonarrAPI(object):
                 air_days.append((show.series['title'], show.hasFile, sxe, show.title, show.airDate, show.id))
 
         for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days:
+            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
             influx_payload.append(
                 {
                     "measurement": "Sonarr",
                     "tags": {
                         "type": "Future",
                         "sonarrId": sonarr_id,
-                        "server": self.server.id
-                    },
-                    "time": self.now,
-                    "fields": {
+                        "server": self.server.id,
                         "name": series_title,
                         "epname": episode_title,
                         "sxe": sxe,
                         "airs": air_date,
                         "downloaded": dl_status
+                    },
+                    "time": self.now,
+                    "fields": {
+                        "hash": hash_id
                     }
                 }
             )
@@ -120,22 +125,23 @@ class SonarrAPI(object):
                           protocol_id, sxe, show.id))
 
         for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
+            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
             influx_payload.append(
                 {
                     "measurement": "Sonarr",
                     "tags": {
                         "type": "Queue",
                         "sonarrId": sonarr_id,
-                        "server": self.server.id
-
-                    },
-                    "time": self.now,
-                    "fields": {
+                        "server": self.server.id,
                         "name": series_title,
                         "epname": episode_title,
                         "sxe": sxe,
                         "protocol": protocol,
                         "protocol_id": protocol_id
+                    },
+                    "time": self.now,
+                    "fields": {
+                        "hash": hash_id
                     }
                 }
             )
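
These loops only build influx_payload; writing it happens outside this diff. The dict format matches what the influxdb Python client's write_points() accepts, so the hand-off would look roughly like the sketch below (connection details are hypothetical, not from the repository):

from influxdb import InfluxDBClient

# Hypothetical connection values; the real ones come from Varken's configuration.
client = InfluxDBClient(host='localhost', port=8086, database='varken')

# Stand-in for the list of measurement dicts built by the loops above.
influx_payload = []
client.write_points(influx_payload)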