from logging import getLogger
from requests import Session, Request
from datetime import datetime, timezone

from varken.helpers import connection_handler, hashit
from varken.structures import OmbiRequestCounts, OmbiIssuesCounts, OmbiMovieRequest, OmbiTVRequest


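# Ombi API collector for Varken: pulls request and issue data from an Ombi
# server and writes the resulting points to InfluxDB through the supplied dbmanager.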
class OmbiAPI(object):
    def __init__(self, server, dbmanager):
        self.dbmanager = dbmanager
        self.server = server
        # Create one session to reduce server web thread load; every request carries the API key header
        self.session = Session()
        self.session.headers = {'Apikey': self.server.api_key}
        self.logger = getLogger()

    def __repr__(self):
        return f"<ombi-{self.server.id}>"

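    # Pull every movie and TV request from Ombi and write two kinds of points:
    # a per-server "Request_Total" summary and one "Requests" point per item.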
    def get_all_requests(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        tv_endpoint = '/api/v1/Request/tv'
        movie_endpoint = '/api/v1/Request/movie'

        tv_req = self.session.prepare_request(Request('GET', self.server.url + tv_endpoint))
        movie_req = self.session.prepare_request(Request('GET', self.server.url + movie_endpoint))
        get_tv = connection_handler(self.session, tv_req, self.server.verify_ssl) or []
        get_movie = connection_handler(self.session, movie_req, self.server.verify_ssl) or []

        if not any([get_tv, get_movie]):
            self.logger.error('No json replies. Discarding job')
            return

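        # Tally totals for the summary point; the `or []` fallbacks above keep
        # the len() calls below safe when an endpoint returns no data.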
        if get_movie:
            movie_request_count = len(get_movie)
        else:
            movie_request_count = 0

        if get_tv:
            tv_request_count = len(get_tv)
        else:
            tv_request_count = 0

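        # Wrap each raw dict in its typed structure; entries whose fields do not
        # match are logged and skipped instead of aborting the whole job.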
        tv_show_requests = []
        for show in get_tv:
            try:
                tv_show_requests.append(OmbiTVRequest(**show))
            except TypeError as e:
                self.logger.error('TypeError has occurred: %s while creating OmbiTVRequest structure for show. '
                                  'data attempted is: %s', e, show)

        movie_requests = []
        for movie in get_movie:
            try:
                movie_requests.append(OmbiMovieRequest(**movie))
            except TypeError as e:
                self.logger.error('TypeError has occurred: %s while creating OmbiMovieRequest structure for movie. '
                                  'data attempted is: %s', e, movie)

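        # Seed the payload with a per-server Request_Total summary point; one
        # point per individual request is appended below.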
        influx_payload = [
            {
                "measurement": "Ombi",
                "tags": {
                    "type": "Request_Total",
                    "server": self.server.id
                },
                "time": now,
                "fields": {
                    "total": movie_request_count + tv_request_count,
                    "movies": movie_request_count,
                    "tv_shows": tv_request_count
                }
            }
        ]

        # Request Type: Movie = 1, TV Show = 0
        for movie in movie_requests:
            hash_id = hashit(f'{movie.id}{movie.theMovieDbId}{movie.title}')

            # Denied = 0, Approved = 1, Completed = 2, Pending = 3
            if movie.denied:
                status = 0
            elif movie.approved and movie.available:
                status = 2
            elif movie.approved:
                status = 1
            else:
                status = 3

            influx_payload.append(
                {
                    "measurement": "Ombi",
                    "tags": {
                        "type": "Requests",
                        "server": self.server.id,
                        "request_type": 1,
                        "status": status,
                        "title": movie.title,
                        "requested_user": movie.requestedUser['userAlias'],
                        "requested_date": movie.requestedDate
                    },
                    "time": now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

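        # For TV, the denied/approved/available flags are read from the first
        # child request rather than from the parent show entry.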
        for show in tv_show_requests:
            hash_id = hashit(f'{show.id}{show.tvDbId}{show.title}')

            # Denied = 0, Approved = 1, Completed = 2, Pending = 3
            if show.childRequests[0].get('denied'):
                status = 0
            elif show.childRequests[0].get('approved') and show.childRequests[0].get('available'):
                status = 2
            elif show.childRequests[0].get('approved'):
                status = 1
            else:
                status = 3

            influx_payload.append(
                {
                    "measurement": "Ombi",
                    "tags": {
                        "type": "Requests",
                        "server": self.server.id,
                        "request_type": 0,
                        "status": status,
                        "title": show.title,
                        "requested_user": show.childRequests[0]['requestedUser']['userAlias'],
                        "requested_date": show.childRequests[0]['requestedDate']
                    },
                    "time": now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

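        # Batch-write all collected points in one call; an empty payload is only logged.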
        if influx_payload:
            self.dbmanager.write_points(influx_payload)
        else:
            self.logger.debug("Empty dataset for ombi module. Discarding...")

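    # Snapshot Ombi's aggregate request counters (pending / approved / available)
    # as a single Request_Counts point.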
    def get_request_counts(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/Request/count'

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        requests = OmbiRequestCounts(**get)
        influx_payload = [
            {
                "measurement": "Ombi",
                "tags": {
                    "type": "Request_Counts"
                },
                "time": now,
                "fields": {
                    "pending": requests.pending,
                    "approved": requests.approved,
                    "available": requests.available
                }
            }
        ]

        self.dbmanager.write_points(influx_payload)

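    # Snapshot Ombi's issue counters (pending / in progress / resolved) as a
    # single Issues_Counts point.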
    def get_issue_counts(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/Issues/count'

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        requests = OmbiIssuesCounts(**get)
        influx_payload = [
            {
                "measurement": "Ombi",
                "tags": {
                    "type": "Issues_Counts"
                },
                "time": now,
                "fields": {
                    "pending": requests.pending,
                    "in_progress": requests.inProgress,
                    "resolved": requests.resolved
                }
            }
        ]

        self.dbmanager.write_points(influx_payload)
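
# Rough usage sketch (not part of the upstream module; the URL and key below are
# illustrative): any objects exposing the attributes used above will work.
#
#   from types import SimpleNamespace
#   server = SimpleNamespace(id=1, url='http://ombi.local:3579',
#                            api_key='your-ombi-api-key', verify_ssl=False)
#   ombi = OmbiAPI(server, dbmanager)  # dbmanager must provide write_points(points)
#   ombi.get_request_counts()
#   ombi.get_issue_counts()
#   ombi.get_all_requests()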