2018-12-02 12:23:42 -08:00
|
|
|
import os
|
2018-12-01 14:30:41 -08:00
|
|
|
import time
|
|
|
|
import tarfile
|
2018-12-02 19:59:46 -08:00
|
|
|
import hashlib
|
2018-12-09 19:41:38 -08:00
|
|
|
import urllib3
|
2018-12-01 14:30:41 -08:00
|
|
|
import geoip2.database
|
2018-12-04 16:16:00 -08:00
|
|
|
import logging
|
2018-12-04 21:42:43 -08:00
|
|
|
|
2018-12-03 20:56:12 -08:00
|
|
|
from json.decoder import JSONDecodeError
|
2018-12-02 12:23:42 -08:00
|
|
|
from os.path import abspath, join
|
2018-12-03 20:56:12 -08:00
|
|
|
from requests.exceptions import InvalidSchema, SSLError
|
2018-12-01 14:30:41 -08:00
|
|
|
from urllib.request import urlretrieve
|
2018-11-28 12:32:39 -08:00
|
|
|
|
2018-12-04 19:17:33 -08:00
|
|
|
# Module-wide named logger shared by all helpers below.
logger = logging.getLogger('varken')
|
|
|
|
|
2018-11-28 12:32:39 -08:00
|
|
|
|
2018-12-09 20:56:09 -08:00
|
|
|
def geoip_download(data_folder):
    """Download the MaxMind GeoLite2-City database into *data_folder*.

    Fetches the gzipped tarball over HTTP, extracts only the
    ``GeoLite2-City.mmdb`` member (flattened to the folder root), and
    deletes the tarball afterwards.

    :param data_folder: directory the ``.mmdb`` file is extracted into
    """
    datafolder = data_folder

    tar_dbfile = abspath(join(datafolder, 'GeoLite2-City.tar.gz'))

    url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'
    logger.info('Downloading GeoLite2 from %s', url)
    urlretrieve(url, tar_dbfile)

    logger.debug('Opening GeoLite2 tar file : %s', tar_dbfile)
    # Context manager guarantees the tarball handle is closed even if
    # extraction fails (the original leaked the open tarfile). Also use the
    # module 'logger' instead of the root logger for consistency.
    with tarfile.open(tar_dbfile, 'r:gz') as tar:
        for files in tar.getmembers():
            if 'GeoLite2-City.mmdb' in files.name:
                logger.debug('"GeoLite2-City.mmdb" FOUND in tar file')
                # Strip the dated directory prefix so the .mmdb lands
                # directly inside datafolder.
                files.name = os.path.basename(files.name)
                tar.extract(files, datafolder)
                logger.debug('%s has been extracted to %s', files, datafolder)

    os.remove(tar_dbfile)
|
2018-12-01 14:30:41 -08:00
|
|
|
|
2018-12-02 19:59:46 -08:00
|
|
|
|
2018-12-09 20:56:09 -08:00
|
|
|
def geo_lookup(ipaddress, data_folder):
    """Return GeoLite2 city data for *ipaddress*.

    Downloads the database via :func:`geoip_download` when it is missing,
    and re-downloads it when the on-disk copy is older than 35 days.

    :param ipaddress: IP address string to look up
    :param data_folder: directory containing ``GeoLite2-City.mmdb``
    :returns: the ``geoip2`` city model for the address
    """
    datafolder = data_folder
    logger.debug('Reading GeoLite2 from %s', datafolder)

    dbfile = abspath(join(datafolder, 'GeoLite2-City.mmdb'))
    now = time.time()

    try:
        dbinfo = os.stat(dbfile)
        db_age = now - dbinfo.st_ctime
        # 35 days, expressed in seconds (86400 s/day).
        if db_age > (35 * 86400):
            logger.info('GeoLite2 DB is older than 35 days. Attempting to re-download...')
            os.remove(dbfile)
            geoip_download(datafolder)
    except FileNotFoundError:
        logger.error('GeoLite2 DB not found. Attempting to download...')
        geoip_download(datafolder)

    # Close the reader after the lookup: the original opened a new Reader
    # (and its underlying file handle) on every call without ever closing it.
    reader = geoip2.database.Reader(dbfile)
    try:
        return reader.city(ipaddress)
    finally:
        reader.close()
|
2018-12-02 19:59:46 -08:00
|
|
|
|
|
|
|
|
|
|
|
def hashit(string):
    """Return the hexadecimal MD5 digest of *string*."""
    return hashlib.md5(string.encode()).hexdigest()
|
2018-12-03 20:56:12 -08:00
|
|
|
|
|
|
|
|
|
|
|
def connection_handler(session, request, verify):
    """Send a prepared request and normalize the response.

    :param session: a ``requests.Session`` used to send the request
    :param request: a prepared ``requests`` request to send
    :param verify: whether to verify TLS certificates
    :returns: parsed JSON body on HTTP 200, the ``X-Auth-Token`` header
        value on HTTP 204 (ASA only), otherwise ``False``
    """
    s = session
    r = request
    v = verify
    return_json = False

    # Self-signed certificates are common on these endpoints; silence the
    # per-request insecure-connection warning.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    try:
        get = s.send(r, verify=v)
        if get.status_code == 401:
            logger.info('Your api key is incorrect for %s', r.url)
        elif get.status_code == 404:
            logger.info('This url doesnt even resolve: %s', r.url)
        elif get.status_code == 200:
            try:
                return_json = get.json()
            except JSONDecodeError:
                logger.error('No JSON response... BORKED! Let us know in discord')
        # 204 No Content is for ASA only
        elif get.status_code == 204:
            # .get() avoids a KeyError when the header is absent; the
            # original indexed the headers dict directly and could crash.
            token = get.headers.get('X-Auth-Token')
            if token:
                return token
    except InvalidSchema:
        logger.error('You added http(s):// in the config file. Don\'t do that.')
    except SSLError as e:
        logger.error('Either your host is unreachable or you have an SSL issue. : %s', e)

    return return_json
|
2018-12-04 21:42:43 -08:00
|
|
|
|
|
|
|
|
|
|
|
def mkdir_p(path):
    """Create *path* (including parents) if missing, logging the outcome.

    Based on http://stackoverflow.com/a/600612/190597 (tzot).
    """
    logger.info('Creating folder %s ', path)
    try:
        os.makedirs(path, exist_ok=True)
    except Exception as e:
        logger.error('Could not create folder %s : %s ', path, e)
|