added forced package imports

This commit is contained in:
Nicholas St. Germain 2018-12-02 13:38:37 -06:00
parent 0e2ffdbbb1
commit ef9022c6eb
943 changed files with 125530 additions and 16 deletions

21
lib/influxdb/__init__.py Normal file
View file

@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
"""Initialize the influxdb package."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

# Re-export the primary public entry points at package level.
from .client import InfluxDBClient
from .dataframe_client import DataFrameClient
from .helper import SeriesHelper

# Names exported by ``from influxdb import *``.
__all__ = [
    'InfluxDBClient',
    'DataFrameClient',
    'SeriesHelper',
]

# Package version; keep in sync with the distribution metadata.
__version__ = '5.2.0'

452
lib/influxdb/dataframe_client.py Normal file
View file

@@ -0,0 +1,452 @@
# -*- coding: utf-8 -*-
"""DataFrame client for InfluxDB."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
from collections import defaultdict
import pandas as pd
import numpy as np
from .client import InfluxDBClient
from .line_protocol import _escape_tag
def _pandas_time_unit(time_precision):
unit = time_precision
if time_precision == 'm':
unit = 'ms'
elif time_precision == 'u':
unit = 'us'
elif time_precision == 'n':
unit = 'ns'
assert unit in ('s', 'ms', 'us', 'ns')
return unit
def _escape_pandas_series(s):
    """Escape every value of the pandas Series *s* as a tag value.

    :param s: pandas Series of values to escape
    :returns: a new Series with ``_escape_tag`` applied element-wise
    """
    # ``Series.apply`` accepts ``_escape_tag`` directly; the lambda
    # wrapper added a needless call layer per element.
    return s.apply(_escape_tag)
class DataFrameClient(InfluxDBClient):
    """DataFrameClient instantiates InfluxDBClient to connect to the backend.

    The ``DataFrameClient`` object holds information necessary to connect
    to InfluxDB. Requests can be made to InfluxDB directly through the client.
    The client reads and writes from pandas DataFrames.
    """

    # Unix epoch as a timezone-aware Timestamp; baseline used by
    # ``_datetime_to_epoch`` to turn datetimes into epoch offsets.
    EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00')
def write_points(self,
                 dataframe,
                 measurement,
                 tags=None,
                 tag_columns=None,
                 field_columns=None,
                 time_precision=None,
                 database=None,
                 retention_policy=None,
                 batch_size=None,
                 protocol='line',
                 numeric_precision=None):
    """Write to multiple time series names.

    :param dataframe: data points in a DataFrame
    :param measurement: name of measurement
    :param tags: dictionary of tags, with string key-values
    :param tag_columns: [Optional] DataFrame columns to write as tags
    :param field_columns: [Optional] DataFrame columns to write as fields;
        defaults to every column not listed in ``tag_columns``
    :param time_precision: [Optional, default None] Either 's', 'ms', 'u'
        or 'n'.
    :param database: [Optional] database to write the points to
    :param retention_policy: [Optional] retention policy for the points
    :param batch_size: [Optional] Value to write the points in batches
        instead of all at one time. Useful for when doing data dumps from
        one database to another or when doing a massive write operation
    :type batch_size: int
    :param protocol: Protocol for writing data. Either 'line' or 'json'.
    :param numeric_precision: Precision for floating point values.
        Either None, 'full' or some int, where int is the desired decimal
        precision. 'full' preserves full precision for int and float
        datatypes. Defaults to None, which preserves 14-15 significant
        figures for float and all significant figures for int datatypes.
    :returns: True on success
    :rtype: bool
    """
    if tag_columns is None:
        tag_columns = []
    if field_columns is None:
        field_columns = []

    def _serialize(frame):
        # Convert one DataFrame (or slice of one) to the wire format.
        # Previously this logic was duplicated verbatim in the batched
        # and unbatched paths.
        if protocol == 'line':
            return self._convert_dataframe_to_lines(
                frame,
                measurement=measurement,
                global_tags=tags,
                time_precision=time_precision,
                tag_columns=tag_columns,
                field_columns=field_columns,
                numeric_precision=numeric_precision)
        return self._convert_dataframe_to_json(
            frame,
            measurement=measurement,
            tags=tags,
            time_precision=time_precision,
            tag_columns=tag_columns,
            field_columns=field_columns)

    if batch_size:
        # Slice the frame into ``batch_size``-row chunks; ``iloc``
        # slicing past the end of the frame is safe.
        number_batches = int(math.ceil(len(dataframe) / float(batch_size)))
        for batch in range(number_batches):
            start_index = batch * batch_size
            end_index = (batch + 1) * batch_size
            points = _serialize(
                dataframe.iloc[start_index:end_index].copy())
            super(DataFrameClient, self).write_points(
                points,
                time_precision,
                database,
                retention_policy,
                protocol=protocol)
        return True

    points = _serialize(dataframe)
    super(DataFrameClient, self).write_points(
        points,
        time_precision,
        database,
        retention_policy,
        protocol=protocol)
    return True
def query(self,
          query,
          params=None,
          epoch=None,
          expected_response_code=200,
          database=None,
          raise_errors=True,
          chunked=False,
          chunk_size=0,
          dropna=True):
    """Query InfluxDB and convert SELECT results into DataFrames.

    :param query: the actual query string
    :param params: additional parameters for the request, defaults to {}
    :param epoch: response timestamps to be in epoch format either 'h',
        'm', 's', 'ms', 'u', or 'ns', defaults to `None` which is
        RFC3339 UTC format with nanosecond precision
    :param expected_response_code: the expected status code of response,
        defaults to 200
    :param database: database to query, defaults to None
    :param raise_errors: Whether or not to raise exceptions when InfluxDB
        returns errors, defaults to True
    :param chunked: Enable to use chunked responses from InfluxDB.
        With ``chunked`` enabled, one ResultSet is returned per chunk
        containing all results within that chunk
    :param chunk_size: Size of each chunk to tell InfluxDB to use.
    :param dropna: drop columns where all values are missing
    :returns: the queried data
    :rtype: :class:`~.ResultSet`
    """
    results = super(DataFrameClient, self).query(
        query,
        params=params,
        epoch=epoch,
        expected_response_code=expected_response_code,
        raise_errors=raise_errors,
        chunked=chunked,
        database=database,
        chunk_size=chunk_size)
    # Only SELECT statements produce series convertible to DataFrames;
    # everything else is passed through untouched.
    if not query.strip().upper().startswith("SELECT"):
        return results
    if len(results) == 0:
        return {}
    return self._to_dataframe(results, dropna)
def _to_dataframe(self, rs, dropna=True):
result = defaultdict(list)
if isinstance(rs, list):
return map(self._to_dataframe, rs)
for key, data in rs.items():
name, tags = key
if tags is None:
key = name
else:
key = (name, tuple(sorted(tags.items())))
df = pd.DataFrame(data)
df.time = pd.to_datetime(df.time)
df.set_index('time', inplace=True)
df.index = df.index.tz_localize('UTC')
df.index.name = None
result[key].append(df)
for key, data in result.items():
df = pd.concat(data).sort_index()
if dropna:
df.dropna(how='all', axis=1, inplace=True)
result[key] = df
return result
@staticmethod
def _convert_dataframe_to_json(dataframe,
measurement,
tags=None,
tag_columns=None,
field_columns=None,
time_precision=None):
if not isinstance(dataframe, pd.DataFrame):
raise TypeError('Must be DataFrame, but type was: {0}.'
.format(type(dataframe)))
if not (isinstance(dataframe.index, pd.PeriodIndex) or
isinstance(dataframe.index, pd.DatetimeIndex)):
raise TypeError('Must be DataFrame with DatetimeIndex or '
'PeriodIndex.')
# Make sure tags and tag columns are correctly typed
tag_columns = tag_columns if tag_columns is not None else []
field_columns = field_columns if field_columns is not None else []
tags = tags if tags is not None else {}
# Assume field columns are all columns not included in tag columns
if not field_columns:
field_columns = list(
set(dataframe.columns).difference(set(tag_columns)))
dataframe.index = pd.to_datetime(dataframe.index)
if dataframe.index.tzinfo is None:
dataframe.index = dataframe.index.tz_localize('UTC')
# Convert column to strings
dataframe.columns = dataframe.columns.astype('str')
# Convert dtype for json serialization
dataframe = dataframe.astype('object')
precision_factor = {
"n": 1,
"u": 1e3,
"ms": 1e6,
"s": 1e9,
"m": 1e9 * 60,
"h": 1e9 * 3600,
}.get(time_precision, 1)
points = [
{'measurement': measurement,
'tags': dict(list(tag.items()) + list(tags.items())),
'fields': rec,
'time': np.int64(ts.value / precision_factor)}
for ts, tag, rec in zip(dataframe.index,
dataframe[tag_columns].to_dict('record'),
dataframe[field_columns].to_dict('record'))
]
return points
def _convert_dataframe_to_lines(self,
                                dataframe,
                                measurement,
                                field_columns=None,
                                tag_columns=None,
                                global_tags=None,
                                time_precision=None,
                                numeric_precision=None):
    """Convert a DataFrame into a list of line protocol strings.

    :param dataframe: DataFrame with a DatetimeIndex or PeriodIndex
    :param measurement: name of measurement
    :param field_columns: columns to emit as fields; defaults to every
        non-tag column
    :param tag_columns: columns to emit as tags; when *field_columns* is
        given and this is not, all remaining columns become tags
    :param global_tags: tags appended to every point
    :param time_precision: 'n', 'u', 'ms', 's', 'm' or 'h'; timestamps
        are scaled down from nanoseconds accordingly
    :param numeric_precision: None, 'full' or an int decimal precision
        (see :meth:`_stringify_dataframe`)
    :raises TypeError: if *dataframe* is not a DataFrame, or its index is
        neither DatetimeIndex nor PeriodIndex
    """
    # Rows that are entirely NaN carry no data; drop them up front.
    dataframe = dataframe.dropna(how='all').copy()
    if len(dataframe) == 0:
        return []
    if not isinstance(dataframe, pd.DataFrame):
        raise TypeError('Must be DataFrame, but type was: {0}.'
                        .format(type(dataframe)))
    if not (isinstance(dataframe.index, pd.PeriodIndex) or
            isinstance(dataframe.index, pd.DatetimeIndex)):
        raise TypeError('Must be DataFrame with DatetimeIndex or '
                        'PeriodIndex.')
    # Escape column names so they are valid line-protocol identifiers.
    dataframe = dataframe.rename(
        columns={item: _escape_tag(item) for item in dataframe.columns})
    # Create a Series of columns for easier indexing
    column_series = pd.Series(dataframe.columns)
    if field_columns is None:
        field_columns = []
    if tag_columns is None:
        tag_columns = []
    if global_tags is None:
        global_tags = {}
    # Make sure field_columns and tag_columns are lists
    field_columns = list(field_columns) if list(field_columns) else []
    tag_columns = list(tag_columns) if list(tag_columns) else []
    # If field columns but no tag columns, assume rest of columns are tags
    if field_columns and (not tag_columns):
        tag_columns = list(column_series[~column_series.isin(
            field_columns)])
    # If no field columns, assume non-tag columns are fields
    if not field_columns:
        field_columns = list(column_series[~column_series.isin(
            tag_columns)])
    # Scale factor from nanoseconds to the requested precision.
    precision_factor = {
        "n": 1,
        "u": 1e3,
        "ms": 1e6,
        "s": 1e9,
        "m": 1e9 * 60,
        "h": 1e9 * 3600,
    }.get(time_precision, 1)
    # Make array of timestamp ints (as strings, ready to concatenate)
    if isinstance(dataframe.index, pd.PeriodIndex):
        time = ((dataframe.index.to_timestamp().values.astype(np.int64) /
                 precision_factor).astype(np.int64).astype(str))
    else:
        time = ((pd.to_datetime(dataframe.index).values.astype(np.int64) /
                 precision_factor).astype(np.int64).astype(str))
    # If tag columns exist, make an array of formatted tag keys and values
    if tag_columns:
        # Materialize global_tags as extra tag columns so they are
        # stringified and sorted along with the per-row tags.
        if global_tags:
            for tag in global_tags:
                dataframe[tag] = global_tags[tag]
                tag_columns.append(tag)
        tag_df = dataframe[tag_columns]
        tag_df = tag_df.fillna('')  # replace NA with empty string
        tag_df = tag_df.sort_index(axis=1)
        tag_df = self._stringify_dataframe(
            tag_df, numeric_precision, datatype='tag')
        # join prepended tags, leaving empty values out
        tags = tag_df.apply(
            lambda s: [',' + s.name + '=' + v if v else '' for v in s])
        tags = tags.sum(axis=1)
        del tag_df
    elif global_tags:
        # No per-row tags: one constant tag string for every row.
        tag_string = ''.join(
            [",{}={}".format(k, _escape_tag(v)) if v else ''
             for k, v in sorted(global_tags.items())]
        )
        tags = pd.Series(tag_string, index=dataframe.index)
    else:
        tags = ''
    # Make an array of formatted field keys and values
    field_df = dataframe[field_columns]
    # Keep the positions where Null values are found
    mask_null = field_df.isnull().values
    field_df = self._stringify_dataframe(field_df,
                                         numeric_precision,
                                         datatype='field')
    # Prefix each cell with "column=", then comma-join across columns.
    field_df = (field_df.columns.values + '=').tolist() + field_df
    field_df[field_df.columns[1:]] = ',' + field_df[
        field_df.columns[1:]]
    field_df = field_df.where(~mask_null, '')  # drop Null entries
    fields = field_df.sum(axis=1)
    del field_df
    # Generate line protocol string
    measurement = _escape_tag(measurement)
    points = (measurement + tags + ' ' + fields + ' ' + time).tolist()
    return points
@staticmethod
def _stringify_dataframe(dframe, numeric_precision, datatype='field'):
    """Convert every cell of *dframe* to its line-protocol string form.

    :param dframe: DataFrame to stringify (not modified in place)
    :param numeric_precision: None (plain ``str``), 'full' (``repr`` for
        floats), or an int number of decimal places to round floats to
    :param datatype: 'field' (ints get an ``i`` suffix, strings are
        double-quoted) or 'tag' (values are tag-escaped)
    :raises ValueError: if *numeric_precision* is none of the above
    """
    # Prevent modification of input dataframe
    dframe = dframe.copy()
    # Find int and string columns for field-type data
    int_columns = dframe.select_dtypes(include=['integer']).columns
    string_columns = dframe.select_dtypes(include=['object']).columns
    # Convert dframe to string
    if numeric_precision is None:
        # If no precision specified, convert directly to string (fast)
        dframe = dframe.astype(str)
    elif numeric_precision == 'full':
        # If full precision, use repr to get full float precision
        float_columns = (dframe.select_dtypes(
            include=['floating']).columns)
        nonfloat_columns = dframe.columns[~dframe.columns.isin(
            float_columns)]
        dframe[float_columns] = dframe[float_columns].applymap(repr)
        dframe[nonfloat_columns] = (dframe[nonfloat_columns].astype(str))
    elif isinstance(numeric_precision, int):
        # If precision is specified, round to appropriate precision
        float_columns = (dframe.select_dtypes(
            include=['floating']).columns)
        nonfloat_columns = dframe.columns[~dframe.columns.isin(
            float_columns)]
        dframe[float_columns] = (dframe[float_columns].round(
            numeric_precision))
        # If desired precision is > 10 decimal places, need to use repr
        if numeric_precision > 10:
            dframe[float_columns] = (dframe[float_columns].applymap(repr))
            dframe[nonfloat_columns] = (dframe[nonfloat_columns]
                                        .astype(str))
        else:
            dframe = dframe.astype(str)
    else:
        raise ValueError('Invalid numeric precision.')
    if datatype == 'field':
        # If dealing with fields, format ints and strings correctly
        dframe[int_columns] += 'i'
        dframe[string_columns] = '"' + dframe[string_columns] + '"'
    elif datatype == 'tag':
        dframe = dframe.apply(_escape_pandas_series)
    dframe.columns = dframe.columns.astype(str)
    return dframe
def _datetime_to_epoch(self, datetime, time_precision='s'):
seconds = (datetime - self.EPOCH).total_seconds()
if time_precision == 'h':
return seconds / 3600
elif time_precision == 'm':
return seconds / 60
elif time_precision == 's':
return seconds
elif time_precision == 'ms':
return seconds * 1e3
elif time_precision == 'u':
return seconds * 1e6
elif time_precision == 'n':
return seconds * 1e9

27
lib/influxdb/chunked_json.py Normal file
View file

@@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
"""Module to generate chunked JSON replies."""
#
# Author: Adrian Sampson <adrian@radbox.org>
# Source: https://gist.github.com/sampsyo/920215
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
def loads(s):
    """Generate a sequence of JSON values from a string.

    Yields each top-level JSON value found in *s*, in order. Whitespace
    between values (and around the whole string) is ignored.

    :param s: string containing zero or more concatenated JSON values
    :raises ValueError: if a non-whitespace remainder is not valid JSON
    """
    _decoder = json.JSONDecoder()
    while s:
        s = s.strip()
        if not s:
            # Bug fix: a whitespace-only tail used to reach
            # ``raw_decode('')`` and raise; treat it as end of input.
            break
        obj, pos = _decoder.raw_decode(s)
        if not pos:
            raise ValueError('no JSON object found at %i' % pos)
        yield obj
        s = s[pos:]

980
lib/influxdb/client.py Normal file
View file

@@ -0,0 +1,980 @@
# -*- coding: utf-8 -*-
"""Python client for InfluxDB."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import time
import random
import json
import socket
import requests
import requests.exceptions
from six.moves import xrange
from six.moves.urllib.parse import urlparse
from influxdb.line_protocol import make_lines, quote_ident, quote_literal
from influxdb.resultset import ResultSet
from .exceptions import InfluxDBClientError
from .exceptions import InfluxDBServerError
class InfluxDBClient(object):
    """InfluxDBClient primary client object to connect InfluxDB.

    The :class:`~.InfluxDBClient` object holds information necessary to
    connect to InfluxDB. Requests can be made to InfluxDB directly through
    the client.

    :param host: hostname to connect to InfluxDB, defaults to 'localhost'
    :type host: str
    :param port: port to connect to InfluxDB, defaults to 8086
    :type port: int
    :param username: user to connect, defaults to 'root'
    :type username: str
    :param password: password of the user, defaults to 'root'
    :type password: str
    :param pool_size: urllib3 connection pool size, defaults to 10.
    :type pool_size: int
    :param database: database name to connect to, defaults to None
    :type database: str
    :param ssl: use https instead of http to connect to InfluxDB, defaults to
        False
    :type ssl: bool
    :param verify_ssl: verify SSL certificates for HTTPS requests, defaults to
        False
    :type verify_ssl: bool
    :param timeout: number of seconds Requests will wait for your client to
        establish a connection, defaults to None
    :type timeout: int
    :param retries: number of retries your client will try before aborting,
        defaults to 3. 0 indicates try until success
    :type retries: int
    :param use_udp: use UDP to connect to InfluxDB, defaults to False
    :type use_udp: bool
    :param udp_port: UDP port to connect to InfluxDB, defaults to 4444
    :type udp_port: int
    :param proxies: HTTP(S) proxy to use for Requests, defaults to {}
    :type proxies: dict
    :param path: path of InfluxDB on the server to connect, defaults to ''
    :type path: str
    """
def __init__(self,
             host='localhost',
             port=8086,
             username='root',
             password='root',
             database=None,
             ssl=False,
             verify_ssl=False,
             timeout=None,
             retries=3,
             use_udp=False,
             udp_port=4444,
             proxies=None,
             pool_size=10,
             path='',
             ):
    """Construct a new InfluxDBClient object."""
    self.__host = host
    self.__port = int(port)
    self._username = username
    self._password = password
    self._database = database
    self._timeout = timeout
    self._retries = retries
    self._verify_ssl = verify_ssl
    self.__use_udp = use_udp
    self.__udp_port = udp_port
    self._session = requests.Session()
    # Single pooled HTTP adapter shared by every request on this client.
    adapter = requests.adapters.HTTPAdapter(
        pool_connections=int(pool_size),
        pool_maxsize=int(pool_size)
    )
    if use_udp:
        # UDP writes bypass the HTTP session (see send_packet).
        self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # Normalize the server path so it is '' or starts with '/'.
    if not path:
        self.__path = ''
    elif path[0] == '/':
        self.__path = path
    else:
        self.__path = '/' + path
    self._scheme = "http"
    if ssl is True:
        self._scheme = "https"
    # Mount the adapter only once the scheme is known.
    self._session.mount(self._scheme + '://', adapter)
    if proxies is None:
        self._proxies = {}
    else:
        self._proxies = proxies
    self.__baseurl = "{0}://{1}:{2}{3}".format(
        self._scheme,
        self._host,
        self._port,
        self._path)
    # Default headers; individual requests may override a copy of these.
    self._headers = {
        'Content-Type': 'application/json',
        'Accept': 'text/plain'
    }
# Read-only accessors over the connection settings captured at
# construction time (the double-underscore attributes are name-mangled,
# so subclasses read them through these properties).

@property
def _baseurl(self):
    return self.__baseurl

@property
def _host(self):
    return self.__host

@property
def _port(self):
    return self.__port

@property
def _path(self):
    return self.__path

@property
def _udp_port(self):
    return self.__udp_port

@property
def _use_udp(self):
    return self.__use_udp
@classmethod
def from_dsn(cls, dsn, **kwargs):
    r"""Generate an instance of InfluxDBClient from given data source name.

    Supported schemes are "influxdb", "https+influxdb" and
    "udp+influxdb". Keyword arguments override values parsed from the
    DSN. With "udp+influxdb" the port in the DSN is used for the TCP
    connection; pass the UDP port via the additional ``udp_port``
    parameter.

    :param dsn: data source name
    :type dsn: string
    :param kwargs: additional parameters for `InfluxDBClient`
    :type kwargs: dict
    :raises ValueError: if the provided DSN has any unexpected values

    :Example:

    ::

        >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\
localhost:8086/databasename', timeout=5)
        >> type(cli)
        <class 'influxdb.client.InfluxDBClient'>
    """
    init_args = _parse_dsn(dsn)
    # Only the first host:port pair of the DSN is used.
    first_host, first_port = init_args.pop('hosts')[0]
    init_args['host'] = first_host
    init_args['port'] = first_port
    init_args.update(kwargs)
    return cls(**init_args)
def switch_database(self, database):
"""Change the client's database.
:param database: the name of the database to switch to
:type database: str
"""
self._database = database
def switch_user(self, username, password):
"""Change the client's username.
:param username: the username to switch to
:type username: str
:param password: the password for the username
:type password: str
"""
self._username = username
self._password = password
def request(self, url, method='GET', params=None, data=None,
            expected_response_code=200, headers=None):
    """Make a HTTP request to the InfluxDB API.

    :param url: the path of the HTTP request, e.g. write, query, etc.
    :type url: str
    :param method: the HTTP method for the request, defaults to GET
    :type method: str
    :param params: additional parameters for the request, defaults to None
    :type params: dict
    :param data: the data of the request, defaults to None
    :type data: str
    :param expected_response_code: the expected response code of
        the request, defaults to 200
    :type expected_response_code: int
    :param headers: headers to add to the request
    :type headers: dict
    :returns: the response from the request
    :rtype: :class:`requests.Response`
    :raises InfluxDBServerError: if the response code is any server error
        code (5xx)
    :raises InfluxDBClientError: if the response code is not the
        same as `expected_response_code` and is not a server error code
    """
    url = "{0}/{1}".format(self._baseurl, url)
    if headers is None:
        headers = self._headers
    if params is None:
        params = {}
    # Serialize structured payloads; strings/bytes pass through as-is.
    if isinstance(data, (dict, list)):
        data = json.dumps(data)
    # Try to send the request more than once by default (see #103)
    retry = True
    _try = 0
    while retry:
        try:
            response = self._session.request(
                method=method,
                url=url,
                auth=(self._username, self._password),
                params=params,
                data=data,
                headers=headers,
                proxies=self._proxies,
                verify=self._verify_ssl,
                timeout=self._timeout
            )
            break
        except (requests.exceptions.ConnectionError,
                requests.exceptions.HTTPError,
                requests.exceptions.Timeout):
            _try += 1
            # retries == 0 means retry forever; otherwise stop after
            # self._retries attempts.
            if self._retries != 0:
                retry = _try < self._retries
            # Exponential backoff with jitter before retrying a POST.
            if method == "POST":
                time.sleep((2 ** _try) * random.random() / 100.0)
            if not retry:
                raise
    # if there's not an error, there must have been a successful response
    if 500 <= response.status_code < 600:
        raise InfluxDBServerError(response.content)
    elif response.status_code == expected_response_code:
        return response
    else:
        raise InfluxDBClientError(response.content, response.status_code)
def write(self, data, params=None, expected_response_code=204,
protocol='json'):
"""Write data to InfluxDB.
:param data: the data to be written
:type data: (if protocol is 'json') dict
(if protocol is 'line') sequence of line protocol strings
or single string
:param params: additional parameters for the request, defaults to None
:type params: dict
:param expected_response_code: the expected response code of the write
operation, defaults to 204
:type expected_response_code: int
:param protocol: protocol of input data, either 'json' or 'line'
:type protocol: str
:returns: True, if the write operation is successful
:rtype: bool
"""
headers = self._headers
headers['Content-Type'] = 'application/octet-stream'
if params:
precision = params.get('precision')
else:
precision = None
if protocol == 'json':
data = make_lines(data, precision).encode('utf-8')
elif protocol == 'line':
if isinstance(data, str):
data = [data]
data = ('\n'.join(data) + '\n').encode('utf-8')
self.request(
url="write",
method='POST',
params=params,
data=data,
expected_response_code=expected_response_code,
headers=headers
)
return True
@staticmethod
def _read_chunked_response(response, raise_errors=True):
    """Aggregate the JSON lines of a chunked response into one ResultSet."""
    aggregated = {}
    for raw_line in response.iter_lines():
        if isinstance(raw_line, bytes):
            raw_line = raw_line.decode('utf-8')
        payload = json.loads(raw_line)
        # Merge every list-valued key of each chunk's results.
        for result in payload.get('results', []):
            for key, value in result.items():
                if isinstance(value, list):
                    aggregated.setdefault(key, []).extend(value)
    return ResultSet(aggregated, raise_errors=raise_errors)
def query(self,
          query,
          params=None,
          epoch=None,
          expected_response_code=200,
          database=None,
          raise_errors=True,
          chunked=False,
          chunk_size=0,
          method="GET"):
    """Send a query to InfluxDB.

    :param query: the actual query string
    :type query: str
    :param params: additional parameters for the request,
        defaults to {}
    :type params: dict
    :param epoch: response timestamps to be in epoch format either 'h',
        'm', 's', 'ms', 'u', or 'ns', defaults to `None` which is
        RFC3339 UTC format with nanosecond precision
    :type epoch: str
    :param expected_response_code: the expected status code of response,
        defaults to 200
    :type expected_response_code: int
    :param database: database to query, defaults to None
    :type database: str
    :param raise_errors: Whether or not to raise exceptions when InfluxDB
        returns errors, defaults to True
    :type raise_errors: bool
    :param chunked: Enable to use chunked responses from InfluxDB.
        With ``chunked`` enabled, one ResultSet is returned per chunk
        containing all results within that chunk
    :type chunked: bool
    :param chunk_size: Size of each chunk to tell InfluxDB to use.
    :type chunk_size: int
    :param method: the HTTP method for the request, defaults to GET
    :type method: str
    :returns: the queried data
    :rtype: :class:`~.ResultSet`
    """
    if params is None:
        params = {}

    params['q'] = query
    params['db'] = database or self._database

    if epoch is not None:
        params['epoch'] = epoch

    if chunked:
        params['chunked'] = 'true'
        if chunk_size > 0:
            params['chunk_size'] = chunk_size

    # SELECT ... INTO writes data server-side and must be POSTed.
    if query.lower().startswith("select ") and " into " in query.lower():
        method = "POST"

    response = self.request(
        url="query",
        method=method,
        params=params,
        data=None,
        expected_response_code=expected_response_code
    )

    if chunked:
        # Bug fix: ``raise_errors`` was previously dropped here, so
        # chunked queries always raised on InfluxDB errors.
        return self._read_chunked_response(response, raise_errors)

    data = response.json()

    results = [
        ResultSet(result, raise_errors=raise_errors)
        for result
        in data.get('results', [])
    ]

    # TODO(aviau): Always return a list. (This would be a breaking change)
    if len(results) == 1:
        return results[0]

    return results
def write_points(self,
                 points,
                 time_precision=None,
                 database=None,
                 retention_policy=None,
                 tags=None,
                 batch_size=None,
                 protocol='json'
                 ):
    """Write to multiple time series names.

    :param points: the list of points to be written in the database
    :type points: list of dictionaries, each dictionary represents a point
    :type points: (if protocol is 'json') list of dicts, where each dict
        represents a point.
        (if protocol is 'line') sequence of line protocol strings.
    :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
    :type time_precision: str
    :param database: the database to write the points to. Defaults to
        the client's current database
    :type database: str
    :param tags: a set of key-value pairs associated with each point. Both
        keys and values must be strings. These are shared tags and will be
        merged with point-specific tags, defaults to None
    :type tags: dict
    :param retention_policy: the retention policy for the points. Defaults
        to None
    :type retention_policy: str
    :param batch_size: value to write the points in batches
        instead of all at one time. Useful for when doing data dumps from
        one database to another or when doing a massive write operation,
        defaults to None
    :type batch_size: int
    :param protocol: Protocol for writing data. Either 'line' or 'json'.
    :type protocol: str
    :returns: True, if the operation is successful
    :rtype: bool

    .. note:: if no retention policy is specified, the default retention
        policy for the database is used
    """
    if batch_size and batch_size > 0:
        # Each batch is an independent write request.
        for batch in self._batches(points, batch_size):
            self._write_points(points=batch,
                               time_precision=time_precision,
                               database=database,
                               retention_policy=retention_policy,
                               tags=tags, protocol=protocol)
        return True

    return self._write_points(points=points,
                              time_precision=time_precision,
                              database=database,
                              retention_policy=retention_policy,
                              tags=tags, protocol=protocol)
def ping(self):
"""Check connectivity to InfluxDB.
:returns: The version of the InfluxDB the client is connected to
"""
response = self.request(
url="ping",
method='GET',
expected_response_code=204
)
return response.headers['X-Influxdb-Version']
@staticmethod
def _batches(iterable, size):
for i in xrange(0, len(iterable), size):
yield iterable[i:i + size]
def _write_points(self,
                  points,
                  time_precision,
                  database,
                  retention_policy,
                  tags,
                  protocol='json'):
    """Write a single batch of points via HTTP or UDP.

    :param points: points to write (dicts for 'json', strings for 'line')
    :param time_precision: 'n', 'u', 'ms', 's', 'm', 'h' or None
    :param database: target database; falls back to the client's default
    :param retention_policy: retention policy for the points, or None
    :param tags: shared tags merged into every point (json protocol only)
    :param protocol: 'json' or 'line'
    :raises ValueError: if *time_precision* is not a recognized value
    :returns: True on success
    """
    if time_precision not in ['n', 'u', 'ms', 's', 'm', 'h', None]:
        raise ValueError(
            "Invalid time precision is given. "
            "(use 'n', 'u', 'ms', 's', 'm' or 'h')")

    if protocol == 'json':
        data = {
            'points': points
        }

        if tags is not None:
            data['tags'] = tags
    else:
        # Line protocol payloads are passed through untouched.
        data = points

    params = {
        'db': database or self._database
    }

    if time_precision is not None:
        params['precision'] = time_precision

    if retention_policy is not None:
        params['rp'] = retention_policy

    if self._use_udp:
        # UDP writes are fire-and-forget and skip the HTTP layer.
        self.send_packet(
            data, protocol=protocol, time_precision=time_precision
        )
    else:
        self.write(
            data=data,
            params=params,
            expected_response_code=204,
            protocol=protocol
        )

    return True
def get_list_database(self):
"""Get the list of databases in InfluxDB.
:returns: all databases in InfluxDB
:rtype: list of dictionaries
:Example:
::
>> dbs = client.get_list_database()
>> dbs
[{u'name': u'db1'}, {u'name': u'db2'}, {u'name': u'db3'}]
"""
return list(self.query("SHOW DATABASES").get_points())
def create_database(self, dbname):
    """Create a new database in InfluxDB.

    :param dbname: the name of the database to create
    :type dbname: str
    """
    query_string = "CREATE DATABASE {0}".format(quote_ident(dbname))
    self.query(query_string, method="POST")

def drop_database(self, dbname):
    """Drop a database from InfluxDB.

    :param dbname: the name of the database to drop
    :type dbname: str
    """
    query_string = "DROP DATABASE {0}".format(quote_ident(dbname))
    self.query(query_string, method="POST")
def get_list_measurements(self):
"""Get the list of measurements in InfluxDB.
:returns: all measurements in InfluxDB
:rtype: list of dictionaries
:Example:
::
>> dbs = client.get_list_measurements()
>> dbs
[{u'name': u'measurements1'},
{u'name': u'measurements2'},
{u'name': u'measurements3'}]
"""
return list(self.query("SHOW MEASUREMENTS").get_points())
def drop_measurement(self, measurement):
    """Drop a measurement from InfluxDB.

    :param measurement: the name of the measurement to drop
    :type measurement: str
    """
    query_string = "DROP MEASUREMENT {0}".format(quote_ident(measurement))
    self.query(query_string, method="POST")
def create_retention_policy(self, name, duration, replication,
                            database=None,
                            default=False, shard_duration="0s"):
    """Create a retention policy for a database.

    :param name: the name of the new retention policy
    :type name: str
    :param duration: the duration of the new retention policy.
        Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported
        and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks,
        respectively. For infinite retention - meaning the data will
        never be deleted - use 'INF' for duration.
        The minimum retention period is 1 hour.
    :type duration: str
    :param replication: the replication of the retention policy
    :type replication: str
    :param database: the database for which the retention policy is
        created. Defaults to current client's database
    :type database: str
    :param default: whether or not to set the policy as default
    :type default: bool
    :param shard_duration: the shard duration of the retention policy.
        Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and
        mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks,
        respectively. Infinite retention is not supported. As a workaround,
        specify a "1000w" duration to achieve an extremely long shard group
        duration. Defaults to "0s", which is interpreted by the database
        to mean the default value given the duration.
        The minimum shard group duration is 1 hour.
    :type shard_duration: str
    """
    query_string = \
        "CREATE RETENTION POLICY {0} ON {1} " \
        "DURATION {2} REPLICATION {3} SHARD DURATION {4}".format(
            quote_ident(name), quote_ident(database or self._database),
            duration, replication, shard_duration)

    if default is True:
        query_string += " DEFAULT"

    # Schema-changing statements must be POSTed.
    self.query(query_string, method="POST")
def alter_retention_policy(self, name, database=None,
                           duration=None, replication=None,
                           default=None, shard_duration=None):
    """Modify an existing retention policy for a database.

    :param name: the name of the retention policy to modify
    :type name: str
    :param database: the database for which the retention policy is
        modified. Defaults to current client's database
    :type database: str
    :param duration: the new duration of the existing retention policy.
        Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported
        and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks,
        respectively. For infinite retention, meaning the data will
        never be deleted, use 'INF' for duration.
        The minimum retention period is 1 hour.
    :type duration: str
    :param replication: the new replication of the existing
        retention policy
    :type replication: int
    :param default: whether or not to set the modified policy as default
    :type default: bool
    :param shard_duration: the shard duration of the retention policy.
        Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and
        mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks,
        respectively. Infinite retention is not supported. As a workaround,
        specify a "1000w" duration to achieve an extremely long shard group
        duration.
        The minimum shard group duration is 1 hour.
    :type shard_duration: str

    .. note:: at least one of duration, replication, or default flag
        should be set. Otherwise the operation will fail.
    """
    # Bug fix: the template has two placeholders, but ``shard_duration``
    # was passed as a stray third format() argument; SHARD DURATION is
    # appended conditionally below instead.
    query_string = (
        "ALTER RETENTION POLICY {0} ON {1}"
    ).format(quote_ident(name),
             quote_ident(database or self._database))
    if duration:
        query_string += " DURATION {0}".format(duration)
    if shard_duration:
        query_string += " SHARD DURATION {0}".format(shard_duration)
    if replication:
        query_string += " REPLICATION {0}".format(replication)
    if default is True:
        query_string += " DEFAULT"

    self.query(query_string, method="POST")
def drop_retention_policy(self, name, database=None):
"""Drop an existing retention policy for a database.
:param name: the name of the retention policy to drop
:type name: str
:param database: the database for which the retention policy is
dropped. Defaults to current client's database
:type database: str
"""
query_string = (
"DROP RETENTION POLICY {0} ON {1}"
).format(quote_ident(name), quote_ident(database or self._database))
self.query(query_string, method="POST")
def get_list_retention_policies(self, database=None):
"""Get the list of retention policies for a database.
:param database: the name of the database, defaults to the client's
current database
:type database: str
:returns: all retention policies for the database
:rtype: list of dictionaries
:Example:
::
>> ret_policies = client.get_list_retention_policies('my_db')
>> ret_policies
[{u'default': True,
u'duration': u'0',
u'name': u'default',
u'replicaN': 1}]
"""
if not (database or self._database):
raise InfluxDBClientError(
"get_list_retention_policies() requires a database as a "
"parameter or the client to be using a database")
rsp = self.query(
"SHOW RETENTION POLICIES ON {0}".format(
quote_ident(database or self._database))
)
return list(rsp.get_points())
def get_list_users(self):
"""Get the list of all users in InfluxDB.
:returns: all users in InfluxDB
:rtype: list of dictionaries
:Example:
::
>> users = client.get_list_users()
>> users
[{u'admin': True, u'user': u'user1'},
{u'admin': False, u'user': u'user2'},
{u'admin': False, u'user': u'user3'}]
"""
return list(self.query("SHOW USERS").get_points())
def create_user(self, username, password, admin=False):
"""Create a new user in InfluxDB.
:param username: the new username to create
:type username: str
:param password: the password for the new user
:type password: str
:param admin: whether the user should have cluster administration
privileges or not
:type admin: boolean
"""
text = "CREATE USER {0} WITH PASSWORD {1}".format(
quote_ident(username), quote_literal(password))
if admin:
text += ' WITH ALL PRIVILEGES'
self.query(text, method="POST")
def drop_user(self, username):
"""Drop a user from InfluxDB.
:param username: the username to drop
:type username: str
"""
text = "DROP USER {0}".format(quote_ident(username), method="POST")
self.query(text, method="POST")
def set_user_password(self, username, password):
"""Change the password of an existing user.
:param username: the username who's password is being changed
:type username: str
:param password: the new password for the user
:type password: str
"""
text = "SET PASSWORD FOR {0} = {1}".format(
quote_ident(username), quote_literal(password))
self.query(text)
def delete_series(self, database=None, measurement=None, tags=None):
"""Delete series from a database.
Series can be filtered by measurement and tags.
:param database: the database from which the series should be
deleted, defaults to client's current database
:type database: str
:param measurement: Delete all series from a measurement
:type measurement: str
:param tags: Delete all series that match given tags
:type tags: dict
"""
database = database or self._database
query_str = 'DROP SERIES'
if measurement:
query_str += ' FROM {0}'.format(quote_ident(measurement))
if tags:
tag_eq_list = ["{0}={1}".format(quote_ident(k), quote_literal(v))
for k, v in tags.items()]
query_str += ' WHERE ' + ' AND '.join(tag_eq_list)
self.query(query_str, database=database, method="POST")
def grant_admin_privileges(self, username):
"""Grant cluster administration privileges to a user.
:param username: the username to grant privileges to
:type username: str
.. note:: Only a cluster administrator can create/drop databases
and manage users.
"""
text = "GRANT ALL PRIVILEGES TO {0}".format(quote_ident(username))
self.query(text, method="POST")
def revoke_admin_privileges(self, username):
"""Revoke cluster administration privileges from a user.
:param username: the username to revoke privileges from
:type username: str
.. note:: Only a cluster administrator can create/ drop databases
and manage users.
"""
text = "REVOKE ALL PRIVILEGES FROM {0}".format(quote_ident(username))
self.query(text, method="POST")
def grant_privilege(self, privilege, database, username):
"""Grant a privilege on a database to a user.
:param privilege: the privilege to grant, one of 'read', 'write'
or 'all'. The string is case-insensitive
:type privilege: str
:param database: the database to grant the privilege on
:type database: str
:param username: the username to grant the privilege to
:type username: str
"""
text = "GRANT {0} ON {1} TO {2}".format(privilege,
quote_ident(database),
quote_ident(username))
self.query(text, method="POST")
def revoke_privilege(self, privilege, database, username):
"""Revoke a privilege on a database from a user.
:param privilege: the privilege to revoke, one of 'read', 'write'
or 'all'. The string is case-insensitive
:type privilege: str
:param database: the database to revoke the privilege on
:type database: str
:param username: the username to revoke the privilege from
:type username: str
"""
text = "REVOKE {0} ON {1} FROM {2}".format(privilege,
quote_ident(database),
quote_ident(username))
self.query(text, method="POST")
def get_list_privileges(self, username):
"""Get the list of all privileges granted to given user.
:param username: the username to get privileges of
:type username: str
:returns: all privileges granted to given user
:rtype: list of dictionaries
:Example:
::
>> privileges = client.get_list_privileges('user1')
>> privileges
[{u'privilege': u'WRITE', u'database': u'db1'},
{u'privilege': u'ALL PRIVILEGES', u'database': u'db2'},
{u'privilege': u'NO PRIVILEGES', u'database': u'db3'}]
"""
text = "SHOW GRANTS FOR {0}".format(quote_ident(username))
return list(self.query(text).get_points())
def send_packet(self, packet, protocol='json', time_precision=None):
"""Send an UDP packet.
:param packet: the packet to be sent
:type packet: (if protocol is 'json') dict
(if protocol is 'line') list of line protocol strings
:param protocol: protocol of input data, either 'json' or 'line'
:type protocol: str
:param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None
:type time_precision: str
"""
if protocol == 'json':
data = make_lines(packet, time_precision).encode('utf-8')
elif protocol == 'line':
data = ('\n'.join(packet) + '\n').encode('utf-8')
self.udp_socket.sendto(data, (self._host, self._udp_port))
def close(self):
"""Close http session."""
if isinstance(self._session, requests.Session):
self._session.close()
def _parse_dsn(dsn):
    """Parse data source name.

    Helper that splits the data source name given to the ``from_dsn``
    classmethod into keyword arguments for the client constructor.
    """
    parts = urlparse(dsn)
    init_args = {}

    scheme_bits = parts.scheme.split('+')
    if len(scheme_bits) == 1:
        modifier, scheme = None, scheme_bits[0]
    else:
        modifier, scheme = scheme_bits

    if scheme != 'influxdb':
        raise ValueError('Unknown scheme "{0}".'.format(scheme))

    if modifier:
        if modifier == 'udp':
            init_args['use_udp'] = True
        elif modifier == 'https':
            init_args['ssl'] = True
        else:
            raise ValueError('Unknown modifier "{0}".'.format(modifier))

    # Multiple comma-separated host:port pairs are allowed; credentials
    # are taken from the last pair, matching the original implementation.
    hosts = []
    for netloc in parts.netloc.split(','):
        parsed = _parse_netloc(netloc)
        hosts.append((parsed['host'], int(parsed['port'])))
    init_args['hosts'] = hosts
    init_args['username'] = parsed['username']
    init_args['password'] = parsed['password']

    if parts.path and len(parts.path) > 1:
        init_args['database'] = parts.path[1:]

    return init_args
def _parse_netloc(netloc):
info = urlparse("http://{0}".format(netloc))
return {'username': info.username or None,
'password': info.password or None,
'host': info.hostname or 'localhost',
'port': info.port or 8086}

View file

@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
"""DataFrame client for InfluxDB."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__all__ = ['DataFrameClient']
# Probe for pandas without keeping the module imported; the real client
# lives in _dataframe_client and needs pandas at import time.
try:
    import pandas
    del pandas
except ImportError as err:
    from .client import InfluxDBClient

    # Fallback stub: keeps `DataFrameClient` importable so that
    # `from influxdb import DataFrameClient` still works, but any attempt
    # to instantiate it reports the missing pandas dependency.
    class DataFrameClient(InfluxDBClient):
        """DataFrameClient default class instantiation."""

        # Captured as a class attribute because Python 3 unbinds `err`
        # when the except block ends; this keeps the message alive.
        err = err

        def __init__(self, *a, **kw):
            """Initialize the default DataFrameClient."""
            super(DataFrameClient, self).__init__()
            raise ImportError("DataFrameClient requires Pandas "
                              "which couldn't be imported: %s" % self.err)
else:
    # pandas is available: expose the real implementation.
    from ._dataframe_client import DataFrameClient

View file

@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
"""Exception handler for InfluxDBClient."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
class InfluxDBClientError(Exception):
    """Raised when an error occurs in the request."""

    def __init__(self, content, code=None):
        """Build the error from the response content and status code."""
        # Bytes payloads (raw HTTP bodies) are decoded for readability.
        if isinstance(content, type(b'')):
            content = content.decode('UTF-8', 'replace')
        message = content if code is None else "%s: %s" % (code, content)
        super(InfluxDBClientError, self).__init__(message)
        self.content = content
        self.code = code
class InfluxDBServerError(Exception):
    """Raised when a server error occurs."""

    def __init__(self, content):
        """Store the raw server response as the exception message."""
        super(InfluxDBServerError, self).__init__(content)

184
lib/influxdb/helper.py Normal file
View file

@ -0,0 +1,184 @@
# -*- coding: utf-8 -*-
"""Helper class for InfluxDB."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from collections import namedtuple, defaultdict
from datetime import datetime
from warnings import warn
import six
class SeriesHelper(object):
    """Subclass this helper eases writing data points in bulk.

    All data points are immutable, ensuring they do not get overwritten.
    Each subclass can write to its own database.
    The time series names can also be based on one or more defined fields.
    The field "time" can be specified when creating a point, and may be any of
    the time types supported by the client (i.e. str, datetime, int).
    If the time is not specified, the current system time (utc) will be used.

    Annotated example::

        class MySeriesHelper(SeriesHelper):
            class Meta:
                # Meta class stores time series helper configuration.
                series_name = 'events.stats.{server_name}'
                # Series name must be a string, curly brackets for dynamic use.
                fields = ['time', 'server_name']
                # Defines all the fields in this time series.
                ### Following attributes are optional. ###
                client = TestSeriesHelper.client
                # Client should be an instance of InfluxDBClient.
                :warning: Only used if autocommit is True.
                bulk_size = 5
                # Defines the number of data points to write simultaneously.
                # Only applicable if autocommit is True.
                autocommit = True
                # If True and no bulk_size, then will set bulk_size to 1.
    """

    # Becomes True once the subclass's Meta configuration has been read;
    # guards the one-time setup performed in __new__.
    __initialized__ = False

    def __new__(cls, *args, **kwargs):
        """Initialize class attributes for subsequent constructor calls.

        :note: *args and **kwargs are not explicitly used in this function,
        but needed for Python 2 compatibility.
        """
        if not cls.__initialized__:
            cls.__initialized__ = True
            try:
                _meta = getattr(cls, 'Meta')
            except AttributeError:
                raise AttributeError(
                    'Missing Meta class in {0}.'.format(
                        cls.__name__))
            # series_name, fields and tags are mandatory Meta attributes;
            # each is mirrored onto the helper class with a '_' prefix.
            for attr in ['series_name', 'fields', 'tags']:
                try:
                    setattr(cls, '_' + attr, getattr(_meta, attr))
                except AttributeError:
                    raise AttributeError(
                        'Missing {0} in {1} Meta class.'.format(
                            attr,
                            cls.__name__))
            cls._autocommit = getattr(_meta, 'autocommit', False)
            cls._client = getattr(_meta, 'client', None)
            if cls._autocommit and not cls._client:
                raise AttributeError(
                    'In {0}, autocommit is set to True, but no client is set.'
                    .format(cls.__name__))
            try:
                cls._bulk_size = getattr(_meta, 'bulk_size')
                if cls._bulk_size < 1 and cls._autocommit:
                    warn(
                        'Definition of bulk_size in {0} forced to 1, '
                        'was less than 1.'.format(cls.__name__))
                    cls._bulk_size = 1
            except AttributeError:
                # No bulk_size defined: only flush on an explicit commit().
                cls._bulk_size = -1
            else:
                if not cls._autocommit:
                    warn(
                        'Definition of bulk_size in {0} has no affect because'
                        ' autocommit is false.'.format(cls.__name__))
            # Buffered points, grouped by formatted series name.
            cls._datapoints = defaultdict(list)
            # 'time' is handled separately from the declared fields.
            if 'time' in cls._fields:
                cls._fields.remove('time')
            # Immutable point type: time + tags (required) + fields
            # (optional, defaulting to None).
            cls._type = namedtuple(cls.__name__,
                                   ['time'] + cls._tags + cls._fields)
            cls._type.__new__.__defaults__ = (None,) * len(cls._fields)
        return super(SeriesHelper, cls).__new__(cls)

    def __init__(self, **kw):
        """Call to constructor creates a new data point.

        :note: Data points written when `bulk_size` is reached per Helper.
        :warning: Data points are *immutable* (`namedtuples`).
        """
        cls = self.__class__
        timestamp = kw.pop('time', self._current_timestamp())
        tags = set(cls._tags)
        fields = set(cls._fields)
        keys = set(kw.keys())
        # all tags should be passed, and keys - tags should be a subset of keys
        if not(tags <= keys):
            raise NameError(
                'Expected arguments to contain all tags {0}, instead got {1}.'
                .format(cls._tags, kw.keys()))
        if not(keys - tags <= fields):
            raise NameError('Got arguments not in tags or fields: {0}'
                            .format(keys - tags - fields))
        cls._datapoints[cls._series_name.format(**kw)].append(
            cls._type(time=timestamp, **kw)
        )
        # Autocommit flushes as soon as the buffered total reaches bulk_size.
        if cls._autocommit and \
                sum(len(series) for series in cls._datapoints.values()) \
                >= cls._bulk_size:
            cls.commit()

    @classmethod
    def commit(cls, client=None):
        """Commit everything from datapoints via the client.

        :param client: InfluxDBClient instance for writing points to InfluxDB.
        :attention: any provided client will supersede the class client.
        :return: result of client.write_points.
        """
        if not client:
            client = cls._client
        rtn = client.write_points(cls._json_body_())
        # Buffered points are discarded regardless of the write result.
        cls._reset_()
        return rtn

    @classmethod
    def _json_body_(cls):
        """Return the JSON body of given datapoints.

        :return: JSON body of these datapoints.
        """
        json = []
        for series_name, data in six.iteritems(cls._datapoints):
            for point in data:
                json_point = {
                    "measurement": series_name,
                    "fields": {},
                    "tags": {},
                    "time": getattr(point, "time")
                }
                for field in cls._fields:
                    value = getattr(point, field)
                    # None-valued fields are omitted, not written as null.
                    if value is not None:
                        json_point['fields'][field] = value
                for tag in cls._tags:
                    json_point['tags'][tag] = getattr(point, tag)
                json.append(json_point)
        return json

    @classmethod
    def _reset_(cls):
        """Reset data storage."""
        cls._datapoints = defaultdict(list)

    @staticmethod
    def _current_timestamp():
        # Naive UTC timestamp, used when a point omits `time`.
        return datetime.utcnow()

View file

@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
"""Define the influxdb08 package."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .client import InfluxDBClient
from .dataframe_client import DataFrameClient
from .helper import SeriesHelper
__all__ = [
'InfluxDBClient',
'DataFrameClient',
'SeriesHelper',
]

View file

@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
"""Module to generate chunked JSON replies for influxdb08."""
#
# Author: Adrian Sampson <adrian@radbox.org>
# Source: https://gist.github.com/sampsyo/920215
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
def loads(s):
    """Generate a sequence of JSON values from a string.

    The input may contain several whitespace-separated JSON documents;
    each one is decoded and yielded in turn.

    :param s: string holding zero or more concatenated JSON values
    :raises ValueError: if a non-empty chunk cannot be decoded as JSON
    """
    _decoder = json.JSONDecoder()
    while s:
        s = s.strip()
        # BUG FIX: a trailing-whitespace remainder used to reach
        # raw_decode() as an empty string and raise; stop cleanly instead.
        if not s:
            break
        obj, pos = _decoder.raw_decode(s)
        if not pos:
            raise ValueError('no JSON object found at %i' % pos)
        yield obj
        s = s[pos:]

View file

@ -0,0 +1,843 @@
# -*- coding: utf-8 -*-
"""Python client for InfluxDB v0.8."""
import warnings
import json
import socket
import requests
import requests.exceptions
from six.moves import xrange
from six.moves.urllib.parse import urlparse
from influxdb import chunked_json
session = requests.Session()
class InfluxDBClientError(Exception):
    """Raised when an error occurs in the request."""

    def __init__(self, content, code=-1):
        """Initialize an InfluxDBClientError handler."""
        message = "{0}: {1}".format(code, content)
        super(InfluxDBClientError, self).__init__(message)
        self.content = content
        self.code = code
class InfluxDBClient(object):
"""Define the standard InfluxDBClient for influxdb v0.8.
The ``InfluxDBClient`` object holds information necessary to connect
to InfluxDB. Requests can be made to InfluxDB directly through the client.
:param host: hostname to connect to InfluxDB, defaults to 'localhost'
:type host: string
:param port: port to connect to InfluxDB, defaults to 'localhost'
:type port: int
:param username: user to connect, defaults to 'root'
:type username: string
:param password: password of the user, defaults to 'root'
:type password: string
:param database: database name to connect to, defaults is None
:type database: string
:param ssl: use https instead of http to connect to InfluxDB, defaults is
False
:type ssl: boolean
:param verify_ssl: verify SSL certificates for HTTPS requests, defaults is
False
:type verify_ssl: boolean
:param retries: number of retries your client will try before aborting,
defaults to 3. 0 indicates try until success
:type retries: int
:param timeout: number of seconds Requests will wait for your client to
establish a connection, defaults to None
:type timeout: int
:param use_udp: use UDP to connect to InfluxDB, defaults is False
:type use_udp: int
:param udp_port: UDP port to connect to InfluxDB, defaults is 4444
:type udp_port: int
"""
def __init__(self,
host='localhost',
port=8086,
username='root',
password='root',
database=None,
ssl=False,
verify_ssl=False,
timeout=None,
retries=3,
use_udp=False,
udp_port=4444):
"""Construct a new InfluxDBClient object."""
self._host = host
self._port = port
self._username = username
self._password = password
self._database = database
self._timeout = timeout
self._retries = retries
self._verify_ssl = verify_ssl
self._use_udp = use_udp
self._udp_port = udp_port
if use_udp:
self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self._scheme = "http"
if ssl is True:
self._scheme = "https"
self._baseurl = "{0}://{1}:{2}".format(
self._scheme,
self._host,
self._port)
self._headers = {
'Content-type': 'application/json',
'Accept': 'text/plain'}
@staticmethod
def from_dsn(dsn, **kwargs):
r"""Return an instaance of InfluxDBClient from given data source name.
Returns an instance of InfluxDBClient from the provided data source
name. Supported schemes are "influxdb", "https+influxdb",
"udp+influxdb". Parameters for the InfluxDBClient constructor may be
also be passed to this function.
Examples:
>> cli = InfluxDBClient.from_dsn('influxdb://username:password@\
... localhost:8086/databasename', timeout=5)
>> type(cli)
<class 'influxdb.client.InfluxDBClient'>
>> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\
... localhost:8086/databasename', timeout=5, udp_port=159)
>> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli))
http://localhost:8086 - True 159
:param dsn: data source name
:type dsn: string
:param **kwargs: additional parameters for InfluxDBClient.
:type **kwargs: dict
:note: parameters provided in **kwargs may override dsn parameters.
:note: when using "udp+influxdb" the specified port (if any) will be
used for the TCP connection; specify the udp port with the additional
udp_port parameter (cf. examples).
:raise ValueError: if the provided DSN has any unexpected value.
"""
init_args = {}
conn_params = urlparse(dsn)
scheme_info = conn_params.scheme.split('+')
if len(scheme_info) == 1:
scheme = scheme_info[0]
modifier = None
else:
modifier, scheme = scheme_info
if scheme != 'influxdb':
raise ValueError('Unknown scheme "{0}".'.format(scheme))
if modifier:
if modifier == 'udp':
init_args['use_udp'] = True
elif modifier == 'https':
init_args['ssl'] = True
else:
raise ValueError('Unknown modifier "{0}".'.format(modifier))
if conn_params.hostname:
init_args['host'] = conn_params.hostname
if conn_params.port:
init_args['port'] = conn_params.port
if conn_params.username:
init_args['username'] = conn_params.username
if conn_params.password:
init_args['password'] = conn_params.password
if conn_params.path and len(conn_params.path) > 1:
init_args['database'] = conn_params.path[1:]
init_args.update(kwargs)
return InfluxDBClient(**init_args)
# Change member variables
def switch_database(self, database):
"""Change client database.
:param database: the new database name to switch to
:type database: string
"""
self._database = database
def switch_db(self, database):
"""Change client database.
DEPRECATED.
"""
warnings.warn(
"switch_db is deprecated, and will be removed "
"in future versions. Please use "
"``InfluxDBClient.switch_database(database)`` instead.",
FutureWarning)
return self.switch_database(database)
def switch_user(self, username, password):
"""Change client username.
:param username: the new username to switch to
:type username: string
:param password: the new password to switch to
:type password: string
"""
self._username = username
self._password = password
    def request(self, url, method='GET', params=None, data=None,
                expected_response_code=200):
        """Make a http request to API.

        :param url: endpoint path, appended to the client's base URL
        :param method: HTTP verb to use, defaults to 'GET'
        :param params: query-string parameters; credentials are added here
        :param data: request body; non-str bodies are JSON-encoded
        :param expected_response_code: status code treated as success
        :raises InfluxDBClientError: on any other status code
        """
        url = "{0}/{1}".format(self._baseurl, url)
        if params is None:
            params = {}
        # InfluxDB 0.8 authenticates via u/p query parameters, not headers.
        auth = {
            'u': self._username,
            'p': self._password
        }
        params.update(auth)
        if data is not None and not isinstance(data, str):
            data = json.dumps(data)
        retry = True
        _try = 0
        # Try to send the request more than once by default (see #103)
        while retry:
            try:
                response = session.request(
                    method=method,
                    url=url,
                    params=params,
                    data=data,
                    headers=self._headers,
                    verify=self._verify_ssl,
                    timeout=self._timeout
                )
                break
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.Timeout):
                _try += 1
                # retries == 0 means "keep retrying until success".
                if self._retries != 0:
                    retry = _try < self._retries
        else:
            # while/else: reached only when the loop exhausts its retries
            # without a successful `break` above.
            raise requests.exceptions.ConnectionError
        if response.status_code == expected_response_code:
            return response
        else:
            raise InfluxDBClientError(response.content, response.status_code)
def write(self, data):
"""Provide as convenience for influxdb v0.9.0, this may change."""
self.request(
url="write",
method='POST',
params=None,
data=data,
expected_response_code=200
)
return True
# Writing Data
#
# Assuming you have a database named foo_production you can write data
# by doing a POST to /db/foo_production/series?u=some_user&p=some_password
# with a JSON body of points.
def write_points(self, data, time_precision='s', *args, **kwargs):
"""Write to multiple time series names.
An example data blob is:
data = [
{
"points": [
[
12
]
],
"name": "cpu_load_short",
"columns": [
"value"
]
}
]
:param data: A list of dicts in InfluxDB 0.8.x data format.
:param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
or 'u'.
:param batch_size: [Optional] Value to write the points in batches
instead of all at one time. Useful for when doing data dumps from
one database to another or when doing a massive write operation
:type batch_size: int
"""
def list_chunks(l, n):
"""Yield successive n-sized chunks from l."""
for i in xrange(0, len(l), n):
yield l[i:i + n]
batch_size = kwargs.get('batch_size')
if batch_size and batch_size > 0:
for item in data:
name = item.get('name')
columns = item.get('columns')
point_list = item.get('points', [])
for batch in list_chunks(point_list, batch_size):
item = [{
"points": batch,
"name": name,
"columns": columns
}]
self._write_points(
data=item,
time_precision=time_precision)
return True
return self._write_points(data=data,
time_precision=time_precision)
def write_points_with_precision(self, data, time_precision='s'):
"""Write to multiple time series names.
DEPRECATED.
"""
warnings.warn(
"write_points_with_precision is deprecated, and will be removed "
"in future versions. Please use "
"``InfluxDBClient.write_points(time_precision='..')`` instead.",
FutureWarning)
return self._write_points(data=data, time_precision=time_precision)
def _write_points(self, data, time_precision):
if time_precision not in ['s', 'm', 'ms', 'u']:
raise Exception(
"Invalid time precision is given. (use 's', 'm', 'ms' or 'u')")
if self._use_udp and time_precision != 's':
raise Exception(
"InfluxDB only supports seconds precision for udp writes"
)
url = "db/{0}/series".format(self._database)
params = {
'time_precision': time_precision
}
if self._use_udp:
self.send_packet(data)
else:
self.request(
url=url,
method='POST',
params=params,
data=data,
expected_response_code=200
)
return True
# One Time Deletes
def delete_points(self, name):
"""Delete an entire series."""
url = "db/{0}/series/{1}".format(self._database, name)
self.request(
url=url,
method='DELETE',
expected_response_code=204
)
return True
# Regularly Scheduled Deletes
def create_scheduled_delete(self, json_body):
"""Create schedule delete from database.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
"""
raise NotImplementedError()
# get list of deletes
# curl http://localhost:8086/db/site_dev/scheduled_deletes
#
# remove a regularly scheduled delete
# curl -X DELETE http://localhost:8086/db/site_dev/scheduled_deletes/:id
def get_list_scheduled_delete(self):
"""Get list of scheduled deletes.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
"""
raise NotImplementedError()
def remove_scheduled_delete(self, delete_id):
"""Remove scheduled delete.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
"""
raise NotImplementedError()
    def query(self, query, time_precision='s', chunked=False):
        """Query data from the influxdb v0.8 database.

        Thin public wrapper around :meth:`_query`.

        :param query: the InfluxQL query string to execute
        :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
            or 'u'.
        :param chunked: [Optional, default=False] True if the data shall be
            retrieved in chunks, False otherwise.
        """
        return self._query(query, time_precision=time_precision,
                           chunked=chunked)
# Querying Data
#
# GET db/:name/series. It takes five parameters
def _query(self, query, time_precision='s', chunked=False):
if time_precision not in ['s', 'm', 'ms', 'u']:
raise Exception(
"Invalid time precision is given. (use 's', 'm', 'ms' or 'u')")
if chunked is True:
chunked_param = 'true'
else:
chunked_param = 'false'
# Build the URL of the series to query
url = "db/{0}/series".format(self._database)
params = {
'q': query,
'time_precision': time_precision,
'chunked': chunked_param
}
response = self.request(
url=url,
method='GET',
params=params,
expected_response_code=200
)
if chunked:
try:
decoded = chunked_json.loads(response.content.decode())
except UnicodeDecodeError:
decoded = chunked_json.loads(response.content.decode('utf-8'))
return list(decoded)
return response.json()
# Creating and Dropping Databases
#
# ### create a database
# curl -X POST http://localhost:8086/db -d '{"name": "site_development"}'
#
# ### drop a database
# curl -X DELETE http://localhost:8086/db/site_development
def create_database(self, database):
"""Create a database on the InfluxDB server.
:param database: the name of the database to create
:type database: string
:rtype: boolean
"""
url = "db"
data = {'name': database}
self.request(
url=url,
method='POST',
data=data,
expected_response_code=201
)
return True
def delete_database(self, database):
"""Drop a database on the InfluxDB server.
:param database: the name of the database to delete
:type database: string
:rtype: boolean
"""
url = "db/{0}".format(database)
self.request(
url=url,
method='DELETE',
expected_response_code=204
)
return True
# ### get list of databases
# curl -X GET http://localhost:8086/db
def get_list_database(self):
"""Get the list of databases."""
url = "db"
response = self.request(
url=url,
method='GET',
expected_response_code=200
)
return response.json()
def get_database_list(self):
"""Get the list of databases.
DEPRECATED.
"""
warnings.warn(
"get_database_list is deprecated, and will be removed "
"in future versions. Please use "
"``InfluxDBClient.get_list_database`` instead.",
FutureWarning)
return self.get_list_database()
def delete_series(self, series):
"""Drop a series on the InfluxDB server.
:param series: the name of the series to delete
:type series: string
:rtype: boolean
"""
url = "db/{0}/series/{1}".format(
self._database,
series
)
self.request(
url=url,
method='DELETE',
expected_response_code=204
)
return True
def get_list_series(self):
"""Get a list of all time series in a database."""
response = self._query('list series')
return [series[1] for series in response[0]['points']]
def get_list_continuous_queries(self):
"""Get a list of continuous queries."""
response = self._query('list continuous queries')
return [query[2] for query in response[0]['points']]
# Security
# get list of cluster admins
# curl http://localhost:8086/cluster_admins?u=root&p=root
# add cluster admin
# curl -X POST http://localhost:8086/cluster_admins?u=root&p=root \
# -d '{"name": "paul", "password": "i write teh docz"}'
# update cluster admin password
# curl -X POST http://localhost:8086/cluster_admins/paul?u=root&p=root \
# -d '{"password": "new pass"}'
# delete cluster admin
# curl -X DELETE http://localhost:8086/cluster_admins/paul?u=root&p=root
# Database admins, with a database name of site_dev
# get list of database admins
# curl http://localhost:8086/db/site_dev/admins?u=root&p=root
# add database admin
# curl -X POST http://localhost:8086/db/site_dev/admins?u=root&p=root \
# -d '{"name": "paul", "password": "i write teh docz"}'
# update database admin password
# curl -X POST http://localhost:8086/db/site_dev/admins/paul?u=root&p=root\
# -d '{"password": "new pass"}'
# delete database admin
# curl -X DELETE \
# http://localhost:8086/db/site_dev/admins/paul?u=root&p=root
def get_list_cluster_admins(self):
"""Get list of cluster admins."""
response = self.request(
url="cluster_admins",
method='GET',
expected_response_code=200
)
return response.json()
def add_cluster_admin(self, new_username, new_password):
"""Add cluster admin."""
data = {
'name': new_username,
'password': new_password
}
self.request(
url="cluster_admins",
method='POST',
data=data,
expected_response_code=200
)
return True
def update_cluster_admin_password(self, username, new_password):
"""Update cluster admin password."""
url = "cluster_admins/{0}".format(username)
data = {
'password': new_password
}
self.request(
url=url,
method='POST',
data=data,
expected_response_code=200
)
return True
def delete_cluster_admin(self, username):
"""Delete cluster admin."""
url = "cluster_admins/{0}".format(username)
self.request(
url=url,
method='DELETE',
expected_response_code=200
)
return True
    def set_database_admin(self, username):
        """Set user as database admin.

        :param username: the user to promote to database admin
        """
        return self.alter_database_admin(username, True)
    def unset_database_admin(self, username):
        """Unset user as database admin.

        :param username: the user to demote from database admin
        """
        return self.alter_database_admin(username, False)
def alter_database_admin(self, username, is_admin):
"""Alter the database admin."""
url = "db/{0}/users/{1}".format(self._database, username)
data = {'admin': is_admin}
self.request(
url=url,
method='POST',
data=data,
expected_response_code=200
)
return True
def get_list_database_admins(self):
"""Get list of database admins.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
"""
raise NotImplementedError()
def add_database_admin(self, new_username, new_password):
"""Add cluster admin.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
"""
raise NotImplementedError()
def update_database_admin_password(self, username, new_password):
"""Update database admin password.
2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
but it is documented in http://influxdb.org/docs/api/http.html.
See also: src/api/http/api.go:l57
"""
raise NotImplementedError()
    def delete_database_admin(self, username):
        """Delete database admin.

        :raises NotImplementedError: always; the server does not expose
            this endpoint yet.

        2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
        but it is documented in http://influxdb.org/docs/api/http.html.
        See also: src/api/http/api.go:l57
        """
        raise NotImplementedError()
###
# Limiting User Access
# Database users
# get list of database users
# curl http://localhost:8086/db/site_dev/users?u=root&p=root
# add database user
# curl -X POST http://localhost:8086/db/site_dev/users?u=root&p=root \
# -d '{"name": "paul", "password": "i write teh docz"}'
# update database user password
# curl -X POST http://localhost:8086/db/site_dev/users/paul?u=root&p=root \
# -d '{"password": "new pass"}'
# delete database user
# curl -X DELETE http://localhost:8086/db/site_dev/users/paul?u=root&p=root
def get_database_users(self):
"""Get list of database users."""
url = "db/{0}/users".format(self._database)
response = self.request(
url=url,
method='GET',
expected_response_code=200
)
return response.json()
def add_database_user(self, new_username, new_password, permissions=None):
"""Add database user.
:param permissions: A ``(readFrom, writeTo)`` tuple
"""
url = "db/{0}/users".format(self._database)
data = {
'name': new_username,
'password': new_password
}
if permissions:
try:
data['readFrom'], data['writeTo'] = permissions
except (ValueError, TypeError):
raise TypeError(
"'permissions' must be (readFrom, writeTo) tuple"
)
self.request(
url=url,
method='POST',
data=data,
expected_response_code=200
)
return True
def update_database_user_password(self, username, new_password):
"""Update password."""
return self.alter_database_user(username, new_password)
def alter_database_user(self, username, password=None, permissions=None):
"""Alter a database user and/or their permissions.
:param permissions: A ``(readFrom, writeTo)`` tuple
:raise TypeError: if permissions cannot be read.
:raise ValueError: if neither password nor permissions provided.
"""
url = "db/{0}/users/{1}".format(self._database, username)
if not password and not permissions:
raise ValueError("Nothing to alter for user {0}.".format(username))
data = {}
if password:
data['password'] = password
if permissions:
try:
data['readFrom'], data['writeTo'] = permissions
except (ValueError, TypeError):
raise TypeError(
"'permissions' must be (readFrom, writeTo) tuple"
)
self.request(
url=url,
method='POST',
data=data,
expected_response_code=200
)
if username == self._username:
self._password = password
return True
def delete_database_user(self, username):
"""Delete database user."""
url = "db/{0}/users/{1}".format(self._database, username)
self.request(
url=url,
method='DELETE',
expected_response_code=200
)
return True
# update the user by POSTing to db/site_dev/users/paul
    def update_permission(self, username, json_body):
        """Update read/write permission.

        :raises NotImplementedError: always; the server does not expose
            this endpoint yet.

        2013-11-08: This endpoint has not been implemented yet in ver0.0.8,
        but it is documented in http://influxdb.org/docs/api/http.html.
        See also: src/api/http/api.go:l57
        """
        raise NotImplementedError()
    def send_packet(self, packet):
        """Send a UDP packet along the wire.

        :param packet: a JSON-serializable payload; it is serialized and
            sent to ``(self._host, self._udp_port)``.

        UDP is fire-and-forget: no response is read and delivery is not
        confirmed.
        """
        data = json.dumps(packet)
        byte = data.encode('utf-8')
        self.udp_socket.sendto(byte, (self._host, self._udp_port))

View file

@ -0,0 +1,177 @@
# -*- coding: utf-8 -*-
"""DataFrame client for InfluxDB v0.8."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import warnings
from .client import InfluxDBClient
class DataFrameClient(InfluxDBClient):
    """Primary definition of the DataFrameClient for v0.8.

    The ``DataFrameClient`` object holds information necessary to connect
    to InfluxDB. Requests can be made to InfluxDB directly through the client.
    The client reads and writes from pandas DataFrames.
    """

    def __init__(self, ignore_nan=True, *args, **kwargs):
        """Initialize an instance of the DataFrameClient.

        :param ignore_nan: if True, NaN entries are replaced by ``None``
            (JSON ``null``) when points are serialized.
        """
        super(DataFrameClient, self).__init__(*args, **kwargs)
        try:
            # Imported lazily (and published as a module global) so the plain
            # InfluxDBClient keeps working when pandas is not installed.
            global pd
            import pandas as pd
        except ImportError as ex:
            raise ImportError('DataFrameClient requires Pandas, '
                              '"{ex}" problem importing'.format(ex=str(ex)))
        self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00')
        self.ignore_nan = ignore_nan

    def write_points(self, data, *args, **kwargs):
        """Write to multiple time series names.

        :param data: A dictionary mapping series names to pandas DataFrames
        :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
            or 'u'.
        :param batch_size: [Optional] Value to write the points in batches
            instead of all at one time. Useful for when doing data dumps from
            one database to another or when doing a massive write operation
        :type batch_size: int
        """
        batch_size = kwargs.get('batch_size')
        time_precision = kwargs.get('time_precision', 's')
        if batch_size:
            kwargs.pop('batch_size')  # don't hand over to InfluxDBClient
            for key, data_frame in data.items():
                number_batches = int(math.ceil(
                    len(data_frame) / float(batch_size)))
                for batch in range(number_batches):
                    start_index = batch * batch_size
                    end_index = (batch + 1) * batch_size
                    outdata = [
                        self._convert_dataframe_to_json(
                            name=key,
                            dataframe=data_frame
                            .iloc[start_index:end_index].copy(),
                            time_precision=time_precision)]
                    InfluxDBClient.write_points(self, outdata,
                                                *args, **kwargs)
            return True
        outdata = [
            self._convert_dataframe_to_json(name=key, dataframe=dataframe,
                                            time_precision=time_precision)
            for key, dataframe in data.items()]
        return InfluxDBClient.write_points(self, outdata, *args, **kwargs)

    def write_points_with_precision(self, data, time_precision='s'):
        """Write to multiple time series names.

        DEPRECATED: use ``write_points(..., time_precision=...)`` instead.
        """
        warnings.warn(
            "write_points_with_precision is deprecated, and will be removed "
            "in future versions. Please use "
            "``DataFrameClient.write_points(time_precision='..')`` instead.",
            FutureWarning)
        # BUG FIX: forward the caller's precision; previously this was
        # hard-coded to 's', silently ignoring the argument.
        return self.write_points(data, time_precision=time_precision)

    def query(self, query, time_precision='s', chunked=False):
        """Query data into DataFrames.

        Returns a DataFrame for a single time series and a map for multiple
        time series with the time series as value and its name as key.

        :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms'
            or 'u'.
        :param chunked: [Optional, default=False] True if the data shall be
            retrieved in chunks, False otherwise.
        """
        result = InfluxDBClient.query(self, query=query,
                                      time_precision=time_precision,
                                      chunked=chunked)
        if len(result) == 0:
            return result
        elif len(result) == 1:
            return self._to_dataframe(result[0], time_precision)
        else:
            ret = {}
            for time_series in result:
                ret[time_series['name']] = self._to_dataframe(time_series,
                                                              time_precision)
            return ret

    @staticmethod
    def _to_dataframe(json_result, time_precision):
        """Build a time-indexed DataFrame from one JSON series result."""
        dataframe = pd.DataFrame(data=json_result['points'],
                                 columns=json_result['columns'])
        # Stable chronological ordering; sequence_number breaks ties when
        # the server returns it.
        if 'sequence_number' in dataframe.keys():
            dataframe.sort_values(['time', 'sequence_number'], inplace=True)
        else:
            dataframe.sort_values(['time'], inplace=True)
        pandas_time_unit = time_precision
        if time_precision == 'm':
            pandas_time_unit = 'ms'
        elif time_precision == 'u':
            pandas_time_unit = 'us'
        # The 'time' column becomes a UTC DatetimeIndex and is dropped.
        dataframe.index = pd.to_datetime(list(dataframe['time']),
                                         unit=pandas_time_unit,
                                         utc=True)
        del dataframe['time']
        return dataframe

    def _convert_dataframe_to_json(self, dataframe, name, time_precision='s'):
        """Serialize ``dataframe`` into the v0.8 JSON write format."""
        if not isinstance(dataframe, pd.DataFrame):
            raise TypeError('Must be DataFrame, but type was: {0}.'
                            .format(type(dataframe)))
        if not (isinstance(dataframe.index, pd.PeriodIndex) or
                isinstance(dataframe.index, pd.DatetimeIndex)):
            raise TypeError('Must be DataFrame with DatetimeIndex or \
                            PeriodIndex.')
        if isinstance(dataframe.index, pd.PeriodIndex):
            dataframe.index = dataframe.index.to_timestamp()
        else:
            dataframe.index = pd.to_datetime(dataframe.index)
        # Naive timestamps are interpreted as UTC.
        if dataframe.index.tzinfo is None:
            dataframe.index = dataframe.index.tz_localize('UTC')
        dataframe['time'] = [self._datetime_to_epoch(dt, time_precision)
                             for dt in dataframe.index]
        data = {'name': name,
                'columns': [str(column) for column in dataframe.columns],
                'points': [self._convert_array(x) for x in dataframe.values]}
        return data

    def _convert_array(self, array):
        """Convert one row, optionally mapping NaN entries to None."""
        try:
            global np
            import numpy as np
        except ImportError as ex:
            raise ImportError('DataFrameClient requires Numpy, '
                              '"{ex}" problem importing'.format(ex=str(ex)))
        if self.ignore_nan:
            number_types = (int, float, np.number)
            # Only all-numeric rows can be tested with np.isnan; mixed rows
            # short-circuit to False and are passed through unchanged.
            condition = (all(isinstance(el, number_types) for el in array) and
                         np.isnan(array))
            return list(np.where(condition, None, array))
        return list(array)

    def _datetime_to_epoch(self, datetime, time_precision='s'):
        """Convert a tz-aware datetime to an epoch in ``time_precision``.

        :raise ValueError: for an unsupported precision.
        """
        seconds = (datetime - self.EPOCH).total_seconds()
        if time_precision == 's':
            return seconds
        elif time_precision == 'm' or time_precision == 'ms':
            return seconds * 1000
        elif time_precision == 'u':
            return seconds * 1000000
        # BUG FIX: previously fell off the end and returned None silently
        # for any other precision string.
        raise ValueError(time_precision)

View file

@ -0,0 +1,153 @@
# -*- coding: utf-8 -*-
"""Helper class for InfluxDB for v0.8."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from collections import namedtuple, defaultdict
from warnings import warn
import six
class SeriesHelper(object):
    """Define the SeriesHelper object for InfluxDB v0.8.

    Subclassing this helper eases writing data points in bulk.
    All data points are immutable, ensuring they do not get overwritten.
    Each subclass can write to its own database.
    The time series names can also be based on one or more defined fields.

    Annotated example::

        class MySeriesHelper(SeriesHelper):
            class Meta:
                # Meta class stores time series helper configuration.
                series_name = 'events.stats.{server_name}'
                # Series name must be a string, curly brackets for dynamic use.
                fields = ['time', 'server_name']
                # Defines all the fields in this time series.
                ### Following attributes are optional. ###
                client = TestSeriesHelper.client
                # Client should be an instance of InfluxDBClient.
                :warning: Only used if autocommit is True.
                bulk_size = 5
                # Defines the number of data points to write simultaneously.
                # Only applicable if autocommit is True.
                autocommit = True
                # If True and no bulk_size, then will set bulk_size to 1.
    """

    # One-shot guard: the Meta configuration below is copied onto the
    # subclass only on the first instantiation of that subclass.
    __initialized__ = False
    def __new__(cls, *args, **kwargs):
        """Initialize class attributes for subsequent constructor calls.

        :note: *args and **kwargs are not explicitly used in this function,
            but needed for Python 2 compatibility.
        """
        if not cls.__initialized__:
            cls.__initialized__ = True
            # Required configuration: the nested Meta class itself ...
            try:
                _meta = getattr(cls, 'Meta')
            except AttributeError:
                raise AttributeError(
                    'Missing Meta class in {0}.'.format(
                        cls.__name__))
            # ... and its mandatory attributes, copied to cls._series_name
            # and cls._fields.
            for attr in ['series_name', 'fields']:
                try:
                    setattr(cls, '_' + attr, getattr(_meta, attr))
                except AttributeError:
                    raise AttributeError(
                        'Missing {0} in {1} Meta class.'.format(
                            attr,
                            cls.__name__))
            cls._autocommit = getattr(_meta, 'autocommit', False)
            cls._client = getattr(_meta, 'client', None)
            if cls._autocommit and not cls._client:
                raise AttributeError(
                    'In {0}, autocommit is set to True, but no client is set.'
                    .format(cls.__name__))
            try:
                cls._bulk_size = getattr(_meta, 'bulk_size')
                if cls._bulk_size < 1 and cls._autocommit:
                    warn(
                        'Definition of bulk_size in {0} forced to 1, '
                        'was less than 1.'.format(cls.__name__))
                    cls._bulk_size = 1
            except AttributeError:
                # No bulk_size configured: -1 means "never auto-flush by
                # count" unless autocommit triggers on every point.
                cls._bulk_size = -1
            else:
                if not cls._autocommit:
                    warn(
                        'Definition of bulk_size in {0} has no affect because'
                        ' autocommit is false.'.format(cls.__name__))
            # Pending points, grouped by the formatted series name; each
            # point is an immutable namedtuple built from Meta.fields.
            cls._datapoints = defaultdict(list)
            cls._type = namedtuple(cls.__name__, cls._fields)
        return super(SeriesHelper, cls).__new__(cls)
    def __init__(self, **kw):
        """Create a new data point.

        All fields must be present.

        :note: Data points written when `bulk_size` is reached per Helper.
        :warning: Data points are *immutable* (`namedtuples`).
        """
        cls = self.__class__
        # Every declared field must be supplied, no more and no fewer.
        if sorted(cls._fields) != sorted(kw.keys()):
            raise NameError(
                'Expected {0}, got {1}.'.format(
                    cls._fields,
                    kw.keys()))
        cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw))
        # Auto-flush once the total pending count across all series reaches
        # the configured bulk size.
        if cls._autocommit and \
                sum(len(series) for series in cls._datapoints.values()) \
                >= cls._bulk_size:
            cls.commit()
    @classmethod
    def commit(cls, client=None):
        """Commit everything from datapoints via the client.

        :param client: InfluxDBClient instance for writing points to InfluxDB.
        :attention: any provided client will supersede the class client.
        :return: result of client.write_points.
        """
        if not client:
            client = cls._client
        rtn = client.write_points(cls._json_body_())
        # Pending points are dropped regardless of the write result.
        cls._reset_()
        return rtn
    @classmethod
    def _json_body_(cls):
        """Return JSON body of the datapoints.

        :return: JSON body of the datapoints.
        """
        json = []
        for series_name, data in six.iteritems(cls._datapoints):
            json.append({'name': series_name,
                         'columns': cls._fields,
                         'points': [[getattr(point, k) for k in cls._fields]
                                    for point in data]
                         })
        return json
    @classmethod
    def _reset_(cls):
        """Reset data storage."""
        cls._datapoints = defaultdict(list)

View file

@ -0,0 +1,172 @@
# -*- coding: utf-8 -*-
"""Define the line_protocol handler."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime
from numbers import Integral
from pytz import UTC
from dateutil.parser import parse
from six import iteritems, binary_type, text_type, integer_types, PY2
EPOCH = UTC.localize(datetime.utcfromtimestamp(0))
def _convert_timestamp(timestamp, precision=None):
if isinstance(timestamp, Integral):
return timestamp # assume precision is correct if timestamp is int
if isinstance(_get_unicode(timestamp), text_type):
timestamp = parse(timestamp)
if isinstance(timestamp, datetime):
if not timestamp.tzinfo:
timestamp = UTC.localize(timestamp)
ns = (timestamp - EPOCH).total_seconds() * 1e9
if precision is None or precision == 'n':
return ns
elif precision == 'u':
return ns / 1e3
elif precision == 'ms':
return ns / 1e6
elif precision == 's':
return ns / 1e9
elif precision == 'm':
return ns / 1e9 / 60
elif precision == 'h':
return ns / 1e9 / 3600
raise ValueError(timestamp)
def _escape_tag(tag):
    """Backslash-escape line-protocol special characters in a tag/key.

    Backslashes are doubled first so later escapes are not re-escaped.
    """
    tag = _get_unicode(tag, force=True)
    for special in ('\\', ' ', ',', '='):
        tag = tag.replace(special, '\\' + special)
    return tag
def _escape_tag_value(value):
    """Escape a tag value, padding a trailing backslash with a space.

    A bare trailing backslash would otherwise escape the field separator
    that follows the tag set on the wire.
    """
    escaped = _escape_tag(value)
    return escaped + ' ' if escaped.endswith('\\') else escaped
def quote_ident(value):
    """Return ``value`` as a double-quoted, escaped InfluxQL identifier."""
    escaped = (value
               .replace("\\", "\\\\")
               .replace("\"", "\\\"")
               .replace("\n", "\\n"))
    return '"{}"'.format(escaped)
def quote_literal(value):
    """Return ``value`` as a single-quoted, escaped InfluxQL literal."""
    escaped = value.replace("\\", "\\\\").replace("'", "\\'")
    return "'{}'".format(escaped)
def _is_float(value):
try:
float(value)
except (TypeError, ValueError):
return False
return True
def _escape_value(value):
    """Render a field value in line-protocol syntax.

    Strings are double-quoted, integers get the ``i`` suffix, floats use
    ``repr``, and anything else (including bools) is stringified as-is.
    """
    value = _get_unicode(value)
    if isinstance(value, text_type) and value != '':
        return quote_ident(value)
    if isinstance(value, integer_types) and not isinstance(value, bool):
        return str(value) + 'i'
    if _is_float(value):
        return repr(value)
    return str(value)
def _get_unicode(data, force=False):
    """Try to return a text aka unicode object from the given data.

    Bytes are decoded as UTF-8 and ``None`` becomes ``''``; any other type
    is converted to text only when ``force`` is set, else returned as-is.
    """
    if isinstance(data, binary_type):
        return data.decode('utf-8')
    if data is None:
        return ''
    if force:
        return unicode(data) if PY2 else str(data)
    return data
def make_lines(data, precision=None):
    """Extract points from given dict.

    Extracts the points from the given dict and returns a Unicode string
    matching the line protocol introduced in InfluxDB 0.9.0.

    :param data: dict with a 'points' list; optional top-level 'tags' and
        'measurement' act as defaults for every point.
    :param precision: passed through to ``_convert_timestamp`` for the
        optional per-point 'time' value.
    """
    lines = []
    static_tags = data.get('tags')
    for point in data['points']:
        elements = []
        # add measurement name
        measurement = _escape_tag(_get_unicode(
            point.get('measurement', data.get('measurement'))))
        key_values = [measurement]
        # add tags
        if static_tags:
            tags = dict(static_tags)  # make a copy, since we'll modify
            tags.update(point.get('tags') or {})
        else:
            tags = point.get('tags') or {}
        # tags should be sorted client-side to take load off server
        for tag_key, tag_value in sorted(iteritems(tags)):
            key = _escape_tag(tag_key)
            value = _escape_tag_value(tag_value)
            # Tags whose key or value escape to '' are dropped entirely.
            if key != '' and value != '':
                key_values.append(key + "=" + value)
        elements.append(','.join(key_values))
        # add fields
        field_values = []
        for field_key, field_value in sorted(iteritems(point['fields'])):
            key = _escape_tag(field_key)
            value = _escape_value(field_value)
            if key != '' and value != '':
                field_values.append(key + "=" + value)
        elements.append(','.join(field_values))
        # add timestamp
        if 'time' in point:
            timestamp = _get_unicode(str(int(
                _convert_timestamp(point['time'], precision))))
            elements.append(timestamp)
        line = ' '.join(elements)
        lines.append(line)
    return '\n'.join(lines) + '\n'

206
lib/influxdb/resultset.py Normal file
View file

@ -0,0 +1,206 @@
# -*- coding: utf-8 -*-
"""Module to prepare the resultset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import warnings
from influxdb.exceptions import InfluxDBClientError
_sentinel = object()
class ResultSet(object):
    """A wrapper around a single InfluxDB query result."""
    def __init__(self, series, raise_errors=True):
        """Initialize the ResultSet.

        :param series: the raw result dict as returned by the server.
        :param raise_errors: when True, a server-side 'error' entry raises
            ``InfluxDBClientError`` immediately.
        """
        self._raw = series
        self._error = self._raw.get('error', None)
        if self.error is not None and raise_errors is True:
            raise InfluxDBClientError(self.error)
    @property
    def raw(self):
        """Raw JSON from InfluxDB."""
        return self._raw
    @raw.setter
    def raw(self, value):
        self._raw = value
    @property
    def error(self):
        """Error returned by InfluxDB."""
        return self._error
    def __getitem__(self, key):
        """Retrieve the series name or specific set based on key.

        :param key: Either a series name, or a tags_dict, or
            a 2-tuple(series_name, tags_dict).
            If the series name is None (or not given) then any serie
            matching the eventual given tags will be given its points
            one after the other.
            To get the points of every series in this resultset then
            you have to provide None as key.
        :return: A generator yielding `Point`s matching the given key.

        NB: The order in which the points are yielded is actually undefined
        but it might change..
        """
        warnings.warn(
            ("ResultSet's ``__getitem__`` method will be deprecated. Use"
             "``get_points`` instead."),
            DeprecationWarning
        )
        # Normalize the three accepted key shapes into (name, tags).
        if isinstance(key, tuple):
            if len(key) != 2:
                raise TypeError('only 2-tuples allowed')
            name = key[0]
            tags = key[1]
            if not isinstance(tags, dict) and tags is not None:
                raise TypeError('tags should be a dict')
        elif isinstance(key, dict):
            name = None
            tags = key
        else:
            name = key
            tags = None
        return self.get_points(name, tags)
    def get_points(self, measurement=None, tags=None):
        """Return a generator for all the points that match the given filters.

        :param measurement: The measurement name
        :type measurement: str
        :param tags: Tags to look for
        :type tags: dict
        :return: Points generator
        """
        # Raise error if measurement is not str or bytes
        if not isinstance(measurement,
                          (bytes, type(b''.decode()), type(None))):
            raise TypeError('measurement must be an str or None')
        for series in self._get_series():
            series_name = series.get('measurement',
                                     series.get('name', 'results'))
            if series_name is None:
                # this is a "system" query or a query which
                # doesn't return a name attribute.
                # like 'show retention policies' ..
                if tags is None:
                    for item in self._get_points_for_series(series):
                        yield item
            elif measurement in (None, series_name):
                # by default if no tags was provided then
                # we will matches every returned series
                series_tags = series.get('tags', {})
                for item in self._get_points_for_series(series):
                    # a point matches if the filter tags appear either in
                    # the point itself or in the series-level tag set
                    if tags is None or \
                            self._tag_matches(item, tags) or \
                            self._tag_matches(series_tags, tags):
                        yield item
    def __repr__(self):
        """Representation of ResultSet object."""
        items = []
        for item in self.items():
            items.append("'%s': %s" % (item[0], list(item[1])))
        return "ResultSet({%s})" % ", ".join(items)
    def __iter__(self):
        """Yield one dict instance per series result."""
        for key in self.keys():
            yield list(self.__getitem__(key))
    @staticmethod
    def _tag_matches(tags, filter):
        """Check if all key/values in filter match in tags."""
        for tag_name, tag_value in filter.items():
            # using _sentinel as I'm not sure that "None"
            # could be used, because it could be a valid
            # series_tags value : when a series has no such tag
            # then I think it's set to /null/None/.. TBC..
            series_tag_value = tags.get(tag_name, _sentinel)
            if series_tag_value != tag_value:
                return False
        return True
    def _get_series(self):
        """Return all series."""
        return self.raw.get('series', [])
    def __len__(self):
        """Return the len of the keys in the ResultSet."""
        return len(self.keys())
    def keys(self):
        """Return the list of keys in the ResultSet.

        :return: List of keys. Keys are tuples (series_name, tags)
        """
        keys = []
        for series in self._get_series():
            keys.append(
                (series.get('measurement',
                            series.get('name', 'results')),
                 series.get('tags', None))
            )
        return keys
    def items(self):
        """Return the set of items from the ResultSet.

        :return: List of tuples, (key, generator)
        """
        items = []
        for series in self._get_series():
            series_key = (series.get('measurement',
                                     series.get('name', 'results')),
                          series.get('tags', None))
            items.append(
                (series_key, self._get_points_for_series(series))
            )
        return items
    def _get_points_for_series(self, series):
        """Return generator of dict from columns and values of a series.

        :param series: One series
        :return: Generator of dicts
        """
        for point in series.get('values', []):
            yield self.point_from_cols_vals(
                series['columns'],
                point
            )
    @staticmethod
    def point_from_cols_vals(cols, vals):
        """Create a dict from columns and values lists.

        :param cols: List of columns
        :param vals: List of values
        :return: Dict where keys are columns.
        """
        point = {}
        for col_index, col_name in enumerate(cols):
            point[col_name] = vals[col_index]
        return point

View file

@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
"""Configure the tests package for InfluxDBClient."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import os
import unittest
# True when running under PyPy (CPython has no ``pypy_version_info``).
using_pypy = hasattr(sys, "pypy_version_info")
# Decorator for tests that cannot run on PyPy (e.g. pandas-dependent ones).
skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.")
# Opt-out switch for tests that need a live InfluxDB server; enabled by
# setting INFLUXDB_PYTHON_SKIP_SERVER_TESTS=True in the environment.
_skip_server_tests = os.environ.get(
    'INFLUXDB_PYTHON_SKIP_SERVER_TESTS',
    None) == 'True'
skipServerTests = unittest.skipIf(_skip_server_tests,
                                  "Skipping server tests...")

View file

@ -0,0 +1,51 @@
# -*- coding: utf-8 -*-
"""Chunked JSON test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
from influxdb import chunked_json
class TestChunkJson(unittest.TestCase):
    """Set up the TestChunkJson object."""

    @classmethod
    def setUpClass(cls):
        """Initialize the TestChunkJson object."""
        super(TestChunkJson, cls).setUpClass()

    def test_load(self):
        """Test reading a sequence of JSON values from a string."""
        # Two concatenated JSON documents in one string, as produced by a
        # chunked query response.
        example_response = \
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \
            '"columns": ["time", "value"], "values": ' \
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"],'\
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
        res = list(chunked_json.loads(example_response))
        # import ipdb; ipdb.set_trace()
        self.assertListEqual(
            [
                {
                    'results': [
                        {'series': [{
                            'values': [['2009-11-10T23:00:00Z', 0.64]],
                            'measurement': 'sdfsdfsdf',
                            'columns':
                                ['time', 'value']}]},
                        {'series': [{
                            'values': [['2009-11-10T23:00:00Z', 0.64]],
                            'measurement': 'cpu_load_short',
                            'columns': ['time', 'value']}]}
                    ]
                }
            ],
            res
        )

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,711 @@
# -*- coding: utf-8 -*-
"""Unit tests for misc module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import timedelta
import json
import unittest
import warnings
import requests_mock
from influxdb.tests import skipIfPYpy, using_pypy
from nose.tools import raises
from .client_test import _mocked_session
if not using_pypy:
import pandas as pd
from pandas.util.testing import assert_frame_equal
from influxdb import DataFrameClient
@skipIfPYpy
class TestDataFrameClient(unittest.TestCase):
"""Set up a test DataFrameClient object."""
    def setUp(self):
        """Instantiate a TestDataFrameClient object."""
        # By default, raise exceptions on warnings so that any use of a
        # deprecated code path fails the test loudly.
        warnings.simplefilter('error', FutureWarning)
    def test_write_points_from_dataframe(self):
        """Test write points from df in TestDataFrameClient object."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # Expected line protocol: strings quoted, ints suffixed with 'i',
        # timestamps in nanoseconds.
        expected = (
            b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
            b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
            b"3600000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo')
            self.assertEqual(m.last_request.body, expected)
            # tags=None must produce the same body as omitting tags.
            cli.write_points(dataframe, 'foo', tags=None)
            self.assertEqual(m.last_request.body, expected)
    def test_dataframe_write_points_with_whitespace_measurement(self):
        """write_points should escape white space in measurements."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # Spaces in the measurement name must be backslash-escaped.
        expected = (
            b"meas\\ with\\ space "
            b"column_one=\"1\",column_two=1i,column_three=1.0 0\n"
            b"meas\\ with\\ space "
            b"column_one=\"2\",column_two=2i,column_three=2.0 "
            b"3600000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'meas with space')
            self.assertEqual(m.last_request.body, expected)
    def test_dataframe_write_points_with_whitespace_in_column_names(self):
        """write_points should escape white space in column names."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column one", "column two",
                                          "column three"])
        # Spaces in field keys must be backslash-escaped.
        expected = (
            b"foo column\\ one=\"1\",column\\ two=1i,column\\ three=1.0 0\n"
            b"foo column\\ one=\"2\",column\\ two=2i,column\\ three=2.0 "
            b"3600000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo')
            self.assertEqual(m.last_request.body, expected)
    def test_write_points_from_dataframe_with_none(self):
        """Test write points from df in TestDataFrameClient object."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", None, 1.0], ["2", 2.0, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # A None cell is simply omitted from that point's field set.
        expected = (
            b"foo column_one=\"1\",column_three=1.0 0\n"
            b"foo column_one=\"2\",column_two=2.0,column_three=2.0 "
            b"3600000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo')
            self.assertEqual(m.last_request.body, expected)
            cli.write_points(dataframe, 'foo', tags=None)
            self.assertEqual(m.last_request.body, expected)
    def test_write_points_from_dataframe_with_line_of_none(self):
        """Test write points from df in TestDataFrameClient object."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[[None, None, None], ["2", 2.0, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # A row that is entirely None produces no line at all.
        expected = (
            b"foo column_one=\"2\",column_two=2.0,column_three=2.0 "
            b"3600000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo')
            self.assertEqual(m.last_request.body, expected)
            cli.write_points(dataframe, 'foo', tags=None)
            self.assertEqual(m.last_request.body, expected)
    def test_write_points_from_dataframe_with_all_none(self):
        """Test write points from df in TestDataFrameClient object."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[[None, None, None], [None, None, None]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # An all-None frame degenerates to a single trailing newline.
        expected = (
            b"\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo')
            self.assertEqual(m.last_request.body, expected)
            cli.write_points(dataframe, 'foo', tags=None)
            self.assertEqual(m.last_request.body, expected)
    def test_write_points_from_dataframe_in_batches(self):
        """Test write points in batch from df in TestDataFrameClient object."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            # batch_size=1 forces one HTTP write per row; only the overall
            # success flag is asserted here.
            self.assertTrue(cli.write_points(dataframe, "foo", batch_size=1))
    def test_write_points_from_dataframe_with_tag_columns(self):
        """Test write points from df w/tag in TestDataFrameClient object."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0],
                                       ['red', 0, "2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["tag_one", "tag_two", "column_one",
                                          "column_two", "column_three"])
        # Columns named in tag_columns become tags (unquoted, in the key
        # part of the line); the rest are written as fields.
        expected = (
            b"foo,tag_one=blue,tag_two=1 "
            b"column_one=\"1\",column_two=1i,column_three=1.0 "
            b"0\n"
            b"foo,tag_one=red,tag_two=0 "
            b"column_one=\"2\",column_two=2i,column_three=2.0 "
            b"3600000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo',
                             tag_columns=['tag_one', 'tag_two'])
            self.assertEqual(m.last_request.body, expected)
            cli.write_points(dataframe, 'foo',
                             tag_columns=['tag_one', 'tag_two'], tags=None)
            self.assertEqual(m.last_request.body, expected)
    def test_write_points_from_dataframe_with_tag_cols_and_global_tags(self):
        """Test write points from df w/tag + cols in TestDataFrameClient."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0],
                                       ['red', 0, "2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["tag_one", "tag_two", "column_one",
                                          "column_two", "column_three"])
        # Global tags passed via ``tags=`` are merged with the per-row tag
        # columns; the tag set is emitted sorted by key.
        expected = (
            b"foo,global_tag=value,tag_one=blue,tag_two=1 "
            b"column_one=\"1\",column_two=1i,column_three=1.0 "
            b"0\n"
            b"foo,global_tag=value,tag_one=red,tag_two=0 "
            b"column_one=\"2\",column_two=2i,column_three=2.0 "
            b"3600000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo',
                             tag_columns=['tag_one', 'tag_two'],
                             tags={'global_tag': 'value'})
            self.assertEqual(m.last_request.body, expected)
    def test_write_points_from_dataframe_with_tag_cols_and_defaults(self):
        """Test default write points from df w/tag in TestDataFrameClient."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0, 'hot'],
                                       ['red', 0, "2", 2, 2.0, 'cold']],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["tag_one", "tag_two", "column_one",
                                          "column_two", "column_three",
                                          "tag_three"])
        # Both tag_columns and field_columns given: anything not listed in
        # either is dropped.
        expected_tags_and_fields = (
            b"foo,tag_one=blue "
            b"column_one=\"1\",column_two=1i "
            b"0\n"
            b"foo,tag_one=red "
            b"column_one=\"2\",column_two=2i "
            b"3600000000000\n"
        )
        # Only tag_columns given: every remaining column defaults to a field.
        expected_tags_no_fields = (
            b"foo,tag_one=blue,tag_two=1 "
            b"column_one=\"1\",column_two=1i,column_three=1.0,"
            b"tag_three=\"hot\" 0\n"
            b"foo,tag_one=red,tag_two=0 "
            b"column_one=\"2\",column_two=2i,column_three=2.0,"
            b"tag_three=\"cold\" 3600000000000\n"
        )
        # Only field_columns given: every remaining column defaults to a tag.
        expected_fields_no_tags = (
            b"foo,tag_one=blue,tag_three=hot,tag_two=1 "
            b"column_one=\"1\",column_two=1i,column_three=1.0 "
            b"0\n"
            b"foo,tag_one=red,tag_three=cold,tag_two=0 "
            b"column_one=\"2\",column_two=2i,column_three=2.0 "
            b"3600000000000\n"
        )
        # Neither given: every column is written as a field, no tags.
        expected_no_tags_no_fields = (
            b"foo "
            b"tag_one=\"blue\",tag_two=1i,column_one=\"1\","
            b"column_two=1i,column_three=1.0,tag_three=\"hot\" "
            b"0\n"
            b"foo "
            b"tag_one=\"red\",tag_two=0i,column_one=\"2\","
            b"column_two=2i,column_three=2.0,tag_three=\"cold\" "
            b"3600000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo',
                             field_columns=['column_one', 'column_two'],
                             tag_columns=['tag_one'])
            self.assertEqual(m.last_request.body, expected_tags_and_fields)

            cli.write_points(dataframe, 'foo',
                             tag_columns=['tag_one', 'tag_two'])
            self.assertEqual(m.last_request.body, expected_tags_no_fields)

            cli.write_points(dataframe, 'foo',
                             field_columns=['column_one', 'column_two',
                                            'column_three'])
            self.assertEqual(m.last_request.body, expected_fields_no_tags)

            cli.write_points(dataframe, 'foo')
            self.assertEqual(m.last_request.body, expected_no_tags_no_fields)
    def test_write_points_from_dataframe_with_tag_escaped(self):
        """Test write points from df w/escaped tag in TestDataFrameClient."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(
            data=[
                ['blue orange', "1", 1, 'hot=cold'],  # space, equal
                ['red,green', "2", 2, r'cold\fire'],  # comma, backslash
                ['some', "2", 2, ''],                 # skip empty
                ['some', "2", 2, None],               # skip None
                ['', "2", 2, None],                   # all tags empty
            ],
            index=pd.period_range(now, freq='H', periods=5),
            columns=["tag_one", "column_one", "column_two", "tag_three"]
        )
        # Space, equals, comma and backslash must be backslash-escaped in
        # tag values; empty/None tag values are omitted entirely.
        expected_escaped_tags = (
            b"foo,tag_one=blue\\ orange,tag_three=hot\\=cold "
            b"column_one=\"1\",column_two=1i "
            b"0\n"
            b"foo,tag_one=red\\,green,tag_three=cold\\\\fire "
            b"column_one=\"2\",column_two=2i "
            b"3600000000000\n"
            b"foo,tag_one=some "
            b"column_one=\"2\",column_two=2i "
            b"7200000000000\n"
            b"foo,tag_one=some "
            b"column_one=\"2\",column_two=2i "
            b"10800000000000\n"
            b"foo "
            b"column_one=\"2\",column_two=2i "
            b"14400000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, 'foo',
                             field_columns=['column_one', 'column_two'],
                             tag_columns=['tag_one', 'tag_three'])
            self.assertEqual(m.last_request.body, expected_escaped_tags)
    def test_write_points_from_dataframe_with_numeric_column_names(self):
        """Test write points from df with numeric cols."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        # df with numeric column names (0, 1, 2 auto-assigned by pandas);
        # they are stringified into the line protocol as-is.
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)])
        expected = (
            b'foo,hello=there 0=\"1\",1=1i,2=1.0 0\n'
            b'foo,hello=there 0=\"2\",1=2i,2=2.0 3600000000000\n'
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, "foo", {"hello": "there"})
            self.assertEqual(m.last_request.body, expected)

    def test_write_points_from_dataframe_with_numeric_precision(self):
        """Test write points from df with numeric precision."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        # df with numeric column names
        dataframe = pd.DataFrame(data=[["1", 1, 1.1111111111111],
                                       ["2", 2, 2.2222222222222]],
                                 index=[now, now + timedelta(hours=1)])
        # Default rounds floats to 11 significant digits.
        expected_default_precision = (
            b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
            b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
        )
        # numeric_precision=4 keeps four decimal places.
        expected_specified_precision = (
            b'foo,hello=there 0=\"1\",1=1i,2=1.1111 0\n'
            b'foo,hello=there 0=\"2\",1=2i,2=2.2222 3600000000000\n'
        )
        # numeric_precision='full' keeps the value unchanged.
        expected_full_precision = (
            b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n'
            b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n'
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, "foo", {"hello": "there"})
            self.assertEqual(m.last_request.body, expected_default_precision)

            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, "foo", {"hello": "there"},
                             numeric_precision=4)
            self.assertEqual(m.last_request.body, expected_specified_precision)

            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, "foo", {"hello": "there"},
                             numeric_precision='full')
            self.assertEqual(m.last_request.body, expected_full_precision)
    def test_write_points_from_dataframe_with_period_index(self):
        """Test write points from df with period index."""
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[pd.Period('1970-01-01'),
                                        pd.Period('1970-01-02')],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # Period timestamps convert to nanosecond epoch (one day apart).
        expected = (
            b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
            b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
            b"86400000000000\n"
        )
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, "foo")
            self.assertEqual(m.last_request.body, expected)

    def test_write_points_from_dataframe_with_time_precision(self):
        """Test write points from df with time precision."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/write",
                           status_code=204)
            cli = DataFrameClient(database='db')
            measurement = "foo"

            # For each precision, verify both the 'precision' query-string
            # parameter and the rescaled timestamp in the request body.
            cli.write_points(dataframe, measurement, time_precision='h')
            self.assertEqual(m.last_request.qs['precision'], ['h'])
            self.assertEqual(
                b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
                b'column_one="2",column_two=2i,column_three=2.0 1\n',
                m.last_request.body,
            )

            cli.write_points(dataframe, measurement, time_precision='m')
            self.assertEqual(m.last_request.qs['precision'], ['m'])
            self.assertEqual(
                b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
                b'column_one="2",column_two=2i,column_three=2.0 60\n',
                m.last_request.body,
            )

            cli.write_points(dataframe, measurement, time_precision='s')
            self.assertEqual(m.last_request.qs['precision'], ['s'])
            self.assertEqual(
                b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
                b'column_one="2",column_two=2i,column_three=2.0 3600\n',
                m.last_request.body,
            )

            cli.write_points(dataframe, measurement, time_precision='ms')
            self.assertEqual(m.last_request.qs['precision'], ['ms'])
            self.assertEqual(
                b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
                b'column_one="2",column_two=2i,column_three=2.0 3600000\n',
                m.last_request.body,
            )

            cli.write_points(dataframe, measurement, time_precision='u')
            self.assertEqual(m.last_request.qs['precision'], ['u'])
            self.assertEqual(
                b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
                b'column_one="2",column_two=2i,column_three=2.0 3600000000\n',
                m.last_request.body,
            )

            cli.write_points(dataframe, measurement, time_precision='n')
            self.assertEqual(m.last_request.qs['precision'], ['n'])
            self.assertEqual(
                b'foo column_one="1",column_two=1i,column_three=1.0 0\n'
                b'foo column_one="2",column_two=2i,column_three=2.0 '
                b'3600000000000\n',
                m.last_request.body,
            )
    @raises(TypeError)
    def test_write_points_from_dataframe_fails_without_time_index(self):
        """Test failed write points from df without time index."""
        # Default RangeIndex is not datetime-like, so the client must
        # reject the frame with TypeError before any request is made.
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, "foo")

    @raises(TypeError)
    def test_write_points_from_dataframe_fails_with_series(self):
        """Test failed write points from df with series."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        # A Series (not a DataFrame) must be rejected with TypeError.
        dataframe = pd.Series(data=[1.0, 2.0],
                              index=[now, now + timedelta(hours=1)])
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series",
                           status_code=204)
            cli = DataFrameClient(database='db')
            cli.write_points(dataframe, "foo")
    def test_query_into_dataframe(self):
        """Test query into df for TestDataFrameClient object."""
        data = {
            "results": [{
                "series": [
                    {"measurement": "network",
                     "tags": {"direction": ""},
                     "columns": ["time", "value"],
                     "values":[["2009-11-10T23:00:00Z", 23422]]
                     },
                    {"measurement": "network",
                     "tags": {"direction": "in"},
                     "columns": ["time", "value"],
                     "values": [["2009-11-10T23:00:00Z", 23422],
                                ["2009-11-10T23:00:00Z", 23422],
                                ["2009-11-10T23:00:00Z", 23422]]
                     }
                ]
            }]
        }
        # Each (measurement, tags) pair becomes a key in the result dict,
        # mapped to a DataFrame with a UTC-localized datetime index.
        pd1 = pd.DataFrame(
            [[23422]], columns=['value'],
            index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
        pd1.index = pd1.index.tz_localize('UTC')
        pd2 = pd.DataFrame(
            [[23422], [23422], [23422]], columns=['value'],
            index=pd.to_datetime(["2009-11-10T23:00:00Z",
                                  "2009-11-10T23:00:00Z",
                                  "2009-11-10T23:00:00Z"]))
        pd2.index = pd2.index.tz_localize('UTC')
        expected = {
            ('network', (('direction', ''),)): pd1,
            ('network', (('direction', 'in'),)): pd2
        }

        cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
        with _mocked_session(cli, 'GET', 200, data):
            result = cli.query('select value from network group by direction;')
            for k in expected:
                assert_frame_equal(expected[k], result[k])

    def test_multiquery_into_dataframe(self):
        """Test multiquery into df for TestDataFrameClient object."""
        data = {
            "results": [
                {
                    "series": [
                        {
                            "name": "cpu_load_short",
                            "columns": ["time", "value"],
                            "values": [
                                ["2015-01-29T21:55:43.702900257Z", 0.55],
                                ["2015-01-29T21:55:43.702900257Z", 23422],
                                ["2015-06-11T20:46:02Z", 0.64]
                            ]
                        }
                    ]
                }, {
                    "series": [
                        {
                            "name": "cpu_load_short",
                            "columns": ["time", "count"],
                            "values": [
                                ["1970-01-01T00:00:00Z", 3]
                            ]
                        }
                    ]
                }
            ]
        }
        # A multi-statement query returns one result dict per statement.
        pd1 = pd.DataFrame(
            [[0.55], [23422.0], [0.64]], columns=['value'],
            index=pd.to_datetime([
                "2015-01-29 21:55:43.702900257+0000",
                "2015-01-29 21:55:43.702900257+0000",
                "2015-06-11 20:46:02+0000"])).tz_localize('UTC')
        pd2 = pd.DataFrame(
            [[3]], columns=['count'],
            index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))\
            .tz_localize('UTC')
        expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}]

        cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
        iql = "SELECT value FROM cpu_load_short WHERE region='us-west';"\
            "SELECT count(value) FROM cpu_load_short WHERE region='us-west'"
        with _mocked_session(cli, 'GET', 200, data):
            result = cli.query(iql)
            for r, e in zip(result, expected):
                for k in e:
                    assert_frame_equal(e[k], r[k])
    def test_query_with_empty_result(self):
        """Test query with empty results in TestDataFrameClient object."""
        cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
        # An empty series list yields an empty dict, not an empty frame.
        with _mocked_session(cli, 'GET', 200, {"results": [{}]}):
            result = cli.query('select column_one from foo;')
            self.assertEqual(result, {})

    def test_get_list_database(self):
        """Test get list of databases in TestDataFrameClient object."""
        data = {'results': [
            {'series': [
                {'measurement': 'databases',
                 'values': [
                     ['new_db_1'],
                     ['new_db_2']],
                 'columns': ['name']}]}
        ]}
        cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
        # get_list_database returns plain dicts, not DataFrames.
        with _mocked_session(cli, 'get', 200, json.dumps(data)):
            self.assertListEqual(
                cli.get_list_database(),
                [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
            )
    def test_datetime_to_epoch(self):
        """Test convert datetime to epoch in TestDataFrameClient object."""
        timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
        cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
        # Default precision is seconds; other precisions rescale the
        # seconds-epoch value accordingly.
        self.assertEqual(
            cli._datetime_to_epoch(timestamp),
            1356998400.0
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='h'),
            1356998400.0 / 3600
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='m'),
            1356998400.0 / 60
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='s'),
            1356998400.0
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='ms'),
            1356998400000.0
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='u'),
            1356998400000000.0
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='n'),
            1356998400000000000.0
        )

    def test_dsn_constructor(self):
        """Test data source name constructor in TestDataFrameClient."""
        client = DataFrameClient.from_dsn('influxdb://localhost:8086')
        self.assertIsInstance(client, DataFrameClient)
        self.assertEqual('http://localhost:8086', client._baseurl)

View file

@ -0,0 +1,367 @@
# -*- coding: utf-8 -*-
"""Set of series helper functions for test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime, timedelta
import unittest
import warnings
import mock
from influxdb import SeriesHelper, InfluxDBClient
from requests.exceptions import ConnectionError
class TestSeriesHelper(unittest.TestCase):
"""Define the SeriesHelper test object."""
    @classmethod
    def setUpClass(cls):
        """Set up the TestSeriesHelper object."""
        super(TestSeriesHelper, cls).setUpClass()

        TestSeriesHelper.client = InfluxDBClient(
            'host',
            8086,
            'username',
            'password',
            'database'
        )

        class MySeriesHelper(SeriesHelper):
            """Define a SeriesHelper object."""

            class Meta:
                """Define metadata for the SeriesHelper object."""

                client = TestSeriesHelper.client
                series_name = 'events.stats.{server_name}'
                fields = ['some_stat']
                tags = ['server_name', 'other_tag']
                bulk_size = 5
                autocommit = True

        TestSeriesHelper.MySeriesHelper = MySeriesHelper

    def tearDown(self):
        """Deconstruct the TestSeriesHelper object."""
        super(TestSeriesHelper, self).tearDown()
        # Reset the shared helper after every test and verify that no
        # buffered datapoints leak from one test into the next.
        TestSeriesHelper.MySeriesHelper._reset_()
        self.assertEqual(
            TestSeriesHelper.MySeriesHelper._json_body_(),
            [],
            'Resetting helper did not empty datapoints.')
    def test_auto_commit(self):
        """Test write_points called after valid number of events."""
        class AutoCommitTest(SeriesHelper):
            """Define a SeriesHelper instance to test autocommit."""

            class Meta:
                """Define metadata for AutoCommitTest."""

                series_name = 'events.stats.{server_name}'
                fields = ['some_stat']
                tags = ['server_name', 'other_tag']
                bulk_size = 5
                client = InfluxDBClient()
                autocommit = True

        fake_write_points = mock.MagicMock()
        AutoCommitTest(server_name='us.east-1', some_stat=159, other_tag='gg')
        # Swap in the mock after the first datapoint so the commit on the
        # fifth point hits the mock, not the real client.
        AutoCommitTest._client.write_points = fake_write_points
        AutoCommitTest(server_name='us.east-1', some_stat=158, other_tag='gg')
        AutoCommitTest(server_name='us.east-1', some_stat=157, other_tag='gg')
        AutoCommitTest(server_name='us.east-1', some_stat=156, other_tag='gg')
        # Four datapoints buffered: below bulk_size, no commit yet.
        self.assertFalse(fake_write_points.called)
        AutoCommitTest(server_name='us.east-1', some_stat=3443, other_tag='gg')
        # Fifth datapoint reaches bulk_size and triggers the commit.
        self.assertTrue(fake_write_points.called)
    @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
    def testSingleSeriesName(self, current_timestamp):
        """Test JSON conversion when there is only one series name."""
        current_timestamp.return_value = current_date = datetime.today()
        TestSeriesHelper.MySeriesHelper(
            server_name='us.east-1', other_tag='ello', some_stat=159)
        TestSeriesHelper.MySeriesHelper(
            server_name='us.east-1', other_tag='ello', some_stat=158)
        TestSeriesHelper.MySeriesHelper(
            server_name='us.east-1', other_tag='ello', some_stat=157)
        TestSeriesHelper.MySeriesHelper(
            server_name='us.east-1', other_tag='ello', some_stat=156)
        expectation = [
            {
                "measurement": "events.stats.us.east-1",
                "tags": {
                    "other_tag": "ello",
                    "server_name": "us.east-1"
                },
                "fields": {
                    "some_stat": 159
                },
                "time": current_date,
            },
            {
                "measurement": "events.stats.us.east-1",
                "tags": {
                    "other_tag": "ello",
                    "server_name": "us.east-1"
                },
                "fields": {
                    "some_stat": 158
                },
                "time": current_date,
            },
            {
                "measurement": "events.stats.us.east-1",
                "tags": {
                    "other_tag": "ello",
                    "server_name": "us.east-1"
                },
                "fields": {
                    "some_stat": 157
                },
                "time": current_date,
            },
            {
                "measurement": "events.stats.us.east-1",
                "tags": {
                    "other_tag": "ello",
                    "server_name": "us.east-1"
                },
                "fields": {
                    "some_stat": 156
                },
                "time": current_date,
            }
        ]

        rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
        # Order of datapoints is not guaranteed, so compare as sets via
        # mutual containment rather than list equality.
        self.assertTrue(all([el in expectation for el in rcvd]) and
                        all([el in rcvd for el in expectation]),
                        'Invalid JSON body of time series returned from '
                        '_json_body_ for one series name: {0}.'.format(rcvd))
    @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
    def testSeveralSeriesNames(self, current_timestamp):
        """Test JSON conversion when there are multiple series names."""
        current_timestamp.return_value = current_date = datetime.today()
        TestSeriesHelper.MySeriesHelper(
            server_name='us.east-1', some_stat=159, other_tag='ello')
        TestSeriesHelper.MySeriesHelper(
            server_name='fr.paris-10', some_stat=158, other_tag='ello')
        TestSeriesHelper.MySeriesHelper(
            server_name='lu.lux', some_stat=157, other_tag='ello')
        TestSeriesHelper.MySeriesHelper(
            server_name='uk.london', some_stat=156, other_tag='ello')
        # The server_name tag is interpolated into each series name.
        expectation = [
            {
                'fields': {
                    'some_stat': 157
                },
                'measurement': 'events.stats.lu.lux',
                'tags': {
                    'other_tag': 'ello',
                    'server_name': 'lu.lux'
                },
                "time": current_date,
            },
            {
                'fields': {
                    'some_stat': 156
                },
                'measurement': 'events.stats.uk.london',
                'tags': {
                    'other_tag': 'ello',
                    'server_name': 'uk.london'
                },
                "time": current_date,
            },
            {
                'fields': {
                    'some_stat': 158
                },
                'measurement': 'events.stats.fr.paris-10',
                'tags': {
                    'other_tag': 'ello',
                    'server_name': 'fr.paris-10'
                },
                "time": current_date,
            },
            {
                'fields': {
                    'some_stat': 159
                },
                'measurement': 'events.stats.us.east-1',
                'tags': {
                    'other_tag': 'ello',
                    'server_name': 'us.east-1'
                },
                "time": current_date,
            }
        ]

        rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
        # Order-insensitive comparison (see testSingleSeriesName).
        self.assertTrue(all([el in expectation for el in rcvd]) and
                        all([el in rcvd for el in expectation]),
                        'Invalid JSON body of time series returned from '
                        '_json_body_ for several series names: {0}.'
                        .format(rcvd))
    @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
    def testSeriesWithoutTimeField(self, current_timestamp):
        """Test that time is optional on a series without a time field."""
        current_date = datetime.today()
        yesterday = current_date - timedelta(days=1)
        current_timestamp.return_value = yesterday
        # Explicit time= keyword wins over the helper's own timestamp.
        TestSeriesHelper.MySeriesHelper(
            server_name='us.east-1', other_tag='ello',
            some_stat=159, time=current_date
        )
        # Without time=, the mocked _current_timestamp value is used.
        TestSeriesHelper.MySeriesHelper(
            server_name='us.east-1', other_tag='ello',
            some_stat=158,
        )
        point1, point2 = TestSeriesHelper.MySeriesHelper._json_body_()
        self.assertTrue('time' in point1 and 'time' in point2)
        self.assertEqual(point1['time'], current_date)
        self.assertEqual(point2['time'], yesterday)

    def testSeriesWithoutAllTags(self):
        """Test that creating a data point without a tag throws an error."""
        class MyTimeFieldSeriesHelper(SeriesHelper):

            class Meta:
                client = TestSeriesHelper.client
                series_name = 'events.stats.{server_name}'
                fields = ['some_stat', 'time']
                tags = ['server_name', 'other_tag']
                bulk_size = 5
                autocommit = True

        # Missing the declared 'other_tag' tag must raise NameError.
        self.assertRaises(NameError, MyTimeFieldSeriesHelper,
                          **{"server_name": 'us.east-1',
                             "some_stat": 158})
    @mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
    def testSeriesWithTimeField(self, current_timestamp):
        """Test that time is optional on a series with a time field."""
        current_date = datetime.today()
        yesterday = current_date - timedelta(days=1)
        current_timestamp.return_value = yesterday

        class MyTimeFieldSeriesHelper(SeriesHelper):

            class Meta:
                client = TestSeriesHelper.client
                series_name = 'events.stats.{server_name}'
                # 'time' is declared as a regular field here.
                fields = ['some_stat', 'time']
                tags = ['server_name', 'other_tag']
                bulk_size = 5
                autocommit = True

        MyTimeFieldSeriesHelper(
            server_name='us.east-1', other_tag='ello',
            some_stat=159, time=current_date
        )
        MyTimeFieldSeriesHelper(
            server_name='us.east-1', other_tag='ello',
            some_stat=158,
        )
        point1, point2 = MyTimeFieldSeriesHelper._json_body_()
        self.assertTrue('time' in point1 and 'time' in point2)
        # Explicit time= is honored; omitting it falls back to the
        # (mocked) current timestamp.
        self.assertEqual(point1['time'], current_date)
        self.assertEqual(point2['time'], yesterday)
    def testInvalidHelpers(self):
        """Test errors in invalid helpers."""
        class MissingMeta(SeriesHelper):
            """Define instance of SeriesHelper for missing meta."""

            pass

        class MissingClient(SeriesHelper):
            """Define SeriesHelper for missing client data."""

            class Meta:
                """Define metadata for MissingClient."""

                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                autocommit = True

        class MissingSeriesName(SeriesHelper):
            """Define instance of SeriesHelper for missing series."""

            class Meta:
                """Define metadata for MissingSeriesName."""

                fields = ['time', 'server_name']

        class MissingFields(SeriesHelper):
            """Define instance of SeriesHelper for missing fields."""

            class Meta:
                """Define metadata for MissingFields."""

                series_name = 'events.stats.{server_name}'

        # Each incomplete helper must fail with AttributeError on use.
        for cls in [MissingMeta, MissingClient, MissingFields,
                    MissingSeriesName]:
            self.assertRaises(
                AttributeError, cls, **{'time': 159,
                                        'server_name': 'us.east-1'})
    @unittest.skip("Fails on py32")
    def testWarnBulkSizeZero(self):
        """Test warning for an invalid bulk size."""
        class WarnBulkSizeZero(SeriesHelper):

            class Meta:
                client = TestSeriesHelper.client
                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                tags = []
                bulk_size = 0
                autocommit = True

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            try:
                WarnBulkSizeZero(time=159, server_name='us.east-1')
            except ConnectionError:
                # Server defined in the client is invalid, we're testing
                # the warning only.
                pass
            self.assertEqual(len(w), 1,
                             '{0} call should have generated one warning.'
                             .format(WarnBulkSizeZero))
            self.assertIn('forced to 1', str(w[-1].message),
                          'Warning message did not contain "forced to 1".')
def testWarnBulkSizeNoEffect(self):
"""Test warning for a set bulk size but autocommit False."""
class WarnBulkSizeNoEffect(SeriesHelper):
"""Define SeriesHelper for warning on bulk size."""
class Meta:
"""Define metadat for WarnBulkSizeNoEffect."""
series_name = 'events.stats.{server_name}'
fields = ['time', 'server_name']
bulk_size = 5
tags = []
autocommit = False
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
WarnBulkSizeNoEffect(time=159, server_name='us.east-1')
self.assertEqual(len(w), 1,
'{0} call should have generated one warning.'
.format(WarnBulkSizeNoEffect))
self.assertIn('has no affect', str(w[-1].message),
'Warning message did not contain "has not affect".')

View file

@ -0,0 +1,2 @@
# -*- coding: utf-8 -*-
"""Define the influxdb08 test package."""

View file

@ -0,0 +1,904 @@
# -*- coding: utf-8 -*-
"""Client unit tests."""
import json
import socket
import sys
import unittest
import random
import warnings
import mock
import requests
import requests.exceptions
import requests_mock
from nose.tools import raises
from mock import patch
from influxdb.influxdb08 import InfluxDBClient
from influxdb.influxdb08.client import session
# Define a py2/py3-compatible unicode helper.  Compare on
# sys.version_info rather than the sys.version string: lexicographic
# string comparison of version numbers is fragile (e.g. any hypothetical
# major version >= 10 would compare incorrectly).
if sys.version_info[0] < 3:
    import codecs

    def u(x):
        """Decode *x* with the unicode-escape codec (Python 2)."""
        return codecs.unicode_escape_decode(x)[0]
else:
    def u(x):
        """Return *x* unchanged; str is already unicode on Python 3."""
        return x
def _build_response_object(status_code=200, content=""):
    """Build a requests.Response with *content* as its UTF-8 body."""
    response = requests.Response()
    response._content = content.encode("utf8")
    response.status_code = status_code
    return response
def _mocked_session(method="GET", status_code=200, content=""):
    """Patch the module-level session so requests return canned responses.

    Returns a ``mock.patch`` object that replaces ``session.request`` with
    a stub asserting the HTTP *method*, validating POSTed JSON against
    *content*, and answering with *status_code* and *content* as the body.
    """
    method = method.upper()

    def request(*args, **kwargs):
        """Define a request for the _mocked_session."""
        c = content

        # Check method
        assert method == kwargs.get('method', 'GET')

        if method == 'POST':
            data = kwargs.get('data', None)

            if data is not None:
                # Data must be a string
                assert isinstance(data, str)

                # Data must be a JSON string
                assert c == json.loads(data, strict=True)

                c = data

        # Anyway, Content must be a JSON string (or empty string)
        if not isinstance(c, str):
            c = json.dumps(c)

        return _build_response_object(status_code=status_code, content=c)

    mocked = patch.object(
        session,
        'request',
        side_effect=request
    )

    return mocked
class TestInfluxDBClient(unittest.TestCase):
"""Define a TestInfluxDBClient object."""
    def setUp(self):
        """Set up a TestInfluxDBClient object."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)
        # Shared fixture: one series with two rows in the 0.8 JSON format.
        self.dummy_points = [
            {
                "points": [
                    ["1", 1, 1.0],
                    ["2", 2, 2.0]
                ],
                "name": "foo",
                "columns": ["column_one", "column_two", "column_three"]
            }
        ]

        self.dsn_string = 'influxdb://uSr:pWd@host:1886/db'
    def test_scheme(self):
        """Test database scheme for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        self.assertEqual(cli._baseurl, 'http://host:8086')

        # ssl=True switches the base URL to https.
        cli = InfluxDBClient(
            'host', 8086, 'username', 'password', 'database', ssl=True
        )
        self.assertEqual(cli._baseurl, 'https://host:8086')

    def test_dsn(self):
        """Test datasource name for TestInfluxDBClient object."""
        cli = InfluxDBClient.from_dsn(self.dsn_string)
        self.assertEqual('http://host:1886', cli._baseurl)
        self.assertEqual('uSr', cli._username)
        self.assertEqual('pWd', cli._password)
        self.assertEqual('db', cli._database)
        self.assertFalse(cli._use_udp)

        # 'udp+' and 'https+' scheme prefixes toggle UDP / TLS modes;
        # explicit kwargs override what the DSN implies.
        cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
        self.assertTrue(cli._use_udp)

        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
        self.assertEqual('https://host:1886', cli._baseurl)

        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
                                      **{'ssl': False})
        self.assertEqual('http://host:1886', cli._baseurl)

    def test_switch_database(self):
        """Test switch database for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        cli.switch_database('another_database')
        self.assertEqual(cli._database, 'another_database')

    @raises(FutureWarning)
    def test_switch_db_deprecated(self):
        """Test deprecated switch database for TestInfluxDBClient object."""
        # setUp promotes FutureWarning to an error, so the deprecated
        # switch_db alias must raise here.
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        cli.switch_db('another_database')
        self.assertEqual(cli._database, 'another_database')

    def test_switch_user(self):
        """Test switch user for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        cli.switch_user('another_username', 'another_password')
        self.assertEqual(cli._username, 'another_username')
        self.assertEqual(cli._password, 'another_password')
    def test_write(self):
        """Test write to database for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write"
            )
            cli = InfluxDBClient(database='db')
            cli.write(
                {"database": "mydb",
                 "retentionPolicy": "mypolicy",
                 "points": [{"name": "cpu_load_short",
                             "tags": {"host": "server01",
                                      "region": "us-west"},
                             "timestamp": "2009-11-10T23:00:00Z",
                             "values": {"value": 0.64}}]}
            )

            # The request body must be the same structure, JSON-encoded.
            self.assertEqual(
                json.loads(m.last_request.body),
                {"database": "mydb",
                 "retentionPolicy": "mypolicy",
                 "points": [{"name": "cpu_load_short",
                             "tags": {"host": "server01",
                                      "region": "us-west"},
                             "timestamp": "2009-11-10T23:00:00Z",
                             "values": {"value": 0.64}}]}
            )

    def test_write_points(self):
        """Test write points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/series"
            )
            cli = InfluxDBClient(database='db')

            cli.write_points(
                self.dummy_points
            )

            self.assertListEqual(
                json.loads(m.last_request.body),
                self.dummy_points
            )

    def test_write_points_string(self):
        """Test write string points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/series"
            )
            cli = InfluxDBClient(database='db')

            # Pre-serialized JSON strings are accepted as-is.
            cli.write_points(
                str(json.dumps(self.dummy_points))
            )

            self.assertListEqual(
                json.loads(m.last_request.body),
                self.dummy_points
            )
    def test_write_points_batch(self):
        """Test write batch points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")
            cli = InfluxDBClient('localhost', 8086,
                                 'username', 'password', 'db')
            # Two points with batch_size=2 fit into a single request.
            cli.write_points(data=self.dummy_points, batch_size=2)
        self.assertEqual(1, m.call_count)

    def test_write_points_batch_invalid_size(self):
        """Test write batch points invalid size for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")
            cli = InfluxDBClient('localhost', 8086,
                                 'username', 'password', 'db')
            # A non-positive batch_size disables batching: one request.
            cli.write_points(data=self.dummy_points, batch_size=-2)
        self.assertEqual(1, m.call_count)

    def test_write_points_batch_multiple_series(self):
        """Test write points batch multiple series."""
        dummy_points = [
            {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
                        ["4", 4, 4.0], ["5", 5, 5.0]],
             "name": "foo",
             "columns": ["val1", "val2", "val3"]},
            {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
                        ["4", 4, 4.0], ["5", 5, 5.0], ["6", 6, 6.0],
                        ["7", 7, 7.0], ["8", 8, 8.0]],
             "name": "bar",
             "columns": ["val1", "val2", "val3"]},
        ]
        # 13 points at batch_size=3 across two series -> 5 requests; the
        # final batch holds the leftover two points of series "bar".
        expected_last_body = [{'points': [['7', 7, 7.0], ['8', 8, 8.0]],
                               'name': 'bar',
                               'columns': ['val1', 'val2', 'val3']}]
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")
            cli = InfluxDBClient('localhost', 8086,
                                 'username', 'password', 'db')
            cli.write_points(data=dummy_points, batch_size=3)
        self.assertEqual(m.call_count, 5)
        self.assertEqual(expected_last_body, m.request_history[4].json())
def test_write_points_udp(self):
"""Test write points UDP for TestInfluxDBClient object."""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
port = random.randint(4000, 8000)
s.bind(('0.0.0.0', port))
cli = InfluxDBClient(
'localhost', 8086, 'root', 'root',
'test', use_udp=True, udp_port=port
)
cli.write_points(self.dummy_points)
received_data, addr = s.recvfrom(1024)
self.assertEqual(self.dummy_points,
json.loads(received_data.decode(), strict=True))
    def test_write_bad_precision_udp(self):
        """Test write UDP w/bad precision."""
        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=4444
        )

        # UDP writes only support seconds precision; 'ms' must raise.
        with self.assertRaisesRegexp(
                Exception,
                "InfluxDB only supports seconds precision for udp writes"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='ms'
            )

    @raises(Exception)
    def test_write_points_fails(self):
        """Test failed write points for TestInfluxDBClient object."""
        # A 500 from the server must surface as an exception.
        with _mocked_session('post', 500):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.write_points([])

    def test_write_points_with_precision(self):
        """Test write points with precision."""
        with _mocked_session('post', 200, self.dummy_points):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            self.assertTrue(cli.write_points(self.dummy_points))
def test_write_points_bad_precision(self):
"""Test write points with bad precision."""
cli = InfluxDBClient()
with self.assertRaisesRegexp(
Exception,
"Invalid time precision is given. \(use 's', 'm', 'ms' or 'u'\)"
):
cli.write_points(
self.dummy_points,
time_precision='g'
)
@raises(Exception)
def test_write_points_with_precision_fails(self):
"""Test write points where precision fails."""
with _mocked_session('post', 500):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
cli.write_points_with_precision([])
def test_delete_points(self):
    """Test delete points for TestInfluxDBClient object."""
    with _mocked_session('delete', 204) as mocked:
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        self.assertTrue(cli.delete_points("foo"))

        # Exactly one DELETE request should have been issued.
        self.assertEqual(len(mocked.call_args_list), 1)
        args, kwds = mocked.call_args_list[0]

        # In the 0.8 API credentials travel as 'u'/'p' query parameters.
        self.assertEqual(kwds['params'],
                         {'u': 'username', 'p': 'password'})
        self.assertEqual(kwds['url'], 'http://host:8086/db/db/series/foo')

@raises(Exception)
def test_delete_points_with_wrong_name(self):
    """Test delete points with wrong name."""
    # Deleting an unknown series yields a 400, which must raise.
    with _mocked_session('delete', 400):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.delete_points("nonexist")
# Scheduled deletes were never implemented in the 0.8 client; each of
# these entry points must raise NotImplementedError.

@raises(NotImplementedError)
def test_create_scheduled_delete(self):
    """Test create scheduled deletes."""
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    cli.create_scheduled_delete([])

@raises(NotImplementedError)
def test_get_list_scheduled_delete(self):
    """Test get schedule list of deletes TestInfluxDBClient."""
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    cli.get_list_scheduled_delete()

@raises(NotImplementedError)
def test_remove_scheduled_delete(self):
    """Test remove scheduled delete TestInfluxDBClient."""
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    cli.remove_scheduled_delete(1)
def test_query(self):
    """Query a single series and verify the number of returned points."""
    canned_response = [{
        "name": "foo",
        "columns": ["time", "sequence_number", "column_one"],
        "points": [
            [1383876043, 16, "2"],
            [1383876043, 15, "1"],
            [1383876035, 14, "2"],
            [1383876035, 13, "1"],
        ],
    }]
    # The mocked session returns the canned payload for any GET.
    with _mocked_session('get', 200, canned_response):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        series = cli.query('select column_one from foo;')
        self.assertEqual(4, len(series[0]['points']))
def test_query_chunked(self):
    """Test chunked query for TestInfluxDBClient object."""
    cli = InfluxDBClient(database='db')
    example_object = {
        'points': [
            [1415206250119, 40001, 667],
            [1415206244555, 30001, 7],
            [1415206228241, 20001, 788],
            [1415206212980, 10001, 555],
            [1415197271586, 10001, 23]
        ],
        'name': 'foo',
        'columns': [
            'time',
            'sequence_number',
            'val'
        ]
    }
    # A chunked response body is a stream of concatenated JSON documents
    # (not a JSON array); the client must split and parse each chunk.
    example_response = \
        json.dumps(example_object) + json.dumps(example_object)

    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=example_response
        )

        self.assertListEqual(
            cli.query('select * from foo', chunked=True),
            [example_object, example_object]
        )
def test_query_chunked_unicode(self):
    """Test unicode chunked query for TestInfluxDBClient object."""
    cli = InfluxDBClient(database='db')
    # Same as test_query_chunked, but the values contain non-ASCII
    # characters to exercise decoding of chunked payloads.
    example_object = {
        'points': [
            [1415206212980, 10001, u('unicode-\xcf\x89')],
            [1415197271586, 10001, u('more-unicode-\xcf\x90')]
        ],
        'name': 'foo',
        'columns': [
            'time',
            'sequence_number',
            'val'
        ]
    }
    example_response = \
        json.dumps(example_object) + json.dumps(example_object)

    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=example_response
        )

        self.assertListEqual(
            cli.query('select * from foo', chunked=True),
            [example_object, example_object]
        )
@raises(Exception)
def test_query_fail(self):
    """Test failed query for TestInfluxDBClient."""
    # 401 Unauthorized must propagate as an exception.
    with _mocked_session('get', 401):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.query('select column_one from foo;')
def test_query_bad_precision(self):
    """Test query with bad precision for TestInfluxDBClient."""
    cli = InfluxDBClient()
    # Raw string: \( and \) are regex escapes; in a plain literal they
    # are invalid escape sequences (DeprecationWarning on Python 3.6+).
    # The string value itself is unchanged.
    with self.assertRaisesRegexp(
        Exception,
        r"Invalid time precision is given. \(use 's', 'm', 'ms' or 'u'\)"
    ):
        cli.query('select column_one from foo', time_precision='g')
def test_create_database(self):
    """Test create database for TestInfluxDBClient."""
    # 201 Created -> create_database returns True.
    with _mocked_session('post', 201, {"name": "new_db"}):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        self.assertTrue(cli.create_database('new_db'))

@raises(Exception)
def test_create_database_fails(self):
    """Test failed create database for TestInfluxDBClient."""
    with _mocked_session('post', 401):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.create_database('new_db')

def test_delete_database(self):
    """Test delete database for TestInfluxDBClient."""
    # 204 No Content -> delete_database returns True.
    with _mocked_session('delete', 204):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        self.assertTrue(cli.delete_database('old_db'))

@raises(Exception)
def test_delete_database_fails(self):
    """Test failed delete database for TestInfluxDBClient."""
    with _mocked_session('delete', 401):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.delete_database('old_db')
def test_get_list_database(self):
    """Test get list of databases for TestInfluxDBClient."""
    data = [
        {"name": "a_db"}
    ]
    with _mocked_session('get', 200, data):
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        self.assertEqual(len(cli.get_list_database()), 1)
        self.assertEqual(cli.get_list_database()[0]['name'], 'a_db')

@raises(Exception)
def test_get_list_database_fails(self):
    """Test failed get list of databases for TestInfluxDBClient."""
    with _mocked_session('get', 401):
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        cli.get_list_database()

@raises(FutureWarning)
def test_get_database_list_deprecated(self):
    """Test deprecated get database list for TestInfluxDBClient."""
    # NOTE(review): relies on FutureWarning being promoted to an error
    # (warnings filter presumably set in setUp, not visible here) so the
    # deprecated alias raises — confirm in the test fixture.
    data = [
        {"name": "a_db"}
    ]
    with _mocked_session('get', 200, data):
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        self.assertEqual(len(cli.get_database_list()), 1)
        self.assertEqual(cli.get_database_list()[0]['name'], 'a_db')
def test_delete_series(self):
    """Test delete series for TestInfluxDBClient."""
    with _mocked_session('delete', 204):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.delete_series('old_series')

@raises(Exception)
def test_delete_series_fails(self):
    """Test failed delete series for TestInfluxDBClient."""
    with _mocked_session('delete', 401):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.delete_series('old_series')

def test_get_series_list(self):
    """Test get list of series for TestInfluxDBClient."""
    cli = InfluxDBClient(database='db')

    with requests_mock.Mocker() as m:
        # The server reports series as rows of a synthetic
        # "list_series_result" series; the client extracts the names.
        example_response = \
            '[{"name":"list_series_result","columns":' \
            '["time","name"],"points":[[0,"foo"],[0,"bar"]]}]'

        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=example_response
        )

        self.assertListEqual(
            cli.get_list_series(),
            ['foo', 'bar']
        )
def test_get_continuous_queries(self):
    """Test get continuous queries for TestInfluxDBClient."""
    cli = InfluxDBClient(database='db')

    with requests_mock.Mocker() as m:
        # Tip: put this in a json linter!
        # The response rows are (time, id, query); only the query text
        # is expected back from get_list_continuous_queries().
        example_response = '[ { "name": "continuous queries", "columns"' \
                           ': [ "time", "id", "query" ], "points": [ [ ' \
                           '0, 1, "select foo(bar,95) from \\"foo_bar' \
                           's\\" group by time(5m) into response_times.' \
                           'percentiles.5m.95" ], [ 0, 2, "select perce' \
                           'ntile(value,95) from \\"response_times\\" g' \
                           'roup by time(5m) into response_times.percen' \
                           'tiles.5m.95" ] ] } ]'

        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=example_response
        )

        self.assertListEqual(
            cli.get_list_continuous_queries(),
            [
                'select foo(bar,95) from "foo_bars" group '
                'by time(5m) into response_times.percentiles.5m.95',
                'select percentile(value,95) from "response_times" group '
                'by time(5m) into response_times.percentiles.5m.95'
            ]
        )
def test_get_list_cluster_admins(self):
    """Test get list of cluster admins, not implemented."""
    pass

def test_add_cluster_admin(self):
    """Test add cluster admin for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/cluster_admins"
        )

        cli = InfluxDBClient(database='db')
        cli.add_cluster_admin(
            new_username='paul',
            new_password='laup'
        )

        # The request body carries the new credentials verbatim.
        self.assertDictEqual(
            json.loads(m.last_request.body),
            {
                'name': 'paul',
                'password': 'laup'
            }
        )

def test_update_cluster_admin_password(self):
    """Test update cluster admin pass for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/cluster_admins/paul"
        )

        cli = InfluxDBClient(database='db')
        cli.update_cluster_admin_password(
            username='paul',
            new_password='laup'
        )

        self.assertDictEqual(
            json.loads(m.last_request.body),
            {'password': 'laup'}
        )

def test_delete_cluster_admin(self):
    """Test delete cluster admin for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.DELETE,
            "http://localhost:8086/cluster_admins/paul",
            status_code=200,
        )

        cli = InfluxDBClient(database='db')
        cli.delete_cluster_admin(username='paul')

        # DELETE must be sent without a request body.
        self.assertIsNone(m.last_request.body)
def test_set_database_admin(self):
    """Test set database admin for TestInfluxDBClient."""
    pass

def test_unset_database_admin(self):
    """Test unset database admin for TestInfluxDBClient."""
    pass

def test_alter_database_admin(self):
    """Test alter database admin for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users/paul"
        )

        cli = InfluxDBClient(database='db')
        cli.alter_database_admin(
            username='paul',
            is_admin=False
        )

        # Only the admin flag should be present in the body.
        self.assertDictEqual(
            json.loads(m.last_request.body),
            {
                'admin': False
            }
        )
# Database-admin management is not implemented in the 0.8 client; each
# of these calls must raise NotImplementedError.

@raises(NotImplementedError)
def test_get_list_database_admins(self):
    """Test get list of database admins for TestInfluxDBClient."""
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    cli.get_list_database_admins()

@raises(NotImplementedError)
def test_add_database_admin(self):
    """Test add database admins for TestInfluxDBClient."""
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    cli.add_database_admin('admin', 'admin_secret_password')

@raises(NotImplementedError)
def test_update_database_admin_password(self):
    """Test update database admin pass for TestInfluxDBClient."""
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    cli.update_database_admin_password('admin', 'admin_secret_password')

@raises(NotImplementedError)
def test_delete_database_admin(self):
    """Test delete database admin for TestInfluxDBClient."""
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    cli.delete_database_admin('admin')
def test_get_database_users(self):
    """Test get database users for TestInfluxDBClient."""
    cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db')

    example_response = \
        '[{"name":"paul","isAdmin":false,"writeTo":".*","readFrom":".*"},'\
        '{"name":"bobby","isAdmin":false,"writeTo":".*","readFrom":".*"}]'

    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/users",
            text=example_response
        )
        users = cli.get_database_users()

        # The client returns the parsed JSON unmodified.
        self.assertEqual(json.loads(example_response), users)
def test_add_database_user(self):
    """Test add database user for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users"
        )
        cli = InfluxDBClient(database='db')

        # permissions is a (readFrom, writeTo) regex tuple; it expands
        # into separate keys in the request body.
        cli.add_database_user(
            new_username='paul',
            new_password='laup',
            permissions=('.*', '.*')
        )

        self.assertDictEqual(
            json.loads(m.last_request.body),
            {
                'writeTo': '.*',
                'password': 'laup',
                'readFrom': '.*',
                'name': 'paul'
            }
        )
def test_add_database_user_bad_permissions(self):
    """Test add database user with bad perms for TestInfluxDBClient."""
    cli = InfluxDBClient()
    # Raw string: \( and \) are regex escapes; in a plain literal they
    # are invalid escape sequences (DeprecationWarning on Python 3.6+).
    # The string value itself is unchanged.
    with self.assertRaisesRegexp(
        Exception,
        r"'permissions' must be \(readFrom, writeTo\) tuple"
    ):
        cli.add_database_user(
            new_password='paul',
            new_username='paul',
            permissions=('hello', 'hello', 'hello')
        )
# alter_database_user sends only the fields that are provided: password
# alone, permissions alone, or both together.

def test_alter_database_user_password(self):
    """Test alter database user pass for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users/paul"
        )

        cli = InfluxDBClient(database='db')
        cli.alter_database_user(
            username='paul',
            password='n3wp4ss!'
        )

        self.assertDictEqual(
            json.loads(m.last_request.body),
            {
                'password': 'n3wp4ss!'
            }
        )

def test_alter_database_user_permissions(self):
    """Test alter database user perms for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users/paul"
        )

        cli = InfluxDBClient(database='db')
        cli.alter_database_user(
            username='paul',
            permissions=('^$', '.*')
        )

        self.assertDictEqual(
            json.loads(m.last_request.body),
            {
                'readFrom': '^$',
                'writeTo': '.*'
            }
        )

def test_alter_database_user_password_and_permissions(self):
    """Test alter database user pass and perms for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users/paul"
        )

        cli = InfluxDBClient(database='db')
        cli.alter_database_user(
            username='paul',
            password='n3wp4ss!',
            permissions=('^$', '.*')
        )

        self.assertDictEqual(
            json.loads(m.last_request.body),
            {
                'password': 'n3wp4ss!',
                'readFrom': '^$',
                'writeTo': '.*'
            }
        )
def test_update_database_user_password_current_user(self):
    """Test update database user pass for TestInfluxDBClient."""
    cli = InfluxDBClient(
        username='root',
        password='hello',
        database='database'
    )

    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/database/users/root"
        )

        cli.update_database_user_password(
            username='root',
            new_password='bye'
        )

        # When changing its own user's password, the client must also
        # update the credentials it stores for subsequent requests.
        self.assertEqual(cli._password, 'bye')

def test_delete_database_user(self):
    """Test delete database user for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.DELETE,
            "http://localhost:8086/db/db/users/paul"
        )

        cli = InfluxDBClient(database='db')
        cli.delete_database_user(username='paul')

        # DELETE must be sent without a request body.
        self.assertIsNone(m.last_request.body)
@raises(NotImplementedError)
def test_update_permission(self):
    """Test update permission for TestInfluxDBClient."""
    # Not supported by the 0.8 client; must raise NotImplementedError.
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    cli.update_permission('admin', [])
@mock.patch('requests.Session.request')
def test_request_retry(self, mock_request):
    """Test that two connection errors will be handled."""
    class CustomMock(object):
        """Define CustomMock object."""

        def __init__(self):
            """Initialize the object."""
            self.i = 0

        def connection_error(self, *args, **kwargs):
            """Test connection error in CustomMock."""
            self.i += 1

            # Fail the first two attempts, succeed on the third; the
            # client's retry logic should absorb the failures silently.
            if self.i < 3:
                raise requests.exceptions.ConnectionError
            else:
                r = requests.Response()
                r.status_code = 200
                return r

    mock_request.side_effect = CustomMock().connection_error

    cli = InfluxDBClient(database='db')
    # Must not raise: two ConnectionErrors are within the retry budget.
    cli.write_points(
        self.dummy_points
    )
@mock.patch('requests.Session.request')
def test_request_retry_raises(self, mock_request):
    """Test that three connection errors will not be handled."""
    class CustomMock(object):
        """Define CustomMock object."""

        def __init__(self):
            """Initialize the object."""
            self.i = 0

        def connection_error(self, *args, **kwargs):
            """Test the connection error for CustomMock."""
            self.i += 1

            # Fail the first three attempts — one more than the client
            # is willing to retry — so the error must propagate.
            if self.i < 4:
                raise requests.exceptions.ConnectionError
            else:
                r = requests.Response()
                r.status_code = 200
                return r

    mock_request.side_effect = CustomMock().connection_error

    cli = InfluxDBClient(database='db')

    with self.assertRaises(requests.exceptions.ConnectionError):
        cli.write_points(self.dummy_points)

View file

@ -0,0 +1,331 @@
# -*- coding: utf-8 -*-
"""Unit tests for misc module."""
from datetime import timedelta
import copy
import json
import unittest
import warnings
import requests_mock
from nose.tools import raises
from influxdb.tests import skipIfPYpy, using_pypy
from .client_test import _mocked_session
if not using_pypy:
import pandas as pd
from pandas.util.testing import assert_frame_equal
from influxdb.influxdb08 import DataFrameClient
@skipIfPYpy
class TestDataFrameClient(unittest.TestCase):
    """Define the DataFrameClient test object (InfluxDB 0.8 protocol)."""

    def setUp(self):
        """Set up an instance of TestDataFrameClient object."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

    def test_write_points_from_dataframe(self):
        """Test write points from dataframe."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # Expected JSON body: the datetime index is serialized as an
        # epoch-seconds "time" column appended after the data columns.
        points = [
            {
                "points": [
                    ["1", 1, 1.0, 0],
                    ["2", 2, 2.0, 3600]
                ],
                "name": "foo",
                "columns": ["column_one", "column_two", "column_three",
                            "time"]
            }
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")

            cli = DataFrameClient(database='db')
            cli.write_points({"foo": dataframe})

            self.assertListEqual(json.loads(m.last_request.body), points)

    def test_write_points_from_dataframe_with_float_nan(self):
        """Test write points from dataframe with NaN float."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[[1, float("NaN"), 1.0], [2, 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # NaN must be serialized as JSON null, not the string "NaN".
        points = [
            {
                "points": [
                    [1, None, 1.0, 0],
                    [2, 2, 2.0, 3600]
                ],
                "name": "foo",
                "columns": ["column_one", "column_two", "column_three",
                            "time"]
            }
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")

            cli = DataFrameClient(database='db')
            cli.write_points({"foo": dataframe})

            self.assertListEqual(json.loads(m.last_request.body), points)

    def test_write_points_from_dataframe_in_batches(self):
        """Test write points from dataframe in batches."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")

            cli = DataFrameClient(database='db')
            # batch_size=1 forces one POST per row; success returns True.
            self.assertTrue(cli.write_points({"foo": dataframe},
                                             batch_size=1))

    def test_write_points_from_dataframe_with_numeric_column_names(self):
        """Test write points from dataframe with numeric columns."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        # df with numeric column names
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)])
        # Numeric column labels are stringified in the JSON body.
        points = [
            {
                "points": [
                    ["1", 1, 1.0, 0],
                    ["2", 2, 2.0, 3600]
                ],
                "name": "foo",
                "columns": ['0', '1', '2', "time"]
            }
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")

            cli = DataFrameClient(database='db')
            cli.write_points({"foo": dataframe})

            self.assertListEqual(json.loads(m.last_request.body), points)

    def test_write_points_from_dataframe_with_period_index(self):
        """Test write points from dataframe with period index."""
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[pd.Period('1970-01-01'),
                                        pd.Period('1970-01-02')],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        # Daily periods convert to the epoch seconds of each day's start.
        points = [
            {
                "points": [
                    ["1", 1, 1.0, 0],
                    ["2", 2, 2.0, 86400]
                ],
                "name": "foo",
                "columns": ["column_one", "column_two", "column_three",
                            "time"]
            }
        ]

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")

            cli = DataFrameClient(database='db')
            cli.write_points({"foo": dataframe})

            self.assertListEqual(json.loads(m.last_request.body), points)

    def test_write_points_from_dataframe_with_time_precision(self):
        """Test write points from dataframe with time precision."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 index=[now, now + timedelta(hours=1)],
                                 columns=["column_one", "column_two",
                                          "column_three"])
        points = [
            {
                "points": [
                    ["1", 1, 1.0, 0],
                    ["2", 2, 2.0, 3600]
                ],
                "name": "foo",
                "columns": ["column_one", "column_two", "column_three",
                            "time"]
            }
        ]

        # Same payload at millisecond ('m') and microsecond ('u')
        # precision: only the scale of the "time" values changes.
        points_ms = copy.deepcopy(points)
        points_ms[0]["points"][1][-1] = 3600 * 1000

        points_us = copy.deepcopy(points)
        points_us[0]["points"][1][-1] = 3600 * 1000000

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")

            cli = DataFrameClient(database='db')

            cli.write_points({"foo": dataframe}, time_precision='s')
            self.assertListEqual(json.loads(m.last_request.body), points)

            cli.write_points({"foo": dataframe}, time_precision='m')
            self.assertListEqual(json.loads(m.last_request.body), points_ms)

            cli.write_points({"foo": dataframe}, time_precision='u')
            self.assertListEqual(json.loads(m.last_request.body), points_us)

    @raises(TypeError)
    def test_write_points_from_dataframe_fails_without_time_index(self):
        """Test write points from dataframe that fails without time index."""
        # A default RangeIndex is not a time index, so write_points
        # must raise TypeError.
        dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
                                 columns=["column_one", "column_two",
                                          "column_three"])

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")

            cli = DataFrameClient(database='db')
            cli.write_points({"foo": dataframe})

    @raises(TypeError)
    def test_write_points_from_dataframe_fails_with_series(self):
        """Test failed write points from dataframe with series."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        # A Series (not a DataFrame) must be rejected with TypeError.
        dataframe = pd.Series(data=[1.0, 2.0],
                              index=[now, now + timedelta(hours=1)])

        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")

            cli = DataFrameClient(database='db')
            cli.write_points({"foo": dataframe})

    def test_query_into_dataframe(self):
        """Test query into a dataframe."""
        data = [
            {
                "name": "foo",
                "columns": ["time", "sequence_number", "column_one"],
                "points": [
                    [3600, 16, 2], [3600, 15, 1],
                    [0, 14, 2], [0, 13, 1]
                ]
            }
        ]
        # dataframe sorted ascending by time first, then sequence_number
        dataframe = pd.DataFrame(data=[[13, 1], [14, 2], [15, 1], [16, 2]],
                                 index=pd.to_datetime([0, 0,
                                                      3600, 3600],
                                                      unit='s', utc=True),
                                 columns=['sequence_number', 'column_one'])
        with _mocked_session('get', 200, data):
            cli = DataFrameClient('host', 8086, 'username', 'password',
                                  'db')
            result = cli.query('select column_one from foo;')
            assert_frame_equal(dataframe, result)

    def test_query_multiple_time_series(self):
        """Test query for multiple time series."""
        data = [
            {
                "name": "series1",
                "columns": ["time", "mean", "min", "max", "stddev"],
                "points": [[0, 323048, 323048, 323048, 0]]
            },
            {
                "name": "series2",
                "columns": ["time", "mean", "min", "max", "stddev"],
                "points": [[0, -2.8233, -2.8503, -2.7832, 0.0173]]
            },
            {
                "name": "series3",
                "columns": ["time", "mean", "min", "max", "stddev"],
                "points": [[0, -0.01220, -0.01220, -0.01220, 0]]
            }
        ]
        # Multi-series responses come back as a dict keyed by series name.
        dataframes = {
            'series1': pd.DataFrame(data=[[323048, 323048, 323048, 0]],
                                    index=pd.to_datetime([0], unit='s',
                                                         utc=True),
                                    columns=['mean', 'min', 'max',
                                             'stddev']),
            'series2': pd.DataFrame(data=[[-2.8233, -2.8503, -2.7832,
                                           0.0173]],
                                    index=pd.to_datetime([0], unit='s',
                                                         utc=True),
                                    columns=['mean', 'min', 'max',
                                             'stddev']),
            'series3': pd.DataFrame(data=[[-0.01220, -0.01220, -0.01220,
                                           0]],
                                    index=pd.to_datetime([0], unit='s',
                                                         utc=True),
                                    columns=['mean', 'min', 'max',
                                             'stddev'])
        }
        with _mocked_session('get', 200, data):
            cli = DataFrameClient('host', 8086, 'username', 'password',
                                  'db')
            result = cli.query("""select mean(value), min(value), max(value),
                stddev(value) from series1, series2, series3""")
            self.assertEqual(dataframes.keys(), result.keys())
            for key in dataframes.keys():
                assert_frame_equal(dataframes[key], result[key])

    def test_query_with_empty_result(self):
        """Test query with empty results."""
        with _mocked_session('get', 200, []):
            cli = DataFrameClient('host', 8086, 'username', 'password',
                                  'db')
            result = cli.query('select column_one from foo;')
            self.assertEqual(result, [])

    def test_list_series(self):
        """Test list of series for dataframe object."""
        response = [
            {
                'columns': ['time', 'name'],
                'name': 'list_series_result',
                'points': [[0, 'seriesA'], [0, 'seriesB']]
            }
        ]
        with _mocked_session('get', 200, response):
            cli = DataFrameClient('host', 8086, 'username', 'password',
                                  'db')
            series_list = cli.get_list_series()
            self.assertEqual(series_list, ['seriesA', 'seriesB'])

    def test_datetime_to_epoch(self):
        """Test convert datetime to epoch."""
        timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
        cli = DataFrameClient('host', 8086, 'username', 'password', 'db')

        # In the 0.8 API 'm' means milliseconds (not minutes); each finer
        # precision multiplies the epoch value by 1000.
        self.assertEqual(
            cli._datetime_to_epoch(timestamp),
            1356998400.0
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='s'),
            1356998400.0
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='m'),
            1356998400000.0
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='ms'),
            1356998400000.0
        )
        self.assertEqual(
            cli._datetime_to_epoch(timestamp, time_precision='u'),
            1356998400000000.0
        )

View file

@ -0,0 +1,228 @@
# -*- coding: utf-8 -*-
"""Define set of helper functions for the dataframe."""
import unittest
import warnings
import mock
from influxdb.influxdb08 import SeriesHelper, InfluxDBClient
from requests.exceptions import ConnectionError
class TestSeriesHelper(unittest.TestCase):
    """Define the SeriesHelper for test."""

    @classmethod
    def setUpClass(cls):
        """Set up an instance of the TestSeriesHelper object."""
        super(TestSeriesHelper, cls).setUpClass()

        TestSeriesHelper.client = InfluxDBClient(
            'host',
            8086,
            'username',
            'password',
            'database'
        )

        class MySeriesHelper(SeriesHelper):
            """Define a subset SeriesHelper instance."""

            class Meta:
                """Define metadata for the TestSeriesHelper object."""

                client = TestSeriesHelper.client
                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                bulk_size = 5
                autocommit = True

        TestSeriesHelper.MySeriesHelper = MySeriesHelper

    def test_auto_commit(self):
        """Test that write_points called after the right number of events."""
        class AutoCommitTest(SeriesHelper):
            """Define an instance of SeriesHelper for AutoCommit test."""

            class Meta:
                """Define metadata AutoCommitTest object."""

                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                bulk_size = 5
                client = InfluxDBClient()
                autocommit = True

        fake_write_points = mock.MagicMock()
        # One datapoint is created before write_points is replaced, so
        # the mock only observes the flush on reaching bulk_size (5).
        AutoCommitTest(server_name='us.east-1', time=159)
        AutoCommitTest._client.write_points = fake_write_points
        AutoCommitTest(server_name='us.east-1', time=158)
        AutoCommitTest(server_name='us.east-1', time=157)
        AutoCommitTest(server_name='us.east-1', time=156)
        self.assertFalse(fake_write_points.called)
        AutoCommitTest(server_name='us.east-1', time=3443)
        self.assertTrue(fake_write_points.called)

    def testSingleSeriesName(self):
        """Test JSON conversion when there is only one series name."""
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158)
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157)
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=156)
        expectation = [{'points': [[159, 'us.east-1'],
                                   [158, 'us.east-1'],
                                   [157, 'us.east-1'],
                                   [156, 'us.east-1']],
                        'name': 'events.stats.us.east-1',
                        'columns': ['time', 'server_name']}]

        # Compare as unordered sets since _json_body_ gives no ordering
        # guarantee across series.
        rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
        self.assertTrue(all([el in expectation for el in rcvd]) and
                        all([el in rcvd for el in expectation]),
                        'Invalid JSON body of time series returned from '
                        '_json_body_ for one series name: {0}.'.format(rcvd))
        TestSeriesHelper.MySeriesHelper._reset_()
        self.assertEqual(
            TestSeriesHelper.MySeriesHelper._json_body_(),
            [],
            'Resetting helper did not empty datapoints.')

    def testSeveralSeriesNames(self):
        """Test JSON conversion when there are several series names."""
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
        TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158)
        TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157)
        TestSeriesHelper.MySeriesHelper(server_name='uk.london', time=156)
        expectation = [{'points': [[157, 'lu.lux']],
                        'name': 'events.stats.lu.lux',
                        'columns': ['time', 'server_name']},
                       {'points': [[156, 'uk.london']],
                        'name': 'events.stats.uk.london',
                        'columns': ['time', 'server_name']},
                       {'points': [[158, 'fr.paris-10']],
                        'name': 'events.stats.fr.paris-10',
                        'columns': ['time', 'server_name']},
                       {'points': [[159, 'us.east-1']],
                        'name': 'events.stats.us.east-1',
                        'columns': ['time', 'server_name']}]

        rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
        self.assertTrue(all([el in expectation for el in rcvd]) and
                        all([el in rcvd for el in expectation]),
                        'Invalid JSON body of time series returned from '
                        '_json_body_ for several series names: {0}.'
                        .format(rcvd))
        TestSeriesHelper.MySeriesHelper._reset_()
        self.assertEqual(
            TestSeriesHelper.MySeriesHelper._json_body_(),
            [],
            'Resetting helper did not empty datapoints.')

    def testInvalidHelpers(self):
        """Test errors in invalid helpers."""
        class MissingMeta(SeriesHelper):
            """Define SeriesHelper object for MissingMeta test."""

            pass

        class MissingClient(SeriesHelper):
            """Define SeriesHelper object for MissingClient test."""

            class Meta:
                """Define metadata for MissingClient object."""

                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                autocommit = True

        class MissingSeriesName(SeriesHelper):
            """Define SeriesHelper object for MissingSeries test."""

            class Meta:
                """Define metadata for MissingSeriesName object."""

                fields = ['time', 'server_name']

        class MissingFields(SeriesHelper):
            """Define SeriesHelper for MissingFields test."""

            class Meta:
                """Define metadata for MissingFields object."""

                series_name = 'events.stats.{server_name}'

        # Each incomplete helper must raise AttributeError on first use.
        for cls in [MissingMeta, MissingClient, MissingFields,
                    MissingSeriesName]:
            self.assertRaises(
                AttributeError, cls, **{'time': 159,
                                        'server_name': 'us.east-1'})

    def testWarnBulkSizeZero(self):
        """Test warning for an invalid bulk size."""
        class WarnBulkSizeZero(SeriesHelper):
            """Define SeriesHelper for WarnBulkSizeZero test."""

            class Meta:
                """Define metadata for WarnBulkSizeZero object."""

                client = TestSeriesHelper.client
                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                bulk_size = 0
                autocommit = True

        with warnings.catch_warnings(record=True) as rec_warnings:
            warnings.simplefilter("always")
            # Server defined in the client is invalid, we're testing
            # the warning only.
            with self.assertRaises(ConnectionError):
                WarnBulkSizeZero(time=159, server_name='us.east-1')

        self.assertGreaterEqual(
            len(rec_warnings), 1,
            '{0} call should have generated one warning.'
            'Actual generated warnings: {1}'.format(
                WarnBulkSizeZero, '\n'.join(map(str, rec_warnings))))

        expected_msg = (
            'Definition of bulk_size in WarnBulkSizeZero forced to 1, '
            'was less than 1.')

        self.assertIn(expected_msg, list(w.message.args[0]
                                         for w in rec_warnings),
                      'Warning message did not contain "forced to 1".')

    def testWarnBulkSizeNoEffect(self):
        """Test warning for a set bulk size but autocommit False."""
        class WarnBulkSizeNoEffect(SeriesHelper):
            """Define SeriesHelper for WarnBulkSizeNoEffect object."""

            class Meta:
                """Define metadata for WarnBulkSizeNoEffect object."""

                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                bulk_size = 5
                autocommit = False

        with warnings.catch_warnings(record=True) as rec_warnings:
            warnings.simplefilter("always")
            WarnBulkSizeNoEffect(time=159, server_name='us.east-1')

        self.assertGreaterEqual(
            len(rec_warnings), 1,
            '{0} call should have generated one warning.'
            'Actual generated warnings: {1}'.format(
                WarnBulkSizeNoEffect, '\n'.join(map(str, rec_warnings))))

        expected_msg = (
            'Definition of bulk_size in WarnBulkSizeNoEffect has no affect '
            'because autocommit is false.')

        self.assertIn(expected_msg, list(w.message.args[0]
                                         for w in rec_warnings),
                      'Warning message did not contain the expected_msg.')
# Allow running this test module directly (outside the test runner).
if __name__ == '__main__':
    unittest.main()

View file

@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-
"""Define the misc handler for InfluxDBClient test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import socket
def get_free_ports(num_ports, ip='127.0.0.1'):
    """Determine free ports on provided interface.

    Get `num_ports` free/available ports on the interface linked to the `ip`
    :param int num_ports: The number of free ports to get
    :param str ip: The ip on which the ports have to be taken
    :return: a set of ports number
    """
    bound = []
    free_ports = set()
    try:
        for _ in range(num_ports):
            candidate = socket.socket()
            # Record the socket immediately (port filled in after bind)
            # so the finally block always closes it — no leaked handles
            # even if bind fails part-way through.
            entry = [candidate, -1]
            bound.append(entry)
            candidate.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # Port 0 asks the OS to pick a currently-free port.
            candidate.bind((ip, 0))
            entry[1] = candidate.getsockname()[1]
    finally:
        for open_sock, port_number in bound:
            open_sock.close()
            free_ports.add(port_number)
    # Ports are distinct by construction; a collision means a bug.
    assert num_ports == len(free_ports)
    return free_ports
def is_port_open(port, ip='127.0.0.1'):
    """Check if given TCP port is open for connection."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # connect_ex returns 0 on success instead of raising.
        err = probe.connect_ex((ip, port))
        if err == 0:
            # Politely tear the established connection down before close.
            probe.shutdown(socket.SHUT_RDWR)
            return True
        return False
    finally:
        probe.close()

View file

@ -0,0 +1,202 @@
# -*- coding: utf-8 -*-
"""Define the resultset test package."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
from influxdb.exceptions import InfluxDBClientError
from influxdb.resultset import ResultSet
class TestResultSet(unittest.TestCase):
    """Define the ResultSet test object."""

    def setUp(self):
        """Set up an instance of TestResultSet."""
        # Canonical /query JSON payload: a single statement result that
        # contains two series, 'cpu_load_short' and 'other_series'.
        self.query_response = {
            "results": [
                {"series": [{"name": "cpu_load_short",
                             "columns": ["time", "value", "host", "region"],
                             "values": [
                                 ["2015-01-29T21:51:28.968422294Z",
                                  0.64,
                                  "server01",
                                  "us-west"],
                                 ["2015-01-29T21:51:28.968422294Z",
                                  0.65,
                                  "server02",
                                  "us-west"],
                             ]},
                            {"name": "other_series",
                             "columns": ["time", "value", "host", "region"],
                             "values": [
                                 ["2015-01-29T21:51:28.968422294Z",
                                  0.66,
                                  "server01",
                                  "us-west"],
                             ]}]}
            ]
        }
        # ResultSet wraps one statement result (the first and only one).
        self.rs = ResultSet(self.query_response['results'][0])

    def test_filter_by_name(self):
        """Test filtering by name in TestResultSet object."""
        expected = [
            {'value': 0.64,
             'time': '2015-01-29T21:51:28.968422294Z',
             'host': 'server01',
             'region': 'us-west'},
            {'value': 0.65,
             'time': '2015-01-29T21:51:28.968422294Z',
             'host': 'server02',
             'region': 'us-west'},
        ]
        # Item access by measurement name and get_points(measurement=...)
        # must be equivalent.
        self.assertEqual(expected, list(self.rs['cpu_load_short']))
        self.assertEqual(expected,
                         list(self.rs.get_points(
                             measurement='cpu_load_short')))

    def test_filter_by_tags(self):
        """Test filter by tags in TestResultSet object."""
        expected = [
            {'value': 0.64,
             'time': '2015-01-29T21:51:28.968422294Z',
             'host': 'server01',
             'region': 'us-west'},
            {'value': 0.66,
             'time': '2015-01-29T21:51:28.968422294Z',
             'host': 'server01',
             'region': 'us-west'},
        ]
        # Filtering on a tag spans all measurements in the result set.
        self.assertEqual(
            expected,
            list(self.rs[{"host": "server01"}])
        )
        self.assertEqual(
            expected,
            list(self.rs.get_points(tags={'host': 'server01'}))
        )

    def test_filter_by_name_and_tags(self):
        """Test filter by name and tags in TestResultSet object."""
        # A (measurement, tags) tuple narrows by both criteria at once.
        self.assertEqual(
            list(self.rs[('cpu_load_short', {"host": "server01"})]),
            [{'value': 0.64,
              'time': '2015-01-29T21:51:28.968422294Z',
              'host': 'server01',
              'region': 'us-west'}]
        )

        self.assertEqual(
            list(self.rs[('cpu_load_short', {"region": "us-west"})]),
            [
                {'value': 0.64,
                 'time': '2015-01-29T21:51:28.968422294Z',
                 'host': 'server01',
                 'region': 'us-west'},
                {'value': 0.65,
                 'time': '2015-01-29T21:51:28.968422294Z',
                 'host': 'server02',
                 'region': 'us-west'},
            ]
        )

    def test_keys(self):
        """Test keys in TestResultSet object."""
        self.assertEqual(
            self.rs.keys(),
            [
                ('cpu_load_short', None),
                ('other_series', None),
            ]
        )

    def test_len(self):
        """Test length in TestResultSet object."""
        self.assertEqual(
            len(self.rs),
            2
        )

    def test_items(self):
        """Test items in TestResultSet object."""
        items = list(self.rs.items())
        # Each item is ((measurement, tags), points-generator); realize the
        # generators so they can be compared against plain lists.
        items_lists = [(item[0], list(item[1])) for item in items]

        self.assertEqual(
            items_lists,
            [
                (
                    ('cpu_load_short', None),
                    [
                        {'time': '2015-01-29T21:51:28.968422294Z',
                         'value': 0.64,
                         'host': 'server01',
                         'region': 'us-west'},
                        {'time': '2015-01-29T21:51:28.968422294Z',
                         'value': 0.65,
                         'host': 'server02',
                         'region': 'us-west'}]),
                (
                    ('other_series', None),
                    [
                        {'time': '2015-01-29T21:51:28.968422294Z',
                         'value': 0.66,
                         'host': 'server01',
                         'region': 'us-west'}])]
        )

    def test_point_from_cols_vals(self):
        """Test points from columns in TestResultSet object."""
        cols = ['col1', 'col2']
        vals = [1, '2']

        point = ResultSet.point_from_cols_vals(cols, vals)
        self.assertDictEqual(
            point,
            {'col1': 1, 'col2': '2'}
        )

    def test_system_query(self):
        """Test system query capabilities in TestResultSet object."""
        # System queries (e.g. SHOW RETENTION POLICIES) return unnamed
        # series; they are exposed under the synthetic key 'results'.
        rs = ResultSet(
            {'series': [
                {'values': [['another', '48h0m0s', 3, False],
                            ['default', '0', 1, False],
                            ['somename', '24h0m0s', 4, True]],
                 'columns': ['name', 'duration',
                             'replicaN', 'default']}]}
        )

        self.assertEqual(
            rs.keys(),
            [('results', None)]
        )

        self.assertEqual(
            list(rs['results']),
            [
                {'duration': '48h0m0s', 'default': False, 'replicaN': 3,
                 'name': 'another'},
                {'duration': '0', 'default': False, 'replicaN': 1,
                 'name': 'default'},
                {'duration': '24h0m0s', 'default': True, 'replicaN': 4,
                 'name': 'somename'}
            ]
        )

    def test_resultset_error(self):
        """Test returning error in TestResultSet object."""
        # A server-side error embedded in the payload must surface as an
        # InfluxDBClientError at construction time.
        with self.assertRaises(InfluxDBClientError):
            ResultSet({
                "series": [],
                "error": "Big error, many problems."
            })

View file

@ -0,0 +1 @@
"""Define the server tests package."""

View file

@ -0,0 +1,84 @@
# -*- coding: utf-8 -*-
"""Define the base module for server test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
from influxdb.tests import using_pypy
from influxdb.tests.server_tests.influxdb_instance import InfluxDbInstance
from influxdb.client import InfluxDBClient
if not using_pypy:
from influxdb.dataframe_client import DataFrameClient
def _setup_influxdb_server(inst):
    # Launch a dedicated influxd process for this test (suite) and attach
    # ready-to-use client objects to the test instance.
    inst.influxd_inst = InfluxDbInstance(
        inst.influxdb_template_conf,
        udp_enabled=getattr(inst, 'influxdb_udp_enabled', False))

    conn_args = ('localhost', inst.influxd_inst.http_port, 'root', '')
    inst.cli = InfluxDBClient(*conn_args, database='db')
    if not using_pypy:
        # The pandas-based client is only available outside pypy.
        inst.cliDF = DataFrameClient(*conn_args, database='db')
def _teardown_influxdb_server(inst):
    # Keep the server's temporary directory for post-mortem inspection
    # when the test is failing (an exception is currently in flight).
    no_error = sys.exc_info() == (None, None, None)
    inst.influxd_inst.close(remove_tree=no_error)
class SingleTestCaseWithServerMixin(object):
    """Define the single testcase with server mixin.

    A mixin for unittest.TestCase to start an influxdb server instance
    in a temporary directory **for each test function/case**
    """

    # 'influxdb_template_conf' attribute must be set
    # on the TestCase class or instance.

    # A fresh server is started/stopped around every single test method.
    setUp = _setup_influxdb_server
    tearDown = _teardown_influxdb_server
class ManyTestCasesWithServerMixin(object):
    """Define the many testcase with server mixin.

    Same as the SingleTestCaseWithServerMixin but this module creates
    a single instance for the whole class. Also pre-creates a fresh
    database: 'db'.
    """

    # 'influxdb_template_conf' attribute must be set on the class itself !

    @classmethod
    def setUpClass(cls):
        """Set up an instance of the ManyTestCasesWithServerMixin."""
        # One server process is shared by every test in the class.
        _setup_influxdb_server(cls)

    def setUp(self):
        """Set up an instance of the ManyTestCasesWithServerMixin."""
        # Each test starts from a fresh, empty 'db' database.
        self.cli.create_database('db')

    @classmethod
    def tearDownClass(cls):
        """Deconstruct an instance of ManyTestCasesWithServerMixin."""
        _teardown_influxdb_server(cls)

    def tearDown(self):
        """Deconstruct an instance of ManyTestCasesWithServerMixin."""
        self.cli.drop_database('db')

View file

@ -0,0 +1,825 @@
# -*- coding: utf-8 -*-
"""Unit tests for checking the InfluxDB server.
The good/expected interaction between:
+ the python client.. (obviously)
+ and a *_real_* server instance running.
This basically duplicates what's in client_test.py
but without mocking around every call.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from functools import partial
import os
import time
import unittest
import warnings
from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError
from influxdb.tests import skipIfPYpy, using_pypy, skipServerTests
from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin
from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)

if not using_pypy:
    # pandas / DataFrame support is unavailable under pypy.
    import pandas as pd
    from pandas.util.testing import assert_frame_equal

# Directory holding this module; used to locate influxdb.conf.template.
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
def point(series_name, timestamp=None, tags=None, **fields):
    """Define what a point looks like."""
    # Build the point dict; 'time' and 'tags' are only included when
    # a truthy value was supplied.
    pt = {'measurement': series_name, 'fields': fields}
    if timestamp:
        pt['time'] = timestamp
    if tags:
        pt['tags'] = tags
    return pt
# Shared fixture data for the server tests below.
dummy_point = [  # some dummy points
    {
        "measurement": "cpu_load_short",
        "tags": {
            "host": "server01",
            "region": "us-west"
        },
        "time": "2009-11-10T23:00:00Z",
        "fields": {
            "value": 0.64
        }
    }
]

dummy_points = [  # some dummy points
    dummy_point[0],
    {
        "measurement": "memory",
        "tags": {
            "host": "server01",
            "region": "us-west"
        },
        "time": "2009-11-10T23:01:35Z",
        "fields": {
            "value": 33.0
        }
    }
]

if not using_pypy:
    # DataFrame-shaped equivalents of the fixtures above (pandas only).
    dummy_pointDF = {
        "measurement": "cpu_load_short",
        "tags": {"host": "server01",
                 "region": "us-west"},
        "dataframe": pd.DataFrame(
            [[0.64]], columns=['value'],
            index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
    }
    dummy_pointsDF = [{
        "measurement": "cpu_load_short",
        "tags": {"host": "server01", "region": "us-west"},
        "dataframe": pd.DataFrame(
            [[0.64]], columns=['value'],
            index=pd.to_datetime(["2009-11-10T23:00:00Z"])),
    }, {
        "measurement": "memory",
        "tags": {"host": "server01", "region": "us-west"},
        "dataframe": pd.DataFrame(
            [[33]], columns=['value'],
            index=pd.to_datetime(["2009-11-10T23:01:35Z"])
        )
    }]

# A point with no explicit timestamp (server assigns one on write).
dummy_point_without_timestamp = [
    {
        "measurement": "cpu_load_short",
        "tags": {
            "host": "server02",
            "region": "us-west"
        },
        "fields": {
            "value": 0.64
        }
    }
]
@skipServerTests
class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase):
    """Define the class of simple tests.

    Each test gets its own fresh influxd instance (no pre-created db).
    """

    influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')

    def test_fresh_server_no_db(self):
        """Test a fresh server without database."""
        self.assertEqual([], self.cli.get_list_database())

    def test_create_database(self):
        """Test create a database."""
        self.assertIsNone(self.cli.create_database('new_db_1'))
        self.assertIsNone(self.cli.create_database('new_db_2'))
        self.assertEqual(
            self.cli.get_list_database(),
            [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
        )

    def test_drop_database(self):
        """Test drop a database."""
        self.test_create_database()
        self.assertIsNone(self.cli.drop_database('new_db_1'))
        self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database())

    def test_query_fail(self):
        """Test that a query failed."""
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.query('select column_one from foo')
        self.assertIn('database not found: db',
                      ctx.exception.content)

    def test_query_fail_ignore_errors(self):
        """Test query failed but ignore errors."""
        result = self.cli.query('select column_one from foo',
                                raise_errors=False)
        self.assertEqual(result.error, 'database not found: db')

    def test_create_user(self):
        """Test create user."""
        self.cli.create_user('test_user', 'secret_password')
        rsp = list(self.cli.query("SHOW USERS")['results'])
        self.assertIn({'user': 'test_user', 'admin': False},
                      rsp)

    def test_create_user_admin(self):
        """Test create admin user."""
        self.cli.create_user('test_user', 'secret_password', True)
        rsp = list(self.cli.query("SHOW USERS")['results'])
        self.assertIn({'user': 'test_user', 'admin': True},
                      rsp)

    def test_create_user_blank_password(self):
        """Test create user with a blank pass."""
        self.cli.create_user('test_user', '')
        rsp = list(self.cli.query("SHOW USERS")['results'])
        self.assertIn({'user': 'test_user', 'admin': False},
                      rsp)

    def test_get_list_users_empty(self):
        """Test get list of users, but empty."""
        rsp = self.cli.get_list_users()
        self.assertEqual([], rsp)

    def test_get_list_users(self):
        """Test get list of users."""
        self.cli.query("CREATE USER test WITH PASSWORD 'test'")
        rsp = self.cli.get_list_users()
        self.assertEqual(
            [{'user': 'test', 'admin': False}],
            rsp
        )

    def test_create_user_blank_username(self):
        """Test create blank username."""
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.create_user('', 'secret_password')
        self.assertIn('username required',
                      ctx.exception.content)
        rsp = list(self.cli.query("SHOW USERS")['results'])
        self.assertEqual(rsp, [])

    def test_drop_user(self):
        """Test drop a user."""
        self.cli.query("CREATE USER test WITH PASSWORD 'test'")
        self.cli.drop_user('test')
        users = list(self.cli.query("SHOW USERS")['results'])
        self.assertEqual(users, [])

    def test_drop_user_nonexisting(self):
        """Test dropping a nonexistent user."""
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.drop_user('test')
        self.assertIn('user not found',
                      ctx.exception.content)

    @unittest.skip("Broken as of 0.9.0")
    def test_revoke_admin_privileges(self):
        """Test revoking admin privs, deprecated as of v0.9.0."""
        self.cli.create_user('test', 'test', admin=True)
        self.assertEqual([{'user': 'test', 'admin': True}],
                         self.cli.get_list_users())
        self.cli.revoke_admin_privileges('test')
        self.assertEqual([{'user': 'test', 'admin': False}],
                         self.cli.get_list_users())

    def test_grant_privilege(self):
        """Test grant privs to user."""
        self.cli.create_user('test', 'test')
        self.cli.create_database('testdb')
        self.cli.grant_privilege('all', 'testdb', 'test')
        # TODO: when supported by InfluxDB, check if privileges are granted

    def test_grant_privilege_invalid(self):
        """Test grant invalid privs to user."""
        self.cli.create_user('test', 'test')
        self.cli.create_database('testdb')
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.grant_privilege('', 'testdb', 'test')
        self.assertEqual(400, ctx.exception.code)
        self.assertIn('{"error":"error parsing query: ',
                      ctx.exception.content)

    def test_revoke_privilege(self):
        """Test revoke privs from user."""
        self.cli.create_user('test', 'test')
        self.cli.create_database('testdb')
        self.cli.revoke_privilege('all', 'testdb', 'test')
        # TODO: when supported by InfluxDB, check if privileges are revoked

    def test_revoke_privilege_invalid(self):
        """Test revoke invalid privs from user."""
        self.cli.create_user('test', 'test')
        self.cli.create_database('testdb')
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.revoke_privilege('', 'testdb', 'test')
        self.assertEqual(400, ctx.exception.code)
        self.assertIn('{"error":"error parsing query: ',
                      ctx.exception.content)

    def test_invalid_port_fails(self):
        """Test invalid port access fails."""
        with self.assertRaises(ValueError):
            InfluxDBClient('host', '80/redir', 'username', 'password')
@skipServerTests
class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase):
    """Define a class to handle common tests for the server.

    One influxd instance is shared by all tests; the 'db' database is
    re-created around each test by the mixin.
    """

    influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')

    def test_write(self):
        """Test write to the server."""
        self.assertIs(True, self.cli.write(
            {'points': dummy_point},
            params={'db': 'db'},
        ))

    def test_write_check_read(self):
        """Test write and check read of data to server."""
        self.test_write()
        # Writes are asynchronous server-side; give them time to land.
        time.sleep(1)
        rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db')
        self.assertListEqual([{'value': 0.64, 'time': '2009-11-10T23:00:00Z',
                               "host": "server01", "region": "us-west"}],
                             list(rsp.get_points()))

    def test_write_points(self):
        """Test writing points to the server."""
        self.assertIs(True, self.cli.write_points(dummy_point))

    @skipIfPYpy
    def test_write_points_DF(self):
        """Test writing points with dataframe."""
        self.assertIs(
            True,
            self.cliDF.write_points(
                dummy_pointDF['dataframe'],
                dummy_pointDF['measurement'],
                dummy_pointDF['tags']
            )
        )

    def test_write_points_check_read(self):
        """Test writing points and check read back."""
        self.test_write_points()
        time.sleep(1)  # same as test_write_check_read()
        rsp = self.cli.query('SELECT * FROM cpu_load_short')

        self.assertEqual(
            list(rsp),
            [[
                {'value': 0.64,
                 'time': '2009-11-10T23:00:00Z',
                 "host": "server01",
                 "region": "us-west"}
            ]]
        )

        rsp2 = list(rsp.get_points())
        self.assertEqual(len(rsp2), 1)
        pt = rsp2[0]

        self.assertEqual(
            pt,
            {'time': '2009-11-10T23:00:00Z',
             'value': 0.64,
             "host": "server01",
             "region": "us-west"}
        )

    @unittest.skip("Broken as of 0.9.0")
    def test_write_points_check_read_DF(self):
        """Test write points and check back with dataframe."""
        self.test_write_points_DF()
        time.sleep(1)  # same as test_write_check_read()

        rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
        assert_frame_equal(
            rsp['cpu_load_short'],
            dummy_pointDF['dataframe']
        )

        # Query with Tags
        rsp = self.cliDF.query(
            "SELECT * FROM cpu_load_short GROUP BY *")
        assert_frame_equal(
            rsp[('cpu_load_short',
                 (('host', 'server01'), ('region', 'us-west')))],
            dummy_pointDF['dataframe']
        )

    def test_write_multiple_points_different_series(self):
        """Test write multiple points to different series."""
        self.assertIs(True, self.cli.write_points(dummy_points))
        time.sleep(1)
        rsp = self.cli.query('SELECT * FROM cpu_load_short')
        lrsp = list(rsp)

        self.assertEqual(
            [[
                {'value': 0.64,
                 'time': '2009-11-10T23:00:00Z',
                 "host": "server01",
                 "region": "us-west"}
            ]],
            lrsp
        )

        rsp = list(self.cli.query('SELECT * FROM memory'))

        self.assertEqual(
            rsp,
            [[
                {'value': 33,
                 'time': '2009-11-10T23:01:35Z',
                 "host": "server01",
                 "region": "us-west"}
            ]]
        )

    def test_select_into_as_post(self):
        """Test SELECT INTO is POSTed."""
        self.assertIs(True, self.cli.write_points(dummy_points))
        time.sleep(1)
        rsp = self.cli.query('SELECT * INTO "newmeas" FROM "memory"')
        rsp = self.cli.query('SELECT * FROM "newmeas"')
        lrsp = list(rsp)

        self.assertEqual(
            lrsp,
            [[
                {'value': 33,
                 'time': '2009-11-10T23:01:35Z',
                 "host": "server01",
                 "region": "us-west"}
            ]]
        )

    @unittest.skip("Broken as of 0.9.0")
    def test_write_multiple_points_different_series_DF(self):
        """Test write multiple points using dataframe to different series."""
        for i in range(2):
            self.assertIs(
                True, self.cliDF.write_points(
                    dummy_pointsDF[i]['dataframe'],
                    dummy_pointsDF[i]['measurement'],
                    dummy_pointsDF[i]['tags']))
        time.sleep(1)
        rsp = self.cliDF.query('SELECT * FROM cpu_load_short')

        assert_frame_equal(
            rsp['cpu_load_short'],
            dummy_pointsDF[0]['dataframe']
        )

        rsp = self.cliDF.query('SELECT * FROM memory')
        assert_frame_equal(
            rsp['memory'],
            dummy_pointsDF[1]['dataframe']
        )

    def test_write_points_batch(self):
        """Test writing points in a batch."""
        # NOTE: intentionally shadows the module-level dummy_points here.
        dummy_points = [
            {"measurement": "cpu_usage", "tags": {"unit": "percent"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
            {"measurement": "network", "tags": {"direction": "in"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
            {"measurement": "network", "tags": {"direction": "out"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
        ]
        # batch_size=2 forces the client to split the write into batches.
        self.cli.write_points(points=dummy_points,
                              tags={"host": "server01",
                                    "region": "us-west"},
                              batch_size=2)
        time.sleep(5)
        net_in = self.cli.query("SELECT value FROM network "
                                "WHERE direction='in'").raw
        net_out = self.cli.query("SELECT value FROM network "
                                 "WHERE direction='out'").raw
        cpu = self.cli.query("SELECT value FROM cpu_usage").raw
        self.assertIn(123, net_in['series'][0]['values'][0])
        self.assertIn(12, net_out['series'][0]['values'][0])
        self.assertIn(12.34, cpu['series'][0]['values'][0])

    def test_query(self):
        """Test querying data back from server."""
        self.assertIs(True, self.cli.write_points(dummy_point))

    @unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Test query for chunked response from server."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23]
            ],
            'name': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        del cli
        del example_object
        # TODO ?

    def test_delete_series_invalid(self):
        """Test delete invalid series."""
        with self.assertRaises(InfluxDBClientError):
            self.cli.delete_series()

    def test_default_retention_policy(self):
        """Test add default retention policy."""
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'name': 'autogen',
                 'duration': '0s',
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'default': True}
            ],
            rsp
        )

    def test_create_retention_policy_default(self):
        """Test create a new default retention policy."""
        self.cli.create_retention_policy('somename', '1d', 1, default=True)
        self.cli.create_retention_policy('another', '2d', 1, default=False)
        rsp = self.cli.get_list_retention_policies()

        self.assertEqual(
            [
                {'duration': '0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '24h0m0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'1h0m0s',
                 'name': 'somename'},
                {'duration': '48h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'24h0m0s',
                 'name': 'another'}
            ],
            rsp
        )

    def test_create_retention_policy(self):
        """Test creating a new retention policy, not default."""
        self.cli.create_retention_policy('somename', '1d', 1)
        # NB: creating a retention policy without specifying
        # shard group duration
        # leads to a shard group duration of 1 hour
        rsp = self.cli.get_list_retention_policies()

        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '24h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'1h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

        self.cli.drop_retention_policy('somename', 'db')
        # recreate the RP
        self.cli.create_retention_policy('somename', '1w', 1,
                                         shard_duration='1h')

        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '168h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'1h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

        self.cli.drop_retention_policy('somename', 'db')
        # recreate the RP
        self.cli.create_retention_policy('somename', '1w', 1)

        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '168h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'24h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

    def test_alter_retention_policy(self):
        """Test alter a retention policy, not default."""
        self.cli.create_retention_policy('somename', '1d', 1)

        # Test alter duration
        self.cli.alter_retention_policy('somename', 'db',
                                        duration='4d',
                                        shard_duration='2h')
        # NB: altering retention policy doesn't change shard group duration
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '96h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'2h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

        # Test alter replication
        self.cli.alter_retention_policy('somename', 'db',
                                        replication=4)

        # NB: altering retention policy doesn't change shard group duration
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '96h0m0s',
                 'default': False,
                 'replicaN': 4,
                 'shardGroupDuration': u'2h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

        # Test alter default
        self.cli.alter_retention_policy('somename', 'db',
                                        default=True)
        # NB: altering retention policy doesn't change shard group duration
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '96h0m0s',
                 'default': True,
                 'replicaN': 4,
                 'shardGroupDuration': u'2h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

        # Test alter shard_duration
        self.cli.alter_retention_policy('somename', 'db',
                                        shard_duration='4h')

        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '96h0m0s',
                 'default': True,
                 'replicaN': 4,
                 'shardGroupDuration': u'4h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

    def test_alter_retention_policy_invalid(self):
        """Test invalid alter retention policy."""
        self.cli.create_retention_policy('somename', '1d', 1)
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.alter_retention_policy('somename', 'db')
        self.assertEqual(400, ctx.exception.code)
        self.assertIn('{"error":"error parsing query: ',
                      ctx.exception.content)
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '24h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'1h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

    def test_drop_retention_policy(self):
        """Test drop a retention policy."""
        self.cli.create_retention_policy('somename', '1d', 1)

        # Test drop retention
        self.cli.drop_retention_policy('somename', 'db')
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'}
            ],
            rsp
        )

    def test_issue_143(self):
        """Test for PR#143 from repo."""
        pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z')
        pts = [
            pt(value=15),
            pt(tags={'tag_1': 'value1'}, value=5),
            pt(tags={'tag_1': 'value2'}, value=10),
        ]
        self.cli.write_points(pts)
        time.sleep(1)
        rsp = list(self.cli.query('SELECT * FROM a_series_name \
GROUP BY tag_1').get_points())

        self.assertEqual(
            [
                {'time': '2015-03-30T16:16:37Z', 'value': 15},
                {'time': '2015-03-30T16:16:37Z', 'value': 5},
                {'time': '2015-03-30T16:16:37Z', 'value': 10}
            ],
            rsp
        )

        # a slightly more complex one with 2 tags values:
        pt = partial(point, 'series2', timestamp='2015-03-30T16:16:37Z')
        pts = [
            pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
            pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5),
            pt(tags={'tag1': 'value2', 'tag2': 'v1'}, value=10),
        ]
        self.cli.write_points(pts)
        time.sleep(1)
        rsp = self.cli.query('SELECT * FROM series2 GROUP BY tag1,tag2')

        self.assertEqual(
            [
                {'value': 0, 'time': '2015-03-30T16:16:37Z'},
                {'value': 5, 'time': '2015-03-30T16:16:37Z'},
                {'value': 10, 'time': '2015-03-30T16:16:37Z'}
            ],
            list(rsp['series2'])
        )

        all_tag2_equal_v1 = list(rsp.get_points(tags={'tag2': 'v1'}))

        self.assertEqual(
            [{'value': 0, 'time': '2015-03-30T16:16:37Z'},
             {'value': 10, 'time': '2015-03-30T16:16:37Z'}],
            all_tag2_equal_v1,
        )

    def test_query_multiple_series(self):
        """Test query for multiple series."""
        pt = partial(point, 'series1', timestamp='2015-03-30T16:16:37Z')
        pts = [
            pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
        ]
        self.cli.write_points(pts)

        pt = partial(point, 'series2', timestamp='1970-03-30T16:16:37Z')
        pts = [
            pt(tags={'tag1': 'value1', 'tag2': 'v1'},
               value=0, data1=33, data2="bla"),
        ]
        self.cli.write_points(pts)
@skipServerTests
class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase):
    """Define a class to test UDP series."""

    # Enables the UDP listener in the influxd template configuration.
    influxdb_udp_enabled = True
    influxdb_template_conf = os.path.join(THIS_DIR,
                                          'influxdb.conf.template')

    def test_write_points_udp(self):
        """Test write points UDP."""
        cli = InfluxDBClient(
            'localhost',
            self.influxd_inst.http_port,
            'root',
            '',
            database='db',
            use_udp=True,
            udp_port=self.influxd_inst.udp_port
        )
        cli.write_points(dummy_point)

        # The points are not immediately available after write_points.
        # This is to be expected because we are using udp (no response !).
        # So we have to wait some time,
        time.sleep(3)  # 3 sec seems to be a good choice.
        rsp = self.cli.query('SELECT * FROM cpu_load_short')

        self.assertEqual(
            # this is dummy_points :
            [
                {'value': 0.64,
                 'time': '2009-11-10T23:00:00Z',
                 "host": "server01",
                 "region": "us-west"}
            ],
            list(rsp['cpu_load_short'])
        )

View file

@ -0,0 +1,198 @@
# -*- coding: utf-8 -*-
"""Define the test module for an influxdb instance."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import datetime
import distutils
import os
import tempfile
import shutil
import subprocess
import sys
import time
import unittest
from influxdb.tests.misc import is_port_open, get_free_ports
# hack in check_output if it's not defined, like for python 2.6
if "check_output" not in dir(subprocess):
def f(*popenargs, **kwargs):
"""Check for output."""
if 'stdout' in kwargs:
raise ValueError(
'stdout argument not allowed, it will be overridden.'
)
process = subprocess.Popen(stdout=subprocess.PIPE,
*popenargs,
**kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd)
return output
subprocess.check_output = f
class InfluxDbInstance(object):
    """Define an instance of InfluxDB.

    A class to launch of fresh influxdb server instance
    in a temporary place, using a config file template.
    """

    def __init__(self, conf_template, udp_enabled=False):
        """Initialize an instance of InfluxDbInstance.

        :param conf_template: path to the influxdb.conf template file
        :param bool udp_enabled: whether to enable the UDP listener
        :raises unittest.SkipTest: when server tests are disabled via env
        """
        if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True':
            raise unittest.SkipTest(
                "Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)"
            )

        self.influxd_path = self.find_influxd_path()

        errors = 0
        while True:
            try:
                self._start_server(conf_template, udp_enabled)
                break
            # Happens when the ports are already in use; retry a couple of
            # times with freshly-allocated ports before giving up.
            except RuntimeError as e:
                errors += 1
                if errors > 2:
                    raise e

    def _start_server(self, conf_template, udp_enabled):
        """Render the config template, spawn influxd and wait until ready."""
        # create a temporary dir to store all needed files
        # for the influxdb server instance :
        self.temp_dir_base = tempfile.mkdtemp()

        # "temp_dir_base" will be used for conf file and logs,
        # while "temp_dir_influxdb" is for the databases files/dirs :
        tempdir = self.temp_dir_influxdb = tempfile.mkdtemp(
            dir=self.temp_dir_base)

        # find a couple free ports :
        free_ports = get_free_ports(4)
        ports = {}
        for service in 'http', 'global', 'meta', 'udp':
            ports[service + '_port'] = free_ports.pop()
        if not udp_enabled:
            ports['udp_port'] = -1

        conf_data = dict(
            meta_dir=os.path.join(tempdir, 'meta'),
            data_dir=os.path.join(tempdir, 'data'),
            wal_dir=os.path.join(tempdir, 'wal'),
            cluster_dir=os.path.join(tempdir, 'state'),
            handoff_dir=os.path.join(tempdir, 'handoff'),
            logs_file=os.path.join(self.temp_dir_base, 'logs.txt'),
            udp_enabled='true' if udp_enabled else 'false',
        )
        conf_data.update(ports)
        # Expose dirs/ports (http_port, udp_port, logs_file, ...) as
        # attributes for the tests to use.
        self.__dict__.update(conf_data)

        conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf')
        with open(conf_file, "w") as fh:
            with open(conf_template) as fh_template:
                fh.write(fh_template.read().format(**conf_data))

        # now start the server instance:
        self.proc = subprocess.Popen(
            [self.influxd_path, '-config', conf_file],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )

        print(
            "%s > Started influxdb bin in %r with ports %s and %s.." % (
                datetime.datetime.now(),
                self.temp_dir_base,
                self.global_port,
                self.http_port
            )
        )

        # wait for it to listen on the broker and admin ports:
        # usually a fresh instance is ready in less than 1 sec ..
        timeout = time.time() + 10  # so 10 secs should be enough,
        # otherwise either your system load is high,
        # or you run a 286 @ 1Mhz ?
        try:
            while time.time() < timeout:
                if (is_port_open(self.http_port) and
                        is_port_open(self.global_port)):
                    # it's hard to check if a UDP port is open..
                    if udp_enabled:
                        # so let's just sleep 0.5 sec in this case
                        # to be sure that the server has open the port
                        time.sleep(0.5)
                    break
                time.sleep(0.5)
                if self.proc.poll() is not None:
                    raise RuntimeError('influxdb prematurely exited')
            else:
                # while-loop exhausted without break: ports never opened.
                self.proc.terminate()
                self.proc.wait()
                raise RuntimeError('Timeout waiting for influxdb to listen'
                                   ' on its ports (%s)' % ports)
        except RuntimeError as err:
            data = self.get_logs_and_output()
            data['reason'] = str(err)
            data['now'] = datetime.datetime.now()
            raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n"
                               "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r"
                               % data)

    def find_influxd_path(self):
        """Find the path for InfluxDB.

        Resolution order: INFLUXDB_PYTHON_INFLUXD_PATH env var, then the
        PATH lookup, then `which influxd`, then a hard-coded fallback.

        :raises unittest.SkipTest: when no influxd binary can be found
        """
        influxdb_bin_path = os.environ.get(
            'INFLUXDB_PYTHON_INFLUXD_PATH',
            None
        )

        if influxdb_bin_path is None:
            # BUGFIX: `import distutils` does NOT import the
            # `distutils.spawn` submodule, so referencing
            # distutils.spawn.find_executable raised AttributeError here.
            # Import the function explicitly instead.
            from distutils.spawn import find_executable
            influxdb_bin_path = find_executable('influxd')
            if not influxdb_bin_path:
                try:
                    influxdb_bin_path = subprocess.check_output(
                        ['which', 'influxd']
                    ).strip()
                except subprocess.CalledProcessError:
                    # fallback on :
                    influxdb_bin_path = '/opt/influxdb/influxd'

        if not os.path.isfile(influxdb_bin_path):
            raise unittest.SkipTest("Could not find influxd binary")

        version = subprocess.check_output([influxdb_bin_path, 'version'])
        print("InfluxDB version: %s" % version, file=sys.stderr)

        return influxdb_bin_path

    def get_logs_and_output(self):
        """Query for logs and output.

        :return: dict with the process return code, captured stdout/stderr
            and the content of the server's log file (or the read error).
        """
        proc = self.proc
        try:
            with open(self.logs_file) as fh:
                logs = fh.read()
        except IOError as err:
            logs = "Couldn't read logs: %s" % err
        return {
            'rc': proc.returncode,
            'out': proc.stdout.read(),
            'err': proc.stderr.read(),
            'logs': logs
        }

    def close(self, remove_tree=True):
        """Close an instance of InfluxDB.

        :param bool remove_tree: also delete the temporary directory tree
        """
        self.proc.terminate()
        self.proc.wait()
        if remove_tree:
            shutil.rmtree(self.temp_dir_base)

View file

@ -0,0 +1,147 @@
# -*- coding: utf-8 -*-
"""Define the line protocol test module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime
import unittest
from pytz import UTC, timezone
from influxdb import line_protocol
class TestLineProtocol(unittest.TestCase):
    """Define the LineProtocol test object."""
    def test_make_lines(self):
        """Test make new lines in TestLineProtocol object."""
        # Tag edge cases: empty and None tags must be dropped entirely,
        # backslashes escaped, and non-string tag values stringified.
        data = {
            "tags": {
                "empty_tag": "",
                "none_tag": None,
                "backslash_tag": "C:\\",
                "integer_tag": 2,
                "string_tag": "hello"
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "string_val": "hello!",
                        "int_val": 1,
                        "float_val": 1.1,
                        "none_field": None,
                        "bool_val": True,
                    }
                }
            ]
        }
        # Expected line: tags sorted alphabetically, int fields get an
        # 'i' suffix, string fields are double-quoted, and the
        # None-valued field is omitted from the output.
        self.assertEqual(
            line_protocol.make_lines(data),
            'test,backslash_tag=C:\\\\ ,integer_tag=2,string_tag=hello '
            'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n'
        )
    def test_timezone(self):
        """Test timezone in TestLineProtocol object."""
        # The same wall-clock time expressed as naive, UTC-aware, and
        # two other zones; make_lines must normalize all of them to
        # nanoseconds since the Unix epoch.
        dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
        utc = UTC.localize(dt)
        berlin = timezone('Europe/Berlin').localize(dt)
        eastern = berlin.astimezone(timezone('US/Eastern'))
        data = {
            "points": [
                {"measurement": "A", "fields": {"val": 1},
                 "time": 0},
                {"measurement": "A", "fields": {"val": 1},
                 "time": "2009-11-10T23:00:00.123456Z"},
                {"measurement": "A", "fields": {"val": 1}, "time": dt},
                {"measurement": "A", "fields": {"val": 1}, "time": utc},
                {"measurement": "A", "fields": {"val": 1}, "time": berlin},
                {"measurement": "A", "fields": {"val": 1}, "time": eastern},
            ]
        }
        # Naive datetimes are treated as UTC (same epoch value as the
        # explicit UTC cases); the Berlin/Eastern pair share one epoch
        # value since they denote the same instant.
        self.assertEqual(
            line_protocol.make_lines(data),
            '\n'.join([
                'A val=1i 0',
                'A val=1i 1257894000123456000',
                'A val=1i 1257894000123456000',
                'A val=1i 1257894000123456000',
                'A val=1i 1257890400123456000',
                'A val=1i 1257890400123456000',
            ]) + '\n'
        )
    def test_string_val_newline(self):
        """Test string value with newline in TestLineProtocol object."""
        data = {
            "points": [
                {
                    "measurement": "m1",
                    "fields": {
                        "multi_line": "line1\nline1\nline3"
                    }
                }
            ]
        }
        # Literal newlines inside a field value must be escaped to \n so
        # the protocol line stays single-line.
        self.assertEqual(
            line_protocol.make_lines(data),
            'm1 multi_line="line1\\nline1\\nline3"\n'
        )
    def test_make_lines_unicode(self):
        """Test make unicode lines in TestLineProtocol object."""
        # Non-ASCII tag and field values must pass through unmodified.
        data = {
            "tags": {
                "unicode_tag": "\'Привет!\'"  # Hello! in Russian
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "unicode_val": "Привет!",  # Hello! in Russian
                    }
                }
            ]
        }
        self.assertEqual(
            line_protocol.make_lines(data),
            'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n'
        )
    def test_quote_ident(self):
        """Test quote indentation in TestLineProtocol object."""
        # Identifiers are wrapped in double quotes; embedded backslashes
        # and double quotes are escaped, single quotes are left alone.
        self.assertEqual(
            line_protocol.quote_ident(r"""\foo ' bar " Örf"""),
            r'''"\\foo ' bar \" Örf"'''
        )
    def test_quote_literal(self):
        """Test quote literal in TestLineProtocol object."""
        # Literals are wrapped in single quotes; embedded backslashes
        # and single quotes are escaped, double quotes are left alone.
        self.assertEqual(
            line_protocol.quote_literal(r"""\foo ' bar " Örf"""),
            r"""'\\foo \' bar " Örf'"""
        )
    def test_float_with_long_decimal_fraction(self):
        """Ensure precision is preserved when casting floats into strings."""
        # A float needing 17 significant digits must round-trip without
        # loss (i.e. not be truncated by a short repr/str conversion).
        data = {
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "float_val": 1.0000000000000009,
                    }
                }
            ]
        }
        self.assertEqual(
            line_protocol.make_lines(data),
            'test float_val=1.0000000000000009\n'
        )