added forced package imports
This commit is contained in:
parent
0e2ffdbbb1
commit
ef9022c6eb
943 changed files with 125530 additions and 16 deletions
21
lib/influxdb/tests/__init__.py
Normal file
21
lib/influxdb/tests/__init__.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Configure the tests package for InfluxDBClient."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
import unittest
|
||||
|
||||
# Detect the PyPy interpreter so incompatible tests can opt out.
using_pypy = hasattr(sys, "pypy_version_info")
skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.")

# Server-backed tests are skipped when the environment variable is the
# literal string 'True'.
_skip_server_tests = (
    os.environ.get('INFLUXDB_PYTHON_SKIP_SERVER_TESTS', None) == 'True'
)
skipServerTests = unittest.skipIf(_skip_server_tests,
                                  "Skipping server tests...")
|
51
lib/influxdb/tests/chunked_json_test.py
Normal file
51
lib/influxdb/tests/chunked_json_test.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Chunked JSON test."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import unittest
|
||||
|
||||
from influxdb import chunked_json
|
||||
|
||||
|
||||
class TestChunkJson(unittest.TestCase):
    """Set up the TestChunkJson object."""

    @classmethod
    def setUpClass(cls):
        """Initialize the TestChunkJson object."""
        super(TestChunkJson, cls).setUpClass()

    def test_load(self):
        """Test reading a sequence of JSON values from a string."""
        example_response = (
            '{"results": [{"series": [{"measurement": "sdfsdfsdf", '
            '"columns": ["time", "value"], "values": '
            '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
            '[{"measurement": "cpu_load_short", "columns": ["time", "value"],'
            '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
        )

        parsed = list(chunked_json.loads(example_response))

        expected = [{
            'results': [
                {'series': [{
                    'values': [['2009-11-10T23:00:00Z', 0.64]],
                    'measurement': 'sdfsdfsdf',
                    'columns': ['time', 'value']}]},
                {'series': [{
                    'values': [['2009-11-10T23:00:00Z', 0.64]],
                    'measurement': 'cpu_load_short',
                    'columns': ['time', 'value']}]},
            ]
        }]
        self.assertListEqual(expected, parsed)
|
1094
lib/influxdb/tests/client_test.py
Normal file
1094
lib/influxdb/tests/client_test.py
Normal file
File diff suppressed because it is too large
Load diff
711
lib/influxdb/tests/dataframe_client_test.py
Normal file
711
lib/influxdb/tests/dataframe_client_test.py
Normal file
|
@ -0,0 +1,711 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for misc module."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
import json
|
||||
import unittest
|
||||
import warnings
|
||||
import requests_mock
|
||||
|
||||
from influxdb.tests import skipIfPYpy, using_pypy
|
||||
from nose.tools import raises
|
||||
|
||||
from .client_test import _mocked_session
|
||||
|
||||
if not using_pypy:
|
||||
import pandas as pd
|
||||
from pandas.util.testing import assert_frame_equal
|
||||
from influxdb import DataFrameClient
|
||||
|
||||
|
||||
@skipIfPYpy
|
||||
class TestDataFrameClient(unittest.TestCase):
|
||||
"""Set up a test DataFrameClient object."""
|
||||
|
||||
def setUp(self):
|
||||
"""Instantiate a TestDataFrameClient object."""
|
||||
# By default, raise exceptions on warnings
|
||||
warnings.simplefilter('error', FutureWarning)
|
||||
|
||||
def test_write_points_from_dataframe(self):
|
||||
"""Test write points from df in TestDataFrameClient object."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
expected = (
|
||||
b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
|
||||
b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
|
||||
cli.write_points(dataframe, 'foo')
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
cli.write_points(dataframe, 'foo', tags=None)
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_dataframe_write_points_with_whitespace_measurement(self):
|
||||
"""write_points should escape white space in measurements."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
expected = (
|
||||
b"meas\\ with\\ space "
|
||||
b"column_one=\"1\",column_two=1i,column_three=1.0 0\n"
|
||||
b"meas\\ with\\ space "
|
||||
b"column_one=\"2\",column_two=2i,column_three=2.0 "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, 'meas with space')
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_dataframe_write_points_with_whitespace_in_column_names(self):
|
||||
"""write_points should escape white space in column names."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["column one", "column two",
|
||||
"column three"])
|
||||
expected = (
|
||||
b"foo column\\ one=\"1\",column\\ two=1i,column\\ three=1.0 0\n"
|
||||
b"foo column\\ one=\"2\",column\\ two=2i,column\\ three=2.0 "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, 'foo')
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_write_points_from_dataframe_with_none(self):
|
||||
"""Test write points from df in TestDataFrameClient object."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[["1", None, 1.0], ["2", 2.0, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
expected = (
|
||||
b"foo column_one=\"1\",column_three=1.0 0\n"
|
||||
b"foo column_one=\"2\",column_two=2.0,column_three=2.0 "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
|
||||
cli.write_points(dataframe, 'foo')
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
cli.write_points(dataframe, 'foo', tags=None)
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_write_points_from_dataframe_with_line_of_none(self):
|
||||
"""Test write points from df in TestDataFrameClient object."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[[None, None, None], ["2", 2.0, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
expected = (
|
||||
b"foo column_one=\"2\",column_two=2.0,column_three=2.0 "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
|
||||
cli.write_points(dataframe, 'foo')
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
cli.write_points(dataframe, 'foo', tags=None)
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_write_points_from_dataframe_with_all_none(self):
|
||||
"""Test write points from df in TestDataFrameClient object."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[[None, None, None], [None, None, None]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
expected = (
|
||||
b"\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
|
||||
cli.write_points(dataframe, 'foo')
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
cli.write_points(dataframe, 'foo', tags=None)
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_write_points_from_dataframe_in_batches(self):
|
||||
"""Test write points in batch from df in TestDataFrameClient object."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
self.assertTrue(cli.write_points(dataframe, "foo", batch_size=1))
|
||||
|
||||
def test_write_points_from_dataframe_with_tag_columns(self):
|
||||
"""Test write points from df w/tag in TestDataFrameClient object."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0],
|
||||
['red', 0, "2", 2, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["tag_one", "tag_two", "column_one",
|
||||
"column_two", "column_three"])
|
||||
expected = (
|
||||
b"foo,tag_one=blue,tag_two=1 "
|
||||
b"column_one=\"1\",column_two=1i,column_three=1.0 "
|
||||
b"0\n"
|
||||
b"foo,tag_one=red,tag_two=0 "
|
||||
b"column_one=\"2\",column_two=2i,column_three=2.0 "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
|
||||
cli.write_points(dataframe, 'foo',
|
||||
tag_columns=['tag_one', 'tag_two'])
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
cli.write_points(dataframe, 'foo',
|
||||
tag_columns=['tag_one', 'tag_two'], tags=None)
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_write_points_from_dataframe_with_tag_cols_and_global_tags(self):
|
||||
"""Test write points from df w/tag + cols in TestDataFrameClient."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0],
|
||||
['red', 0, "2", 2, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["tag_one", "tag_two", "column_one",
|
||||
"column_two", "column_three"])
|
||||
expected = (
|
||||
b"foo,global_tag=value,tag_one=blue,tag_two=1 "
|
||||
b"column_one=\"1\",column_two=1i,column_three=1.0 "
|
||||
b"0\n"
|
||||
b"foo,global_tag=value,tag_one=red,tag_two=0 "
|
||||
b"column_one=\"2\",column_two=2i,column_three=2.0 "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
|
||||
cli.write_points(dataframe, 'foo',
|
||||
tag_columns=['tag_one', 'tag_two'],
|
||||
tags={'global_tag': 'value'})
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_write_points_from_dataframe_with_tag_cols_and_defaults(self):
|
||||
"""Test default write points from df w/tag in TestDataFrameClient."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[['blue', 1, "1", 1, 1.0, 'hot'],
|
||||
['red', 0, "2", 2, 2.0, 'cold']],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["tag_one", "tag_two", "column_one",
|
||||
"column_two", "column_three",
|
||||
"tag_three"])
|
||||
expected_tags_and_fields = (
|
||||
b"foo,tag_one=blue "
|
||||
b"column_one=\"1\",column_two=1i "
|
||||
b"0\n"
|
||||
b"foo,tag_one=red "
|
||||
b"column_one=\"2\",column_two=2i "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
|
||||
expected_tags_no_fields = (
|
||||
b"foo,tag_one=blue,tag_two=1 "
|
||||
b"column_one=\"1\",column_two=1i,column_three=1.0,"
|
||||
b"tag_three=\"hot\" 0\n"
|
||||
b"foo,tag_one=red,tag_two=0 "
|
||||
b"column_one=\"2\",column_two=2i,column_three=2.0,"
|
||||
b"tag_three=\"cold\" 3600000000000\n"
|
||||
)
|
||||
|
||||
expected_fields_no_tags = (
|
||||
b"foo,tag_one=blue,tag_three=hot,tag_two=1 "
|
||||
b"column_one=\"1\",column_two=1i,column_three=1.0 "
|
||||
b"0\n"
|
||||
b"foo,tag_one=red,tag_three=cold,tag_two=0 "
|
||||
b"column_one=\"2\",column_two=2i,column_three=2.0 "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
|
||||
expected_no_tags_no_fields = (
|
||||
b"foo "
|
||||
b"tag_one=\"blue\",tag_two=1i,column_one=\"1\","
|
||||
b"column_two=1i,column_three=1.0,tag_three=\"hot\" "
|
||||
b"0\n"
|
||||
b"foo "
|
||||
b"tag_one=\"red\",tag_two=0i,column_one=\"2\","
|
||||
b"column_two=2i,column_three=2.0,tag_three=\"cold\" "
|
||||
b"3600000000000\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
|
||||
cli.write_points(dataframe, 'foo',
|
||||
field_columns=['column_one', 'column_two'],
|
||||
tag_columns=['tag_one'])
|
||||
self.assertEqual(m.last_request.body, expected_tags_and_fields)
|
||||
|
||||
cli.write_points(dataframe, 'foo',
|
||||
tag_columns=['tag_one', 'tag_two'])
|
||||
self.assertEqual(m.last_request.body, expected_tags_no_fields)
|
||||
|
||||
cli.write_points(dataframe, 'foo',
|
||||
field_columns=['column_one', 'column_two',
|
||||
'column_three'])
|
||||
self.assertEqual(m.last_request.body, expected_fields_no_tags)
|
||||
|
||||
cli.write_points(dataframe, 'foo')
|
||||
self.assertEqual(m.last_request.body, expected_no_tags_no_fields)
|
||||
|
||||
def test_write_points_from_dataframe_with_tag_escaped(self):
|
||||
"""Test write points from df w/escaped tag in TestDataFrameClient."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(
|
||||
data=[
|
||||
['blue orange', "1", 1, 'hot=cold'], # space, equal
|
||||
['red,green', "2", 2, r'cold\fire'], # comma, backslash
|
||||
['some', "2", 2, ''], # skip empty
|
||||
['some', "2", 2, None], # skip None
|
||||
['', "2", 2, None], # all tags empty
|
||||
],
|
||||
index=pd.period_range(now, freq='H', periods=5),
|
||||
columns=["tag_one", "column_one", "column_two", "tag_three"]
|
||||
)
|
||||
|
||||
expected_escaped_tags = (
|
||||
b"foo,tag_one=blue\\ orange,tag_three=hot\\=cold "
|
||||
b"column_one=\"1\",column_two=1i "
|
||||
b"0\n"
|
||||
b"foo,tag_one=red\\,green,tag_three=cold\\\\fire "
|
||||
b"column_one=\"2\",column_two=2i "
|
||||
b"3600000000000\n"
|
||||
b"foo,tag_one=some "
|
||||
b"column_one=\"2\",column_two=2i "
|
||||
b"7200000000000\n"
|
||||
b"foo,tag_one=some "
|
||||
b"column_one=\"2\",column_two=2i "
|
||||
b"10800000000000\n"
|
||||
b"foo "
|
||||
b"column_one=\"2\",column_two=2i "
|
||||
b"14400000000000\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, 'foo',
|
||||
field_columns=['column_one', 'column_two'],
|
||||
tag_columns=['tag_one', 'tag_three'])
|
||||
self.assertEqual(m.last_request.body, expected_escaped_tags)
|
||||
|
||||
def test_write_points_from_dataframe_with_numeric_column_names(self):
|
||||
"""Test write points from df with numeric cols."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
# df with numeric column names
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)])
|
||||
|
||||
expected = (
|
||||
b'foo,hello=there 0=\"1\",1=1i,2=1.0 0\n'
|
||||
b'foo,hello=there 0=\"2\",1=2i,2=2.0 3600000000000\n'
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, "foo", {"hello": "there"})
|
||||
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_write_points_from_dataframe_with_numeric_precision(self):
|
||||
"""Test write points from df with numeric precision."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
# df with numeric column names
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.1111111111111],
|
||||
["2", 2, 2.2222222222222]],
|
||||
index=[now, now + timedelta(hours=1)])
|
||||
|
||||
expected_default_precision = (
|
||||
b'foo,hello=there 0=\"1\",1=1i,2=1.11111111111 0\n'
|
||||
b'foo,hello=there 0=\"2\",1=2i,2=2.22222222222 3600000000000\n'
|
||||
)
|
||||
|
||||
expected_specified_precision = (
|
||||
b'foo,hello=there 0=\"1\",1=1i,2=1.1111 0\n'
|
||||
b'foo,hello=there 0=\"2\",1=2i,2=2.2222 3600000000000\n'
|
||||
)
|
||||
|
||||
expected_full_precision = (
|
||||
b'foo,hello=there 0=\"1\",1=1i,2=1.1111111111111 0\n'
|
||||
b'foo,hello=there 0=\"2\",1=2i,2=2.2222222222222 3600000000000\n'
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, "foo", {"hello": "there"})
|
||||
|
||||
self.assertEqual(m.last_request.body, expected_default_precision)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, "foo", {"hello": "there"},
|
||||
numeric_precision=4)
|
||||
|
||||
self.assertEqual(m.last_request.body, expected_specified_precision)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, "foo", {"hello": "there"},
|
||||
numeric_precision='full')
|
||||
|
||||
self.assertEqual(m.last_request.body, expected_full_precision)
|
||||
|
||||
def test_write_points_from_dataframe_with_period_index(self):
|
||||
"""Test write points from df with period index."""
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
|
||||
index=[pd.Period('1970-01-01'),
|
||||
pd.Period('1970-01-02')],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
|
||||
expected = (
|
||||
b"foo column_one=\"1\",column_two=1i,column_three=1.0 0\n"
|
||||
b"foo column_one=\"2\",column_two=2i,column_three=2.0 "
|
||||
b"86400000000000\n"
|
||||
)
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, "foo")
|
||||
|
||||
self.assertEqual(m.last_request.body, expected)
|
||||
|
||||
def test_write_points_from_dataframe_with_time_precision(self):
|
||||
"""Test write points from df with time precision."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
|
||||
index=[now, now + timedelta(hours=1)],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/write",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
measurement = "foo"
|
||||
|
||||
cli.write_points(dataframe, measurement, time_precision='h')
|
||||
self.assertEqual(m.last_request.qs['precision'], ['h'])
|
||||
self.assertEqual(
|
||||
b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
|
||||
b'column_one="2",column_two=2i,column_three=2.0 1\n',
|
||||
m.last_request.body,
|
||||
)
|
||||
|
||||
cli.write_points(dataframe, measurement, time_precision='m')
|
||||
self.assertEqual(m.last_request.qs['precision'], ['m'])
|
||||
self.assertEqual(
|
||||
b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
|
||||
b'column_one="2",column_two=2i,column_three=2.0 60\n',
|
||||
m.last_request.body,
|
||||
)
|
||||
|
||||
cli.write_points(dataframe, measurement, time_precision='s')
|
||||
self.assertEqual(m.last_request.qs['precision'], ['s'])
|
||||
self.assertEqual(
|
||||
b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
|
||||
b'column_one="2",column_two=2i,column_three=2.0 3600\n',
|
||||
m.last_request.body,
|
||||
)
|
||||
|
||||
cli.write_points(dataframe, measurement, time_precision='ms')
|
||||
self.assertEqual(m.last_request.qs['precision'], ['ms'])
|
||||
self.assertEqual(
|
||||
b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
|
||||
b'column_one="2",column_two=2i,column_three=2.0 3600000\n',
|
||||
m.last_request.body,
|
||||
)
|
||||
|
||||
cli.write_points(dataframe, measurement, time_precision='u')
|
||||
self.assertEqual(m.last_request.qs['precision'], ['u'])
|
||||
self.assertEqual(
|
||||
b'foo column_one="1",column_two=1i,column_three=1.0 0\nfoo '
|
||||
b'column_one="2",column_two=2i,column_three=2.0 3600000000\n',
|
||||
m.last_request.body,
|
||||
)
|
||||
|
||||
cli.write_points(dataframe, measurement, time_precision='n')
|
||||
self.assertEqual(m.last_request.qs['precision'], ['n'])
|
||||
self.assertEqual(
|
||||
b'foo column_one="1",column_two=1i,column_three=1.0 0\n'
|
||||
b'foo column_one="2",column_two=2i,column_three=2.0 '
|
||||
b'3600000000000\n',
|
||||
m.last_request.body,
|
||||
)
|
||||
|
||||
@raises(TypeError)
|
||||
def test_write_points_from_dataframe_fails_without_time_index(self):
|
||||
"""Test failed write points from df without time index."""
|
||||
dataframe = pd.DataFrame(data=[["1", 1, 1.0], ["2", 2, 2.0]],
|
||||
columns=["column_one", "column_two",
|
||||
"column_three"])
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/db/db/series",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, "foo")
|
||||
|
||||
@raises(TypeError)
|
||||
def test_write_points_from_dataframe_fails_with_series(self):
|
||||
"""Test failed write points from df with series."""
|
||||
now = pd.Timestamp('1970-01-01 00:00+00:00')
|
||||
dataframe = pd.Series(data=[1.0, 2.0],
|
||||
index=[now, now + timedelta(hours=1)])
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.register_uri(requests_mock.POST,
|
||||
"http://localhost:8086/db/db/series",
|
||||
status_code=204)
|
||||
|
||||
cli = DataFrameClient(database='db')
|
||||
cli.write_points(dataframe, "foo")
|
||||
|
||||
def test_query_into_dataframe(self):
|
||||
"""Test query into df for TestDataFrameClient object."""
|
||||
data = {
|
||||
"results": [{
|
||||
"series": [
|
||||
{"measurement": "network",
|
||||
"tags": {"direction": ""},
|
||||
"columns": ["time", "value"],
|
||||
"values":[["2009-11-10T23:00:00Z", 23422]]
|
||||
},
|
||||
{"measurement": "network",
|
||||
"tags": {"direction": "in"},
|
||||
"columns": ["time", "value"],
|
||||
"values": [["2009-11-10T23:00:00Z", 23422],
|
||||
["2009-11-10T23:00:00Z", 23422],
|
||||
["2009-11-10T23:00:00Z", 23422]]
|
||||
}
|
||||
]
|
||||
}]
|
||||
}
|
||||
|
||||
pd1 = pd.DataFrame(
|
||||
[[23422]], columns=['value'],
|
||||
index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
|
||||
pd1.index = pd1.index.tz_localize('UTC')
|
||||
pd2 = pd.DataFrame(
|
||||
[[23422], [23422], [23422]], columns=['value'],
|
||||
index=pd.to_datetime(["2009-11-10T23:00:00Z",
|
||||
"2009-11-10T23:00:00Z",
|
||||
"2009-11-10T23:00:00Z"]))
|
||||
pd2.index = pd2.index.tz_localize('UTC')
|
||||
expected = {
|
||||
('network', (('direction', ''),)): pd1,
|
||||
('network', (('direction', 'in'),)): pd2
|
||||
}
|
||||
|
||||
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
|
||||
with _mocked_session(cli, 'GET', 200, data):
|
||||
result = cli.query('select value from network group by direction;')
|
||||
for k in expected:
|
||||
assert_frame_equal(expected[k], result[k])
|
||||
|
||||
def test_multiquery_into_dataframe(self):
|
||||
"""Test multiquyer into df for TestDataFrameClient object."""
|
||||
data = {
|
||||
"results": [
|
||||
{
|
||||
"series": [
|
||||
{
|
||||
"name": "cpu_load_short",
|
||||
"columns": ["time", "value"],
|
||||
"values": [
|
||||
["2015-01-29T21:55:43.702900257Z", 0.55],
|
||||
["2015-01-29T21:55:43.702900257Z", 23422],
|
||||
["2015-06-11T20:46:02Z", 0.64]
|
||||
]
|
||||
}
|
||||
]
|
||||
}, {
|
||||
"series": [
|
||||
{
|
||||
"name": "cpu_load_short",
|
||||
"columns": ["time", "count"],
|
||||
"values": [
|
||||
["1970-01-01T00:00:00Z", 3]
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
pd1 = pd.DataFrame(
|
||||
[[0.55], [23422.0], [0.64]], columns=['value'],
|
||||
index=pd.to_datetime([
|
||||
"2015-01-29 21:55:43.702900257+0000",
|
||||
"2015-01-29 21:55:43.702900257+0000",
|
||||
"2015-06-11 20:46:02+0000"])).tz_localize('UTC')
|
||||
pd2 = pd.DataFrame(
|
||||
[[3]], columns=['count'],
|
||||
index=pd.to_datetime(["1970-01-01 00:00:00+00:00"]))\
|
||||
.tz_localize('UTC')
|
||||
expected = [{'cpu_load_short': pd1}, {'cpu_load_short': pd2}]
|
||||
|
||||
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
|
||||
iql = "SELECT value FROM cpu_load_short WHERE region='us-west';"\
|
||||
"SELECT count(value) FROM cpu_load_short WHERE region='us-west'"
|
||||
with _mocked_session(cli, 'GET', 200, data):
|
||||
result = cli.query(iql)
|
||||
for r, e in zip(result, expected):
|
||||
for k in e:
|
||||
assert_frame_equal(e[k], r[k])
|
||||
|
||||
def test_query_with_empty_result(self):
|
||||
"""Test query with empty results in TestDataFrameClient object."""
|
||||
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
|
||||
with _mocked_session(cli, 'GET', 200, {"results": [{}]}):
|
||||
result = cli.query('select column_one from foo;')
|
||||
self.assertEqual(result, {})
|
||||
|
||||
def test_get_list_database(self):
|
||||
"""Test get list of databases in TestDataFrameClient object."""
|
||||
data = {'results': [
|
||||
{'series': [
|
||||
{'measurement': 'databases',
|
||||
'values': [
|
||||
['new_db_1'],
|
||||
['new_db_2']],
|
||||
'columns': ['name']}]}
|
||||
]}
|
||||
|
||||
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
|
||||
with _mocked_session(cli, 'get', 200, json.dumps(data)):
|
||||
self.assertListEqual(
|
||||
cli.get_list_database(),
|
||||
[{'name': 'new_db_1'}, {'name': 'new_db_2'}]
|
||||
)
|
||||
|
||||
def test_datetime_to_epoch(self):
|
||||
"""Test convert datetime to epoch in TestDataFrameClient object."""
|
||||
timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
|
||||
cli = DataFrameClient('host', 8086, 'username', 'password', 'db')
|
||||
|
||||
self.assertEqual(
|
||||
cli._datetime_to_epoch(timestamp),
|
||||
1356998400.0
|
||||
)
|
||||
self.assertEqual(
|
||||
cli._datetime_to_epoch(timestamp, time_precision='h'),
|
||||
1356998400.0 / 3600
|
||||
)
|
||||
self.assertEqual(
|
||||
cli._datetime_to_epoch(timestamp, time_precision='m'),
|
||||
1356998400.0 / 60
|
||||
)
|
||||
self.assertEqual(
|
||||
cli._datetime_to_epoch(timestamp, time_precision='s'),
|
||||
1356998400.0
|
||||
)
|
||||
self.assertEqual(
|
||||
cli._datetime_to_epoch(timestamp, time_precision='ms'),
|
||||
1356998400000.0
|
||||
)
|
||||
self.assertEqual(
|
||||
cli._datetime_to_epoch(timestamp, time_precision='u'),
|
||||
1356998400000000.0
|
||||
)
|
||||
self.assertEqual(
|
||||
cli._datetime_to_epoch(timestamp, time_precision='n'),
|
||||
1356998400000000000.0
|
||||
)
|
||||
|
||||
def test_dsn_constructor(self):
|
||||
"""Test data source name deconstructor in TestDataFrameClient."""
|
||||
client = DataFrameClient.from_dsn('influxdb://localhost:8086')
|
||||
self.assertIsInstance(client, DataFrameClient)
|
||||
self.assertEqual('http://localhost:8086', client._baseurl)
|
367
lib/influxdb/tests/helper_test.py
Normal file
367
lib/influxdb/tests/helper_test.py
Normal file
|
@ -0,0 +1,367 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Set of series helper functions for test."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
import mock
|
||||
from influxdb import SeriesHelper, InfluxDBClient
|
||||
from requests.exceptions import ConnectionError
|
||||
|
||||
|
||||
class TestSeriesHelper(unittest.TestCase):
|
||||
"""Define the SeriesHelper test object."""
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
"""Set up the TestSeriesHelper object."""
|
||||
super(TestSeriesHelper, cls).setUpClass()
|
||||
|
||||
TestSeriesHelper.client = InfluxDBClient(
|
||||
'host',
|
||||
8086,
|
||||
'username',
|
||||
'password',
|
||||
'database'
|
||||
)
|
||||
|
||||
class MySeriesHelper(SeriesHelper):
|
||||
"""Define a SeriesHelper object."""
|
||||
|
||||
class Meta:
|
||||
"""Define metadata for the SeriesHelper object."""
|
||||
|
||||
client = TestSeriesHelper.client
|
||||
series_name = 'events.stats.{server_name}'
|
||||
fields = ['some_stat']
|
||||
tags = ['server_name', 'other_tag']
|
||||
bulk_size = 5
|
||||
autocommit = True
|
||||
|
||||
TestSeriesHelper.MySeriesHelper = MySeriesHelper
|
||||
|
||||
def tearDown(self):
|
||||
"""Deconstruct the TestSeriesHelper object."""
|
||||
super(TestSeriesHelper, self).tearDown()
|
||||
TestSeriesHelper.MySeriesHelper._reset_()
|
||||
self.assertEqual(
|
||||
TestSeriesHelper.MySeriesHelper._json_body_(),
|
||||
[],
|
||||
'Resetting helper did not empty datapoints.')
|
||||
|
||||
def test_auto_commit(self):
|
||||
"""Test write_points called after valid number of events."""
|
||||
class AutoCommitTest(SeriesHelper):
|
||||
"""Define a SeriesHelper instance to test autocommit."""
|
||||
|
||||
class Meta:
|
||||
"""Define metadata for AutoCommitTest."""
|
||||
|
||||
series_name = 'events.stats.{server_name}'
|
||||
fields = ['some_stat']
|
||||
tags = ['server_name', 'other_tag']
|
||||
bulk_size = 5
|
||||
client = InfluxDBClient()
|
||||
autocommit = True
|
||||
|
||||
fake_write_points = mock.MagicMock()
|
||||
AutoCommitTest(server_name='us.east-1', some_stat=159, other_tag='gg')
|
||||
AutoCommitTest._client.write_points = fake_write_points
|
||||
AutoCommitTest(server_name='us.east-1', some_stat=158, other_tag='gg')
|
||||
AutoCommitTest(server_name='us.east-1', some_stat=157, other_tag='gg')
|
||||
AutoCommitTest(server_name='us.east-1', some_stat=156, other_tag='gg')
|
||||
self.assertFalse(fake_write_points.called)
|
||||
AutoCommitTest(server_name='us.east-1', some_stat=3443, other_tag='gg')
|
||||
self.assertTrue(fake_write_points.called)
|
||||
|
||||
@mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
def testSingleSeriesName(self, current_timestamp):
    """Test JSON conversion when there is only one series name."""
    current_timestamp.return_value = current_date = datetime.today()

    stats = (159, 158, 157, 156)
    for stat in stats:
        TestSeriesHelper.MySeriesHelper(
            server_name='us.east-1', other_tag='ello', some_stat=stat)

    # All four points share one measurement and differ only in some_stat.
    expectation = [
        {
            "measurement": "events.stats.us.east-1",
            "tags": {
                "other_tag": "ello",
                "server_name": "us.east-1"
            },
            "fields": {
                "some_stat": stat
            },
            "time": current_date,
        }
        for stat in stats
    ]

    rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
    # Order is not guaranteed, so compare as unordered collections.
    self.assertTrue(all([el in expectation for el in rcvd]) and
                    all([el in rcvd for el in expectation]),
                    'Invalid JSON body of time series returned from '
                    '_json_body_ for one series name: {0}.'.format(rcvd))
@mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
def testSeveralSeriesNames(self, current_timestamp):
    """Test JSON conversion when there are multiple series names."""
    current_timestamp.return_value = current_date = datetime.today()

    servers = [
        ('us.east-1', 159),
        ('fr.paris-10', 158),
        ('lu.lux', 157),
        ('uk.london', 156),
    ]
    for server, stat in servers:
        TestSeriesHelper.MySeriesHelper(
            server_name=server, some_stat=stat, other_tag='ello')

    # One point per server, measurement derived from the server name.
    expectation = [
        {
            'fields': {
                'some_stat': stat
            },
            'measurement': 'events.stats.{0}'.format(server),
            'tags': {
                'other_tag': 'ello',
                'server_name': server
            },
            "time": current_date,
        }
        for server, stat in servers
    ]

    rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
    # Order is not guaranteed, so compare as unordered collections.
    self.assertTrue(all([el in expectation for el in rcvd]) and
                    all([el in rcvd for el in expectation]),
                    'Invalid JSON body of time series returned from '
                    '_json_body_ for several series names: {0}.'
                    .format(rcvd))
@mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
def testSeriesWithoutTimeField(self, current_timestamp):
    """Test that time is optional on a series without a time field."""
    current_date = datetime.today()
    yesterday = current_date - timedelta(days=1)
    current_timestamp.return_value = yesterday

    # One point with an explicit time, one falling back to the mocked
    # current timestamp.
    TestSeriesHelper.MySeriesHelper(
        server_name='us.east-1', other_tag='ello',
        some_stat=159, time=current_date
    )
    TestSeriesHelper.MySeriesHelper(
        server_name='us.east-1', other_tag='ello',
        some_stat=158,
    )
    point1, point2 = TestSeriesHelper.MySeriesHelper._json_body_()
    self.assertTrue('time' in point1 and 'time' in point2)
    self.assertEqual(point1['time'], current_date)
    self.assertEqual(point2['time'], yesterday)
def testSeriesWithoutAllTags(self):
    """Test that creating a data point without a tag throws an error."""
    class MyTimeFieldSeriesHelper(SeriesHelper):
        """Helper that requires both server_name and other_tag tags."""

        class Meta:
            """Define metadata for MyTimeFieldSeriesHelper."""

            client = TestSeriesHelper.client
            series_name = 'events.stats.{server_name}'
            fields = ['some_stat', 'time']
            tags = ['server_name', 'other_tag']
            bulk_size = 5
            autocommit = True

    # other_tag is omitted, so instantiation must raise NameError.
    self.assertRaises(NameError, MyTimeFieldSeriesHelper,
                      **{"server_name": 'us.east-1',
                         "some_stat": 158})
@mock.patch('influxdb.helper.SeriesHelper._current_timestamp')
def testSeriesWithTimeField(self, current_timestamp):
    """Test that time is optional on a series with a time field."""
    current_date = datetime.today()
    yesterday = current_date - timedelta(days=1)
    current_timestamp.return_value = yesterday

    class MyTimeFieldSeriesHelper(SeriesHelper):
        """Helper whose declared fields include a time field."""

        class Meta:
            """Define metadata for MyTimeFieldSeriesHelper."""

            client = TestSeriesHelper.client
            series_name = 'events.stats.{server_name}'
            fields = ['some_stat', 'time']
            tags = ['server_name', 'other_tag']
            bulk_size = 5
            autocommit = True

    # One point with an explicit time, one falling back to the mock.
    MyTimeFieldSeriesHelper(
        server_name='us.east-1', other_tag='ello',
        some_stat=159, time=current_date
    )
    MyTimeFieldSeriesHelper(
        server_name='us.east-1', other_tag='ello',
        some_stat=158,
    )
    point1, point2 = MyTimeFieldSeriesHelper._json_body_()
    self.assertTrue('time' in point1 and 'time' in point2)
    self.assertEqual(point1['time'], current_date)
    self.assertEqual(point2['time'], yesterday)
def testInvalidHelpers(self):
    """Test errors in invalid helpers."""
    class MissingMeta(SeriesHelper):
        """SeriesHelper with no Meta class at all."""

        pass

    class MissingClient(SeriesHelper):
        """SeriesHelper whose Meta declares no client."""

        class Meta:
            """Define metadata for MissingClient."""

            series_name = 'events.stats.{server_name}'
            fields = ['time', 'server_name']
            autocommit = True

    class MissingSeriesName(SeriesHelper):
        """SeriesHelper whose Meta declares no series_name."""

        class Meta:
            """Define metadata for MissingSeriesName."""

            fields = ['time', 'server_name']

    class MissingFields(SeriesHelper):
        """SeriesHelper whose Meta declares no fields."""

        class Meta:
            """Define metadata for MissingFields."""

            series_name = 'events.stats.{server_name}'

    # Every malformed helper must refuse to create a datapoint.
    for cls in [MissingMeta, MissingClient, MissingFields,
                MissingSeriesName]:
        self.assertRaises(
            AttributeError, cls, **{'time': 159,
                                    'server_name': 'us.east-1'})
@unittest.skip("Fails on py32")
def testWarnBulkSizeZero(self):
    """Test warning for an invalid bulk size."""
    class WarnBulkSizeZero(SeriesHelper):
        """SeriesHelper whose zero bulk size must be coerced to 1."""

        class Meta:
            """Define metadata for WarnBulkSizeZero."""

            client = TestSeriesHelper.client
            series_name = 'events.stats.{server_name}'
            fields = ['time', 'server_name']
            tags = []
            bulk_size = 0
            autocommit = True

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        try:
            WarnBulkSizeZero(time=159, server_name='us.east-1')
        except ConnectionError:
            # Server defined in the client is invalid, we're testing
            # the warning only.
            pass
        self.assertEqual(len(caught), 1,
                         '{0} call should have generated one warning.'
                         .format(WarnBulkSizeZero))
        self.assertIn('forced to 1', str(caught[-1].message),
                      'Warning message did not contain "forced to 1".')
def testWarnBulkSizeNoEffect(self):
    """Test warning for a set bulk size but autocommit False."""
    class WarnBulkSizeNoEffect(SeriesHelper):
        """Define SeriesHelper for warning on bulk size."""

        class Meta:
            """Define metadata for WarnBulkSizeNoEffect."""

            series_name = 'events.stats.{server_name}'
            fields = ['time', 'server_name']
            bulk_size = 5
            tags = []
            autocommit = False

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        WarnBulkSizeNoEffect(time=159, server_name='us.east-1')
        self.assertEqual(len(w), 1,
                         '{0} call should have generated one warning.'
                         .format(WarnBulkSizeNoEffect))
        # The helper warns "has no affect" (sic) when bulk_size is set
        # while autocommit is disabled; the failure message previously
        # misquoted it as "has not affect".
        self.assertIn('has no affect', str(w[-1].message),
                      'Warning message did not contain "has no affect".')
2
lib/influxdb/tests/influxdb08/__init__.py
Normal file
2
lib/influxdb/tests/influxdb08/__init__.py
Normal file
|
@ -0,0 +1,2 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Define the influxdb08 test package."""
|
904
lib/influxdb/tests/influxdb08/client_test.py
Normal file
904
lib/influxdb/tests/influxdb08/client_test.py
Normal file
|
@ -0,0 +1,904 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Client unit tests."""
|
||||
|
||||
import json
|
||||
import socket
|
||||
import sys
|
||||
import unittest
|
||||
import random
|
||||
import warnings
|
||||
|
||||
import mock
|
||||
import requests
|
||||
import requests.exceptions
|
||||
import requests_mock
|
||||
|
||||
from nose.tools import raises
|
||||
from mock import patch
|
||||
|
||||
from influxdb.influxdb08 import InfluxDBClient
|
||||
from influxdb.influxdb08.client import session
|
||||
|
||||
if sys.version < '3':
    import codecs

    def u(x):
        """Test codec: decode with unicode-escape on Python 2."""
        return codecs.unicode_escape_decode(x)[0]
else:
    def u(x):
        """Test codec: identity on Python 3."""
        return x
def _build_response_object(status_code=200, content=""):
    """Build a requests.Response with the given status and body."""
    response = requests.Response()
    response.status_code = status_code
    # requests stores the raw body as bytes in _content.
    response._content = content.encode("utf8")
    return response
def _mocked_session(method="GET", status_code=200, content=""):
    """Patch session.request with a canned, self-validating reply."""
    expected_method = method.upper()

    def request(*args, **kwargs):
        """Define a request for the _mocked_session."""
        payload = content

        # Check method
        assert expected_method == kwargs.get('method', 'GET')

        if expected_method == 'POST':
            data = kwargs.get('data', None)

            if data is not None:
                # Data must be a string
                assert isinstance(data, str)

                # Data must be a JSON string
                assert payload == json.loads(data, strict=True)

                payload = data

        # Anyway, Content must be a JSON string (or empty string)
        if not isinstance(payload, str):
            payload = json.dumps(payload)

        return _build_response_object(status_code=status_code,
                                      content=payload)

    return patch.object(session, 'request', side_effect=request)
class TestInfluxDBClient(unittest.TestCase):
|
||||
"""Define a TestInfluxDBClient object."""
|
||||
|
||||
def setUp(self):
    """Set up a TestInfluxDBClient object."""
    # By default, raise exceptions on warnings
    warnings.simplefilter('error', FutureWarning)

    # One series with two points, reused by most write tests.
    self.dummy_points = [
        {
            "points": [["1", 1, 1.0], ["2", 2, 2.0]],
            "name": "foo",
            "columns": ["column_one", "column_two", "column_three"]
        }
    ]

    self.dsn_string = 'influxdb://uSr:pWd@host:1886/db'
def test_scheme(self):
    """Test database scheme for TestInfluxDBClient object."""
    # Plain HTTP by default.
    client = InfluxDBClient('host', 8086, 'username', 'password',
                            'database')
    self.assertEqual(client._baseurl, 'http://host:8086')

    # HTTPS when ssl=True is requested.
    client = InfluxDBClient(
        'host', 8086, 'username', 'password', 'database', ssl=True
    )
    self.assertEqual(client._baseurl, 'https://host:8086')
def test_dsn(self):
    """Test datasource name for TestInfluxDBClient object."""
    client = InfluxDBClient.from_dsn(self.dsn_string)
    self.assertEqual('http://host:1886', client._baseurl)
    self.assertEqual('uSr', client._username)
    self.assertEqual('pWd', client._password)
    self.assertEqual('db', client._database)
    self.assertFalse(client._use_udp)

    # A udp+ prefix switches the transport to UDP.
    client = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
    self.assertTrue(client._use_udp)

    # An https+ prefix switches the scheme to HTTPS...
    client = InfluxDBClient.from_dsn('https+' + self.dsn_string)
    self.assertEqual('https://host:1886', client._baseurl)

    # ...unless ssl=False explicitly overrides it.
    client = InfluxDBClient.from_dsn('https+' + self.dsn_string,
                                     **{'ssl': False})
    self.assertEqual('http://host:1886', client._baseurl)
def test_switch_database(self):
    """Test switch database for TestInfluxDBClient object."""
    client = InfluxDBClient('host', 8086, 'username', 'password',
                            'database')
    client.switch_database('another_database')
    self.assertEqual(client._database, 'another_database')
@raises(FutureWarning)
def test_switch_db_deprecated(self):
    """Test deprecated switch database for TestInfluxDBClient object."""
    client = InfluxDBClient('host', 8086, 'username', 'password',
                            'database')
    # switch_db emits a FutureWarning, which setUp turned into an error.
    client.switch_db('another_database')
    self.assertEqual(client._database, 'another_database')
def test_switch_user(self):
    """Test switch user for TestInfluxDBClient object."""
    client = InfluxDBClient('host', 8086, 'username', 'password',
                            'database')
    client.switch_user('another_username', 'another_password')
    self.assertEqual(client._username, 'another_username')
    self.assertEqual(client._password, 'another_password')
def test_write(self):
    """Test write to database for TestInfluxDBClient object."""
    # Hoist the payload so we can compare the echoed request body to it.
    payload = {
        "database": "mydb",
        "retentionPolicy": "mypolicy",
        "points": [{"name": "cpu_load_short",
                    "tags": {"host": "server01",
                             "region": "us-west"},
                    "timestamp": "2009-11-10T23:00:00Z",
                    "values": {"value": 0.64}}]
    }
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/write"
        )
        client = InfluxDBClient(database='db')
        client.write(payload)

        # The body posted to /write must be exactly the payload.
        self.assertEqual(json.loads(m.last_request.body), payload)
def test_write_points(self):
    """Test write points for TestInfluxDBClient object."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/series"
        )

        client = InfluxDBClient(database='db')
        client.write_points(self.dummy_points)

        # The posted body round-trips to the original points.
        self.assertListEqual(json.loads(m.last_request.body),
                             self.dummy_points)
def test_write_points_string(self):
    """Test write string points for TestInfluxDBClient object."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/series"
        )

        client = InfluxDBClient(database='db')
        # Pre-serialized JSON is accepted as-is.
        client.write_points(str(json.dumps(self.dummy_points)))

        self.assertListEqual(json.loads(m.last_request.body),
                             self.dummy_points)
def test_write_points_batch(self):
    """Test write batch points for TestInfluxDBClient object."""
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/db/db/series")
        client = InfluxDBClient('localhost', 8086,
                                'username', 'password', 'db')
        # Two points with batch_size=2 fit in a single request.
        client.write_points(data=self.dummy_points, batch_size=2)
    self.assertEqual(1, m.call_count)
def test_write_points_batch_invalid_size(self):
    """Test write batch points invalid size for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/db/db/series")
        client = InfluxDBClient('localhost', 8086,
                                'username', 'password', 'db')
        # A non-positive batch size falls back to one single request.
        client.write_points(data=self.dummy_points, batch_size=-2)
    self.assertEqual(1, m.call_count)
def test_write_points_batch_multiple_series(self):
    """Test write points batch multiple series."""
    dummy_points = [
        {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
                    ["4", 4, 4.0], ["5", 5, 5.0]],
         "name": "foo",
         "columns": ["val1", "val2", "val3"]},
        {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
                    ["4", 4, 4.0], ["5", 5, 5.0], ["6", 6, 6.0],
                    ["7", 7, 7.0], ["8", 8, 8.0]],
         "name": "bar",
         "columns": ["val1", "val2", "val3"]},
    ]
    # 5 + 8 points at batch_size=3 -> 2 + 3 = 5 requests; the last
    # request carries only the final two points of "bar".
    expected_last_body = [{'points': [['7', 7, 7.0], ['8', 8, 8.0]],
                           'name': 'bar',
                           'columns': ['val1', 'val2', 'val3']}]
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/db/db/series")
        client = InfluxDBClient('localhost', 8086,
                                'username', 'password', 'db')
        client.write_points(data=dummy_points, batch_size=3)
        self.assertEqual(m.call_count, 5)
        self.assertEqual(expected_last_body, m.request_history[4].json())
def test_write_points_udp(self):
    """Test write points UDP for TestInfluxDBClient object."""
    # Listen on a random local UDP port and point the client at it.
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    port = random.randint(4000, 8000)
    sock.bind(('0.0.0.0', port))

    client = InfluxDBClient(
        'localhost', 8086, 'root', 'root',
        'test', use_udp=True, udp_port=port
    )
    client.write_points(self.dummy_points)

    received_data, addr = sock.recvfrom(1024)

    self.assertEqual(self.dummy_points,
                     json.loads(received_data.decode(), strict=True))
def test_write_bad_precision_udp(self):
    """Test write UDP w/bad precision."""
    client = InfluxDBClient(
        'localhost', 8086, 'root', 'root',
        'test', use_udp=True, udp_port=4444
    )

    # UDP writes only accept seconds precision; 'ms' must be rejected.
    with self.assertRaisesRegexp(
            Exception,
            "InfluxDB only supports seconds precision for udp writes"):
        client.write_points(self.dummy_points, time_precision='ms')
@raises(Exception)
def test_write_points_fails(self):
    """Test failed write points for TestInfluxDBClient object."""
    # A 500 from the server must surface as an exception.
    with _mocked_session('post', 500):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        client.write_points([])
def test_write_points_with_precision(self):
    """Test write points with precision."""
    with _mocked_session('post', 200, self.dummy_points):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        self.assertTrue(client.write_points(self.dummy_points))
def test_write_points_bad_precision(self):
    """Test write points with bad precision."""
    cli = InfluxDBClient()
    # Raw string literals: the parentheses are regex-escaped, and raw
    # form avoids the invalid "\(" string-escape warning on Python 3.
    with self.assertRaisesRegexp(
        Exception,
        r"Invalid time precision is given. "
        r"\(use 's', 'm', 'ms' or 'u'\)"
    ):
        cli.write_points(
            self.dummy_points,
            time_precision='g'
        )
@raises(Exception)
def test_write_points_with_precision_fails(self):
    """Test write points where precision fails."""
    # A 500 from the server must surface as an exception.
    with _mocked_session('post', 500):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        client.write_points_with_precision([])
def test_delete_points(self):
    """Test delete points for TestInfluxDBClient object."""
    with _mocked_session('delete', 204) as mocked:
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        self.assertTrue(client.delete_points("foo"))

        # Exactly one DELETE, with credentials and the series URL.
        self.assertEqual(len(mocked.call_args_list), 1)
        args, kwds = mocked.call_args_list[0]

        self.assertEqual(kwds['params'],
                         {'u': 'username', 'p': 'password'})
        self.assertEqual(kwds['url'],
                         'http://host:8086/db/db/series/foo')
@raises(Exception)
def test_delete_points_with_wrong_name(self):
    """Test delete points with wrong name."""
    # A 400 from the server must surface as an exception.
    with _mocked_session('delete', 400):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        client.delete_points("nonexist")
@raises(NotImplementedError)
def test_create_scheduled_delete(self):
    """Test create scheduled deletes."""
    client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    client.create_scheduled_delete([])
@raises(NotImplementedError)
def test_get_list_scheduled_delete(self):
    """Test get schedule list of deletes TestInfluxDBClient."""
    client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    client.get_list_scheduled_delete()
@raises(NotImplementedError)
def test_remove_scheduled_delete(self):
    """Test remove scheduled delete TestInfluxDBClient."""
    client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    client.remove_scheduled_delete(1)
def test_query(self):
    """Test query for TestInfluxDBClient object."""
    # Canned response: one series with four points.
    data = [
        {
            "name": "foo",
            "columns": ["time", "sequence_number", "column_one"],
            "points": [
                [1383876043, 16, "2"], [1383876043, 15, "1"],
                [1383876035, 14, "2"], [1383876035, 13, "1"]
            ]
        }
    ]
    with _mocked_session('get', 200, data):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        result = client.query('select column_one from foo;')
        self.assertEqual(len(result[0]['points']), 4)
def test_query_chunked(self):
    """Test chunked query for TestInfluxDBClient object."""
    client = InfluxDBClient(database='db')
    chunk = {
        'points': [
            [1415206250119, 40001, 667],
            [1415206244555, 30001, 7],
            [1415206228241, 20001, 788],
            [1415206212980, 10001, 555],
            [1415197271586, 10001, 23]
        ],
        'name': 'foo',
        'columns': [
            'time',
            'sequence_number',
            'val'
        ]
    }
    # Two concatenated JSON objects, as the server streams chunks.
    example_response = json.dumps(chunk) + json.dumps(chunk)

    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=example_response
        )

        self.assertListEqual(
            client.query('select * from foo', chunked=True),
            [chunk, chunk]
        )
def test_query_chunked_unicode(self):
    """Test unicode chunked query for TestInfluxDBClient object."""
    client = InfluxDBClient(database='db')
    chunk = {
        'points': [
            [1415206212980, 10001, u('unicode-\xcf\x89')],
            [1415197271586, 10001, u('more-unicode-\xcf\x90')]
        ],
        'name': 'foo',
        'columns': [
            'time',
            'sequence_number',
            'val'
        ]
    }
    # Two concatenated JSON objects, as the server streams chunks.
    example_response = json.dumps(chunk) + json.dumps(chunk)

    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=example_response
        )

        self.assertListEqual(
            client.query('select * from foo', chunked=True),
            [chunk, chunk]
        )
@raises(Exception)
def test_query_fail(self):
    """Test failed query for TestInfluxDBClient."""
    # A 401 from the server must surface as an exception.
    with _mocked_session('get', 401):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        client.query('select column_one from foo;')
def test_query_bad_precision(self):
    """Test query with bad precision for TestInfluxDBClient."""
    cli = InfluxDBClient()
    # Raw string literals: the parentheses are regex-escaped, and raw
    # form avoids the invalid "\(" string-escape warning on Python 3.
    with self.assertRaisesRegexp(
        Exception,
        r"Invalid time precision is given. "
        r"\(use 's', 'm', 'ms' or 'u'\)"
    ):
        cli.query('select column_one from foo', time_precision='g')
def test_create_database(self):
    """Test create database for TestInfluxDBClient."""
    with _mocked_session('post', 201, {"name": "new_db"}):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        self.assertTrue(client.create_database('new_db'))
@raises(Exception)
def test_create_database_fails(self):
    """Test failed create database for TestInfluxDBClient."""
    # A 401 from the server must surface as an exception.
    with _mocked_session('post', 401):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        client.create_database('new_db')
def test_delete_database(self):
    """Test delete database for TestInfluxDBClient."""
    with _mocked_session('delete', 204):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        self.assertTrue(client.delete_database('old_db'))
@raises(Exception)
def test_delete_database_fails(self):
    """Test failed delete database for TestInfluxDBClient."""
    # A 401 from the server must surface as an exception.
    with _mocked_session('delete', 401):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        client.delete_database('old_db')
def test_get_list_database(self):
    """Test get list of databases for TestInfluxDBClient."""
    data = [{"name": "a_db"}]
    with _mocked_session('get', 200, data):
        client = InfluxDBClient('host', 8086, 'username', 'password')
        self.assertEqual(len(client.get_list_database()), 1)
        self.assertEqual(client.get_list_database()[0]['name'], 'a_db')
@raises(Exception)
def test_get_list_database_fails(self):
    """Test failed get list of databases for TestInfluxDBClient."""
    # A 401 from the server must surface as an exception.
    with _mocked_session('get', 401):
        client = InfluxDBClient('host', 8086, 'username', 'password')
        client.get_list_database()
@raises(FutureWarning)
def test_get_database_list_deprecated(self):
    """Test deprecated get database list for TestInfluxDBClient."""
    data = [{"name": "a_db"}]
    # get_database_list emits a FutureWarning, which setUp turned
    # into an error.
    with _mocked_session('get', 200, data):
        client = InfluxDBClient('host', 8086, 'username', 'password')
        self.assertEqual(len(client.get_database_list()), 1)
        self.assertEqual(client.get_database_list()[0]['name'], 'a_db')
def test_delete_series(self):
    """Test delete series for TestInfluxDBClient."""
    with _mocked_session('delete', 204):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        client.delete_series('old_series')
@raises(Exception)
def test_delete_series_fails(self):
    """Test failed delete series for TestInfluxDBClient."""
    # A 401 from the server must surface as an exception.
    with _mocked_session('delete', 401):
        client = InfluxDBClient('host', 8086, 'username', 'password',
                                'db')
        client.delete_series('old_series')
def test_get_series_list(self):
    """Test get list of series for TestInfluxDBClient."""
    client = InfluxDBClient(database='db')

    with requests_mock.Mocker() as m:
        example_response = \
            '[{"name":"list_series_result","columns":' \
            '["time","name"],"points":[[0,"foo"],[0,"bar"]]}]'

        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=example_response
        )

        # Only the series names are extracted from the response.
        self.assertListEqual(client.get_list_series(), ['foo', 'bar'])
def test_get_continuous_queries(self):
    """Test get continuous queries for TestInfluxDBClient."""
    client = InfluxDBClient(database='db')

    with requests_mock.Mocker() as m:

        # Tip: put this in a json linter!
        example_response = '[ { "name": "continuous queries", "columns"' \
                           ': [ "time", "id", "query" ], "points": [ [ ' \
                           '0, 1, "select foo(bar,95) from \\"foo_bar' \
                           's\\" group by time(5m) into response_times.' \
                           'percentiles.5m.95" ], [ 0, 2, "select perce' \
                           'ntile(value,95) from \\"response_times\\" g' \
                           'roup by time(5m) into response_times.percen' \
                           'tiles.5m.95" ] ] } ]'

        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=example_response
        )

        # Only the query text of each point is returned.
        self.assertListEqual(
            client.get_list_continuous_queries(),
            [
                'select foo(bar,95) from "foo_bars" group '
                'by time(5m) into response_times.percentiles.5m.95',

                'select percentile(value,95) from "response_times" group '
                'by time(5m) into response_times.percentiles.5m.95'
            ]
        )
def test_get_list_cluster_admins(self):
    """Test get list of cluster admins, not implemented."""
    # Intentionally empty: the influxdb08 client offers nothing to test
    # here yet.
    pass
def test_add_cluster_admin(self):
    """Test add cluster admin for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/cluster_admins"
        )

        client = InfluxDBClient(database='db')
        client.add_cluster_admin(new_username='paul',
                                 new_password='laup')

        # The request body carries the new credentials.
        self.assertDictEqual(json.loads(m.last_request.body),
                             {'name': 'paul', 'password': 'laup'})
def test_update_cluster_admin_password(self):
    """Test update cluster admin pass for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/cluster_admins/paul"
        )

        client = InfluxDBClient(database='db')
        client.update_cluster_admin_password(username='paul',
                                             new_password='laup')

        # Only the new password travels in the request body.
        self.assertDictEqual(json.loads(m.last_request.body),
                             {'password': 'laup'})
def test_delete_cluster_admin(self):
    """Test delete cluster admin for TestInfluxDBClient."""
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.DELETE,
            "http://localhost:8086/cluster_admins/paul",
            status_code=200,
        )

        client = InfluxDBClient(database='db')
        client.delete_cluster_admin(username='paul')

        # DELETE carries no request body.
        self.assertIsNone(m.last_request.body)
def test_set_database_admin(self):
    """Test set database admin for TestInfluxDBClient."""
    # Intentionally empty: no assertions defined for this endpoint yet.
    pass
def test_unset_database_admin(self):
    """Test unset database admin for TestInfluxDBClient."""
    # Intentionally empty: no assertions defined for this endpoint yet.
    pass
def test_alter_database_admin(self):
    """Altering admin status must POST the new 'admin' flag."""
    with requests_mock.Mocker() as mocker:
        mocker.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users/paul"
        )

        client = InfluxDBClient(database='db')
        client.alter_database_admin(username='paul', is_admin=False)

        self.assertDictEqual(
            json.loads(mocker.last_request.body),
            {'admin': False}
        )
|
||||
|
||||
@raises(NotImplementedError)
def test_get_list_database_admins(self):
    """Listing database admins is expected to raise NotImplementedError."""
    client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    client.get_list_database_admins()
|
||||
|
||||
@raises(NotImplementedError)
def test_add_database_admin(self):
    """Adding a database admin is expected to raise NotImplementedError."""
    client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    client.add_database_admin('admin', 'admin_secret_password')
|
||||
|
||||
@raises(NotImplementedError)
def test_update_database_admin_password(self):
    """Updating a database admin password must raise NotImplementedError."""
    client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    client.update_database_admin_password('admin', 'admin_secret_password')
|
||||
|
||||
@raises(NotImplementedError)
def test_delete_database_admin(self):
    """Deleting a database admin is expected to raise NotImplementedError."""
    client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    client.delete_database_admin('admin')
|
||||
|
||||
def test_get_database_users(self):
    """The user list returned must match the server's JSON payload."""
    client = InfluxDBClient('localhost', 8086, 'username', 'password', 'db')

    example_response = (
        '[{"name":"paul","isAdmin":false,"writeTo":".*","readFrom":".*"},'
        '{"name":"bobby","isAdmin":false,"writeTo":".*","readFrom":".*"}]'
    )

    with requests_mock.Mocker() as mocker:
        mocker.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/users",
            text=example_response
        )
        users = client.get_database_users()

    self.assertEqual(json.loads(example_response), users)
|
||||
|
||||
def test_add_database_user(self):
    """Adding a user must POST name, password and both permission regexes."""
    with requests_mock.Mocker() as mocker:
        mocker.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users"
        )
        client = InfluxDBClient(database='db')
        client.add_database_user(
            new_username='paul',
            new_password='laup',
            permissions=('.*', '.*')
        )

        expected_body = {
            'writeTo': '.*',
            'password': 'laup',
            'readFrom': '.*',
            'name': 'paul'
        }
        self.assertDictEqual(
            json.loads(mocker.last_request.body),
            expected_body
        )
|
||||
|
||||
def test_add_database_user_bad_permissions(self):
    """Test add database user with bad perms for TestInfluxDBClient.

    A permissions argument that is not a 2-tuple must be rejected.
    """
    cli = InfluxDBClient()

    # Raw string: the original plain literal contained "\(" / "\)",
    # which are invalid escape sequences (SyntaxWarning on modern
    # CPython, an error in the future). The regex itself is unchanged.
    with self.assertRaisesRegexp(
            Exception,
            r"'permissions' must be \(readFrom, writeTo\) tuple"
    ):
        cli.add_database_user(
            new_password='paul',
            new_username='paul',
            permissions=('hello', 'hello', 'hello')
        )
|
||||
|
||||
def test_alter_database_user_password(self):
    """Altering only the password must POST just the 'password' field."""
    with requests_mock.Mocker() as mocker:
        mocker.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users/paul"
        )

        client = InfluxDBClient(database='db')
        client.alter_database_user(username='paul', password='n3wp4ss!')

        self.assertDictEqual(
            json.loads(mocker.last_request.body),
            {'password': 'n3wp4ss!'}
        )
|
||||
|
||||
def test_alter_database_user_permissions(self):
    """Altering only permissions must POST readFrom/writeTo regexes."""
    with requests_mock.Mocker() as mocker:
        mocker.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users/paul"
        )

        client = InfluxDBClient(database='db')
        client.alter_database_user(
            username='paul',
            permissions=('^$', '.*')
        )

        self.assertDictEqual(
            json.loads(mocker.last_request.body),
            {'readFrom': '^$', 'writeTo': '.*'}
        )
|
||||
|
||||
def test_alter_database_user_password_and_permissions(self):
    """Altering both fields must POST password plus both regexes."""
    with requests_mock.Mocker() as mocker:
        mocker.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/db/users/paul"
        )

        client = InfluxDBClient(database='db')
        client.alter_database_user(
            username='paul',
            password='n3wp4ss!',
            permissions=('^$', '.*')
        )

        expected_body = {
            'password': 'n3wp4ss!',
            'readFrom': '^$',
            'writeTo': '.*'
        }
        self.assertDictEqual(
            json.loads(mocker.last_request.body),
            expected_body
        )
|
||||
|
||||
def test_update_database_user_password_current_user(self):
    """Changing one's own password must also update the cached credential."""
    client = InfluxDBClient(
        username='root',
        password='hello',
        database='database'
    )
    with requests_mock.Mocker() as mocker:
        mocker.register_uri(
            requests_mock.POST,
            "http://localhost:8086/db/database/users/root"
        )

        client.update_database_user_password(
            username='root',
            new_password='bye'
        )

        # The client keeps using the new password for later requests.
        self.assertEqual(client._password, 'bye')
|
||||
|
||||
def test_delete_database_user(self):
    """Deleting a database user must issue a bodyless DELETE request."""
    with requests_mock.Mocker() as mocker:
        mocker.register_uri(
            requests_mock.DELETE,
            "http://localhost:8086/db/db/users/paul"
        )

        client = InfluxDBClient(database='db')
        client.delete_database_user(username='paul')

        self.assertIsNone(mocker.last_request.body)
|
||||
|
||||
@raises(NotImplementedError)
def test_update_permission(self):
    """Updating a permission is expected to raise NotImplementedError."""
    client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
    client.update_permission('admin', [])
|
||||
|
||||
@mock.patch('requests.Session.request')
def test_request_retry(self, mock_request):
    """Two consecutive connection errors are absorbed by the retry logic."""
    class CustomMock(object):
        """Fail the first two calls, then answer with HTTP 200."""

        def __init__(self):
            """Initialize the call counter."""
            self.i = 0

        def connection_error(self, *args, **kwargs):
            """Raise ConnectionError twice, then return a 200 response."""
            self.i += 1
            if self.i < 3:
                raise requests.exceptions.ConnectionError
            response = requests.Response()
            response.status_code = 200
            return response

    mock_request.side_effect = CustomMock().connection_error

    client = InfluxDBClient(database='db')
    # Must succeed: the client retries past the two failures.
    client.write_points(self.dummy_points)
|
||||
|
||||
@mock.patch('requests.Session.request')
def test_request_retry_raises(self, mock_request):
    """Three consecutive connection errors exhaust the retries."""
    class CustomMock(object):
        """Fail the first three calls, then answer with HTTP 200."""

        def __init__(self):
            """Initialize the call counter."""
            self.i = 0

        def connection_error(self, *args, **kwargs):
            """Raise ConnectionError three times, then return HTTP 200."""
            self.i += 1
            if self.i < 4:
                raise requests.exceptions.ConnectionError
            response = requests.Response()
            response.status_code = 200
            return response

    mock_request.side_effect = CustomMock().connection_error

    client = InfluxDBClient(database='db')

    # The third failure exceeds the retry budget and must propagate.
    with self.assertRaises(requests.exceptions.ConnectionError):
        client.write_points(self.dummy_points)
|
331
lib/influxdb/tests/influxdb08/dataframe_client_test.py
Normal file
331
lib/influxdb/tests/influxdb08/dataframe_client_test.py
Normal file
|
@ -0,0 +1,331 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for misc module."""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
import copy
|
||||
import json
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
import requests_mock
|
||||
|
||||
from nose.tools import raises
|
||||
|
||||
from influxdb.tests import skipIfPYpy, using_pypy
|
||||
|
||||
from .client_test import _mocked_session
|
||||
|
||||
if not using_pypy:
|
||||
import pandas as pd
|
||||
from pandas.util.testing import assert_frame_equal
|
||||
from influxdb.influxdb08 import DataFrameClient
|
||||
|
||||
|
||||
@skipIfPYpy
class TestDataFrameClient(unittest.TestCase):
    """Unit tests for the influxdb08 DataFrameClient."""

    def setUp(self):
        """Promote FutureWarning to an error for every test."""
        warnings.simplefilter('error', FutureWarning)

    def test_write_points_from_dataframe(self):
        """A time-indexed dataframe serializes to the expected payload."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        frame = pd.DataFrame(
            data=[["1", 1, 1.0], ["2", 2, 2.0]],
            index=[now, now + timedelta(hours=1)],
            columns=["column_one", "column_two", "column_three"])
        expected = [{
            "points": [["1", 1, 1.0, 0], ["2", 2, 2.0, 3600]],
            "name": "foo",
            "columns": ["column_one", "column_two", "column_three", "time"],
        }]

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/db/db/series")

            client = DataFrameClient(database='db')
            client.write_points({"foo": frame})

            self.assertListEqual(json.loads(mocker.last_request.body),
                                 expected)

    def test_write_points_from_dataframe_with_float_nan(self):
        """NaN values must be serialized as JSON null."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        frame = pd.DataFrame(
            data=[[1, float("NaN"), 1.0], [2, 2, 2.0]],
            index=[now, now + timedelta(hours=1)],
            columns=["column_one", "column_two", "column_three"])
        expected = [{
            "points": [[1, None, 1.0, 0], [2, 2, 2.0, 3600]],
            "name": "foo",
            "columns": ["column_one", "column_two", "column_three", "time"],
        }]

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/db/db/series")

            client = DataFrameClient(database='db')
            client.write_points({"foo": frame})

            self.assertListEqual(json.loads(mocker.last_request.body),
                                 expected)

    def test_write_points_from_dataframe_in_batches(self):
        """Batched writes must succeed and report True."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        frame = pd.DataFrame(
            data=[["1", 1, 1.0], ["2", 2, 2.0]],
            index=[now, now + timedelta(hours=1)],
            columns=["column_one", "column_two", "column_three"])

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/db/db/series")

            client = DataFrameClient(database='db')
            self.assertTrue(client.write_points({"foo": frame},
                                                batch_size=1))

    def test_write_points_from_dataframe_with_numeric_column_names(self):
        """Numeric column labels are converted to strings in the payload."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        # Dataframe with default (numeric) column labels.
        frame = pd.DataFrame(
            data=[["1", 1, 1.0], ["2", 2, 2.0]],
            index=[now, now + timedelta(hours=1)])
        expected = [{
            "points": [["1", 1, 1.0, 0], ["2", 2, 2.0, 3600]],
            "name": "foo",
            "columns": ['0', '1', '2', "time"],
        }]

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/db/db/series")

            client = DataFrameClient(database='db')
            client.write_points({"foo": frame})

            self.assertListEqual(json.loads(mocker.last_request.body),
                                 expected)

    def test_write_points_from_dataframe_with_period_index(self):
        """A PeriodIndex is converted to epoch seconds."""
        frame = pd.DataFrame(
            data=[["1", 1, 1.0], ["2", 2, 2.0]],
            index=[pd.Period('1970-01-01'), pd.Period('1970-01-02')],
            columns=["column_one", "column_two", "column_three"])
        expected = [{
            "points": [["1", 1, 1.0, 0], ["2", 2, 2.0, 86400]],
            "name": "foo",
            "columns": ["column_one", "column_two", "column_three", "time"],
        }]

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/db/db/series")

            client = DataFrameClient(database='db')
            client.write_points({"foo": frame})

            self.assertListEqual(json.loads(mocker.last_request.body),
                                 expected)

    def test_write_points_from_dataframe_with_time_precision(self):
        """Timestamps scale with the requested time precision."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        frame = pd.DataFrame(
            data=[["1", 1, 1.0], ["2", 2, 2.0]],
            index=[now, now + timedelta(hours=1)],
            columns=["column_one", "column_two", "column_three"])
        points = [{
            "points": [["1", 1, 1.0, 0], ["2", 2, 2.0, 3600]],
            "name": "foo",
            "columns": ["column_one", "column_two", "column_three", "time"],
        }]

        points_ms = copy.deepcopy(points)
        points_ms[0]["points"][1][-1] = 3600 * 1000

        points_us = copy.deepcopy(points)
        points_us[0]["points"][1][-1] = 3600 * 1000000

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/db/db/series")

            client = DataFrameClient(database='db')

            client.write_points({"foo": frame}, time_precision='s')
            self.assertListEqual(json.loads(mocker.last_request.body),
                                 points)

            client.write_points({"foo": frame}, time_precision='m')
            self.assertListEqual(json.loads(mocker.last_request.body),
                                 points_ms)

            client.write_points({"foo": frame}, time_precision='u')
            self.assertListEqual(json.loads(mocker.last_request.body),
                                 points_us)

    @raises(TypeError)
    def test_write_points_from_dataframe_fails_without_time_index(self):
        """Writing a dataframe without a time index must raise TypeError."""
        frame = pd.DataFrame(
            data=[["1", 1, 1.0], ["2", 2, 2.0]],
            columns=["column_one", "column_two", "column_three"])

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/db/db/series")

            client = DataFrameClient(database='db')
            client.write_points({"foo": frame})

    @raises(TypeError)
    def test_write_points_from_dataframe_fails_with_series(self):
        """Writing a Series instead of a DataFrame must raise TypeError."""
        now = pd.Timestamp('1970-01-01 00:00+00:00')
        series = pd.Series(data=[1.0, 2.0],
                           index=[now, now + timedelta(hours=1)])

        with requests_mock.Mocker() as mocker:
            mocker.register_uri(requests_mock.POST,
                                "http://localhost:8086/db/db/series")

            client = DataFrameClient(database='db')
            client.write_points({"foo": series})

    def test_query_into_dataframe(self):
        """A query response converts to a time-sorted dataframe."""
        data = [{
            "name": "foo",
            "columns": ["time", "sequence_number", "column_one"],
            "points": [
                [3600, 16, 2], [3600, 15, 1],
                [0, 14, 2], [0, 13, 1]
            ]
        }]
        # Expected frame is sorted ascending by time, then sequence_number.
        expected = pd.DataFrame(
            data=[[13, 1], [14, 2], [15, 1], [16, 2]],
            index=pd.to_datetime([0, 0, 3600, 3600], unit='s', utc=True),
            columns=['sequence_number', 'column_one'])

        with _mocked_session('get', 200, data):
            client = DataFrameClient('host', 8086, 'username', 'password',
                                     'db')
            result = client.query('select column_one from foo;')
            assert_frame_equal(expected, result)

    def test_query_multiple_time_series(self):
        """Multiple series in one response map to a dict of dataframes."""
        data = [
            {
                "name": "series1",
                "columns": ["time", "mean", "min", "max", "stddev"],
                "points": [[0, 323048, 323048, 323048, 0]]
            },
            {
                "name": "series2",
                "columns": ["time", "mean", "min", "max", "stddev"],
                "points": [[0, -2.8233, -2.8503, -2.7832, 0.0173]]
            },
            {
                "name": "series3",
                "columns": ["time", "mean", "min", "max", "stddev"],
                "points": [[0, -0.01220, -0.01220, -0.01220, 0]]
            }
        ]
        index = pd.to_datetime([0], unit='s', utc=True)
        stat_columns = ['mean', 'min', 'max', 'stddev']
        expected = {
            'series1': pd.DataFrame(data=[[323048, 323048, 323048, 0]],
                                    index=index, columns=stat_columns),
            'series2': pd.DataFrame(data=[[-2.8233, -2.8503, -2.7832,
                                           0.0173]],
                                    index=index, columns=stat_columns),
            'series3': pd.DataFrame(data=[[-0.01220, -0.01220, -0.01220,
                                           0]],
                                    index=index, columns=stat_columns),
        }

        with _mocked_session('get', 200, data):
            client = DataFrameClient('host', 8086, 'username', 'password',
                                     'db')
            result = client.query("""select mean(value), min(value), max(value),
                stddev(value) from series1, series2, series3""")
            self.assertEqual(expected.keys(), result.keys())
            for key in expected.keys():
                assert_frame_equal(expected[key], result[key])

    def test_query_with_empty_result(self):
        """An empty response must come back as an empty list."""
        with _mocked_session('get', 200, []):
            client = DataFrameClient('host', 8086, 'username', 'password',
                                     'db')
            result = client.query('select column_one from foo;')
            self.assertEqual(result, [])

    def test_list_series(self):
        """list_series must flatten the server response to plain names."""
        response = [{
            'columns': ['time', 'name'],
            'name': 'list_series_result',
            'points': [[0, 'seriesA'], [0, 'seriesB']]
        }]
        with _mocked_session('get', 200, response):
            client = DataFrameClient('host', 8086, 'username', 'password',
                                     'db')
            series_list = client.get_list_series()
            self.assertEqual(series_list, ['seriesA', 'seriesB'])

    def test_datetime_to_epoch(self):
        """_datetime_to_epoch must honor every supported precision."""
        timestamp = pd.Timestamp('2013-01-01 00:00:00.000+00:00')
        client = DataFrameClient('host', 8086, 'username', 'password', 'db')

        self.assertEqual(
            client._datetime_to_epoch(timestamp),
            1356998400.0
        )
        self.assertEqual(
            client._datetime_to_epoch(timestamp, time_precision='s'),
            1356998400.0
        )
        self.assertEqual(
            client._datetime_to_epoch(timestamp, time_precision='m'),
            1356998400000.0
        )
        self.assertEqual(
            client._datetime_to_epoch(timestamp, time_precision='ms'),
            1356998400000.0
        )
        self.assertEqual(
            client._datetime_to_epoch(timestamp, time_precision='u'),
            1356998400000000.0
        )
|
228
lib/influxdb/tests/influxdb08/helper_test.py
Normal file
228
lib/influxdb/tests/influxdb08/helper_test.py
Normal file
|
@ -0,0 +1,228 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Define set of helper functions for the dataframe."""
|
||||
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
import mock
|
||||
from influxdb.influxdb08 import SeriesHelper, InfluxDBClient
|
||||
from requests.exceptions import ConnectionError
|
||||
|
||||
|
||||
class TestSeriesHelper(unittest.TestCase):
    """Unit tests for the influxdb08 SeriesHelper."""

    @classmethod
    def setUpClass(cls):
        """Create the shared client and a helper subclass bound to it."""
        super(TestSeriesHelper, cls).setUpClass()

        TestSeriesHelper.client = InfluxDBClient(
            'host',
            8086,
            'username',
            'password',
            'database'
        )

        class MySeriesHelper(SeriesHelper):
            """Helper bound to the shared test client."""

            class Meta:
                """Series metadata for MySeriesHelper."""

                client = TestSeriesHelper.client
                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                bulk_size = 5
                autocommit = True

        TestSeriesHelper.MySeriesHelper = MySeriesHelper

    def test_auto_commit(self):
        """write_points must fire exactly when bulk_size points queue up."""
        class AutoCommitTest(SeriesHelper):
            """Helper with autocommit on and a bulk size of five."""

            class Meta:
                """Series metadata for AutoCommitTest."""

                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                bulk_size = 5
                client = InfluxDBClient()
                autocommit = True

        fake_write_points = mock.MagicMock()
        AutoCommitTest(server_name='us.east-1', time=159)
        # Swap in the mock after the first datapoint so the helper's
        # client is already resolved.
        AutoCommitTest._client.write_points = fake_write_points
        AutoCommitTest(server_name='us.east-1', time=158)
        AutoCommitTest(server_name='us.east-1', time=157)
        AutoCommitTest(server_name='us.east-1', time=156)
        # Four datapoints queued: below bulk_size, nothing written yet.
        self.assertFalse(fake_write_points.called)
        AutoCommitTest(server_name='us.east-1', time=3443)
        # Fifth datapoint reaches bulk_size and triggers the commit.
        self.assertTrue(fake_write_points.called)

    def testSingleSeriesName(self):
        """JSON body is correct when a single series name is used."""
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=158)
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=157)
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=156)
        expectation = [{'points': [[159, 'us.east-1'],
                                   [158, 'us.east-1'],
                                   [157, 'us.east-1'],
                                   [156, 'us.east-1']],
                        'name': 'events.stats.us.east-1',
                        'columns': ['time', 'server_name']}]

        rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
        # Compare as unordered collections: series order is unspecified.
        matches = (all([el in expectation for el in rcvd]) and
                   all([el in rcvd for el in expectation]))
        self.assertTrue(matches,
                        'Invalid JSON body of time series returned from '
                        '_json_body_ for one series name: {0}.'.format(rcvd))
        TestSeriesHelper.MySeriesHelper._reset_()
        self.assertEqual(
            TestSeriesHelper.MySeriesHelper._json_body_(),
            [],
            'Resetting helper did not empty datapoints.')

    def testSeveralSeriesNames(self):
        """JSON body is correct when several series names are used."""
        TestSeriesHelper.MySeriesHelper(server_name='us.east-1', time=159)
        TestSeriesHelper.MySeriesHelper(server_name='fr.paris-10', time=158)
        TestSeriesHelper.MySeriesHelper(server_name='lu.lux', time=157)
        TestSeriesHelper.MySeriesHelper(server_name='uk.london', time=156)
        expectation = [{'points': [[157, 'lu.lux']],
                        'name': 'events.stats.lu.lux',
                        'columns': ['time', 'server_name']},
                       {'points': [[156, 'uk.london']],
                        'name': 'events.stats.uk.london',
                        'columns': ['time', 'server_name']},
                       {'points': [[158, 'fr.paris-10']],
                        'name': 'events.stats.fr.paris-10',
                        'columns': ['time', 'server_name']},
                       {'points': [[159, 'us.east-1']],
                        'name': 'events.stats.us.east-1',
                        'columns': ['time', 'server_name']}]

        rcvd = TestSeriesHelper.MySeriesHelper._json_body_()
        matches = (all([el in expectation for el in rcvd]) and
                   all([el in rcvd for el in expectation]))
        self.assertTrue(matches,
                        'Invalid JSON body of time series returned from '
                        '_json_body_ for several series names: {0}.'
                        .format(rcvd))
        TestSeriesHelper.MySeriesHelper._reset_()
        self.assertEqual(
            TestSeriesHelper.MySeriesHelper._json_body_(),
            [],
            'Resetting helper did not empty datapoints.')

    def testInvalidHelpers(self):
        """Each incomplete helper definition must raise AttributeError."""
        class MissingMeta(SeriesHelper):
            """Helper with no Meta class at all."""

            pass

        class MissingClient(SeriesHelper):
            """Helper whose Meta lacks a client."""

            class Meta:
                """Metadata without a client attribute."""

                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                autocommit = True

        class MissingSeriesName(SeriesHelper):
            """Helper whose Meta lacks a series name."""

            class Meta:
                """Metadata without a series_name attribute."""

                fields = ['time', 'server_name']

        class MissingFields(SeriesHelper):
            """Helper whose Meta lacks the fields list."""

            class Meta:
                """Metadata without a fields attribute."""

                series_name = 'events.stats.{server_name}'

        for cls in [MissingMeta, MissingClient, MissingFields,
                    MissingSeriesName]:
            self.assertRaises(
                AttributeError, cls, **{'time': 159,
                                        'server_name': 'us.east-1'})

    def testWarnBulkSizeZero(self):
        """A bulk size below one must be forced to one with a warning."""
        class WarnBulkSizeZero(SeriesHelper):
            """Helper with an invalid (zero) bulk size."""

            class Meta:
                """Metadata with bulk_size deliberately set to zero."""

                client = TestSeriesHelper.client
                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                bulk_size = 0
                autocommit = True

        with warnings.catch_warnings(record=True) as rec_warnings:
            warnings.simplefilter("always")
            # The client points at an invalid server; only the warning
            # matters here, the write itself is expected to fail.
            with self.assertRaises(ConnectionError):
                WarnBulkSizeZero(time=159, server_name='us.east-1')

        self.assertGreaterEqual(
            len(rec_warnings), 1,
            '{0} call should have generated one warning.'
            'Actual generated warnings: {1}'.format(
                WarnBulkSizeZero, '\n'.join(map(str, rec_warnings))))

        expected_msg = (
            'Definition of bulk_size in WarnBulkSizeZero forced to 1, '
            'was less than 1.')

        self.assertIn(expected_msg, list(w.message.args[0]
                                         for w in rec_warnings),
                      'Warning message did not contain "forced to 1".')

    def testWarnBulkSizeNoEffect(self):
        """bulk_size without autocommit must emit a no-effect warning."""
        class WarnBulkSizeNoEffect(SeriesHelper):
            """Helper with bulk_size set but autocommit disabled."""

            class Meta:
                """Metadata with autocommit off."""

                series_name = 'events.stats.{server_name}'
                fields = ['time', 'server_name']
                bulk_size = 5
                autocommit = False

        with warnings.catch_warnings(record=True) as rec_warnings:
            warnings.simplefilter("always")
            WarnBulkSizeNoEffect(time=159, server_name='us.east-1')

        self.assertGreaterEqual(
            len(rec_warnings), 1,
            '{0} call should have generated one warning.'
            'Actual generated warnings: {1}'.format(
                WarnBulkSizeNoEffect, '\n'.join(map(str, rec_warnings))))

        expected_msg = (
            'Definition of bulk_size in WarnBulkSizeNoEffect has no affect '
            'because autocommit is false.')

        self.assertIn(expected_msg, list(w.message.args[0]
                                         for w in rec_warnings),
                      'Warning message did not contain the expected_msg.')
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
50
lib/influxdb/tests/misc.py
Normal file
50
lib/influxdb/tests/misc.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Define the misc handler for InfluxDBClient test."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import socket
|
||||
|
||||
|
||||
def get_free_ports(num_ports, ip='127.0.0.1'):
    """Determine free ports on provided interface.

    Get `num_ports` free/available ports on the interface linked to the `ip`
    :param int num_ports: The number of free ports to get
    :param str ip: The ip on which the ports have to be taken
    :return: a set of ports number
    """
    opened = []
    ports = set()
    try:
        for _ in range(num_ports):
            candidate = socket.socket()
            entry = [candidate, -1]
            # Register the socket before binding so the finally block
            # closes it even if bind() fails (no leaked resource).
            opened.append(entry)
            candidate.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # Binding to port 0 lets the OS pick a free ephemeral port.
            candidate.bind((ip, 0))
            entry[1] = candidate.getsockname()[1]
    finally:
        for candidate, port in opened:
            candidate.close()
            ports.add(port)
    assert num_ports == len(ports)
    return ports
|
||||
|
||||
|
||||
def is_port_open(port, ip='127.0.0.1'):
    """Check if given TCP port is open for connection."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # connect_ex returns 0 on success instead of raising.
        status = probe.connect_ex((ip, port))
        if status == 0:
            # Tear the connection down cleanly before closing.
            probe.shutdown(socket.SHUT_RDWR)
        return status == 0
    finally:
        probe.close()
|
202
lib/influxdb/tests/resultset_test.py
Normal file
202
lib/influxdb/tests/resultset_test.py
Normal file
|
@ -0,0 +1,202 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Define the resultset test package."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import unittest
|
||||
|
||||
from influxdb.exceptions import InfluxDBClientError
|
||||
from influxdb.resultset import ResultSet
|
||||
|
||||
|
||||
class TestResultSet(unittest.TestCase):
|
||||
"""Define the ResultSet test object."""
|
||||
|
||||
def setUp(self):
    """Build a two-series ResultSet shared by every test."""
    self.query_response = {
        "results": [
            {"series": [
                {"name": "cpu_load_short",
                 "columns": ["time", "value", "host", "region"],
                 "values": [
                     ["2015-01-29T21:51:28.968422294Z",
                      0.64, "server01", "us-west"],
                     ["2015-01-29T21:51:28.968422294Z",
                      0.65, "server02", "us-west"],
                 ]},
                {"name": "other_series",
                 "columns": ["time", "value", "host", "region"],
                 "values": [
                     ["2015-01-29T21:51:28.968422294Z",
                      0.66, "server01", "us-west"],
                 ]}]}
        ]
    }

    self.rs = ResultSet(self.query_response['results'][0])
|
||||
|
||||
def test_filter_by_name(self):
    """Selecting by measurement name yields that series' points."""
    expected = [
        {'value': 0.64,
         'time': '2015-01-29T21:51:28.968422294Z',
         'host': 'server01',
         'region': 'us-west'},
        {'value': 0.65,
         'time': '2015-01-29T21:51:28.968422294Z',
         'host': 'server02',
         'region': 'us-west'},
    ]

    # Both the mapping-style access and get_points must agree.
    self.assertEqual(expected, list(self.rs['cpu_load_short']))
    self.assertEqual(
        expected,
        list(self.rs.get_points(measurement='cpu_load_short')))
|
||||
|
||||
def test_filter_by_tags(self):
|
||||
"""Test filter by tags in TestResultSet object."""
|
||||
expected = [
|
||||
{'value': 0.64,
|
||||
'time': '2015-01-29T21:51:28.968422294Z',
|
||||
'host': 'server01',
|
||||
'region': 'us-west'},
|
||||
{'value': 0.66,
|
||||
'time': '2015-01-29T21:51:28.968422294Z',
|
||||
'host': 'server01',
|
||||
'region': 'us-west'},
|
||||
]
|
||||
|
||||
self.assertEqual(
|
||||
expected,
|
||||
list(self.rs[{"host": "server01"}])
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
expected,
|
||||
list(self.rs.get_points(tags={'host': 'server01'}))
|
||||
)
|
||||
|
||||
def test_filter_by_name_and_tags(self):
|
||||
"""Test filter by name and tags in TestResultSet object."""
|
||||
self.assertEqual(
|
||||
list(self.rs[('cpu_load_short', {"host": "server01"})]),
|
||||
[{'value': 0.64,
|
||||
'time': '2015-01-29T21:51:28.968422294Z',
|
||||
'host': 'server01',
|
||||
'region': 'us-west'}]
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
list(self.rs[('cpu_load_short', {"region": "us-west"})]),
|
||||
[
|
||||
{'value': 0.64,
|
||||
'time': '2015-01-29T21:51:28.968422294Z',
|
||||
'host': 'server01',
|
||||
'region': 'us-west'},
|
||||
{'value': 0.65,
|
||||
'time': '2015-01-29T21:51:28.968422294Z',
|
||||
'host': 'server02',
|
||||
'region': 'us-west'},
|
||||
]
|
||||
)
|
||||
|
||||
def test_keys(self):
|
||||
"""Test keys in TestResultSet object."""
|
||||
self.assertEqual(
|
||||
self.rs.keys(),
|
||||
[
|
||||
('cpu_load_short', None),
|
||||
('other_series', None),
|
||||
]
|
||||
)
|
||||
|
||||
def test_len(self):
|
||||
"""Test length in TestResultSet object."""
|
||||
self.assertEqual(
|
||||
len(self.rs),
|
||||
2
|
||||
)
|
||||
|
||||
def test_items(self):
|
||||
"""Test items in TestResultSet object."""
|
||||
items = list(self.rs.items())
|
||||
items_lists = [(item[0], list(item[1])) for item in items]
|
||||
|
||||
self.assertEqual(
|
||||
items_lists,
|
||||
[
|
||||
(
|
||||
('cpu_load_short', None),
|
||||
[
|
||||
{'time': '2015-01-29T21:51:28.968422294Z',
|
||||
'value': 0.64,
|
||||
'host': 'server01',
|
||||
'region': 'us-west'},
|
||||
{'time': '2015-01-29T21:51:28.968422294Z',
|
||||
'value': 0.65,
|
||||
'host': 'server02',
|
||||
'region': 'us-west'}]),
|
||||
(
|
||||
('other_series', None),
|
||||
[
|
||||
{'time': '2015-01-29T21:51:28.968422294Z',
|
||||
'value': 0.66,
|
||||
'host': 'server01',
|
||||
'region': 'us-west'}])]
|
||||
)
|
||||
|
||||
def test_point_from_cols_vals(self):
|
||||
"""Test points from columns in TestResultSet object."""
|
||||
cols = ['col1', 'col2']
|
||||
vals = [1, '2']
|
||||
|
||||
point = ResultSet.point_from_cols_vals(cols, vals)
|
||||
self.assertDictEqual(
|
||||
point,
|
||||
{'col1': 1, 'col2': '2'}
|
||||
)
|
||||
|
||||
def test_system_query(self):
|
||||
"""Test system query capabilities in TestResultSet object."""
|
||||
rs = ResultSet(
|
||||
{'series': [
|
||||
{'values': [['another', '48h0m0s', 3, False],
|
||||
['default', '0', 1, False],
|
||||
['somename', '24h0m0s', 4, True]],
|
||||
'columns': ['name', 'duration',
|
||||
'replicaN', 'default']}]}
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
rs.keys(),
|
||||
[('results', None)]
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
list(rs['results']),
|
||||
[
|
||||
{'duration': '48h0m0s', 'default': False, 'replicaN': 3,
|
||||
'name': 'another'},
|
||||
{'duration': '0', 'default': False, 'replicaN': 1,
|
||||
'name': 'default'},
|
||||
{'duration': '24h0m0s', 'default': True, 'replicaN': 4,
|
||||
'name': 'somename'}
|
||||
]
|
||||
)
|
||||
|
||||
def test_resultset_error(self):
|
||||
"""Test returning error in TestResultSet object."""
|
||||
with self.assertRaises(InfluxDBClientError):
|
||||
ResultSet({
|
||||
"series": [],
|
||||
"error": "Big error, many problems."
|
||||
})
|
1
lib/influxdb/tests/server_tests/__init__.py
Normal file
1
lib/influxdb/tests/server_tests/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
"""Define the server tests package."""
|
84
lib/influxdb/tests/server_tests/base.py
Normal file
84
lib/influxdb/tests/server_tests/base.py
Normal file
|
@ -0,0 +1,84 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Define the base module for server test."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import sys
|
||||
|
||||
from influxdb.tests import using_pypy
|
||||
from influxdb.tests.server_tests.influxdb_instance import InfluxDbInstance
|
||||
|
||||
from influxdb.client import InfluxDBClient
|
||||
|
||||
if not using_pypy:
|
||||
from influxdb.dataframe_client import DataFrameClient
|
||||
|
||||
|
||||
def _setup_influxdb_server(inst):
|
||||
inst.influxd_inst = InfluxDbInstance(
|
||||
inst.influxdb_template_conf,
|
||||
udp_enabled=getattr(inst, 'influxdb_udp_enabled', False),
|
||||
)
|
||||
|
||||
inst.cli = InfluxDBClient('localhost',
|
||||
inst.influxd_inst.http_port,
|
||||
'root',
|
||||
'',
|
||||
database='db')
|
||||
if not using_pypy:
|
||||
inst.cliDF = DataFrameClient('localhost',
|
||||
inst.influxd_inst.http_port,
|
||||
'root',
|
||||
'',
|
||||
database='db')
|
||||
|
||||
|
||||
def _teardown_influxdb_server(inst):
|
||||
remove_tree = sys.exc_info() == (None, None, None)
|
||||
inst.influxd_inst.close(remove_tree=remove_tree)
|
||||
|
||||
|
||||
class SingleTestCaseWithServerMixin(object):
|
||||
"""Define the single testcase with server mixin.
|
||||
|
||||
A mixin for unittest.TestCase to start an influxdb server instance
|
||||
in a temporary directory **for each test function/case**
|
||||
"""
|
||||
|
||||
# 'influxdb_template_conf' attribute must be set
|
||||
# on the TestCase class or instance.
|
||||
|
||||
setUp = _setup_influxdb_server
|
||||
tearDown = _teardown_influxdb_server
|
||||
|
||||
|
||||
class ManyTestCasesWithServerMixin(object):
|
||||
"""Define the many testcase with server mixin.
|
||||
|
||||
Same as the SingleTestCaseWithServerMixin but this module creates
|
||||
a single instance for the whole class. Also pre-creates a fresh
|
||||
database: 'db'.
|
||||
"""
|
||||
|
||||
# 'influxdb_template_conf' attribute must be set on the class itself !
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
"""Set up an instance of the ManyTestCasesWithServerMixin."""
|
||||
_setup_influxdb_server(cls)
|
||||
|
||||
def setUp(self):
|
||||
"""Set up an instance of the ManyTestCasesWithServerMixin."""
|
||||
self.cli.create_database('db')
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
"""Deconstruct an instance of ManyTestCasesWithServerMixin."""
|
||||
_teardown_influxdb_server(cls)
|
||||
|
||||
def tearDown(self):
|
||||
"""Deconstruct an instance of ManyTestCasesWithServerMixin."""
|
||||
self.cli.drop_database('db')
|
825
lib/influxdb/tests/server_tests/client_test_with_server.py
Normal file
825
lib/influxdb/tests/server_tests/client_test_with_server.py
Normal file
|
@ -0,0 +1,825 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Unit tests for checking the InfluxDB server.
|
||||
|
||||
The good/expected interaction between:
|
||||
|
||||
+ the python client.. (obviously)
|
||||
+ and a *_real_* server instance running.
|
||||
|
||||
This basically duplicates what's in client_test.py
|
||||
but without mocking around every call.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from functools import partial
|
||||
import os
|
||||
import time
|
||||
import unittest
|
||||
import warnings
|
||||
|
||||
from influxdb import InfluxDBClient
|
||||
from influxdb.exceptions import InfluxDBClientError
|
||||
|
||||
from influxdb.tests import skipIfPYpy, using_pypy, skipServerTests
|
||||
from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin
|
||||
from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin
|
||||
|
||||
# By default, raise exceptions on warnings
|
||||
warnings.simplefilter('error', FutureWarning)
|
||||
|
||||
if not using_pypy:
|
||||
import pandas as pd
|
||||
from pandas.util.testing import assert_frame_equal
|
||||
|
||||
|
||||
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
def point(series_name, timestamp=None, tags=None, **fields):
|
||||
"""Define what a point looks like."""
|
||||
res = {'measurement': series_name}
|
||||
|
||||
if timestamp:
|
||||
res['time'] = timestamp
|
||||
|
||||
if tags:
|
||||
res['tags'] = tags
|
||||
|
||||
res['fields'] = fields
|
||||
return res
|
||||
|
||||
|
||||
dummy_point = [ # some dummy points
|
||||
{
|
||||
"measurement": "cpu_load_short",
|
||||
"tags": {
|
||||
"host": "server01",
|
||||
"region": "us-west"
|
||||
},
|
||||
"time": "2009-11-10T23:00:00Z",
|
||||
"fields": {
|
||||
"value": 0.64
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
dummy_points = [ # some dummy points
|
||||
dummy_point[0],
|
||||
{
|
||||
"measurement": "memory",
|
||||
"tags": {
|
||||
"host": "server01",
|
||||
"region": "us-west"
|
||||
},
|
||||
"time": "2009-11-10T23:01:35Z",
|
||||
"fields": {
|
||||
"value": 33.0
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
if not using_pypy:
|
||||
dummy_pointDF = {
|
||||
"measurement": "cpu_load_short",
|
||||
"tags": {"host": "server01",
|
||||
"region": "us-west"},
|
||||
"dataframe": pd.DataFrame(
|
||||
[[0.64]], columns=['value'],
|
||||
index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
|
||||
}
|
||||
dummy_pointsDF = [{
|
||||
"measurement": "cpu_load_short",
|
||||
"tags": {"host": "server01", "region": "us-west"},
|
||||
"dataframe": pd.DataFrame(
|
||||
[[0.64]], columns=['value'],
|
||||
index=pd.to_datetime(["2009-11-10T23:00:00Z"])),
|
||||
}, {
|
||||
"measurement": "memory",
|
||||
"tags": {"host": "server01", "region": "us-west"},
|
||||
"dataframe": pd.DataFrame(
|
||||
[[33]], columns=['value'],
|
||||
index=pd.to_datetime(["2009-11-10T23:01:35Z"])
|
||||
)
|
||||
}]
|
||||
|
||||
|
||||
dummy_point_without_timestamp = [
|
||||
{
|
||||
"measurement": "cpu_load_short",
|
||||
"tags": {
|
||||
"host": "server02",
|
||||
"region": "us-west"
|
||||
},
|
||||
"fields": {
|
||||
"value": 0.64
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
@skipServerTests
|
||||
class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase):
|
||||
"""Define the class of simple tests."""
|
||||
|
||||
influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')
|
||||
|
||||
def test_fresh_server_no_db(self):
|
||||
"""Test a fresh server without database."""
|
||||
self.assertEqual([], self.cli.get_list_database())
|
||||
|
||||
def test_create_database(self):
|
||||
"""Test create a database."""
|
||||
self.assertIsNone(self.cli.create_database('new_db_1'))
|
||||
self.assertIsNone(self.cli.create_database('new_db_2'))
|
||||
self.assertEqual(
|
||||
self.cli.get_list_database(),
|
||||
[{'name': 'new_db_1'}, {'name': 'new_db_2'}]
|
||||
)
|
||||
|
||||
def test_drop_database(self):
|
||||
"""Test drop a database."""
|
||||
self.test_create_database()
|
||||
self.assertIsNone(self.cli.drop_database('new_db_1'))
|
||||
self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database())
|
||||
|
||||
def test_query_fail(self):
|
||||
"""Test that a query failed."""
|
||||
with self.assertRaises(InfluxDBClientError) as ctx:
|
||||
self.cli.query('select column_one from foo')
|
||||
self.assertIn('database not found: db',
|
||||
ctx.exception.content)
|
||||
|
||||
def test_query_fail_ignore_errors(self):
|
||||
"""Test query failed but ignore errors."""
|
||||
result = self.cli.query('select column_one from foo',
|
||||
raise_errors=False)
|
||||
self.assertEqual(result.error, 'database not found: db')
|
||||
|
||||
def test_create_user(self):
|
||||
"""Test create user."""
|
||||
self.cli.create_user('test_user', 'secret_password')
|
||||
rsp = list(self.cli.query("SHOW USERS")['results'])
|
||||
self.assertIn({'user': 'test_user', 'admin': False},
|
||||
rsp)
|
||||
|
||||
def test_create_user_admin(self):
|
||||
"""Test create admin user."""
|
||||
self.cli.create_user('test_user', 'secret_password', True)
|
||||
rsp = list(self.cli.query("SHOW USERS")['results'])
|
||||
self.assertIn({'user': 'test_user', 'admin': True},
|
||||
rsp)
|
||||
|
||||
def test_create_user_blank_password(self):
|
||||
"""Test create user with a blank pass."""
|
||||
self.cli.create_user('test_user', '')
|
||||
rsp = list(self.cli.query("SHOW USERS")['results'])
|
||||
self.assertIn({'user': 'test_user', 'admin': False},
|
||||
rsp)
|
||||
|
||||
def test_get_list_users_empty(self):
|
||||
"""Test get list of users, but empty."""
|
||||
rsp = self.cli.get_list_users()
|
||||
self.assertEqual([], rsp)
|
||||
|
||||
def test_get_list_users(self):
|
||||
"""Test get list of users."""
|
||||
self.cli.query("CREATE USER test WITH PASSWORD 'test'")
|
||||
rsp = self.cli.get_list_users()
|
||||
|
||||
self.assertEqual(
|
||||
[{'user': 'test', 'admin': False}],
|
||||
rsp
|
||||
)
|
||||
|
||||
def test_create_user_blank_username(self):
|
||||
"""Test create blank username."""
|
||||
with self.assertRaises(InfluxDBClientError) as ctx:
|
||||
self.cli.create_user('', 'secret_password')
|
||||
self.assertIn('username required',
|
||||
ctx.exception.content)
|
||||
rsp = list(self.cli.query("SHOW USERS")['results'])
|
||||
self.assertEqual(rsp, [])
|
||||
|
||||
def test_drop_user(self):
|
||||
"""Test drop a user."""
|
||||
self.cli.query("CREATE USER test WITH PASSWORD 'test'")
|
||||
self.cli.drop_user('test')
|
||||
users = list(self.cli.query("SHOW USERS")['results'])
|
||||
self.assertEqual(users, [])
|
||||
|
||||
def test_drop_user_nonexisting(self):
|
||||
"""Test dropping a nonexistent user."""
|
||||
with self.assertRaises(InfluxDBClientError) as ctx:
|
||||
self.cli.drop_user('test')
|
||||
self.assertIn('user not found',
|
||||
ctx.exception.content)
|
||||
|
||||
@unittest.skip("Broken as of 0.9.0")
|
||||
def test_revoke_admin_privileges(self):
|
||||
"""Test revoking admin privs, deprecated as of v0.9.0."""
|
||||
self.cli.create_user('test', 'test', admin=True)
|
||||
self.assertEqual([{'user': 'test', 'admin': True}],
|
||||
self.cli.get_list_users())
|
||||
self.cli.revoke_admin_privileges('test')
|
||||
self.assertEqual([{'user': 'test', 'admin': False}],
|
||||
self.cli.get_list_users())
|
||||
|
||||
def test_grant_privilege(self):
|
||||
"""Test grant privs to user."""
|
||||
self.cli.create_user('test', 'test')
|
||||
self.cli.create_database('testdb')
|
||||
self.cli.grant_privilege('all', 'testdb', 'test')
|
||||
# TODO: when supported by InfluxDB, check if privileges are granted
|
||||
|
||||
def test_grant_privilege_invalid(self):
|
||||
"""Test grant invalid privs to user."""
|
||||
self.cli.create_user('test', 'test')
|
||||
self.cli.create_database('testdb')
|
||||
with self.assertRaises(InfluxDBClientError) as ctx:
|
||||
self.cli.grant_privilege('', 'testdb', 'test')
|
||||
self.assertEqual(400, ctx.exception.code)
|
||||
self.assertIn('{"error":"error parsing query: ',
|
||||
ctx.exception.content)
|
||||
|
||||
def test_revoke_privilege(self):
|
||||
"""Test revoke privs from user."""
|
||||
self.cli.create_user('test', 'test')
|
||||
self.cli.create_database('testdb')
|
||||
self.cli.revoke_privilege('all', 'testdb', 'test')
|
||||
# TODO: when supported by InfluxDB, check if privileges are revoked
|
||||
|
||||
def test_revoke_privilege_invalid(self):
|
||||
"""Test revoke invalid privs from user."""
|
||||
self.cli.create_user('test', 'test')
|
||||
self.cli.create_database('testdb')
|
||||
with self.assertRaises(InfluxDBClientError) as ctx:
|
||||
self.cli.revoke_privilege('', 'testdb', 'test')
|
||||
self.assertEqual(400, ctx.exception.code)
|
||||
self.assertIn('{"error":"error parsing query: ',
|
||||
ctx.exception.content)
|
||||
|
||||
def test_invalid_port_fails(self):
|
||||
"""Test invalid port access fails."""
|
||||
with self.assertRaises(ValueError):
|
||||
InfluxDBClient('host', '80/redir', 'username', 'password')
|
||||
|
||||
|
||||
@skipServerTests
|
||||
class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase):
|
||||
"""Define a class to handle common tests for the server."""
|
||||
|
||||
influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')
|
||||
|
||||
def test_write(self):
|
||||
"""Test write to the server."""
|
||||
self.assertIs(True, self.cli.write(
|
||||
{'points': dummy_point},
|
||||
params={'db': 'db'},
|
||||
))
|
||||
|
||||
def test_write_check_read(self):
|
||||
"""Test write and check read of data to server."""
|
||||
self.test_write()
|
||||
time.sleep(1)
|
||||
rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db')
|
||||
self.assertListEqual([{'value': 0.64, 'time': '2009-11-10T23:00:00Z',
|
||||
"host": "server01", "region": "us-west"}],
|
||||
list(rsp.get_points()))
|
||||
|
||||
def test_write_points(self):
|
||||
"""Test writing points to the server."""
|
||||
self.assertIs(True, self.cli.write_points(dummy_point))
|
||||
|
||||
@skipIfPYpy
|
||||
def test_write_points_DF(self):
|
||||
"""Test writing points with dataframe."""
|
||||
self.assertIs(
|
||||
True,
|
||||
self.cliDF.write_points(
|
||||
dummy_pointDF['dataframe'],
|
||||
dummy_pointDF['measurement'],
|
||||
dummy_pointDF['tags']
|
||||
)
|
||||
)
|
||||
|
||||
def test_write_points_check_read(self):
|
||||
"""Test writing points and check read back."""
|
||||
self.test_write_points()
|
||||
time.sleep(1) # same as test_write_check_read()
|
||||
rsp = self.cli.query('SELECT * FROM cpu_load_short')
|
||||
|
||||
self.assertEqual(
|
||||
list(rsp),
|
||||
[[
|
||||
{'value': 0.64,
|
||||
'time': '2009-11-10T23:00:00Z',
|
||||
"host": "server01",
|
||||
"region": "us-west"}
|
||||
]]
|
||||
)
|
||||
|
||||
rsp2 = list(rsp.get_points())
|
||||
self.assertEqual(len(rsp2), 1)
|
||||
pt = rsp2[0]
|
||||
|
||||
self.assertEqual(
|
||||
pt,
|
||||
{'time': '2009-11-10T23:00:00Z',
|
||||
'value': 0.64,
|
||||
"host": "server01",
|
||||
"region": "us-west"}
|
||||
)
|
||||
|
||||
@unittest.skip("Broken as of 0.9.0")
|
||||
def test_write_points_check_read_DF(self):
|
||||
"""Test write points and check back with dataframe."""
|
||||
self.test_write_points_DF()
|
||||
time.sleep(1) # same as test_write_check_read()
|
||||
|
||||
rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
|
||||
assert_frame_equal(
|
||||
rsp['cpu_load_short'],
|
||||
dummy_pointDF['dataframe']
|
||||
)
|
||||
|
||||
# Query with Tags
|
||||
rsp = self.cliDF.query(
|
||||
"SELECT * FROM cpu_load_short GROUP BY *")
|
||||
assert_frame_equal(
|
||||
rsp[('cpu_load_short',
|
||||
(('host', 'server01'), ('region', 'us-west')))],
|
||||
dummy_pointDF['dataframe']
|
||||
)
|
||||
|
||||
def test_write_multiple_points_different_series(self):
|
||||
"""Test write multiple points to different series."""
|
||||
self.assertIs(True, self.cli.write_points(dummy_points))
|
||||
time.sleep(1)
|
||||
rsp = self.cli.query('SELECT * FROM cpu_load_short')
|
||||
lrsp = list(rsp)
|
||||
|
||||
self.assertEqual(
|
||||
[[
|
||||
{'value': 0.64,
|
||||
'time': '2009-11-10T23:00:00Z',
|
||||
"host": "server01",
|
||||
"region": "us-west"}
|
||||
]],
|
||||
lrsp
|
||||
)
|
||||
|
||||
rsp = list(self.cli.query('SELECT * FROM memory'))
|
||||
|
||||
self.assertEqual(
|
||||
rsp,
|
||||
[[
|
||||
{'value': 33,
|
||||
'time': '2009-11-10T23:01:35Z',
|
||||
"host": "server01",
|
||||
"region": "us-west"}
|
||||
]]
|
||||
)
|
||||
|
||||
def test_select_into_as_post(self):
|
||||
"""Test SELECT INTO is POSTed."""
|
||||
self.assertIs(True, self.cli.write_points(dummy_points))
|
||||
time.sleep(1)
|
||||
rsp = self.cli.query('SELECT * INTO "newmeas" FROM "memory"')
|
||||
rsp = self.cli.query('SELECT * FROM "newmeas"')
|
||||
lrsp = list(rsp)
|
||||
|
||||
self.assertEqual(
|
||||
lrsp,
|
||||
[[
|
||||
{'value': 33,
|
||||
'time': '2009-11-10T23:01:35Z',
|
||||
"host": "server01",
|
||||
"region": "us-west"}
|
||||
]]
|
||||
)
|
||||
|
||||
@unittest.skip("Broken as of 0.9.0")
|
||||
def test_write_multiple_points_different_series_DF(self):
|
||||
"""Test write multiple points using dataframe to different series."""
|
||||
for i in range(2):
|
||||
self.assertIs(
|
||||
True, self.cliDF.write_points(
|
||||
dummy_pointsDF[i]['dataframe'],
|
||||
dummy_pointsDF[i]['measurement'],
|
||||
dummy_pointsDF[i]['tags']))
|
||||
time.sleep(1)
|
||||
rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
|
||||
|
||||
assert_frame_equal(
|
||||
rsp['cpu_load_short'],
|
||||
dummy_pointsDF[0]['dataframe']
|
||||
)
|
||||
|
||||
rsp = self.cliDF.query('SELECT * FROM memory')
|
||||
assert_frame_equal(
|
||||
rsp['memory'],
|
||||
dummy_pointsDF[1]['dataframe']
|
||||
)
|
||||
|
||||
def test_write_points_batch(self):
|
||||
"""Test writing points in a batch."""
|
||||
dummy_points = [
|
||||
{"measurement": "cpu_usage", "tags": {"unit": "percent"},
|
||||
"time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
|
||||
{"measurement": "network", "tags": {"direction": "in"},
|
||||
"time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
|
||||
{"measurement": "network", "tags": {"direction": "out"},
|
||||
"time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
|
||||
]
|
||||
self.cli.write_points(points=dummy_points,
|
||||
tags={"host": "server01",
|
||||
"region": "us-west"},
|
||||
batch_size=2)
|
||||
time.sleep(5)
|
||||
net_in = self.cli.query("SELECT value FROM network "
|
||||
"WHERE direction='in'").raw
|
||||
net_out = self.cli.query("SELECT value FROM network "
|
||||
"WHERE direction='out'").raw
|
||||
cpu = self.cli.query("SELECT value FROM cpu_usage").raw
|
||||
self.assertIn(123, net_in['series'][0]['values'][0])
|
||||
self.assertIn(12, net_out['series'][0]['values'][0])
|
||||
self.assertIn(12.34, cpu['series'][0]['values'][0])
|
||||
|
||||
def test_query(self):
|
||||
"""Test querying data back from server."""
|
||||
self.assertIs(True, self.cli.write_points(dummy_point))
|
||||
|
||||
@unittest.skip('Not implemented for 0.9')
|
||||
def test_query_chunked(self):
|
||||
"""Test query for chunked response from server."""
|
||||
cli = InfluxDBClient(database='db')
|
||||
example_object = {
|
||||
'points': [
|
||||
[1415206250119, 40001, 667],
|
||||
[1415206244555, 30001, 7],
|
||||
[1415206228241, 20001, 788],
|
||||
[1415206212980, 10001, 555],
|
||||
[1415197271586, 10001, 23]
|
||||
],
|
||||
'name': 'foo',
|
||||
'columns': [
|
||||
'time',
|
||||
'sequence_number',
|
||||
'val'
|
||||
]
|
||||
}
|
||||
del cli
|
||||
del example_object
|
||||
# TODO ?
|
||||
|
||||
def test_delete_series_invalid(self):
|
||||
"""Test delete invalid series."""
|
||||
with self.assertRaises(InfluxDBClientError):
|
||||
self.cli.delete_series()
|
||||
|
||||
def test_default_retention_policy(self):
|
||||
"""Test add default retention policy."""
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'name': 'autogen',
|
||||
'duration': '0s',
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'default': True}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
def test_create_retention_policy_default(self):
|
||||
"""Test create a new default retention policy."""
|
||||
self.cli.create_retention_policy('somename', '1d', 1, default=True)
|
||||
self.cli.create_retention_policy('another', '2d', 1, default=False)
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '24h0m0s',
|
||||
'default': True,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'1h0m0s',
|
||||
'name': 'somename'},
|
||||
{'duration': '48h0m0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'24h0m0s',
|
||||
'name': 'another'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
def test_create_retention_policy(self):
|
||||
"""Test creating a new retention policy, not default."""
|
||||
self.cli.create_retention_policy('somename', '1d', 1)
|
||||
# NB: creating a retention policy without specifying
|
||||
# shard group duration
|
||||
# leads to a shard group duration of 1 hour
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': True,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '24h0m0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'1h0m0s',
|
||||
'name': 'somename'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
self.cli.drop_retention_policy('somename', 'db')
|
||||
# recreate the RP
|
||||
self.cli.create_retention_policy('somename', '1w', 1,
|
||||
shard_duration='1h')
|
||||
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': True,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '168h0m0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'1h0m0s',
|
||||
'name': 'somename'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
self.cli.drop_retention_policy('somename', 'db')
|
||||
# recreate the RP
|
||||
self.cli.create_retention_policy('somename', '1w', 1)
|
||||
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': True,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '168h0m0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'24h0m0s',
|
||||
'name': 'somename'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
def test_alter_retention_policy(self):
|
||||
"""Test alter a retention policy, not default."""
|
||||
self.cli.create_retention_policy('somename', '1d', 1)
|
||||
|
||||
# Test alter duration
|
||||
self.cli.alter_retention_policy('somename', 'db',
|
||||
duration='4d',
|
||||
shard_duration='2h')
|
||||
# NB: altering retention policy doesn't change shard group duration
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': True,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '96h0m0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'2h0m0s',
|
||||
'name': 'somename'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
# Test alter replication
|
||||
self.cli.alter_retention_policy('somename', 'db',
|
||||
replication=4)
|
||||
|
||||
# NB: altering retention policy doesn't change shard group duration
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': True,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '96h0m0s',
|
||||
'default': False,
|
||||
'replicaN': 4,
|
||||
'shardGroupDuration': u'2h0m0s',
|
||||
'name': 'somename'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
# Test alter default
|
||||
self.cli.alter_retention_policy('somename', 'db',
|
||||
default=True)
|
||||
# NB: altering retention policy doesn't change shard group duration
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '96h0m0s',
|
||||
'default': True,
|
||||
'replicaN': 4,
|
||||
'shardGroupDuration': u'2h0m0s',
|
||||
'name': 'somename'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
# Test alter shard_duration
|
||||
self.cli.alter_retention_policy('somename', 'db',
|
||||
shard_duration='4h')
|
||||
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '96h0m0s',
|
||||
'default': True,
|
||||
'replicaN': 4,
|
||||
'shardGroupDuration': u'4h0m0s',
|
||||
'name': 'somename'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
def test_alter_retention_policy_invalid(self):
|
||||
"""Test invalid alter retention policy."""
|
||||
self.cli.create_retention_policy('somename', '1d', 1)
|
||||
with self.assertRaises(InfluxDBClientError) as ctx:
|
||||
self.cli.alter_retention_policy('somename', 'db')
|
||||
self.assertEqual(400, ctx.exception.code)
|
||||
self.assertIn('{"error":"error parsing query: ',
|
||||
ctx.exception.content)
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': True,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'},
|
||||
{'duration': '24h0m0s',
|
||||
'default': False,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'1h0m0s',
|
||||
'name': 'somename'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
def test_drop_retention_policy(self):
|
||||
"""Test drop a retention policy."""
|
||||
self.cli.create_retention_policy('somename', '1d', 1)
|
||||
|
||||
# Test drop retention
|
||||
self.cli.drop_retention_policy('somename', 'db')
|
||||
rsp = self.cli.get_list_retention_policies()
|
||||
self.assertEqual(
|
||||
[
|
||||
{'duration': '0s',
|
||||
'default': True,
|
||||
'replicaN': 1,
|
||||
'shardGroupDuration': u'168h0m0s',
|
||||
'name': 'autogen'}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
def test_issue_143(self):
|
||||
"""Test for PR#143 from repo."""
|
||||
pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z')
|
||||
pts = [
|
||||
pt(value=15),
|
||||
pt(tags={'tag_1': 'value1'}, value=5),
|
||||
pt(tags={'tag_1': 'value2'}, value=10),
|
||||
]
|
||||
self.cli.write_points(pts)
|
||||
time.sleep(1)
|
||||
rsp = list(self.cli.query('SELECT * FROM a_series_name \
|
||||
GROUP BY tag_1').get_points())
|
||||
|
||||
self.assertEqual(
|
||||
[
|
||||
{'time': '2015-03-30T16:16:37Z', 'value': 15},
|
||||
{'time': '2015-03-30T16:16:37Z', 'value': 5},
|
||||
{'time': '2015-03-30T16:16:37Z', 'value': 10}
|
||||
],
|
||||
rsp
|
||||
)
|
||||
|
||||
# a slightly more complex one with 2 tags values:
|
||||
pt = partial(point, 'series2', timestamp='2015-03-30T16:16:37Z')
|
||||
pts = [
|
||||
pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
|
||||
pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5),
|
||||
pt(tags={'tag1': 'value2', 'tag2': 'v1'}, value=10),
|
||||
]
|
||||
self.cli.write_points(pts)
|
||||
time.sleep(1)
|
||||
rsp = self.cli.query('SELECT * FROM series2 GROUP BY tag1,tag2')
|
||||
|
||||
self.assertEqual(
|
||||
[
|
||||
{'value': 0, 'time': '2015-03-30T16:16:37Z'},
|
||||
{'value': 5, 'time': '2015-03-30T16:16:37Z'},
|
||||
{'value': 10, 'time': '2015-03-30T16:16:37Z'}
|
||||
],
|
||||
list(rsp['series2'])
|
||||
)
|
||||
|
||||
all_tag2_equal_v1 = list(rsp.get_points(tags={'tag2': 'v1'}))
|
||||
|
||||
self.assertEqual(
|
||||
[{'value': 0, 'time': '2015-03-30T16:16:37Z'},
|
||||
{'value': 10, 'time': '2015-03-30T16:16:37Z'}],
|
||||
all_tag2_equal_v1,
|
||||
)
|
||||
|
||||
def test_query_multiple_series(self):
|
||||
"""Test query for multiple series."""
|
||||
pt = partial(point, 'series1', timestamp='2015-03-30T16:16:37Z')
|
||||
pts = [
|
||||
pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
|
||||
]
|
||||
self.cli.write_points(pts)
|
||||
|
||||
pt = partial(point, 'series2', timestamp='1970-03-30T16:16:37Z')
|
||||
pts = [
|
||||
pt(tags={'tag1': 'value1', 'tag2': 'v1'},
|
||||
value=0, data1=33, data2="bla"),
|
||||
]
|
||||
self.cli.write_points(pts)
|
||||
|
||||
|
||||
@skipServerTests
class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase):
    """Server tests exercising the UDP write path."""

    influxdb_udp_enabled = True
    influxdb_template_conf = os.path.join(THIS_DIR,
                                          'influxdb.conf.template')

    def test_write_points_udp(self):
        """Write points over UDP, then read them back over HTTP."""
        udp_client = InfluxDBClient(
            'localhost',
            self.influxd_inst.http_port,
            'root',
            '',
            database='db',
            use_udp=True,
            udp_port=self.influxd_inst.udp_port
        )
        udp_client.write_points(dummy_point)

        # UDP produces no response, so the write is fire-and-forget and
        # the points are not immediately queryable; give the server a
        # moment to apply them before querying.
        time.sleep(3)  # 3 sec seems to be a good choice.

        expected = [
            {'value': 0.64,
             'time': '2009-11-10T23:00:00Z',
             "host": "server01",
             "region": "us-west"}
        ]
        rsp = self.cli.query('SELECT * FROM cpu_load_short')
        self.assertEqual(expected, list(rsp['cpu_load_short']))
198
lib/influxdb/tests/server_tests/influxdb_instance.py
Normal file
198
lib/influxdb/tests/server_tests/influxdb_instance.py
Normal file
|
@ -0,0 +1,198 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Define the test module for an influxdb instance."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
import distutils
|
||||
import os
|
||||
import tempfile
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import unittest
|
||||
|
||||
from influxdb.tests.misc import is_port_open, get_free_ports
|
||||
|
||||
# hack in check_output if it's not defined, like for python 2.6
if "check_output" not in dir(subprocess):
    def f(*popenargs, **kwargs):
        """Run a command and return its stdout (python 2.6 backport)."""
        if 'stdout' in kwargs:
            raise ValueError(
                'stdout argument not allowed, it will be overridden.'
            )
        proc = subprocess.Popen(stdout=subprocess.PIPE,
                                *popenargs,
                                **kwargs)
        captured, _ = proc.communicate()
        status = proc.poll()
        if status:
            command = kwargs.get("args")
            if command is None:
                command = popenargs[0]
            raise subprocess.CalledProcessError(status, command)
        return captured

    subprocess.check_output = f
|
||||
|
||||
class InfluxDbInstance(object):
    """Define an instance of InfluxDB.

    A class to launch a fresh influxdb server instance
    in a temporary place, using a config file template.
    """

    def __init__(self, conf_template, udp_enabled=False):
        """Initialize an instance of InfluxDbInstance.

        :param conf_template: path to the influxdb.conf template file.
        :param udp_enabled: when True, also open a UDP listener.
        :raises unittest.SkipTest: when server tests are disabled via
            the INFLUXDB_PYTHON_SKIP_SERVER_TESTS environment variable.
        """
        if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True':
            raise unittest.SkipTest(
                "Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)"
            )

        self.influxd_path = self.find_influxd_path()

        # Retry a couple of times: the free ports we picked may have been
        # grabbed by another process between selection and bind.
        errors = 0
        while True:
            try:
                self._start_server(conf_template, udp_enabled)
                break
            # Happens when the ports are already in use.
            except RuntimeError as e:
                errors += 1
                if errors > 2:
                    raise e

    def _start_server(self, conf_template, udp_enabled):
        """Render the config, spawn influxd and wait for it to listen."""
        # create a temporary dir to store all needed files
        # for the influxdb server instance :
        self.temp_dir_base = tempfile.mkdtemp()

        # "temp_dir_base" will be used for conf file and logs,
        # while "temp_dir_influxdb" is for the databases files/dirs :
        tempdir = self.temp_dir_influxdb = tempfile.mkdtemp(
            dir=self.temp_dir_base)

        # find a couple free ports :
        free_ports = get_free_ports(4)
        ports = {}
        for service in 'http', 'global', 'meta', 'udp':
            ports[service + '_port'] = free_ports.pop()
        if not udp_enabled:
            # -1 in the rendered config disables the UDP listener.
            ports['udp_port'] = -1

        conf_data = dict(
            meta_dir=os.path.join(tempdir, 'meta'),
            data_dir=os.path.join(tempdir, 'data'),
            wal_dir=os.path.join(tempdir, 'wal'),
            cluster_dir=os.path.join(tempdir, 'state'),
            handoff_dir=os.path.join(tempdir, 'handoff'),
            logs_file=os.path.join(self.temp_dir_base, 'logs.txt'),
            udp_enabled='true' if udp_enabled else 'false',
        )
        conf_data.update(ports)
        # Expose ports/paths as attributes (http_port, logs_file, ...)
        # for the tests to use.
        self.__dict__.update(conf_data)

        conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf')
        with open(conf_file, "w") as fh:
            with open(conf_template) as fh_template:
                fh.write(fh_template.read().format(**conf_data))

        # now start the server instance:
        self.proc = subprocess.Popen(
            [self.influxd_path, '-config', conf_file],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )

        print(
            "%s > Started influxdb bin in %r with ports %s and %s.." % (
                datetime.datetime.now(),
                self.temp_dir_base,
                self.global_port,
                self.http_port
            )
        )

        # wait for it to listen on the broker and admin ports:
        # usually a fresh instance is ready in less than 1 sec ..
        timeout = time.time() + 10  # so 10 secs should be enough,
        # otherwise either your system load is high,
        # or you run a 286 @ 1Mhz ?
        try:
            while time.time() < timeout:
                if (is_port_open(self.http_port) and
                        is_port_open(self.global_port)):
                    # it's hard to check if a UDP port is open..
                    if udp_enabled:
                        # so let's just sleep 0.5 sec in this case
                        # to be sure that the server has open the port
                        time.sleep(0.5)
                    break
                time.sleep(0.5)
                if self.proc.poll() is not None:
                    raise RuntimeError('influxdb prematurely exited')
            else:
                # while/else: the loop timed out without breaking.
                self.proc.terminate()
                self.proc.wait()
                raise RuntimeError('Timeout waiting for influxdb to listen'
                                   ' on its ports (%s)' % ports)
        except RuntimeError as err:
            # Enrich the error with the server's logs/stdout/stderr
            # before re-raising, to ease debugging of failed startups.
            data = self.get_logs_and_output()
            data['reason'] = str(err)
            data['now'] = datetime.datetime.now()
            raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n"
                               "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r"
                               % data)

    def find_influxd_path(self):
        """Return the path of the influxd binary.

        Resolution order: the INFLUXDB_PYTHON_INFLUXD_PATH environment
        variable, then a PATH lookup, then ``which``, then a hard-coded
        fallback location.

        :raises unittest.SkipTest: when no influxd binary can be found.
        """
        influxdb_bin_path = os.environ.get(
            'INFLUXDB_PYTHON_INFLUXD_PATH',
            None
        )

        if influxdb_bin_path is None:
            # BUGFIX: a bare "import distutils" does not guarantee that
            # the "distutils.spawn" submodule is loaded, which made the
            # call below fail with AttributeError depending on what else
            # had been imported first.  Import the submodule explicitly.
            import distutils.spawn
            influxdb_bin_path = distutils.spawn.find_executable('influxd')
            if not influxdb_bin_path:
                try:
                    influxdb_bin_path = subprocess.check_output(
                        ['which', 'influxd']
                    ).strip()
                except subprocess.CalledProcessError:
                    # fallback on :
                    influxdb_bin_path = '/opt/influxdb/influxd'

        if not os.path.isfile(influxdb_bin_path):
            raise unittest.SkipTest("Could not find influxd binary")

        version = subprocess.check_output([influxdb_bin_path, 'version'])
        print("InfluxDB version: %s" % version, file=sys.stderr)

        return influxdb_bin_path

    def get_logs_and_output(self):
        """Return a dict with the process' rc, stdout, stderr and logs."""
        proc = self.proc
        try:
            with open(self.logs_file) as fh:
                logs = fh.read()
        except IOError as err:
            # Best effort: the log file may not exist yet (early crash).
            logs = "Couldn't read logs: %s" % err
        return {
            'rc': proc.returncode,
            'out': proc.stdout.read(),
            'err': proc.stderr.read(),
            'logs': logs
        }

    def close(self, remove_tree=True):
        """Stop the server and (optionally) remove its temporary files."""
        self.proc.terminate()
        self.proc.wait()
        if remove_tree:
            shutil.rmtree(self.temp_dir_base)
147
lib/influxdb/tests/test_line_protocol.py
Normal file
147
lib/influxdb/tests/test_line_protocol.py
Normal file
|
@ -0,0 +1,147 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Define the line protocol test module."""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import datetime
|
||||
import unittest
|
||||
from pytz import UTC, timezone
|
||||
|
||||
from influxdb import line_protocol
|
||||
|
||||
|
||||
class TestLineProtocol(unittest.TestCase):
    """Unit tests for the line_protocol serialization helpers."""

    def test_make_lines(self):
        """Serialize a point with mixed tag and field types."""
        payload = {
            "tags": {
                "empty_tag": "",
                "none_tag": None,
                "backslash_tag": "C:\\",
                "integer_tag": 2,
                "string_tag": "hello"
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "string_val": "hello!",
                        "int_val": 1,
                        "float_val": 1.1,
                        "none_field": None,
                        "bool_val": True,
                    }
                }
            ]
        }

        expected = (
            'test,backslash_tag=C:\\\\ ,integer_tag=2,string_tag=hello '
            'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n'
        )
        self.assertEqual(expected, line_protocol.make_lines(payload))

    def test_timezone(self):
        """Check naive, UTC and non-UTC datetimes all map to epoch ns."""
        naive = datetime(2009, 11, 10, 23, 0, 0, 123456)
        as_utc = UTC.localize(naive)
        as_berlin = timezone('Europe/Berlin').localize(naive)
        as_eastern = as_berlin.astimezone(timezone('US/Eastern'))
        payload = {
            "points": [
                {"measurement": "A", "fields": {"val": 1},
                 "time": 0},
                {"measurement": "A", "fields": {"val": 1},
                 "time": "2009-11-10T23:00:00.123456Z"},
                {"measurement": "A", "fields": {"val": 1}, "time": naive},
                {"measurement": "A", "fields": {"val": 1}, "time": as_utc},
                {"measurement": "A", "fields": {"val": 1}, "time": as_berlin},
                {"measurement": "A", "fields": {"val": 1}, "time": as_eastern},
            ]
        }
        expected_lines = [
            'A val=1i 0',
            'A val=1i 1257894000123456000',
            'A val=1i 1257894000123456000',
            'A val=1i 1257894000123456000',
            'A val=1i 1257890400123456000',
            'A val=1i 1257890400123456000',
        ]
        self.assertEqual('\n'.join(expected_lines) + '\n',
                         line_protocol.make_lines(payload))

    def test_string_val_newline(self):
        """Escape embedded newlines in string field values."""
        payload = {
            "points": [
                {
                    "measurement": "m1",
                    "fields": {
                        "multi_line": "line1\nline1\nline3"
                    }
                }
            ]
        }

        expected = 'm1 multi_line="line1\\nline1\\nline3"\n'
        self.assertEqual(expected, line_protocol.make_lines(payload))

    def test_make_lines_unicode(self):
        """Serialize non-ASCII tag and field values."""
        payload = {
            "tags": {
                "unicode_tag": "\'Привет!\'"  # Hello! in Russian
            },
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "unicode_val": "Привет!",  # Hello! in Russian
                    }
                }
            ]
        }

        expected = 'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n'
        self.assertEqual(expected, line_protocol.make_lines(payload))

    def test_quote_ident(self):
        """Double-quote and escape an identifier string."""
        self.assertEqual(
            r'''"\\foo ' bar \" Örf"''',
            line_protocol.quote_ident(r"""\foo ' bar " Örf""")
        )

    def test_quote_literal(self):
        """Single-quote and escape a literal string."""
        self.assertEqual(
            r"""'\\foo \' bar " Örf'""",
            line_protocol.quote_literal(r"""\foo ' bar " Örf""")
        )

    def test_float_with_long_decimal_fraction(self):
        """Ensure precision is preserved when casting floats into strings."""
        payload = {
            "points": [
                {
                    "measurement": "test",
                    "fields": {
                        "float_val": 1.0000000000000009,
                    }
                }
            ]
        }
        self.assertEqual('test float_val=1.0000000000000009\n',
                         line_protocol.make_lines(payload))
Loading…
Add table
Add a link
Reference in a new issue