added forced package imports

This commit is contained in:
Nicholas St. Germain 2018-12-02 13:38:37 -06:00
parent 0e2ffdbbb1
commit ef9022c6eb
943 changed files with 125530 additions and 16 deletions

View file

@ -0,0 +1 @@
"""Define the server tests package."""

View file

@ -0,0 +1,84 @@
# -*- coding: utf-8 -*-
"""Define the base module for server test."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
from influxdb.tests import using_pypy
from influxdb.tests.server_tests.influxdb_instance import InfluxDbInstance
from influxdb.client import InfluxDBClient
if not using_pypy:
from influxdb.dataframe_client import DataFrameClient
def _setup_influxdb_server(inst):
    """Boot a throwaway influxd server and attach clients to *inst*.

    Stores the running server as ``inst.influxd_inst``, a regular client
    as ``inst.cli`` and (CPython only) a DataFrame client as ``inst.cliDF``.
    *inst* may be a TestCase instance or class.
    """
    udp = getattr(inst, 'influxdb_udp_enabled', False)
    server = InfluxDbInstance(inst.influxdb_template_conf, udp_enabled=udp)
    inst.influxd_inst = server

    inst.cli = InfluxDBClient(
        'localhost', server.http_port, 'root', '', database='db')
    if not using_pypy:
        # pandas-backed client is unavailable on pypy
        inst.cliDF = DataFrameClient(
            'localhost', server.http_port, 'root', '', database='db')
def _teardown_influxdb_server(inst):
remove_tree = sys.exc_info() == (None, None, None)
inst.influxd_inst.close(remove_tree=remove_tree)
class SingleTestCaseWithServerMixin(object):
    """Define the single testcase with server mixin.

    A mixin for unittest.TestCase to start an influxdb server instance
    in a temporary directory **for each test function/case**
    """

    # 'influxdb_template_conf' attribute must be set
    # on the TestCase class or instance.

    # Bind the module-level helpers directly as unittest hooks: every
    # test gets its own fresh server, torn down right after it runs.
    setUp = _setup_influxdb_server
    tearDown = _teardown_influxdb_server
class ManyTestCasesWithServerMixin(object):
    """Define the many testcase with server mixin.

    Same as the SingleTestCaseWithServerMixin but this module creates
    a single instance for the whole class. Also pre-creates a fresh
    database: 'db'.
    """

    # 'influxdb_template_conf' attribute must be set on the class itself !

    @classmethod
    def setUpClass(cls):
        """Set up an instance of the ManyTestCasesWithServerMixin."""
        # One shared server per class: the helper stores the server and
        # the client(s) as *class* attributes.
        _setup_influxdb_server(cls)

    def setUp(self):
        """Set up an instance of the ManyTestCasesWithServerMixin."""
        # Each test starts against a freshly created 'db' database.
        self.cli.create_database('db')

    @classmethod
    def tearDownClass(cls):
        """Deconstruct an instance of ManyTestCasesWithServerMixin."""
        _teardown_influxdb_server(cls)

    def tearDown(self):
        """Deconstruct an instance of ManyTestCasesWithServerMixin."""
        # Drop per-test state so the shared server stays clean.
        self.cli.drop_database('db')

View file

@ -0,0 +1,825 @@
# -*- coding: utf-8 -*-
"""Unit tests for checking the InfluxDB server.
The good/expected interaction between:
+ the python client.. (obviously)
+ and a *_real_* server instance running.
This basically duplicates what's in client_test.py
but without mocking around every call.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from functools import partial
import os
import time
import unittest
import warnings
from influxdb import InfluxDBClient
from influxdb.exceptions import InfluxDBClientError
from influxdb.tests import skipIfPYpy, using_pypy, skipServerTests
from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin
from influxdb.tests.server_tests.base import SingleTestCaseWithServerMixin
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)

if not using_pypy:
    # pandas is only importable on CPython; guarded like the fixtures below.
    import pandas as pd
    from pandas.util.testing import assert_frame_equal

# Directory containing this file; used to locate influxdb.conf.template.
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
def point(series_name, timestamp=None, tags=None, **fields):
    """Build a write-API point dict for *series_name*.

    *timestamp* and *tags* are included only when truthy; every
    remaining keyword argument becomes one of the point's fields.
    """
    pt = {'measurement': series_name}
    for key, value in (('time', timestamp), ('tags', tags)):
        if value:
            pt[key] = value
    pt['fields'] = fields
    return pt
dummy_point = [  # some dummy points
    {
        "measurement": "cpu_load_short",
        "tags": {
            "host": "server01",
            "region": "us-west"
        },
        "time": "2009-11-10T23:00:00Z",
        "fields": {
            "value": 0.64
        }
    }
]

dummy_points = [  # some dummy points
    dummy_point[0],
    # plus a second point in a different series ('memory'):
    {
        "measurement": "memory",
        "tags": {
            "host": "server01",
            "region": "us-west"
        },
        "time": "2009-11-10T23:01:35Z",
        "fields": {
            "value": 33.0
        }
    }
]

# DataFrame-flavoured variants of the fixtures above; guarded because
# pandas cannot be imported under pypy.
if not using_pypy:
    dummy_pointDF = {
        "measurement": "cpu_load_short",
        "tags": {"host": "server01",
                 "region": "us-west"},
        "dataframe": pd.DataFrame(
            [[0.64]], columns=['value'],
            index=pd.to_datetime(["2009-11-10T23:00:00Z"]))
    }
    dummy_pointsDF = [{
        "measurement": "cpu_load_short",
        "tags": {"host": "server01", "region": "us-west"},
        "dataframe": pd.DataFrame(
            [[0.64]], columns=['value'],
            index=pd.to_datetime(["2009-11-10T23:00:00Z"])),
    }, {
        "measurement": "memory",
        "tags": {"host": "server01", "region": "us-west"},
        "dataframe": pd.DataFrame(
            [[33]], columns=['value'],
            index=pd.to_datetime(["2009-11-10T23:01:35Z"])
        )
    }]

# Same shape as dummy_point but with no 'time' key, so the server
# assigns the timestamp itself.
dummy_point_without_timestamp = [
    {
        "measurement": "cpu_load_short",
        "tags": {
            "host": "server02",
            "region": "us-west"
        },
        "fields": {
            "value": 0.64
        }
    }
]
@skipServerTests
class SimpleTests(SingleTestCaseWithServerMixin, unittest.TestCase):
    """Define the class of simple tests.

    Every test here gets its own freshly started server
    (SingleTestCaseWithServerMixin), so tests may freely mutate
    server-level state such as databases and users.
    """

    influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')

    def test_fresh_server_no_db(self):
        """Test a fresh server without database."""
        self.assertEqual([], self.cli.get_list_database())

    def test_create_database(self):
        """Test create a database."""
        self.assertIsNone(self.cli.create_database('new_db_1'))
        self.assertIsNone(self.cli.create_database('new_db_2'))
        self.assertEqual(
            self.cli.get_list_database(),
            [{'name': 'new_db_1'}, {'name': 'new_db_2'}]
        )

    def test_drop_database(self):
        """Test drop a database."""
        # reuses test_create_database to provision the two databases
        self.test_create_database()
        self.assertIsNone(self.cli.drop_database('new_db_1'))
        self.assertEqual([{'name': 'new_db_2'}], self.cli.get_list_database())

    def test_query_fail(self):
        """Test that a query failed."""
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.query('select column_one from foo')
        self.assertIn('database not found: db',
                      ctx.exception.content)

    def test_query_fail_ignore_errors(self):
        """Test query failed but ignore errors."""
        result = self.cli.query('select column_one from foo',
                                raise_errors=False)
        self.assertEqual(result.error, 'database not found: db')

    def test_create_user(self):
        """Test create user."""
        self.cli.create_user('test_user', 'secret_password')
        rsp = list(self.cli.query("SHOW USERS")['results'])
        self.assertIn({'user': 'test_user', 'admin': False},
                      rsp)

    def test_create_user_admin(self):
        """Test create admin user."""
        self.cli.create_user('test_user', 'secret_password', True)
        rsp = list(self.cli.query("SHOW USERS")['results'])
        self.assertIn({'user': 'test_user', 'admin': True},
                      rsp)

    def test_create_user_blank_password(self):
        """Test create user with a blank pass."""
        self.cli.create_user('test_user', '')
        rsp = list(self.cli.query("SHOW USERS")['results'])
        self.assertIn({'user': 'test_user', 'admin': False},
                      rsp)

    def test_get_list_users_empty(self):
        """Test get list of users, but empty."""
        rsp = self.cli.get_list_users()
        self.assertEqual([], rsp)

    def test_get_list_users(self):
        """Test get list of users."""
        self.cli.query("CREATE USER test WITH PASSWORD 'test'")
        rsp = self.cli.get_list_users()
        self.assertEqual(
            [{'user': 'test', 'admin': False}],
            rsp
        )

    def test_create_user_blank_username(self):
        """Test create blank username."""
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.create_user('', 'secret_password')
        self.assertIn('username required',
                      ctx.exception.content)
        rsp = list(self.cli.query("SHOW USERS")['results'])
        # the failed create must not have left a user behind
        self.assertEqual(rsp, [])

    def test_drop_user(self):
        """Test drop a user."""
        self.cli.query("CREATE USER test WITH PASSWORD 'test'")
        self.cli.drop_user('test')
        users = list(self.cli.query("SHOW USERS")['results'])
        self.assertEqual(users, [])

    def test_drop_user_nonexisting(self):
        """Test dropping a nonexistent user."""
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.drop_user('test')
        self.assertIn('user not found',
                      ctx.exception.content)

    @unittest.skip("Broken as of 0.9.0")
    def test_revoke_admin_privileges(self):
        """Test revoking admin privs, deprecated as of v0.9.0."""
        self.cli.create_user('test', 'test', admin=True)
        self.assertEqual([{'user': 'test', 'admin': True}],
                         self.cli.get_list_users())
        self.cli.revoke_admin_privileges('test')
        self.assertEqual([{'user': 'test', 'admin': False}],
                         self.cli.get_list_users())

    def test_grant_privilege(self):
        """Test grant privs to user."""
        self.cli.create_user('test', 'test')
        self.cli.create_database('testdb')
        self.cli.grant_privilege('all', 'testdb', 'test')
        # TODO: when supported by InfluxDB, check if privileges are granted

    def test_grant_privilege_invalid(self):
        """Test grant invalid privs to user."""
        self.cli.create_user('test', 'test')
        self.cli.create_database('testdb')
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.grant_privilege('', 'testdb', 'test')
        self.assertEqual(400, ctx.exception.code)
        self.assertIn('{"error":"error parsing query: ',
                      ctx.exception.content)

    def test_revoke_privilege(self):
        """Test revoke privs from user."""
        self.cli.create_user('test', 'test')
        self.cli.create_database('testdb')
        self.cli.revoke_privilege('all', 'testdb', 'test')
        # TODO: when supported by InfluxDB, check if privileges are revoked

    def test_revoke_privilege_invalid(self):
        """Test revoke invalid privs from user."""
        self.cli.create_user('test', 'test')
        self.cli.create_database('testdb')
        with self.assertRaises(InfluxDBClientError) as ctx:
            self.cli.revoke_privilege('', 'testdb', 'test')
        self.assertEqual(400, ctx.exception.code)
        self.assertIn('{"error":"error parsing query: ',
                      ctx.exception.content)

    def test_invalid_port_fails(self):
        """Test invalid port access fails."""
        # the client validates the port argument before any connection
        with self.assertRaises(ValueError):
            InfluxDBClient('host', '80/redir', 'username', 'password')
@skipServerTests
class CommonTests(ManyTestCasesWithServerMixin, unittest.TestCase):
    """Define a class to handle common tests for the server.

    All tests share ONE server instance (ManyTestCasesWithServerMixin);
    a fresh 'db' database is created before, and dropped after, each
    individual test.
    """

    influxdb_template_conf = os.path.join(THIS_DIR, 'influxdb.conf.template')

    def test_write(self):
        """Test write to the server."""
        self.assertIs(True, self.cli.write(
            {'points': dummy_point},
            params={'db': 'db'},
        ))

    def test_write_check_read(self):
        """Test write and check read of data to server."""
        self.test_write()
        # points are not necessarily queryable right after the write
        time.sleep(1)
        rsp = self.cli.query('SELECT * FROM cpu_load_short', database='db')
        self.assertListEqual([{'value': 0.64, 'time': '2009-11-10T23:00:00Z',
                               "host": "server01", "region": "us-west"}],
                             list(rsp.get_points()))

    def test_write_points(self):
        """Test writing points to the server."""
        self.assertIs(True, self.cli.write_points(dummy_point))

    @skipIfPYpy
    def test_write_points_DF(self):
        """Test writing points with dataframe."""
        self.assertIs(
            True,
            self.cliDF.write_points(
                dummy_pointDF['dataframe'],
                dummy_pointDF['measurement'],
                dummy_pointDF['tags']
            )
        )

    def test_write_points_check_read(self):
        """Test writing points and check read back."""
        self.test_write_points()
        time.sleep(1)  # same as test_write_check_read()
        rsp = self.cli.query('SELECT * FROM cpu_load_short')
        self.assertEqual(
            list(rsp),
            [[
                {'value': 0.64,
                 'time': '2009-11-10T23:00:00Z',
                 "host": "server01",
                 "region": "us-west"}
            ]]
        )
        rsp2 = list(rsp.get_points())
        self.assertEqual(len(rsp2), 1)
        pt = rsp2[0]
        self.assertEqual(
            pt,
            {'time': '2009-11-10T23:00:00Z',
             'value': 0.64,
             "host": "server01",
             "region": "us-west"}
        )

    @unittest.skip("Broken as of 0.9.0")
    def test_write_points_check_read_DF(self):
        """Test write points and check back with dataframe."""
        self.test_write_points_DF()
        time.sleep(1)  # same as test_write_check_read()
        rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
        assert_frame_equal(
            rsp['cpu_load_short'],
            dummy_pointDF['dataframe']
        )
        # Query with Tags
        rsp = self.cliDF.query(
            "SELECT * FROM cpu_load_short GROUP BY *")
        assert_frame_equal(
            rsp[('cpu_load_short',
                 (('host', 'server01'), ('region', 'us-west')))],
            dummy_pointDF['dataframe']
        )

    def test_write_multiple_points_different_series(self):
        """Test write multiple points to different series."""
        self.assertIs(True, self.cli.write_points(dummy_points))
        time.sleep(1)
        rsp = self.cli.query('SELECT * FROM cpu_load_short')
        lrsp = list(rsp)
        self.assertEqual(
            [[
                {'value': 0.64,
                 'time': '2009-11-10T23:00:00Z',
                 "host": "server01",
                 "region": "us-west"}
            ]],
            lrsp
        )
        rsp = list(self.cli.query('SELECT * FROM memory'))
        self.assertEqual(
            rsp,
            [[
                {'value': 33,
                 'time': '2009-11-10T23:01:35Z',
                 "host": "server01",
                 "region": "us-west"}
            ]]
        )

    def test_select_into_as_post(self):
        """Test SELECT INTO is POSTed."""
        self.assertIs(True, self.cli.write_points(dummy_points))
        time.sleep(1)
        rsp = self.cli.query('SELECT * INTO "newmeas" FROM "memory"')
        rsp = self.cli.query('SELECT * FROM "newmeas"')
        lrsp = list(rsp)
        self.assertEqual(
            lrsp,
            [[
                {'value': 33,
                 'time': '2009-11-10T23:01:35Z',
                 "host": "server01",
                 "region": "us-west"}
            ]]
        )

    @unittest.skip("Broken as of 0.9.0")
    def test_write_multiple_points_different_series_DF(self):
        """Test write multiple points using dataframe to different series."""
        for i in range(2):
            self.assertIs(
                True, self.cliDF.write_points(
                    dummy_pointsDF[i]['dataframe'],
                    dummy_pointsDF[i]['measurement'],
                    dummy_pointsDF[i]['tags']))
        time.sleep(1)
        rsp = self.cliDF.query('SELECT * FROM cpu_load_short')
        assert_frame_equal(
            rsp['cpu_load_short'],
            dummy_pointsDF[0]['dataframe']
        )
        rsp = self.cliDF.query('SELECT * FROM memory')
        assert_frame_equal(
            rsp['memory'],
            dummy_pointsDF[1]['dataframe']
        )

    def test_write_points_batch(self):
        """Test writing points in a batch."""
        # local fixture (shadows the module-level dummy_points on purpose)
        dummy_points = [
            {"measurement": "cpu_usage", "tags": {"unit": "percent"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
            {"measurement": "network", "tags": {"direction": "in"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
            {"measurement": "network", "tags": {"direction": "out"},
             "time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
        ]
        # batch_size=2 forces the client to split this into two requests
        self.cli.write_points(points=dummy_points,
                              tags={"host": "server01",
                                    "region": "us-west"},
                              batch_size=2)
        time.sleep(5)
        net_in = self.cli.query("SELECT value FROM network "
                                "WHERE direction='in'").raw
        net_out = self.cli.query("SELECT value FROM network "
                                 "WHERE direction='out'").raw
        cpu = self.cli.query("SELECT value FROM cpu_usage").raw
        self.assertIn(123, net_in['series'][0]['values'][0])
        self.assertIn(12, net_out['series'][0]['values'][0])
        self.assertIn(12.34, cpu['series'][0]['values'][0])

    def test_query(self):
        """Test querying data back from server."""
        self.assertIs(True, self.cli.write_points(dummy_point))

    @unittest.skip('Not implemented for 0.9')
    def test_query_chunked(self):
        """Test query for chunked response from server."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23]
            ],
            'name': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        del cli
        del example_object
        # TODO ?

    def test_delete_series_invalid(self):
        """Test delete invalid series."""
        with self.assertRaises(InfluxDBClientError):
            self.cli.delete_series()

    def test_default_retention_policy(self):
        """Test add default retention policy."""
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'name': 'autogen',
                 'duration': '0s',
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'default': True}
            ],
            rsp
        )

    def test_create_retention_policy_default(self):
        """Test create a new default retention policy."""
        self.cli.create_retention_policy('somename', '1d', 1, default=True)
        self.cli.create_retention_policy('another', '2d', 1, default=False)
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '24h0m0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'1h0m0s',
                 'name': 'somename'},
                {'duration': '48h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'24h0m0s',
                 'name': 'another'}
            ],
            rsp
        )

    def test_create_retention_policy(self):
        """Test creating a new retention policy, not default."""
        self.cli.create_retention_policy('somename', '1d', 1)
        # NB: creating a retention policy without specifying
        # shard group duration
        # leads to a shard group duration of 1 hour
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '24h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'1h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )
        self.cli.drop_retention_policy('somename', 'db')
        # recreate the RP
        self.cli.create_retention_policy('somename', '1w', 1,
                                         shard_duration='1h')
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '168h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'1h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )
        self.cli.drop_retention_policy('somename', 'db')
        # recreate the RP
        self.cli.create_retention_policy('somename', '1w', 1)
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '168h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'24h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

    def test_alter_retention_policy(self):
        """Test alter a retention policy, not default."""
        self.cli.create_retention_policy('somename', '1d', 1)
        # Test alter duration
        self.cli.alter_retention_policy('somename', 'db',
                                        duration='4d',
                                        shard_duration='2h')
        # NB: altering retention policy doesn't change shard group duration
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '96h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'2h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )
        # Test alter replication
        self.cli.alter_retention_policy('somename', 'db',
                                        replication=4)
        # NB: altering retention policy doesn't change shard group duration
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '96h0m0s',
                 'default': False,
                 'replicaN': 4,
                 'shardGroupDuration': u'2h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )
        # Test alter default
        self.cli.alter_retention_policy('somename', 'db',
                                        default=True)
        # NB: altering retention policy doesn't change shard group duration
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '96h0m0s',
                 'default': True,
                 'replicaN': 4,
                 'shardGroupDuration': u'2h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )
        # Test alter shard_duration
        self.cli.alter_retention_policy('somename', 'db',
                                        shard_duration='4h')
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '96h0m0s',
                 'default': True,
                 'replicaN': 4,
                 'shardGroupDuration': u'4h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

    def test_alter_retention_policy_invalid(self):
        """Test invalid alter retention policy."""
        self.cli.create_retention_policy('somename', '1d', 1)
        with self.assertRaises(InfluxDBClientError) as ctx:
            # no change requested at all -> server rejects the query
            self.cli.alter_retention_policy('somename', 'db')
        self.assertEqual(400, ctx.exception.code)
        self.assertIn('{"error":"error parsing query: ',
                      ctx.exception.content)
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'},
                {'duration': '24h0m0s',
                 'default': False,
                 'replicaN': 1,
                 'shardGroupDuration': u'1h0m0s',
                 'name': 'somename'}
            ],
            rsp
        )

    def test_drop_retention_policy(self):
        """Test drop a retention policy."""
        self.cli.create_retention_policy('somename', '1d', 1)
        # Test drop retention
        self.cli.drop_retention_policy('somename', 'db')
        rsp = self.cli.get_list_retention_policies()
        self.assertEqual(
            [
                {'duration': '0s',
                 'default': True,
                 'replicaN': 1,
                 'shardGroupDuration': u'168h0m0s',
                 'name': 'autogen'}
            ],
            rsp
        )

    def test_issue_143(self):
        """Test for PR#143 from repo."""
        pt = partial(point, 'a_series_name', timestamp='2015-03-30T16:16:37Z')
        pts = [
            pt(value=15),
            pt(tags={'tag_1': 'value1'}, value=5),
            pt(tags={'tag_1': 'value2'}, value=10),
        ]
        self.cli.write_points(pts)
        time.sleep(1)
        rsp = list(self.cli.query('SELECT * FROM a_series_name \
GROUP BY tag_1').get_points())
        self.assertEqual(
            [
                {'time': '2015-03-30T16:16:37Z', 'value': 15},
                {'time': '2015-03-30T16:16:37Z', 'value': 5},
                {'time': '2015-03-30T16:16:37Z', 'value': 10}
            ],
            rsp
        )
        # a slightly more complex one with 2 tags values:
        pt = partial(point, 'series2', timestamp='2015-03-30T16:16:37Z')
        pts = [
            pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
            pt(tags={'tag1': 'value1', 'tag2': 'v2'}, value=5),
            pt(tags={'tag1': 'value2', 'tag2': 'v1'}, value=10),
        ]
        self.cli.write_points(pts)
        time.sleep(1)
        rsp = self.cli.query('SELECT * FROM series2 GROUP BY tag1,tag2')
        self.assertEqual(
            [
                {'value': 0, 'time': '2015-03-30T16:16:37Z'},
                {'value': 5, 'time': '2015-03-30T16:16:37Z'},
                {'value': 10, 'time': '2015-03-30T16:16:37Z'}
            ],
            list(rsp['series2'])
        )
        all_tag2_equal_v1 = list(rsp.get_points(tags={'tag2': 'v1'}))
        self.assertEqual(
            [{'value': 0, 'time': '2015-03-30T16:16:37Z'},
             {'value': 10, 'time': '2015-03-30T16:16:37Z'}],
            all_tag2_equal_v1,
        )

    def test_query_multiple_series(self):
        """Test query for multiple series."""
        pt = partial(point, 'series1', timestamp='2015-03-30T16:16:37Z')
        pts = [
            pt(tags={'tag1': 'value1', 'tag2': 'v1'}, value=0),
        ]
        self.cli.write_points(pts)
        pt = partial(point, 'series2', timestamp='1970-03-30T16:16:37Z')
        pts = [
            pt(tags={'tag1': 'value1', 'tag2': 'v1'},
               value=0, data1=33, data2="bla"),
        ]
        self.cli.write_points(pts)
@skipServerTests
class UdpTests(ManyTestCasesWithServerMixin, unittest.TestCase):
    """Define a class to test UDP series."""

    # enable the UDP listener in the server template conf
    influxdb_udp_enabled = True
    influxdb_template_conf = os.path.join(THIS_DIR,
                                          'influxdb.conf.template')

    def test_write_points_udp(self):
        """Test write points UDP."""
        # build a dedicated client that writes over UDP instead of HTTP
        cli = InfluxDBClient(
            'localhost',
            self.influxd_inst.http_port,
            'root',
            '',
            database='db',
            use_udp=True,
            udp_port=self.influxd_inst.udp_port
        )
        cli.write_points(dummy_point)
        # The points are not immediately available after write_points.
        # This is to be expected because we are using udp (no response !).
        # So we have to wait some time,
        time.sleep(3)  # 3 sec seems to be a good choice.
        # read back over plain HTTP with the shared client
        rsp = self.cli.query('SELECT * FROM cpu_load_short')
        self.assertEqual(
            # this is dummy_points :
            [
                {'value': 0.64,
                 'time': '2009-11-10T23:00:00Z',
                 "host": "server01",
                 "region": "us-west"}
            ],
            list(rsp['cpu_load_short'])
        )

View file

@ -0,0 +1,198 @@
# -*- coding: utf-8 -*-
"""Define the test module for an influxdb instance."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import datetime
import distutils
import os
import tempfile
import shutil
import subprocess
import sys
import time
import unittest
from influxdb.tests.misc import is_port_open, get_free_ports
# hack in check_output if it's not defined, like for python 2.6
# hack in check_output if it's not defined, like for python 2.6
if not hasattr(subprocess, "check_output"):
    def _check_output(*popenargs, **kwargs):
        """Run a command and return its stdout (py2.6 backport)."""
        if 'stdout' in kwargs:
            raise ValueError(
                'stdout argument not allowed, it will be overridden.'
            )
        proc = subprocess.Popen(stdout=subprocess.PIPE,
                                *popenargs,
                                **kwargs)
        output = proc.communicate()[0]
        rc = proc.poll()
        if rc:
            cmd = kwargs.get("args")
            if cmd is None:
                cmd = popenargs[0]
            # NB: py2.6's CalledProcessError has no 'output' attribute,
            # so only returncode and cmd are passed along.
            raise subprocess.CalledProcessError(rc, cmd)
        return output

    subprocess.check_output = _check_output
class InfluxDbInstance(object):
    """Define an instance of InfluxDB.

    A class to launch a fresh influxdb server instance
    in a temporary place, using a config file template.
    """

    def __init__(self, conf_template, udp_enabled=False):
        """Initialize an instance of InfluxDbInstance.

        :param conf_template: path to a config template whose
            ``{placeholders}`` get filled with temp dirs/free ports.
        :param udp_enabled: whether the server should open a UDP port.
        :raises unittest.SkipTest: when server tests are disabled via
            the INFLUXDB_PYTHON_SKIP_SERVER_TESTS environment variable,
            or no influxd binary can be found.
        """
        if os.environ.get("INFLUXDB_PYTHON_SKIP_SERVER_TESTS", None) == 'True':
            raise unittest.SkipTest(
                "Skipping server test (INFLUXDB_PYTHON_SKIP_SERVER_TESTS)"
            )

        self.influxd_path = self.find_influxd_path()

        # The free ports picked in _start_server() can be grabbed by
        # another process before influxd binds them, so retry a couple
        # of times before giving up.
        errors = 0
        while True:
            try:
                self._start_server(conf_template, udp_enabled)
                break
            # Happens when the ports are already in use.
            except RuntimeError as e:
                errors += 1
                if errors > 2:
                    raise e

    def _start_server(self, conf_template, udp_enabled):
        """Render the conf template, spawn influxd and wait until ready."""
        # create a temporary dir to store all needed files
        # for the influxdb server instance :
        self.temp_dir_base = tempfile.mkdtemp()

        # "temp_dir_base" will be used for conf file and logs,
        # while "temp_dir_influxdb" is for the databases files/dirs :
        tempdir = self.temp_dir_influxdb = tempfile.mkdtemp(
            dir=self.temp_dir_base)

        # find a couple free ports :
        free_ports = get_free_ports(4)
        ports = {}
        for service in 'http', 'global', 'meta', 'udp':
            ports[service + '_port'] = free_ports.pop()
        if not udp_enabled:
            # -1 in the template renders an effectively disabled port
            ports['udp_port'] = -1

        conf_data = dict(
            meta_dir=os.path.join(tempdir, 'meta'),
            data_dir=os.path.join(tempdir, 'data'),
            wal_dir=os.path.join(tempdir, 'wal'),
            cluster_dir=os.path.join(tempdir, 'state'),
            handoff_dir=os.path.join(tempdir, 'handoff'),
            logs_file=os.path.join(self.temp_dir_base, 'logs.txt'),
            udp_enabled='true' if udp_enabled else 'false',
        )
        conf_data.update(ports)
        # expose every dir/port (http_port, udp_port, ...) as an attribute
        self.__dict__.update(conf_data)

        conf_file = os.path.join(self.temp_dir_base, 'influxdb.conf')
        with open(conf_file, "w") as fh:
            with open(conf_template) as fh_template:
                fh.write(fh_template.read().format(**conf_data))

        # now start the server instance:
        self.proc = subprocess.Popen(
            [self.influxd_path, '-config', conf_file],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )

        print(
            "%s > Started influxdb bin in %r with ports %s and %s.." % (
                datetime.datetime.now(),
                self.temp_dir_base,
                self.global_port,
                self.http_port
            )
        )

        # wait for it to listen on the broker and admin ports:
        # usually a fresh instance is ready in less than 1 sec ..
        timeout = time.time() + 10  # so 10 secs should be enough,
        # otherwise either your system load is high,
        # or you run a 286 @ 1Mhz ?
        try:
            while time.time() < timeout:
                if (is_port_open(self.http_port) and
                        is_port_open(self.global_port)):
                    # it's hard to check if a UDP port is open..
                    if udp_enabled:
                        # so let's just sleep 0.5 sec in this case
                        # to be sure that the server has open the port
                        time.sleep(0.5)
                    break
                time.sleep(0.5)
                if self.proc.poll() is not None:
                    raise RuntimeError('influxdb prematurely exited')
            else:
                # while-else: loop ran to completion without break
                self.proc.terminate()
                self.proc.wait()
                raise RuntimeError('Timeout waiting for influxdb to listen'
                                   ' on its ports (%s)' % ports)
        except RuntimeError as err:
            # enrich the error with the server's logs/stdout/stderr
            data = self.get_logs_and_output()
            data['reason'] = str(err)
            data['now'] = datetime.datetime.now()
            raise RuntimeError("%(now)s > %(reason)s. RC=%(rc)s\n"
                               "stdout=%(out)s\nstderr=%(err)s\nlogs=%(logs)r"
                               % data)

    def find_influxd_path(self):
        """Find the path for InfluxDB.

        Resolution order: INFLUXDB_PYTHON_INFLUXD_PATH env var, then
        PATH lookup, then `which influxd`, then /opt/influxdb/influxd.
        """
        # BUGFIX: the module does 'import distutils', which does NOT
        # load the 'spawn' submodule; import it explicitly so
        # distutils.spawn.find_executable is guaranteed to exist.
        import distutils.spawn

        influxdb_bin_path = os.environ.get(
            'INFLUXDB_PYTHON_INFLUXD_PATH',
            None
        )

        if influxdb_bin_path is None:
            influxdb_bin_path = distutils.spawn.find_executable('influxd')
            if not influxdb_bin_path:
                try:
                    influxdb_bin_path = subprocess.check_output(
                        ['which', 'influxd']
                    ).strip()
                except subprocess.CalledProcessError:
                    # fallback on :
                    influxdb_bin_path = '/opt/influxdb/influxd'

        if not os.path.isfile(influxdb_bin_path):
            raise unittest.SkipTest("Could not find influxd binary")

        version = subprocess.check_output([influxdb_bin_path, 'version'])
        print("InfluxDB version: %s" % version, file=sys.stderr)

        return influxdb_bin_path

    def get_logs_and_output(self):
        """Query for logs and output.

        Returns a dict with the process return code, its stdout/stderr
        and the content of the server log file (or an explanation of
        why it could not be read).
        """
        proc = self.proc
        try:
            with open(self.logs_file) as fh:
                logs = fh.read()
        except IOError as err:
            logs = "Couldn't read logs: %s" % err
        return {
            'rc': proc.returncode,
            'out': proc.stdout.read(),
            'err': proc.stderr.read(),
            'logs': logs
        }

    def close(self, remove_tree=True):
        """Close an instance of InfluxDB.

        :param remove_tree: also delete the temporary directory tree
            (pass False to keep it for post-mortem debugging).
        """
        self.proc.terminate()
        self.proc.wait()
        if remove_tree:
            shutil.rmtree(self.temp_dir_base)