Skip to content

Commit

Permalink
[INT-55] Adding support for extra dimensions in config (#51)
Browse files Browse the repository at this point in the history
* Adding integration test that does basic check for presence of metrics

* [INT-55] Adding support for extra dimensions in config

* Tweak ES for integration tests

 - Use less memory
 - Don't depend on Docker volumes which don't work in CircleCI
  • Loading branch information
keitwb authored and charless-splunk committed May 8, 2017
1 parent 8c2c2ae commit 4fe5626
Show file tree
Hide file tree
Showing 17 changed files with 324 additions and 9 deletions.
37 changes: 37 additions & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# CircleCI 2.0 build: runs the unit tests and the docker-compose based
# integration tests inside a remote-docker environment.
version: '2'
jobs:
  build:
    docker:
      - image: ubuntu:yakkety
    working_directory: ~/code
    steps:
      - setup_remote_docker
      - run:
          name: Install Docker client
          command: |
            set -x
            VER="17.03.0-ce"
            apt-get update -q
            apt-get install -yq curl python
            curl -L -o /tmp/docker-$VER.tgz https://get.docker.com/builds/Linux/x86_64/docker-$VER.tgz
            tar -xz -C /tmp -f /tmp/docker-$VER.tgz
            mv /tmp/docker/* /usr/bin
      - run:
          name: Install docker-compose
          command: |
            set -x
            curl -L https://github.com/docker/compose/releases/download/1.11.2/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
            chmod +x /usr/local/bin/docker-compose
      - checkout
      - run:
          name: Run basic tests
          working_directory: ~/code/tests
          command: |
            bash run_tests.sh | tee /tmp/test.log || true # The test command always exits non-zero
            # BUG FIX: `grep -v -E 'FAILED|ERROR' ... || exit 1` exits 0
            # whenever ANY non-matching line exists, so the step could
            # never fail.  Fail iff a failure marker is present:
            ! grep -E 'FAILED|ERROR' /tmp/test.log || exit 1
      - run:
          name: Run integration tests
          working_directory: ~/code/tests/integration
          command: |
            chmod +x set_cluster_name wait_for_es
            bash ./run.sh
6 changes: 0 additions & 6 deletions circle.yml

This file was deleted.

20 changes: 17 additions & 3 deletions elasticsearch_collectd.py
Original file line number Diff line number Diff line change
Expand Up @@ -660,6 +660,8 @@ def configure_callback(conf):
c.defaults.add(metric_name)
elif node.key == "IndexStatsMasterOnly":
c.master_only = str_to_bool(node.values[0])
elif node.key == "Dimensions":
c.extra_dimensions = node.values[0]
else:
log.warning('Unknown config key: %s.' % node.key)

Expand Down Expand Up @@ -757,6 +759,8 @@ def __init__(self):
self.es_current_master = False
self.node_id = None

self.extra_dimensions = ''

def sanatize_intervals(self):
"""Sanitizes the index interval to be greater or equal to and divisible by
the collection interval
Expand Down Expand Up @@ -1065,9 +1069,9 @@ def dispatch_stat(self, result, name, key, dimensions=None):

# If dimensions are provided, format them and append
# them to the plugin_instance
if dimensions:
val.plugin_instance += '[{dims}]'.format(dims=','.join(['='.join(d)
for d in dimensions.items()]))
dim_str = self.get_dimension_string(dimensions)
if dim_str:
val.plugin_instance += '[{dims}]'.format(dims=dim_str)

val.type = estype
val.type_instance = name
Expand All @@ -1076,6 +1080,16 @@ def dispatch_stat(self, result, name, key, dimensions=None):
log.info('Emitting value: %s' % val)
val.dispatch()

def get_dimension_string(self, dimensions):
    """Build the comma-separated dimension string appended to a value's
    plugin_instance.

    Merges the per-call `dimensions` dict (as key=value pairs) with the
    configured extra dimensions string; returns '' when both are empty.
    """
    parts = []
    if dimensions:
        parts.append(','.join('='.join(pair) for pair in dimensions.items()))
    if self.extra_dimensions:
        parts.append(self.extra_dimensions)
    return ','.join(parts)


def sanitize_type_instance(index_name):
"""
Expand Down
1 change: 1 addition & 0 deletions tests/integration/.env
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
COMPOSE_PROJECT_NAME=collectd-elasticsearch-int
1 change: 1 addition & 0 deletions tests/integration/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Dockerfile.es.*
28 changes: 28 additions & 0 deletions tests/integration/20-elasticsearch-test.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Collectd config used by the integration tests: one module instance
# per Elasticsearch cluster under test (hostnames match the
# docker-compose service names).
<LoadPlugin "python">
  Globals true
</LoadPlugin>

<Plugin "python">
  ModulePath "/usr/share/collectd/collectd-elasticsearch"

  Import "elasticsearch_collectd"

  # ES 1.7 cluster; also exercises the extra-Dimensions config support.
  <Module "elasticsearch_collectd">
    Interval 3
    IndexInterval 3
    Host es17
    Dimensions "testdim=5"
  </Module>

  # ES 2.4 cluster.
  <Module "elasticsearch_collectd">
    Interval 3
    IndexInterval 3
    Host es24
  </Module>

  # ES 5.3 cluster.
  <Module "elasticsearch_collectd">
    Interval 3
    IndexInterval 3
    Host es53
  </Module>
</Plugin>
15 changes: 15 additions & 0 deletions tests/integration/Dockerfile.collectd
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Collectd image for the integration tests: stock SignalFx collectd
# with every plugin we can disable turned off, leaving elasticsearch.
FROM quay.io/signalfuse/collectd:latest

# Disable everything we can except elasticsearch
ENV COLLECTD_INTERVAL=3 COLLECTD_HOSTNAME=es-test DISABLE_AGGREGATION=true DISABLE_CPU=true DISABLE_CPUFREQ=true DISABLE_DF=true DISABLE_DISK=true DISABLE_DOCKER=true DISABLE_HOST_MONITORING=true DISABLE_INTERFACE=true DISABLE_LOAD=true DISABLE_MEMORY=true DISABLE_PROTOCOLS=true DISABLE_VMEM=true DISABLE_UPTIME=true

# Debian is super minimalistic
# netcat is needed by wait_for_es to probe port 9200.
RUN apt-get update &&\
    apt-get install -yq netcat

# Delay collectd startup until all ES nodes accept connections; the
# script then execs the image's normal entrypoint.
CMD /.docker/wait_for_es
ADD tests/integration/wait_for_es /.docker/wait_for_es

## The context of the image build should be the root dir of this repo!!
ADD elasticsearch_collectd.py /usr/share/collectd/collectd-elasticsearch/
ADD tests/integration/20-elasticsearch-test.conf /etc/collectd/managed_config/
5 changes: 5 additions & 0 deletions tests/integration/Dockerfile.es
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Template Dockerfile: ES_VERSION is substituted with a concrete
# release by the make-es-dockerfiles script (producing
# Dockerfile.es.<version> files, which are gitignored).
FROM elasticsearch:ES_VERSION

# set_cluster_name tweaks heap/cluster-name config, then execs the
# stock entrypoint.
CMD /set_cluster_name

ADD set_cluster_name /set_cluster_name
6 changes: 6 additions & 0 deletions tests/integration/Dockerfile.sink
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Fake SignalFx ingest + metric echo server (see sink.py).
FROM python:2

# 80: collectd ingest POSTs; 8080: JSON dump of collected metrics.
EXPOSE 80 8080

ADD sink.py /opt/sink.py
# -u: unbuffered stdout so prints appear promptly in `docker logs`.
CMD python -u /opt/sink.py
4 changes: 4 additions & 0 deletions tests/integration/Dockerfile.test
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Assertion runner for the integration tests (see test.py).
FROM python:2

ADD test.py /opt/test.py
# -u: unbuffered stdout so progress messages appear in `docker logs`.
CMD python -u /opt/test.py
46 changes: 46 additions & 0 deletions tests/integration/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
---
# Integration-test stack: three Elasticsearch versions, a fake SignalFx
# ingest, the collectd instance under test, and the assertion runner.
version: '2'
services:
  collectd:
    build:
      # Built from the repo root so the plugin source can be ADDed.
      context: ../..
      dockerfile: tests/integration/Dockerfile.collectd
    environment:
      SF_API_TOKEN: testing
      # Point the writer at the fake ingest service below.
      SF_INGEST_HOST: fake_sfx
    depends_on:
      - fake_sfx
      - es17
      - es24
      - es53

  # The versioned Dockerfiles are generated from the Dockerfile.es
  # template by make-es-dockerfiles before compose runs.
  es17:
    build:
      context: .
      dockerfile: Dockerfile.es.1.7.6

  es24:
    build:
      context: .
      dockerfile: Dockerfile.es.2.4.5

  es53:
    build:
      context: .
      dockerfile: Dockerfile.es.5.3.2

  fake_sfx:
    build:
      context: .
      dockerfile: Dockerfile.sink

  test:
    build:
      context: .
      dockerfile: Dockerfile.test
    depends_on:
      - collectd


networks:
  default: {}
6 changes: 6 additions & 0 deletions tests/integration/make-es-dockerfiles
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
#!/bin/bash

# Generate one Dockerfile per Elasticsearch release from the
# Dockerfile.es template by substituting the ES_VERSION placeholder.
for ver in 1.7.6 2.4.5 5.3.2; do
    sed -e "s/ES_VERSION/$ver/" Dockerfile.es > "Dockerfile.es.$ver"
done
14 changes: 14 additions & 0 deletions tests/integration/run.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#!/bin/bash

# Run the integration-test stack via docker-compose from this script's
# own directory, then tear the stack down, propagating the test exit
# status.

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# FIX: quote the path (spaces-safe) and abort if cd fails so we never
# run compose against the wrong directory.
cd "$DIR" || exit 1

# Regenerate the per-version ES Dockerfiles before building.
./make-es-dockerfiles

docker-compose run --rm -T test
status=$?

docker-compose stop -t0

exit $status
13 changes: 13 additions & 0 deletions tests/integration/set_cluster_name
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#!/bin/bash

# Entrypoint wrapper for the ES test containers: shrink the heap so
# several clusters fit in CI memory, pin the cluster name to the ES
# version (the tests look for "es-<version>" in plugin_instance), then
# hand off to the stock entrypoint.

cat <<EOH >> /etc/default/elasticsearch
ES_HEAP_SIZE=128m
MAX_LOCKED_MEMORY=100000
EOH

# Overwrite (not append) the config so image defaults don't leak in.
cat <<EOH > /usr/share/elasticsearch/config/elasticsearch.yml
cluster.name: es-${ELASTICSEARCH_VERSION}
http.host: 0.0.0.0
EOH

exec /docker-entrypoint.sh elasticsearch
61 changes: 61 additions & 0 deletions tests/integration/sink.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import json
import signal
import threading
from time import time

# This module collects metrics from collectd and can echo them back out for
# making assertions on the collected metrics.


# Fake the /v1/collectd endpoint and just stick all of the metrics in a
# list
def run_fake_ingest(metric_data):
class FakeCollectdIngest(BaseHTTPRequestHandler):
def do_POST(self):
body = self.rfile.read(int(self.headers.getheader('Content-Length')))

metric_data.extend(json.loads(body))

self.send_response(200)
self.send_header("Content-Type", "text/ascii")
self.send_header("Content-Length", "2")
self.end_headers()
self.wfile.write("OK")

print 'Starting ingest server on port 80'
httpd = HTTPServer(('', 80), FakeCollectdIngest)
httpd.serve_forever()
print 'Ingest server shutting down'


# Dumps all of the collected metrics back out as JSON upon request
def serve_metric_data(metric_data):
class MetricDataSpewer(BaseHTTPRequestHandler):
def do_GET(self):
data = json.dumps(metric_data)
self.send_response(200)
self.send_header("Content-Type", "application/json")
self.send_header("Content-Length", str(len(data)))
self.end_headers()
print data
self.rfile.write(data)

print 'Starting metric spewer on port 8080'
httpd = HTTPServer(('', 8080), MetricDataSpewer)
httpd.serve_forever()
print 'Metric spewer shutting down'


if __name__ == "__main__":
    # Shared accumulator; list append/extend are atomic under the GIL.
    collected = []
    ingest_thread = threading.Thread(target=run_fake_ingest, args=(collected,))
    spewer_thread = threading.Thread(target=serve_metric_data, args=(collected,))

    for t in (ingest_thread, spewer_thread):
        t.start()

    # Both servers run forever; block here until the process is killed.
    for t in (ingest_thread, spewer_thread):
        t.join()

54 changes: 54 additions & 0 deletions tests/integration/test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
#!/usr/bin/env python

import httplib
import json
from time import time, sleep

# Quick and dirty integration test for multi-cluster support in one collectd
# instance. This test script is intended to be run with docker-compose with the
# provided docker-compose.yml configuration.

# This is not very flexible but could be expanded to support other types of
# integration tests if so desired.

# Elasticsearch releases spun up by docker-compose; cluster names are
# derived from these as "es-<version>" (see set_cluster_name).
VERSIONS_TESTED = [
    '1.7.6',
    '2.4.5',
    '5.3.2',
]
# Overall budget for all clusters combined, not per cluster.
TIMEOUT_SECS = 60


def get_metric_data():
    """Fetch everything the fake ingest has received, as parsed JSON."""
    # Use httplib instead of requests so we don't have to install stuff with pip
    conn = httplib.HTTPConnection("fake_sfx", 8080)
    try:
        conn.request("GET", "/")
        # FIX: read the body *before* closing the connection; the
        # original called conn.close() first and relied on the
        # response's internal buffering still being readable.
        body = conn.getresponse().read()
    finally:
        # finally ensures the socket is released even if the request
        # or read raises.
        conn.close()
    return json.loads(body)


def wait_for_metrics_from_each_cluster():
start = time()
for cluster in ['es-' + v for v in VERSIONS_TESTED]:
print 'Waiting for metrics from cluster %s...' % (cluster,)
eventually_true(lambda: any([cluster in m.get('plugin_instance') for m in get_metric_data()]),
TIMEOUT_SECS - (time() - start))
print 'Found!'


def eventually_true(f, timeout_secs):
    """Repeatedly evaluate f until it returns a truthy value.

    Polls every 0.5s; re-raises the AssertionError from the final
    failed attempt once timeout_secs have elapsed.
    """
    deadline = time() + timeout_secs
    while True:
        try:
            assert f()
        except AssertionError:
            if time() > deadline:
                raise
            sleep(0.5)
        else:
            return


if __name__ == "__main__":
    # Exits non-zero (AssertionError) if any cluster's metrics never arrive.
    wait_for_metrics_from_each_cluster()
16 changes: 16 additions & 0 deletions tests/integration/wait_for_es
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
#!/bin/bash

# Block until every Elasticsearch node accepts TCP connections on
# port 9200, then exec the real collectd entrypoint.

wait_for () {
    # Probe with netcat until the port opens.
    until nc -z "$1" 9200; do
        sleep 0.2
    done
}

for node in es17 es24 es53; do
    wait_for "$node"
done

exec /.docker/run.sh

0 comments on commit 4fe5626

Please sign in to comment.