{ system ? builtins.currentSystem,
  config ? {},
  pkgs ? import ../.. { inherit system config; },
  enableUnfree ? false
  # To run the test on the unfree ELK use the following command:
  # NIXPKGS_ALLOW_UNFREE=1 nix-build nixos/tests/elk.nix -A ELK-6 --arg enableUnfree true
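  # By default (enableUnfree = false) the free -oss packages are tested, e.g.:
  # nix-build nixos/tests/elk.nix -A ELK-7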
}:
with import ../lib/testing.nix { inherit system pkgs; };
|
2017-12-18 19:53:54 +00:00
|
|
|
with pkgs.lib;
let
  esUrl = "http://localhost:9200";
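
  # totalHits builds a shell one-liner that queries the Elasticsearch _search
  # endpoint for documents whose "message" field matches the given string and
  # pipes the response through jq to print the hit count; the test script
  # below combines it with grep, e.g. "${totalHits "flowers"} | grep -v 0".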
  totalHits = message :
    "curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' " +
      ''-d '{\"query\" : { \"match\" : { \"message\" : \"${message}\"}}}' '' +
      "| jq .hits.total";
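
  # mkElkTest builds one NixOS VM test per ELK package set; mapAttrs at the
  # bottom of this file applies it to the ELK-6 and ELK-7 attribute sets, so
  # each attribute name becomes the test name.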
  mkElkTest = name : elk :
    let elasticsearchGe7 = builtins.compareVersions elk.elasticsearch.version "7" >= 0;
    in makeTest {
    inherit name;
    meta = with pkgs.stdenv.lib.maintainers; {
      maintainers = [ eelco offline basvandijk ];
    };

    nodes = {
      one =
        { pkgs, lib, ... }: {
          # Not giving the machine at least 2060MB results in elasticsearch failing with the following error:
          #
          # OpenJDK 64-Bit Server VM warning:
          # INFO: os::commit_memory(0x0000000085330000, 2060255232, 0)
          # failed; error='Cannot allocate memory' (errno=12)
          #
          # There is insufficient memory for the Java Runtime Environment to continue.
          # Native memory allocation (mmap) failed to map 2060255232 bytes for committing reserved memory.
          #
          # When setting this to 2500 I got "Kernel panic - not syncing: Out of
          # memory: compulsory panic_on_oom is enabled" so let's give it even a
          # bit more room:
          virtualisation.memorySize = 3000;

          # For querying JSON objects returned from elasticsearch and kibana.
          environment.systemPackages = [ pkgs.jq ];

          services = {
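            # The journalbeat configuration differs between major versions: for
            # journalbeat < 6 the elasticsearch template is disabled via
            # "template.enabled: false", while journalbeat >= 6 instead expects
            # a journalbeat.inputs section, hence the lt6 version check below.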
            journalbeat = let lt6 = builtins.compareVersions
                                      elk.journalbeat.version "6" < 0; in {
              enable = true;
              package = elk.journalbeat;
              extraConfig = mkOptionDefault (''
                logging:
                  to_syslog: true
                  level: warning
                  metrics.enabled: false
                output.elasticsearch:
                  hosts: [ "127.0.0.1:9200" ]
                  ${optionalString lt6 "template.enabled: false"}
              '' + optionalString (!lt6) ''
                journalbeat.inputs:
                - paths: []
                  seek: cursor
              '');
            };
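
            # Logstash is fed two one-second exec inputs ("flowers" and
            # "dragons"); the filter drops every "dragons" event, so the test
            # script below expects "flowers" both in /tmp/logstash.out and in
            # elasticsearch, and "dragons" in neither.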
            logstash = {
              enable = true;
              package = elk.logstash;
              inputConfig = ''
                exec { command => "echo -n flowers" interval => 1 type => "test" }
                exec { command => "echo -n dragons" interval => 1 type => "test" }
              '';
              filterConfig = ''
                if [message] =~ /dragons/ {
                  drop {}
                }
              '';
              outputConfig = ''
                file {
                  path => "/tmp/logstash.out"
                  codec => line { format => "%{message}" }
                }
                elasticsearch {
                  hosts => [ "${esUrl}" ]
                }
              '';
            };
            elasticsearch = {
              enable = true;
              package = elk.elasticsearch;
            };
            kibana = {
              enable = true;
              package = elk.kibana;
            };
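
            # The curator action below deletes logstash-* indices older than one
            # second; the test script stops logstash, starts the curator and
            # then checks that no logstash index is left in /_cat/indices.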
            elasticsearch-curator = {
              # The current version of curator (5.6) doesn't support elasticsearch >= 7.0.0.
              enable = !elasticsearchGe7;
              actionYAML = ''
                ---
                actions:
                  1:
                    action: delete_indices
                    description: >-
                      Delete indices older than 1 second (based on index name), for logstash-
                      prefixed indices. Ignore the error if the filter does not result in an
                      actionable list of indices (ignore_empty_list) and exit cleanly.
                    options:
                      ignore_empty_list: True
                      disable_action: False
                    filters:
                    - filtertype: pattern
                      kind: prefix
                      value: logstash-
                    - filtertype: age
                      source: name
                      direction: older
                      timestring: '%Y.%m.%d'
                      unit: seconds
                      unit_count: 1
              '';
            };
          };
        };
    };
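
    # The test script uses the Perl test driver methods (waitForUnit,
    # waitForOpenPort, waitUntilSucceeds, ...); everything runs on the single
    # node defined above as "one".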
    testScript = ''
      startAll;

      # Wait until elasticsearch is listening for connections.
      $one->waitForUnit("elasticsearch.service");
      $one->waitForOpenPort(9200);

      # Continue as long as the status is not "red". The status is probably
      # "yellow" instead of "green" because we are using a single elasticsearch
      # node which elasticsearch considers risky.
      #
      # TODO: extend this test with multiple elasticsearch nodes
      # and see if the status turns "green".
      $one->waitUntilSucceeds(
        "curl --silent --show-error '${esUrl}/_cluster/health' " .
        "| jq .status | grep -v red");
|
2017-06-13 21:36:08 +01:00
|
|
|
|
2017-12-18 19:53:54 +00:00
|
|
|
# Perform some simple logstash tests.
|
|
|
|
$one->waitForUnit("logstash.service");
|
|
|
|
$one->waitUntilSucceeds("cat /tmp/logstash.out | grep flowers");
|
|
|
|
$one->waitUntilSucceeds("cat /tmp/logstash.out | grep -v dragons");

      # See if kibana is healthy.
      $one->waitForUnit("kibana.service");
      $one->waitUntilSucceeds(
        "curl --silent --show-error 'http://localhost:5601/api/status' " .
        "| jq .status.overall.state | grep green");

      # See if logstash messages arrive in elasticsearch.
      $one->waitUntilSucceeds("${totalHits "flowers"} | grep -v 0");
      $one->waitUntilSucceeds("${totalHits "dragons"} | grep 0");

      # Test if a message logged to the journal
      # is ingested by elasticsearch via journalbeat.
      $one->waitForUnit("journalbeat.service");
      $one->execute("echo 'Supercalifragilisticexpialidocious' | systemd-cat");
      $one->waitUntilSucceeds(
        "${totalHits "Supercalifragilisticexpialidocious"} | grep -v 0");

    '' + optionalString (!elasticsearchGe7) ''
      # Test elasticsearch-curator.
      $one->systemctl("stop logstash");
      $one->systemctl("start elasticsearch-curator");
      $one->waitUntilSucceeds(
        "! curl --silent --show-error '${esUrl}/_cat/indices' " .
        "| grep logstash | grep -q ^$1");
    '';
  };
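
# Each attribute below becomes one test: mapAttrs passes the attribute name
# ("ELK-6" or "ELK-7") and the corresponding package set to mkElkTest, and
# enableUnfree switches from the -oss packages to the unfree ones.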
in mapAttrs mkElkTest {
  ELK-6 =
    if enableUnfree
    then {
      elasticsearch = pkgs.elasticsearch6;
      logstash = pkgs.logstash6;
      kibana = pkgs.kibana6;
      journalbeat = pkgs.journalbeat6;
    }
    else {
      elasticsearch = pkgs.elasticsearch6-oss;
      logstash = pkgs.logstash6-oss;
      kibana = pkgs.kibana6-oss;
      journalbeat = pkgs.journalbeat6;
    };
  ELK-7 =
    if enableUnfree
    then {
      elasticsearch = pkgs.elasticsearch7;
      logstash = pkgs.logstash7;
      kibana = pkgs.kibana7;
      journalbeat = pkgs.journalbeat7;
    }
    else {
      elasticsearch = pkgs.elasticsearch7-oss;
      logstash = pkgs.logstash7-oss;
      kibana = pkgs.kibana7-oss;
      journalbeat = pkgs.journalbeat7;
    };
}