Add logstash ingestion for collectd

This change allows logstash to ingest metrics from collectd. New
options have been added to enable and configure the collectd input.

Change-Id: I995c0db69fc68d5f5bcae27ce16956876368e2a8
Signed-off-by: cloudnull <kevin@cloudnull.com>
cloudnull 2019-02-06 22:20:51 -06:00
parent 38f817aee7
commit 03d25dce3d
4 changed files with 74 additions and 0 deletions
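
The new input is off by default. A minimal sketch of how a deployer might
enable it, assuming the overrides live in a user variables file for the
Logstash hosts (the file name and placement are illustrative, not part of
this change):

# hypothetical override file, e.g. user_logstash_collectd.yml
logstash_collectd_input_enabled: true
# The port and buffer size below simply restate the shipped defaults and
# only need to be set when deviating from them.
logstash_collectd_port: 25826
logstash_collectd_buffer_size: 1452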

@@ -91,3 +91,12 @@ logstash_arcsight_event_brokers: []
# be derived automatically using 1/4 of the available RAM for logstash and 1/2
# of the available RAM for elasticsearch. The value is expected to be in MiB.
# logstash_heap_size: 10240 # type `int`

# Collectd ingestion options
logstash_collectd_input_enabled: false
logstash_collectd_port: 25826
logstash_collectd_buffer_size: 1452
# Security level can be ["Sign", "Encrypt"].
logstash_collectd_security_level: Sign
# To enable security, the auth file is required.
#logstash_collectd_authfile: /etc/collectd/passwd
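
To ingest signed or encrypted collectd traffic, the auth file must also be
provided and must already exist on the Logstash host; the pre-flight check
added in the tasks below enforces that. A minimal sketch, assuming the
commented default path is used and the collectd passwd file has been
distributed out of band:

# hypothetical security-enabled override
logstash_collectd_input_enabled: true
# "Sign" verifies packet signatures, "Encrypt" also decrypts payloads.
logstash_collectd_security_level: Sign
# Must exist on the Logstash host, or the play fails during the check below.
logstash_collectd_authfile: /etc/collectd/passwd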

@@ -25,6 +25,25 @@
  tags:
    - always

- name: Check collectd plugin
  block:
    - name: Check for collectd authentication file
      stat:
        path: "{{ logstash_collectd_authfile }}"
      register: _logstash_collectd
      when:
        - logstash_collectd_authfile is defined

    - name: Notify collectd plugin failure
      fail:
        msg: >-
          The logstash collectd plugin was enabled with security,
          however the authentication file was not found.
      when:
        - _logstash_collectd.stat is defined
        - not (_logstash_collectd.stat.exists | bool)
  when:
    - logstash_collectd_input_enabled | bool

- name: Ensure Logstash is installed
  package:
    name: "{{ logstash_distro_packages }}"

@@ -37,6 +37,22 @@
  }
{% endif %}
}
{% endif %}
{% if logstash_collectd_input_enabled | bool %}
input {
  udp {
    port => {{ logstash_collectd_port }}
    buffer_size => {{ logstash_collectd_buffer_size }}
    codec => collectd { }
{% if (logstash_collectd_security_level is defined) and (logstash_collectd_authfile is defined) %}
    security_level => "{{ logstash_collectd_security_level }}"
    authfile => "{{ logstash_collectd_authfile }}"
{% endif %}
    add_field => {
      "[@metadata][source_type]" => "collectd"
    }
  }
}
{% endif %}
filter {
  if [@metadata][source_type] == "syslog" {
@@ -45,6 +61,12 @@
    }
  }
  if [@metadata][source_type] == "collectd" {
    mutate {
      add_tag => ["collectd"]
    }
  }
  # NOTE(mnaser): Filebeat doesn't support shipping to different outputs
  #               which means we need to parse `auditd` fileset here rather
  #               than rely on ingest.
@@ -496,6 +518,15 @@
      manage_template => {{ (data_node | bool) | lower }}
      index => "syslog-%{+YYYY.MM.dd}"
    }
  } else if "collectd" in [tags] {
    elasticsearch {
      id => "elasticsearchCollectdDocIDOutputPipeline"
      document_id => "%{[@metadata][fingerprint]}"
      hosts => ["{{ '127.0.0.1:' ~ elastic_port }}"]
      sniffing => {{ (elastic_sniffing_enabled | default(not data_node)) | bool | string | lower }}
      manage_template => {{ (data_node | bool) | lower }}
      index => "collectd-%{+YYYY.MM.dd}"
    }
  } else {
    elasticsearch {
      id => "elasticsearchUndefinedDocIDOutputPipeline"
@@ -531,6 +562,14 @@
      manage_template => {{ (data_node | bool) | lower }}
      index => "syslog-%{+YYYY.MM.dd}"
    }
  } else if "collectd" in [tags] {
    elasticsearch {
      id => "elasticsearchCollectdOutputPipeline"
      hosts => ["{{ '127.0.0.1:' ~ elastic_port }}"]
      sniffing => {{ (elastic_sniffing_enabled | default(not data_node)) | bool | string | lower }}
      manage_template => {{ (data_node | bool) | lower }}
      index => "collectd-%{+YYYY.MM.dd}"
    }
  } else {
    elasticsearch {
      id => "elasticsearchUndefinedOutputPipeline"

@@ -14,6 +14,13 @@ elastic_hap_port: 9201
logstash_beat_input_port: 5044
logstash_syslog_input_port: 5140
logstash_syslog_input_mode: udp
logstash_collectd_input_enabled: false
logstash_collectd_port: 25826
logstash_collectd_buffer_size: 1452
# Security level can be ["Sign", "Encrypt"].
logstash_collectd_security_level: Sign
# To enable security, the auth file is required.
#logstash_collectd_authfile: /etc/collectd/passwd
kibana_port: 5601
kibana_nginx_port: 81