Add variables to connect ELK and Grafana

With the option to deploy grafana, the following changes allow a user to
automatically connect ELK and Grafana.

Change-Id: Ic8e64a31d860940c6863f46ce558908d5ef8f8e7
Signed-off-by: Kevin Carter <kevin.carter@rackspace.com>
Kevin Carter 2018-04-13 17:37:38 -05:00
parent 969a30c6c7
commit 390314e18b
5 changed files with 116 additions and 13 deletions


@@ -131,6 +131,22 @@ instances
cd /opt/openstack-ansible-ops/elk_metrics_6x
openstack-ansible installMetricbeat.yml
Adding Grafana visualizations
-----------------------------
See the grafana directory for more information on how to deploy grafana. When
deploying grafana, source the variable file from ELK in order to
automatically connect grafana to the Elasticsearch datastore and import
dashboards. Including the variable file is as simple as adding
``-e @../elk_metrics_6x/vars/variables.yml`` to the grafana playbook
run.
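
A minimal sketch of such a run, assuming the playbook in the grafana
directory is named ``installGrafana.yml`` (check that directory for the
actual playbook name)::

   cd /opt/openstack-ansible-ops/grafana
   openstack-ansible installGrafana.yml -e @../elk_metrics_6x/vars/variables.yml
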
Included dashboards:

* https://grafana.com/dashboards/5569
* https://grafana.com/dashboards/5566
Troubleshooting
----------------


@@ -324,7 +324,6 @@ filebeat.prospectors:
  # to add additional information to the crawled log files for filtering
  tags:
    - openstack
    - oslofmt
    - cinder

- type: log
@@ -366,7 +365,6 @@ filebeat.prospectors:
  # to add additional information to the crawled log files for filtering
  tags:
    - openstack
    - oslofmt
    - glance

- type: log
@@ -408,7 +406,6 @@ filebeat.prospectors:
  # to add additional information to the crawled log files for filtering
  tags:
    - openstack
    - oslofmt
    - heat

- type: log
@@ -450,7 +447,6 @@ filebeat.prospectors:
  # to add additional information to the crawled log files for filtering
  tags:
    - openstack
    - oslofmt
    - horizon

- type: log
@@ -492,7 +488,6 @@ filebeat.prospectors:
  # to add additional information to the crawled log files for filtering
  tags:
    - openstack
    - oslofmt
    - keystone

- type: log
@@ -534,7 +529,6 @@ filebeat.prospectors:
  # to add additional information to the crawled log files for filtering
  tags:
    - openstack
    - oslofmt
    - neutron

- type: log
@@ -576,7 +570,6 @@ filebeat.prospectors:
  # to add additional information to the crawled log files for filtering
  tags:
    - openstack
    - oslofmt
    - nova

- type: log
@@ -618,7 +611,6 @@ filebeat.prospectors:
  # to add additional information to the crawled log files for filtering
  tags:
    - openstack
    - oslofmt
    - octavia

- type: log


@@ -16,3 +16,90 @@ kibana_server_name: "{{ ansible_hostname }}"
# logstash vars
logstash_beat_input_port: 5044
# Grafana
grafana_dashboards:
  - dashboard_id: 5566
    revision_id: 0
    datasource: "metricbeat-Elasticsearch"
  - dashboard_id: 5569
    revision_id: 0
    datasource: "filebeat-Elasticsearch"

grafana_datasources:
  - name: "all-Elasticsearch"
    type: "elasticsearch"
    access: "proxy"
    url: "http://{{ internal_lb_vip_address | default(hostvars[groups['elastic-logstash'][0]]['ansible_host']) }}:{{ elastic_hap_port }}"
    basicAuth: false
    basicAuthUser: ""
    basicAuthPassword: ""
    isDefault: true
    database: "*"
    jsonData:
      esVersion: 56
      keepCookies: []
      maxConcurrentShardRequests: 256
      timeField: "@timestamp"
      timeInterval: ">10s"
  - name: "auditbeat-Elasticsearch"
    type: "elasticsearch"
    access: "proxy"
    url: "http://{{ internal_lb_vip_address | default(hostvars[groups['elastic-logstash'][0]]['ansible_host']) }}:{{ elastic_hap_port }}"
    basicAuth: false
    basicAuthUser: ""
    basicAuthPassword: ""
    isDefault: false
    database: "auditbeat-*"
    jsonData:
      esVersion: 56
      keepCookies: []
      maxConcurrentShardRequests: 256
      timeField: "@timestamp"
      timeInterval: ">10s"
  - name: "filebeat-Elasticsearch"
    type: "elasticsearch"
    access: "proxy"
    url: "http://{{ internal_lb_vip_address | default(hostvars[groups['elastic-logstash'][0]]['ansible_host']) }}:{{ elastic_hap_port }}"
    basicAuth: false
    basicAuthUser: ""
    basicAuthPassword: ""
    isDefault: false
    database: "filebeat-*"
    jsonData:
      esVersion: 56
      keepCookies: []
      maxConcurrentShardRequests: 256
      timeField: "@timestamp"
      timeInterval: ">10s"
  - name: "metricbeat-Elasticsearch"
    type: "elasticsearch"
    access: "proxy"
    url: "http://{{ internal_lb_vip_address | default(hostvars[groups['elastic-logstash'][0]]['ansible_host']) }}:{{ elastic_hap_port }}"
    basicAuth: false
    basicAuthUser: ""
    basicAuthPassword: ""
    isDefault: false
    database: "metricbeat-*"
    jsonData:
      esVersion: 56
      keepCookies: []
      maxConcurrentShardRequests: 256
      timeField: "@timestamp"
      timeInterval: ">10s"
  - name: "packetbeat-Elasticsearch"
    type: "elasticsearch"
    access: "proxy"
    url: "http://{{ internal_lb_vip_address | default(hostvars[groups['elastic-logstash'][0]]['ansible_host']) }}:{{ elastic_hap_port }}"
    basicAuth: false
    basicAuthUser: ""
    basicAuthPassword: ""
    isDefault: false
    database: "packetbeat-*"
    jsonData:
      esVersion: 56
      keepCookies: []
      maxConcurrentShardRequests: 256
      timeField: "@timestamp"
      timeInterval: ">10s"
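
The datasource URLs above are assembled from internal_lb_vip_address and
elastic_hap_port, so a deployer can point Grafana at a specific Elasticsearch
endpoint by overriding those two variables. A minimal sketch of an override
file with hypothetical values, passed to the playbook run with
-e @user-grafana.yml:

   # user-grafana.yml -- example values only, adjust for your environment
   internal_lb_vip_address: 172.29.236.100   # load balancer VIP in front of the elastic-logstash nodes
   elastic_hap_port: 9201                    # port the Elasticsearch frontend answers on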


@@ -71,7 +71,6 @@
when:
- (groups['galera_all'] | default([])) | length > 0
pre_tasks:
- name: Ensure https repos function
apt:
pkg: "apt-transport-https"


@@ -4,9 +4,18 @@ grafana_db_user: grafana
grafana_db_password: SuperSecrete
grafana_admin_password: admin
grafana_users:
  allow_sign_up: False
  allow_org_create: false
  auto_assign_org: false
  auto_assign_org_role: Viewer
  default_theme: light
grafana_security:
  admin_user: admin
  admin_password: "{{ grafana_admin_password }}"
grafana_address: "{{ ansible_host }}"
grafana_plugins:
  - grafana-piechart-panel
grafana_api_keys:
  - name: "admin"
    role: "Admin"
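
The admin and database passwords above are placeholder values and would
normally be overridden for a real deployment. Once the playbooks have run, one
way to confirm that the Elasticsearch datasources were registered is Grafana's
HTTP API. A minimal sketch, assuming Grafana is listening on its default port
3000 and one of the API keys created from grafana_api_keys has been exported
as GRAFANA_KEY (both are assumptions, adjust to the deployment):

   # GRAFANA_HOST is assumed to be the host set in grafana_address; 3000 is Grafana's default port
   # lists the datasources Grafana knows about; the *-Elasticsearch entries should appear
   curl -s -H "Authorization: Bearer ${GRAFANA_KEY}" "http://${GRAFANA_HOST}:3000/api/datasources"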