Skip to main content

ELK

What
#

ELK is an acronym that stands for Elasticsearch, Logstash, and Kibana. Together, these three components provide a powerful, integrated solution for managing large volumes of data, offering real-time insights and a comprehensive analytics suite

Why
#

Provides valuable insights on failure, diagnosis, application performance, infrastructure monitoring and data visualization

How
#

Folder structure:

elk
├── elasticsearch
|   └── Dockerfile
├── kibana
│   ├── Dockerfile
│   └── config
│   │   └── kibana.yml
├── logstash
│   ├── Dockerfile
│   └── pipeline
│   │   └── logstash.conf
├── filebeat
│   └── filebeat.yml
├── setup
│   ├── Dockerfile
│   └── entrypoint.sh
├── .env
└── docker-compose-elk.yaml 

Configure networks, volumes for data and common certificates storage on docker-compose-elk.yaml

# Named volumes: one for the shared TLS certificates and one per service's data.
volumes:
  certs:
    driver: local
  esdata:
    driver: local
  kibanadata:
    driver: local
  logstashdata:
    driver: local

# All services join a single project-managed bridge network named elk-net.
networks:
  default:
    name: elk-net
    external: false

Create a .env file with the environment variables required by the ELK Stack

# Project namespace (defaults to the current folder name if not set)
COMPOSE_PROJECT_NAME=elk

# Password for the 'elastic' user (at least 6 characters)
ELASTIC_PASSWORD=changeme

# Password for the 'kibana_system' user (at least 6 characters)
KIBANA_PASSWORD=changeme

# Version of Elastic products
#https://www.elastic.co/downloads/past-releases#elasticsearch
STACK_VERSION=8.17.0

# Set the cluster name
CLUSTER_NAME=docker-cluster

# Set to 'basic' or 'trial' to automatically start the 30-day trial
LICENSE=basic
#LICENSE=trial

# Port to expose Elasticsearch HTTP API to the host
ES_PORT=9200

# Port to expose Kibana to the host
KIBANA_PORT=5601

# Increase or decrease based on the available host memory (in bytes)
# Elasticsearch gets ~3 GiB; Kibana and Logstash ~1 GiB each.
ES_MEM_LIMIT=3073741824
KB_MEM_LIMIT=1073741824
LS_MEM_LIMIT=1073741824

# SAMPLE Predefined Key only to be used in POC environments
# (generate a fresh 32-byte hex key for anything beyond a POC)
ENCRYPTION_KEY=c34d38b3a14956121ff2170e5030b471551370178f43e5626eec58b04a30fae2

Setup
#

Runs a temporary Elasticsearch container to set system passwords and create SSL certificates.

Use the following Dockerfile:

# Build arg selects the Elastic Stack version; falls back to 8.17.0 if unset.
ARG STACK_VERSION
FROM docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION:-8.17.0}

# All initialization work lives in the bind-mounted entrypoint script.
ENTRYPOINT ["/entrypoint.sh"]

entrypoint.sh will hold all the tasks necessary to initialize Elasticsearch for the first time:

  • Create certificates
  • Create system passwords
#!/usr/bin/env bash
# One-shot initialization for the ELK stack:
#   1. validate that the required passwords are set,
#   2. generate a CA and per-service TLS certificates (idempotent),
#   3. wait for Elasticsearch, then relax disk watermarks (POC only),
#   4. set the kibana_system user's password.

# Fail early when required secrets are missing; -z is the portable
# POSIX emptiness test (the original `[ x$VAR == x ]` relies on a bashism).
if [ -z "${ELASTIC_PASSWORD}" ]; then
	echo "Set the ELASTIC_PASSWORD environment variable in the .env file";
	exit 1;
elif [ -z "${KIBANA_PASSWORD}" ]; then
	echo "Set the KIBANA_PASSWORD environment variable in the .env file";
	exit 1;
fi;

# Create the certificate authority once; the zip acts as a done-marker
# so re-running the container does not regenerate (and invalidate) certs.
if [ ! -f config/certs/ca.zip ]; then
	echo "Creating CA";
	bin/elasticsearch-certutil ca --silent --pem -out config/certs/ca.zip;
	unzip config/certs/ca.zip -d config/certs;
fi;

# Issue one certificate per service, each valid for its compose DNS name,
# localhost and 127.0.0.1, all signed by the CA created above.
if [ ! -f config/certs/certs.zip ]; then
	echo "Creating certs";
	echo -ne \
	"instances:\n"\
	" - name: elasticsearch\n"\
	"   dns:\n"\
	"     - elasticsearch\n"\
	"     - localhost\n"\
	"   ip:\n"\
	"     - 127.0.0.1\n"\
	" - name: kibana\n"\
	"   dns:\n"\
	"     - kibana\n"\
	"     - localhost\n"\
	"   ip:\n"\
	"     - 127.0.0.1\n"\
	" - name: logstash\n"\
	"   dns:\n"\
	"     - logstash\n"\
	"     - localhost\n"\
	"   ip:\n"\
	"     - 127.0.0.1\n"\
	> config/certs/instances.yml;
	bin/elasticsearch-certutil cert --silent --pem -out config/certs/certs.zip --in config/certs/instances.yml --ca-cert config/certs/ca/ca.crt --ca-key config/certs/ca/ca.key;
	unzip config/certs/certs.zip -d config/certs;
fi;

echo "Setting file permissions"
chown -R root:root config/certs;
find . -type d -exec chmod 750 \{\} \;;
find . -type f -exec chmod 640 \{\} \;;

echo "Waiting for Elasticsearch availability";
# An unauthenticated request returns "missing authentication credentials"
# once the HTTPS endpoint is up and security is active — that is our readiness signal.
until curl -s --cacert config/certs/ca/ca.crt https://elasticsearch:9200 | grep -q "missing authentication credentials"; do sleep 30; done;

# NOTE: the tiny watermark values below are only suitable for POC environments.
echo "Configure watermark levels for low on disk space. More info: https://www.elastic.co/guide/en/elasticsearch/reference/current/fix-watermark-errors.html"
curl --retry 10 --retry-delay 5 -s --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -X PUT "https://elasticsearch:9200/_cluster/settings" -H "Content-Type: application/json" -d"
{
	\"transient\": {
	\"cluster.routing.allocation.disk.watermark.low\": \"30mb\",
	\"cluster.routing.allocation.disk.watermark.high\": \"20mb\",
	\"cluster.routing.allocation.disk.watermark.flood_stage\":\"10mb\",
	\"cluster.info.update.interval\": \"1m\"
	}
}
"
# The original script printed this message BEFORE the watermark call and
# labeled the password call "set up precondition for poc system" — swapped here
# so each message matches the action it precedes.
echo "Setting kibana_system password";
curl -s --retry 10 --retry-delay 5 -X POST --cacert config/certs/ca/ca.crt -u "elastic:${ELASTIC_PASSWORD}" -H "Content-Type: application/json" https://elasticsearch:9200/_security/user/kibana_system/_password -d "{\"password\":\"${KIBANA_PASSWORD}\"}"

echo "All done!";

Finally add the setup service to docker-compose-elk.yaml

services:
  # One-shot init container: creates certs and sets passwords, then exits.
  setup:
    build:
      context: setup/
      args:
        STACK_VERSION: ${STACK_VERSION}
    init: true
    user: "0"  # run as root so it can chown the shared certs volume
    volumes:
      - certs:/usr/share/elasticsearch/config/certs
      - ./setup/entrypoint.sh:/entrypoint.sh:ro,Z
    environment:
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD:-}
      KIBANA_PASSWORD: ${KIBANA_PASSWORD:-}
    healthcheck:
      # Healthy once the elasticsearch certificate has been generated
      # (stray whitespace inside the original test command removed).
      test: ["CMD-SHELL", "[ -f config/certs/elasticsearch/elasticsearch.crt ]"]
      interval: 1s
      timeout: 5s
      retries: 120

ElasticSearch
#

Use the following Dockerfile

# Build arg selects the Elastic Stack version; falls back to 8.17.0 if unset.
ARG STACK_VERSION
FROM docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION:-8.17.0}

Add a new service to docker-compose-elk.yaml

# Single-node, security-enabled Elasticsearch with TLS on both HTTP and transport.
# NOTE: the original snippet indented the environment list with TABS, which the
# YAML spec forbids for indentation — normalized to spaces here.
elasticsearch:
  build:
    context: elasticsearch/
    args:
      STACK_VERSION: ${STACK_VERSION}
  volumes:
    - certs:/usr/share/elasticsearch/config/certs
    - esdata:/usr/share/elasticsearch/data:Z
  ports:
    # Quoted so YAML never mis-types the port mappings.
    - "${ES_PORT}:9200"
    - "9300:9300"
  environment:
    - node.name=elasticsearch
    - cluster.name=${CLUSTER_NAME}
    - discovery.type=single-node
    - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
    - bootstrap.memory_lock=true
    - xpack.security.enabled=true
    - xpack.security.http.ssl.enabled=true
    - xpack.security.http.ssl.key=certs/elasticsearch/elasticsearch.key
    - xpack.security.http.ssl.certificate=certs/elasticsearch/elasticsearch.crt
    - xpack.security.http.ssl.certificate_authorities=certs/ca/ca.crt
    - xpack.security.transport.ssl.enabled=true
    - xpack.security.transport.ssl.key=certs/elasticsearch/elasticsearch.key
    - xpack.security.transport.ssl.certificate=certs/elasticsearch/elasticsearch.crt
    - xpack.security.transport.ssl.certificate_authorities=certs/ca/ca.crt
    - xpack.security.transport.ssl.verification_mode=certificate
    - xpack.license.self_generated.type=${LICENSE}
    - xpack.ml.use_auto_machine_memory_percent=true
  mem_limit: ${ES_MEM_LIMIT}
  ulimits:
    memlock:
      soft: -1
      hard: -1
  healthcheck:
    # Same readiness probe the setup script uses: security answering over HTTPS.
    test:
      [
        "CMD-SHELL",
        "curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'",
      ]
    interval: 10s
    timeout: 10s
    retries: 120

Kibana
#

Use the following Dockerfile

# Build arg selects the Elastic Stack version; falls back to 8.17.0 if unset.
ARG STACK_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/kibana/kibana:${STACK_VERSION:-8.17.0}

Add a config file ./kibana/config/kibana.yml:

## Default Kibana configuration from Kibana base image.
## https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/templates/kibana_yml.template.ts
#
server.name: kibana
server.host: 0.0.0.0
# NOTE(review): the compose service also sets ELASTICSEARCH_HOSTS=https://elasticsearch:9200,
# which takes precedence over this http URL — confirm TLS is used end to end.
elasticsearch.hosts: [ http://elasticsearch:9200 ]

monitoring.ui.container.elasticsearch.enabled: true
monitoring.ui.container.logstash.enabled: true

Add a new service to docker-compose-elk.yaml

# Kibana served over HTTPS, authenticating to Elasticsearch as kibana_system.
kibana:
  build:
    context: kibana/
    # args must be nested under build — in the original it was a sibling of
    # build, which Compose rejects, so STACK_VERSION was never passed.
    args:
      STACK_VERSION: ${STACK_VERSION}
  volumes:
    - certs:/usr/share/kibana/config/certs
    - kibanadata:/usr/share/kibana/data:Z
    - ./kibana/config/kibana.yml:/usr/share/kibana/config/kibana.yml:ro,Z
  ports:
    # Use the port declared in .env, matching how ES_PORT is used; the
    # default keeps the original 5601 mapping when the variable is unset.
    - "${KIBANA_PORT:-5601}:5601"
  environment:
    - SERVERNAME=kibana
    - ELASTICSEARCH_HOSTS=https://elasticsearch:9200
    - ELASTICSEARCH_USERNAME=kibana_system
    - ELASTICSEARCH_PASSWORD=${KIBANA_PASSWORD}
    - ELASTICSEARCH_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt
    - XPACK_SECURITY_ENCRYPTIONKEY=${ENCRYPTION_KEY}
    - XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY=${ENCRYPTION_KEY}
    - XPACK_REPORTING_ENCRYPTIONKEY=${ENCRYPTION_KEY}
    - XPACK_REPORTING_KIBANASERVER_HOSTNAME=localhost
    - SERVER_SSL_ENABLED=true
    - SERVER_SSL_CERTIFICATE=config/certs/kibana/kibana.crt
    - SERVER_SSL_KEY=config/certs/kibana/kibana.key
    - SERVER_SSL_CERTIFICATEAUTHORITIES=config/certs/ca/ca.crt
  mem_limit: ${KB_MEM_LIMIT}
  healthcheck:
    # Kibana answers the root URL with a 302 redirect once it is up.
    test:
      [
        "CMD-SHELL",
        "curl -I -s --cacert config/certs/ca/ca.crt https://localhost:5601 | grep -q 'HTTP/1.1 302 Found'",
      ]
    interval: 10s
    timeout: 10s
    retries: 120
  depends_on:
    elasticsearch:
      condition: service_healthy

Logstash
#

Use the following Dockerfile

# Build arg selects the Elastic Stack version; falls back to 8.17.0 if unset.
ARG STACK_VERSION
# https://www.docker.elastic.co/
FROM docker.elastic.co/logstash/logstash:${STACK_VERSION:-8.17.0}
# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json

Add a config file ./logstash/pipeline/logstash.conf:

input {

  file {
    # Change to mode => "read" if the file is a complete (static) file.
    # By default, the file will be removed once reading is complete -- backup your files if you need them.
    mode => "tail"
    path => "/usr/share/logstash/ingest_data/*.log"
  }
}

output {
  elasticsearch {
    # One index per day, e.g. logstash-2025.01.31.
    index => "logstash-%{+YYYY.MM.dd}"
    # Connection settings come from the environment defined in docker-compose-elk.yaml.
    hosts=> "${ELASTIC_HOSTS}"
    user=> "${ELASTIC_USER}"
    password=> "${ELASTIC_PASSWORD}"
    cacert=> "certs/ca/ca.crt"

  }

}

Add a new service to docker-compose-elk.yaml

# Logstash pipeline container; starts only after Elasticsearch and Kibana
# report healthy, and ingests host logs bind-mounted into ingest_data/.
logstash:
  build:
    context: logstash/
    args:
      STACK_VERSION: ${STACK_VERSION}
  labels:
    co.elastic.logs/module: logstash
  user: root
  depends_on:
    elasticsearch:
      condition: service_healthy
    kibana:
      condition: service_healthy
  environment:
    - xpack.monitoring.enabled=false
    - ELASTIC_USER=elastic
    - ELASTIC_PASSWORD=${ELASTIC_PASSWORD}
    - ELASTIC_HOSTS=https://elasticsearch:9200
    - CA_CERT=certs/ca/ca.crt
  volumes:
    - certs:/usr/share/logstash/certs
    - logstashdata:/usr/share/logstash/data
    # Host directory whose *.log files feed the file input of the pipeline.
    - "/log/folder/:/usr/share/logstash/ingest_data/"
    - "./logstash/pipeline/logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro"
  ports:
    - "5044:5044"        # Beats input (used by Filebeat below)
    - "50000:50000/tcp"
    - "50000:50000/udp"
    - "9600:9600"        # Logstash monitoring API

Filebeat (local)
#

Install filebeat locally:

curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-8.17.1-linux-x86_64.tar.gz
tar xzvf filebeat-8.17.1-linux-x86_64.tar.gz

Config file:

filebeat.inputs:
- type: filestream
  id: my-fcc-filebeat
  enabled: true
  # Replace the placeholder with the absolute path to your log directory.
  paths:
   - /.../logs/*.log

# Ship events to the Logstash Beats input exposed on the host at port 5044.
output.logstash:
  enabled: true
  hosts: ["localhost:5044"]
  # TLS material copied from the shared certs volume (placeholder paths).
  ssl.certificate_authorities: ["/../elk/ca.crt"]
  ssl.certificate: "/../elk/logstash.crt"
  ssl.key: "/../elk/logstash.key"

# Flip enabled to true to debug the event stream on stdout instead.
output.console:
  enabled: false
  pretty: true

Run filebeat:

./filebeat -e -c filebeat.yml