Mirror of https://github.com/deviantony/docker-elk.git (synced 2025-12-13 18:29:59 +01:00)
Create release-5.x branch for Elastic 5.x series

.github/workflows/ci.yml (vendored, 153 lines changed)

@@ -2,9 +2,9 @@ name: CI
 
 on:
   push:
-    branches: [ master ]
+    branches: [ release-5.x ]
   pull_request:
-    branches: [ master ]
+    branches: [ release-5.x ]
 
 jobs:
 
@@ -17,25 +17,8 @@ jobs:
     steps:
       - uses: actions/checkout@v2
 
-      #####################################################
-      #                                                   #
-      # Install all dependencies required by test suites. #
-      #                                                   #
-      #####################################################
-
-      - name: Prepare environment
-        run: |
-
-          # Install Linux packages
-          #
-          # List of packages pre-installed in the runner:
-          # https://docs.github.com/en/free-pro-team@latest/actions/reference/specifications-for-github-hosted-runners#supported-software
-
-          sudo apt install -y expect
-
-          # Pre-build container images
-
-          docker-compose build
+      - name: Pre-build container images
+        run: docker-compose build
 
       ########################################################
       #                                                      #
@@ -48,11 +31,9 @@ jobs:
 
           # Change password of 'elastic' user from 'changeme' to 'testpasswd' in config files
 
-          sed -i -e 's/\(xpack.monitoring.elasticsearch.username:\) elastic/\1 logstash_system/g' -e 's/\(xpack.monitoring.elasticsearch.password:\) changeme/\1 testpasswd/g' logstash/config/logstash.yml
+          sed -i 's/\(xpack.monitoring.elasticsearch.password:\) changeme/\1 testpasswd/g' logstash/config/logstash.yml
           sed -i 's/\(password =>\) "changeme"/\1 "testpasswd"/g' logstash/pipeline/logstash.conf
-          sed -i -e 's/\(elasticsearch.username:\) elastic/\1 kibana_system/g' -e 's/\(elasticsearch.password:\) changeme/\1 testpasswd/g' kibana/config/kibana.yml
-          sed -i -e 's/\(elasticsearch.password:\) changeme/\1 testpasswd/g' -e 's/\(secret_management.encryption_keys:\)/\1 [test-encrypt]/g' extensions/enterprise-search/config/enterprise-search.yml
-          sed -i 's/\(password:\) changeme/\1 testpasswd/g' extensions/apm-server/config/apm-server.yml
+          sed -i 's/\(elasticsearch.password:\) changeme/\1 testpasswd/g' kibana/config/kibana.yml
 
           # Run Elasticsearch and wait for its availability
 
@@ -62,7 +43,7 @@ jobs:
 
           # Set passwords
 
-          .github/workflows/scripts/elasticsearch-setup-passwords.exp
+          .github/workflows/scripts/elasticsearch-setup-passwords.sh
 
       ##########################################################
       #                                                        #
@@ -86,94 +67,6 @@ jobs:
           docker-compose logs logstash
           docker-compose logs kibana
 
-      ##############################
-      #                            #
-      # Test supported extensions. #
-      #                            #
-      ##############################
-
-      #
-      # Logspout
-      #
-
-      - name: Execute Logspout test suite
-        run: |
-
-          # Set mandatory Logstash settings
-
-          sed -i '$ a input { udp { port => 5000 codec => json } }' logstash/pipeline/logstash.conf
-
-          # Run Logspout and execute tests.
-          # Logstash will be restarted as a result of building the Logspout
-          # image, so changes above will automatically take effect.
-
-          docker-compose -f docker-compose.yml -f extensions/logspout/logspout-compose.yml up -d logspout
-          .github/workflows/scripts/run-tests-logspout.sh
-
-          # Revert changes to Logstash configuration
-
-          sed -i '/input { udp { port => 5000 codec => json } }/d' logstash/pipeline/logstash.conf
-
-      - name: 'debug: Display state and logs (Logspout)'
-        if: always()
-        run: |
-          docker-compose -f docker-compose.yml -f extensions/logspout/logspout-compose.yml ps
-          docker-compose -f docker-compose.yml -f extensions/logspout/logspout-compose.yml logs logspout
-          # next steps don't need Logspout
-          docker-compose -f docker-compose.yml -f extensions/logspout/logspout-compose.yml stop logspout
-          # next steps don't need Logstash
-          docker-compose stop logstash
-
-      #
-      # Enterprise Search
-      #
-
-      - name: Execute Enterprise Search test suite
-        run: |
-
-          # Set mandatory Elasticsearch settings
-
-          sed -i '$ a xpack.security.authc.api_key.enabled: true' elasticsearch/config/elasticsearch.yml
-
-          # Restart Elasticsearch for changes to take effect
-
-          docker-compose restart elasticsearch
-
-          # Run Enterprise Search and execute tests
-
-          docker-compose -f docker-compose.yml -f extensions/enterprise-search/enterprise-search-compose.yml up -d enterprise-search
-          .github/workflows/scripts/run-tests-enterprise-search.sh
-
-          # Revert changes to Elasticsearch configuration
-
-          sed -i '/xpack.security.authc.api_key.enabled: true/d' elasticsearch/config/elasticsearch.yml
-          docker-compose restart elasticsearch
-
-      - name: 'debug: Display state and logs (Enterprise Search)'
-        if: always()
-        run: |
-          docker-compose -f docker-compose.yml -f extensions/enterprise-search/enterprise-search-compose.yml ps
-          docker-compose -f docker-compose.yml -f extensions/enterprise-search/enterprise-search-compose.yml logs enterprise-search
-          # next steps don't need Enterprise Search
-          docker-compose -f docker-compose.yml -f extensions/enterprise-search/enterprise-search-compose.yml stop enterprise-search
-
-      #
-      # APM Server
-      #
-
-      - name: Execute APM Server test suite
-        run: |
-          docker-compose -f docker-compose.yml -f extensions/apm-server/apm-server-compose.yml up -d apm-server
-          .github/workflows/scripts/run-tests-apm-server.sh
-
-      - name: 'debug: Display state and logs (APM Server)'
-        if: always()
-        run: |
-          docker-compose -f docker-compose.yml -f extensions/apm-server/apm-server-compose.yml ps
-          docker-compose -f docker-compose.yml -f extensions/apm-server/apm-server-compose.yml logs apm-server
-          # next steps don't need APM Server
-          docker-compose -f docker-compose.yml -f extensions/apm-server/apm-server-compose.yml stop apm-server
-
       ##############
       #            #
       # Tear down. #
@@ -182,13 +75,7 @@ jobs:
 
       - name: Terminate all components
         if: always()
-        run: >-
-          docker-compose
-          -f docker-compose.yml
-          -f extensions/logspout/logspout-compose.yml
-          -f extensions/enterprise-search/enterprise-search-compose.yml
-          -f extensions/apm-server/apm-server-compose.yml
-          down -v
+        run: docker-compose down -v
 
   test-swarm:
     name: 'Test suite: Swarm'
@@ -200,22 +87,8 @@ jobs:
     steps:
       - uses: actions/checkout@v2
 
-      #####################################################
-      #                                                   #
-      # Install all dependencies required by test suites. #
-      #                                                   #
-      #####################################################
-
-      - name: Prepare environment
-        run: |
-
-          # Install Linux packages
-
-          sudo apt install -y expect
-
-          # Enable Swarm mode
-
-          docker swarm init
+      - name: Enable Swarm mode
+        run: docker swarm init
 
       ########################################################
       #                                                      #
@@ -228,9 +101,9 @@ jobs:
 
          # Change password of 'elastic' user from 'changeme' to 'testpasswd' in config files
 
-          sed -i -e 's/\(xpack.monitoring.elasticsearch.username:\) elastic/\1 logstash_system/g' -e 's/\(xpack.monitoring.elasticsearch.password:\) changeme/\1 testpasswd/g' logstash/config/logstash.yml
+          sed -i 's/\(xpack.monitoring.elasticsearch.password:\) changeme/\1 testpasswd/g' logstash/config/logstash.yml
           sed -i 's/\(password =>\) "changeme"/\1 "testpasswd"/g' logstash/pipeline/logstash.conf
-          sed -i -e 's/\(elasticsearch.username:\) elastic/\1 kibana_system/g' -e 's/\(elasticsearch.password:\) changeme/\1 testpasswd/g' kibana/config/kibana.yml
+          sed -i 's/\(elasticsearch.password:\) changeme/\1 testpasswd/g' kibana/config/kibana.yml
 
           # Run Elasticsearch and wait for its availability
 
@@ -241,7 +114,7 @@ jobs:
 
           # Set passwords
 
-          .github/workflows/scripts/elasticsearch-setup-passwords.exp swarm
+          .github/workflows/scripts/elasticsearch-setup-passwords.sh swarm
 
       ##########################################################
       #                                                        #
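
For illustration, the `sed` one-liners kept in the workflow above use a backreference to preserve the setting key and swap only its value. A minimal sketch of the same pattern against `logstash/config/logstash.yml` (assuming the file still carries the default `changeme` password, as it does on this branch):

```console
$ grep 'xpack.monitoring.elasticsearch.password' logstash/config/logstash.yml
xpack.monitoring.elasticsearch.password: changeme
$ sed -i 's/\(xpack.monitoring.elasticsearch.password:\) changeme/\1 testpasswd/g' logstash/config/logstash.yml
$ grep 'xpack.monitoring.elasticsearch.password' logstash/config/logstash.yml
xpack.monitoring.elasticsearch.password: testpasswd
```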

.github/workflows/docs.yml (vendored, 4 lines changed)

@@ -4,9 +4,9 @@ on:
   schedule:
     - cron: '0 0 * * 0' # At 00:00 every Sunday
   push:
-    branches: [ master ]
+    branches: [ release-5.x ]
   pull_request:
-    branches: [ master ]
+    branches: [ release-5.x ]
 
 jobs:
 

.github/workflows/lint/markdown.yaml (vendored, 1 line changed)

@@ -34,6 +34,7 @@ MD012: true
 MD013:
   line_length: 120
   code_blocks: false
+  tables: false
 
 # Dollar signs used before commands without showing output <https://github.com/DavidAnson/markdownlint/blob/master/doc/Rules.md#md014>
 MD014: false

.github/workflows/scripts/elasticsearch-setup-passwords.exp (deleted file, 28 lines)

@@ -1,28 +0,0 @@
-#!/usr/bin/expect -f
-
-# List of expected users with dummy password
-set user "(elastic|apm_system|kibana_system|logstash_system|beats_system|remote_monitoring_user)"
-set password "testpasswd"
-
-# Find elasticsearch container id
-set MODE [lindex $argv 0]
-if { [string match "swarm" $MODE] } {
-  set cid [exec docker ps -q -f label=com.docker.swarm.service.name=elk_elasticsearch]
-} else {
-  set cid [exec docker ps -q -f label=com.docker.compose.service=elasticsearch]
-}
-
-set cmd "docker exec -it $cid bin/elasticsearch-setup-passwords interactive -s -b -u http://localhost:9200"
-
-spawn {*}$cmd
-
-expect {
-  -re "(E|Ree)nter password for \\\[$user\\\]: " {
-    send "$password\r"
-    exp_continue
-  }
-  eof
-}
-
-lassign [wait] pid spawnid os_error_flag value
-exit $value

.github/workflows/scripts/elasticsearch-setup-passwords.sh (vendored, new executable file, 41 lines)

@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+
+set -eu
+set -o pipefail
+
+
+source "$(dirname ${BASH_SOURCE[0]})/lib/testing.sh"
+
+
+function set_password {
+  local user=$1
+  local pwd=$2
+
+  local ip
+  ip="$(service_ip elasticsearch)"
+
+  local -a args=( '-s' '-D-' '-w' '%{http_code}' '-H' 'Content-Type: application/json'
+    "http://${ip}:9200/_xpack/security/user/${user}/_password"
+    '-XPUT' "-d{\"password\": \"${pwd}\"}" )
+
+  if [ "$#" -ge 3 ]; then
+    args+=( ${@:3} )
+  fi
+
+  local output
+
+  output="$(curl "${args[@]}" || true)"
+  if [ "${output: -3}" -ne 200 ]; then
+    echo -e "\n${output::-3}"
+    return 1
+  fi
+
+  return 0
+}
+
+users=( 'kibana' 'logstash_system' 'elastic' )
+
+for u in "${users[@]}"; do
+  echo '[+] Setting password for user' "$u"
+  set_password "$u" 'testpasswd' -u 'elastic:changeme'
+done
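
As a sketch of what `set_password` above ends up sending for one user (assuming Elasticsearch is reachable on `localhost:9200` and still accepts the bootstrap `elastic:changeme` credentials), the equivalent standalone request to the 5.x password API would be:

```console
$ curl -s -D- -w '%{http_code}' \
    -H 'Content-Type: application/json' \
    -u elastic:changeme \
    -XPUT 'http://localhost:9200/_xpack/security/user/kibana/_password' \
    -d '{"password": "testpasswd"}'
```

The script wraps this call in the `service_ip` and HTTP-status helpers from `lib/testing.sh` and loops over the `kibana`, `logstash_system` and `elastic` users.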

.github/workflows/scripts/run-tests-apm-server.sh (deleted file, 14 lines)

@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-set -eu
-set -o pipefail
-
-
-source "$(dirname ${BASH_SOURCE[0]})/lib/testing.sh"
-
-
-cid="$(container_id apm-server)"
-ip="$(service_ip apm-server)"
-
-log 'Waiting for readiness of APM Server'
-poll_ready "$cid" "http://${ip}:8200/"

.github/workflows/scripts/run-tests-core.sh (vendored, 6 lines changed)

@@ -19,10 +19,10 @@ log 'Waiting for readiness of Elasticsearch'
 poll_ready "$cid_es" "http://${ip_es}:9200/" -u 'elastic:testpasswd'
 
 log 'Waiting for readiness of Logstash'
-poll_ready "$cid_ls" "http://${ip_ls}:9600/_node/pipelines/main?pretty"
+poll_ready "$cid_ls" "http://${ip_ls}:9600/_node/pipeline?pretty"
 
 log 'Waiting for readiness of Kibana'
-poll_ready "$cid_kb" "http://${ip_kb}:5601/api/status" -u 'kibana_system:testpasswd'
+poll_ready "$cid_kb" "http://${ip_kb}:5601/api/status" -u 'kibana:testpasswd'
 
 log 'Creating Logstash index pattern in Kibana'
 source .env
@@ -34,7 +34,7 @@ curl -X POST -D- "http://${ip_kb}:5601/api/saved_objects/index-pattern" \
   -d '{"attributes":{"title":"logstash-*","timeFieldName":"@timestamp"}}'
 
 log 'Searching index pattern via Kibana API'
-response="$(curl "http://${ip_kb}:5601/api/saved_objects/_find?type=index-pattern" -s -u elastic:testpasswd)"
+response="$(curl "http://${ip_kb}:5601/api/saved_objects/?type=index-pattern&fields=title" -s -u elastic:testpasswd)"
 echo "$response"
 count="$(jq -rn --argjson data "${response}" '$data.total')"
 if [[ $count -ne 1 ]]; then
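
For context, the URL change above tracks the Logstash 5.x monitoring API, where the node info endpoint is singular. A sketch of probing it by hand (assuming Logstash publishes its API on port 9600, as in this stack):

```console
$ curl -s 'http://localhost:9600/_node/pipeline?pretty'
```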

.github/workflows/scripts/run-tests-enterprise-search.sh (deleted file, 40 lines)

@@ -1,40 +0,0 @@
-#!/usr/bin/env bash
-
-set -eu
-set -o pipefail
-
-
-source "$(dirname ${BASH_SOURCE[0]})/lib/testing.sh"
-
-
-cid_es="$(container_id elasticsearch)"
-cid_en="$(container_id enterprise-search)"
-
-ip_es="$(service_ip elasticsearch)"
-ip_en="$(service_ip enterprise-search)"
-
-log 'Waiting for readiness of Elasticsearch'
-poll_ready "$cid_es" "http://${ip_es}:9200/" -u 'elastic:testpasswd'
-
-log 'Waiting for readiness of Enterprise Search'
-poll_ready "$cid_en" "http://${ip_en}:3002/api/ent/v1/internal/health" -u 'elastic:testpasswd'
-
-log 'Retrieving private key from Elasticsearch'
-response="$(curl "http://${ip_es}:9200/.ent-search-actastic-app_search_api_tokens_v2/_search?q=name:private-key" -s -u elastic:testpasswd)"
-hits="$(jq -rn --argjson data "${response}" '$data.hits.hits')"
-echo "$hits"
-count="$(jq -rn --argjson data "${response}" '$data.hits.total.value')"
-if [[ $count -ne 1 ]]; then
-  echo "Private key not found. Expected 1 result, got ${count}"
-  exit 1
-fi
-key="$(jq -rn --argjson data "${hits}" '$data[0]._source.authentication_token')"
-
-log 'Creating App Search engine'
-response="$(curl "http://${ip_en}:3002/api/as/v1/engines" -s -d '{"name": "dockerelk"}' -H "Authorization: Bearer ${key}")"
-echo "$response"
-name="$(jq -rn --argjson data "${response}" '$data.name')"
-if [[ $name != 'dockerelk' ]]; then
-  echo 'Failed to create engine'
-  exit 1
-fi

.github/workflows/scripts/run-tests-logspout.sh (vendored, deleted file, 52 lines)

@@ -1,52 +0,0 @@
-#!/usr/bin/env bash
-
-set -eu
-set -o pipefail
-
-
-source "$(dirname ${BASH_SOURCE[0]})/lib/testing.sh"
-
-
-cid_es="$(container_id elasticsearch)"
-cid_ls="$(container_id logspout)"
-
-ip_es="$(service_ip elasticsearch)"
-ip_ls="$(service_ip logspout)"
-
-log 'Waiting for readiness of Elasticsearch'
-poll_ready "$cid_es" "http://${ip_es}:9200/" -u 'elastic:testpasswd'
-
-log 'Waiting for readiness of Logspout'
-poll_ready "$cid_ls" "http://${ip_ls}/health"
-
-# When Logspout starts, it prints the following log line:
-#   2021/01/07 16:14:52 # logspout v3.2.13-custom by gliderlabs
-#
-# which we expect to find by querying:
-#   docker.image:"docker-elk_logspout" AND message:"logspout gliderlabs"~3
-#
-log 'Searching a log entry forwarded by Logspout'
-
-declare response
-declare -i count
-
-# retry for max 60s (30*2s)
-for _ in $(seq 1 30); do
-  response="$(curl "http://${ip_es}:9200/_count?q=docker.image:%22docker-elk_logspout%22%20AND%20message:%22logspout%20gliderlabs%22~3&pretty" -s -u elastic:testpasswd)"
-  count="$(jq -rn --argjson data "${response}" '$data.count')"
-  if [[ $count -gt 0 ]]; then
-    break
-  fi
-
-  echo -n 'x' >&2
-  sleep 2
-done
-echo -e '\n' >&2
-
-echo "$response"
-# Logspout may restart if Logstash isn't ready yet, so we tolerate multiple
-# results
-if [[ $count -lt 1 ]]; then
-  echo "Expected at least 1 document, got ${count}"
-  exit 1
-fi

.github/workflows/update.yml (vendored, deleted file, 45 lines)

@@ -1,45 +0,0 @@
-name: Update Elastic release
-
-on:
-  schedule:
-    - cron: '0 0 * * 0' # At 00:00 every Sunday
-
-jobs:
-
-  check-and-update:
-    name: Check and update Elastic release
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v2
-
-      - name: Get latest release version
-        uses: octokit/request-action@v2.x
-        id: get_latest_release
-        with:
-          route: GET /repos/:repository/releases/latest
-          repository: elastic/elasticsearch
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Update stack version
-        run: |
-          source .env
-          cur_ver="$ELK_VERSION"
-          new_ver=${{ fromJson(steps.get_latest_release.outputs.data).tag_name }}
-
-          # Escape dot characters so sed interprets them as literal dots
-          cur_ver="$(echo $cur_ver | sed 's/\./\\./g')"
-          # Trim leading 'v' in semantic version
-          new_ver="${new_ver:1}"
-
-          for f in .env docker-stack.yml README.md; do
-            sed -i "s/${cur_ver}/${new_ver}/g" "$f"
-          done
-
-      - name: Send pull request to update to new version
-        uses: peter-evans/create-pull-request@v3
-        with:
-          commit-message: Update to ${{ fromJson(steps.get_latest_release.outputs.data).tag_name }}
-          title: Update to ${{ fromJson(steps.get_latest_release.outputs.data).tag_name }}
-          delete-branch: true

README.md (72 lines changed)

@@ -1,10 +1,13 @@
 # Elastic stack (ELK) on Docker
 
 [](https://www.elastic.co/blog/category/releases)
-[](https://github.com/deviantony/docker-elk/actions?query=workflow%3ACI+branch%3Amaster)
+[](https://github.com/deviantony/docker-elk/actions?query=workflow%3ACI+branch%3Arelease-5.x)
 [](https://gitter.im/deviantony/docker-elk?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 
-Run the latest version of the [Elastic stack][elk-stack] with Docker and Docker Compose.
+Run the version 5.x of the [Elastic stack][elk-stack] with Docker and Docker Compose.
+
+| :warning: Important notice: This release version of the Elastic stack is marked as End Of Life by Elastic and is not supported anymore, see [Elastic maintenance tables](https://www.elastic.co/support/eol) for more details. |
+| :--- |
 
 It gives you the ability to analyze any data set by using the searching/aggregation capabilities of Elasticsearch and
 the visualization power of Kibana.
@@ -122,20 +125,12 @@ exclusively. Make sure the repository is cloned in one of those locations or fol
 
 ### Version selection
 
-This repository tries to stay aligned with the latest version of the Elastic stack. The `master` branch tracks the
-current major version (7.x).
-
 To use a different version of the core Elastic components, simply change the version number inside the `.env` file. If
 you are upgrading an existing stack, please carefully read the note in the next section.
 
 **:warning: Always pay attention to the [official upgrade instructions][upgrade] for each individual component before
 performing a stack upgrade.**
 
-Older major versions are also supported on separate branches:
-
-* [`release-6.x`](https://github.com/deviantony/docker-elk/tree/release-6.x): 6.x series
-* [`release-5.x`](https://github.com/deviantony/docker-elk/tree/release-5.x): 5.x series (End-Of-Life)
-
 ### Bringing up the stack
 
 Clone this repository onto the Docker host that will run the stack, then start services locally using Docker Compose:
@@ -175,26 +170,24 @@ The stack is pre-configured with the following **privileged** bootstrap user:
 Although all stack components work out-of-the-box with this user, we strongly recommend using the unprivileged [built-in
 users][builtin-users] instead for increased security.
 
-1. Initialize passwords for built-in users
+1. Reset passwords for built-in users
 
-    ```console
-    $ docker-compose exec -T elasticsearch bin/elasticsearch-setup-passwords auto --batch
-    ```
+    You can change passwords for built-in users using either Kibana or the Elasticsearch API. Follow the instructions
+    from the following documentation paragraph: [Setting Up User Authentication > Reset Built-in User
+    Passwords][reset-pwds].
 
-    Passwords for all 6 built-in users will be randomly generated. Take note of them.
+1. Disable the default password
 
-1. Unset the bootstrap password (_optional_)
-
-    Remove the `ELASTIC_PASSWORD` environment variable from the `elasticsearch` service inside the Compose file
-    (`docker-compose.yml`). It is only used to initialize the keystore during the initial startup of Elasticsearch.
+    It is important to disable support for the default `changeme` password after resetting the passwords of built-in
+    users. To do this, set the `xpack.security.authc.accept_default_password` setting to `false` in the Elasticsearch
+    configuration file (`elasticsearch/config/elasticsearch.yml`), as instructed in the following documentation
+    paragraph: [Setting Up User Authentication > Disable Default Password Functionality][disable-default-pwd].
 
 1. Replace usernames and passwords in configuration files
 
-    Use the `kibana_system` user (`kibana` for releases <7.8.0) inside the Kibana configuration file
-    (`kibana/config/kibana.yml`) and the `logstash_system` user inside the Logstash configuration file
-    (`logstash/config/logstash.yml`) in place of the existing `elastic` user.
-
-    Replace the password for the `elastic` user inside the Logstash pipeline file (`logstash/pipeline/logstash.conf`).
+    Replace the passwords for the `kibana` user inside the Kibana configuration file (`kibana/config/kibana.yml`), for
+    the `logstash_system` user inside the Logstash configuration file (`logstash/config/logstash.yml`), and for the
+    `elastic` user inside the Logstash pipeline file (`logstash/pipeline/logstash.conf`).
 
     *:information_source: Do not use the `logstash_system` user inside the Logstash **pipeline** file, it does not have
     sufficient permissions to create indices. Follow the instructions at [Configuring Security in Logstash][ls-security]
@@ -257,7 +250,7 @@ Create an index pattern via the Kibana API:
 ```console
 $ curl -XPOST -D- 'http://localhost:5601/api/saved_objects/index-pattern' \
     -H 'Content-Type: application/json' \
-    -H 'kbn-version: 7.10.1' \
+    -H 'kbn-version: 5.6.16' \
    -u elastic:<your generated elastic password> \
     -d '{"attributes":{"title":"logstash-*","timeFieldName":"@timestamp"}}'
 ```
@@ -308,8 +301,9 @@ containers: [Configuring Logstash for Docker][ls-docker].
 
 ### How to disable paid features
 
-Switch the value of Elasticsearch's `xpack.license.self_generated.type` option from `trial` to `basic` (see [License
-settings][trial-license]).
+Disable each X-Pack feature individually by switching the value of their corresponding setting to `false` in the
+`elasticsearch.yml`, `kibana.yml`, and `logstash.yml` configuration files, as described in the following documentation
+page: [Enabling and Disabling X-Pack Features][xpack-disable].
 
 ### How to scale out the Elasticsearch cluster
 
@@ -428,30 +422,34 @@ instead of `elasticsearch`.*
 
 [linux-postinstall]: https://docs.docker.com/install/linux/linux-postinstall/
 
-[booststap-checks]: https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
-[es-sys-config]: https://www.elastic.co/guide/en/elasticsearch/reference/current/system-config.html
+[booststap-checks]: https://www.elastic.co/guide/en/elasticsearch/reference/5.6/bootstrap-checks.html
+[es-sys-config]: https://www.elastic.co/guide/en/elasticsearch/reference/5.6/system-config.html
 
 [win-shareddrives]: https://docs.docker.com/docker-for-windows/#shared-drives
 [mac-mounts]: https://docs.docker.com/docker-for-mac/osxfs/
 
-[builtin-users]: https://www.elastic.co/guide/en/elasticsearch/reference/current/built-in-users.html
+[builtin-users]: https://www.elastic.co/guide/en/x-pack/5.6/setting-up-authentication.html#built-in-users
+[reset-pwds]: https://www.elastic.co/guide/en/x-pack/5.6/setting-up-authentication.html#reset-built-in-user-passwords
+[disable-default-pwd]: https://www.elastic.co/guide/en/x-pack/5.6/setting-up-authentication.html#disabling-default-password
 [ls-security]: https://www.elastic.co/guide/en/logstash/current/ls-security.html
 [sec-tutorial]: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-getting-started.html
 
-[connect-kibana]: https://www.elastic.co/guide/en/kibana/current/connect-to-elasticsearch.html
-[index-pattern]: https://www.elastic.co/guide/en/kibana/current/index-patterns.html
+[connect-kibana]: https://www.elastic.co/guide/en/kibana/5.6/connect-to-elasticsearch.html
+[index-pattern]: https://www.elastic.co/guide/en/kibana/5.6/index-patterns.html
 
 [config-es]: ./elasticsearch/config/elasticsearch.yml
 [config-kbn]: ./kibana/config/kibana.yml
 [config-ls]: ./logstash/config/logstash.yml
 
-[es-docker]: https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html
-[kbn-docker]: https://www.elastic.co/guide/en/kibana/current/docker.html
-[ls-docker]: https://www.elastic.co/guide/en/logstash/current/docker-config.html
+[es-docker]: https://www.elastic.co/guide/en/elasticsearch/reference/5.6/docker.html
+[kbn-docker]: https://www.elastic.co/guide/en/kibana/5.6/docker.html
+[ls-docker]: https://www.elastic.co/guide/en/logstash/5.6/docker.html
+
+[xpack-disable]: https://www.elastic.co/guide/en/x-pack/5.6/installing-xpack.html#xpack-enabling
 
 [log4j-props]: https://github.com/elastic/logstash/tree/7.6/docker/data/logstash/config
 [esuser]: https://github.com/elastic/elasticsearch/blob/7.6/distribution/docker/src/docker/Dockerfile#L23-L24
 
-[upgrade]: https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html
+[upgrade]: https://www.elastic.co/guide/en/elasticsearch/reference/5.6/setup-upgrade.html
 
 [swarm-mode]: https://docs.docker.com/engine/swarm/

docker-compose.yml

@@ -19,9 +19,8 @@ services:
       - "9300:9300"
     environment:
       ES_JAVA_OPTS: "-Xmx256m -Xms256m"
-      ELASTIC_PASSWORD: changeme
       # Use single node discovery in order to disable production mode and avoid bootstrap checks.
-      # see: https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
+      # see: https://www.elastic.co/guide/en/elasticsearch/reference/5.6/bootstrap-checks.html
       discovery.type: single-node
     networks:
       - elk

docker-stack.yml

@@ -3,7 +3,7 @@ version: '3.3'
 services:
 
   elasticsearch:
-    image: docker.elastic.co/elasticsearch/elasticsearch:7.10.1
+    image: docker.elastic.co/elasticsearch/elasticsearch:5.6.16
     ports:
       - "9200:9200"
       - "9300:9300"
@@ -12,9 +12,8 @@ services:
         target: /usr/share/elasticsearch/config/elasticsearch.yml
     environment:
       ES_JAVA_OPTS: "-Xmx256m -Xms256m"
-      ELASTIC_PASSWORD: changeme
       # Use single node discovery in order to disable production mode and avoid bootstrap checks.
-      # see: https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
+      # see: https://www.elastic.co/guide/en/elasticsearch/reference/5.6/bootstrap-checks.html
       discovery.type: single-node
       # Force publishing on the 'elk' overlay.
       network.publish_host: _eth0_
@@ -25,7 +24,7 @@ services:
       replicas: 1
 
   logstash:
-    image: docker.elastic.co/logstash/logstash:7.10.1
+    image: docker.elastic.co/logstash/logstash:5.6.16
     ports:
       - "5044:5044"
       - "5000:5000"
@@ -44,7 +43,7 @@ services:
       replicas: 1
 
   kibana:
-    image: docker.elastic.co/kibana/kibana:7.10.1
+    image: docker.elastic.co/kibana/kibana:5.6.16
     ports:
       - "5601:5601"
     configs:
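
For context, this stack file is meant to be deployed onto a Swarm cluster rather than run through `docker-compose`. A sketch of the corresponding commands (assuming Swarm mode has already been initialised with `docker swarm init`, as the CI workflow above does, and that the stack is named `elk` as the setup script expects):

```console
$ docker stack deploy -c docker-stack.yml elk
$ docker stack services elk
```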

elasticsearch/config/elasticsearch.yml

@@ -1,13 +1,14 @@
 ---
 ## Default Elasticsearch configuration from Elasticsearch base image.
-## https://github.com/elastic/elasticsearch/blob/master/distribution/docker/src/docker/config/elasticsearch.yml
+## https://github.com/elastic/elasticsearch-docker/blob/5.6/build/elasticsearch/elasticsearch.yml
 #
 cluster.name: "docker-cluster"
 network.host: 0.0.0.0
 
 ## X-Pack settings
-## see https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-xpack.html
+## see https://www.elastic.co/guide/en/elasticsearch/reference/5.6/setup-xpack.html
 #
-xpack.license.self_generated.type: trial
 xpack.security.enabled: true
-xpack.monitoring.collection.enabled: true
+xpack.monitoring.enabled: true
+# enable default 'changeme' passwords
+xpack.security.authc.accept_default_password: true

extensions/enterprise-search/Dockerfile (deleted file, 4 lines)

@@ -1,4 +0,0 @@
-ARG ELK_VERSION
-
-# https://www.docker.elastic.co/
-FROM docker.elastic.co/enterprise-search/enterprise-search:${ELK_VERSION}

extensions/enterprise-search/README.md (deleted file, 134 lines)

@@ -1,134 +0,0 @@
-# Enterprise Search extension
-
-Elastic Enterprise Search is a suite of products for search applications backed by the Elastic Stack.
-
-## Requirements
-
-* 2 GB of free RAM, on top of the resources required by the other stack components and extensions.
-
-Enterprise Search exposes the TCP port `3002` for its Web UI and API.
-
-## Usage
-
-### Generate an encryption key
-
-Enterprise Search requires one or more [encryption keys][enterprisesearch-encryption] to be configured before the
-initial startup. Failing to do so prevents the server from starting.
-
-Encryption keys can contain any series of characters. Elastic recommends using 256-bit keys for optimal security.
-
-Those encryption keys must be added manually to the [`config/enterprise-search.yml`][config-enterprisesearch] file. By
-default, the list of encryption keys is empty and must be populated using one of the following formats:
-
-```yaml
-secret_management.encryption_keys:
-  - my_first_encryption_key
-  - my_second_encryption_key
-  - ...
-```
-
-```yaml
-secret_management.encryption_keys: [my_first_encryption_key, my_second_encryption_key, ...]
-```
-
-> :information_source: To generate a strong encryption key, for example using the AES-256 cipher, you can use the
-> OpenSSL utility or any other online/offline tool of your choice:
->
-> ```console
-> $ openssl enc -aes-256 -P
->
-> enter aes-256-cbc encryption password: <a strong password>
-> Verifying - enter aes-256-cbc encryption password: <repeat your strong password>
-> ...
->
-> key=<generated AES key>
-> ```
-
-### Enable Elasticsearch's API key service
-
-Enterprise Search requires Elasticsearch's built-in [API key service][es-security] to be enabled in order to start.
-Unless Elasticsearch is configured to enable TLS on the HTTP interface (disabled by default), this service is disabled
-by default.
-
-To enable it, modify the Elasticsearch configuration file in [`elasticsearch/config/elasticsearch.yml`][config-es] and
-add the following setting:
-
-```yaml
-xpack.security.authc.api_key.enabled: true
-```
-
-### Start the server
-
-To include Enterprise Search in the stack, run Docker Compose from the root of the repository with an additional command
-line argument referencing the `enterprise-search-compose.yml` file:
-
-```console
-$ docker-compose -f docker-compose.yml -f extensions/enterprise-search/enterprise-search-compose.yml up
-```
-
-Allow a few minutes for the stack to start, then open your web browser at the address <http://localhost:3002> to see the
-Enterprise Search home page.
-
-Enterprise Search is configured on first boot with the following default credentials:
-
-* user: *enterprise_search*
-* password: *changeme*
-
-## Security
-
-The Enterprise Search password is defined inside the Compose file via the `ENT_SEARCH_DEFAULT_PASSWORD` environment
-variable. We highly recommend choosing a more secure password than the default one for security reasons.
-
-To do so, change the value `ENT_SEARCH_DEFAULT_PASSWORD` environment variable inside the Compose file **before the first
-boot**:
-
-```yaml
-enterprise-search:
-
-  environment:
-    ENT_SEARCH_DEFAULT_PASSWORD: {{some strong password}}
-```
-
-> :warning: The default Enterprise Search password can only be set during the initial boot. Once the password is
-> persisted in Elasticsearch, it can only be changed via the Elasticsearch API.
-
-For more information, please refer to [User Management and Security][enterprisesearch-security].
-
-## Configuring Enterprise Search
-
-The Enterprise Search configuration is stored in [`config/enterprise-search.yml`][config-enterprisesearch]. You can
-modify this file using the [Default Enterprise Search configuration][enterprisesearch-config] as a reference.
-
-You can also specify the options you want to override by setting environment variables inside the Compose file:
-
-```yaml
-enterprise-search:
-
-  environment:
-    ent_search.auth.source: standard
-    worker.threads: '6'
-```
-
-Any change to the Enterprise Search configuration requires a restart of the Enterprise Search container:
-
-```console
-$ docker-compose -f docker-compose.yml -f extensions/enterprise-search/enterprise-search-compose.yml restart enterprise-search
-```
-
-Please refer to the following documentation page for more details about how to configure Enterprise Search inside a
-Docker container: [Running Enterprise Search Using Docker][enterprisesearch-docker].
-
-## See also
-
-[Enterprise Search documentation][enterprisesearch-docs]
-
-[config-enterprisesearch]: ./config/enterprise-search.yml
-
-[enterprisesearch-encryption]: https://www.elastic.co/guide/en/enterprise-search/current/encryption-keys.html
-[enterprisesearch-security]: https://www.elastic.co/guide/en/workplace-search/current/workplace-search-security.html
-[enterprisesearch-config]: https://www.elastic.co/guide/en/enterprise-search/current/configuration.html
-[enterprisesearch-docker]: https://www.elastic.co/guide/en/enterprise-search/current/docker.html
-[enterprisesearch-docs]: https://www.elastic.co/guide/en/enterprise-search/current/index.html
-
-[es-security]: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html#api-key-service-settings
-[config-es]: ../../elasticsearch/config/elasticsearch.yml

extensions/enterprise-search/config/enterprise-search.yml (deleted file, 27 lines)

@@ -1,27 +0,0 @@
----
-## Enterprise Search core configuration
-## https://www.elastic.co/guide/en/enterprise-search/current/configuration.html
-#
-
-## --------------------- REQUIRED ---------------------
-
-# Encryption keys to protect application secrets.
-secret_management.encryption_keys:
-  # add encryption keys below
-  #- add encryption keys here
-
-## ----------------------------------------------------
-
-# IP address Enterprise Search listens on
-ent_search.listen_host: 0.0.0.0
-
-# URL at which users reach Enterprise Search
-ent_search.external_url: http://localhost:3002
-
-# Elasticsearch URL and credentials
-elasticsearch.host: http://elasticsearch:9200
-elasticsearch.username: elastic
-elasticsearch.password: changeme
-
-# Allow Enterprise Search to modify Elasticsearch settings. Used to enable auto-creation of Elasticsearch indexes.
-allow_es_settings_modification: true

extensions/enterprise-search/enterprise-search-compose.yml (deleted file, 22 lines)

@@ -1,22 +0,0 @@
-version: '3.2'
-
-services:
-  enterprise-search:
-    build:
-      context: extensions/enterprise-search/
-      args:
-        ELK_VERSION: $ELK_VERSION
-    volumes:
-      - type: bind
-        source: ./extensions/enterprise-search/config/enterprise-search.yml
-        target: /usr/share/enterprise-search/config/enterprise-search.yml
-        read_only: true
-    environment:
-      JAVA_OPTS: -Xmx2g -Xms2g
-      ENT_SEARCH_DEFAULT_PASSWORD: changeme
-    ports:
-      - '3002:3002'
-    networks:
-      - elk
-    depends_on:
-      - elasticsearch

kibana/config/kibana.yml

@@ -1,13 +1,13 @@
 ---
 ## Default Kibana configuration from Kibana base image.
-## https://github.com/elastic/kibana/blob/master/src/dev/build/tasks/os_packages/docker_generator/templates/kibana_yml.template.ts
+## https://github.com/elastic/kibana-docker/blob/5.6/build/kibana/config/kibana.yml
 #
 server.name: kibana
-server.host: 0.0.0.0
-elasticsearch.hosts: [ "http://elasticsearch:9200" ]
-monitoring.ui.container.elasticsearch.enabled: true
+server.host: "0"
+elasticsearch.url: http://elasticsearch:9200
+xpack.monitoring.ui.container.elasticsearch.enabled: true
 
 ## X-Pack security credentials
 #
-elasticsearch.username: elastic
+elasticsearch.username: kibana
 elasticsearch.password: changeme

logstash/config/logstash.yml

@@ -1,12 +1,13 @@
 ---
 ## Default Logstash configuration from Logstash base image.
-## https://github.com/elastic/logstash/blob/master/docker/data/logstash/config/logstash-full.yml
+## https://github.com/elastic/logstash-docker/blob/5.6/build/logstash/config/logstash.yml
 #
 http.host: "0.0.0.0"
-xpack.monitoring.elasticsearch.hosts: [ "http://elasticsearch:9200" ]
+path.config: /usr/share/logstash/pipeline
+xpack.monitoring.elasticsearch.url: http://elasticsearch:9200
 
 ## X-Pack security credentials
 #
 xpack.monitoring.enabled: true
-xpack.monitoring.elasticsearch.username: elastic
+xpack.monitoring.elasticsearch.username: logstash_system
 xpack.monitoring.elasticsearch.password: changeme

logstash/pipeline/logstash.conf

@@ -15,6 +15,5 @@ output {
     hosts => "elasticsearch:9200"
     user => "elastic"
     password => "changeme"
-    ecs_compatibility => disabled
   }
 }