travis: Move reusable functions to bash lib
This commit is contained in:
parent
c680149e42
commit
b000267d86
|
@ -24,12 +24,12 @@ before_script:
|
||||||
- sed -i 's/\(password =>\) "changeme"/\1 "testpasswd"/g' logstash/pipeline/logstash.conf
|
- sed -i 's/\(password =>\) "changeme"/\1 "testpasswd"/g' logstash/pipeline/logstash.conf
|
||||||
|
|
||||||
script:
|
script:
|
||||||
# Compose
|
# Core Elastic Stack
|
||||||
- docker-compose up -d elasticsearch
|
- docker-compose up -d elasticsearch
|
||||||
- sleep 30
|
- sleep 30
|
||||||
- .travis/elasticsearch-setup-passwords.exp
|
- .travis/elasticsearch-setup-passwords.exp
|
||||||
- docker-compose up -d
|
- docker-compose up -d
|
||||||
- .travis/run-tests.sh
|
- .travis/run-tests-core.sh
|
||||||
- docker-compose ps
|
- docker-compose ps
|
||||||
- docker-compose logs elasticsearch
|
- docker-compose logs elasticsearch
|
||||||
- docker-compose logs kibana
|
- docker-compose logs kibana
|
||||||
|
@ -45,7 +45,7 @@ script:
|
||||||
- .travis/elasticsearch-setup-passwords.exp swarm
|
- .travis/elasticsearch-setup-passwords.exp swarm
|
||||||
- docker service scale elk_kibana=1 --detach=false
|
- docker service scale elk_kibana=1 --detach=false
|
||||||
- docker service scale elk_logstash=1 --detach=false
|
- docker service scale elk_logstash=1 --detach=false
|
||||||
- .travis/run-tests.sh swarm
|
- .travis/run-tests-core.sh swarm
|
||||||
- docker stack services elk
|
- docker stack services elk
|
||||||
- docker service logs elk_elasticsearch
|
- docker service logs elk_elasticsearch
|
||||||
- docker service logs elk_kibana
|
- docker service logs elk_kibana
|
||||||
|
|
|
@ -0,0 +1,50 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# Print a message to stdout, framed by blank lines for readability.
#
# Arguments:
#   $1 - message to print
#
# Uses printf instead of 'echo -e' so that backslash sequences embedded in
# the message are printed literally rather than being expanded.
function log {
	printf '\n[+] %s\n\n' "$1"
}
|
||||||
|
|
||||||
|
# Poll a service's HTTP endpoint until it answers 200 OK, or give up.
#
# Globals:
#   MODE - "swarm" to locate the container by Swarm service label,
#          anything else to use the Compose service label
# Arguments:
#   $1 - service name (e.g. "elasticsearch")
#   $2 - URL to poll
#   $3 - (optional) credentials as 'user:password', passed to curl -u
# Outputs:
#   Progress dots while waiting, then the final response body, to stdout.
# Returns:
#   0 once the endpoint answers 200; 1 if the container exits or the
#   endpoint never becomes ready within the retry budget.
function poll_ready {
	local svc=$1
	local url=$2

	# '-w %{http_code}' appends the 3-digit status code to the captured output.
	local -a args=( '-s' '-D-' '-w' '%{http_code}' "$url" )
	if [ "$#" -ge 3 ]; then
		args+=( '-u' "$3" )
	fi

	# The label used to find the container differs between Swarm and Compose.
	local label
	if [ "$MODE" == "swarm" ]; then
		label="com.docker.swarm.service.name=elk_${svc}"
	else
		label="com.docker.compose.service=${svc}"
	fi

	local -i result=1
	local cid
	local output

	# retry for max 120s (24*5s); brace expansion avoids forking 'seq'
	for _ in {1..24}; do
		# Bail out early if the container is gone — no point polling further.
		cid="$(docker ps -q -f label="$label")"
		if [ -z "${cid:-}" ]; then
			echo "Container exited"
			return 1
		fi

		# curl is expected to fail while the service is still starting up
		# (connection refused), so suspend 'errexit' around it.
		set +e
		output="$(curl "${args[@]}")"
		set -e
		# Last three characters are the HTTP status code (see -w above).
		if [ "${output: -3}" -eq 200 ]; then
			result=0
			break
		fi

		echo -n '.'
		sleep 5
	done

	# Print the response body for diagnostics, with the trailing status code
	# stripped. ${output%???} is safe on bash <4.2 and on short strings,
	# unlike ${output::-3}.
	echo -e "\n${output%???}"

	return $result
}
|
|
@ -3,67 +3,22 @@
|
||||||
set -eu
set -o pipefail


# Shared test helpers: log(), poll_ready().
# Both dirname's argument and the command substitution are quoted so the
# script keeps working when its path contains spaces (ShellCheck SC2086).
source "$(dirname "${BASH_SOURCE[0]}")/lib/testing.sh"


# Optional first argument selects the orchestrator ("swarm"); defaults to
# Docker Compose when omitted. poll_ready reads MODE to choose which
# container label to filter on.
declare MODE=""
if [ "$#" -ge 1 ]; then
	MODE=$1
fi

log 'Waiting for readiness of Elasticsearch'
poll_ready elasticsearch 'http://localhost:9200/' 'elastic:testpasswd'

log 'Waiting for readiness of Kibana'
poll_ready kibana 'http://localhost:5601/api/status' 'kibana:testpasswd'

log 'Waiting for readiness of Logstash'
poll_ready logstash 'http://localhost:9600/_node/pipelines/main?pretty'

log 'Creating Logstash index pattern in Kibana'
|
|
@ -59,8 +59,8 @@ Other available stack variants:
|
||||||
|
|
||||||
### Host setup
|
### Host setup
|
||||||
|
|
||||||
* [Docker Engine](https://docs.docker.com/install/) version **17.05** or newer
|
* [Docker Engine](https://docs.docker.com/install/) version **17.05** or newer
|
||||||
* [Docker Compose](https://docs.docker.com/compose/install/) version **1.20.0** or newer
|
* [Docker Compose](https://docs.docker.com/compose/install/) version **1.20.0** or newer
|
||||||
* 1.5 GB of RAM
|
* 1.5 GB of RAM
|
||||||
|
|
||||||
> :information_source: Especially on Linux, make sure your user has the [required permissions][linux-postinstall] to
|
> :information_source: Especially on Linux, make sure your user has the [required permissions][linux-postinstall] to
|
||||||
|
|
Loading…
Reference in New Issue