#!/bin/bash
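
# Index size comparison test: index a sample Apache access log through the
# Filebeat apache2 module, then reindex it into six differently configured
# indices (default mappings, reduced fields, best_compression, _all disabled,
# _all-style query instead of the _all field, and a combination) and record
# their on-disk sizes. Assumes Elasticsearch is reachable at 127.0.0.1:9200.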

if [[ -z "${FILEBEAT_HOME}" ]]; then
    echo 'ERROR: Environment variable FILEBEAT_HOME not defined, aborting.'
    exit 1
fi
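
# Refuse to run against leftover data: every test index must be absent so the
# sizes recorded at the end reflect only this run.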

echo "Verify test indices do not already exist..."

COUNT=$(curl -s 127.0.0.1:9200/_cat/indices | grep -cE "filebeat-test|filebeat_es_defaults|filebeat_reduced|filebeat_best|filebeat_noall|filebeat_allquery|filebeat_combined")

if [[ $COUNT -ne 0 ]]; then
    echo 'ERROR: One or more indices used in this test already exist. Aborting.'
    exit 1
fi

echo "Download sample access log..."
if [ ! -f ./sample_access_log ] && [ ! -f ./sample_access_log.gz ]; then
    curl -s -O https://s3.amazonaws.com/users.elasticsearch.org/cdahlqvist/files/sample_access_log.gz
fi

echo "Unzip sample access log..."
if [ ! -f ./sample_access_log ]; then
    gunzip ./sample_access_log.gz
fi
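
# Both helper scripts are expected in the current directory: they create the
# index templates that define each test index's settings and mappings, and the
# ingest pipelines (convert_filebeat_fields, remove_fields) used while
# reindexing below.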

echo "Load index templates..."
source load_templates.sh >> test.log

echo "Load ingest pipelines..."
source load_pipelines.sh >> test.log

echo "Load data through Filebeat..."
$FILEBEAT_HOME/filebeat -e -modules=apache2 -setup -c ./filebeat.yml &

echo "Wait until all 280000 log entries have been indexed..."
COUNT=0

while [ "$COUNT" -eq 0 ]
do
    sleep 10
    COUNT=$(curl -s 127.0.0.1:9200/_cat/indices | grep "filebeat-test" | grep -c 280000)
done

echo "Terminate Filebeat..."
ps -ef | grep filebeat.yml | grep -v grep | awk '{print $2}' | xargs kill
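
# Copy the indexed data into each test configuration: _reindex creates the
# destination index (which picks up its settings from the matching template)
# and, where specified, runs each document through the named ingest pipeline.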

echo "Reindex data with default Elasticsearch mappings..."
curl -s -XPOST 127.0.0.1:9200/_reindex -H 'Content-Type: application/json' -d '{
    "source": {
        "index": "filebeat-test"
    },
    "dest": {
        "index": "filebeat_es_defaults",
        "pipeline": "convert_filebeat_fields"
    }
}' >> test.log

echo "Reindex data with reduced number of fields..."
curl -s -XPOST 127.0.0.1:9200/_reindex -H 'Content-Type: application/json' -d '{
    "source": {
        "index": "filebeat-test"
    },
    "dest": {
        "index": "filebeat_reduced",
        "pipeline": "remove_fields"
    }
}' >> test.log

echo "Reindex data with best_compression enabled..."
curl -s -XPOST 127.0.0.1:9200/_reindex -H 'Content-Type: application/json' -d '{
    "source": {
        "index": "filebeat-test"
    },
    "dest": {
        "index": "filebeat_best"
    }
}' >> test.log

echo "Reindex data with _all field disabled..."
curl -s -XPOST 127.0.0.1:9200/_reindex -H 'Content-Type: application/json' -d '{
    "source": {
        "index": "filebeat-test"
    },
    "dest": {
        "index": "filebeat_noall"
    }
}' >> test.log

echo "Reindex data enhanced for _all query instead of _all field..."
curl -s -XPOST 127.0.0.1:9200/_reindex -H 'Content-Type: application/json' -d '{
    "source": {
        "index": "filebeat-test"
    },
    "dest": {
        "index": "filebeat_allquery"
    }
}' >> test.log

echo "Reindex data with reduced number of fields, best_compression enabled, and _all query instead of _all field..."
curl -s -XPOST 127.0.0.1:9200/_reindex -H 'Content-Type: application/json' -d '{
    "source": {
        "index": "filebeat-test"
    },
    "dest": {
        "index": "filebeat_combined",
        "pipeline": "remove_fields"
    }
}' >> test.log

echo "Wait until all records processed..."
COUNT=0

while [ "$COUNT" -ne 7 ]
do
    sleep 10
    COUNT=$(curl -s 127.0.0.1:9200/_cat/indices | grep filebeat | grep -c 280000)
done

echo "Perform _forcemerge..."
curl -s -XPOST '127.0.0.1:9200/filebeat*/_forcemerge?max_num_segments=1' >> test.log

sleep 60

echo "Processing completed. Record results..."
curl -s '127.0.0.1:9200/_cat/indices?bytes=b' | grep filebeat | sort > ./index_size.txt
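
# index_size.txt now lists one line per test index with its size in bytes.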