Last active
October 18, 2016 15:17
-
-
Save sergeycherepanov/a36b0f11c069070690505bc847950808 to your computer and use it in GitHub Desktop.
Elastic stress test script
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
#
# Provision a host and run the logz.io Elasticsearch stress test against a
# local Elasticsearch 1.0.0 docker container.
#
# Usage: bash <(curl https://gist.githubusercontent.com/SergeyCherepanov/a36b0f11c069070690505bc847950808/raw/57d4b6f1b30936be81f13f0200a9c015825da794/elastic-stress-test.sh)
#
# Must run as root: installs packages, partitions disks, mounts volumes.

set -euo pipefail

# The stress-test tool requires Python 2.7.
# The dot is escaped so "2.7" is matched literally (plain '2.7' would also
# match e.g. "217").
python --version 2>&1 | grep -q '2\.7' || {
  apt-get update
  # -y: non-interactive, so an unattended run does not hang on a prompt.
  apt-get install -y python2.7
}

# Bootstrap pip if it is missing.
pip --version >/dev/null 2>&1 || {
  # -fsSL: fail on HTTP errors instead of piping an error page into python.
  /usr/bin/python <(curl -fsSL https://bootstrap.pypa.io/get-pip.py)
}

# Install docker via the official convenience script if it is missing.
docker --version >/dev/null 2>&1 || {
  /bin/sh <(curl -fsSL https://get.docker.com/)
}

# Directory that backs the Elasticsearch data volume.
mkdir -p /srv/elastic-data

# If a dedicated data disk (/dev/sdb) exists, create and format a single
# partition and mount it over the data directory.
if [ -b /dev/sdb ]; then
  [ -b /dev/sdb1 ] || {
    # -s: run parted non-interactively (plain 'mkpart' prompts and would
    # hang an unattended run).
    # NOTE(review): this assumes the disk already carries a partition
    # table; a brand-new disk needs 'mklabel' first — confirm for your
    # machine images.
    parted -s -a optimal /dev/sdb mkpart primary 0% 100%
    mkfs.ext4 /dev/sdb1
  }
  mount | grep -q '/srv/elastic-data' || mount /dev/sdb1 /srv/elastic-data/
fi

# Start the Elasticsearch container unless one is already running.
docker ps | grep -q 'elasticsearch:1.0.0' || {
  # -r keeps backslashes literal; -p prints the prompt before reading.
  read -r -p "Elastic heap size in MB: " heap_size_mb
  gcloud docker pull gcr.io/oro-cloud-development/loc-ada2_elasticsearch:1.0.0
  docker run -d \
    -p 9200:9200 \
    -e ES_HEAP_SIZE="${heap_size_mb}m" \
    -v /srv/elastic-data:/usr/share/elasticsearch/data \
    gcr.io/oro-cloud-development/loc-ada2_elasticsearch:1.0.0
  # Give Elasticsearch a moment to come up before hammering it.
  sleep 10
}

# Fetch the stress-test tool (and its python client library) on first run.
[ -d ~/elasticsearch-stress-test ] || {
  git clone https://github.com/logzio/elasticsearch-stress-test.git ~/elasticsearch-stress-test
  pip install elasticsearch
}

cd ~/elasticsearch-stress-test
python elasticsearch-stress-test.py --es_address 127.0.0.1 --indices 10 --documents 10 --clients 1 --seconds 300 --number-of-shards 1 --number-of-replicas 0 --bulk-size 5000 --max-fields-per-document 10 --max-size-per-field 50 --no-cleanup --stats-frequency 15
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.