@jschaub30
Last active December 1, 2015 22:03
Shell scripts to test I/O bandwidth using parallel copies of dd
#!/bin/bash
# csv2html.sh: convert a CSV file into an HTML table
[[ $# -ne 1 ]] && echo "Usage: $0 CSV_FN" && exit 1
CSV_FN=$1
echo "<table>"
# First line of the CSV becomes the header row
head -n 1 "$CSV_FN" | \
  sed -e 's/^/<tr><th>/' -e 's/,/<\/th><th>/g' -e 's/$/<\/th><\/tr>/'
# Remaining lines become data rows
tail -n +2 "$CSV_FN" | \
  sed -e 's/^/<tr><td>/' -e 's/,/<\/td><td>/g' -e 's/$/<\/td><\/tr>/'
echo "</table>"
#!/bin/bash
# measure_io.sh: run simultaneous copies of dd in parallel
# - Uses taskset to spread the copies across CPUs (stride of 8; adjust for your system)
# - Each copy writes, then reads back, 1 GiB of data with O_DIRECT
# - Block size defaults to 1 KB unless BLOCK_SIZE_KB is given
# Write test first, then read test
[ "$#" -lt "2" ] && echo Usage: $0 DIRECTORY NUM_COPIES [BLOCK_SIZE_KB] && exit 1
DIRECTORY=$1
NUM_COPIES=$2
BLOCK_SIZE_KB=$3
[ "$#" -lt "3" ] && BLOCK_SIZE_KB=1
MAX_CPUS=$(grep "processor" /proc/cpuinfo | tail -n 1 | cut -d: -f2)
if [[ "$MAX_CPUS" -lt 4 ]]
then
echo Error determining number of machine threads
echo Setting to 8
MAX_CPUS=8
fi
rm -f /tmp/dd_write_out*
rm -f /tmp/dd_read_out*
cd $DIRECTORY
# Write test: each copy writes 1 GiB from /dev/zero with O_DIRECT, pinned to its own CPU
for N in $(seq $NUM_COPIES)
do
  # Stride of 8 CPUs per copy; add an offset once past MAX_CPUS, then wrap with modulo
  CPU=$(((N-1)*8))
  [ "$CPU" -gt "$MAX_CPUS" ] && CPU=$((CPU + 1))
  CPU=$((CPU % $MAX_CPUS))
  taskset -c $CPU dd if=/dev/zero of=$DIRECTORY/dd_data$N bs=${BLOCK_SIZE_KB}k \
    count=$((1024*1024/BLOCK_SIZE_KB)) oflag=direct \
    2> /tmp/dd_write_out$N &
  PID[$N]=$!
done
# Wait for all write PIDs to finish
for N in $(seq $NUM_COPIES)
do
  wait ${PID[$N]}
done
# Sum the per-copy write bandwidths reported by dd (MB/s or GB/s, as printed by dd)
WR_BW=$(grep copied /tmp/dd_write_out* | sed 's/.* s, \(.*\) [MG]B.*/\1/' | paste -sd+ - | bc)
# Read test: each copy reads its file back to /dev/null with O_DIRECT, pinned the same way
for N in $(seq $NUM_COPIES)
do
  CPU=$(((N-1)*8))
  [ "$CPU" -gt "$MAX_CPUS" ] && CPU=$((CPU + 1))
  CPU=$((CPU % $MAX_CPUS))
  taskset -c $CPU dd of=/dev/null if=$DIRECTORY/dd_data$N bs=${BLOCK_SIZE_KB}k \
    iflag=direct \
    2> /tmp/dd_read_out$N &
  PID[$N]=$!
done
# Wait for all read PIDs to finish
for N in $(seq $NUM_COPIES)
do
  wait ${PID[$N]}
done
# Sum the per-copy read bandwidths reported by dd (MB/s or GB/s, as printed by dd)
RD_BW=$(grep copied /tmp/dd_read_out* | sed 's/.* s, \(.*\) [MG]B.*/\1/' | paste -sd+ - | bc)
# Emit one CSV header line and one data line
echo num_copies,block_size_kb,write_bw,read_bw
echo $NUM_COPIES,$BLOCK_SIZE_KB,$WR_BW,$RD_BW
# Clean up the test files
rm $DIRECTORY/dd_data*
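# Example usage (the mount point is a placeholder): run 4 parallel dd copies
# against /mnt/testdisk with a 1 MiB block size; each copy moves 1 GiB, and the
# script prints one CSV header line plus one data line of summed bandwidths:
#   ./measure_io.sh /mnt/testdisk 4 1024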
#!/bin/bash
# Sweep block sizes and copy counts, appending each result to results.csv
[ "$#" -lt 1 ] && echo "Usage: $0 DIRECTORY" && exit 1
mv results.csv results.old 2>/dev/null   # preserve any previous results
for BLOCK_SIZE_KB in 64 256 512 1024
do
  for COPIES in 1 2 4 8 16
  do
    ./measure_io.sh $1 $COPIES $BLOCK_SIZE_KB | tee -a results.csv
  done
done
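# Example usage (the script and directory names are placeholders): sweep all 20
# block-size/copy-count combinations against /mnt/testdisk, appending a header
# line and a data line per run to results.csv:
#   ./run_sweep.sh /mnt/testdisk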
#!/bin/bash
# Tidy up results.csv: keep a single header line, scale GB/s values to MB/s,
# then render the table as HTML
cp results.csv tmp
head -n 1 results.csv > results.tidy.csv    # single header line
grep -v num tmp >> results.tidy.csv         # drop the repeated header lines
# Some dd lines report GB/s rather than MB/s; append "e3" to single-digit decimal values to scale them
perl -pe 's|,(\d\.\d)|,\1e3|g' results.tidy.csv > tmp
cp tmp results.tidy.csv
./csv2html.sh results.tidy.csv > results.html
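# Illustration of the GB/s fix-up above (hypothetical data line): a row such as
#   4,1024,3.4,2.1
# becomes
#   4,1024,3.4e3,2.1e3
# so bandwidths summed in GB/s end up in the same MB/s units as the other rows.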