Skip to content

Instantly share code, notes, and snippets.

@vitobotta
Created January 31, 2021 17:56
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save vitobotta/311d96a20e49b04878f61750507b1b06 to your computer and use it in GitHub Desktop.
ttfb
#!/usr/bin/env bash
#
# Shows time in seconds to first byte of a url or urls
#
# Based on a gist https://gist.github.com/sandeepraju/1f5fbdbdd89551ba7925abe2645f92b5
# by https://github.com/sandeepraju
#
# Modified by jay@gooby.org, @jaygooby
#
# Usage: ttfb [options] url [url...]
# -d debug
# -l <log file> (infers -d) log response headers. Defaults to ./curl.log
# -n <number> of times to test time to first byte
# -v verbose output. Show request breakdown during -n calls
# -b <cookie file> use a cookie file with the requests
# -r <hostname:ip> resolve the hostname to a specific IP
# -p use the HAProxy PROXY protocol for the requests
#
# Examples:
#
# ttfb https://example.com/example/url
# .098974
#
# ttfb -n 5 https://example.com/
# .....
# fastest .099195 slowest .103138 median .099684
#
# ttfb -n 5 bbc.co.uk news.bbc.co.uk
# .....
# .....
# bbc.co.uk fastest .045873 slowest .046870 median .045999
# news.bbc.co.uk fastest .042286 slowest .060245 median .046035
#
# ttfb bbc.co.uk news.bbc.co.uk
# bbc.co.uk .048378
# news.bbc.co.uk .049303
#
# Implicitly follows redirects using curl's -L option.
#
# Log all response headers (default log file is ./curl.log) by calling with -d
#
# Override the default log file by specifying -l /some/file
#
# Get min, max and median values by specifying the number of times to call
# the URL; use -n2 for 2 tests, -n5 for 5 and so on.
#
# If you specify more than one url and have specified -d or -l, the log file
# will be prefixed with the URL being requested.
#
# If you specify -n and -d or -l, the response headers from the consecutive
# requests will be concatenated in the log file.
#
# See https://blog.cloudflare.com/a-question-of-timing/
# and https://curl.haxx.se/docs/manpage.html for an explanation
# of how the curl variables relate to the various stages of
# the transfer.
#
# To get a better approximation of devtool's TTFB, consider
# the time without the connection overhead:
# %{time_starttransfer} - %{time_appconnect}
#
# Uses a dirty eval to do the ttfb arithmetic. Depends
# on bc and column commands.
# abort on unhandled command failure (-e) and unset variables (-u)
set -eu
# check dependencies: use a { …; } group rather than a subshell so the
# `exit 1` actually terminates the script, and `command -v` instead of
# the non-portable `which`
for dependency in curl bc column; do
command -v "$dependency" > /dev/null || { echo "You need to have '$dependency' installed and in your \$PATH" >&2; exit 1; }
done
# probe whether this curl build can talk HTTP/2; if the probe fails
# (old curl, or offline) fall back to HTTP/1.1 for all requests
HTTP_VERSION="--http2"
curl -so /dev/null --http2 https://example.com || HTTP_VERSION="--http1.1"
# Cribbed from https://stackoverflow.com/a/41762669/391826
median() {
arr=($(printf '%s\n' "${@}" | sort -n))
nel=${#arr[@]}
if (( $nel % 2 == 1 )); then # Odd number of elements
val="${arr[ $(($nel/2)) ]}"
else # Even number of elements
(( j=nel/2 ))
(( k=j-1 ))
val=$(echo "scale=6;(${arr[j]}" + "${arr[k]})"/2|bc -l)
fi
echo $val
}
# defaults
DEBUG=""
LOG=""
NUM_REQUESTS=0
VERBOSE=0
COOKIE=""
RESOLVE=""
PROXY_PROTOCOL=""
# parse options. NB: -l takes an argument, so it needs a trailing ':'
# in the optstring, otherwise getopts never populates $OPTARG for it
while getopts ":n:dl:pvb:r:" OPTION
do
case $OPTION in
d) DEBUG=1 ;;
l) LOG="$OPTARG" ;;
n) NUM_REQUESTS=$OPTARG ;;
b) COOKIE=$OPTARG ;;
r) RESOLVE=$OPTARG ;;
p) PROXY_PROTOCOL=1 ;;
\?) echo -e "Usage: ttfb [options] url [url...]\n\t-d debug\n\t-l <log file> (infers -d) log response headers. Defaults to ./curl.log\n\t-n <number> of times to test time to first byte\n\t-v verbose output. Show response breakdown (DNS lookup, TLS handshake etc)\n\t-b <cookie file> Use a cookie\n\t-r <resolve HOSTNAME:IP> Resolves the DNS to a specific IP\n\t-p Uses proxy protocol" >&2
exit 1
;;
:)
echo "Invalid option: $OPTARG requires an argument" 1>&2
exit 1 # don't carry on with a half-parsed command line
;;
esac
done
shift $((OPTIND - 1)) # shifts away every option argument,
# leaving urls as $@
# test $# rather than $1: with no urls, $1 is unset and would trip `set -u`
if [ $# -eq 0 ]; then
echo "You didn't specify any urls to fetch"
exit 1
else
# NB: joined into one string and later word-split by `for URL in $URLS`,
# so urls containing spaces are not supported
URLS="$@"
fi
# if we're given a custom log file, or log directory, implicitly set DEBUG=1
[ -n "$LOG" ] && DEBUG=1
# default the log file to curl.log in pwd when -l wasn't specified
LOG="${LOG:-curl.log}"
# now work out if $LOG is relative or an absolute path (absolute iff
# stripping a leading / changes it) and then get the dirname
[ "$LOG" != "${LOG#/}" ] && LOG_DIRECTORY=$(dirname "$LOG") || LOG_DIRECTORY=$(dirname "${PWD}/$LOG")
if [ ! -d "$LOG_DIRECTORY" ]; then
echo "Log directory $LOG_DIRECTORY doesn't exist" >&2
exit 1;
fi
# then set the actual log filename (directory part now lives in LOG_DIRECTORY)
LOG=$(basename "$LOG")
# normalise the flags to 0/1 so later numeric -eq tests are safe
DEBUG=${DEBUG:-0}
PROXY_PROTOCOL=${PROXY_PROTOCOL:-0}
# build the curl argument list as an array so values survive quoting
options=()
options+=(-o /dev/null)
options+=(-s)
options+=(-L)
options+=(--tlsv1.3)
if [ -n "$RESOLVE" ]; then
options+=(--resolve "${RESOLVE}")
fi
if [ $PROXY_PROTOCOL -eq 1 ]; then
options+=(--haproxy-protocol)
fi
if [ -n "$COOKIE" ]; then
options+=(-b "${COOKIE}")
fi
# deliberately unquoted: HTTP_VERSION is a single flag like --http2
options+=($HTTP_VERSION)
options+=(-H 'Cache-Control: no-cache')
# -w makes curl print this template after the transfer; note it is a
# shell command (an echo with a $(… | bc) substitution) that the main
# loop below evals to compute TTFB minus the connection overhead
options+=(-w 'echo DNS lookup: %{time_namelookup} TLS handshake: %{time_appconnect} TTFB including connection: %{time_starttransfer} TTFB: $(echo %{time_starttransfer} - %{time_appconnect} | bc) Total time: %{time_total} \n')
if [ $DEBUG -eq 1 ]; then
# dump response headers to the log file
options+=(-D "${LOG_DIRECTORY}/${LOG}")
fi
# main loop: one pass per url (word-split from $URLS, so no spaces in urls)
for URL in $URLS; do
# if we're checking more than one url
# output the url on the results line
if [ ${#@} -gt 1 ]; then
SHOW_URL="${URL}|"
if [[ $VERBOSE -eq 1 && -n "$NUM_REQUESTS" && "$NUM_REQUESTS" -gt 1 ]]; then
echo $URL >&2
fi
else
SHOW_URL=""
fi
# if multiple requests have been specified, then show min, max & median values
if [[ -n "$NUM_REQUESTS" && "$NUM_REQUESTS" -gt 1 ]]; then
ttfbs=()
for i in $(seq $NUM_REQUESTS); do
# if we're checking more than one url, and debug is set, then log
# the headers to a per-url file, but also for each request
if [[ ${#@} -gt 1 && $DEBUG -eq 1 ]]; then
# sanitise the url into a filename-safe log prefix
LOGFILE="${URL//[^[:alnum:]]/_}"
# NOTE(review): options grows by one -D per iteration and is never
# reset; this appears to rely on curl honouring the last -D given —
# confirm against the curl manual
options+=(-D "${LOG_DIRECTORY}/${LOGFILE}-${LOG}_${i}")
elif [ $DEBUG -eq 1 ]; then
# we only have the one URL, but we still are requesting multiple
# ttfb calls, so log the headers
options+=(-D "${LOG_DIRECTORY}/${LOG}_${i}")
fi
# curl's -w template (set above) prints an `echo … $(… | bc) …`
# command line; eval runs it to produce the timing breakdown string
request=$(eval $(curl "${options[@]}" "$URL"))
# pull the seconds value out of the "TTFB: x.xxxxxx" token
ttfbs+=($(echo $request | grep -oE "TTFB: .{0,7}" | cut -d' ' -f2 | sort -n));
if [ $VERBOSE -eq 1 ]; then
echo "$request" >&2
else
# progress dot per request, to stderr so results stay clean on stdout
printf "." >&2
fi
done
# tidy up - combine multiple request logs for the same url into a single file
if [[ ${#@} -gt 1 && $DEBUG -eq 1 ]]; then
cat "${LOG_DIRECTORY}/${LOGFILE}-${LOG}_"* > "${LOG_DIRECTORY}/${LOGFILE}-${LOG}"
rm "${LOG_DIRECTORY}/${LOGFILE}-${LOG}_"*
elif [ $DEBUG -eq 1 ]; then
cat "${LOG_DIRECTORY}/${LOG}_"* > "${LOG_DIRECTORY}/${LOG}"
rm "${LOG_DIRECTORY}/${LOG}_"*
fi
printf "\n" >&2
# sort the times
ttfbs=( $( printf "%s\n" "${ttfbs[@]}" | sort -n ) )
# show quickest, slowest and median fftb (ANSI colours: green/red/magenta)
printf "${SHOW_URL}\e[32mfastest \e[39m${ttfbs[0]} \e[91mslowest \e[39m${ttfbs[${#ttfbs[*]}-1]} \e[95mmedian \e[39m$(median ${ttfbs[*]})\e[39m\n";
else
# single request per url: print the full breakdown (verbose) or just ttfb
if [ $VERBOSE -eq 1 ]; then
# echo -n "curl ${options[@]} $URL"
echo -e $SHOW_URL $(eval $(curl "${options[@]}" "$URL"))
else
echo -e $SHOW_URL $(eval $(curl "${options[@]}" "$URL") | grep -oE "TTFB: .{0,7}" | cut -d' ' -f2)
fi
fi
done #| column -s'|' -t
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment