Generate two curl write-out templates: the first is human-readable, the second emits a CSV record for reporting over time.
$ tee &> /dev/null ~/.curl_report <<-EOT
url_effective: %{url_effective}\n
http_code: %{http_code}\n
\n
speed_download: %{speed_download}\n
size_download: %{size_download}\n
\n
time_namelookup: %{time_namelookup}\n
time_connect: %{time_connect}\n
time_appconnect: %{time_appconnect}\n
time_pretransfer: %{time_pretransfer}\n
time_redirect: %{time_redirect}\n
time_starttransfer: %{time_starttransfer}\n
...........................\n
time_total: %{time_total}\n
EOT
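The same %{...} write-out variables can also be passed inline to -w for a quick one-off check, with no template file involved; a minimal sketch:
$ curl -skL -o /dev/null \
-w 'http_code: %{http_code}\ntime_total: %{time_total}\n' \
"http://localhost.localdomain/"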
$ tee &> /dev/null ~/.curl_csv <<-EOT
%{time_namelookup},%{time_connect},%{time_appconnect},%{time_pretransfer},%{time_redirect},%{time_starttransfer},%{time_total},%{size_download},%{speed_download},%{http_code},%{url_effective}\n
EOT
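A single run against the CSV template prints one comma-separated sample, which is a quick way to sanity-check the column order before the timed loop further down:
$ curl -skL -o /dev/null \
-w "@${HOME}/.curl_csv" \
"http://localhost.localdomain/"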
$ curl -skL -o /dev/null \
-w "@${HOME}/.curl_report" \
--limit-rate 1250000 \
"http://localhost.localdomain/"
5 minutes of testing with a 30-second interval (10 samples).
NOTE: 10 megabits per second = 1250000 bytes per second, which is the bare-number unit --limit-rate expects.
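The conversion is just bits to bytes, 10 × 1,000,000 ÷ 8; a quick shell sanity check:
$ echo $(( 10 * 1000 * 1000 / 8 ))
1250000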
$ URL='http://localhost.localdomain'; \
for i in {1..10}; do \
printf -- \
'%s,' \
"$(date +%FT%T)"; \
curl -skL -o /dev/null \
-w "@${HOME}/.curl_csv" \
--limit-rate 1250000 \
"${URL}"; \
sleep 30; \
done
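If the loop is redirected into a file (for example, ending it with done > ~/curl_results.csv, a hypothetical path), a short awk one-liner can summarize the run afterwards. Field 8 is time_total because the leading timestamp occupies field 1; this is only a sketch of one possible summary.
$ awk -F, '{ s += $8; if ($8 > m) m = $8 } END { if (NR) printf "samples=%d avg_total=%.3fs max_total=%.3fs\n", NR, s/NR, m }' ~/curl_results.csv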