Hello World HTTP benchmark

  • Container: alpine:edge on linux/arm64
  • Command: wrk -t12 -c400 -d30s http://127.0.0.1:<PORT> (12 threads, 400 open connections, 30-second duration)
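
For reference, a single run (the Go server is shown here) can be reproduced roughly as follows. The package installation step and running wrk inside the same container are assumptions, not the exact original commands.

# Reproduction sketch (assumed; adjust the port for each runtime)
docker run --rm -it -v "$PWD":/src -w /src alpine:edge sh
apk add go wrk                  # inside the container; versions come from the edge repositories
go build main.go
./main &                        # server listens on :8050
wrk -t12 -c400 -d30s http://127.0.0.1:8050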

Go 1.22.2 / Alpine / Docker

Code

package main

import "net/http"

func main() {
	http.ListenAndServe(":8050", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("Hello World"))
	}))
}

Build

go build main.go

Result

Running 30s test @ http://127.0.0.1:8050
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency     1.11ms    1.41ms  28.17ms   84.84%
    Req/Sec    48.45k     7.25k   79.85k    68.45%
  17381368 requests in 30.09s, 2.07GB read
  Socket errors: connect 0, read 0, write 0, timeout 103
Requests/sec: 577617.35
Transfer/sec:     70.51MB

Dart 3.3.4 / Alpine / Docker

Code

import 'package:shelf/shelf.dart';
import 'package:shelf/shelf_io.dart' as shelf_io;

void main() async {
  var handler = const Pipeline().addHandler(_helloWorld);

  await shelf_io.serve(handler, '127.0.0.1', 8060);
}

// Returns a constant "Hello World" response for every request.
Response _helloWorld(Request request) => Response.ok('Hello World');

Build

dart compile exe main.dart

Result

Running 30s test @ http://127.0.0.1:8060
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency    12.95ms   10.97ms 470.70ms   99.56%
    Req/Sec     2.61k   422.24     9.68k    95.71%
  934341 requests in 30.09s, 233.46MB read
Requests/sec:  31051.56
Transfer/sec:      7.76MB

Bun 1.1.6 / Alpine / Docker

Code

Bun.serve({
	port: 8070,
	fetch() {
		return new Response("Hello World");
	},
})

Run

bun main.ts

Result

Running 30s test @ http://127.0.0.1:8070
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency     1.86ms  528.05us  21.47ms   80.13%
    Req/Sec    17.83k     3.64k   39.26k    67.61%
  6396110 requests in 30.07s, 774.68MB read
Requests/sec: 212696.02
Transfer/sec:     25.76MB

Node 20.12.2 / Alpine / Docker

Code

import { createServer } from "node:http"

createServer((_, res) => {
	res.end("Hello World")
}).listen(8110)

Run

node main.mjs

Result

Running 30s test @ http://127.0.0.1:8110
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency    16.50ms   82.21ms   1.62s    98.43%
    Req/Sec     4.28k   719.14    29.76k    95.18%
  1526508 requests in 30.09s, 195.08MB read
Requests/sec:  50725.11
Transfer/sec:      6.48MB

Crystal 1.12.1 / Alpine / Docker

Code

require "http/server"

server = HTTP::Server.new do |context|
  context.response.print "Hello World"
end

server.bind_tcp 8090
server.listen

Build

crystal build --release main.cr

Result

Running 30s test @ http://127.0.0.1:8090
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency     2.24ms  565.53us 210.87ms   97.16%
    Req/Sec    14.75k     1.25k   59.90k    89.78%
  5285629 requests in 30.04s, 373.02MB read
Requests/sec: 175927.63
Transfer/sec:     12.42MB

Roda 3.80.0 / Ruby 3.3.1 / macOS

Code

# frozen_string_literal: true
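# Rackup config (presumably config.ru; Puma loads it by default when run without a file argument).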
require "roda"

class App < Roda
  route do |r|
    r.root do
      "Hello World"
    end
  end
end

run App.freeze.app

Run

puma -s -t 12 -e production

Result

Running 30s test @ http://127.0.0.1:9292
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency    50.70ms  106.72ms 466.30ms   84.84%
    Req/Sec     2.89k     1.94k   12.57k    67.60%
  1008161 requests in 30.10s, 72.28MB read
  Socket errors: connect 155, read 3034, write 563, timeout 0
Requests/sec:  33497.44
Transfer/sec:      2.40MB

Nim 2.0.4 / Alpine / Docker

Code

import std/asynchttpserver
import std/asyncdispatch

proc main {.async.} =
  var server = newAsyncHttpServer()
  proc cb(req: Request) {.async.} =
    await req.respond(Http200, "Hello World")

  server.listen(Port(8100))

  while true:
    if server.shouldAcceptRequest():
      await server.acceptRequest(cb)
    else:
      await sleepAsync(500)

waitFor main()

Build

nim compile main.nim

Result

Running 30s test @ http://127.0.0.1:8100
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency    20.57ms   33.08ms   1.05s    99.23%
    Req/Sec     1.80k   251.91     4.99k    92.02%
  638919 requests in 30.06s, 30.47MB read
Requests/sec:  21258.21
Transfer/sec:      1.01MB

Sinatra 4.0.0 / Ruby 3.3.1 / macOS

Code

# frozen_string_literal: true
require "sinatra"

disable :logging

get "/" do
  "Hello World"
end

Run

APP_ENV=production ruby --jit main.rb

Result

Running 30s test @ http://127.0.0.1:4567
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency    69.11ms  143.66ms 573.66ms   84.62%
    Req/Sec     1.21k     0.87k    5.18k    68.58%
  410752 requests in 30.09s, 72.21MB read
  Socket errors: connect 155, read 3542, write 740, timeout 0
Requests/sec:  13650.87
Transfer/sec:      2.40MB

WEBrick / Ruby 3.3.0 / Alpine / Docker

Code

require "webrick"

server = WEBrick::HTTPServer.new(:Port => 8120)
server.mount_proc("/") { |request, response| response.body = "Hello World" }
trap("INT") { server.shutdown }
server.start

Run

ruby --jit main.rb

Result

Running 30s test @ http://127.0.0.1:8120
  12 threads and 400 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency    50.97ms   24.20ms 665.12ms   90.93%
    Req/Sec   329.50    185.75   780.00     65.68%
  58987 requests in 30.11s, 8.90MB read
Requests/sec:   1958.78
Transfer/sec:    302.70KB