Token-bucket rate limiting with upstream response-time stats (nginx + lua-nginx-module): tokens refill at 10/s up to a burst of 50, and GET /status reports the average upstream response time.
worker_processes 1;
error_log logs/error.log;
events {
    worker_connections 1024;
}
http {
    # Shared memory zones: one for the token-bucket state, one for the
    # upstream response-time statistics.
    lua_shared_dict token_bucket 10M;
    lua_shared_dict stats 5M;

    # Initialize the bucket (empty) and the stats counters at startup.
    init_by_lua '
        ngx.shared.token_bucket:set("tokens_avail", 0)
        ngx.shared.token_bucket:set("last_timestamp", os.time())
        ngx.shared.stats:set("upstream_time-sum", 0)
        ngx.shared.stats:set("upstream_time-count", 0)
    ';
    upstream backend {
        server localhost:1337;
    }

    server {
        listen 8098;

        location / {
            proxy_pass http://backend;
            default_type text/html;
            access_by_lua '
                -- Refill rate (tokens/second), maximum burst size, and cost per request.
                local TOKEN_ACCUM_RATE = 10.0
                local BUCKET_SIZE = 50
                local TOKENS_PER_REQ = 1

                -- Credit the bucket with tokens accrued since the last accepted
                -- request (os.time() has one-second resolution), capped at BUCKET_SIZE.
                local last_timestamp = ngx.shared.token_bucket:get("last_timestamp")
                local cur_timestamp = os.time()
                local time_elapsed = cur_timestamp - last_timestamp
                local tokens_accum = time_elapsed * TOKEN_ACCUM_RATE
                local tokens_avail = ngx.shared.token_bucket:get("tokens_avail")
                tokens_avail = math.min(tokens_avail + tokens_accum, BUCKET_SIZE)

                if tokens_avail >= TOKENS_PER_REQ then
                    -- Spend a token and let the request through to the upstream.
                    -- (The get/set sequence is not atomic across workers; with
                    -- worker_processes 1 it is effectively serialized.)
                    ngx.shared.token_bucket:set("tokens_avail", tokens_avail - TOKENS_PER_REQ)
                    ngx.shared.token_bucket:set("last_timestamp", cur_timestamp)
                    ngx.exit(ngx.OK)
                else
                    -- we have to set the status here because we call ngx.say(),
                    -- forcing response headers to be sent immediately
                    -- https://github.com/openresty/lua-resty-redis/issues/15#issuecomment-14747652
                    ngx.status = 429
                    ngx.say(string.format("%.1f tokens left", tokens_avail))
                    ngx.say("Request failed")
                    ngx.exit(429)
                end
            ';
            log_by_lua '
                -- $upstream_response_time may be unset, "-", or a comma-separated
                -- list; tonumber() then returns nil and the sample is skipped.
                local upstream_time = tonumber(ngx.var.upstream_response_time)
                if upstream_time then
                    local sum = ngx.shared.stats:get("upstream_time-sum")
                    sum = sum + upstream_time
                    ngx.shared.stats:set("upstream_time-sum", sum)
                    ngx.shared.stats:incr("upstream_time-count", 1)
                end
            ';
        }
        location = /status {
            content_by_lua '
                local sum = ngx.shared.stats:get("upstream_time-sum")
                local count = ngx.shared.stats:get("upstream_time-count")
                if count > 0 then
                    local avg_ms = (sum * 1000) / count
                    ngx.say("average upstream response time: ", math.floor(avg_ms + 0.5), "ms (", count, " reqs)")
                else
                    ngx.say("no data yet")
                end
            ';
        }
    }
}
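
For reference, a minimal standalone Lua sketch of the same refill arithmetic (plain Lua, no nginx; the constants and the "only update state on success" behaviour mirror the access_by_lua block above), handy for sanity-checking the limiter outside the server:

-- Standalone sketch of the token-bucket logic used in access_by_lua above.
local TOKEN_ACCUM_RATE = 10.0  -- tokens added per second
local BUCKET_SIZE      = 50    -- maximum burst
local TOKENS_PER_REQ   = 1     -- cost of one request

local tokens_avail   = 0
local last_timestamp = os.time()

local function try_acquire(now)
    -- Credit tokens accrued since the last accepted request, capped at BUCKET_SIZE.
    local elapsed = now - last_timestamp
    local avail = math.min(tokens_avail + elapsed * TOKEN_ACCUM_RATE, BUCKET_SIZE)
    if avail >= TOKENS_PER_REQ then
        -- State is only persisted on success, matching the nginx config.
        tokens_avail = avail - TOKENS_PER_REQ
        last_timestamp = now
        return true
    end
    return false
end

-- After 5 idle seconds the bucket is full (10/s * 5s = 50, the cap), so the
-- first 50 calls in the same second succeed and the 51st is rejected.
local now = os.time() + 5
local allowed = 0
for _ = 1, 51 do
    if try_acquire(now) then allowed = allowed + 1 end
end
print(allowed)  -- prints 50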