Skip to content

Instantly share code, notes, and snippets.

@esseti
Created July 29, 2020 09:17
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save esseti/890b96259e74cc0308735ab5f88b6c28 to your computer and use it in GitHub Desktop.
Save esseti/890b96259e74cc0308735ab5f88b6c28 to your computer and use it in GitHub Desktop.
Rate limit in haproxy with lua
-- Shared rate-limiter state. Declared at file scope as locals so the
-- action defined below reaches it as upvalues, without polluting _G.

-- Two-row counter matrix: requests[0] / requests[1] map a client
-- identifier to its request count for alternating (even/odd) minutes.
local requests = {}
-- Minute currently being counted; -1 forces a reset on the first call.
local current_min = -1
-- Maximum allowed requests per client per sliding 60-second window.
local rate_limit = 150
-- Upper bound (ms) for the optional capped-wait variant (see the
-- commented-out `until` clause in rate_limit_request).
local max_wait = 29000
-- Extra requests a client may spend before throttling kicks in.
local burst = 0
-- Seed the PRNG once per worker instead of on every request:
-- os.time() only has one-second resolution, so per-request reseeding
-- makes concurrent requests draw identical "random" sleep intervals.
local rng_seeded = false

--- HAProxy "http-req" action: delay a request until its client's
-- request rate drops below `rate_limit` requests per minute.
-- Implements a sliding-window counter (cf. the Cloudflare rate-limiting
-- approach): two rows alternate as "current minute" and "previous
-- minute", and the effective rate blends both proportionally.
-- @param txn HAProxy transaction; expects txn.identifier to have been
--            set earlier via http-request set-var(txn.identifier).
function rate_limit_request(txn)
  if not rng_seeded then
    math.randomseed(os.time())
    rng_seeded = true
  end

  -- Client identifier set by the HAProxy configuration.
  local name = txn:get_var('txn.identifier')
  local waited = 0
  -- Declared outside the loop: both are needed after `until`
  -- (repeat-block locals go out of scope at the end of the loop).
  local current, rate

  repeat
    local now = os.date("*t")
    local minute = now.min

    -- Lazily create the two counter rows.
    if requests[0] == nil then requests[0] = {} end
    if requests[1] == nil then requests[1] = {} end

    -- Even minutes count in row 1, odd minutes in row 0, so two rows
    -- suffice to hold the current and the previous minute.
    local prev
    if minute % 2 == 0 then
      current, prev = 1, 0
    else
      current, prev = 0, 1
    end

    -- Minute rollover: wipe the row that now represents the current
    -- minute so it starts counting from scratch.
    if minute ~= current_min then
      current_min = minute
      requests[current] = {}
    end

    -- First request of this client in the current minute: start from
    -- -burst so the client may exceed the limit by `burst` requests.
    if requests[current][name] == nil then
      requests[current][name] = -burst
    end

    local prev_count = requests[prev][name] or 0

    -- Sliding-window estimate: weight the previous minute's count by
    -- the fraction of it that still lies inside the 60-second window.
    local seconds_elapsed = now.sec
    rate = (((60 - seconds_elapsed) / 60) * prev_count) + requests[current][name]

    -- Over the limit: sleep a short random interval, then re-check.
    -- The random jitter staggers the wake-ups of queued clients.
    if rate >= rate_limit then
      local wait = math.random(1, 40)
      core.msleep(wait)
      waited = waited + wait
    end
    -- To cap the total delay instead of waiting indefinitely, use:
    -- until (rate < rate_limit or waited > max_wait)
  until rate < rate_limit

  -- The request is allowed through: account for it.
  requests[current][name] = requests[current][name] + 1
  core.Debug(name.."-"..rate.." waited "..tostring(waited))
end
-- Register the function as an HAProxy "http-req" action (0 extra args);
-- invoked from the config with: http-request lua.rate_limit_request
core.register_action("rate_limit_request", { "http-req" }, rate_limit_request, 0);
version: '3'
services:
loadbalancer:
# NOTE: pin a specific image tag here instead of the floating default
image: haproxy
ports:
- 8888:80
- 9000:9000
volumes:
- ./haproxy-ratelimiter.cfg:/usr/local/etc/haproxy/haproxy.cfg
- ./429.html:/etc/haproxy/429.html
- ./delay.lua:/etc/haproxy/delayw.lua
networks:
- net
api01:
image: httpd:2.4
networks:
- net
api02:
image: httpd:2.4
networks:
- net
api03:
image: httpd:2.4
networks:
- net
networks:
net:
#https://www.haproxy.com/blog/web-traffic-limitation/
#https://medium.com/faun/understanding-rate-limiting-on-haproxy-b0cf500310b1
#https://stackoverflow.com/questions/43776786/haproxy-slow-down-connections-from-specific-ip
global
lua-load /etc/haproxy/delayw.lua
defaults
mode http
timeout connect 5000ms
timeout client 50000ms
timeout server 50000ms
# after this time the connections in the queue are dropped; this is not IP based.
timeout queue 50000ms
frontend proxy
bind *:80
# # connection tracking uses this table, keyed by IP; we skip it for now
stick-table type string len 180 size 200k expire 60s store gpc0,http_req_rate(60s),conn_rate(60s)
# # if outside these parameters, go away
# use_backend be_429_slow_down if conn_rate_abuse or err_rate_abuse or data_rate_abuse
# this is what we can use
# if there's auth, we use the auth + ip to track the user
acl has_auth_header req.fhdr(Authorization) -m found
# this add auth_ip as key, so we have sensitive info but per user
http-request set-header X-Concat %[req.fhdr(Authorization)]_%[src] if has_auth_header
http-request set-header X-Concat %[src] unless has_auth_header
# we use a dedicated table just in case.
http-request track-sc0 req.fhdr(X-Concat),regsub(.*\ ,)
http-request set-var(txn.identifier) req.fhdr(X-Concat),regsub(.*\ ,)
use_backend be_429_slow_down if { sc0_conn_rate gt 200 }
http-request lua.rate_limit_request
# too much is too much
use_backend api
backend api_tbl
stick-table type string len 180 size 200k expire 10s store gpc0,http_req_rate(10s),conn_rate(10s)
backend api
server api01 api01:80
server api02 api02:80
server api03 api03:80
backend be_429_slow_down
# this error here waits 5 and gives the too many requests
timeout tarpit 5s
http-request tarpit
# depending on the HAProxy version here
# reqitarpit .
errorfile 500 /etc/haproxy/429.html
# stats, we can remove it
listen stats # Define a listen section called "stats"
bind :9000 # Listen on localhost:9000
mode http
stats enable # Enable stats page
stats hide-version # Hide HAProxy version
stats realm Haproxy\ Statistics # Title text for popup window
stats uri /haproxy_stats # Stats URI
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment