How to Log HTTP Headers With HAProxy for Debugging Purposes
global
    log 127.0.0.1 local0
    log 127.0.0.1 local1 notice
    [...]

defaults
    log global
    mode http
    option dontlognull
    [...]
# This is a basic proxy configuration that appends all request headers to the
# end of the standard log line. Please be aware that if you are using an older
# version of HAProxy, the predefined variable "HAPROXY_HTTP_LOG_FMT"
# introduced in HAProxy 2.7 should be replaced with the default log format:
#
# "%ci:%cp [%tr] %ft %b/%s %TR/%Tw/%Tc/%Tr/%Ta %ST %B %CC %CS %tsc %ac/%fc/%bc/%sc/%rc %sq/%bq %hr %hs %{+Q}r".
frontend with-syslog
    bind *:8480
    log-format "${HAPROXY_HTTP_LOG_FMT} hdrs:%{+Q}[var(txn.req_hdrs)]"
    http-request set-var(txn.req_hdrs) req.hdrs
    # put your configuration here
Jul 5 05:23:51 localhost haproxy[15114]: 172.29.1.14:65076 [05/Jul/2023:05:23:51.240] with-syslog with-syslog/<NOSRV> 0/-1/-1/-1/0 200 91 - - LR-- 1/1/0/0/0 0/0 "GET / HTTP/1.1" hdrs:"host: 172.16.29.13:8480#015#012user-agent: curl/7.64.1#015#012accept: */*#015#012#015#012"
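The #015#012 sequences in the hdrs field are the syslog daemon's octal escaping of the CR/LF pairs that separate headers. A minimal Python sketch (not part of the original gist) that decodes such a line back into one header per line:

#!/usr/bin/env python3
# Minimal sketch: extract the quoted "hdrs" field from an access log line
# and turn the octal escapes (#015 = \r, #012 = \n) back into one header
# per line. The sample line is the one shown above.
import re

line = ('Jul 5 05:23:51 localhost haproxy[15114]: 172.29.1.14:65076 '
        '[05/Jul/2023:05:23:51.240] with-syslog with-syslog/<NOSRV> '
        '0/-1/-1/-1/0 200 91 - - LR-- 1/1/0/0/0 0/0 "GET / HTTP/1.1" '
        'hdrs:"host: 172.16.29.13:8480#015#012user-agent: curl/7.64.1'
        '#015#012accept: */*#015#012#015#012"')

match = re.search(r'hdrs:"(.*)"$', line)
if match:
    for header in match.group(1).replace('#015#012', '\r\n').split('\r\n'):
        if header:
            print(header)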
global
    # To avoid truncation of the log line by HAProxy, increase the access
    # log length. Note that even with an increased length, there is no
    # guarantee that the entire log line will not be truncated by the
    # logging system itself, such as syslog or any other software used
    # for logging.
    log 127.0.0.1 len 4096 local0
    log 127.0.0.1 local1 notice
    [...]
-- The dump_headers function retrieves the headers of the current
-- transaction and sends them to an archiving application over HTTP
-- using the httpclient.
--
-- To utilize this function in a proxy, the following line needs to be
-- added:
--
-- http-request lua.dump_headers
--
-- This line instructs HAProxy to invoke the dump_headers function for
-- each HTTP request and send the headers to the specified archiving
-- application.
local function dump_headers(txn)
    local hdrs = txn.f:req_hdrs()
    local headers = {
        -- We are aware of the body size, and we prefer not to send
        -- it in chunks.
        ["content-length"] = { string.len(hdrs) },
    }
    -- Use POST method to send the header to the remote storage
    -- application.
    local httpclient = core.httpclient()
    local response = httpclient:post{
        url = "http://127.0.0.1:8001/",
        body = hdrs,
        headers = headers,
        timeout = 10
    }
end
-- Associate the dump_headers action with the dump_headers function
-- for http-request rules.
core.register_action('dump_headers', {'http-req'}, dump_headers)
# This proxy uses the lua.dump_headers function to send the request
# headers to a remote web application responsible for storing them in
# a secure location.
frontend lua-log
    bind *:8481
    http-request lua.dump_headers
    # put your configuration here
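A quick way to exercise this frontend is to send a request through it and then check the file written by the archiving service shown below. A minimal sketch, assuming HAProxy is listening locally on port 8481:

#!/usr/bin/env python3
# Minimal test sketch (assumes HAProxy listens on 127.0.0.1:8481): send one
# request through the lua-log frontend so its headers are forwarded to the
# archiving service on 127.0.0.1:8001.
import urllib.error
import urllib.request

req = urllib.request.Request('http://127.0.0.1:8481/',
                             headers={'x-test': 'header-logging'})
try:
    urllib.request.urlopen(req, timeout=5)
except urllib.error.URLError as exc:
    # With no backend configured in this example, HAProxy typically answers
    # with an error status; the Lua action still runs on the request.
    print('request sent:', exc)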
#!/usr/bin/env python3
# This is an example of a web service that demonstrates how to receive
# request headers in a POST request and asynchronously write them to
# a file. It is provided as an illustration of how to log HTTP headers
# using HAProxy.
#
# Please note that this server does not implement any security or
# performance measures.
#
# IMPORTANT: DO NOT USE IN A PRODUCTION ENVIRONMENT!
from http.server import BaseHTTPRequestHandler, HTTPServer
import threading
import queue
q = queue.Queue()
# The log file is currently located in /tmp in this example because
# it doesn't require specific permissions to write to. You can change
# it to /var/log with the appropriate permissions if needed.
log_file='/tmp/headers.log'
def worker():
    t = threading.currentThread().getName()
    with open(log_file, 'a') as fh:
        while True:
            headers = q.get()
            # Concatenate all headers into a single line using the
            # delimiter "@@".
            headers = bytes.decode(headers, 'utf-8')
            headers = headers.rstrip().replace('\r\n', '@@')
            # Write headers and flush the file.
            fh.write(headers + '\n')
            fh.flush()
            q.task_done()

class Server(BaseHTTPRequestHandler):
    def do_POST(self):
        # Read the Content-Length header to determine the number of
        # bytes that need to be read from the body. Post the body
        # content to the queue for asynchronous processing as swiftly
        # as possible.
        content_length = int(self.headers['Content-Length'])
        try:
            q.put(self.rfile.read(content_length))
        finally:
            self.send_response_only(200, "OK")
            self.end_headers()

def run():
    # Start the HTTP server.
    httpd = HTTPServer(('', 8001), Server)
    # Turn on the worker thread.
    threading.Thread(target=worker, daemon=True).start()
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    # Wait for all workers before exiting.
    q.join()
    httpd.server_close()

if __name__ == '__main__':
    run()
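To read the archive back, note that each line of /tmp/headers.log is one request, with its individual headers joined by "@@". A minimal sketch (not part of the original gist) that prints them one header per line:

#!/usr/bin/env python3
# Minimal sketch: print the archived requests, one header per line.
log_file = '/tmp/headers.log'

with open(log_file) as fh:
    for record in fh:
        print('--- request ---')
        for header in record.rstrip('\n').split('@@'):
            if header:
                print(header)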
# This proxy operates similarly to lua-log, but with the addition of
# generating a unique ID for each request. It appends this unique ID
# to the x-unique-id header before invoking lua.dump_headers and
# includes it in the request logs for reference.
frontend lua-log-id
    bind *:8482
    log-format "${HAPROXY_HTTP_LOG_FMT} id:%{+Q}[var(txn.unique_id)]"
    unique-id-format "%{+X}o %ci:%cp_%fi:%fp_%Ts_%rt:%pid"
    # If an x-unique-id header already exists (for example, if several
    # proxies are chained), it will not be overwritten. However, be
    # cautious with this method, as the x-unique-id header can
    # potentially be forged. It is recommended to implement additional
    # validation mechanisms to ensure that only trusted sources are
    # allowed to provide the x-unique-id header.
    http-request set-header x-unique-id %[unique-id] if ! { req.hdr(x-unique-id) -m found }
    http-request set-var(txn.unique_id) req.hdr(x-unique-id)
    http-request lua.dump_headers
    # put your configuration here
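Because x-unique-id is set on the request before lua.dump_headers runs, the ID logged by this frontend can be used to look up the matching header dump in the archive. A minimal sketch, assuming the example archiving service above and its /tmp/headers.log file:

#!/usr/bin/env python3
# Minimal sketch: find the archived header dump that matches a unique ID
# taken from the "id:" field of the HAProxy access log.
import sys

log_file = '/tmp/headers.log'
unique_id = sys.argv[1]

with open(log_file) as fh:
    for record in fh:
        # The archived headers include the x-unique-id header set by HAProxy.
        if f'x-unique-id: {unique_id}'.lower() in record.lower():
            print(record.rstrip('\n').replace('@@', '\n'))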
-- Create a queue to accumulate all headers for subsequent processing.
local headerqueue = core.queue()
-- dump_headers_async retrieves all request headers and pushes them to
-- the header queue. This approach minimizes the impact on the request
-- processing time, as the data is processed asynchronously. The queue
-- is only allowed to hold 10000 items in order to prevent HAProxy from
-- using all of its allocated RAM in the event of a prolonged failure
-- of the remote server. Once this limit is reached, header logging is
-- silently discarded.
--
-- To utilize this function in a proxy, the following line needs to be
-- added:
--
-- http-request lua.dump_headers_async
--
local function dump_headers_async(txn)
    if headerqueue:size() > 10000 then
        return
    end
    local hdrs = txn.f:req_hdrs()
    headerqueue:push(hdrs)
end
-- send_headers is a task worker that takes all headers placed in the
-- header queue and posts them to the external storage application.
function send_headers()
    local httpclient = core.httpclient()
    while true do
        local hdrs = headerqueue:pop_wait()
        -- We are aware of the body size, and we prefer not to send
        -- it in chunks.
        local headers = {
            ["content-length"] = { string.len(hdrs) },
        }
        -- Use POST method to send the header to the remote storage
        -- application.
        local response = httpclient:post{
            url = "http://127.0.0.1:8001/",
            body = hdrs,
            headers = headers,
            timeout = 10
        }
    end
end
-- Start the send_headers worker.
core.register_task(send_headers)

-- Associate the dump_headers_async action with the dump_headers_async
-- function for http-request rules.
core.register_action('dump_headers_async', {'http-req'}, dump_headers_async)
# The lua-log-async proxy is essentially the same as lua-log-id, but
# instead of calling lua.dump_headers, it invokes lua.dump_headers_async
# to push the headers into the header queue for asynchronous processing.
frontend lua-log-async
    bind *:8483
    log-format "${HAPROXY_HTTP_LOG_FMT} id:%{+Q}[var(txn.unique_id)]"
    unique-id-format "%{+X}o %ci:%cp_%fi:%fp_%Ts_%rt:%pid"
    http-request set-header x-unique-id %[unique-id] if ! { req.hdr(x-unique-id) -m found }
    http-request set-var(txn.unique_id) req.hdr(x-unique-id)
    http-request lua.dump_headers_async
    # put your configuration here
# The lua-log-async-conditional proxy functions similarly to
# lua-log-async, but with an additional condition. It only sends logs
# to the remote logging server if the client's source IP is listed in
# the headers.map file. This file must exist in the filesystem and is
# empty by default. If you want to activate headers logging for a
# specific client, you can manage it using the debug-status-manager
# proxy.
listen lua-log-async-conditional
    bind *:8484
    log-format "${HAPROXY_HTTP_LOG_FMT} id:%{+Q}[var(txn.unique_id)]"
    unique-id-format "%{+X}o %ci:%cp_%fi:%fp_%Ts_%rt:%pid"
    http-request set-header x-unique-id %[unique-id] if ! { req.hdr(x-unique-id) -m found }
    http-request set-var(txn.unique_id) req.hdr(x-unique-id)
    # The logging of headers will be performed only if the client's IP
    # address is found in the headers.map file. The "-M" flag ensures
    # that the headers.map file is loaded as a map instead of an ACL.
    http-request lua.dump_headers_async if { src -m ip -M -f headers.map }
    # put your configuration here
socat unix:/var/run/haproxy.sock - <<< "add map headers.map 172.29.1.14 1"
socat unix:/var/run/haproxy.sock - <<< "del map headers.map 172.29.1.14"
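The same Runtime API commands can be issued without socat. A minimal Python sketch, assuming the stats socket is at /var/run/haproxy.sock as in the socat examples above:

#!/usr/bin/env python3
# Minimal sketch: send one Runtime API command per connection over the
# HAProxy stats socket and print the reply.
import socket

def runtime_cmd(command, sock_path='/var/run/haproxy.sock'):
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
        s.connect(sock_path)
        s.sendall(command.encode() + b'\n')
        return s.recv(65536).decode()

print(runtime_cmd('add map headers.map 172.29.1.14 1'))
print(runtime_cmd('del map headers.map 172.29.1.14'))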
# The debug-status-manager proxy allows the control of the content
# stored in the headers.map file in memory, without altering the file
# on the filesystem.
#
# Before making any changes, a series of sanity checks are performed:
# * The client must connect from the local network.
# * The request path must match a known path, and the "clientip"
# parameter with the client IP must be set.
#
# The request will return the debugging status for the provided client
# IP.
#
# To activate logs for a specific client:
# curl '172.16.29.13:8084/debug-on?clientip=172.29.1.14'
# To activate logs for a range of clients:
# curl '172.16.29.13:8084/debug-on?clientip=172.29.1.0/24'
# To deactivate a request header logging session:
# curl '172.16.29.13:8084/debug-off?clientip=172.29.1.14'
frontend debug-status-manager
    bind 172.16.29.13:8084
    # If the connection is not initiated from the local network, reject
    # it as quickly as possible.
    tcp-request connection reject if !{ src -m ip 172.16.29.0/24 }
    # The request path must be either "/debug-on" or "/debug-off" and it
    # must include a parameter named "clientip".
    http-request deny if !{ path -m str /debug-on /debug-off } || !{ url_param(clientip) -m found }
    # Set the logging status for the provided client and return its
    # current status.
    http-request set-map(headers.map) %[url_param(clientip)] 1 if { path -m str /debug-on }
    http-request del-map(headers.map) %[url_param(clientip)] if { path -m str /debug-off }
    http-request return status 200 content-type text/plain lf-string "%[url_param(clientip)] %[url_param(clientip),map_ip(headers.map,0)]\n"
curl '172.16.29.13:8084/debug-on?clientip=172.29.1.14'
curl '172.16.29.13:8084/debug-off?clientip=172.29.1.14'
curl '172.16.29.13:8084/debug-on?clientip=172.29.1.0/24'