@drizzt · Last active January 12, 2024 21:30
#!/usr/bin/python3
from starlette.applications import Starlette
from starlette.responses import Response
from starlette.routing import Route
import httpx


async def topic(request):
    # Rebuild the public URL of the requested topic from the Host header and path.
    host = request.headers.get("host")
    path = request.path_params["path"]
    target_url = f"https://{host}/{path}"

    # Inline the Web Push "aesgcm" parameters into the body so they survive the
    # forward: salt from the Encryption header, sender public key from Crypto-Key,
    # then the encrypted payload itself.
    pubKey = request.headers.get("crypto-key").encode()
    salt = request.headers.get("encryption").encode()
    body = (
        b"aesgcm\nEncryption: " + salt
        + b"\nCrypto-Key: " + pubKey
        + b"\n" + await request.body()
    )

    # Drop headers that no longer describe the rewritten body.
    headers = dict(request.headers)
    del headers["content-encoding"]
    del headers["content-length"]

    # Forward the request to the target URL.
    async with httpx.AsyncClient() as client:
        upstream_response = await client.request(
            method=request.method,
            url=target_url,
            headers=headers,
            content=body,  # raw body bytes (content=, not the deprecated data=)
            params=request.query_params,
        )
    return Response(
        content=upstream_response.content,
        status_code=upstream_response.status_code,
    )


app = Starlette(
    debug=True,
    routes=[
        Route("/{path:path}", topic, methods=["POST"]),
    ],
)
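The gist stops at the app definition. A minimal sketch of how it could be served, assuming uvicorn is installed and that this is appended to the end of the script above so the app listens on 127.0.0.1:8000, the address the nginx map below routes "aesgcm" requests to:

import uvicorn

if __name__ == "__main__":
    # Not part of the original gist: serve the Starlette app defined above
    # on the address/port the nginx map sends "aesgcm" traffic to.
    uvicorn.run(app, host="127.0.0.1", port=8000)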
# Route by Content-Encoding: requests with "aesgcm" go to 127.0.0.1:8000
# (where the Starlette proxy above is expected to listen); everything else
# goes directly to ntfy on 127.0.0.1:2586.
map $http_content_encoding $ntfy_backend {
    "aesgcm"  "127.0.0.1:8000";
    default   "127.0.0.1:2586";
}

server {
    listen 80;
    listen [::]:80;
    server_name YOURDOMAIN;

    location /generic {
        proxy_pass http://127.0.0.1:5000;
    }

    location / {
        add_header alt-svc 'h3-27=":443"; ma=86400, h3-28=":443"; ma=86400, h3-29=":443"; ma=86400, h3=":443"; ma=86400';
        proxy_pass http://$ntfy_backend;
        proxy_http_version 1.1;
        proxy_set_header Host $host;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_connect_timeout 3m;
        proxy_send_timeout 3m;
        proxy_read_timeout 3m;
        client_max_body_size 0; # Stream request body to backend
    }
}
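For illustration only, a hypothetical client-side request showing how the pieces fit together: the Content-Encoding: aesgcm header makes nginx pick the proxy backend, which then folds the Encryption and Crypto-Key headers into the body before forwarding. Every value below is a placeholder, not real Web Push data:

import httpx

response = httpx.post(
    "http://YOURDOMAIN/upXXXXXXXX",        # placeholder push endpoint path
    content=b"<encrypted payload bytes>",  # placeholder ciphertext
    headers={
        "Content-Encoding": "aesgcm",      # selects the proxy backend in the nginx map
        "Encryption": "salt=SALT_BASE64",  # placeholder salt parameter
        "Crypto-Key": "dh=PUBKEY_BASE64",  # placeholder sender public key
        "TTL": "86400",
    },
)
print(response.status_code)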