Skip to content

Instantly share code, notes, and snippets.

@alecmuffett
Created December 4, 2017 19:17
Show Gist options
  • Star 0 — you must be signed in to star a gist
  • Fork 0 — you must be signed in to fork a gist
  • Save alecmuffett/f5cd8abcf161dbdaffd7a81ed8a088b9 to your computer and use it in GitHub Desktop.
Sample EOTK outputs
# -*- awk -*-
# eotk (c) 2017 Alec Muffett
# EMACS awk mode works quite well for nginx configs
# ---- BEGIN HARD/CLASSIC SWITCH ----
# *HARD* configuration
# swap domain names for onions via brute-force, with whitelisted repairs...
# ---- END HARD/CLASSIC SWITCH ----
# logs and pids
# per-project pid/log paths keep multiple eotk projects from colliding
pid /Users/alecm/src/eotk/projects.d/wikipedia.d/nginx.pid;
error_log /Users/alecm/src/eotk/projects.d/wikipedia.d/log.d/nginx-error.log error;
# TODO: notes for custom 403 error-handling pages:
# https://www.cyberciti.biz/faq/unix-linux-nginx-custom-error-403-page-configuration/
# https://nginx.org/en/docs/http/ngx_http_core_module.html#error_page
# performance
worker_processes auto; # softmap
# cap on open file descriptors per worker; deliberately small for a desktop demo
worker_rlimit_nofile 256;
events {
worker_connections 256;
}
http {
# nginx fails without large enough buckets (sigh)
map_hash_bucket_size 128;
# dns for proxy (sigh)
# NOTE(review): 8.8.8.8/8.8.4.4 are Google public DNS; upstream lookups leave
# the host in cleartext — confirm this is acceptable for this deployment
resolver 8.8.8.8 8.8.4.4 ipv6=off valid=15s;
resolver_timeout 15s;
# we walk a line between keeping it small and flooding resources...
proxy_buffering on;
# for initial; impacts SSL header
proxy_buffer_size 8k;
# for rest of response
proxy_buffers 8 8k;
# how much can be busy sending to client?
proxy_busy_buffers_size 16k;
# where to stash oversize requests?
client_body_temp_path /tmp/nginx-body-wikipedia;
client_max_body_size 4m;
# in case we want to start spooling responses locally
proxy_temp_path /tmp/nginx-proxy-wikipedia;
proxy_max_temp_file_size 64m;
proxy_temp_file_write_size 8k;
# nginx caching static responses for 60 seconds
# - this is a lightweight cache to reduce "storms", hence the global
# approach of "cache everything for a small number of seconds"
# https://nginx.org/en/docs/http/ngx_http_proxy_module.html
proxy_cache_path /tmp/nginx-cache-wikipedia levels=1:2 keys_zone=wikipedia:256m;
proxy_cache wikipedia;
proxy_cache_min_uses 1;
proxy_cache_revalidate on;
proxy_cache_use_stale timeout updating;
proxy_cache_valid any 60s; # "any" includes 404s, etc
# content-types to not cache
# placeholder map: currently always 0 (nothing excluded); extend with
# content-type keys mapping to 1 in order to exempt them from caching
map $http_content_type $no_cache_content_type {
default 0;
}
# hosts not to cache
# placeholder map: currently always 0 (no host excluded from the cache)
map $http_host $no_cache_host {
hostnames;
default 0;
}
# so, should we skip caching this stuff for some reason?
proxy_no_cache $no_cache_content_type $no_cache_host;
proxy_cache_bypass $no_cache_content_type $no_cache_host;
# logs
access_log /Users/alecm/src/eotk/projects.d/wikipedia.d/log.d/nginx-access.log;
# global settings
server_tokens off;
# allow/deny (first wins)
# only unix-domain-socket clients (i.e. the local tor daemon) may connect
allow "unix:";
deny all;
# rewrite these content types; text/html is implicit
subs_filter_types
application/javascript
application/json
application/x-javascript
text/css
text/javascript
text/xml
;
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# Three order-critical phases (subs_filter flags: g=global, i=case-
# insensitive, r=regex):
#  1) save-phase: stash whitelisted domain matches behind sentinel
#     tokens (~-=~=-~NAME~-=~=-~) so the brute-force rewrites below
#     cannot touch them;
#  2) brute-force phase: rewrite every remaining DNS name to its onion;
#  3) restore-phase: put the sentinel-protected originals back.
# preserve subs (save-phase): 1=description,2=re,3=i_or_empty,4=replacement
# saving regexp 'wikipedia\\.org' as 'tld-wp' for replacement with 'wikipedia.org' (i)
subs_filter
([>@\\s])(wikipedia\\.org)\\b
$1~-=~=-~tld-wp~-=~=-~
gir
;
# saving regexp 'wikimedia\\.org' as 'tld-wm' for replacement with 'wikimedia.org' (i)
subs_filter
([>@\\s])(wikimedia\\.org)\\b
$1~-=~=-~tld-wm~-=~=-~
gir
;
# for mediawiki.org -> mldhzqz4k4lhkspx.onion
subs_filter
\\bmediawiki\\.org\\b
mldhzqz4k4lhkspx.onion
gir
;
# for wikidata.org -> hrvv2p2y6hry4k3h.onion
subs_filter
\\bwikidata\\.org\\b
hrvv2p2y6hry4k3h.onion
gir
;
# for wikimedia.org -> cghmyoqnuvntham2.onion
subs_filter
\\bwikimedia\\.org\\b
cghmyoqnuvntham2.onion
gir
;
# for wikimediafoundation.org -> 5l25slwovi5dpgcs.onion
subs_filter
\\bwikimediafoundation\\.org\\b
5l25slwovi5dpgcs.onion
gir
;
# for wikibooks.org -> h2palsawrrwjmvix.onion
subs_filter
\\bwikibooks\\.org\\b
h2palsawrrwjmvix.onion
gir
;
# for wikinews.org -> rpzswndck5mhnd6d.onion
subs_filter
\\bwikinews\\.org\\b
rpzswndck5mhnd6d.onion
gir
;
# for wikipedia.org -> x73kmvi562rwnjju.onion
subs_filter
\\bwikipedia\\.org\\b
x73kmvi562rwnjju.onion
gir
;
# for wikiquote.org -> rxj62jvsqqnevcyl.onion
subs_filter
\\bwikiquote\\.org\\b
rxj62jvsqqnevcyl.onion
gir
;
# for wikisource.org -> 4cyegyx7lnzthq4w.onion
subs_filter
\\bwikisource\\.org\\b
4cyegyx7lnzthq4w.onion
gir
;
# for wikiversity.org -> bzrrgoz6q3dw67is.onion
subs_filter
\\bwikiversity\\.org\\b
bzrrgoz6q3dw67is.onion
gir
;
# for wikivoyage.org -> wsideljd73uexpwi.onion
subs_filter
\\bwikivoyage\\.org\\b
wsideljd73uexpwi.onion
gir
;
# for wiktionary.org -> omivuha6rjzyfjjp.onion
subs_filter
\\bwiktionary\\.org\\b
omivuha6rjzyfjjp.onion
gir
;
# foreignmap subs: 1=onion,2=re,3=re2,4=dns,5=re,6=re2
# third-party sites with their own official onions: link to those instead
# for facebook.com -> facebookcorewwwi.onion
subs_filter
\\bfacebook\\.com\\b
facebookcorewwwi.onion
gir
;
# for nytimes.com -> nytimes3xbfgragh.onion
subs_filter
\\bnytimes\\.com\\b
nytimes3xbfgragh.onion
gir
;
# preserve subs (restore-phase): 1=description,2=re,3=i_or_empty,4=replacement
# restoring 'tld-wp' with 'wikipedia.org'
subs_filter
~-=~=-~tld-wp~-=~=-~
wikipedia.org
g
;
# restoring 'tld-wm' with 'wikimedia.org'
subs_filter
~-=~=-~tld-wm~-=~=-~
wikimedia.org
g
;
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# fix the cookies
# Rewrite upstream Set-Cookie "domain=" attributes from DNS names to the
# corresponding onion so cookies remain scoped correctly for onion clients.
# NB: nginx parses "$N" captures as a single digit, so e.g. "$15l25..."
# means capture $1 followed by the literal text "5l25..." — this is intended.
proxy_cookie_domain
~^(.*?)\\bmediawiki\\.org$
$1mldhzqz4k4lhkspx.onion
;
proxy_cookie_domain
~^(.*?)\\bwikidata\\.org$
$1hrvv2p2y6hry4k3h.onion
;
proxy_cookie_domain
~^(.*?)\\bwikimedia\\.org$
$1cghmyoqnuvntham2.onion
;
proxy_cookie_domain
~^(.*?)\\bwikimediafoundation\\.org$
$15l25slwovi5dpgcs.onion
;
proxy_cookie_domain
~^(.*?)\\bwikibooks\\.org$
$1h2palsawrrwjmvix.onion
;
proxy_cookie_domain
~^(.*?)\\bwikinews\\.org$
$1rpzswndck5mhnd6d.onion
;
proxy_cookie_domain
~^(.*?)\\bwikipedia\\.org$
$1x73kmvi562rwnjju.onion
;
proxy_cookie_domain
~^(.*?)\\bwikiquote\\.org$
$1rxj62jvsqqnevcyl.onion
;
proxy_cookie_domain
~^(.*?)\\bwikisource\\.org$
$14cyegyx7lnzthq4w.onion
;
proxy_cookie_domain
~^(.*?)\\bwikiversity\\.org$
$1bzrrgoz6q3dw67is.onion
;
proxy_cookie_domain
~^(.*?)\\bwikivoyage\\.org$
$1wsideljd73uexpwi.onion
;
proxy_cookie_domain
~^(.*?)\\bwiktionary\\.org$
$1omivuha6rjzyfjjp.onion
;
# fix the header-redirects
# Rewrite upstream "Location:"/"Refresh:" headers so 30x redirects point at
# the onion instead of the clearnet DNS name ("~*" = case-insensitive regex).
proxy_redirect
~*^(.*?)\\bmediawiki\\.org\\b(.*)$
$1mldhzqz4k4lhkspx.onion$2
;
proxy_redirect
~*^(.*?)\\bwikidata\\.org\\b(.*)$
$1hrvv2p2y6hry4k3h.onion$2
;
proxy_redirect
~*^(.*?)\\bwikimedia\\.org\\b(.*)$
$1cghmyoqnuvntham2.onion$2
;
proxy_redirect
~*^(.*?)\\bwikimediafoundation\\.org\\b(.*)$
$15l25slwovi5dpgcs.onion$2
;
proxy_redirect
~*^(.*?)\\bwikibooks\\.org\\b(.*)$
$1h2palsawrrwjmvix.onion$2
;
proxy_redirect
~*^(.*?)\\bwikinews\\.org\\b(.*)$
$1rpzswndck5mhnd6d.onion$2
;
proxy_redirect
~*^(.*?)\\bwikipedia\\.org\\b(.*)$
$1x73kmvi562rwnjju.onion$2
;
proxy_redirect
~*^(.*?)\\bwikiquote\\.org\\b(.*)$
$1rxj62jvsqqnevcyl.onion$2
;
proxy_redirect
~*^(.*?)\\bwikisource\\.org\\b(.*)$
$14cyegyx7lnzthq4w.onion$2
;
proxy_redirect
~*^(.*?)\\bwikiversity\\.org\\b(.*)$
$1bzrrgoz6q3dw67is.onion$2
;
proxy_redirect
~*^(.*?)\\bwikivoyage\\.org\\b(.*)$
$1wsideljd73uexpwi.onion$2
;
proxy_redirect
~*^(.*?)\\bwiktionary\\.org\\b(.*)$
$1omivuha6rjzyfjjp.onion$2
;
# o_to_d_lookup -> if cannot remap, return input. note: old versions
# of lua-plugin cannot cope with code like o_to_d_mappings[o[1]]
# because of `long bracket syntax`; the `[o[` freaks it out.
# See: https://github.com/openresty/lua-nginx-module/issues/748
# Defines global helper functions/tables used later by set_by_lua_block
# and header_filter_by_lua_block; runs once at nginx startup.
init_by_lua_block {
-- helper functions for elsewhere
slog = function (s) -- in case of manual debugging
ngx.log(ngx.ERR, s)
return
end
-- true iff string s ends with suffix x (currently unused elsewhere in view)
has_suffix = function (s, x)
return string.sub(s, -string.len(x)) == x
end
-- mapping onions to dns
o_to_d_mappings = {}
o_to_d_mappings["mldhzqz4k4lhkspx.onion"] = "mediawiki.org"
o_to_d_mappings["hrvv2p2y6hry4k3h.onion"] = "wikidata.org"
o_to_d_mappings["cghmyoqnuvntham2.onion"] = "wikimedia.org"
o_to_d_mappings["5l25slwovi5dpgcs.onion"] = "wikimediafoundation.org"
o_to_d_mappings["h2palsawrrwjmvix.onion"] = "wikibooks.org"
o_to_d_mappings["rpzswndck5mhnd6d.onion"] = "wikinews.org"
o_to_d_mappings["x73kmvi562rwnjju.onion"] = "wikipedia.org"
o_to_d_mappings["rxj62jvsqqnevcyl.onion"] = "wikiquote.org"
o_to_d_mappings["4cyegyx7lnzthq4w.onion"] = "wikisource.org"
o_to_d_mappings["bzrrgoz6q3dw67is.onion"] = "wikiversity.org"
o_to_d_mappings["wsideljd73uexpwi.onion"] = "wikivoyage.org"
o_to_d_mappings["omivuha6rjzyfjjp.onion"] = "wiktionary.org"
-- gsub callback: m is the match table; return mapped DNS name or the
-- match unchanged when it is not one of our onions
o_to_d_lookup = function (m)
local k = m[1] -- see note above re: array syntax
return ( o_to_d_mappings[k] or k )
end
-- replace every 16-char v2 onion address in string i with its DNS name;
-- nil/empty input is returned untouched
onion_to_dns = function (i)
if i == nil or i == "" then
return i
end
local o, num, errs = ngx.re.gsub(i, "\\b([a-z2-7]{16}\\.onion)\\b", o_to_d_lookup, "io")
return o
end
-- mapping dns to onions, for experimentation
d_to_o_mappings = {}
d_to_o_mappings["mediawiki.org"] = "mldhzqz4k4lhkspx.onion"
d_to_o_mappings["wikidata.org"] = "hrvv2p2y6hry4k3h.onion"
d_to_o_mappings["wikimedia.org"] = "cghmyoqnuvntham2.onion"
d_to_o_mappings["wikimediafoundation.org"] = "5l25slwovi5dpgcs.onion"
d_to_o_mappings["wikibooks.org"] = "h2palsawrrwjmvix.onion"
d_to_o_mappings["wikinews.org"] = "rpzswndck5mhnd6d.onion"
d_to_o_mappings["wikipedia.org"] = "x73kmvi562rwnjju.onion"
d_to_o_mappings["wikiquote.org"] = "rxj62jvsqqnevcyl.onion"
d_to_o_mappings["wikisource.org"] = "4cyegyx7lnzthq4w.onion"
d_to_o_mappings["wikiversity.org"] = "bzrrgoz6q3dw67is.onion"
d_to_o_mappings["wikivoyage.org"] = "wsideljd73uexpwi.onion"
d_to_o_mappings["wiktionary.org"] = "omivuha6rjzyfjjp.onion"
-- gsub callback counterpart of o_to_d_lookup (table-driven; not used by
-- dns_to_onion below, which inlines one gsub per domain instead)
d_to_o_lookup = function (m)
local k = m[1] -- see note above re: array syntax
return ( d_to_o_mappings[k] or k )
end
-- replace every known DNS domain in string i with its onion address
dns_to_onion = function (i)
local num, errs
i, num, errs = ngx.re.gsub(i, "\\bmediawiki\\.org\\b", "mldhzqz4k4lhkspx.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikidata\\.org\\b", "hrvv2p2y6hry4k3h.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikimedia\\.org\\b", "cghmyoqnuvntham2.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikimediafoundation\\.org\\b", "5l25slwovi5dpgcs.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikibooks\\.org\\b", "h2palsawrrwjmvix.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikinews\\.org\\b", "rpzswndck5mhnd6d.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikipedia\\.org\\b", "x73kmvi562rwnjju.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikiquote\\.org\\b", "rxj62jvsqqnevcyl.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikisource\\.org\\b", "4cyegyx7lnzthq4w.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikiversity\\.org\\b", "bzrrgoz6q3dw67is.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikivoyage\\.org\\b", "wsideljd73uexpwi.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwiktionary\\.org\\b", "omivuha6rjzyfjjp.onion", "io")
return i
end
-- a note for future maintainers; if we were being strictly orthogonal then
-- the replacement with ONION_ADDRESS in much of this Lua block would have to
-- be double-escaped for potential backslashes, because double-quotes;
-- however this is not needed because DNS forbids backslash; the only code
-- where this becomes evident/necessary is here, with "_RE2":
-- rewrite regex *source text* (escaped domains) to escaped onion forms
dnsre_to_onionre = function (i)
local num, errs
i, num, errs = ngx.re.gsub(i, "\\bmediawiki\\\\\\.org\\b", "mldhzqz4k4lhkspx\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikidata\\\\\\.org\\b", "hrvv2p2y6hry4k3h\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikimedia\\\\\\.org\\b", "cghmyoqnuvntham2\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikimediafoundation\\\\\\.org\\b", "5l25slwovi5dpgcs\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikibooks\\\\\\.org\\b", "h2palsawrrwjmvix\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikinews\\\\\\.org\\b", "rpzswndck5mhnd6d\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikipedia\\\\\\.org\\b", "x73kmvi562rwnjju\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikiquote\\\\\\.org\\b", "rxj62jvsqqnevcyl\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikisource\\\\\\.org\\b", "4cyegyx7lnzthq4w\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikiversity\\\\\\.org\\b", "bzrrgoz6q3dw67is\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikivoyage\\\\\\.org\\b", "wsideljd73uexpwi\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwiktionary\\\\\\.org\\b", "omivuha6rjzyfjjp\\.onion", "io")
return i
end
}
# filter the response headers en-route back to the user
# Onionifies DNS names in CORS and CSP response headers, using dns_to_onion
# from init_by_lua_block above.
header_filter_by_lua_block {
local k, v
-- ==================================================================
-- ==================================================================
-- rewrite cors/acao
-- the wildcard "*" is valid as-is and must not be rewritten
k = "Access-Control-Allow-Origin"
v = ngx.header[k]
if v and v ~= "*" then
ngx.header[k] = dns_to_onion(v)
end
-- rewrite csp
k = "Content-Security-Policy"
v = ngx.header[k]
if v then
ngx.header[k] = dns_to_onion(v)
end
-- rewrite csp (report-only)
k = "Content-Security-Policy-Report-Only"
v = ngx.header[k]
if v then
ngx.header[k] = dns_to_onion(v)
end
}
# filter the response body en-route back to the user
# deliberately empty: body rewriting is done by subs_filter, not Lua
body_filter_by_lua_block {
-- ==================================================================
-- ==================================================================
-- no debug traps
}
# csp not suppressed, will be rewritten instead, see below
# hsts suppression
# HSTS would pin browsers to the clearnet HTTPS site, so strip it
proxy_hide_header "Strict-Transport-Security";
# hpkp suppression
# upstream key pins would not match the onion's local certificate
proxy_hide_header "Public-Key-Pins";
proxy_hide_header "Public-Key-Pins-Report-Only";
# global proxy settings
proxy_read_timeout 15;
proxy_connect_timeout 15;
# SSL config
ssl_certificate /Users/alecm/src/eotk/projects.d/wikipedia.d/ssl.d/wikipedia.local.cert;
ssl_certificate_key /Users/alecm/src/eotk/projects.d/wikipedia.d/ssl.d/wikipedia.local.pem;
ssl_buffer_size 4k;
#ssl_ciphers 'EECDH+CHACHA20:EECDH+AESGCM:EECDH+AES256'; ## LibreSSL, OpenSSL 1.1.0+
ssl_ciphers 'EECDH+AESGCM:EECDH+AES256'; ## OpenSSL 1.0.1% to 1.0.2%
ssl_ecdh_curve prime256v1;
#ssl_ecdh_curve secp384r1:prime256v1; ## NGINX nginx 1.11.0 and later
ssl_prefer_server_ciphers on;
# NOTE(review): TLSv1 and TLSv1.1 are deprecated (RFC 8996); consider
# restricting to TLSv1.2+ if the client base allows
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_session_cache shared:SSL:10m;
ssl_session_timeout 10m;
# websockets: on the basis of http_upgrade, set connection_upgrade:
# empty -> empty
# default -> "upgrade"
map $http_upgrade $connection_upgrade {
default "upgrade";
"" "";
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# Plain-HTTP listener for mldhzqz4k4lhkspx.onion (mediawiki.org); the unix
# sockets are presumably the targets of tor's HiddenServicePort — TODO confirm
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
mldhzqz4k4lhkspx.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)mldhzqz4k4lhkspx\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for mldhzqz4k4lhkspx.onion -> mediawiki.org
# HTTPS listener: blacklist/politeness checks, hard-mode URI de-onionification,
# then proxy_pass to the clearnet origin with rewritten Host/Referer/Origin.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
mldhzqz4k4lhkspx.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)mldhzqz4k4lhkspx\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# hide dotfiles, raw php scripts, dumps/archives, and server status pages
if ( $uri ~* "^\\." ) { return 500; }
if ( $uri ~* "^\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
# account/login-related wiki pages are blocked over the onion
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
# flag is tested inside "location /" below; 1 = reject with 500
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}mediawiki.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}mediawiki.org";
# identity encoding so subs_filter can see/rewrite uncompressed bodies
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# read-only mirror: GET (and implicitly HEAD) only
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# Plain-HTTP listener for hrvv2p2y6hry4k3h.onion (wikidata.org)
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
hrvv2p2y6hry4k3h.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)hrvv2p2y6hry4k3h\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for hrvv2p2y6hry4k3h.onion -> wikidata.org
# HTTPS listener; structurally identical to the mediawiki.org server above,
# generated per-domain by eotk.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
hrvv2p2y6hry4k3h.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)hrvv2p2y6hry4k3h\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# hide dotfiles, raw php scripts, dumps/archives, and server status pages
if ( $uri ~* "^\\." ) { return 500; }
if ( $uri ~* "^\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
# account/login-related wiki pages are blocked over the onion
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
# flag is tested inside "location /" below; 1 = reject with 500
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikidata.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikidata.org";
# identity encoding so subs_filter can see/rewrite uncompressed bodies
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# read-only mirror: GET (and implicitly HEAD) only
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# Plain-HTTP listener for cghmyoqnuvntham2.onion (wikimedia.org)
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
cghmyoqnuvntham2.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)cghmyoqnuvntham2\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for cghmyoqnuvntham2.onion -> wikimedia.org
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
cghmyoqnuvntham2.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)cghmyoqnuvntham2\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
if ( $uri ~* "^\\." ) { return 500; }
if ( $uri ~* "^\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
# (fixed response lets a user/tool sanity-check the onion vhost + TLS)
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
-- [a-z2-7]{16} is the base32 alphabet/length of a v2 onion label
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
-- NOTE(review): onion_to_dns is defined elsewhere in the project
-- preamble (not visible here); presumably maps onion hostnames back
-- to their DNS equivalents -- confirm against the init_by_lua code.
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
# non-empty => an onion name was found and rewritten in the path;
# "last" restarts location matching against the de-onionified URI
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
# $fail_user_agent_whitelist_re was computed at server level, above
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikimedia.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikimedia.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# nginx: allowing GET in limit_except implicitly allows HEAD too
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# Cleartext (port 80) listener for 5l25slwovi5dpgcs.onion; under
# FORCE_HTTPS this vhost exists only to bounce requests to HTTPS.
# NOTE(review): listeners are unix sockets under the project's hs-*.d
# dirs -- presumably the tor onion-service side connects to these;
# confirm against the project's tor configuration.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
5l25slwovi5dpgcs.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)5l25slwovi5dpgcs\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server_name) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for 5l25slwovi5dpgcs.onion -> wikimediafoundation.org
# HTTPS (port 443) onion service: terminates TLS for
# 5l25slwovi5dpgcs.onion (and subdomains, captured into
# $servernamesubdomain) and reverse-proxies to wikimediafoundation.org.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
5l25slwovi5dpgcs.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)5l25slwovi5dpgcs\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX(review): nginx's $uri always begins with "/", so the previous
# anchors "^\\.", "^\\w+\\.php$" and "^server-status$" could never
# match; re-anchored on the leading slash so the blacklist works.
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
# flag-variable pattern: assume failure, clear on UA match; the flag
# is enforced later inside "location /"
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikimediafoundation.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikimediafoundation.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# nginx: allowing GET in limit_except implicitly allows HEAD too
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# Cleartext (port 80) listener for h2palsawrrwjmvix.onion; under
# FORCE_HTTPS this vhost exists only to bounce requests to HTTPS.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
h2palsawrrwjmvix.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)h2palsawrrwjmvix\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server_name) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for h2palsawrrwjmvix.onion -> wikibooks.org
# HTTPS (port 443) onion service: terminates TLS for
# h2palsawrrwjmvix.onion (and subdomains, captured into
# $servernamesubdomain) and reverse-proxies to wikibooks.org.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
h2palsawrrwjmvix.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)h2palsawrrwjmvix\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX(review): nginx's $uri always begins with "/", so the previous
# anchors "^\\.", "^\\w+\\.php$" and "^server-status$" could never
# match; re-anchored on the leading slash so the blacklist works.
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
# flag-variable pattern: assume failure, clear on UA match; the flag
# is enforced later inside "location /"
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikibooks.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikibooks.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# nginx: allowing GET in limit_except implicitly allows HEAD too
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# Cleartext (port 80) listener for rpzswndck5mhnd6d.onion; under
# FORCE_HTTPS this vhost exists only to bounce requests to HTTPS.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
rpzswndck5mhnd6d.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)rpzswndck5mhnd6d\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server_name) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for rpzswndck5mhnd6d.onion -> wikinews.org
# HTTPS (port 443) onion service: terminates TLS for
# rpzswndck5mhnd6d.onion (and subdomains, captured into
# $servernamesubdomain) and reverse-proxies to wikinews.org.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
rpzswndck5mhnd6d.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)rpzswndck5mhnd6d\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX(review): nginx's $uri always begins with "/", so the previous
# anchors "^\\.", "^\\w+\\.php$" and "^server-status$" could never
# match; re-anchored on the leading slash so the blacklist works.
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
# flag-variable pattern: assume failure, clear on UA match; the flag
# is enforced later inside "location /"
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikinews.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikinews.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# nginx: allowing GET in limit_except implicitly allows HEAD too
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# Cleartext (port 80) listener for x73kmvi562rwnjju.onion; under
# FORCE_HTTPS this vhost exists only to bounce requests to HTTPS.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
x73kmvi562rwnjju.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)x73kmvi562rwnjju\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server_name) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for x73kmvi562rwnjju.onion -> wikipedia.org
# HTTPS (port 443) onion service: terminates TLS for
# x73kmvi562rwnjju.onion (and subdomains, captured into
# $servernamesubdomain) and reverse-proxies to wikipedia.org.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
x73kmvi562rwnjju.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)x73kmvi562rwnjju\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX(review): nginx's $uri always begins with "/", so the previous
# anchors "^\\.", "^\\w+\\.php$" and "^server-status$" could never
# match; re-anchored on the leading slash so the blacklist works.
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
# flag-variable pattern: assume failure, clear on UA match; the flag
# is enforced later inside "location /"
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikipedia.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikipedia.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# nginx: allowing GET in limit_except implicitly allows HEAD too
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# Cleartext (port 80) listener for rxj62jvsqqnevcyl.onion; under
# FORCE_HTTPS this vhost exists only to bounce requests to HTTPS.
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
rxj62jvsqqnevcyl.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)rxj62jvsqqnevcyl\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server_name) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for rxj62jvsqqnevcyl.onion -> wikiquote.org
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
rxj62jvsqqnevcyl.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)rxj62jvsqqnevcyl\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX(review): nginx's $uri always begins with "/", so the previous
# anchors "^\\.", "^\\w+\\.php$" and "^server-status$" could never
# match; re-anchored on the leading slash so the blacklist works.
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
# flag-variable pattern: assume failure, clear on UA match; the flag
# is enforced later inside "location /"
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikiquote.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikiquote.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
server {
# plain-HTTP (port 80) listener for this onion; it serves no content
# itself and exists only to bounce every request to HTTPS (below)
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
4cyegyx7lnzthq4w.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)4cyegyx7lnzthq4w\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for 4cyegyx7lnzthq4w.onion -> wikisource.org
server {
# HTTPS onion service: terminates SSL for 4cyegyx7lnzthq4w.onion (and
# its subdomains) and reverse-proxies the request to wikisource.org
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
4cyegyx7lnzthq4w.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)4cyegyx7lnzthq4w\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX: nginx $uri is the normalized request path and always begins with
# "/", so patterns anchored as "^\." / "^\w+\.php$" / "^server-status$"
# could never match anything; the leading "/" is now part of each anchor
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikisource.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikisource.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
server {
# plain-HTTP (port 80) listener for this onion; it serves no content
# itself and exists only to bounce every request to HTTPS (below)
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
bzrrgoz6q3dw67is.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)bzrrgoz6q3dw67is\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for bzrrgoz6q3dw67is.onion -> wikiversity.org
server {
# HTTPS onion service: terminates SSL for bzrrgoz6q3dw67is.onion (and
# its subdomains) and reverse-proxies the request to wikiversity.org
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
bzrrgoz6q3dw67is.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)bzrrgoz6q3dw67is\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX: nginx $uri is the normalized request path and always begins with
# "/", so patterns anchored as "^\." / "^\w+\.php$" / "^server-status$"
# could never match anything; the leading "/" is now part of each anchor
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikiversity.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikiversity.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
server {
# plain-HTTP (port 80) listener for this onion; it serves no content
# itself and exists only to bounce every request to HTTPS (below)
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
wsideljd73uexpwi.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)wsideljd73uexpwi\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for wsideljd73uexpwi.onion -> wikivoyage.org
server {
# HTTPS onion service: terminates SSL for wsideljd73uexpwi.onion (and
# its subdomains) and reverse-proxies the request to wikivoyage.org
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
wsideljd73uexpwi.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)wsideljd73uexpwi\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX: nginx $uri is the normalized request path and always begins with
# "/", so patterns anchored as "^\." / "^\w+\.php$" / "^server-status$"
# could never match anything; the leading "/" is now part of each anchor
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wikivoyage.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wikivoyage.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
server {
# plain-HTTP (port 80) listener for this onion; it serves no content
# itself and exists only to bounce every request to HTTPS (below)
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
omivuha6rjzyfjjp.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)omivuha6rjzyfjjp\\.onion$
;
# suppress tor2web traffic; "let them use clearnet"
if ( $http_x_tor2web ) {
return 403 "This action is not supported over Onion yet, sorry.";
}
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for omivuha6rjzyfjjp.onion -> wiktionary.org
server {
# HTTPS onion service: terminates SSL for omivuha6rjzyfjjp.onion (and
# its subdomains) and reverse-proxies the request to wiktionary.org
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
omivuha6rjzyfjjp.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)omivuha6rjzyfjjp\\.onion$
;
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# blacklists (generated)
# no user_agent_blacklist
# no user_agent_blacklist_re
# no referer_blacklist
# no referer_blacklist_re
# no origin_blacklist
# no origin_blacklist_re
# no host_blacklist
# no host_blacklist_re
# no path_blacklist
# check path_blacklist_re (generated)
# FIX: nginx $uri is the normalized request path and always begins with
# "/", so patterns anchored as "^\." / "^\w+\.php$" / "^server-status$"
# could never match anything; the leading "/" is now part of each anchor
if ( $uri ~* "^/\\." ) { return 500; }
if ( $uri ~* "^/\\w+\\.php$" ) { return 500; }
if ( $uri ~* "\\.(sql|gz|tgz|zip|bz2)$" ) { return 500; }
if ( $uri ~* "^/server-status$" ) { return 500; }
# no param_blacklist
# no param_blacklist_re
# polite blocks (generated)
# polite block for suppress_tor2web (generated)
if ( $http_x_tor2web ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_referer
# no block_referer_re
# no block_origin
# no block_origin_re
# no block_host
# polite block for block_host_re (generated)
if ( $http_host ~* "^(login|donate)\\." ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_path
# polite block for block_path_re (generated)
if ( $uri ~* "/User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $uri ~* "/Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# no block_location
# no block_location_re
# no block_param
# polite block for block_param_re (generated)
if ( $arg_title ~* "^User:" ) { return 403 "This action is not supported over Onion yet, sorry."; }
if ( $arg_title ~* "^Special:(UserLogin|(Create|Merge)Account|RenameRequest)\\b" ) { return 403 "This action is not supported over Onion yet, sorry."; }
# redirects (generated)
# no redirect_host
# no redirect_path
# no redirect_host_csv
# no redirect_path_csv
# no redirect_location_csv
# whitelists (generated)
# no user_agent_whitelist
# check user_agent_whitelist_re (generated)
set $fail_user_agent_whitelist_re 1;
if ( $http_user_agent ~* "^Mozilla.*Gecko" ) { set $fail_user_agent_whitelist_re 0; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie_lock cookie setting
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no hardcoded_endpoints
#==================================================================
# ---- BEGIN HARD MODE CODE ----
# URI paths may contain onionified hostnames which need rewriting
# before passing upwards to the origin; this is a consequence of
# "hard-mode" brute-force rewriting, and needs gentle unpicking:
set $deonionified_uri '';
# There is an irreducible problem: what if the URI origin path
# *literally* includes the name of one of our onions? Well, it
# will be re-written to the corresponding DNS domain name. This is
# a small price to pay for the benefits of "hard-mode" rewrites on
# complex websites, but we can reduce the pain a tiny bit by
# making the in-path comparisons case-sensitive to lowercase, and
# document the issue with that workaround
if ( $uri ~ "\\.onion" ) { # cheapest, static, case-sensitive test, here...
set_by_lua_block $deonionified_uri {
local old_uri = ngx.var.uri
-- more expensive case-sensitive test, here...
local m, err = ngx.re.match(old_uri, "\\b[a-z2-7]{16}\\.onion\\b", "o")
if not m then
return "" -- nothing to attempt to rewrite, quick return
end
-- attempt rewrites (function is case-insensitive, hence...)
local new_uri = onion_to_dns(old_uri)
if new_uri == old_uri then -- nothing changed, quick return
return ""
end
return new_uri
}
}
if ( $deonionified_uri ) {
set $new_uri $deonionified_uri; # swap
set $deonionified_uri ''; # prevent revisiting
rewrite ^ $new_uri last;
}
# ---- END HARD MODE CODE ----
#------------------------------------------------------------------
#==================================================================
# for traffic
location / {
# ---- BEGIN GENERATED CODE ---- -*- awk -*-
# whitelist checks (generated)
# no user_agent_whitelist
# check success of user_agent_whitelist_re (generated)
if ( $fail_user_agent_whitelist_re ) { return 500; }
# no referer_whitelist
# no referer_whitelist_re
# no origin_whitelist
# no origin_whitelist_re
# no host_whitelist
# no host_whitelist_re
# no path_whitelist
# no path_whitelist_re
# no param_whitelist
# no param_whitelist_re
# ---- END GENERATED CODE ----
# no cookie-lock checks
proxy_pass "$scheme://${servernamesubdomain}wiktionary.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header X-From-Onion 1;
proxy_set_header Host "${servernamesubdomain}wiktionary.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
}
}
# header purge
# strip response headers that identify the upstream server software
# or caching infrastructure, reducing what the onion site leaks
more_clear_headers "Age";
more_clear_headers "Server";
more_clear_headers "Via";
more_clear_headers "X-From-Nginx";
more_clear_headers "X-NA";
more_clear_headers "X-Powered-By";
more_clear_headers "X-Request-Id";
more_clear_headers "X-Runtime";
more_clear_headers "X-Varnish";
}
# NOTE(review): from here the gist switches to a separate file -- an
# onionbalance daemon configuration; the keys below are not nginx
LOG_LEVEL: info
TOR_ADDRESS: 127.0.0.1
TOR_PORT: 9055
# intervals below are presumably in seconds -- confirm against the
# onionbalance documentation for this version
REFRESH_INTERVAL: 300
PUBLISH_CHECK_INTERVAL: 150
INITIAL_DELAY: 90
STATUS_SOCKET_LOCATION: /Users/alecm/src/eotk/onionbalance.d/ob-status.sock
DESCRIPTOR_VALIDITY_PERIOD: 86400
DESCRIPTOR_OVERLAP_PERIOD: 3600
DESCRIPTOR_UPLOAD_PERIOD: 1200
# each service pairs one frontend onion key with the backend instance
# onions that actually carry the traffic; all services here share the
# same two backend instances
services:
# PROJECT wikipedia
# mediawiki.org => mldhzqz4k4lhkspx.onion
- key: /Users/alecm/src/eotk/secrets.d/mldhzqz4k4lhkspx.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikibooks.org => h2palsawrrwjmvix.onion
- key: /Users/alecm/src/eotk/secrets.d/h2palsawrrwjmvix.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikidata.org => hrvv2p2y6hry4k3h.onion
- key: /Users/alecm/src/eotk/secrets.d/hrvv2p2y6hry4k3h.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikimedia.org => cghmyoqnuvntham2.onion
- key: /Users/alecm/src/eotk/secrets.d/cghmyoqnuvntham2.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikimediafoundation.org => 5l25slwovi5dpgcs.onion
- key: /Users/alecm/src/eotk/secrets.d/5l25slwovi5dpgcs.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikinews.org => rpzswndck5mhnd6d.onion
- key: /Users/alecm/src/eotk/secrets.d/rpzswndck5mhnd6d.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikipedia.org => x73kmvi562rwnjju.onion
- key: /Users/alecm/src/eotk/secrets.d/x73kmvi562rwnjju.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikiquote.org => rxj62jvsqqnevcyl.onion
- key: /Users/alecm/src/eotk/secrets.d/rxj62jvsqqnevcyl.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikisource.org => 4cyegyx7lnzthq4w.onion
- key: /Users/alecm/src/eotk/secrets.d/4cyegyx7lnzthq4w.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikiversity.org => bzrrgoz6q3dw67is.onion
- key: /Users/alecm/src/eotk/secrets.d/bzrrgoz6q3dw67is.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wikivoyage.org => wsideljd73uexpwi.onion
- key: /Users/alecm/src/eotk/secrets.d/wsideljd73uexpwi.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# wiktionary.org => omivuha6rjzyfjjp.onion
- key: /Users/alecm/src/eotk/secrets.d/omivuha6rjzyfjjp.key
instances:
- address: 'cpqeyjzxm65ddoeu'
- address: 'thhcdtfzgqekjkmn'
# -*- conf -*-
# eotk (c) 2017 Alec Muffett
# NOTE(review): from here the gist switches to another separate file --
# a per-worker torrc for the wikipedia project's hs-1 instance
# template note: here we use TOR_DIR not PROJECT_DIR because of the
# relocation of Tor directories under `softmap`
DataDirectory /Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d
ControlPort unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/tor-control.sock
PidFile /Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/tor.pid
Log notice file /Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/tor.log
# scrub potentially sensitive strings from log output
SafeLogging 1
HeartbeatPeriod 60 minutes
LongLivedPorts 80,443
RunAsDaemon 1
# use single onions
# single-onion mode trades the *service's* location anonymity for
# lower latency; connecting clients remain anonymous
SocksPort 0 # have to disable this for single onions
HiddenServiceSingleHopMode 1 # yep, i want single onions
HiddenServiceNonAnonymousMode 1 # yes, really, honest, i swear
# softmap
HiddenServiceDir /Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d
# hand traffic to the local web server over unix sockets, not TCP
HiddenServicePort 80 unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock
HiddenServicePort 443 unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock
HiddenServiceNumIntroductionPoints 3
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment