Skip to content

Instantly share code, notes, and snippets.

@alecmuffett
Last active November 27, 2017 23:31
Show Gist options
  • Star 2 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save alecmuffett/ea81f1978ae36fce85d5c3e909d52566 to your computer and use it in GitHub Desktop.
Sample EOTK Output for Wikipedia
# onionbalance configuration -- EOTK sample output for the "wikipedia" project
# NOTE(review): the interval/period values appear to be seconds -- confirm
# against the onionbalance documentation before tuning
LOG_LEVEL: info
# local tor control endpoint used by onionbalance
TOR_ADDRESS: 127.0.0.1
TOR_PORT: 9055
REFRESH_INTERVAL: 600
PUBLISH_CHECK_INTERVAL: 300
INITIAL_DELAY: 60
STATUS_SOCKET_LOCATION: /Users/alecm/src/eotk/onionbalance.d/ob-status.sock
# descriptor lifecycle periods
DESCRIPTOR_VALIDITY_PERIOD: 86400
DESCRIPTOR_OVERLAP_PERIOD: 3600
DESCRIPTOR_UPLOAD_PERIOD: 3600
# one service per public onion; every service below is backed by the same
# two worker instances (4uqww2fr4arz3eb2, 55asaglhcqxdr7y6), matching the
# "softmap" markers in the nginx configuration that follows
services:
# PROJECT wikipedia
# mediawiki.org => notrmquxg7nnaxky.onion
- key: /Users/alecm/src/eotk/secrets.d/notrmquxg7nnaxky.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikibooks.org => bhenpxugrhn7nuwi.onion
- key: /Users/alecm/src/eotk/secrets.d/bhenpxugrhn7nuwi.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikidata.org => unmojoyfpj6dssbq.onion
- key: /Users/alecm/src/eotk/secrets.d/unmojoyfpj6dssbq.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikimedia.org => muqyvgz7xjvzsili.onion
- key: /Users/alecm/src/eotk/secrets.d/muqyvgz7xjvzsili.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikimediafoundation.org => 2wmwaeiykper3ybi.onion
- key: /Users/alecm/src/eotk/secrets.d/2wmwaeiykper3ybi.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikinews.org => easbbp25gbrvq2r3.onion
- key: /Users/alecm/src/eotk/secrets.d/easbbp25gbrvq2r3.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikipedia.org => jcvn6enoyhqcblek.onion
- key: /Users/alecm/src/eotk/secrets.d/jcvn6enoyhqcblek.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikiquote.org => 65da4nqqcmdjvqhf.onion
- key: /Users/alecm/src/eotk/secrets.d/65da4nqqcmdjvqhf.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikisource.org => wlvkjriqk2e7dkcd.onion
- key: /Users/alecm/src/eotk/secrets.d/wlvkjriqk2e7dkcd.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikiversity.org => rrzbnmeov7rbwoah.onion
- key: /Users/alecm/src/eotk/secrets.d/rrzbnmeov7rbwoah.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wikivoyage.org => fgvwe5aoui36adpm.onion
- key: /Users/alecm/src/eotk/secrets.d/fgvwe5aoui36adpm.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# wiktionary.org => x2jn2tulp4snfn24.onion
- key: /Users/alecm/src/eotk/secrets.d/x2jn2tulp4snfn24.key
instances:
- address: '4uqww2fr4arz3eb2'
- address: '55asaglhcqxdr7y6'
# -*- awk -*-
# eotk (c) 2017 Alec Muffett
# EMACS awk mode works quite well for nginx configs
# logs and pids
pid /Users/alecm/src/eotk/projects.d/wikipedia.d/nginx.pid;
error_log /Users/alecm/src/eotk/projects.d/wikipedia.d/log.d/nginx-error.log error;
# TODO: notes for custom 403 error-handling pages:
# https://www.cyberciti.biz/faq/unix-linux-nginx-custom-error-403-page-configuration/
# https://nginx.org/en/docs/http/ngx_http_core_module.html#error_page
# performance
worker_processes auto; # softmap
# per-worker cap on open file descriptors
worker_rlimit_nofile 256;
events {
# per-worker connection ceiling; sized to match worker_rlimit_nofile above
worker_connections 256;
}
http {
# nginx fails without large enough buckets (sigh)
map_hash_bucket_size 128;
# dns for proxy (sigh)
# we should do `ipv6=off` here, but compat issues, hence NGINX_RESOLVER_FLAGS
# NOTE(review): resolver 8.8.8.8 sends upstream lookups to Google DNS;
# consider a local resolver if query privacy matters for this deployment
resolver 8.8.8.8 valid=15s ;
resolver_timeout 15s;
# we walk a line between keeping it small and flooding resources...
proxy_buffering on;
# for initial; impacts SSL header
proxy_buffer_size 16k;
# for rest of response
proxy_buffers 32 16k;
# how much can be busy sending to client?
proxy_busy_buffers_size 32k;
# in case we want to start spooling responses locally
proxy_temp_path /tmp/nginx-proxy-wikipedia;
proxy_max_temp_file_size 256m;
proxy_temp_file_write_size 16k;
# nginx caching static responses for 60 seconds
# - this is a lightweight cache to reduce "storms", hence the global
# approach of "cache everything for a small number of seconds"
# https://nginx.org/en/docs/http/ngx_http_proxy_module.html
proxy_cache_path /tmp/nginx-cache-wikipedia levels=1:2 keys_zone=wikipedia:64m;
proxy_cache wikipedia;
proxy_cache_min_uses 1;
proxy_cache_revalidate on;
proxy_cache_use_stale timeout updating;
proxy_cache_valid any 60s; # "any" includes 404s, etc
# content-types to not cache
# (both maps below only contain "default 0", i.e. nothing is currently
# excluded from the cache; they exist as hook points for exclusions)
map $http_content_type $no_cache_content_type {
default 0;
}
# hosts not to cache
map $http_host $no_cache_host {
hostnames;
default 0;
}
# so, should we skip caching this stuff for some reason?
proxy_no_cache $no_cache_content_type $no_cache_host;
proxy_cache_bypass $no_cache_content_type $no_cache_host;
# logs
access_log /Users/alecm/src/eotk/projects.d/wikipedia.d/log.d/nginx-access.log;
# global settings
server_tokens off;
# allow/deny (first wins)
# only connections arriving over unix-domain sockets (i.e. from tor) pass
allow "unix:";
deny all;
# rewrite these content types; text/html is implicit
subs_filter_types
application/javascript
application/json
application/x-javascript
text/css
text/javascript
text/xml
;
# subs_filter: these patterns bear some explanation; the goal is to
# work regular expressions really hard in order to minimise the
# number of expressions which are used in the basic config, so the
# basic pattern is to capture zero/more "sub." in "//sub.foo.com"
# and interpolate that into "//sub.xxxxxxxx.onion"; so far?
# but it turns out that some JSON libraries like to "escape" the
# forward slashes in JSON content, leading to input like (literal)
# "http:\/\/sub.foo.com\/foo.html" - so you need to add the
# backslashes, but then you need to escape the backslashes, except
# they need double-escaping in the regexp because of string
# interpolation; hence 4x backslash -> 1x matched character
# likewise we use the "_RE2" form of the re-escaped domain name in
# order to coerce the regexp to match literal dots, not wildcards.
# there seems to be some sort of shortcut at play here; the trailing
# "\\b" also seems to work as "\b" however that would apparently
# break the double-escaping that is necessary/works everywhere else
# in subs_filter.
# also, regrettably, named capture groups appear not to work, we're
# fortunate that there appear not to be more than 9 capture groups
# by default, lest "$1" bleed into the subsequent digits of an onion
# address: $1234567abcdefghij.onion
# one subs_filter stanza per mapped domain; flags are g (replace all
# occurrences), i (case-insensitive) and r (pattern is a regex) --
# see ngx_http_substitutions_filter_module
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikipedia\\.org\\b
$1/$2jcvn6enoyhqcblek.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikimedia\\.org\\b
$1/$2muqyvgz7xjvzsili.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikimediafoundation\\.org\\b
$1/$22wmwaeiykper3ybi.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?mediawiki\\.org\\b
$1/$2notrmquxg7nnaxky.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikibooks\\.org\\b
$1/$2bhenpxugrhn7nuwi.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikidata\\.org\\b
$1/$2unmojoyfpj6dssbq.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikinews\\.org\\b
$1/$2easbbp25gbrvq2r3.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikiquote\\.org\\b
$1/$265da4nqqcmdjvqhf.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikisource\\.org\\b
$1/$2wlvkjriqk2e7dkcd.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikiversity\\.org\\b
$1/$2rrzbnmeov7rbwoah.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wikivoyage\\.org\\b
$1/$2fgvwe5aoui36adpm.onion
gir
;
subs_filter
(/|\\\\/\\\\)/(([-0-9a-z]+\\.)+)?wiktionary\\.org\\b
$1/$2x2jn2tulp4snfn24.onion
gir
;
# no foreignmap subs
# fix the cookies
# rewrite the Domain attribute of upstream Set-Cookie headers so that
# cookies scoped to a dns domain re-scope onto the matching onion
proxy_cookie_domain
~^(.*?)\\bwikipedia\\.org$
$1jcvn6enoyhqcblek.onion
;
proxy_cookie_domain
~^(.*?)\\bwikimedia\\.org$
$1muqyvgz7xjvzsili.onion
;
proxy_cookie_domain
~^(.*?)\\bwikimediafoundation\\.org$
$12wmwaeiykper3ybi.onion
;
proxy_cookie_domain
~^(.*?)\\bmediawiki\\.org$
$1notrmquxg7nnaxky.onion
;
proxy_cookie_domain
~^(.*?)\\bwikibooks\\.org$
$1bhenpxugrhn7nuwi.onion
;
proxy_cookie_domain
~^(.*?)\\bwikidata\\.org$
$1unmojoyfpj6dssbq.onion
;
proxy_cookie_domain
~^(.*?)\\bwikinews\\.org$
$1easbbp25gbrvq2r3.onion
;
proxy_cookie_domain
~^(.*?)\\bwikiquote\\.org$
$165da4nqqcmdjvqhf.onion
;
proxy_cookie_domain
~^(.*?)\\bwikisource\\.org$
$1wlvkjriqk2e7dkcd.onion
;
proxy_cookie_domain
~^(.*?)\\bwikiversity\\.org$
$1rrzbnmeov7rbwoah.onion
;
proxy_cookie_domain
~^(.*?)\\bwikivoyage\\.org$
$1fgvwe5aoui36adpm.onion
;
proxy_cookie_domain
~^(.*?)\\bwiktionary\\.org$
$1x2jn2tulp4snfn24.onion
;
# fix the header-redirects
# proxy_redirect rewrites upstream redirect headers (e.g. Location) so
# that server-side redirects keep the client inside the onion namespace
proxy_redirect
~*^(.*?)\\bwikipedia\\.org\\b(.*)$
$1jcvn6enoyhqcblek.onion$2
;
proxy_redirect
~*^(.*?)\\bwikimedia\\.org\\b(.*)$
$1muqyvgz7xjvzsili.onion$2
;
proxy_redirect
~*^(.*?)\\bwikimediafoundation\\.org\\b(.*)$
$12wmwaeiykper3ybi.onion$2
;
proxy_redirect
~*^(.*?)\\bmediawiki\\.org\\b(.*)$
$1notrmquxg7nnaxky.onion$2
;
proxy_redirect
~*^(.*?)\\bwikibooks\\.org\\b(.*)$
$1bhenpxugrhn7nuwi.onion$2
;
proxy_redirect
~*^(.*?)\\bwikidata\\.org\\b(.*)$
$1unmojoyfpj6dssbq.onion$2
;
proxy_redirect
~*^(.*?)\\bwikinews\\.org\\b(.*)$
$1easbbp25gbrvq2r3.onion$2
;
proxy_redirect
~*^(.*?)\\bwikiquote\\.org\\b(.*)$
$165da4nqqcmdjvqhf.onion$2
;
proxy_redirect
~*^(.*?)\\bwikisource\\.org\\b(.*)$
$1wlvkjriqk2e7dkcd.onion$2
;
proxy_redirect
~*^(.*?)\\bwikiversity\\.org\\b(.*)$
$1rrzbnmeov7rbwoah.onion$2
;
proxy_redirect
~*^(.*?)\\bwikivoyage\\.org\\b(.*)$
$1fgvwe5aoui36adpm.onion$2
;
proxy_redirect
~*^(.*?)\\bwiktionary\\.org\\b(.*)$
$1x2jn2tulp4snfn24.onion$2
;
# o_to_d_lookup -> if cannot remap, return input. note: old versions
# of lua-plugin cannot cope with code like o_to_d_mappings[o[1]]
# because of `long bracket syntax`; the `[o[` freaks it out.
# See: https://github.com/openresty/lua-nginx-module/issues/748
init_by_lua_block {
-- one-off definitions evaluated at nginx startup; the functions defined
-- here are called from set_by_lua_block and the header/body filters
-- helper functions for elsewhere
slog = function (s) -- in case of manual debugging
ngx.log(ngx.ERR, s)
return
end
-- true iff string s ends with suffix x
has_suffix = function (s, x)
return string.sub(s, -string.len(x)) == x
end
-- mapping onions to dns
o_to_d_mappings = {}
o_to_d_mappings["jcvn6enoyhqcblek.onion"] = "wikipedia.org"
o_to_d_mappings["muqyvgz7xjvzsili.onion"] = "wikimedia.org"
o_to_d_mappings["2wmwaeiykper3ybi.onion"] = "wikimediafoundation.org"
o_to_d_mappings["notrmquxg7nnaxky.onion"] = "mediawiki.org"
o_to_d_mappings["bhenpxugrhn7nuwi.onion"] = "wikibooks.org"
o_to_d_mappings["unmojoyfpj6dssbq.onion"] = "wikidata.org"
o_to_d_mappings["easbbp25gbrvq2r3.onion"] = "wikinews.org"
o_to_d_mappings["65da4nqqcmdjvqhf.onion"] = "wikiquote.org"
o_to_d_mappings["wlvkjriqk2e7dkcd.onion"] = "wikisource.org"
o_to_d_mappings["rrzbnmeov7rbwoah.onion"] = "wikiversity.org"
o_to_d_mappings["fgvwe5aoui36adpm.onion"] = "wikivoyage.org"
o_to_d_mappings["x2jn2tulp4snfn24.onion"] = "wiktionary.org"
-- gsub callback: map a matched onion back to its dns name, else identity
o_to_d_lookup = function (m)
local k = m[1] -- see note above re: array syntax
return ( o_to_d_mappings[k] or k )
end
-- rewrite every 16-char onion address in i back to its dns name;
-- safe to call with nil/empty input (returned unchanged)
onion_to_dns = function (i)
if i == nil or i == "" then
return i
end
local o, num, errs = ngx.re.gsub(i, "\\b([a-z2-7]{16}\\.onion)\\b", o_to_d_lookup, "io")
return o
end
-- mapping dns to onions, for experimentation
d_to_o_mappings = {}
d_to_o_mappings["wikipedia.org"] = "jcvn6enoyhqcblek.onion"
d_to_o_mappings["wikimedia.org"] = "muqyvgz7xjvzsili.onion"
d_to_o_mappings["wikimediafoundation.org"] = "2wmwaeiykper3ybi.onion"
d_to_o_mappings["mediawiki.org"] = "notrmquxg7nnaxky.onion"
d_to_o_mappings["wikibooks.org"] = "bhenpxugrhn7nuwi.onion"
d_to_o_mappings["wikidata.org"] = "unmojoyfpj6dssbq.onion"
d_to_o_mappings["wikinews.org"] = "easbbp25gbrvq2r3.onion"
d_to_o_mappings["wikiquote.org"] = "65da4nqqcmdjvqhf.onion"
d_to_o_mappings["wikisource.org"] = "wlvkjriqk2e7dkcd.onion"
d_to_o_mappings["wikiversity.org"] = "rrzbnmeov7rbwoah.onion"
d_to_o_mappings["wikivoyage.org"] = "fgvwe5aoui36adpm.onion"
d_to_o_mappings["x2jn2tulp4snfn24.onion"] = "wiktionary.org"
-- gsub callback: map a matched dns name to its onion, else identity
d_to_o_lookup = function (m)
local k = m[1] -- see note above re: array syntax
return ( d_to_o_mappings[k] or k )
end
-- rewrite every known dns name in i to its onion equivalent
-- NOTE(review): unlike onion_to_dns() there is no nil/empty guard here;
-- all visible callers test for truthiness first, but new callers must too
dns_to_onion = function (i)
local num, errs
i, num, errs = ngx.re.gsub(i, "\\bwikipedia\\.org\\b", "jcvn6enoyhqcblek.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikimedia\\.org\\b", "muqyvgz7xjvzsili.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikimediafoundation\\.org\\b", "2wmwaeiykper3ybi.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bmediawiki\\.org\\b", "notrmquxg7nnaxky.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikibooks\\.org\\b", "bhenpxugrhn7nuwi.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikidata\\.org\\b", "unmojoyfpj6dssbq.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikinews\\.org\\b", "easbbp25gbrvq2r3.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikiquote\\.org\\b", "65da4nqqcmdjvqhf.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikisource\\.org\\b", "wlvkjriqk2e7dkcd.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikiversity\\.org\\b", "rrzbnmeov7rbwoah.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikivoyage\\.org\\b", "fgvwe5aoui36adpm.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwiktionary\\.org\\b", "x2jn2tulp4snfn24.onion", "io")
return i
end
-- a note for future maintainers; if we were being strictly orthogonal then
-- the replacement with ONION_ADDRESS in much of this Lua block would have to
-- be double-escaped for potential backslashes, because double-quotes;
-- however this is not needed because DNS forbids backslash; the only code
-- where this becomes evident/necessary is here, with "_RE2":
-- rewrite regex-escaped dns literals (e.g. "wikipedia\.org") into their
-- regex-escaped onion equivalents, for regexes embedded in JS/JSON bodies
dnsre_to_onionre = function (i)
local num, errs
i, num, errs = ngx.re.gsub(i, "\\bwikipedia\\\\\\.org\\b", "jcvn6enoyhqcblek\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikimedia\\\\\\.org\\b", "muqyvgz7xjvzsili\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikimediafoundation\\\\\\.org\\b", "2wmwaeiykper3ybi\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bmediawiki\\\\\\.org\\b", "notrmquxg7nnaxky\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikibooks\\\\\\.org\\b", "bhenpxugrhn7nuwi\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikidata\\\\\\.org\\b", "unmojoyfpj6dssbq\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikinews\\\\\\.org\\b", "easbbp25gbrvq2r3\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikiquote\\\\\\.org\\b", "65da4nqqcmdjvqhf\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikisource\\\\\\.org\\b", "wlvkjriqk2e7dkcd\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikiversity\\\\\\.org\\b", "rrzbnmeov7rbwoah\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwikivoyage\\\\\\.org\\b", "fgvwe5aoui36adpm\\.onion", "io")
i, num, errs = ngx.re.gsub(i, "\\bwiktionary\\\\\\.org\\b", "x2jn2tulp4snfn24\\.onion", "io")
return i
end
}
# filter the response headers en-route back to the user
header_filter_by_lua_block {
-- is this javascript/json? if so, extra processing:
-- 1) set a processing flag to pick up in body_filter_by_lua_block
-- 2) invalidate content-length, because we will change it
-- NOTE(review): the comparisons below are exact matches; a value such as
-- "text/css; charset=utf-8" would NOT set the flag -- confirm intended
local k, v
k = "Content-Type"
v = ngx.header[k]
if v == "application/javascript" or
v == "application/json" or
v == "application/x-javascript" or
v == "text/css" or
v == "text/javascript" then
ngx.ctx.needs_extra_processing = 1
ngx.header.content_length = nil
end
-- no extra_processing_csv checks
-- rewrite cors/acao
-- ("*" is left alone: a wildcard origin needs no domain rewriting)
k = "Access-Control-Allow-Origin"
v = ngx.header[k]
if v and v ~= "*" then
ngx.header[k] = dns_to_onion(v)
end
-- rewrite csp
k = "Content-Security-Policy"
v = ngx.header[k]
if v then
ngx.header[k] = dns_to_onion(v)
end
-- rewrite csp (report-only)
k = "Content-Security-Policy-Report-Only"
v = ngx.header[k]
if v then
ngx.header[k] = dns_to_onion(v)
end
}
# filter the response body en-route back to the user
body_filter_by_lua_block {
-- rather than blindly replacing "foo.com" with "foo.onion" everywhere,
-- instead we restrict such brute-force replacements to JS and JSON ...
-- (the flag is set per-request by header_filter_by_lua_block above)
if ngx.ctx.needs_extra_processing == 1 then
local chunk = ngx.arg[1]
-- subs_filter picked up the "//"-anchored strings; now we sub the rest
chunk = dns_to_onion(chunk)
-- and we sub the basic "foo\.com" regular-expressions, too
chunk = dnsre_to_onionre(chunk)
-- more complex regular expressions are out of scope.
ngx.arg[1] = chunk
end
-- no debug traps
}
# csp not suppressed, will be rewritten instead, see below
# hsts suppression
# (HSTS is stripped because the onion certificates here are self-managed
# and HSTS pinning against the dns site would break the onion mirror)
proxy_hide_header "Strict-Transport-Security";
# hpkp suppression
proxy_hide_header "Public-Key-Pins";
proxy_hide_header "Public-Key-Pins-Report-Only";
# global proxy settings
proxy_read_timeout 15;
proxy_connect_timeout 15;
# SSL config
ssl_certificate /Users/alecm/src/eotk/projects.d/wikipedia.d/ssl.d/wikipedia.local.cert;
ssl_certificate_key /Users/alecm/src/eotk/projects.d/wikipedia.d/ssl.d/wikipedia.local.pem;
ssl_buffer_size 4k;
#ssl_ciphers 'EECDH+CHACHA20:EECDH+AESGCM:EECDH+AES256'; ## LibreSSL, OpenSSL 1.1.0+
ssl_ciphers 'EECDH+AESGCM:EECDH+AES256'; ## OpenSSL 1.0.1% to 1.0.2%
ssl_ecdh_curve prime256v1;
#ssl_ecdh_curve secp384r1:prime256v1; ## NGINX nginx 1.11.0 and later
ssl_prefer_server_ciphers on;
# NOTE(review): TLSv1 and TLSv1.1 are long-deprecated (RFC 8996);
# consider restricting to TLSv1.2 if client compatibility permits
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_session_cache shared:SSL:10m;
ssl_session_timeout 10m;
# websockets: on the basis of http_upgrade, set connection_upgrade:
# empty -> empty
# default -> "upgrade"
map $http_upgrade $connection_upgrade {
default "upgrade";
"" "";
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
server {
# listeners are unix sockets; tor (hs-1, hs-2) forwards onion traffic here
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
jcvn6enoyhqcblek.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)jcvn6enoyhqcblek\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for jcvn6enoyhqcblek.onion -> wikipedia.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
jcvn6enoyhqcblek.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)jcvn6enoyhqcblek\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
# forward to the real site, carrying any captured subdomain prefix over
proxy_pass "$scheme://${servernamesubdomain}wikipedia.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikipedia.org";
# identity: disable upstream compression so subs_filter can see the text
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# NOTE(review): this stanza mirrors the wikipedia.org server pair;
# only the onion address and upstream domain differ
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
muqyvgz7xjvzsili.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)muqyvgz7xjvzsili\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for muqyvgz7xjvzsili.onion -> wikimedia.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
muqyvgz7xjvzsili.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)muqyvgz7xjvzsili\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikimedia.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikimedia.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# NOTE(review): this stanza mirrors the wikipedia.org server pair;
# only the onion address and upstream domain differ
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
2wmwaeiykper3ybi.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)2wmwaeiykper3ybi\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for 2wmwaeiykper3ybi.onion -> wikimediafoundation.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
2wmwaeiykper3ybi.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)2wmwaeiykper3ybi\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikimediafoundation.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikimediafoundation.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# NOTE(review): this stanza mirrors the wikipedia.org server pair;
# only the onion address and upstream domain differ
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
notrmquxg7nnaxky.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)notrmquxg7nnaxky\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for notrmquxg7nnaxky.onion -> mediawiki.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
notrmquxg7nnaxky.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)notrmquxg7nnaxky\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}mediawiki.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}mediawiki.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# NOTE(review): this stanza mirrors the wikipedia.org server pair;
# only the onion address and upstream domain differ
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
bhenpxugrhn7nuwi.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)bhenpxugrhn7nuwi\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for bhenpxugrhn7nuwi.onion -> wikibooks.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
bhenpxugrhn7nuwi.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)bhenpxugrhn7nuwi\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikibooks.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikibooks.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# NOTE(review): this stanza mirrors the wikipedia.org server pair;
# only the onion address and upstream domain differ
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
unmojoyfpj6dssbq.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)unmojoyfpj6dssbq\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for unmojoyfpj6dssbq.onion -> wikidata.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
server_name
unmojoyfpj6dssbq.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)unmojoyfpj6dssbq\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikidata.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikidata.org";
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL
# rewrite request referer
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# plain-HTTP listener for easbbp25gbrvq2r3.onion: exists only to redirect
# every request to HTTPS on the same onion host
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): "\\." makes PCRE match a literal backslash before each dot,
# which can never occur in a hostname; likely intended "\." -- confirm
# whether the doubling is genuine output or a paste artifact of this sample.
server_name
easbbp25gbrvq2r3.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)easbbp25gbrvq2r3\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for easbbp25gbrvq2r3.onion -> wikinews.org
# HTTPS (port-443) proxy server: terminates TLS and proxies to wikinews.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): same "\\." concern as the port-80 copy above
server_name
easbbp25gbrvq2r3.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)easbbp25gbrvq2r3\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikinews.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikinews.org";
# "identity": presumably to keep bodies uncompressed for rewriting --
# the rewriting machinery is not visible in this excerpt
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL: pass SNI to the upstream
# rewrite request referer
# onion_to_dns() is presumably defined by Lua init code earlier in the file
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# NB: nginx's limit_except GET implicitly also allows HEAD
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# plain-HTTP listener for 65da4nqqcmdjvqhf.onion: exists only to redirect
# every request to HTTPS on the same onion host
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): "\\." makes PCRE match a literal backslash before each dot,
# which can never occur in a hostname; likely intended "\." -- confirm
# whether the doubling is genuine output or a paste artifact of this sample.
server_name
65da4nqqcmdjvqhf.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)65da4nqqcmdjvqhf\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for 65da4nqqcmdjvqhf.onion -> wikiquote.org
# HTTPS (port-443) proxy server: terminates TLS and proxies to wikiquote.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): same "\\." concern as the port-80 copy above
server_name
65da4nqqcmdjvqhf.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)65da4nqqcmdjvqhf\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikiquote.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikiquote.org";
# "identity": presumably to keep bodies uncompressed for rewriting --
# the rewriting machinery is not visible in this excerpt
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL: pass SNI to the upstream
# rewrite request referer
# onion_to_dns() is presumably defined by Lua init code earlier in the file
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# NB: nginx's limit_except GET implicitly also allows HEAD
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# plain-HTTP listener for wlvkjriqk2e7dkcd.onion: exists only to redirect
# every request to HTTPS on the same onion host
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): "\\." makes PCRE match a literal backslash before each dot,
# which can never occur in a hostname; likely intended "\." -- confirm
# whether the doubling is genuine output or a paste artifact of this sample.
server_name
wlvkjriqk2e7dkcd.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)wlvkjriqk2e7dkcd\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for wlvkjriqk2e7dkcd.onion -> wikisource.org
# HTTPS (port-443) proxy server: terminates TLS and proxies to wikisource.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): same "\\." concern as the port-80 copy above
server_name
wlvkjriqk2e7dkcd.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)wlvkjriqk2e7dkcd\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikisource.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikisource.org";
# "identity": presumably to keep bodies uncompressed for rewriting --
# the rewriting machinery is not visible in this excerpt
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL: pass SNI to the upstream
# rewrite request referer
# onion_to_dns() is presumably defined by Lua init code earlier in the file
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# NB: nginx's limit_except GET implicitly also allows HEAD
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# plain-HTTP listener for rrzbnmeov7rbwoah.onion: exists only to redirect
# every request to HTTPS on the same onion host
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): "\\." makes PCRE match a literal backslash before each dot,
# which can never occur in a hostname; likely intended "\." -- confirm
# whether the doubling is genuine output or a paste artifact of this sample.
server_name
rrzbnmeov7rbwoah.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)rrzbnmeov7rbwoah\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for rrzbnmeov7rbwoah.onion -> wikiversity.org
# HTTPS (port-443) proxy server: terminates TLS and proxies to wikiversity.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): same "\\." concern as the port-80 copy above
server_name
rrzbnmeov7rbwoah.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)rrzbnmeov7rbwoah\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikiversity.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikiversity.org";
# "identity": presumably to keep bodies uncompressed for rewriting --
# the rewriting machinery is not visible in this excerpt
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL: pass SNI to the upstream
# rewrite request referer
# onion_to_dns() is presumably defined by Lua init code earlier in the file
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# NB: nginx's limit_except GET implicitly also allows HEAD
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# plain-HTTP listener for fgvwe5aoui36adpm.onion: exists only to redirect
# every request to HTTPS on the same onion host
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): "\\." makes PCRE match a literal backslash before each dot,
# which can never occur in a hostname; likely intended "\." -- confirm
# whether the doubling is genuine output or a paste artifact of this sample.
server_name
fgvwe5aoui36adpm.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)fgvwe5aoui36adpm\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for fgvwe5aoui36adpm.onion -> wikivoyage.org
# HTTPS (port-443) proxy server: terminates TLS and proxies to wikivoyage.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): same "\\." concern as the port-80 copy above
server_name
fgvwe5aoui36adpm.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)fgvwe5aoui36adpm\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wikivoyage.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wikivoyage.org";
# "identity": presumably to keep bodies uncompressed for rewriting --
# the rewriting machinery is not visible in this excerpt
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL: pass SNI to the upstream
# rewrite request referer
# onion_to_dns() is presumably defined by Lua init code earlier in the file
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# NB: nginx's limit_except GET implicitly also allows HEAD
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# FORCE_HTTPS is in use; set up separate server for port 80 & force redirects
# plain-HTTP listener for x2jn2tulp4snfn24.onion: exists only to redirect
# every request to HTTPS on the same onion host
server {
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock;
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-80.sock;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): "\\." makes PCRE match a literal backslash before each dot,
# which can never occur in a hostname; likely intended "\." -- confirm
# whether the doubling is genuine output or a paste artifact of this sample.
server_name
x2jn2tulp4snfn24.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)x2jn2tulp4snfn24\\.onion$
;
# tell the client to try again as HTTPS without ever leaving the onion
# use 307 / temporary redirect because your URIs may change in future
# use $host (not $server) to copy-over subdomains, etc, transparently
return 307 https://$host$request_uri;
}
# for x2jn2tulp4snfn24.onion -> wiktionary.org
# HTTPS (port-443) proxy server: terminates TLS and proxies to wiktionary.org
server {
# softmap onion 1
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock ssl;
# softmap onion 2
listen unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-2.d/port-443.sock ssl;
# subdomain regexp captures trailing dot, use carefully; does not need "~*"
# NB: this regexp should be kept in-sync with the other FORCE_HTTPS copy
# NOTE(review): same "\\." concern as the port-80 copy above
server_name
x2jn2tulp4snfn24.onion
~^(?<servernamesubdomain>([-0-9a-z]+\\.)+)x2jn2tulp4snfn24\\.onion$
;
# for test & to help SSL certificate acceptance
location ~* ^/hello[-_]onion/?$ {
return 200 "Hello, Onion User!";
}
# no named location blocking
# no regular expression location blocking
# for traffic
location / {
proxy_pass "$scheme://${servernamesubdomain}wiktionary.org"; # note $scheme
proxy_http_version 1.1;
# a note on proxy_set_header, add_header, similar methods, etc;
# if you override *any* header then you will lose the other
# headers inherited from the parent contexts:
# https://blog.g3rt.nl/nginx-add_header-pitfall.html
proxy_set_header Host "${servernamesubdomain}wiktionary.org";
# "identity": presumably to keep bodies uncompressed for rewriting --
# the rewriting machinery is not visible in this excerpt
proxy_set_header Accept-Encoding "identity";
proxy_set_header Connection $connection_upgrade; # SSL
proxy_set_header Upgrade $http_upgrade; # SSL
proxy_ssl_server_name on; # SSL: pass SNI to the upstream
# rewrite request referer
# onion_to_dns() is presumably defined by Lua init code earlier in the file
set_by_lua_block $referer2 { return onion_to_dns(ngx.var.http_referer) }
proxy_set_header Referer $referer2;
# rewrite request origin
set_by_lua_block $origin2 { return onion_to_dns(ngx.var.http_origin) }
proxy_set_header Origin $origin2;
# suppress non-GET methods (e.g.: POST)
# NB: nginx's limit_except GET implicitly also allows HEAD
limit_except GET {
deny all;
}
# no named host blocking
# no regular expression host blocking
}
}
# header purge
# strip response headers that leak infrastructure details to onion clients:
# cache/proxy identities (Age, Via, X-Varnish), backend software versions
# (Server, X-Powered-By), and request-tracing ids (X-Request-Id, X-Runtime).
# NB: more_clear_headers comes from the headers-more-nginx-module, not
# stock nginx.
more_clear_headers "Age";
more_clear_headers "Server";
more_clear_headers "Via";
more_clear_headers "X-From-Nginx";
more_clear_headers "X-NA";
more_clear_headers "X-Powered-By";
more_clear_headers "X-Request-Id";
more_clear_headers "X-Runtime";
more_clear_headers "X-Varnish";
}
# -*- conf -*-
# eotk (c) 2017 Alec Muffett
# template note: here we use TOR_DIR not PROJECT_DIR because of the
# relocation of Tor directories under `softmap`
# Per-instance tor daemon configuration for softmap instance hs-1 of the
# "wikipedia" project; all state lives under .../wikipedia.d/hs-1.d
DataDirectory /Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d
ControlPort unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/tor-control.sock
PidFile /Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/tor.pid
Log notice file /Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/tor.log
SafeLogging 1 # scrub potentially sensitive strings from the log
HeartbeatPeriod 60 minutes
LongLivedPorts 80,443
RunAsDaemon 1
# use single onions
# single-onion (non-anonymous) mode trades the *service's* location privacy
# for lower latency; clients remain anonymous. Hence the explicit opt-in below.
SocksPort 0 # have to disable this for single onions
HiddenServiceSingleHopMode 1 # yep, i want single onions
HiddenServiceNonAnonymousMode 1 # yes, really, honest, i swear
# softmap
# tor handles only the onion side; requests are handed to nginx via the
# unix sockets below (see the generated nginx config above)
HiddenServiceDir /Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d
HiddenServicePort 80 unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-80.sock
HiddenServicePort 443 unix:/Users/alecm/src/eotk/projects.d/wikipedia.d/hs-1.d/port-443.sock
HiddenServiceNumIntroductionPoints 3
@alecmuffett
Copy link
Author

All of the above nginx and tor configuration is generated by EOTK from this configuration file:

set project wikipedia
softmap %NEW_SOFT_ONION% wikipedia.org m
softmap %NEW_SOFT_ONION% wikimedia.org m
softmap %NEW_SOFT_ONION% wikimediafoundation.org m
softmap %NEW_SOFT_ONION% mediawiki.org m
softmap %NEW_SOFT_ONION% wikibooks.org m
softmap %NEW_SOFT_ONION% wikidata.org m
softmap %NEW_SOFT_ONION% wikinews.org m
softmap %NEW_SOFT_ONION% wikiquote.org m
softmap %NEW_SOFT_ONION% wikisource.org m
softmap %NEW_SOFT_ONION% wikiversity.org m
softmap %NEW_SOFT_ONION% wikivoyage.org m
softmap %NEW_SOFT_ONION% wiktionary.org m

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment