public
Last active

Thin-Prefork - a real-world example

  • Download Gist
rackup.rb
Ruby
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77
#!/usr/bin/env ruby

# Launcher for the Rticulate app under Thin::Prefork.
# Run with -d for foreground debugging (single worker, stderr to the tty);
# otherwise worker stderr is piped into syslog and several workers are forked.

require 'uri'     # URI.parse below — don't rely on a transitive require
require 'socket'  # UDPSocket below — likewise

require 'rticulate'
require 'thin/prefork'
require 'thin/prefork/project'
require 'projectr/watch_changes'

debugging = ARGV.member?("-d")
if debugging
  stderr = $stderr.dup
  rack_workers = 1
else
  # Send worker stderr through logger(1) into syslog as local1.info.
  stderr = IO.popen("logger -t 'rticulate' -p local1.info", "w")
  stderr.sync = true
  # NOTE(review): if Env[] returns strings this is a String, not an
  # Integer — confirm Env coerces, or add .to_i here.
  rack_workers = Env['rack_workers'] || 3
end

url = URI.parse(Env['rackurl'])
pid = Env['pidfile']

# Discover our outward-facing address without sending any packets:
# connect() on a UDP socket only records the peer, after which addr
# reports the local address the kernel would route from.
# this trick from http://stackoverflow.com/questions/42566
local_addr = UDPSocket.open { |s| s.connect('10.0.0.1', 1); s.addr.last }
 
# Mixed into Thin::Prefork worker objects so Varnish learns about
# backends as they come and go: a worker's host:port is inserted into
# varnish_backends when it registers and removed when it unregisters,
# and "notify varnish" wakes whatever listens for backend changes.
module NotifyVarnish
  def on_register
    # the master process needs its own db handle which children don't
    # interfere with when they call DB.disconnect on startup
    @db ||= Sequel.connect(Env['database'])
    # Use Sequel's dataset API instead of interpolating @host/@port
    # into a raw SQL string, so the value is properly quoted/escaped.
    @db[:varnish_backends].insert(address: "#{@host}:#{@port}")
    @db << "notify varnish;"
    super
  end

  def on_start
    # existing database connections were opened by the parent, so shut
    # them down in the child cos they won't work anyway
    DB.disconnect
  end

  def on_unregister
    @db[:varnish_backends].where(address: "#{@host}:#{@port}").delete
    @db << "notify varnish;"
    super
  end
end
 
 
# Load the project's code, then serve it with a preforking Thin master.
project = Projectr::Project[:rticulate]
project.load!

server = Thin::Prefork::Project.new(:app => Rticulate.app,
                                    :project => project,
                                    :host => local_addr, :port => url.port,
                                    :stderr => stderr,
                                    :pid_file => pid,
                                    :worker_mixins => NotifyVarnish,
                                    :num_workers => rack_workers)

# Graceful shutdown: stop the workers, then exit cleanly.
Signal.trap("TERM") do
  warn "Terminating children"
  server.stop!
  exit 0
end

# SIGHUP triggers a reload of the project and a worker restart.
Signal.trap("HUP") do
  warn "Reloading"
  server.reload!
end

if Env['environment'] == 'development'
  # In development, watch the project's files and hot-reload workers
  # whenever something on disk changes.
  server.add_io_handler(project.watch_changes) do |o|
    changed = o.changed_files
    # Plain-Ruby emptiness test rather than ActiveSupport's #present?,
    # which nothing in this file requires.
    server.reload! if changed && !changed.empty?
  end
end

server.run!

Please sign in to comment on this gist.

Something went wrong with that request. Please try again.