# fabfile.py, from gist 8694512 by @kyleterry (created January 29, 2014)
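# Usage sketch (an assumption, not spelled out in the original gist): Fabric 1.x
# composes tasks on the command line, so typical invocations might look like
#   fab staging:01 deploy:branch=master   # deploy a branch to staging-01
#   fab live deploy                       # deploy the default branch to live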
import env.common
import os
import json
from fabric.api import env, cd, lcd, sudo, run, local, abort, task, prompt
from fabric.contrib import django
from lib.aquameta.fabric_utils import (virtualenv, database, raw_connection,
                                       check_for_database_existence,
                                       yes_no_prompt, env_apply_json_file)
from urllib import urlencode
from urllib2 import urlopen

django.settings_module('beehive.env.bh_dev')
from django.conf import settings

env_apply_json_file('fabric.json')
staging_servers = {
    '01': 'staging-01.office.aquameta.com',
    '02': 'staging-02.office.aquameta.com',
    '03': 'staging-03.vpn.aquameta.com',
    '04': 'staging-04.vpn.aquameta.com',
    '05': 'staging-05.vpn.aquameta.com',
    '06': 'staging-06.vpn.aquameta.com',
}

www_01 = 'www-01.vpn.aquameta.com'
www_02 = 'www-02.vpn.aquameta.com'
third = 'third.vpn.aquameta.com'
bh_live_02 = 'bh-live-02.vpn.aquameta.com'
processing_01 = 'processing-01.vpn.aquameta.com'
donkey = 'donkey.vpn.aquameta.com'
live_servers = (www_01, www_02, processing_01, donkey, third, bh_live_02,)


@task
def local_env():
    env.activate = 'echo'
    env.env_type = 'local'


@task
def staging(number):
    env.user = settings.REMOTE_USER
    env.hosts = [staging_servers[number]]
    env.env_name = 'env.bh_staging'
    env.env_type = 'staging'
    initial_questions()


@task
def live():
    env.user = settings.REMOTE_USER
    if yes_no_prompt('Did you mean to act on the live server?') == 'no':
        abort('Oops, live server!')
    env.hosts = [third]
    env.env_name = 'env.bh_live'
    env.env_type = 'live'
    initial_questions()


@task
def live_db():
    env.user = settings.REMOTE_USER
    env.hosts = [bh_live_02]
    env.env_name = 'env.bh_live'
    env.env_type = 'live_db'


def initial_questions():
    if env.env_type in ('staging', 'local'):
        env.new_db = yes_no_prompt('Install new DB?') == 'yes'
        # if env.new_db:
        #     env.rsync_new_db = yes_no_prompt('rsync over a new backup (no will use your current beehive.backup)?', default='yes') == 'yes'
    elif env.env_type == 'live':
        env.email_sysadmin = yes_no_prompt('Send sysadmin release email?', default='yes') == 'yes'
    env.run_migrations = yes_no_prompt('Run migrations?') == 'yes'
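

# A rough map of the deploy flow below (a summary of the code itself, not from
# any original docs): notify the release endpoint, pull the requested branch,
# run the sysadmin release script on live, rebuild the DB on staging, restart
# services, collect and compress static assets, migrate on the last host only,
# rebuild release notes, clear stale session data, then notify again.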
@task
def deploy(branch='live'):
    deploy_notify(branch)
    pull_down_new_code(branch)
    if env.env_type == 'live' and env.email_sysadmin:
        with cd(env.deploy_dir):
            sudo('./release.sh', user=env.beehive_user)
    if env.env_type == 'staging' and env.new_db:
        install_new_db()
    load_code()
    collect_static_compress_media()
    if env.run_migrations and env.host == env.hosts[-1]:
        migrate()
    build_release_notes()
    clear_session_data()
    deploy_notify(branch, finished=True)


@task
def clear_session_data():
    with cd(env.deploy_dir):
        with virtualenv():
            sudo('./manage.py cleanup', user=env.beehive_user)


def deploy_notify(branch, finished=False):
    release_url = 'http://eastbank.aquameta.com/release'
    hostname = env.host_string
    if finished:
        status = 'finished'
    else:
        status = 'started'
    payload = {
        'status': status,
        'branch': branch,
        'hostname': hostname,
    }
    data = {'payload': json.dumps(payload)}
    try:
        urlopen(release_url, urlencode(data))
    except:
        # Notification is best-effort; never let it break a deploy.
        pass
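
# For illustration only (not in the original): the request above POSTs a single
# urlencoded form field named "payload" whose value is the JSON document, e.g.
#   payload={"status": "started", "branch": "live", "hostname": "third.vpn.aquameta.com"}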


@task
def install_new_db():
    assert not live_server_check(), 'No installing databases on live servers'
    with cd(env.deploy_dir):
        command = 'fab new_db_local'
        if not (staging_servers['01'] in env.hosts or staging_servers['02'] in env.hosts):
            command += ':"backup@broadway.azurestandard.com:aquameta/beehive_data/beehive.backup"'
        with virtualenv():
            sudo(command, user=env.beehive_user)


@task
def pull_down_new_code(branch):
    with cd(env.deploy_dir):
        sudo('git fetch', user=env.beehive_user)
        sudo('git checkout %s' % branch, user=env.beehive_user)
        sudo('find . -iname "*pyc" -delete')
        sudo('git pull', user=env.beehive_user)
        requirements_install()
        sudo('find . -iname "*pyc" -delete')
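

# The three-stage pip install below mirrors the repo's split requirements
# files; presumably the pre/post split enforces install order for packages
# with build-time dependencies on one another (an inference, not documented
# in the original).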
@task
def requirements_install(cmd=sudo):
    sudo_user = {}
    if env.env_type in ('live', 'staging'):
        sudo_user = {'user': env.beehive_user}
    with virtualenv():
        cmd('pip install -r requirements_stable_pre.txt', **sudo_user)
        cmd('pip install -r requirements_stable.txt', **sudo_user)
        cmd('pip install -r requirements_stable_post.txt', **sudo_user)


@task
def migrate():
    with cd(env.deploy_dir):
        with virtualenv():
            # sudo('./manage.py syncdb --noinput --settings=%s' % env.env_name, user=env.beehive_user)
            # sudo('for db in $(./manage.py list_databases); do ./manage.py syncdb --noinput --settings=%s --database=$db; done' % env.env_name, user=env.beehive_user)
            # sudo('./manage.py migrate schema --settings=%s' % env.env_name, user=env.beehive_user)
            sudo('./manage.py migrate --settings=%s' % env.env_name, user=env.beehive_user)


@task
def build_release_notes():
    with cd(env.deploy_dir):
        with virtualenv():
            if env.env_type == 'live':
                sudo('./manage.py buildreleasenotes --settings=%s' % env.env_name, user=env.beehive_user)
            else:
                sudo('./manage.py buildreleasenotes --settings=%s --next-release' % env.env_name, user=env.beehive_user)
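

# load_code bounces the app processes: `sv restart` targets what appear to be
# runit-managed services, and the init scripts restart the default- and
# priority-queue Celery workers (the sleep gives each daemon a moment to stop).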
@task
def load_code():
    sudo('sv restart beehive beehive-www www')
    sudo('bash -c "/etc/init.d/default_celeryd restart; sleep 1"')
    sudo('bash -c "/etc/init.d/priority_celeryd restart; sleep 1"')


@task
def collect_static_compress_media():
    with cd(env.deploy_dir):
        with virtualenv():
            sudo('./manage.py collectstatic --settings=%s --noinput' % env.env_name, user=env.beehive_user)
            sudo('./manage.py compress --settings=%s' % env.env_name, user=env.beehive_user)


@task
def new_db_dump():
    assert live_server_check(), 'can only dump from live server'
    sudo('/usr/local/bin/db-backup verbose', user=env.beehive_user)


def live_server_check():
    return any(server in env.hosts for server in live_servers)


@task
def ls():
    run('ls -la')


@task
def get_images():
    try:
        local_path = os.path.join(settings.PROJECT_ROOT, "htdocs")
        local("rsync -rvu %s@warehouse.azurestandard.com:/srv/beehive/beehive/htdocs/ %s -e \"ssh -p 2203\"" % (
            settings.REMOTE_USER, local_path), capture=False)
    except:
        # Note: this bare except also swallows rsync failures, not just a
        # missing PROJECT_ROOT setting.
        abort("Couldn't find the project root. Images not synced.")


fixtures = {
    'beehive_install': ('sites.Site',),
    'warehouse_pieces': ('warehouse_pieces.basemarkup', 'warehouse_pieces.breakdownmarkup',
                         'warehouse_pieces.retailmarkup'),
    'beekeeper': ('permissions.PermissionType', 'beekeeper.beehiveconfig',),
    'vendor': ('vendor.PaymentTerm',),
    'accounts_receivable': ('accounts_receivable.PaymentTermType',),
    'purchasing': ('purchasing.DeliveryLocation',),
    'web_home_page': ('web_home_page.HomePage',),
}
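

# dump_fixtures below expands each app's model list into one dumpdata call;
# for example (illustrative, derived from the mapping above), the
# 'beehive_install' entry produces:
#   ./manage.py dumpdata sites.Site --format=json --indent=4 > apps/beehive_install/fixtures/test_data.json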
@task
def dump_fixtures():
    # lcd (not cd) so the directory change applies to local() commands.
    with lcd(settings.PROJECT_ROOT):
        for app in fixtures:
            local('./manage.py dumpdata %s --format=json --indent=4 > apps/%s/fixtures/test_data.json' % (
                ' '.join(fixtures[app]), app))


@task
def install_local(quiet=False):
    local_env()
    initial_questions()
    if env.new_db:
        failed = True
        while failed:
            try:
                filtered_table_input = prompt(
                    'Which tables should be filtered?',
                    default='django_slow_log_record,productivity_action,productivity_stockaction,django_session,django_snailtracker_action,django_snailtracker_snailtrack')
                env.filtered_tables = [t.strip() for t in filtered_table_input.split(',')]
                failed = False
            except:
                print 'Error parsing filtered table input'
    requirements_install(cmd=local)
    if env.new_db:
        new_db_local()
    if env.run_migrations:
        migrate_local()


@task
def kill_pycs():
    local('find . -iname "*pyc" -delete')


@task
def new_db_local(get_backup_from='backup@helmut.office.aquameta.com:aquameta/beehive_data/beehive.backup'):
    beehive_backup = 'db/indexed-live-data/beehive.backup'
    filter_file = 'db/indexed-live-data/tablelist'
    if 'postgresql_psycopg2' not in database('ENGINE') and database('NAME') != ':memory:':
        abort('Beehive only officially supports psycopg2, %s is not supported' % database('ENGINE'))
    kill_pycs()
    local('mkdir -p db/indexed-live-data')
    # if env.rsync_new_db:
    local('rsync -PaivL %s %s' % (get_backup_from, beehive_backup))
    print('Got latest indexed live data.')
    # else:
    #     print('Using existing backup.')
    if check_for_database_existence():
        raw_connection(no_transaction=True).execute('DROP DATABASE %s' % database('NAME'))
    raw_connection(no_transaction=True).execute('CREATE DATABASE %s' % database('NAME'))
    restore_cmd = 'pg_restore --no-owner -v -U %s %s -d %s' % (database('USER'), beehive_backup, database('NAME'))
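    # Illustrative only (not in the original): `pg_restore -l` emits one TOC
    # line per archive item, e.g.
    #   2163; 0 16723 TABLE DATA public django_session owner
    # so each `grep -iv 'table data [a-z]* <table> '` drops that table's data
    # entry from the list, and the trimmed list is replayed via -L below.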
    if hasattr(env, 'filtered_tables'):
        filters = []
        for table in env.filtered_tables:
            filters.append("grep -iv 'table data [a-z]* %s '" % table)
        filter_cmd = ' | '.join(filters)
        os.system('pg_restore -l %(backup_file)s | %(table_filters)s > %(filter_file)s' % {
            'backup_file': beehive_backup,
            'table_filters': filter_cmd,
            'filter_file': filter_file,
        })
        restore_cmd += ' -L %s' % filter_file
    if database('HOST'):
        restore_cmd += ' -h {host}'.format(host=database('HOST'))
    print 'Starting install.'
    os.system(restore_cmd)
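

# afplay ships with macOS and aplay with ALSA on Linux, so the chime below
# works on either platform when the corresponding binary is present.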
def play_completion_sound():
    tada = os.path.join(settings.PROJECT_ROOT, 'lib/aquameta/music/tada.wav')
    if os.path.exists('/usr/bin/afplay'):
        local('afplay %s' % tada)
    elif os.path.exists('/usr/bin/aplay'):
        local('aplay %s' % tada)


@task
def migrate_local():
    # local('./manage.py syncdb --noinput')
    # local('for db in $(./manage.py list_databases); do ./manage.py syncdb --noinput --database=$db; done')
    # local('./manage.py migrate schema')
    local('./manage.py migrate')