# ------------------------------------------------------------------------
# REMOVE WWW PREFIX ------------------------------------------------------
# ------------------------------------------------------------------------
server {
server_name *.[sitename];
# remove all sub domains and www
rewrite ^/(.*)$ http://[sitename]/$1 permanent;
}
# ------------------------------------------------------------------------
# [sitename] ------------------------------------------------------------
# ------------------------------------------------------------------------
server {
listen 80;
server_name [sitename];
root /srv/www/[sitename];
# ------------------------------------------------------------------------
# LOGGING ----------------------------------------------------------------
# ------------------------------------------------------------------------
# Syntax: error_log file [ debug | info | notice | warn | error | crit ]
#access_log /var/log/nginx/[sitename].access cache;
error_log /var/log/nginx/[sitename].error error;
access_log off;
rewrite_log off;
# ------------------------------------------------------------------------
# CUSTOM ERROR HANDLING --------------------------------------------------
# ------------------------------------------------------------------------
# Custom Error handing page, comment this out if you do not need it.
error_page 400 401 402 403 404 500 502 503 504 /error.htm;
location /error.htm {
internal;
}
# ------------------------------------------------------------------------
# CACHING ----------------------------------------------------------------
# ------------------------------------------------------------------------
# Cache these in the Browser
location ~* \.(js|css|png|jpg|jpeg|gif|ico)$ {
expires max;
log_not_found off;
}
# Include W3TC Configuration + Block Browser Access
include /srv/www/[sitename]/nginx.conf;
location ~ /nginx\.conf {
deny all;
access_log off;
log_not_found off;
}
# ------------------------------------------------------------------------
# PROCESS PHP SCRIPTS TO PHP-FPM -----------------------------------------
# ------------------------------------------------------------------------
location / {
index index.php index.htm index.html;
location ~ \.php$ {
# server params
include fastcgi_params;
# ------------------------------------------------------------------------
# global params
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_pass 127.0.0.1:9000;
fastcgi_intercept_errors on;
fastcgi_index index.php;
}
try_files $uri $uri/ /index.php?$args;
}
# ------------------------------------------------------------------------
# FILE RULES -------------------------------------------------------------
# ------------------------------------------------------------------------
# Process Robots Files
location = /robots.txt {
allow all;
log_not_found off;
access_log off;
}
# Deny all attempts to access hidden files such as .htaccess, .htpasswd, .DS_Store (Mac).
location ~ /\. {
deny all;
access_log off;
log_not_found off;
}
# ------------------------------------------------------------------------
# LOGIN SHORTCUT ---------------------------------------------------------
# ------------------------------------------------------------------------
location ~* /login/ {
rewrite ^/login/(.*)$ /wp-admin/$1;
}
}
#!/bin/bash
# ------------------------------------------------------------------------------
# SETUP DATABASE ---------------------------------------------------------------
# ------------------------------------------------------------------------------
# Access your server
ssh [username]@[server ipaddress] -p [ssh port]
su root
# Start by setting up your MySQL databases
mysql -u root -p mysql
# Create your wordpress database
create database [database name];
# Setup a User Jailed to that database
grant all on [database name].* to [db user name]@localhost identified by '[alphanumeric password]';
# Setup a Systems Administrative account with global local access
grant all on *.* to 'sysadmin'@'localhost' identified by '[alphanumeric password]';
# Optional: I set up an account matching the server administrative user from the previous post, with sysadmin-level access to the databases
grant all on *.* to [server administrative user name]@'%' identified by '[alphanumeric password]';
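# Optional sanity check: flush and list the grants to confirm the accounts above took effect
flush privileges;
show grants for [db user name]@localhost;
show grants for 'sysadmin'@'localhost';
# Leave the mysql shell before continuing
exit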
# ------------------------------------------------------------------------------
# SETUP RACKSPACE CDN ----------------------------------------------------------
# ------------------------------------------------------------------------------
# Make sure we have all the packages we need installed
sudo apt-get install gcc libcurl4-openssl-dev libfuse-dev libxml2 libxml2-dev git-core
# Make a directory for builds, then clone the latest cloudfuse repository
mkdir /builds
mkdir /builds/cloudfuse
cd /builds/cloudfuse
git clone git://github.com/redbo/cloudfuse.git
# Once we have the software downloaded we have to build it
cd /builds/cloudfuse/cloudfuse
./configure
make
sudo make install
# Now run the following to make sure cloudfuse was setup correctly:
which cloudfuse
# The command should return "/usr/local/bin/cloudfuse"
# Now go to the root user's home directory (you should already be logged in as root)
cd /root
# Create the configuration file
nano .cloudfuse
# add all of the text between the starting [[ and ending ]]
# [[
username=[username]
api_key=[apikey]
use_snet=[true or false]
authurl=[US-based accounts use https://auth.api.rackspacecloud.com/v1.0 or UK-based accounts use https://lon.auth.api.rackspacecloud.com/v1.0]
#]]
# Save File, Exit
ctrl+x, y, enter = save file and exit
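# Optional: .cloudfuse holds your API key, so it is worth locking it down to root only
chmod 600 /root/.cloudfuse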
# ------------------------------------------------------------------------------
# SETUP WORDPRESS SITE BASE ----------------------------------------------------
# ------------------------------------------------------------------------------
# Create Site Home Folder
mkdir /srv/www/[sitename]
# Create the wordpress cloud directories
mkdir /srv/www/[sitename]/wp-content-cloudfiles
# Change folder permissions
chown root:www-data /srv/www/[sitename]/wp-content-cloudfiles
# Now let's test by invoking cloudfuse and navigating to that directory. Prior to this step, I uploaded a temporary image to the CDN folder
cloudfuse /srv/www/[sitename]/wp-content-cloudfiles -o allow_other,umask=774,nonempty
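# Quick check that the container is actually mounted; you should see a fuse entry and the test image uploaded earlier
mount | grep cloudfuse
ls -al /srv/www/[sitename]/wp-content-cloudfiles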
# Open crontab configuration
nano /etc/crontab
# Add the following, between the starting [[ and ending ]], to the end of the file under the "# m h dom mon dow user command" line:
# [[
# Setup CloudFuse Directories
@reboot root modprobe fuse
# Setup Each Directory
@reboot root cloudfuse /srv/www/[sitename]/wp-content-cloudfiles -o allow_other,umask=774,nonempty
#]]
# You will only need one "@reboot root modprobe fuse" entry; however, each cloud directory you want mounted needs its own "@reboot root cloudfuse /srv/www/[sitename]/wp-content-cloudfiles -o allow_other,umask=774,nonempty" line
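# For example, a crontab mounting two sites ([sitename1] and [sitename2] are placeholders) would contain:
#   @reboot root modprobe fuse
#   @reboot root cloudfuse /srv/www/[sitename1]/wp-content-cloudfiles -o allow_other,umask=774,nonempty
#   @reboot root cloudfuse /srv/www/[sitename2]/wp-content-cloudfiles -o allow_other,umask=774,nonempty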
# reboot the box
reboot
ssh [username]@[server ipaddress] -p [ssh port setup earlier]
su root
# verify your fuse is still working
cd /srv/www/[sitename]/wp-content-cloudfiles
# ------------------------------------------------------------------------------
# SETUP NGiNX SITE BASE --------------------------------------------------------
# ------------------------------------------------------------------------------
# Copy a placeholder index page into the site root
cp /srv/www/default.site/index.html /srv/www/[sitename]/index.html
# Setup the default nginx site configuration
nano /etc/nginx/sites-available/[sitename]
# Add the following starting [[ and ending ]] to this file
# [[
server {
listen 80;
server_name [your domain];
root /srv/www/[sitename];
#Syntax: error_log file [ debug | info | notice | warn | error | crit ]
error_log /var/log/nginx/[sitename].error error;
rewrite_log off;
# ------------------------------------------------------------------------
location / {
index index.php index.htm index.html;
location ~ \.php$ {
# server params
include fastcgi_params;
# ------------------------------------------------------------------------
# global params
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_pass 127.0.0.1:9000;
fastcgi_intercept_errors on;
fastcgi_index index.php;
}
try_files $uri $uri/ /index.php?$args;
}
}
#]]
# save file, exit
ctrl+x, y, enter = save file and exit
# make a symlink to make it active
ln -s /etc/nginx/sites-available/[sitename] /etc/nginx/sites-enabled/[sitename]
# Verify and Restart NGiNX
sudo /etc/init.d/nginx configtest
sudo /etc/init.d/nginx restart
# If your domain's DNS is properly pointing to this box, you should be able to test your default domain.
http://[site url]
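# A quick check from the server itself (install curl first with apt-get if it is missing); expect an HTTP 200 response
curl -I http://[site url]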
# ------------------------------------------------------------------------------
# SETUP WORDPRESS --------------------------------------------------------------
# ------------------------------------------------------------------------------
# 1. Follow: http://codex.wordpress.org/Installing_WordPress
# 2. You will need your db name, dbuser name, dbuser password, admin account, admin email address!
# 3. I typically pull all of the files down to my machine, unzip them, and FTP all of them using the admin account to connect to the box.
# 4. Once you can see the default home page in WP for your url, let's continue.
# Setup Secure Permissions for all sites
chown -R root.admin /srv/backup
chown -R root.www-data /srv/www
chmod -R 774 /srv/www
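# Optional: confirm the ownership and mode actually applied (expect root www-data and 774)
ls -ld /srv/www/[sitename]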
# Setup a robots.txt file
nano /srv/www/[sitename]/robots.txt
# add all of the text between the starting [[ and ending ]]
# [[
User-agent: *
# ------------------------------------------------------------------------
# WORDPRESS
# ------------------------------------------------------------------------
Disallow: /cgi-bin
Disallow: /wp-*
Disallow: /trackback
Disallow: /feed
Disallow: /archives
Disallow: /comments
Disallow: /category/*/*
Disallow: */trackback*
Disallow: */feed*
Disallow: */comments
Disallow: /*?*
Disallow: /*?
Allow: /wp-content/uploads
# Google Image
User-agent: Googlebot-Image
Disallow:
Allow: /*
# Google AdSense
User-agent: Mediapartners-Google*
Disallow:
Allow: /*
# ------------------------------------------------------------------------
# Block Specific Folders, Files, and Urls ending with these extensions
# ------------------------------------------------------------------------
Disallow: /app-includes*
Disallow: /readme.html
Disallow: /license.txt
Disallow: /*.php$
Disallow: /*.js$
Disallow: /*.inc$
Disallow: /*.css$
# ------------------------------------------------------------------------
# SITE MAP
# ------------------------------------------------------------------------
Sitemap: http://[siteurl]/sitemapindex.xml
# ------------------------------------------------------------------------
# BLOCK BAD BOTS
# ------------------------------------------------------------------------
User-agent: Twiceler www.cuill.com/robots.html
Disallow: /
User-agent: Mozilla/5.0 (Twiceler-0.9 http://www.cuill.com/twiceler/robot.html)
Disallow: /
User-agent: Twiceler www.cuill.com/twiceler/robot.html
Disallow: /
User-agent: Twiceler-0.9 http://www.cuill.com/twiceler/robot.html
Disallow: /
User-agent: litefinder
Disallow: /
User-agent: Baidu
Disallow: /
User-agent: Yahoo Pipes 1.0
Disallow: /
User-agent: Slurp
Disallow: /
User-agent: NPBot
Disallow: /
User-agent: TurnitinBot
Disallow: /
User-agent: EmailCollector
Disallow: /
User-agent: EmailWolf
Disallow: /
User-agent: CopyRightCheck
Disallow: /
User-agent: Black Hole
Disallow: /
User-agent: Titan
Disallow: /
User-agent: NetMechanic
Disallow: /
User-agent: CherryPicker
Disallow: /
User-agent: EmailSiphon
Disallow: /
User-agent: WebBandit
Disallow: /
User-agent: Crescent
Disallow: /
User-agent: NICErsPRO
Disallow: /
User-agent: SiteSnagger
Disallow: /
User-agent: ProWebWalker
Disallow: /
User-agent: CheeseBot
Disallow: /
User-agent: ia_archiver
Disallow: /
User-agent: ia_archiver/1.6
Disallow: /
User-agent: Teleport
Disallow: /
User-agent: TeleportPro
Disallow: /
User-agent: Wget
Disallow: /
User-agent: MIIxpc
Disallow: /
User-agent: Telesoft
Disallow: /
User-agent: Website Quester
Disallow: /
User-agent: WebZip
Disallow: /
User-agent: moget/2.1
Disallow: /
User-agent: WebZip/4.0
Disallow: /
User-agent: Mister PiX
Disallow: /
User-agent: WebStripper
Disallow: /
User-agent: WebSauger
Disallow: /
User-agent: WebCopier
Disallow: /
User-agent: NetAnts
Disallow: /
User-agent: WebAuto
Disallow: /
User-agent: TheNomad
Disallow: /
User-agent: WWW-Collector-E
Disallow: /
User-agent: RMA
Disallow: /
User-agent: libWeb/clsHTTP
Disallow: /
User-agent: asterias
Disallow: /
User-agent: httplib
Disallow: /
User-agent: turingos
Disallow: /
User-agent: spanner
Disallow: /
User-agent: InfoNaviRobot
Disallow: /
User-agent: Harvest/1.5
Disallow: /
User-agent: Bullseye/1.0
Disallow: /
User-agent: Mozilla/4.0 (compatible; BullsEye; Windows 95)
Disallow: /
User-agent: Crescent Internet ToolPak HTTP OLE Control v.1.0
Disallow: /
User-agent: CherryPickerSE/1.0
Disallow: /
User-agent: CherryPickerElite/1.0
Disallow: /
User-agent: WebBandit/3.50
Disallow: /
User-agent: DittoSpyder
Disallow: /
User-agent: SpankBot
Disallow: /
User-agent: BotALot
Disallow: /
User-agent: lwp-trivial/1.34
Disallow: /
User-agent: lwp-trivial
Disallow: /
User-agent: Wget/1.6
Disallow: /
User-agent: BunnySlippers
Disallow: /
User-agent: URLy Warning
Disallow: /
User-agent: Wget/1.5.3
Disallow: /
User-agent: LinkWalker
Disallow: /
User-agent: cosmos
Disallow: /
User-agent: moget
Disallow: /
User-agent: hloader
Disallow: /
User-agent: humanlinks
Disallow: /
User-agent: LinkextractorPro
Disallow: /
User-agent: Offline Explorer
Disallow: /
User-agent: Mata Hari
Disallow: /
User-agent: LexiBot
Disallow: /
User-agent: Web Image Collector
Disallow: /
User-agent: The Intraformant
Disallow: /
User-agent: True_Robot/1.0
Disallow: /
User-agent: True_Robot
Disallow: /
User-agent: BlowFish/1.0
Disallow: /
User-agent: JennyBot
Disallow: /
User-agent: MIIxpc/4.2
Disallow: /
User-agent: BuiltBotTough
Disallow: /
User-agent: ProPowerBot/2.14
Disallow: /
User-agent: BackDoorBot/1.0
Disallow: /
User-agent: toCrawl/UrlDispatcher
Disallow: /
User-agent: WebEnhancer
Disallow: /
User-agent: TightTwatBot
Disallow: /
User-agent: suzuran
Disallow: /
User-agent: VCI WebViewer VCI WebViewer Win32
Disallow: /
User-agent: VCI
Disallow: /
User-agent: Szukacz/1.4
Disallow: /
User-agent: QueryN Metasearch
Disallow: /
User-agent: Openfind data gathere
Disallow: /
User-agent: Openfind
Disallow: /
User-agent: Xenu's Link Sleuth 1.1c
Disallow: /
User-agent: Xenu's
Disallow: /
User-agent: Zeus
Disallow: /
User-agent: RepoMonkey Bait & Tackle/v1.01
Disallow: /
User-agent: RepoMonkey
Disallow: /
User-agent: Zeus 32297 Webster Pro V2.9 Win32
Disallow: /
User-agent: Webster Pro
Disallow: /
User-agent: EroCrawler
Disallow: /
User-agent: LinkScan/8.1a Unix
Disallow: /
User-agent: Kenjin Spider
Disallow: /
User-agent: Keyword Density/0.9
Disallow: /
User-agent: Cegbfeieh
Disallow: /
User-agent: SurveyBot
Disallow: /
User-agent: duggmirror
Disallow: /
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# With the robots in place, let's setup the base site map
# Install the Better WP Sitemaps Plugin: http://wordpress.org/extend/plugins/bwp-google-xml-sitemaps/
# Make sure you can generate a sitemap; if you receive a permissions error, here is the fix
chmod 777 /srv/www/[sitename]/wp-content/plugins/bwp-google-xml-sitemaps/cache/
# In order for the sitemap to be generated you need to create a test/dummy post; afterwards your sitemap URL should work
http://[siteurl]/sitemapindex.xml
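# Optional: verify that both robots.txt and the sitemap are served correctly
curl -I http://[siteurl]/robots.txt
curl -I http://[siteurl]/sitemapindex.xml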
# ------------------------------------------------------------------------------
# SETUP FTP USERS --------------------------------------------------------------
# ------------------------------------------------------------------------------
# create the ftp users and lock them to the website directories
useradd -d /srv/www/[website/appname] -m [ftp user name]
# set the ftp account passwords
passwd [ftp user name]
# add the ftp users to the www-data user/group
adduser [ftp user name] www-data
# BUG FIX: 500 OOPS: vsftpd: refusing to run with writable root inside chroot()
sudo add-apt-repository ppa:thefrontiergroup/vsftpd
sudo apt-get update
sudo apt-get install vsftpd
# Edit vsftpd.conf and append this setting to the end of the file to keep users jailed!
nano /etc/vsftpd.conf
# Documentation: http://www.benscobie.com/fixing-500-oops-vsftpd-refusing-to-run-with-writable-root-inside-chroot/
# add all of the text between the starting [[ and ending ]]
# [[
# Keep non-chroot listed users jailed
allow_writeable_chroot=YES
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# restart the service for changes to take effect
sudo service vsftpd restart
# test ftp via secondary terminal window:
ftp [ftp user name]@[server ipaddress] [ftp port]
# Make sure this ftp user is jailed to /srv/www/[sitename]; first, let's see which files are currently visible
ls -al
# Now try to navigate up the tree
cd ../../
# We should only be able to see the same files we saw earlier
ls -al
# reboot the box
reboot
ssh [username]@[server ipaddress] -p [ssh port setup earlier]
su root
# ------------------------------------------------------------------------------
# CONFIGURE NGiNX ----------------------------------------------
# ------------------------------------------------------------------------------
# Make sure we have the php mime type
nano /etc/nginx/mime.types
# add all of the text between the starting [[ and ending ]] on line 2 after "types {"
# [[
text/php php php5;
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# make sure the change worked
sudo /etc/init.d/nginx configtest
# If you followed my other guide, you can save some time by deleting the current config and replacing it
rm /etc/nginx/nginx.conf
nano /etc/nginx/nginx.conf
# add all of the text between the starting [[ and ending ]]
# [[
user www-data;
worker_processes 4;
pid /var/run/nginx.pid;
events {
worker_connections 768;
# multi_accept on;
}
http {
##-----------------------------------------------------------------------------
# Basic Settings
##-----------------------------------------------------------------------------
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
# Large files can be uploaded to the server
client_max_body_size 5M;
client_body_buffer_size 128k;
include /etc/nginx/mime.types;
default_type application/octet-stream;
##-----------------------------------------------------------------------------
# Logging Settings
##-----------------------------------------------------------------------------
log_format cache '$remote_addr - $host [$time_local] '
'"$request" $status $upstream_cache_status $body_bytes_sent '
'"$http_referer" "$http_user_agent"';
access_log /var/log/nginx/access.log cache;
error_log /var/log/nginx/error.log;
##-----------------------------------------------------------------------------
## COMPRESSION START-----------------------------------------------------------
# src: http://www.ruby-forum.com/topic/141251
# src: http://wiki.brightbox.co.uk/docs:nginx
gzip on;
gzip_http_version 1.0;
gzip_comp_level 2;
gzip_proxied any;
gzip_min_length 1100;
gzip_buffers 16 8k;
gzip_types text/plain text/html text/css application/x-javascript application/xml application/xml+rss text/javascript;
# Some versions of IE 6 don't handle compression well on some mime-types, so just disable it for them
gzip_disable "MSIE [1-6].(?!.*SV1)";
# Set a vary header so downstream proxies don't send cached gzipped content to IE6
gzip_vary on;
## /COMPRESSION END -----------------------------------------------------------
##-----------------------------------------------------------------------------
##-----------------------------------------------------------------------------
# Virtual Host Configs
##-----------------------------------------------------------------------------
include /etc/nginx/sites-enabled/*;
}
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Make sure the change worked
sudo /etc/init.d/nginx configtest
# restart nginx to get them to stick
sudo /etc/init.d/nginx restart
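# Optional: confirm gzip is being applied; look for "Content-Encoding: gzip" in the response headers
curl -s -o /dev/null -D - -H "Accept-Encoding: gzip" http://[site url]/ | grep -i content-encoding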
# Now we have to adjust our WordPress website configuration; if you have been following my guide, you can delete it and re-create it.
# Make a backup copy first
cp /etc/nginx/sites-available/[sitename] /etc/nginx/sites-available/[sitename].original
# Make the updates
rm /etc/nginx/sites-available/[sitename]
nano /etc/nginx/sites-available/[sitename]
# add all of the text between the starting [[ and ending ]]
# [[
server {
listen 80;
server_name [sitename];
root /srv/www/[sitename];
# ------------------------------------------------------------------------
# Syntax: error_log file [ debug | info | notice | warn | error | crit ]
#access_log /var/log/nginx/[sitename].access cache;
error_log /var/log/nginx/[sitename].error error;
access_log off;
rewrite_log off;
# ------------------------------------------------------------------------
# Custom Error handing page, comment this out if you do not need it.
error_page 400 401 402 403 404 500 502 503 504 /error.htm;
location /error.htm {
internal;
}
# ------------------------------------------------------------------------
location / {
index index.php index.htm index.html;
location ~ \.php$ {
# server params
include fastcgi_params;
# ------------------------------------------------------------------------
# global params
fastcgi_split_path_info ^(.+\.php)(/.+)$;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
fastcgi_pass 127.0.0.1:9000;
fastcgi_intercept_errors on;
fastcgi_index index.php;
}
try_files $uri $uri/ /index.php?$args;
}
# ------------------------------------------------------------------------
location ~* \.(js|css|png|jpg|jpeg|gif|ico)$ {
expires max;
log_not_found off;
}
# ------------------------------------------------------------------------
location ~* /login/ {
rewrite ^/login/(.*)$ /wp-admin/$1;
}
# ------------------------------------------------------------------------
location = /robots.txt {
allow all;
log_not_found off;
access_log off;
}
}
# Remove the www prefix
server {
server_name *.[sitename];
# remove all sub domains and www
rewrite ^/(.*)$ http://[sitename]/$1 permanent;
}
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# ------------------------------------------------------------------------------
# SETUP CRON JOBS 4 Backup + Server Love ---------------------------------------
# ------------------------------------------------------------------------------
# Start by opening the default logger
nano /etc/rsyslog.d/50-default.conf
# Start tracking cron jobs
Uncomment: cron.* [remove the "#" around line 10]
Uncomment: daemon.* [remove the "#" around line 11]
# save file, exit
ctrl+x, y, enter = save file and exit
# restart the logger service
sudo service rsyslog restart
# Setup the nightly server reboot process
nano /etc/cron.daily/server-reboot
# add all of the text between the starting [[ and ending ]]
# [[
#!/bin/bash
#start
#-----------------------------------------------------------------------
#delete nginx cache if exists
rm -rf /var/cache/nginx
#restart server
DateStamp=$(date +"%Y%m%d %k:%M:%S");
echo $DateStamp >> /var/log/cron.reboot.log;
/sbin/shutdown -r now
#-----------------------------------------------------------------------
#end
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Make the job executable
chmod +x /etc/cron.daily/server-reboot
# Open crontab configuration
nano /etc/crontab
# Add the following, between the starting [[ and ending ]], to the end of the file under the "# m h dom mon dow user command" line:
# [[
# Reboot Server Daily @ 1:30 AM
30 01 * * * root /etc/cron.daily/server-reboot
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Optional: you can test this job by running the following command (note: it reboots the server immediately)
sh /etc/cron.daily/server-reboot
# Setup the nightly server backup database processes
mkdir /srv/backup/daily
mkdir /srv/backup/daily/databases
nano /etc/cron.daily/backup-databases
# add all of the text between the starting [[ and ending ]]
# [[
#!/bin/bash
#start
#-----------------------------------------------------------------------
#verify directory structure exists prior to running this job
BackUpDIR="/srv/backup/daily/databases/";
DateStamp=$(date +"%Y%m%d");
#Format of DBList="db1 db2 db3 db4"
DBList="[dbname]";
#I have a server system administrator account with access to all dbs, typically named sysadmin
DBUser="[user with db permissions like sysadmin]";
DBPwd="[user password]";
for DB in $DBList;
do
mysqldump --opt -u$DBUser -p$DBPwd --add-drop-table --lock-tables --databases $DB > $BackUpDIR$DateStamp.$DB.sql;
tar zcf "$BackUpDIR$DateStamp.DB.$DB.tar.gz" -P $BackUpDIR$DateStamp.$DB.sql;
rm -rf $BackUpDIR$DateStamp.$DB.sql;
mysqldump --opt -u$DBUser -p$DBPwd --add-drop-table --lock-tables $DB > $BackUpDIR$DateStamp.$DB.tbls.sql;
tar zcf "$BackUpDIR$DateStamp.DB.$DB.tbls.tar.gz" -P $BackUpDIR$DateStamp.$DB.tbls.sql;
rm -rf $BackUpDIR$DateStamp.$DB.tbls.sql;
done
#-----------------------------------------------------------------------
#end
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Make the job executable
chmod +x /etc/cron.daily/backup-databases
# Open crontab configuration
nano /etc/crontab
# Add the following, between the starting [[ and ending ]], to the end of the file under the "# m h dom mon dow user command" line:
# [[
# Backup Databases Daily @ 12:30 AM
30 00 * * * root /etc/cron.daily/backup-databases
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Optional: you can test this job if you like by typing the following command
sh /etc/cron.daily/backup-databases
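# To restore one of these backups later, something along these lines should work ([datestamp] is the YYYYMMDD stamp used by the script):
tar zxf /srv/backup/daily/databases/[datestamp].DB.[dbname].tar.gz -C /
mysql -u sysadmin -p < /srv/backup/daily/databases/[datestamp].[dbname].sql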
# Setup the nightly server backup file processes
# The daily folder already exists from the database backup step, so just add the websites folder
mkdir -p /srv/backup/daily/websites
nano /etc/cron.daily/backup-wordpress-site-files
# add all of the text between the starting [[ and ending ]]
# [[
#!/bin/bash
#start
#-----------------------------------------------------------------------
#verify directory structure exists prior to running this job
BackUpDIR="/srv/backup/daily/websites/";
SrvDir="/srv/www/";
#Format of SiteList="sitefolder1 sitefolder2 sitefolder3"
SiteList="[sitefolders]";
DateStamp=$(date +"%Y%m%d");
for Site in $SiteList;
do
#backup all files, however, exclude the rackspace cloud cdn if you are using it
tar zcf "$BackUpDIR$DateStamp.website.code.$Site.tar.gz" -P $SrvDir$Site --exclude "$SrvDir$Site/wp-content-cloudfiles";
done
#-----------------------------------------------------------------------
#end
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Make the job executable
chmod +x /etc/cron.daily/backup-wordpress-site-files
# Open crontab configuration
nano /etc/crontab
# Add the following, between the starting [[ and ending ]], to the end of the file under the "# m h dom mon dow user command" line:
# [[
# Backup Wordpress Site Files Daily @ 12:45 AM
45 00 * * * root /etc/cron.daily/backup-wordpress-site-files
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Optional: you can test this job if you like by typing the following command
sh /etc/cron.daily/backup-wordpress-site-files
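# Optional: list the newest archive and peek inside it to confirm the backup looks complete
ls -lh /srv/backup/daily/websites/
tar tzf /srv/backup/daily/websites/[datestamp].website.code.[sitefolder].tar.gz | head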
# Setup the nightly server backup cleanup
nano /etc/cron.daily/backup-cleanup
# add all of the text between the starting [[ and ending ]]
# [[
#!/bin/bash
#start
#-----------------------------------------------------------------------
find /srv/backup/daily/databases/ -name '*.gz' -mtime +7 | xargs rm -f;
find /srv/backup/daily/websites/ -name '*.gz' -mtime +7 | xargs rm -f;
# Are Weekly Backups Implemented?
# find /srv/backup/weekly/ -name '*.gz' -mtime +30 | xargs rm -f;
#-----------------------------------------------------------------------
#end
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Make the job executable
chmod +x /etc/cron.daily/backup-cleanup
# Open crontab configuration
nano /etc/crontab
# Add the following, between the starting [[ and ending ]], to the end of the file under the "# m h dom mon dow user command" line:
# [[
# Remove Backups Greater than 7 Days Old Daily @ 01:00 AM
00 01 * * * root /etc/cron.daily/backup-cleanup
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
# Optional: you can test this job if you like by typing the following command
sh /etc/cron.daily/backup-cleanup
# ------------------------------------------------------------------------------
# SETUP LOG Rotation -----------------------------------------------------------
# ------------------------------------------------------------------------------
# Backup the log configuration
cp /etc/logrotate.conf /etc/logrotate.conf.original
rm /etc/logrotate.conf
nano /etc/logrotate.conf
# add all of the text between the starting [[ and ending ]]
# [[
# see "man logrotate" for details
# rotate log files daily
daily
# keep one rotated backlog
rotate 1
# create new (empty) log files after rotating old ones
create
# use the date in backlog filenames
dateext
# compress backlogs with a delay
compress
# packages drop log rotation information into this directory
include /etc/logrotate.d
# no packages own wtmp, or btmp -- we'll rotate them here
/var/log/wtmp {
missingok
weekly
create 0664 root utmp
rotate 7
}
/var/log/btmp {
missingok
weekly
create 0664 root utmp
rotate 7
}
# system-specific logs may be configured here
# ]]
# save file, exit
ctrl+x, y, enter = save file and exit
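# Optional: dry-run the new configuration before rebooting (debug mode, nothing is actually rotated)
logrotate -d /etc/logrotate.conf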
# reboot the box
reboot
# Take Rackspace Cloud Snapshot Immediately!!!