```bash
# force-zero the XFS log and repair; the filesystem must be unmounted,
# and metadata changes still in the log may be lost
xfs_repair -L /dev/mapper/centos-root
```
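Since `-L` discards the dirty log, it's worth a read-only pass first to see what's actually wrong; a minimal check against the same device:

```bash
# no-modify mode: reports what would be repaired without touching the disk
xfs_repair -n /dev/mapper/centos-root
```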
```bash
# stop NetworkManager for the current session
service NetworkManager stop
```
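If the box runs systemd (as the `centos-root` device name above suggests, CentOS 7 or later), the equivalent also covers the next reboot:

```bash
# systemd equivalent; disable keeps it from starting again at boot
systemctl stop NetworkManager
systemctl disable NetworkManager
```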
```nginx
server {
    listen       80;
    server_name  localhost;

    root        /Users/YOUR_USERNAME/Sites;
    access_log  /Library/Logs/default.access.log main;

    location / {
        include /usr/local/etc/nginx/conf.d/php-fpm;
    }
}
```
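After editing, validate the configuration and reload without dropping connections (Homebrew-style paths assumed, matching the `/usr/local/etc/nginx` include above):

```bash
# check syntax first, then apply
sudo nginx -t
sudo nginx -s reload
```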
```bash
# install build dependencies
sudo yum install -y git-core zlib zlib-devel gcc-c++ patch readline readline-devel \
    libyaml-devel libffi-devel openssl-devel make bzip2 autoconf automake libtool \
    bison curl sqlite-devel

# clone rbenv and the ruby-build plugin
# (https instead of git://, which GitHub no longer serves)
cd ~
git clone https://github.com/sstephenson/rbenv.git .rbenv
git clone https://github.com/sstephenson/ruby-build.git ~/.rbenv/plugins/ruby-build

# put rbenv on the PATH and initialize it in new shells
echo 'export PATH="$HOME/.rbenv/bin:$HOME/.rbenv/plugins/ruby-build/bin:$PATH"' >> ~/.bash_profile
echo 'eval "$(rbenv init -)"' >> ~/.bash_profile
```
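With the profile reloaded, installing and activating a Ruby looks like this (the version number is just an example; pick one from the list):

```bash
source ~/.bash_profile
rbenv install -l        # list available versions
rbenv install 2.7.8     # example version
rbenv global 2.7.8
ruby -v
```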
```bash
#!/bin/bash

# Set these variables
USER_NAME="you@example.com"
API_KEY="XXXXXXXX_YOUR_DHQ_API_KEY_XXXXXXXXXXXXX"
START_REV='SOME_ARBITRARY_GIT_REVISION_HASH_TO_START_FROM'
DHQ_API_PROJ="your-project-shortname"
DHQ_BASE_URL="https://yoursite.deployhq.com/"
DHQ_SERVER_GROUP="YOUR_SERVER_GROUP_UUID"
DHQ_SERVER_USERNAME="your-server-username"
```
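These variables would then feed an authenticated API call. A sketch with curl, assuming DeployHQ's JSON deployments endpoint; the exact path and payload field names are assumptions and should be checked against the current DeployHQ API docs:

```bash
# basic auth with email + API key; endpoint and payload shape are assumptions
curl -u "$USER_NAME:$API_KEY" \
     -H "Content-Type: application/json" -H "Accept: application/json" \
     -X POST "${DHQ_BASE_URL}projects/${DHQ_API_PROJ}/deployments" \
     -d "{\"deployment\": {\"start_revision\": \"$START_REV\", \"end_revision\": \"HEAD\"}}"
```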
```php
#!/usr/bin/php
<?php
define('LOCK_FILE', "/var/run/" . basename($argv[0], ".php") . ".lock");

// Locked if the lock file holds the PID of a live process (needs the posix
// extension); otherwise (re)claim the lock for this run.
function isLocked() {
    if (file_exists(LOCK_FILE) && posix_kill((int) file_get_contents(LOCK_FILE), 0)) return true;
    file_put_contents(LOCK_FILE, getmypid());
    return false;
}

if (isLocked()) die("Already running.\n");

// The rest of your script goes here....
require 'vendor/autoload.php';
use Aws\Common\Aws;
```
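A lock like this matters mostly when the script runs on a schedule, where a slow run could otherwise overlap the next one; a hypothetical crontab entry (path and interval are placeholders):

```bash
# run every 5 minutes; the lock file prevents overlapping runs
*/5 * * * * /usr/local/bin/yourscript.php >> /var/log/yourscript.log 2>&1
```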
```php
<?php
/**
 * Returns a pre-signed URL to access a restricted AWS S3 object.
 *
 * @param string $access_key    the AWS access key
 * @param string $secret_key    the AWS secret key associated with the access key
 * @param string $bucket        the S3 bucket
 * @param string $canonical_uri the object in the S3 bucket expressed as a canonical URI.
 *                              This should begin with the / character, and should not be URL-encoded
 * @param int    $expires       the time until the pre-signed URL expires, in seconds
 * @return string the pre-signed URL
 */
```
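The signing implementation itself is not shown here; as a quick sanity check, the AWS CLI can produce an equivalent pre-signed URL (bucket and key are placeholders):

```bash
# generate a URL valid for one hour
aws s3 presign s3://your-bucket/path/to/object --expires-in 3600
```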
```powershell
$destination_path = 'C:\Users\dilli\Downloads\media_dump'
$connection_string = '[AZURE_STORAGE_CONNECTION_STRING]'
$storage_account = New-AzureStorageContext -ConnectionString $connection_string
$containers = Get-AzureStorageContainer -Context $storage_account

Write-Host 'Starting Storage Dump...'
foreach ($container in $containers)
{
    # pull every blob in the container down to the local dump folder
    # ($destination_path must already exist)
    Get-AzureStorageBlob -Container $container.Name -Context $storage_account |
        Get-AzureStorageBlobContent -Destination $destination_path -Context $storage_account
}
```
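As a cross-platform alternative, the newer Azure CLI can do the same dump one container at a time (the container name is a placeholder):

```bash
# download every blob in a container to the local folder
az storage blob download-batch \
    --connection-string "$AZURE_STORAGE_CONNECTION_STRING" \
    --source your-container --destination ./media_dump
```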
```bash
# By default the old-space heap limit in Node.js is small (roughly 512MB on
# 32-bit systems and 1-1.5GB on 64-bit systems, depending on the version).
# Processing large data files can then crash with:
#   FATAL ERROR: JS Allocation failed - process out of memory
# Avoid it by raising the limit (the value is in MB):
node --max_old_space_size=1024 server.js   # increase to 1GB
node --max_old_space_size=2048 server.js   # increase to 2GB
node --max_old_space_size=3072 server.js   # increase to 3GB
node --max_old_space_size=4096 server.js   # increase to 4GB
node --max_old_space_size=5120 server.js   # increase to 5GB
node --max_old_space_size=6144 server.js   # increase to 6GB
```
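To confirm the flag took effect, Node's built-in v8 module can report the active heap limit:

```bash
# prints the heap size limit in MB; should roughly reflect the flag above
node --max_old_space_size=4096 -e \
  "console.log(require('v8').getHeapStatistics().heap_size_limit / 1024 / 1024)"
```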