# split-csv FILE N
# Split FILE (any file with a 4-character extension, e.g. ".csv") into N
# equal chunks named <base>__part_00<ext>, <base>__part_01<ext>, ...
# Requires GNU split (-d numeric suffixes, -n chunk count) and bash 4.2+
# (negative substring offsets).
function split-csv() {
    local fn="${1:0:-4}";   # path without the 4-char extension
    local fe="${1: -4}";    # the extension itself, dot included
    # Quoted expansions so paths with spaces survive word splitting.
    split -d -n "$2" "${fn}${fe}" "${fn}__part_";
    # split emits bare numeric suffixes; re-attach the extension to each part.
    local f;
    for f in "${fn}"__part*; do mv "$f" "${f}${fe}"; done;
};
Usage: split-csv bigfile.csv 10   # splits bigfile.csv into 10 parts: bigfile__part_00.csv, bigfile__part_01.csv, ...
# Split the file given in $1 (which must carry a 4-character extension,
# e.g. ".csv") into $2 numbered chunks, then give every chunk the same
# extension as the source file.
function split-csv() {
# Peel the 4-char extension off the argument (bash 4.2+ negative offsets).
base=${1:0:-4};
ext=${1: -4};
# -d = numeric suffixes, -n = number of chunks to produce.
split -d -n $2 ${base}${ext} ${base}__part_;
# The generated parts have no extension yet; append it now.
for part in ${base}__part*; do mv $part ${part}${ext}; done;
};
Usage: split-csv bigfile.csv 10   # splits bigfile.csv into 10 parts: bigfile__part_00.csv, bigfile__part_01.csv, ...
# config/initializers/clear_logs.rb
# This snippet simply clears your logs when they are too large.
# Every time you start the rails environment it checks log sizes
# and clears the logs for you if necessary.
if Rails.env.development?
  MAX_LOG_SIZE = 10.megabytes
  logs = File.join(Rails.root, 'log', '*.log')
brew tap homebrew/versions
brew install v8-319
gem install libv8 -v '3.16.14.19' -- --with-system-v8
gem install therubyracer -- --with-v8-dir=/usr/local/opt/v8-319
bundle install
Checks if the remote branch is master, then asks for confirmation. Based on https://gist.github.com/ColCh/9d48693276aac50cac37a9fce23f9bda, but modified to check the remote branch name instead of the local one, so it also catches really dangerous accidents like the one below:
git push -f origin e09b7418a310114f6aaf495f969c3859095a99af:master
Further info: https://dev.ghost.org/prevent-master-push/, https://coderwall.com/p/jp7d5q/create-a-global-git-commit-hook, https://git-scm.com/docs/githooks#_pre_push, https://stackoverflow.com/questions/22585091/git-hooks-pre-push-script-does-not-receive-input-via-stdin
package main

import "fmt"

func main() {
	initialBoard := [][]int{
		[]int{1, 1, 1, 4},
		[]int{1, 1, 2, 4},
		[]int{2, 1, 2, 4},
{
  "window.zoomLevel": -1,
  "files.associations": {
    "*.peg": "pegjs"
  },
  "gitlens.advanced.messages": {
    "suppressCommitHasNoPreviousCommitWarning": false,
    "suppressCommitNotFoundWarning": false,
    "suppressFileNotUnderSourceControlWarning": false,
    "suppressGitVersionWarning": false,
Original idea from Transfer files from an FTP server to S3 by "Hack N Cheese".
I moved roughly a terabyte in less than an hour. Granted, I couldn't take advantage of lftp's `--parallel=30` switch due to my ftp source limiting me to one connection at a time, but `use-pget-n=N` did seem to help out.
m1.xlarge
) so data transfers aren't limited by your local bandwidth at least. I also attached a fat 2TB EBS volume and symlinked it to /bigdisk
, and made sure the EBS volume was deleted after I terminated this EC2 box. I hope lftp 2.6.4 is available as a stable package by the next time I attempt this.
lftp 2.6.4+ (not easy to compile, so read the INSTALL file and plow through all your missing dependencies - you'll also need to re-run `sudo ./configure && sudo make install`).
This is a quick how-to for installing vault on AWS Linux, mostly to remind myself. At the end of this tutorial, you'll have a working vault server, using s3 for the backend, self signed certificates for tls, and supervisord to ensure that the vault server is always running, and starts on reboot.
First things first, let's set up an s3 bucket to use as the storage backend for our vault instance.
From the AWS Management Console, go to the S3 console.
Click on the Create Bucket
button
Array.prototype.splitAtEvery = (callback) ->
  sections = []
  arrayClone = this.slice(0)
  $.each(arrayClone, (idx, item) =>
    sectionsLength = 0
    _.each(sections, (section) =>
      sectionsLength += section.length;
    )
    if callback(this, idx, item) == true
      sections.push(arrayClone.slice(0, idx+1-sectionsLength))