dot files and environment (you should NEVER blindly use other people's dot files)
export PATH=/usr/local/lib:/usr/local/bin:/usr/local/sbin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/X11/bin
. ~/dot/snowbash
. ~/dot/completions
genpath
export PATH="/usr/local/bin:$PATH";
export NODE_ENV="development"
export NVM_DIR="/usr/local/nvm"
[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"
# Recursively list broken symbolic links under a directory.
linkchk () {
  for element in "$1"/*; do
    # '-h' tests for symbolic link, '-e' for existence, '-d' for directory.
    [ -h "$element" ] && [ ! -e "$element" ] && echo "\"$element\""
    [ -d "$element" ] && linkchk "$element"
  done
}
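# Example (hypothetical path): print any dangling symlinks under ~/dot/bin:
#   linkchk ~/dot/bin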
addbin(){
  # TODO: ADD SUPPORT FOR $1=DIR (CALLED WITH DIR NOT FILE)
  if [ -e "$1" ]
  then
    echo "exists $1"
    if [ ! -x "$1" ]; then echo "Not Executable";
    else
      # ~ does not expand inside quotes, so use $HOME for the registry path.
      [ -e "$HOME/dot/bin/$1" ] && rm "$HOME/dot/bin/$1"
      dirname "$(realpath "$1")" > "$HOME/dot/bin/$1"
    fi
  fi
  bup
  # linkchk ~/dot/bin/
}
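# Example (assumes ./mytool is an executable file in the current directory):
#   addbin mytool   # records mytool's directory in ~/dot/bin/mytool so genpath can add it to PATH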
genpath(){
  for f in $(ls -p ~/dot/bin/ | grep -v /)
  do
    # export PATH=$PATH:$f
    echo "$f"
    if [ -e "$(cat ~/dot/bin/"$f")" ]
    then
      echo "adding $(cat ~/dot/bin/"$f")"
      export PATH="$PATH:$(cat ~/dot/bin/"$f")"
    else
      echo "File At ~/dot/bin/$f -> $(cat ~/dot/bin/"$f") dne"
    fi
  done
}
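# Example: rebuild PATH from the directories registered under ~/dot/bin:
#   genpath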
#!/bin/bash
# --------------------------- ENV ---------------------------
export NMAPDIR=/usr/local/Cellar/nmap/7.01/share/nmap/
export DEV="/Users/snow/Documents/dev"
export MSF_DATABASE_CONFIG=/usr/local/share/metasploit-framework/config/database.yml
export ANDROID_HOME="$DEV/cordova/android-sdk"
export PATH="$PATH:/Users/snow/Documents/dev/install/w3af"
export PATH="$PATH:$ANDROID_HOME/tools:$ANDROID_HOME/platform-tools"
export PATH="$PATH:$DEV/cordova/android-sdk/platform-tools"
source ~/dot/functions/mongoose-search.sh
# --------------------------- JAVA & HADOOP ---------------------------
export JAVA_HOME=$(/usr/libexec/java_home)
alias mcomp='mvn compile -Dskiptests'
alias mpack='mvn package -Pdist,native -DskipTests -Dtar'
alias report='hdfs dfsadmin -report'
function dumpy(){
  if [ -n "$2" ]
  then
    mahout vectordump -i "$1" -p true -vs 15 -dt sequencefile -sort "$1" -d "$2" -n true -o .dump
  else
    mahout vectordump -i "$1" -p true -vs 15 -sort "$1" -n true -o .dump
  fi
  if [ -n "$3" ]
  then
    tail -n "$3" .dump
  else
    tail -n 20 .dump
  fi
}
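# Example (hypothetical HDFS paths): dump a vector file, resolving term ids against a
# sequencefile dictionary, and show the last 50 lines of the dump:
#   dumpy tfidf-vectors/part-r-00000 dictionary.file-0 50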
# --------------------------- NETWORK ---------------------------
# alias mip="ifconfig | sed -En 's/127.0.0.1//;s/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p'"
alias lsp='lsof -i'
alias flush_cache='dscacheutil -flushcache'
# Ubuntu Version
alias ports='netstat -tulp'
# Mac Version
alias ports='lsof -i 4tcp -sTCP:LISTEN -P -n'
alias helpe='echo "wget --limit-rate=200k --no-clobber --convert-links --random-wait -r -p -E -e robots=off -U mozilla"';
mip (){
ifconfig | sed -En 's/127.0.0.1//;s/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p';
}
myip (){
lynx -dump -hiddenlinks=ignore -nolist http://checkip.dyndns.org:8245/ | grep $
}
# --------------------------- PARTICLE ---------------------------
export PARTICLE_HOME="$DEV/hardware/particle"
export PARTICLE_KEY="$PARTICLE_HOME/spark-server"
export PARTICLE_CODE="$PARTICLE_HOME/firmware/modules"
alias pflash="pushd $PARTICLE_CODE; make all PLATFORM=photon APP=vader program-dfu;popd"
# pflash() {
# CMD="vader";
# if [ ! "" = "$1" ]
# then
# CMD=$1
# fi
# pushd $PARTICLE_CODE;
# make all PLATFORM=photon APP=$CMD program-dfu;
# popd
# }
# Single quotes so $(mip) and $PARTICLE_KEY resolve when the alias is run, not when it is defined.
alias pkey='particle keys server $PARTICLE_KEY/default_key.pub.pem $(mip)'
# --------------------------- PYTHON ---------------------------
# export PYTHONPATH=/Library/Python/2.7/site-packages/
# export PYTHONPATH=/usr/local/lib/python3.5/site-packages/
# added by Miniconda3 3.19.0 installer
# export PATH="/Users/snow/Documents/dev/install/miniconda3/bin:$PATH"
alias simps='python -m SimpleHTTPServer'
pyswitch (){
export PYPRE="/usr/local/lib"
if [ "$PYTHONPATH" = "$PYPRE/python2.7/site-packages/" ]
then
export PYTHONPATH=/usr/local/lib/python3.5/site-packages/
alias python='python3.5'
else
# export PYTHONPATH=/Library/Python/2.7/site-packages/
export PYTHONPATH="$PYPRE/python2.7/site-packages/"
alias python='python2.7'
fi
if [ ! "" = "$1" ]
then
export CONDA="$1"
fi
if [ "$CONDA" = "true" ]
then
export PYTHONPATH="/usr/local/lib/python3.5/site-packages/";
export PATH="/Users/snow/Documents/dev/install/miniconda3/bin:$PATH"
fi
}
# pyswitch
condaswitch(){
if [ ! "" = "$1" ]
then
export CONDA="$1"
fi
if [ "$CONDA" = "true" ]
then
export PATH=$_REGULAR_PATH
export CONDA="false"
else
export _REGULAR_PATH=$PATH
export CONDA="true"
export PYTHONPATH=/usr/local/lib/python3.5/site-packages/
export PATH="/Users/snow/Documents/dev/install/miniconda3/bin:$PATH"
fi
}
# condaswitch "false"
rubyswitch(){
if [ ! "$_LP" = "" ]
then
echo "Was $PATH";
export PATH=$_LP;
echo "IS $PATH";
unset _LP;
else
echo "Was $PATH";
export _LP="$PATH";
export PATH="$HOME/.rbenv/shims:$PATH";
echo "IS $PATH";
fi
}
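# Example: toggle rbenv shims on and off (first call prepends ~/.rbenv/shims, second restores the saved PATH):
#   rubyswitch && ruby -v
#   rubyswitch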
# --------------------------- GIT ---------------------------
alias gr='git config --get remote.origin.url'
alias gd='git diff'
alias gl="git log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %C(yellow):%an%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative";
alias gb='git branch'
alias ga='git add -A' # add a file to staging
alias gc='git commit -m'
alias gs='git status'
alias gay='git add -A'
alias clone='git clone'
cl(){
clone $1;
echo ""
# cd `echo $_ | sed -E 's#.*\/([^.]+).*#\1#'`
echo ""
echo "CHANGING INTO `echo $1 | sed -E 's#.*\/([^.]+).*#\1#'`"
}
# --------------------------- FS ---------------------------
fsizes(){
  # $1 is a file containing one filename per line; wc each file that exists.
  files=""
  [ -f "$1" ] && while read file; do
    [ -f "$file" ] && files="$files $file"
  done < "$1"
  wc $files
}
# NOTE: this shadows the shell builtin 'trap'; interactive functions defined after it may pick up the alias.
alias trap='tree --prune -I $(cat .gitignore ~/.gitignore | egrep -v "^#.*$|^[[:space:]]*$" | tr "\\n" "|")'
# Makes a readme with the file structure in the current dir
# and takes one parameter (the project name)
readme(){
touch README.md;
echo "Project: #$1" >> README.md;
echo "" >> README.md;
echo "### Files" >> README.md;
echo "" >> README.md;
echo '```' >> README.md;
echo "$(trap)" >> README.md;
echo '```' >> README.md;
}
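# Example (assumes a .gitignore exists so the 'trap' tree alias above works):
#   readme MyProject   # appends a file-tree section to README.md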
alias bashit="test -f 'bashit' && source bashit && echo $'\nSourcing bashit `pwd`\n' && echo $BASHIT_CMDS";
# alias cd='pushd $1 && bashit';
alias bd='popd'
alias school='cd ~/Documents/school/current'
alias docs='cd ~/Documents'
alias dev='cd ~/Documents/dev'
alias ls="gls --color=auto" # alias ls='ls -a'
alias ll="gls --color=auto -lh"
alias la="gls --color=auto -a"
alias l="gls --color=auto"
alias ..='cd ../'
alias ....='cd ../../'
alias ......='cd ../../../'
alias rf='rm -rf'
md() {
mkdir -p "$*"
cd "$*"
}
alias op='open .'
alias clean_me='rm -rf ~/.Trash/* & rm ~/.DS_Store*'
alias cds='cd ~/Desktop/Duality'
alias lp='dpkg --get-selections' # UBUNTU
function lserv(){ # UBUNTU
sudo service --status-all | grep -E "\[ \+ \]|\[ - \] + \w{0,100} + ((\-+\w{0,100})?){0,10}";
}
alias lu='cat /etc/passwd | cut -d: -f1 | sort'
alias lg='cat /etc/group |cut -d: -f1 | sort'
# --------------------------- PROC UTILS ---------------------------
alias k9='kill -9';
alias ka='killall'
destroy () {
echo "Killing: $1";
echo "PIDS: ( `pidof $1`)";
for p in `pidof $1` ; do echo "killing $p"; kill -9 $p; done;
}
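# Example (pidof comes from the brew package list below):
#   destroy node   # kill -9 every running 'node' process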
alias compr='tar -zcvf'
alias pack='dpkg --get-selections| less' #UBUNTU
alias txz='tar cfJ';
extract () {
if [ -f "$1" ] ; then
case $1 in
*.tar.bz2) tar xvjf $1 ;;
*.tar.gz) tar xvzf $1 ;;
*.bz2) bunzip2 $1 ;;
*.rar) rar x $1 ;;
*.gz) gunzip $1 ;;
*.tar) tar xvf $1 ;;
*.tbz2) tar xvjf $1 ;;
*.tgz) tar xvzf $1 ;;
*.zip) unzip $1 ;;
*.Z) uncompress $1 ;;
*.7z) 7z x $1 ;;
*) echo "don't know how to extract '$1'..." ;;
esac
else
echo "'$1' is not a valid file!"
fi
}
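# Example (hypothetical archive names):
#   extract release.tar.gz
#   extract photos.zip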
alias bup='source ~/.bash_profile'
alias redis-start="redis-server /usr/local/etc/redis.conf"
alias chrome='/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome --disable-web-security --allow-file-access-from-files'
alias subl='/Applications/Sublime\ Text.app/Contents/SharedSupport/bin/subl'
# ssnow(){
# echo "export SNOW_DIR=\"$*\";" > ~/dot/snow_dir;
# . ~/dot/snow_dir;
# echo "Snow dir now $SNOW_DIR";
# }
# alias snow='cd $SNOW_DIR'
# --------------------------- DEV: WEB ---------------------------
alias bi='bower install --save'
alias ni="npm install --save"
alias nid="npm install -D"
alias nig="npm install -g"
alias nl='npm link'
alias rs='rails server'
alias hrail='echo "rails generate scaffold <Model> <attribute>:<data type>"'
yng(){
yo angular-fullstack:$1 $2;
}
b64(){
EXTENSION=`echo "$1" | cut -d'.' -f2`
echo "var image=$.parseHTML('<img src=\"data:image/"$EXTENSION";base64,"$( base64 $1 )"\"/>')[0];" > $1-64.js
}
wraplns(){
COUNTER=0
while IFS='' read -r line || [[ -n "$line" ]]; do
# echo $line
if [ "$COUNTER" -ne "0" ] ; then echo -n '\n'$line >> $1-'wrapped.js'
else echo -n 'var wrapped_obj="'$line > $1-'wrapped.js'
fi
let COUNTER++
done < "$1"
echo '";'>> $1-'wrapped.js'
}
# brew install libpng fontconfig freetype
# ln -s /usr/local/Cellar/freetype/2.4.10 /usr/local/opt/freetype
# ln -Fs /usr/local/Cellar/fontconfig/2.11.1/lib/libfontconfig.1.dylib /usr/local/lib/libfontconfig.1.dylib
# ln -Fs /usr/local/Cellar/fontconfig/2.11.1/lib/libfontconfig.a /usr/local/lib/libfontconfig.a
# ln -Fs /usr/local/Cellar/fontconfig/2.11.1/lib/libfontconfig.dylib /usr/local/lib/libfontconfig.dylib
# brew install imagemagick potrace
2dxf(){
  convert "$1" "$1.bmp";
  potrace "$1.bmp" -b dxf;
  rm "$1.bmp";
}
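# Example (hypothetical file; needs imagemagick and potrace from the brew list below):
#   2dxf logo.png   # rasterizes to BMP, traces it, and leaves a DXF next to the source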
#########################################################################
alias shell_fix='echo"
## You should be in groups adm cdrom sudo dip plugdev lpadmin sambashare
## To append the group you would just enter
sudo usermod -aG adm,cdrom,sudo,dip,plugdev,lpadmin,sambashare,snow snow
## To deactivate init scripts in /etc/rc#.d
## You add a K instead of an S in front of the link
## Grub configuration is at /etc/default/grub
## The main list is also at /boot/grub/menulist
## After you edit the files you have to run
sudo update-grub
## To reconfigure a package, you run
dpkg-reconfigure <package_name>
## Xorg logs are stored at /var/log/Xorg.0.log
## Building hadoop dist
mvn compile -Pnative
mvn package -Pdist,native,docs -DskipTests -Dtar
## The compiled package will be at
hadoop-dist/target/hadoop-X.X.X-SNAPSHOT.tar.gz
## Pushd and Popd in action
pushd common; ln -s ../build.properties build.properties; popd
## SAFE MODE OFF
hdfs dfsadmin -safemode leave
"'
export HOMEBREW_GITHUB_API_TOKEN=69e4d6a22940ec0ed7fd5a2c9dd0dbe77b660f37
export MSF_DATABASE_CONFIG=/usr/local/share/metasploit-framework/config/database.yml
# Copyright (c) 2010, Huy Nguyen, http://www.huyng.com
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# * Neither the name of Huy Nguyen nor the names of contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# USAGE:
# bms bookmarkname - saves the current dir as bookmarkname
# bmg bookmarkname - jumps to that bookmark
# bmg b[TAB] - tab completion is available
# bmd bookmarkname - deletes the bookmark
# bmd [TAB] - tab completion is available
# bml - list all bookmarks
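# Example session:
#   cd ~/Documents/dev && bms dev   # bookmark the current directory as "dev"
#   cd / && bmg dev                 # jump back to it later
#   bml                             # list every saved bookmark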
# setup file to store bookmarks
if [ ! -n "$SDIRS" ]; then
SDIRS=~/.sdirs
fi
touch $SDIRS
RED="0;31m"
GREEN="0;33m"
# save current directory to bookmarks
function bms {
check_help $1
_bookmark_name_valid "$@"
if [ -z "$exit_message" ]; then
_purge_line "$SDIRS" "export DIR_$1="
CURDIR=$(echo $PWD| sed "s#^$HOME#\$HOME#g")
echo "export DIR_$1=\"$CURDIR\"" >> $SDIRS
fi
}
# jump to bookmark
function bmg {
check_help $1
source $SDIRS
target="$(eval $(echo echo $(echo \$DIR_$1)))"
if [ -d "$target" ]; then
cd "$target"
elif [ ! -n "$target" ]; then
echo -e "\033[${RED}WARNING: '${1}' bashmark does not exist\033[00m"
else
echo -e "\033[${RED}WARNING: '${target}' does not exist\033[00m"
fi
}
# delete bookmark
function bmd {
check_help $1
_bookmark_name_valid "$@"
if [ -z "$exit_message" ]; then
_purge_line "$SDIRS" "export DIR_$1="
unset "DIR_$1"
fi
}
# list bookmarks with dirname
function bml {
check_help $1
source $SDIRS
# if color output is not working for you, comment out the line below '\033[1;32m' == "red"
env | sort | awk '/DIR_.+/{split(substr($0,5),parts,"="); printf("\033[0;33m%-20s\033[0m %s\n", parts[1], parts[2]);}'
# uncomment this line if color output is not working with the line above
# env | grep "^DIR_" | cut -c5- | sort |grep "^.*="
}
# list bookmarks without dirname
function _l {
source $SDIRS
env | grep "^DIR_" | cut -c5- | sort | grep "^.*=" | cut -f1 -d "="
}
# validate bookmark name
function _bookmark_name_valid {
exit_message=""
if [ -z "$1" ]; then
exit_message="bookmark name required"
echo $exit_message
elif [ "$1" != "$(echo $1 | sed 's/[^A-Za-z0-9_]//g')" ]; then
exit_message="bookmark name is not valid"
echo $exit_message
fi
}
# completion command
function _comp {
local curw
COMPREPLY=()
curw=${COMP_WORDS[COMP_CWORD]}
COMPREPLY=($(compgen -W '`_l`' -- $curw))
return 0
}
# ZSH completion command
function _compzsh {
reply=($(_l))
}
# safe delete line from sdirs
function _purge_line {
if [ -s "$1" ]; then
# safely create a temp file
t=$(mktemp -t bashmarks.XXXXXX) || exit 1
trap "rm -f -- '$t'" EXIT
# purge line
sed "/$2/d" "$1" > "$t"
mv "$t" "$1"
# cleanup temp file
rm -f -- "$t"
trap - EXIT
fi
}
# print out help for the forgetful
function check_help {
if [ "$1" = "-h" ] || [ "$1" = "-help" ] || [ "$1" = "--help" ] ; then
echo ''
echo 'bms <bookmark_name> - Saves the current directory as "bookmark_name"'
echo 'bmg <bookmark_name> - Goes (cd) to the directory associated with "bookmark_name"'
echo 'bmd <bookmark_name> - Deletes the bookmark'
echo 'bml - Lists all available bookmarks'
kill -SIGINT $$
fi
}
# bind completion command for g,p,d to _comp
if [ $ZSH_VERSION ]; then
compctl -K _compzsh bmg
compctl -K _compzsh bmd
else
shopt -s progcomp
complete -F _comp bmg
complete -F _comp bmd
fi
###-begin-yo-completion-###
if type complete &>/dev/null; then
_yo_completion () {
local words cword
if type _get_comp_words_by_ref &>/dev/null; then
_get_comp_words_by_ref -n = -n @ -w words -i cword
else
cword="$COMP_CWORD"
words=("${COMP_WORDS[@]}")
fi
local si="$IFS"
IFS=$'\n' COMPREPLY=($(COMP_CWORD="$cword" \
COMP_LINE="$COMP_LINE" \
COMP_POINT="$COMP_POINT" \
yo-complete completion -- "${words[@]}" \
2>/dev/null)) || return $?
IFS="$si"
}
complete -o default -F _yo_completion yo
fi
###-end-yo-completion-###
ant
arp-scan
atk
autoconf
automake
boost
brew-cask
cairo
cake
cassandra20
cgal
class-dump
cloog018
cmake
coreutils
curl
dfu-util
dnsmasq
docker
doxygen
elasticsearch
ettercap
ffmpeg
flac
fontconfig
freetype
freexl
ftgl
gcc
gcc-arm-none-eabi-49
gcc49
gdal
gdb
gdbm
gdk-pixbuf
geos
gettext
giflib
glib
gmp
gmp4
gobject-introspection
gpp
gradle
graphicsmagick
graphviz
gtk+
hadoop
harfbuzz
hdf5
hicolor-icon-theme
hydra
icu4c
imagemagick
isl
isl011
jpeg
json-c
kafka
lame
libevent
libffi
libgeotiff
liblo
liblwgeom
libmpc
libmpc08
libnet
libogg
libpng
libsndfile
libspatialite
libtiff
libtool
libusb
libvo-aacenc
libvorbis
libxml2
libyaml
little-cms
little-cms2
lua
luajit
luarocks
lynx
lzlib
mad
mahout
maven
mongodb
mono
mpfr
mpfr2
mysql
ngx_openresty
nmap
node
ntfs-3g
openssl
pango
parallel
pcap
pcre
phantomjs
php56
pidof
pig
pillow
pixman
pkg-config
portaudio
portmidi
postgis
postgresql
potrace
proj
py2cairo
pygobject
pygtk
pyqt
python
python3
qt
qwt
rbenv
readline
redis
ruby-build
ruby21
sdl
sfcgal
sip
sox
spark
sqlite
sqlmap
swftools
swig
szip
the_silver_searcher
tidy-html5
tor
torsocks
tree
unittest-cpp
unixodbc
webp
wget
wifi-password
x11vnc
x264
xvid
xz
zookeeper
#!/bin/bash
# NVM_NODE_UPDATER
# v2.0.0
#
# Makes keeping NVM-managed, global NodeJS installations up-to-date a breeze.
# First, the global NodeJS installation is updated to 'latest'.
# Second, all global NPM packages are migrated, then also updated to 'latest'.
# Requires the Node Version Manager (https://github.com/creationix/nvm).
#
nvm_node_updater () {
# Check for latest NodeJS version
NODE_VERSION_INSTALLED=$(nvm_ls_current)
NODE_VERSION_LIST=($(nvm_remote_versions | awk '/^v/'))
NODE_VERSION_LATEST=${NODE_VERSION_LIST[${#NODE_VERSION_LIST[@]} - 1]}
# Pretty Print
function PRINT_BLUE () {
while read; do
printf '\e[44m%s\e[0m\n' "[STASH NODE-UPDATE] $REPLY";
done
}
if [ "${NODE_VERSION_INSTALLED}" != "${NODE_VERSION_LATEST}" ]
then
# Update to latest NodeJS version (migrating all currently installed global NPM packages)
echo "Updating global NodeJS: ${NODE_VERSION_INSTALLED} to ${NODE_VERSION_LATEST}" | PRINT_BLUE
nvm install ${NODE_VERSION_LATEST} --reinstall-packages-from=${NODE_VERSION_INSTALLED}
nvm alias default ${NODE_VERSION_LATEST} && nvm use default
echo "Updated default NodeJS to ${NODE_VERSION_LATEST}." | PRINT_BLUE
else
echo "Global NodeJS installation is up to date:" | PRINT_BLUE
echo "-> ${NODE_VERSION_INSTALLED}" | PRINT_BLUE
fi
# Check for latest NPM package versions
NPM_GLOBAL_PACKAGES_LIST=($(npm --global list --parseable --depth=0 --loglevel silent | sed -e 's@.*/@@' | tr '\r\n' ' ' | sed -e 's/lib//g' -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'))
NPM_GLOBAL_PACKAGES_COUNT=${#NPM_GLOBAL_PACKAGES_LIST[@]}
NPM_GLOBAL_PACKAGES=$( IFS=' '; echo "${NPM_GLOBAL_PACKAGES_LIST[*]}" )
NPM_GLOBAL_PACKAGES_OUTDATED_LIST=($(npm --global outdated --parseable --loglevel silent | cut -d: -f3 | cut -f1 -d'@' | tr '\r\n' ' ' | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'))
NPM_GLOBAL_PACKAGES_OUTDATED_COUNT=${#NPM_GLOBAL_PACKAGES_OUTDATED_LIST[@]}
NPM_GLOBAL_PACKAGES_OUTDATED=$( IFS=' '; echo "${NPM_GLOBAL_PACKAGES_OUTDATED_LIST[*]}" )
if [ -n "$NPM_GLOBAL_PACKAGES_OUTDATED_LIST" ]
then
# Update to latest NPM package versions
echo "Number of global packages requiring update: ${NPM_GLOBAL_PACKAGES_OUTDATED_COUNT}" | PRINT_BLUE
echo "-> ${NPM_GLOBAL_PACKAGES_OUTDATED}" | PRINT_BLUE
npm --global install --loglevel silent ${NPM_GLOBAL_PACKAGES}
else
echo "All ${NPM_GLOBAL_PACKAGES_COUNT} global packages are up to date:" | PRINT_BLUE
echo ${NPM_GLOBAL_PACKAGES} | PRINT_BLUE
fi
}
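# Example: run manually after nvm has been sourced (see the NVM_DIR block above):
#   nvm_node_updater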
export RUNABLES="";
export RUNABLE_DIRS="~/dot/env/ ~/dot/functions/ ~/dot"
alias runables_update='export RUNABLES="";for runner in "$(runables $RUNABLE_DIRS)"; do while read src; do export RUNABLES="$src $RUNABLES"; . "$src";done <<< "$runner"; done;'
runables_in(){
# SUPER WEIRD WAY OF GETTING RID OF ~/
target=$(eval echo "$1")
target=$(realpath "$target")
if [ -d "$target" ] ; then
find "$target" -maxdepth 1 -perm -111 -type f
else
echo "(runables_in) directory $1 doesn't exist"
# # return 0
fi
}
runables_read(){
while read fname; do
[ -e "$fname" ] || continue
echo "$fname"
# runables_add "$fname"
done
}
runables(){
if [ "$1" = "-h" ]; then
echo "runables <directory> ...<directory>"
echo " -h : help"
echo " -r : read from pipe (cat executables.txt | runables -r)"
echo " -u : update runable scripts"
echo " -l : list active runable scripts"
echo
echo "to make the scripts runable give the script executable permissions"
echo "move it to one of the directories in 'RUNABLE_DIRS'"
echo "RUNABLE_DIRS: $RUNABLE_DIRS"
return 0
fi
if [ "$1" = "-l" ]; then
echo "Listing active runables"
echo
runables_active
return 0
fi
if [ "$1" = "-u" ]; then
runables_update
echo "Runables updated"
return 0
fi
if [ "$1" = "-r" ]; then
while read dname; do
runner=$( runables_in "$dname" | runables_read)
echo "$runner"
done
else
for dname in "$@"; do
runner=$( runables_in "$dname" | runables_read)
echo "$runner"
done
fi
}
runables_active(){
for runner in $RUNABLES; do
echo "$runner";
done
}
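# Examples (directories must contain executable files to be picked up):
#   runables ~/dot/functions   # list runable scripts in a directory
#   runables -u                # re-source everything in RUNABLE_DIRS
#   runables -l                # show which scripts are currently active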
alias capture='sudo bettercap --parsers "*" --spoofer ARP'
alias scan='sudo arp-scan --interface=en0 192.168.1.0/24 -D'
alias nmap.walk='nmap -sCV --osscan-guess'
alias nmap.enum='nmap --script=http-enum -p80 -n'
#alias kl='wget --limit-rate=200k --no-clobber --convert-links --random-wait -r -p -E -e robots=off -U mozilla'
alias kl='wget -Nc -k --random-wait -r -p -E -e robots=off -U mozilla'
#!/bin/bash
. ~/dot/alias
. ~/dot/functions/bashmarks.bash
. ~/dot/functions/gitme.bash
. ~/dot/functions/addbin.bash
export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][\[$(tput setaf 3)\]\u\[$(tput setaf 2)\]@\[$(tput setaf 4)\]\h \[$(tput setaf 5)\]\W\[$(tput setaf 1)\]]\[$(tput setaf 7)\]\\$ \[$(tput sgr0)\]"
[
{
"keys": ["super+shift+l"],
"command": "expand_selection",
"args": {"to": "line"}
},
{
"keys": ["super+l"],
"command": "split_selection_into_lines"
},
{
"keys": ["option+l"],
"command": "split_selection_into_lines"
},
{
"keys": ["ctrl+i"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/reindent.sublime-macro"}
},
{
"keys": ["option+s"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/sort-selection.sublime-macro"}
},
{
"keys": ["option+p"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/swap-selections.sublime-macro"}
},
{
"keys": ["option+up"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/swap-line-up.sublime-macro"}
},
{
"keys": ["option+down"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/swap-line-down.sublime-macro"}
},
{
"keys": ["option+u"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/upcase.sublime-macro"}
},
{
"keys": ["option+y"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/downcase.sublime-macro"}
},
{
"keys": ["ctrl+s"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/swap-case.sublime-macro"}
},
{
"keys": ["option+d"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/dup-line.sublime-macro"}
},
{
"keys": ["option+i"],
"command": "run_macro_file",
"args": {"file": "file:///Users/snow/Documents/config/sublime/macros/reindent-selection.sublime-macro"}
},
{
"keys": ["option+["],
"command": "find_all_under"
},
{
"keys": ["option+]"],
"command": "show_panel",
"args": {"panel": "find_in_files"}
},
{
"keys": ["ctrl+e"],
"command": "expand_selection",
"args": {
"to": "scope"
}
},
{
"keys": ["ctrl+shift+e"],
"command": "expand_selection",
"args": {
"to": "indentation"
}
},
{
"keys": ["ctrl+shift+b"],
"command": "expand_selection",
"args": {
"to": "brackets"
}
}
]
{
"always_prompt_for_file_reload": true,
"auto_complete_commit_on_tab": true,
"auto_complete_delay": 25,
"bold_folder_labels": true,
"close_windows_when_empty": true,
"color_scheme": "Packages/User/SublimeLinter/Monokai Bright (SL).tmTheme",
"copy_with_empty_selection": false,
"find_selected_text": true,
"font_size": 13,
"highlight_line": true,
"highlight_modified_tabs": true,
"ignored_packages":
[
"Emmet",
"Jade",
"Vintage"
],
"indent_guide_options":
[
"draw_active"
],
"open_files_in_new_window": false,
"scroll_past_end": true,
"scroll_speed": 1.5,
"tab_completion": true,
"tab_size": 2,
"theme": "Soda Dark 3.sublime-theme",
"translate_tabs_to_spaces": true,
"trim_trailing_white_space_on_save": false,
"typescript_plugin_tsc_version": "1.7.3",
"word_wrap": true
}