# Print the DOCKER_* environment exports for the boot2docker VM (eval them to apply)
boot2docker shellinit
# Run nginx detached, mapping host port 8080 to container port 80
docker run -d -p 8080:80 --name web nginx
# Open a shell inside the running container
docker exec -it web bash
# Inside the container: replace the default nginx index page
echo myself for president > /usr/share/nginx/html/index.html
# Look up the container's IP address (grep, or a Go template for just that field)
docker inspect web | grep IP
docker inspect -f '{{json .NetworkSettings.IPAddress}}' container
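# A quick check from the host that the edited page is served on the mapped port
# (assumes `boot2docker ip` prints the VM's address, as in the section below):
curl http://$(boot2docker ip):8080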
## Create the VM
boot2docker init -m 8192 -s 30000   # memory and disk size are given in MB (8 GB RAM, ~30 GB disk)
### Start the VM
boot2docker up
## Set the variables
eval "$(boot2docker shellinit)"
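# For reference, shellinit prints export statements along these lines
# (the address and cert path here are illustrative, not from this setup):
#   export DOCKER_HOST=tcp://192.168.59.103:2376
#   export DOCKER_CERT_PATH=/Users/you/.boot2docker/certs/boot2docker-vm
#   export DOCKER_TLS_VERIFY=1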
## Find the IP
boot2docker ip
## SSH into the Docker VM
boot2docker ssh
# List existing Kafka topics
/usr/hdp/current/kafka-broker/bin/kafka-topics.sh --list --zookeeper localhost:2181
# Create a "test" topic with one partition and a replication factor of 1
/usr/hdp/current/kafka-broker/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic test --replication-factor 1 --partitions 1
# Produce messages to the topic (type messages, Ctrl-C to exit)
/usr/hdp/current/kafka-broker/bin/kafka-console-producer.sh --broker-list phdns01.cloud.hortonworks.com:6667 --topic test
# Consume the topic from the beginning
/usr/hdp/current/kafka-broker/bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning
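# Optional check that the topic exists with the expected partition/replication settings:
/usr/hdp/current/kafka-broker/bin/kafka-topics.sh --describe --zookeeper localhost:2181 --topic test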
# Install R and then the following packages
# Note: repr initially failed to install, hence the devtools/install_github fallback below
yum install R-*
# Run the remaining commands inside an R session:
install.packages("evaluate", dependencies = TRUE)
install.packages("base64enc", dependencies = TRUE)
install.packages("devtools", dependencies = TRUE)
library(devtools)
install_github('IRkernel/repr')
install.packages("dplyr", dependencies = TRUE)
install.packages("caret", dependencies = TRUE)
install.packages("repr", dependencies = TRUE)
HDFS test
Make sure that the Google Cloud Storage connector is on the Hadoop CLASSPATH, as described in the blog.
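One way to do this is to append the connector jar to HADOOP_CLASSPATH in hadoop-env.sh; the jar path below is only an assumption, so adjust it to wherever the connector was actually placed:

export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/hadoop/lib/gcs-connector-latest-hadoop2.jar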
[hdfs@hdpgcp-1-1435537523061 ~]$ hdfs dfs -ls gs://hivetest/
15/06/28 21:15:32 INFO gcs.GoogleHadoopFileSystemBase: GHFS version: 1.4.0-hadoop2
15/06/28 21:15:33 WARN gcs.GoogleHadoopFileSystemBase: No working directory configured, using default: 'gs://hivetest/'
# Stop Ambari and kill any remaining Hadoop Java processes
ambari-server stop
ambari-agent stop
pkill -9 java
#################################
# Remove Packages
################################
yum -y remove ambari-\*
yum -y remove hcatalog\*
yum -y remove hive\*
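# Optional sanity check that none of the removed packages are still installed:
yum list installed | grep -Ei 'ambari|hcatalog|hive'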
# Generate a "show create table" statement for every table in the Hive metastore (MySQL),
# then run the generated file through Hive to print each table's DDL
# (-N suppresses the column header so it does not end up in the SQL file)
mysql -N -u hive -p -e "select concat('show create table ', TBL_NAME, ';') from TBLS" hive > /tmp/file.sql
hive -f /tmp/file.sql
read -p "enter HS2 hostname: " HS2
read -p "enter username: " username
echo "enter password"
read -s passwd
read -p "enter filename: " filename
beeline -u jdbc:hive2://$HS2:10000/default -n $username -p $passwd -f $filename
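# Usage sketch: save the lines above as a script (run_beeline.sh is just an example
# name), make it executable, and run it; it prompts for each value before calling beeline:
chmod +x run_beeline.sh
./run_beeline.sh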
# Install expect so the interactive sync-ldap prompts can be scripted
yum install expect*
#!/usr/bin/expect
# Answers the Ambari admin login/password prompts automatically
# (replace admin/admin with the real Ambari admin credentials)
spawn ambari-server sync-ldap --existing
expect "Enter Ambari Admin login:"
send "admin\r"
expect "Enter Ambari Admin password:"
send "admin\r"
expect eof
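# To run it, save the expect block above to its own file (sync-ldap.exp is just an
# example name), make it executable, and execute it:
chmod +x sync-ldap.exp
./sync-ldap.exp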
drop table if exists crime;
create table crime (
caseid string,
`Date` string,
block string,
description string,
sdesc string,
ldesc string,
arrest char(2),
domestic char(2),