Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Download and set up [from the command line] Java, Scala, SBT and Spark on an Ubuntu machine.
### This script downloads and sets up the following binaries.
## Java: 8u91
## Scala: 2.11.8
## SBT: 0.13.11
## Spark: 1.6.1

## Give hduser sudo rights.
## NOTE(review): assumes the hduser account already exists — create it
## first (e.g. `sudo adduser hduser`) or this usermod fails.
sudo usermod -a -G sudo hduser

## Install the essential packages.
sudo apt-get install ssh zip p7zip p7zip-full multitail htop screen tmux -y # subversion git python-pip

## Layout: downloads land in binaries/, installed trees live in apps/.
export LOCATION=$HOME
mkdir -p "$LOCATION/apps" "$LOCATION/binaries"

## Download the JDK binary from the Oracle website.
## The license cookie header is required for a non-interactive download.
BASE_URL_8=http://download.oracle.com/otn-pub/java/jdk/8u91-b14/jdk-8u91
declare -a PLATFORMS=("-linux-x64.tar.gz")
for platform in "${PLATFORMS[@]}"; do
  ## Fix: the original passed "$JDK_VERSION$platform" as a stray first
  ## argument, which wget treated as an extra (bogus) URL to fetch; the
  ## now-unused JDK_VERSION variable has been dropped as well.
  wget -qc --no-check-certificate --no-cookies \
    --header "Cookie: oraclelicense=accept-securebackup-cookie" \
    "${BASE_URL_8}${platform}" -P "$LOCATION/binaries/"
done
## Download Scala binary.
wget -q http://downloads.lightbend.com/scala/2.11.8/scala-2.11.8.tgz -P "$LOCATION/binaries/"
## Download SBT binary.
wget -q https://dl.bintray.com/sbt/native-packages/sbt/0.13.11/sbt-0.13.11.tgz -P "$LOCATION/binaries/"
## Download Spark binary.
wget -q http://d3kbcqa49mib13.cloudfront.net/spark-1.6.1-bin-hadoop2.6.tgz -P "$LOCATION/binaries/"

## Explode all the downloaded archives.
## Fix: extract into $LOCATION explicitly (-C) — the original extracted
## into the current working directory, which broke the mv steps below
## whenever the script was not launched from $LOCATION.
find "$LOCATION/binaries" -maxdepth 1 -type f -exec tar -xf {} -C "$LOCATION" \;

## Move the exploded dirs into the apps folder.
mv "$LOCATION/jdk1.8.0_91" "$LOCATION/apps/jdk8.0_91"
mv "$LOCATION/sbt" "$LOCATION/apps/"
mv "$LOCATION/spark-1.6.1-bin-hadoop2.6" "$LOCATION/apps/spark-1.6.1"
mv "$LOCATION/scala-2.11.8" "$LOCATION/apps/"
## Append Java, Scala, SBT and Spark locations to .bashrc.
## $LOCATION expands now; the \$-escaped names expand each time .bashrc
## is sourced (an unquoted heredoc delimiter behaves like double quotes).
cat >> ~/.bashrc <<EOF

export JAVA_HOME=$LOCATION/apps/jdk8.0_91
export SPARK_HOME=$LOCATION/apps/spark-1.6.1
export SCALA_HOME=$LOCATION/apps/scala-2.11.8
export SBT_HOME=$LOCATION/apps/sbt
export PATH=\$JAVA_HOME/bin:\$SCALA_HOME/bin:\$SBT_HOME/bin:\$SPARK_HOME/bin:\$SPARK_HOME/sbin:\$PATH

EOF
## Source .bashrc so the current shell picks up the installed frameworks.
source ~/.bashrc
## For the "SSH Secure Shell" client to connect to Ubuntu 16.04, enable
## legacy algorithms in sshd.
## SECURITY NOTE(review): CBC ciphers, arcfour and group1-sha1 are
## deprecated/weak — only enable them if the legacy client truly needs it.
## Fix: append with `sudo tee -a` instead of piping an echo command
## string into `sudo -s`.
sudo tee -a /etc/ssh/sshd_config > /dev/null <<'EOF'

Ciphers aes128-cbc,aes192-cbc,aes256-cbc,blowfish-cbc,arcfour
KexAlgorithms diffie-hellman-group1-sha1

EOF
sudo service ssh restart
### This script downloads and sets up the following binaries.
## Java: 8u121
## Scala: 2.12.1
## SBT: 0.13.13
## Spark: 2.1.0

## Install the essential packages.
## Fix: dos2unix was listed twice in the original package list.
sudo apt-get install dos2unix zip p7zip p7zip-full multitail htop screen tmux -y # subversion git python-pip

## Layout: downloads land in binaries/, installed trees live in apps/.
export LOCATION=$HOME
mkdir -p "$LOCATION/apps" "$LOCATION/binaries"

## Download the JDK binary from the Oracle website.
## The license cookie header is required for a non-interactive download.
BASE_URL_8=http://download.oracle.com/otn-pub/java/jdk/8u121-b13/e9e7ea248e2c4826b92b3f075a80e441/jdk-8u121
declare -a PLATFORMS=("-linux-x64.tar.gz")
for platform in "${PLATFORMS[@]}"; do
  ## Fix: dropped the stray "$JDK_VERSION$platform" first argument that
  ## wget interpreted as an extra (bogus) URL; the `rev | cut | rev`
  ## pipeline that computed the now-unused JDK_VERSION is gone with it.
  wget -qc --no-check-certificate --no-cookies \
    --header "Cookie: oraclelicense=accept-securebackup-cookie" \
    "${BASE_URL_8}${platform}" -P "$LOCATION/binaries/"
done
## Download Scala binary.
wget -q http://downloads.lightbend.com/scala/2.12.1/scala-2.12.1.tgz -P "$LOCATION/binaries/"
## Download SBT binary.
wget -q https://dl.bintray.com/sbt/native-packages/sbt/0.13.13/sbt-0.13.13.tgz -P "$LOCATION/binaries/"
## Download Spark binary.
wget -q http://d3kbcqa49mib13.cloudfront.net/spark-2.1.0-bin-hadoop2.7.tgz -P "$LOCATION/binaries/"

## Explode all the downloaded archives.
## Fix: extract into $LOCATION explicitly (-C) — the original extracted
## into the current working directory, which broke the mv steps below
## whenever the script was not launched from $LOCATION.
find "$LOCATION/binaries" -maxdepth 1 -type f -exec tar -xf {} -C "$LOCATION" \;

## Move the exploded dirs to version-neutral names under apps/.
mv "$LOCATION/jdk1.8.0_121" "$LOCATION/apps/java"
mv "$LOCATION/sbt-launcher-packaging-0.13.13" "$LOCATION/apps/sbt"
mv "$LOCATION/spark-2.1.0-bin-hadoop2.7" "$LOCATION/apps/spark"
mv "$LOCATION/scala-2.12.1" "$LOCATION/apps/scala"
## Append Java, Scala, SBT and Spark locations to .bashrc.
## $LOCATION expands now; the \$-escaped names expand each time .bashrc
## is sourced (an unquoted heredoc delimiter behaves like double quotes).
cat >> ~/.bashrc <<EOF

export JAVA_HOME=$LOCATION/apps/java
export SPARK_HOME=$LOCATION/apps/spark
export SCALA_HOME=$LOCATION/apps/scala
export SBT_HOME=$LOCATION/apps/sbt
export PATH=\$JAVA_HOME/bin:\$SCALA_HOME/bin:\$SBT_HOME/bin:\$SPARK_HOME/sbin:\$SPARK_HOME/bin:\$PATH

EOF
## Source .bashrc so the current shell picks up the installed frameworks.
source ~/.bashrc
##### Or even better, log off and log in again at this point.

## Generate helper scripts to start/stop a standalone Spark master + worker.
## $SPARK_HOME expands now, so the generated scripts hard-code the path.
## NOTE(review): the worker URL assumes the machine hostname is "ubuntu" —
## adjust spark://<host>:7077 if yours differs.
## Fix: write with `>` (idempotent on re-run; the original `>>` appended
## duplicates) and give the generated scripts a shebang.
cat > ~/start_spark.sh <<EOF
#!/bin/bash
$SPARK_HOME/sbin/start-master.sh
$SPARK_HOME/sbin/start-slave.sh spark://ubuntu:7077
EOF
cat > ~/stop_spark.sh <<EOF
#!/bin/bash
$SPARK_HOME/sbin/stop-slave.sh
$SPARK_HOME/sbin/stop-master.sh
EOF
## Fix: the original chmod'ed bare filenames (wrong unless the current
## directory is ~) and used sudo needlessly on files the user owns.
chmod +x ~/start_spark.sh ~/stop_spark.sh

## Quieten logging: raise every level in any log4j.properties to ERROR.
## Fix: one scan of / with chained sed expressions instead of four
## separate full filesystem scans.
sudo find / -type f -name "log4j.properties" -exec sed -i \
  -e 's/TRACE/ERROR/g' -e 's/DEBUG/ERROR/g' \
  -e 's/INFO/ERROR/g' -e 's/WARN/ERROR/g' {} +

## Start the cluster and smoke-test it with the SparkPi example.
~/start_spark.sh
"$SPARK_HOME/bin/run-example" SparkPi
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.