Instantly share code, notes, and snippets.

Embed
What would you like to do?
Vagrantfile for Hadoop Cluster with CentOS 7 and Hadoop 2.8.0 (3 hosts)
#!/bin/sh
# Bring up the whole cluster. Worker nodes are created first so they are
# already reachable when the master's provision script copies configuration
# files and SSH keys to them.
vagrant up node1 node2 master
#!/bin/sh
# master.sh — provision the Hadoop master node.
# Usage: master.sh <hostname>   (hostname is passed by the Vagrantfile)
set -x
HOSTNAME=$1
: "Set hostname" && {
# Quote the expansions: an empty or whitespace-containing argument would
# otherwise silently mangle the command line.
sudo hostname "$HOSTNAME"
echo "$HOSTNAME" | sudo tee /etc/hostname > /dev/null
}
: "Edit hosts file" && {
# Append the cluster host entries exactly once; `if ! grep -q` makes the
# idempotence check explicit instead of inspecting $? after a redirected grep.
if ! grep -q 192.168.33.10 /etc/hosts; then
cat << 'EOF' | sudo tee -a /etc/hosts > /dev/null
192.168.33.10 master
192.168.33.11 node1
192.168.33.12 node2
EOF
fi
}
: "Install common packages" && {
# epel-release must be installed first: sshpass (used later by ssh-copy-id)
# comes from the EPEL repository, not the base CentOS repos.
sudo yum -y install epel-release
sudo yum -y install java-1.8.0-openjdk-devel openssh-clients rsync wget sshpass
}
: "Download Hadoop" && {
# Test for the tarball directly. The original `ls | grep hadoop-*.tar.gz`
# contained an unquoted glob that the shell expanded before grep ever ran.
if [ ! -f hadoop-2.8.0.tar.gz ]; then
wget http://ftp.riken.jp/net/apache/hadoop/common/hadoop-2.8.0/hadoop-2.8.0.tar.gz -nv
tar xf hadoop-2.8.0.tar.gz
fi
}
: "Set environment variables to shell RC file" && {
# Append the Hadoop/Java exports once, keyed on JAVA_HOME already being present.
if ! grep -q JAVA_HOME ~/.bashrc; then
cat << 'EOF' >> ~/.bashrc
export JAVA_HOME=/usr/lib/jvm/jre-1.8.0-openjdk
export HADOOP_HOME=~/hadoop-2.8.0
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$JAVA_HOME/bin:$PATH
EOF
fi
# `.` instead of `source`: this script runs under #!/bin/sh and `source` is a
# bashism (it only worked because CentOS links sh to bash).
. ~/.bashrc
}
: "Hadoop execution check" && {
# Smoke test: prints the Hadoop version iff PATH/JAVA_HOME were set correctly.
hadoop version
}
: "Install SSH public key to all nodes" && {
# Generate the key pair only on the first run; on re-provisioning,
# ssh-keygen would otherwise block on its interactive "overwrite?" prompt.
if [ ! -f ~/.ssh/id_rsa ]; then
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
fi
# "vagrant" is the stock password of the vagrant user on bento base boxes.
for node in master node1 node2; do
sshpass -p "vagrant" ssh-copy-id -i ~/.ssh/id_rsa.pub -o "StrictHostKeyChecking no" "$node"
done
}
: "Setting configuration files" && {
: "etc/hadoop/slaves" && {
# Worker hosts started by the start-dfs.sh / start-yarn.sh helper scripts.
cat << 'EOF' > "$HADOOP_HOME/etc/hadoop/slaves"
node1
node2
EOF
}
: "etc/hadoop/core-site.xml" && {
# Point the default filesystem at the master's NameNode (first run only).
if ! grep -q fs.defaultFS "$HADOOP_HOME/etc/hadoop/core-site.xml"; then
cat << 'EOF' > /tmp/core-site.xml.property
<property>
<name>fs.defaultFS</name>
<value>hdfs://master:9000</value>
</property>
EOF
# Insert the staged properties right after the opening <configuration> tag;
# the /^$/d also strips blank lines from the stock config file.
sed -i -e '
/^<configuration>$/r /tmp/core-site.xml.property
/^$/d
' "$HADOOP_HOME/etc/hadoop/core-site.xml"
fi
}
: "etc/hadoop/hdfs-site.xml" && {
# Replication factor 2 (two DataNodes); SecondaryNameNode runs on master.
if ! grep -q dfs.replication "$HADOOP_HOME/etc/hadoop/hdfs-site.xml"; then
cat << 'EOF' > /tmp/hdfs-site.xml.property
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.namenode.secondary.http-address</name>
<value>master:50090</value>
</property>
EOF
sed -i -e '
/^<configuration>$/r /tmp/hdfs-site.xml.property
/^$/d
' "$HADOOP_HOME/etc/hadoop/hdfs-site.xml"
fi
}
: "etc/hadoop/mapred-site.xml" && {
# Run MapReduce on YARN. mapred-site.xml does not exist in a fresh install,
# so the first grep fails (missing file) and the file is seeded from its
# template. The grep pattern was the truncated "mapreduce.framework.nam";
# fixed to the full property name.
if ! grep -q mapreduce.framework.name "$HADOOP_HOME/etc/hadoop/mapred-site.xml"; then
# Spelled out instead of `cp file{.template,}`: brace expansion is a bashism
# and this script declares #!/bin/sh.
cp "$HADOOP_HOME/etc/hadoop/mapred-site.xml.template" "$HADOOP_HOME/etc/hadoop/mapred-site.xml"
cat << 'EOF' > /tmp/mapred-site.xml.property
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
EOF
sed -i -e '
/^<configuration>$/r /tmp/mapred-site.xml.property
/^$/d
' "$HADOOP_HOME/etc/hadoop/mapred-site.xml"
fi
}
: "etc/hadoop/yarn-site.xml" && {
# Shuffle service for MapReduce plus the ResourceManager location.
if ! grep -q yarn.nodemanager.aux-service "$HADOOP_HOME/etc/hadoop/yarn-site.xml"; then
cat << 'EOF' > /tmp/yarn-site.xml.property
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>master</value>
</property>
EOF
sed -i -e '
/^<configuration>$/r /tmp/yarn-site.xml.property
/^$/d
' "$HADOOP_HOME/etc/hadoop/yarn-site.xml"
fi
}
: "Copy to slaves" && {
# Push the finished configuration to the worker nodes over the passwordless
# SSH set up in the previous section.
for node in node1 node2; do
scp "$HADOOP_HOME"/etc/hadoop/* "$node:$HADOOP_HOME/etc/hadoop/"
done
}
}
: "Format HDFS" && {
# NOTE(review): -force reformats the NameNode on EVERY provision run, wiping
# existing HDFS metadata. Acceptable for a disposable dev cluster, but guard
# this step if data should survive a re-run of `vagrant provision`.
$HADOOP_HOME/bin/hdfs namenode -format -force
}
: "Start daemons" && {
# Each daemon is started only if jps does not already report it running,
# so the script can be re-run safely.
: "HDFS" && {
if ! jps | grep -q NameNode; then
"$HADOOP_HOME/sbin/start-dfs.sh"
fi
}
: "YARN" && {
if ! jps | grep -q ResourceManager; then
"$HADOOP_HOME/sbin/start-yarn.sh"
fi
}
: "MapReduce JobHistory server" && {
if ! jps | grep -q JobHistoryServer; then
"$HADOOP_HOME/sbin/mr-jobhistory-daemon.sh" --config "$HADOOP_CONF_DIR" start historyserver
fi
}
}
#!/bin/sh
# nodes.sh — provision a Hadoop worker node.
# Usage: nodes.sh <hostname>   (hostname is passed by the Vagrantfile)
set -x
HOSTNAME=$1
: "Set hostname" && {
# Quote the expansions so an empty/odd argument fails loudly instead of
# silently mangling the command line.
sudo hostname "$HOSTNAME"
echo "$HOSTNAME" | sudo tee /etc/hostname > /dev/null
}
: "Edit hosts file" && {
# Append the cluster host entries exactly once; `if ! grep -q` makes the
# idempotence check explicit instead of inspecting $? after a redirected grep.
if ! grep -q 192.168.33.10 /etc/hosts; then
cat << 'EOF' | sudo tee -a /etc/hosts > /dev/null
192.168.33.10 master
192.168.33.11 node1
192.168.33.12 node2
EOF
fi
}
: "Install common packages" && {
# Workers need the JDK plus rsync/wget; no sshpass here — only the master
# initiates SSH connections.
sudo yum -y install java-1.8.0-openjdk-devel openssh-clients rsync wget
}
: "Download Hadoop" && {
# Test for the tarball directly. The original `ls | grep hadoop-*.tar.gz`
# contained an unquoted glob that the shell expanded before grep ever ran.
if [ ! -f hadoop-2.8.0.tar.gz ]; then
wget http://ftp.riken.jp/net/apache/hadoop/common/hadoop-2.8.0/hadoop-2.8.0.tar.gz -nv
tar xf hadoop-2.8.0.tar.gz
fi
}
: "Set environment variables to shell RC file" && {
# BUG FIX: the guard previously grepped /etc/hosts, so this export block was
# appended to ~/.bashrc again on every provision run. Check ~/.bashrc itself.
if ! grep -q JAVA_HOME ~/.bashrc; then
cat << 'EOF' >> ~/.bashrc
export JAVA_HOME=/usr/lib/jvm/jre-1.8.0-openjdk
export HADOOP_HOME=~/hadoop-2.8.0
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$JAVA_HOME/bin:$PATH
EOF
fi
# `.` instead of `source`: this script runs under #!/bin/sh and `source` is a
# bashism (it only worked because CentOS links sh to bash).
. ~/.bashrc
}
: "Hadoop execution check" && {
# Smoke test: prints the Hadoop version iff PATH/JAVA_HOME were set correctly.
hadoop version
}
# -*- mode: ruby -*-
# vi: set ft=ruby :
# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
VAGRANTFILE_API_VERSION = "2"

# Three-machine Hadoop cluster: one master plus two workers, all on the
# 192.168.33.0/24 host-only network with 2 GiB RAM each. The per-machine
# settings are data-driven so the three otherwise-identical definitions
# are not repeated.
MACHINES = {
  master: { ip: "192.168.33.10", script: "master.sh", primary: true },
  node1:  { ip: "192.168.33.11", script: "nodes.sh",  primary: false },
  node2:  { ip: "192.168.33.12", script: "nodes.sh",  primary: false },
}

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
  MACHINES.each do |name, opts|
    config.vm.define name, primary: opts[:primary] do |machine|
      machine.vm.box = "bento/centos-7.3"
      machine.vm.network "private_network", ip: opts[:ip]
      machine.vm.provider "virtualbox" do |vb|
        vb.memory = "2048"
      end
      # Provisioning runs unprivileged (as the vagrant user); the machine
      # name is passed so the script can set the guest hostname.
      machine.vm.provision "shell", privileged: false do |s|
        s.path = opts[:script]
        s.args = name.to_s
      end
    end
  end
end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment