# Install OpenJDK 8 JDK (the -devel package; Hadoop tooling needs javac/jps).
# -y: run unattended.
yum install -y java-1.8.0-openjdk-devel.x86_64
# Working directories: HDFS storage and Hadoop temp space.
mkdir -p /server/deep/data
mkdir -p /server/deep/tmp
cd /server/deep || exit 1
# The apache.claz.org mirror no longer carries old releases; archive.apache.org
# permanently hosts every past Hadoop release.
wget https://archive.apache.org/dist/hadoop/common/hadoop-2.6.3/hadoop-2.6.3.tar.gz
tar xzf hadoop-2.6.3.tar.gz
mv hadoop-2.6.3 hadoop
Add the following environment variables to ~/.bashrc:
# Java paths (from the java-1.8.0-openjdk-devel package installed above).
export JAVA_HOME=/etc/alternatives/java_sdk_1.8.0
export JRE_HOME="$JAVA_HOME/jre"
export PATH="$PATH:$JAVA_HOME/bin:$JRE_HOME/bin"
# Hadoop layout: every component home points at the single unpacked tree.
export HADOOP_HOME=/server/deep/hadoop
export HADOOP_INSTALL="$HADOOP_HOME"
export HADOOP_MAPRED_HOME="$HADOOP_HOME"
export HADOOP_COMMON_HOME="$HADOOP_HOME"
export HADOOP_HDFS_HOME="$HADOOP_HOME"
export YARN_HOME="$HADOOP_HOME"
export HADOOP_COMMON_LIB_NATIVE_DIR="$HADOOP_HOME/lib/native"
export PATH="$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin"
# Reload the shell configuration so the new variables take effect immediately.
source ~/.bashrc
# Hadoop's env script must have JAVA_HOME set explicitly (edit shown below).
vi hadoop/etc/hadoop/hadoop-env.sh
Search for the following lines and replace them as shown (original line commented out, JAVA_HOME set explicitly):
# --- original lines in hadoop-env.sh ---
# The java implementation to use.
export JAVA_HOME=${JAVA_HOME}
# --- replace them with the following ---
# The java implementation to use.
# export JAVA_HOME=${JAVA_HOME}
# OPENOVATE CUSTOM:
export JAVA_HOME=/etc/alternatives/java_sdk_1.8.0/
vi hadoop/etc/hadoop/core-site.xml
In between the <configuration> and </configuration> tags, add:
<!-- Base directory for Hadoop's scratch/temporary files (keeps them off /tmp). -->
<property>
<name>hadoop.tmp.dir</name>
<value>/server/deep/tmp</value>
</property>
<!-- Default filesystem URI. fs.default.name is deprecated in Hadoop 2.x;
     fs.defaultFS is the supported key for the same setting. -->
<property>
<name>fs.defaultFS</name>
<value>hdfs://127.0.0.1:9000</value>
</property>
vi hadoop/etc/hadoop/hdfs-site.xml
In between the <configuration> and </configuration> tags, add:
<!-- Single-node setup: keep only one replica of each block. -->
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<!-- dfs.name.dir / dfs.data.dir are deprecated in Hadoop 2.x; the supported
     keys are dfs.namenode.name.dir / dfs.datanode.data.dir. -->
<property>
<name>dfs.namenode.name.dir</name>
<value>file:///server/deep/data/hdfs/namenode</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:///server/deep/data/hdfs/datanode</value>
</property>
# mapred-site.xml does not exist by default in Hadoop 2.x; create it from the template.
cp hadoop/etc/hadoop/mapred-site.xml.template hadoop/etc/hadoop/mapred-site.xml
vi hadoop/etc/hadoop/mapred-site.xml
In between the <configuration> and </configuration> tags, add:
<!-- Run MapReduce jobs on YARN rather than the local/classic runner. -->
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
vi hadoop/etc/hadoop/yarn-site.xml
In between the <configuration> and </configuration> tags, add:
<!-- Auxiliary shuffle service that MapReduce jobs require on YARN NodeManagers. -->
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
# Passwordless SSH to localhost is required by the start-dfs.sh/start-yarn.sh scripts.
# Use RSA: DSA (ssh-dss) keys are deprecated and disabled by default in modern OpenSSH.
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
# sshd silently rejects authorized_keys that are group/world writable.
chmod 0600 ~/.ssh/authorized_keys
hdfs namenode -format
If the format fails with a ".local" hostname-resolution error, you may need to edit the hosts file (`sudo vi /etc/hosts`) and add the line: 127.0.0.1 shared-app.novalocal
# Start the HDFS daemons (namenode, datanode, secondary namenode).
hadoop/sbin/start-dfs.sh
# Start the YARN daemons (resourcemanager, nodemanager).
hadoop/sbin/start-yarn.sh