操作系统: Centos 7
三台机器(hadoop-0为master):
hadoop-0:192.168.116.130
hadoop-1:192.168.116.131
hadoop-2:192.168.116.132
软件包:
hadoop下载地址:
http://apache.fayea.com/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz
java下载地址:
http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html
# Create the hadoop user on all three nodes and set its password.
$ useradd hadoop
$ passwd hadoop
192.168.116.130主机:
# Set the hostname (ASCII quotes — the original full-width “smart” quotes
# would be written literally into /etc/hostname and break name resolution).
echo "hadoop-0" > /etc/hostname
192.168.116.131主机:
# Set the hostname (ASCII quotes — the original full-width “smart” quotes
# would be written literally into /etc/hostname and break name resolution).
echo "hadoop-1" > /etc/hostname
192.168.116.132主机:
# Set the hostname (ASCII quotes — the original full-width “smart” quotes
# would be written literally into /etc/hostname and break name resolution).
echo "hadoop-2" > /etc/hostname
# Map every node's hostname to its IP on ALL three machines.
# ASCII quotes are required — the original full-width quotes would be
# appended verbatim into /etc/hosts, producing unresolvable entries.
echo "192.168.116.130 hadoop-0" >> /etc/hosts
echo "192.168.116.131 hadoop-1" >> /etc/hosts
echo "192.168.116.132 hadoop-2" >> /etc/hosts
# Verify that each hostname resolves via /etc/hosts (run on every node).
ping hadoop-0
ping hadoop-1
ping hadoop-2
# Generate a passwordless RSA key pair for the hadoop user and authorize
# the local key (run as the hadoop user on every node).
$ ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
$ cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
# sshd ignores authorized_keys unless it is only writable by its owner.
$ chmod 0600 ~/.ssh/authorized_keys
将三台机器上的 ~/.ssh/id_rsa.pub汇总到同一个authorized_keys文件,并用该文件替换三台机器上的~/.ssh/authorized_keys文件,然后对authorized_keys文件授予0600权限
复制命令:
# Example: ship this node's public key to hadoop-1.
$ scp ~/.ssh/id_rsa.pub hadoop@hadoop-1:/tmp/authorized_keys
# NOTE(review): this mv REPLACES ~/.ssh/authorized_keys on hadoop-1 with a
# single key. Per the step described above, the merged file containing all
# three public keys should be distributed instead — confirm before running
# this verbatim, and re-apply chmod 0600 afterwards.
$ mv /tmp/authorized_keys ~/.ssh/
# Unpack the JDK tarball (assumes it was downloaded into /opt and this is
# run from there — TODO confirm working directory).
tar -zxvf jdk-8u101-linux-x64.tar.gz
# Append the following JAVA_HOME/CLASSPATH/PATH lines to /etc/profile:
vi /etc/profile
JAVA_HOME=/opt/jdk1.8.0_101
CLASSPATH=.:$JAVA_HOME/lib
PATH=$JAVA_HOME/bin:$PATH
export JAVA_HOME CLASSPATH PATH
# Reload the profile so the new variables take effect in the current shell.
source /etc/profile
root@ubuntu:/opt# java -version
java version "1.8.0_101"
Java(TM) SE Runtime Environment (build 1.8.0_101-b13)
Java HotSpot(TM) 64-Bit Server VM (build 25.101-b13, mixed mode)
# Unpack the Hadoop tarball into the current directory.
$ tar -zxvf hadoop-2.7.3.tar.gz
# Move the directory that tar just created (relative path — the original
# referenced /tmp/hadoop-2.7.3, which only exists if the tar was run in
# /tmp; the relative path is correct regardless of the working directory).
$ mv hadoop-2.7.3 /opt/hadoop
# Give the hadoop user ownership and group-writable permissions.
$ sudo chown -R hadoop:hadoop /opt/hadoop/
$ sudo chmod -R 775 /opt/hadoop/
以下修改可在一台机器上完成,然后将修改后的hadoop打包发送的另外两台机器
# hadoop-env.sh: hard-code JAVA_HOME — the inherited ${JAVA_HOME} default
# is not reliably visible to the Hadoop daemons started over ssh.
# The java implementation to use.
# export JAVA_HOME=${JAVA_HOME}
export JAVA_HOME=/opt/jdk1.8.0_101
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/opt/hadoop/tmp</value>
        <description>Abase for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://hadoop-0:9000</value>
    </property>
</configuration>
<configuration>
    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>hadoop-2:50090</value>
    </property>
    <property>
        <name>dfs.replication</name>
        <value>2</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/opt/hadoop/tmp/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/opt/hadoop/tmp/dfs/data</value>
    </property>
</configuration>
<configuration>
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>hadoop-0</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.address</name>
        <value>hadoop-0:10020</value>
    </property>
    <property>
        <name>mapreduce.jobhistory.webapp.address</name>
        <value>hadoop-0:19888</value>
    </property>
</configuration>
hadoop-0
hadoop-1
hadoop-2
# Copy the configured Hadoop tree to the other two nodes.
# scp needs -r to copy a directory, and the destination is the PARENT
# directory so the tree lands at /opt/hadoop (the original target
# /opt/hadoop would nest it at /opt/hadoop/hadoop).
$ scp -r /opt/hadoop root@hadoop-1:/opt/
$ scp -r /opt/hadoop root@hadoop-2:/opt/
# Fix ownership/permissions (run on each receiving node — the files
# arrive owned by root).
$ sudo chown -R hadoop:hadoop /opt/hadoop/
$ sudo chmod -R 775 /opt/hadoop/
# Hadoop environment variables — append to /etc/profile (or the hadoop
# user's shell profile) on every node, then re-source it.
export HADOOP_HOME=/opt/hadoop
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
# Put both sbin (daemon scripts) and bin (CLI tools) on PATH.
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
# One-time HDFS format — run ONLY on the master (hadoop-0); re-running it
# wipes the NameNode metadata.
hdfs namenode -format
# NOTE(review): start-all.sh is deprecated in Hadoop 2.x; start-dfs.sh
# followed by start-yarn.sh is the recommended equivalent.
start-all.sh
# Verify the expected daemons are running on each node.
$ jps
hadoop-0
115363 Jps
91430 DataNode
92216 NodeManager
91898 ResourceManager
91660 SecondaryNameNode
91263 NameNode
hadoop-1
18760 DataNode
18904 NodeManager
32443 Jps
hadoop-2
16913 Jps
3090 DataNode
3234 NodeManager
然后恭喜,安装完毕.
如果50070和8088页面打不开请检查防火墙
# Stop firewalld now and keep it off after reboot so the web UIs on
# ports 50070 (HDFS) and 8088 (YARN) are reachable.
systemctl stop firewalld.service
systemctl disable firewalld.service
网址:
http://hadoop-0:50070/
http://hadoop-0:8088/
其他:
错误:Host key verification failed 解决方法:
http://www.51testing.com/html/38/225738-234384.html