Reference: www.cnblogs.com/forbeat/p/8179877.html
Restart CentOS: reboot
# set JAVA_HOME
export JAVA_HOME=/home/hadoop/program/jdk1.8.0_151   ## note: change this to the directory where your JDK was extracted
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
# set HADOOP_HOME
export HADOOP_HOME=/home/hadoop/program/hadoop-3.0.0
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$HADOOP_HOME/lib"
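After saving these exports, reload the environment and confirm that both the JDK and the Hadoop binaries are on the PATH. A minimal sketch, assuming the exports were appended to /etc/profile (adjust if you used ~/.bashrc instead):

# reload the environment in the current shell
source /etc/profile

# verify the JDK is found
java -version        # should report 1.8.0_151

# verify the Hadoop binaries are found
hadoop version       # should report Hadoop 3.0.0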
Modify hadoop-env.sh and set JAVA_HOME there as well:
export JAVA_HOME=/home/hadoop/program/jdk1.8.0_151   ## note: change this to the directory where your JDK was extracted
Modify core-site.xml to set the default filesystem URI and the temporary directory:

<configuration>
  <property>
    <!-- fs.defaultFS is the non-deprecated name for this property -->
    <name>fs.default.name</name>
    <value>hdfs://hadoopmaster:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/home/hadoop/hadoop/tmp</value>
  </property>
</configuration>
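It is worth creating the temporary directory up front so the daemons do not fall back to /tmp. A minimal sketch, assuming the hadoop.tmp.dir value above is kept as-is:

# create the directory referenced by hadoop.tmp.dir
mkdir -p /home/hadoop/hadoop/tmp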
Modify hdfs-site.xml to configure the replication factor and the data storage paths:

<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>/home/hadoop/hadoop/hdfs/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop/hadoop/hdfs/data</value>
  </property>
</configuration>
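A minimal sketch of preparing these directories and formatting HDFS before the first start, assuming the paths configured above (formatting erases existing HDFS metadata, so only run it on a fresh install):

# create the directories referenced by hdfs-site.xml
mkdir -p /home/hadoop/hadoop/hdfs/name
mkdir -p /home/hadoop/hadoop/hdfs/data

# format the NameNode once, before the first start-up
hdfs namenode -format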
Modify mapred-site.xml so that MapReduce jobs run on the YARN framework; compared with earlier versions, the mapreduce.application.classpath property below is a new addition. The classpath entries must point to your own Hadoop installation directory (here /home/hadoop/program/hadoop-3.0.0):

<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
  <property>
    <name>mapreduce.application.classpath</name>
    <value>
      /home/hadoop/program/hadoop-3.0.0/etc/hadoop,
      /home/hadoop/program/hadoop-3.0.0/share/hadoop/common/*,
      /home/hadoop/program/hadoop-3.0.0/share/hadoop/common/lib/*,
      /home/hadoop/program/hadoop-3.0.0/share/hadoop/hdfs/*,
      /home/hadoop/program/hadoop-3.0.0/share/hadoop/hdfs/lib/*,
      /home/hadoop/program/hadoop-3.0.0/share/hadoop/mapreduce/*,
      /home/hadoop/program/hadoop-3.0.0/share/hadoop/mapreduce/lib/*,
      /home/hadoop/program/hadoop-3.0.0/share/hadoop/yarn/*,
      /home/hadoop/program/hadoop-3.0.0/share/hadoop/yarn/lib/*
    </value>
  </property>
</configuration>
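A quick sanity check that the classpath directories actually contain jars; a sketch, assuming HADOOP_HOME from the exports above:

# each command should list jar files; empty output means the classpath entries are wrong
ls $HADOOP_HOME/share/hadoop/mapreduce/*.jar
ls $HADOOP_HOME/share/hadoop/yarn/*.jar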
Modify yarn-site.xml to set the ResourceManager host and the NodeManager auxiliary shuffle service:

<configuration>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>hadoopmaster</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
</configuration>
Add the hostname to the workers file: hadoopmaster
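With the configuration in place, a minimal sketch of starting the daemons and checking them, assuming passwordless SSH to the hosts listed in workers has already been set up:

# start HDFS (NameNode, DataNode, SecondaryNameNode)
start-dfs.sh

# start YARN (ResourceManager, NodeManager)
start-yarn.sh

# list the running Java daemons on this host
jps

In Hadoop 3 the NameNode web UI listens on port 9870 and the ResourceManager UI on port 8088, which gives another quick way to confirm the daemons are up.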