vim /etc/profile
#set java environment
export JAVA_HOME=/usr/java/jdk/
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
#HADOOP_HOME
export HADOOP_HOME=/opt/hadoop-3.2.1
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
Reload the profile so the new environment variables take effect:
source /etc/profile
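To confirm that the variables point at working installations, a quick check (not part of the original steps) is:
java -version
hadoop version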
vim ${HADOOP_HOME}/etc/hadoop/hadoop-env.sh
vim /opt/hadoop-3.2.1/etc/hadoop/mapred-env.sh
vim /opt/hadoop-3.2.1/etc/hadoop/yarn-env.sh
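The original does not show what changes go into these three scripts; for a setup like this the usual edit is to hard-code JAVA_HOME in each of them (the path below simply mirrors /etc/profile above and is an assumption):
export JAVA_HOME=/usr/java/jdk/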
vim /opt/hadoop-3.2.1/etc/hadoop/core-site.xml
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://192.168.137.135:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/opt/hadoop-3.2.1/tmp</value>
  </property>
</configuration>
You can create the temporary directory in advance, but it is not required.
vim /opt/hadoop-3.2.1/etc/hadoop/hdfs-site.xml
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.http.address</name>
    <value>192.168.137.135:50070</value>
  </property>
</configuration>
vim /opt/hadoop-3.2.1/etc/hadoop/mapred-site.xml
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
vim /opt/hadoop-3.2.1/etc/hadoop/yarn-site.xml
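The yarn-site.xml contents are not shown in the original; a minimal single-node configuration usually looks like the sketch below (both values are assumptions; the hostname simply echoes the IP used elsewhere in this guide):
<configuration>
  <!-- assumed values; adjust to your own host -->
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>192.168.137.135</value>
  </property>
</configuration>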
cd ~
ssh-keygen -t rsa
Press Enter at every prompt to accept the defaults.
cd .ssh/
cp id_rsa.pub authorized_keys
ssh 192.168.137.135
If the login succeeds without asking for a password, passwordless SSH is set up.
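If it still prompts for a password, overly loose permissions are the usual culprit; tightening them is a harmless precaution (not a step from the original):
chmod 700 ~/.ssh
chmod 600 ~/.ssh/authorized_keys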
hdfs namenode -format
ls /opt/hadoop-3.2.1/tmp/dfs/name/current
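After a successful format this directory normally contains metadata files such as VERSION, fsimage_* and seen_txid (expected contents based on standard HDFS behavior, not shown in the original); if it is empty or missing, the format step failed.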
start-all.sh
When these six processes are running, your environment is configured correctly (see the jps check below).
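A quick way to check is with the JDK's jps tool (the list in the comment is the standard set for a single-node cluster, inferred rather than copied from the original):
jps
# Typically shows NameNode, DataNode, SecondaryNameNode, ResourceManager,
# NodeManager, plus the Jps process itself, six entries in total.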
The firewall blocks requests that do not come from the machine itself, so if external clients need to reach the Hadoop services the relevant ports must be opened; on a test virtual machine you can simply turn the firewall off.
Stop it temporarily:
systemctl stop firewalld
Disable it permanently so it no longer starts on boot:
systemctl disable firewalld
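If you would rather keep firewalld running, opening just the ports used in this guide also works (a sketch; the port numbers follow the configuration above):
firewall-cmd --permanent --add-port=9000/tcp
firewall-cmd --permanent --add-port=50070/tcp
firewall-cmd --reload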
http://192.168.137.135:50070 is the NameNode web UI (the dfs.http.address set above).
http://192.168.137.135:9000 is the HDFS RPC endpoint from fs.defaultFS; it is not a web page, clients such as the hdfs command connect to it.
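As a final smoke test from the command line (the directory name is arbitrary and only for illustration):
hdfs dfs -mkdir /test
hdfs dfs -ls /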