tar -zxvf hadoop-3.0.0.tar.gz
vi /etc/profile
export JAVA_HOME=/usr/local/java
export HADOOP_HOME=/usr/local/hadoop3
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
source /etc/profile
Note: /usr/local/hadoop3 is the Hadoop extraction path and /usr/local/java is the JDK installation path; adjust both to match your machine.
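A quick check that the variables took effect in the current shell:
java -version
hadoop version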
Add the following to $HADOOP_HOME/etc/hadoop/hadoop-env.sh (Hadoop 3 refuses to start the HDFS and YARN daemons as root unless the user for each service is declared):
vi $HADOOP_HOME/etc/hadoop/hadoop-env.sh
export JAVA_HOME=/usr/local/java
export HADOOP_HOME=/root/hadoop-3.0.0
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
Note: HADOOP_HOME here must match your actual extraction path (the earlier steps used /usr/local/hadoop3).
Edit the configuration files under $HADOOP_HOME/etc/hadoop/:

core-site.xml:
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://hadoop102:9000</value>
    </property>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/root/hadoop-3.0.0/data</value>
    </property>
</configuration>

hdfs-site.xml:
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>

yarn-site.xml:
<configuration>
    <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>hadoop102</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>

mapred-site.xml:
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <property>
        <name>mapreduce.admin.user.env</name>
        <value>HADOOP_MAPRED_HOME=$HADOOP_COMMON_HOME</value>
    </property>
    <property>
        <name>yarn.app.mapreduce.am.env</name>
        <value>HADOOP_MAPRED_HOME=$HADOOP_COMMON_HOME</value>
    </property>
</configuration>
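To confirm a setting is being picked up from the new files, hdfs getconf reads the effective configuration:
hdfs getconf -confKey fs.defaultFS
This should print hdfs://hadoop102:9000.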
Stop the firewall:
systemctl stop firewalld.service
Start the firewall:
systemctl start firewalld.service
Disable the firewall at boot:
systemctl disable firewalld.service
Enable the firewall at boot:
systemctl enable firewalld.service
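To check the current firewall state:
systemctl status firewalld.service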
Format the NameNode (only on first setup; reformatting later erases the HDFS metadata under hadoop.tmp.dir):
hdfs namenode -format
ssh-keygen              # generate the private and public keys
ssh-copy-id localhost   # copy the public key to the machine that should allow passwordless login
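A quick check that passwordless login works (no password prompt should appear):
ssh localhost
exit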
start-dfs.sh
start-yarn.sh
Run jps to confirm the daemons started; you should see NameNode, DataNode, SecondaryNameNode, ResourceManager, and NodeManager.
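The wordcount example below needs its input in HDFS first; a minimal sketch, assuming a local file named wordcount.log (the file name and the /logs directory are just the paths this walkthrough uses):
hdfs dfs -mkdir -p /logs
hdfs dfs -put wordcount.log /logs/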
View the input file: hdfs dfs -cat /logs/wordcount.log
hadoop jar /usr/local/hadoop3/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.0.0.jar wordcount /logs/wordcount.log /output/wordcount
Note: /usr/local/hadoop3 is the extraction path, /logs/wordcount.log is the input file path, and /output/wordcount is the output path, which must not exist before the job runs.
View the results (each line is a word and its count, tab-separated): hdfs dfs -text /output/wordcount/part-r-00000
NameNode web UI: http://hadoop102:9870/
YARN ResourceManager web UI: http://hadoop102:8088/
Upload a file to the HDFS root directory:
hadoop fs -put jdk1.8.tar.gz /
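To confirm the upload:
hadoop fs -ls /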