Building a Hadoop Cluster on Virtual Machines, Part 5: Installing Hadoop

Some of the images in this series come from Heima Programmer (黑马程序员).
Step 2: Edit the configuration files (note: every configuration file must be saved with UTF-8 encoding).
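If you want to double-check a file's encoding from the shell rather than in the editor (an optional check, not part of the original steps), `file -i` reports the charset:

```sh
# ASCII-only files show charset=us-ascii, which is a valid subset of UTF-8
file -i /export/servers/hadoop-2.10.1/etc/hadoop/core-site.xml
```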

Edit the files remotely with UltraEdit, starting with core-site.xml:

```xml
<configuration>
	<!-- URI of the default file system: the NameNode on node01 -->
	<property>
		<name>fs.default.name</name>
		<value>hdfs://node01:8020</value>
	</property>
	<!-- Base directory for Hadoop temporary files -->
	<property>
		<name>hadoop.tmp.dir</name>
		<value>/export/servers/hadoop-2.10.1/hadoopDatas/tempDatas</value>
	</property>
	<!-- Read/write buffer size, in bytes -->
	<property>
		<name>io.file.buffer.size</name>
		<value>4096</value>
	</property>
	<!-- Minutes a deleted file stays in the trash: 10080 = 7 days -->
	<property>
		<name>fs.trash.interval</name>
		<value>10080</value>
	</property>
</configuration>
```
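Later, once the cluster is running, you can confirm which value Hadoop actually resolved for a key; a quick sanity check (assuming the PATH setup done at the end of this section) is:

```sh
# fs.default.name is the legacy alias; Hadoop resolves it as fs.defaultFS
hdfs getconf -confKey fs.defaultFS
# expected output: hdfs://node01:8020
```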

hdfs-site.xml:

```xml
<configuration>
	<!-- Web UI address of the SecondaryNameNode -->
	<property>
		<name>dfs.namenode.secondary.http-address</name>
		<value>node01:50090</value>
	</property>
	<!-- Web UI address of the NameNode -->
	<property>
		<name>dfs.namenode.http-address</name>
		<value>node01:50070</value>
	</property>
	<!-- Local directories where the NameNode stores the file system image -->
	<property>
		<name>dfs.namenode.name.dir</name>
		<value>file:///export/servers/hadoop-2.10.1/hadoopDatas/namenodeDatas,file:///export/servers/hadoop-2.10.1/hadoopDatas/namenodeDatas2</value>
	</property>
	<!-- Local directories where DataNodes store block data -->
	<property>
		<name>dfs.datanode.data.dir</name>
		<value>file:///export/servers/hadoop-2.10.1/hadoopDatas/datanodeDatas,file:///export/servers/hadoop-2.10.1/hadoopDatas/datanodeDatas2</value>
	</property>
	<!-- Directory for the NameNode edit log -->
	<property>
		<name>dfs.namenode.edits.dir</name>
		<value>file:///export/servers/hadoop-2.10.1/hadoopDatas/nn/edits</value>
	</property>
	<!-- Checkpoint directories used by the SecondaryNameNode -->
	<property>
		<name>dfs.namenode.checkpoint.dir</name>
		<value>file:///export/servers/hadoop-2.10.1/hadoopDatas/snn/name</value>
	</property>
	<property>
		<name>dfs.namenode.checkpoint.edits.dir</name>
		<value>file:///export/servers/hadoop-2.10.1/hadoopDatas/dfs/snn/edits</value>
	</property>
	<!-- Number of replicas kept per block -->
	<property>
		<name>dfs.replication</name>
		<value>2</value>
	</property>
	<!-- Disable HDFS permission checking (acceptable on a learning cluster) -->
	<property>
		<name>dfs.permissions</name>
		<value>false</value>
	</property>
	<!-- Block size, in bytes -->
	<property>
		<name>dfs.blocksize</name>
		<value>134217728</value>
	</property>
</configuration>
```
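The block size is given in bytes; a quick check confirms it is the standard 128 MB (plain arithmetic, nothing Hadoop-specific):

```sh
echo $((134217728 / 1024 / 1024))   # prints 128 (MB)
```

With three DataNodes and dfs.replication set to 2, each block is stored on two of the three machines.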

hadoop-env.sh
Add:

```sh
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.262.b10-0.el7_8.x86_64
```
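The exact JDK path differs between machines and OpenJDK builds. One way to discover it before hard-coding it (an optional helper, not from the original post) is to resolve the java symlink:

```sh
# Follow the symlink chain to the real binary, then strip the bin/java suffix
readlink -f "$(which java)" | sed 's|/jre/bin/java$||; s|/bin/java$||'
```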


Edit mapred-site.xml:

```xml
<configuration>
	<!-- Run MapReduce jobs on YARN -->
	<property>
		<name>mapreduce.framework.name</name>
		<value>yarn</value>
	</property>
	<!-- Let small jobs run as "uber" tasks inside the ApplicationMaster JVM -->
	<property>
		<name>mapreduce.job.ubertask.enable</name>
		<value>true</value>
	</property>
	<!-- RPC address of the JobHistory server -->
	<property>
		<name>mapreduce.jobhistory.address</name>
		<value>node01:10020</value>
	</property>
	<!-- Web UI address of the JobHistory server -->
	<property>
		<name>mapreduce.jobhistory.webapp.address</name>
		<value>node01:19888</value>
	</property>
</configuration>
```
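Hadoop 2.x ships only a template for this file, so if mapred-site.xml does not exist yet you most likely need to create it from the template first:

```sh
cd /export/servers/hadoop-2.10.1/etc/hadoop
cp mapred-site.xml.template mapred-site.xml
```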

Configure yarn-site.xml:

```xml
<configuration>
	<!-- Host that runs the ResourceManager -->
	<property>
		<name>yarn.resourcemanager.hostname</name>
		<value>node01</value>
	</property>
	<!-- Auxiliary service required for the MapReduce shuffle -->
	<property>
		<name>yarn.nodemanager.aux-services</name>
		<value>mapreduce_shuffle</value>
	</property>
	<!-- Aggregate container logs onto HDFS -->
	<property>
		<name>yarn.log-aggregation-enable</name>
		<value>true</value>
	</property>
	<!-- Keep aggregated logs for 604800 seconds = 7 days -->
	<property>
		<name>yarn.log-aggregation.retain-seconds</name>
		<value>604800</value>
	</property>
	<!-- Memory (MB) each NodeManager can hand out to containers -->
	<property>
		<name>yarn.nodemanager.resource.memory-mb</name>
		<value>20480</value>
	</property>
	<!-- Smallest container allocation, in MB -->
	<property>
		<name>yarn.scheduler.minimum-allocation-mb</name>
		<value>2048</value>
	</property>
	<!-- Allowed virtual-to-physical memory ratio per container -->
	<property>
		<name>yarn.nodemanager.vmem-pmem-ratio</name>
		<value>2.1</value>
	</property>
</configuration>
```
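Scale yarn.nodemanager.resource.memory-mb to the RAM you actually gave each VM; 20480 MB assumes fairly large VMs, and with a 2048 MB minimum allocation each node can host at most 20480 / 2048 = 10 minimum-size containers.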



mapred-env.sh
Add the same JAVA_HOME line:

```sh
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.262.b10-0.el7_8.x86_64
```

slaves file
List every host that should run a DataNode and NodeManager (here the master node01 also works as a worker):

```
node01
node02
node03
```
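(In Hadoop 3.x this file was renamed to workers; in 2.10.1 it is still called slaves.)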

Create the data directories on all three machines:

```sh
mkdir -p /export/servers/hadoop-2.10.1/hadoopDatas/namenodeDatas
mkdir -p /export/servers/hadoop-2.10.1/hadoopDatas/namenodeDatas2
mkdir -p /export/servers/hadoop-2.10.1/hadoopDatas/datanodeDatas
mkdir -p /export/servers/hadoop-2.10.1/hadoopDatas/datanodeDatas2
mkdir -p /export/servers/hadoop-2.10.1/hadoopDatas/nn/edits
mkdir -p /export/servers/hadoop-2.10.1/hadoopDatas/snn/name
mkdir -p /export/servers/hadoop-2.10.1/hadoopDatas/dfs/snn/edits
```
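If you prefer, bash brace expansion creates the same tree in one command (equivalent to the seven calls above):

```sh
mkdir -p /export/servers/hadoop-2.10.1/hadoopDatas/{namenodeDatas,namenodeDatas2,datanodeDatas,datanodeDatas2,nn/edits,snn/name,dfs/snn/edits}
```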

Copy Hadoop from node01 to the other two virtual machines (run from /export/servers, the parent directory of hadoop-2.10.1):

```sh
scp -r hadoop-2.10.1 node02:$PWD
scp -r hadoop-2.10.1 node03:$PWD
```
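scp will prompt for a password on each copy unless passwordless SSH between the nodes has already been set up, as is typically done earlier in a cluster build like this one.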

Configure the environment variables (add these lines on each of the three machines, typically in /etc/profile):

```sh
#Hadoop environment
export HADOOP_HOME=/export/servers/hadoop-2.10.1
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
```
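After saving, reload the profile and verify that the hadoop command is on the PATH (assuming the lines went into /etc/profile):

```sh
source /etc/profile
hadoop version   # should report Hadoop 2.10.1
```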


