centos7最小化安装没有 ifconfig
命令
yum -y install net-tools
修改网卡名字为eth0
vim /etc/sysconfig/network
NETWORKING=yes
HOSTNAME=node1
注意:CentOS 7 中该文件的 HOSTNAME 设置已不再生效,最后还要分别修改各节点的 /etc/hostname(或使用 hostnamectl set-hostname node1)
vim /etc/sysconfig/network-scripts/ifcfg-eth0
TYPE="Ethernet"
PROXY_METHOD="none"
BROWSER_ONLY="no"
BOOTPROTO="static"
DEFROUTE="yes"
IPV4_FAILURE_FATAL="no"
IPV6INIT="yes"
IPV6_AUTOCONF="yes"
IPV6_DEFROUTE="yes"
IPV6_FAILURE_FATAL="no"
IPV6_ADDR_GEN_MODE="stable-privacy"
NAME="eth0"
UUID="41203fac-73cf-4b2d-bd9d-d7c5f72d7aa8"
DEVICE="eth0"
ONBOOT="yes"
IPADDR="192.168.73.101"
NETMASK="255.255.255.0"
GATEWAY="192.168.73.2"
vi /etc/resolv.conf
# Generated by NetworkManager
nameserver 192.168.73.2
vim /etc/hosts
127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4
::1 localhost localhost.localdomain localhost6 localhost6.localdomain6
192.168.73.101 node1
192.168.73.102 node2
192.168.73.103 node3
192.168.73.104 node4
改变yum源为阿里云
rpm -qa | grep jdk
rpm -i /PATH/TO/PACKAGE_FILE
-h: 以#显示进度;每个#表示2%;
-v: 显示详细过程
-vv: 更详细的过程
rpm -ivh /PATH/TO/PACKAGE_FILE
--nodeps: 忽略依赖关系;
--replacepkgs: 重新安装,替换原有安装;
--force: 强行安装,可以实现重装或降级;
rpm -ivh --prefix=/usr/apps jdk-8u181-linux-x64.rpm
配置环境变量:编辑/etc/profile 或者 ~/.bash_profile
export JAVA_HOME=/usr/apps/jdk1.8.0_181-amd64
export PATH=$PATH:$JAVA_HOME/bin
source /etc/profile
vim /etc/sysconfig/network-scripts/ifcfg-eth0
删掉HWADDR(物理地址)和UUID,重启系统会自动创建,根据个人情况修改成如下配置
TYPE="Ethernet"
PROXY_METHOD="none"
BROWSER_ONLY="no"
BOOTPROTO="static"
DEFROUTE="yes"
IPV4_FAILURE_FATAL="no"
IPV6INIT="yes"
IPV6_AUTOCONF="yes"
IPV6_DEFROUTE="yes"
IPV6_FAILURE_FATAL="no"
IPV6_ADDR_GEN_MODE="stable-privacy"
NAME="eth0"
DEVICE="eth0"
ONBOOT="yes"
IPADDR="192.168.73.102"
NETMASK="255.255.255.0"
GATEWAY="192.168.73.2"
vim /etc/sysconfig/network
# Created by anaconda
NETWORKING=yes
HOSTNAME=node2
四个节点同时操作
useradd hadoop
passwd hadoop
yum -y install lrzsz
sudo vim /etc/profile
export HADOOP_HOME=/home/hadoop/apps/hadoop-2.7.6
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
将配置文件分别拷贝到node2,node3,node4
sudo scp /etc/profile node2:/etc/
sudo scp /etc/profile node3:/etc/
sudo scp /etc/profile node4:/etc/
同时操作所有节点,重新加载配置文件
source /etc/profile
配置文件所在目录 /home/hadoop/apps/hadoop-2.7.6/etc/hadoop
hadoop-env.sh
core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://node1:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/home/hadoop/hdpdata</value>
</property>
</configuration>
hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
</configuration>
mapred-site.xml.template
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
重命名 mv mapred-site.xml.template mapred-site.xml
yarn-site.xml
<configuration>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>node1</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
/home/hadoop/apps/hadoop-2.7.6/etc/hadoop/slaves
node2
node3
node4
将 apps 目录分别拷贝到node2,node3,node4
scp -r apps node2:/home/hadoop/
scp -r apps node3:/home/hadoop/
scp -r apps node4:/home/hadoop/
在node1上格式化 NameNode(hadoop namenode -format 已废弃,推荐使用如下命令)
hdfs namenode -format
start-dfs.sh
发现报错:
[hadoop@node1 ~]$ start-dfs.sh
Starting namenodes on [node1]
node1: chown: changing ownership of ‘/home/hadoop/apps/hadoop-2.7.6/logs’: Operation not permitted
node1: starting namenode, logging to /home/hadoop/apps/hadoop-2.7.6/logs/hadoop-hadoop-namenode-node1.out
node1: /home/hadoop/apps/hadoop-2.7.6/sbin/hadoop-daemon.sh: line 159: /home/hadoop/apps/hadoop-2.7.6/logs/hadoop-hadoop-namenode-node1.out: Permission denied
解决方法:将hadoop主目录授权给当前用户(各节点都操作一次)
sudo chown -R hadoop /home/hadoop
再次启动hdfs
将node1,node2,node3,node4的IP地址添加到 windows 的 hosts 文件
前提要关闭 节点上的防火墙,关闭防火墙开机启动 centos7关闭防火墙
启动: systemctl start firewalld
关闭: systemctl stop firewalld
查看状态: systemctl status firewalld
开机禁用 : systemctl disable firewalld
开机启用 : systemctl enable firewalld
HDFS管理界面 http://node1:50070
Mapreduce 管理界面 http://node1:8088