编辑文件 vi /etc/sysconfig/network-scripts/ifcfg-eth0
DEVICE=eth0
#HWADDR=00:0C:29:42:15:C2
TYPE=Ethernet
ONBOOT=yes
NM_CONTROLLED=yes
BOOTPROTO=static
IPADDR=本机静态IP
NETMASK=255.255.255.0
GATEWAY=网关
DNS1=223.5.5.5
DNS2=114.114.114.114
编辑文件 vi /etc/sysconfig/network
NETWORKING=yes
HOSTNAME=node01
vi /etc/hosts
本机IP node01
#查看防火墙状态
systemctl status firewalld.service
#关闭防火墙
systemctl stop firewalld.service
#开机禁止自启动(CentOS 7 使用 systemctl,服务名为 firewalld 而非 iptables)
systemctl disable firewalld.service
#关闭 selinux
vi /etc/selinux/config
SELINUX=disabled
#安装时间同步服务
yum install ntp -y
#修改配置
vi /etc/ntp.conf
server ntp1.aliyun.com
#启动同步服务(与前面保持一致,使用 systemctl)
systemctl start ntpd
#开启开机自启动
systemctl enable ntpd
卸载已安装的jdk
rpm -qa|grep java
rpm -e --nodeps xxx
下载好安装包,jdk-8u181-linux-x64.rpm
rpm 包可用 rpm 命令直接安装,无需手动解压
rz 命令(需先安装 lrzsz:yum install lrzsz -y)
将本地文件上传到服务器。
rpm -i 安装jdk
rpm -i jdk-8u181-linux-x64.rpm
配置环境变量
vi /etc/profile
export JAVA_HOME=/usr/java/default
export PATH=$PATH:$JAVA_HOME/bin
配置文件生效
source /etc/profile
解压
tar xf hadoop-2.6.5.tar.gz
配置环境变量
export JAVA_HOME=/usr/java/default
export HADOOP_HOME=/opt/bigdata/hadoop-2.6.5
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
Hadoop: Setting up a Single Node Cluster.
配置hadoop的角色:
In the distribution, edit the file etc/hadoop/hadoop-env.sh to define some parameters as follows:
# set to the root of your Java installation
export JAVA_HOME=/usr/java/default
配置核心配置文件指定启动NN
vi etc/hadoop/core-site.xml(Hadoop 安装目录下的相对路径,与下文 hdfs-site.xml 一致):
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
指定副本以及路径
vi etc/hadoop/hdfs-site.xml:
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
vi etc/hadoop/slaves
node01
Format the filesystem:
$ bin/hdfs namenode -format
Start NameNode daemon and DataNode daemon:
$ sbin/start-dfs.sh
启动
start-dfs.sh
Starting namenodes on [node01]
node01: starting namenode, logging to /opt/bigdata/hadoop-2.6.5/logs/hadoop-root-namenode-node01.out
node01: starting datanode, logging to /opt/bigdata/hadoop-2.6.5/logs/hadoop-root-datanode-node01.out
Starting secondary namenodes [node01]
node01: starting secondarynamenode, logging to /opt/bigdata/hadoop-2.6.5/logs/hadoop-root-secondarynamenode-node01.out
http://node01:50070/explorer.html#/
[root@node01 hadoop]# hdfs dfs -mkdir /bigdata
[root@node01 hadoop]# hdfs dfs -mkdir -p /data/local